blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
53a0a598618b2abe17ca21723b62f4fddc3b80e0
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/run-pass/issues/issue-3935.rs
|
UTF-8
| 235
| 2.90625
| 3
|
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// run-pass
// Regression test: `#[derive(PartialEq)]` on a struct with a `String`
// field must yield a working inequality comparison.
#[derive(PartialEq)]
struct Bike {
    name: String,
}

pub fn main() {
    let first = Bike { name: String::from("schwinn") };
    let second = Bike { name: String::from("surly") };
    // Differently named bikes must compare unequal.
    assert!(!(first == second));
}
| true
|
6570fcf3bc138df634bded701e930ce0a8034aa0
|
Rust
|
vadhri/exercism-rust
|
/matching-brackets/src/lib.rs
|
UTF-8
| 732
| 3.40625
| 3
|
[] |
no_license
|
/// Returns `true` if every bracket pair (`()`, `[]`, `{}`) in `string`
/// is balanced and properly nested. All non-bracket characters are
/// ignored.
///
/// Improvements over the previous version: no `' '` sentinel character,
/// early return on the first mismatched closer instead of scanning the
/// rest of the input, and `is_empty()` instead of matching on `len()`.
pub fn brackets_are_balanced(string: &str) -> bool {
    let mut stack: Vec<char> = Vec::new();
    for c in string.chars() {
        match c {
            '(' | '[' | '{' => stack.push(c),
            ')' | ']' | '}' => {
                // A closer must match the most recently opened bracket.
                let expected = match c {
                    ')' => '(',
                    ']' => '[',
                    _ => '{',
                };
                if stack.pop() != Some(expected) {
                    return false;
                }
            }
            _ => {}
        }
    }
    // Balanced iff every opener was closed.
    stack.is_empty()
}
| true
|
0829f48d87e0df111260b822512e8dc5728f5e71
|
Rust
|
jDomantas/plank
|
/plank-frontend/src/build_cfg.rs
|
UTF-8
| 29,143
| 2.625
| 3
|
[
"MIT"
] |
permissive
|
use ast::cfg;
use ast::typed::{self as t, Mutability as Mut};
use plank_syntax::position::{Span, Spanned};
use std::collections::HashMap;
use CompileCtx;
/// Jump targets of the innermost enclosing loop:
/// `continue` jumps to `start`, `break` jumps to `after`.
struct LoopDescr {
    start: cfg::BlockId,
    after: cfg::BlockId,
}
/// A place expression: something that can be stored into or have its
/// address taken.
#[derive(Debug)]
enum LValue {
    // A local register plus a chain of field indices into it.
    Reg(Mut, cfg::Reg, Vec<usize>),
    // A dereferenced pointer value (the `t::Type` is the type of the
    // pointer expression being dereferenced) plus a field chain.
    Deref(Mut, RValue, t::Type, Vec<usize>),
    // Syntactically not a place (literal, call, ...); reported to the user.
    Invalid,
    // An error was already reported upstream; suppress further diagnostics.
    Error,
}
impl LValue {
    /// Appends a field index to this place's access path.
    /// Error-ish values carry no path, so they are left untouched.
    fn add_field(&mut self, index: usize) {
        match *self {
            LValue::Error | LValue::Invalid => {}
            LValue::Reg(_, _, ref mut fields) | LValue::Deref(_, _, _, ref mut fields) => {
                fields.push(index);
            }
        }
    }
}
/// The result of lowering an expression.
#[derive(Debug)]
enum RValue {
    // A temporary value; if it lives in a register, that register is
    // dropped by `drop_value` once the value has been consumed.
    Temp(cfg::Value),
    // A named variable's register; dropped at scope end, not per-use.
    Var(cfg::Reg),
}
impl RValue {
    /// Produces the `cfg::Value` this rvalue denotes, without consuming it.
    fn as_value(&self) -> cfg::Value {
        match *self {
            RValue::Var(reg) => cfg::Value::Reg(reg),
            RValue::Temp(ref val) => val.clone(),
        }
    }
}
/// Per-function state for lowering a typed AST function into a CFG.
struct Builder<'a> {
    // Registers holding the function's parameters, in order.
    parameters: Vec<cfg::Reg>,
    // Type of every register allocated so far.
    registers: HashMap<cfg::Reg, cfg::Type>,
    // Finished blocks, keyed by id.
    blocks: HashMap<cfg::BlockId, cfg::Block>,
    // Innermost loop, if any — target of `break`/`continue`.
    current_loop: Option<LoopDescr>,
    ctx: &'a mut CompileCtx,
    // Counters for fresh block/register ids.
    next_block_id: u32,
    next_reg: u32,
    // Bidirectional variable <-> register mapping.
    var_registers: HashMap<t::Symbol, cfg::Reg>,
    register_vars: HashMap<cfg::Reg, t::Symbol>,
    // The block currently being filled: its id and its instructions.
    current_block: Option<(cfg::BlockId, Vec<Spanned<cfg::Instruction>>)>,
    // Declared mutability of each variable, for store/address checks.
    var_mutability: HashMap<t::Symbol, Mut>,
}
impl<'a> Builder<'a> {
    /// Creates an empty builder for one function.
    fn new(ctx: &'a mut CompileCtx) -> Self {
        Builder {
            parameters: Vec::new(),
            registers: HashMap::new(),
            blocks: HashMap::new(),
            current_loop: None,
            ctx,
            next_block_id: 0,
            next_reg: 0,
            var_registers: HashMap::new(),
            register_vars: HashMap::new(),
            current_block: None,
            var_mutability: HashMap::new(),
        }
    }
fn new_register(&mut self, typ: t::Type) -> cfg::Reg {
let reg = cfg::Reg(self.next_reg);
self.registers.insert(reg, typ);
self.next_reg += 1;
reg
}
    /// Allocates a register for a named variable and records the
    /// variable <-> register mapping in both directions.
    fn new_var_register(&mut self, symbol: t::Symbol, typ: t::Type) -> cfg::Reg {
        let reg = self.new_register(typ);
        self.var_registers.insert(symbol, reg);
        self.register_vars.insert(reg, symbol);
        reg
    }
    /// Emits a `Drop` for a consumed temporary held in a register.
    /// Named variables (`RValue::Var`) and non-register temporaries are
    /// intentionally left alone — variables are dropped at scope end.
    fn drop_value(&mut self, value: &RValue, span: Span) {
        if let RValue::Temp(cfg::Value::Reg(reg)) = *value {
            self.emit_instruction(cfg::Instruction::Drop(reg), span);
        }
    }
fn new_block(&mut self) -> cfg::BlockId {
let block = cfg::BlockId(self.next_block_id);
self.next_block_id += 1;
block
}
fn emit_instruction(&mut self, op: cfg::Instruction, span: Span) {
self.current_block
.as_mut()
.unwrap()
.1
.push(Spanned::new(op, span));
}
    /// Emits the instruction that stores `value` into the place `target`,
    /// reporting a diagnostic for invalid or non-`mut` targets.
    fn emit_store(&mut self, target: Spanned<LValue>, value: Spanned<cfg::Value>, op_span: Span) {
        let target_span = Spanned::span(&target);
        match Spanned::into_value(target) {
            LValue::Invalid => {
                // Not a place expression at all (literal, call result, ...).
                self.ctx
                    .reporter
                    .error("invalid lvalue", target_span)
                    .span(target_span)
                    .build();
            }
            // Already reported upstream; stay silent.
            LValue::Error => {}
            LValue::Deref(Mut::Const, _, _, _) | LValue::Reg(Mut::Const, _, _) => {
                self.ctx
                    .reporter
                    .error("cannot modify non-mut value", target_span)
                    .span(target_span)
                    .build();
            }
            LValue::Deref(Mut::Mut, val, typ, fields) => {
                // Store through the pointer, offset by the field path.
                self.emit_instruction(
                    cfg::Instruction::DerefStore(
                        Spanned::new(val.as_value(), target_span),
                        typ,
                        fields,
                        value,
                    ),
                    op_span,
                );
                // Release the temporary holding the pointer, if any.
                self.drop_value(&val, target_span);
            }
            LValue::Reg(Mut::Mut, reg, ref fields) if fields.is_empty() => {
                // Whole-register store: a plain assignment suffices.
                self.emit_instruction(cfg::Instruction::Assign(reg, value), op_span);
            }
            LValue::Reg(Mut::Mut, reg, fields) => {
                self.emit_instruction(
                    cfg::Instruction::FieldStore(Spanned::new(reg, target_span), fields, value),
                    op_span,
                );
            }
        }
    }
    /// Emits code that writes the address of the place `value` into
    /// `target`. `mutable` is true for `&mut`-style address-of, which is
    /// rejected on non-`mut` places.
    fn emit_take_address(
        &mut self,
        target: cfg::Reg,
        value: Spanned<LValue>,
        op_span: Span,
        mutable: bool,
    ) {
        let value_span = Spanned::span(&value);
        match Spanned::into_value(value) {
            LValue::Invalid => {
                self.ctx
                    .reporter
                    .error("invalid lvalue", value_span)
                    .span(value_span)
                    .build();
            }
            // Already reported upstream; stay silent.
            LValue::Error => {}
            LValue::Deref(Mut::Const, _, _, _) | LValue::Reg(Mut::Const, _, _) if mutable => {
                let span = op_span.merge(value_span);
                self.ctx
                    .reporter
                    .error("cannot take mutable reference to non-mut value", span)
                    .span(span)
                    .build();
            }
            LValue::Deref(_, ref val, _, ref fields) if fields.is_empty() => {
                // `&*p` with no field path is just `p`.
                self.emit_instruction(
                    cfg::Instruction::Assign(target, Spanned::new(val.as_value(), value_span)),
                    op_span,
                );
                self.drop_value(val, value_span);
            }
            LValue::Deref(_, val, typ, fields) => {
                // `&(*p).a.b` becomes pointer arithmetic on `p`.
                self.emit_instruction(
                    cfg::Instruction::UnaryOp(
                        target,
                        cfg::UnaryOp::OffsetAddress(typ, fields),
                        Spanned::new(val.as_value(), value_span),
                    ),
                    op_span,
                );
                self.drop_value(&val, value_span);
            }
            LValue::Reg(_, reg, fields) => {
                self.emit_instruction(
                    cfg::Instruction::TakeAddress(target, Spanned::new(reg, value_span), fields),
                    op_span,
                );
            }
        }
    }
fn start_block(&mut self, id: cfg::BlockId) {
if self.current_block.is_some() {
panic!("previous block not finished");
}
self.current_block = Some((id, Vec::new()));
}
    /// Closes the currently open block with the given terminator and
    /// fall-through link, and files it under its id.
    /// Panics if no block is open.
    fn end_block(&mut self, end: cfg::BlockEnd, link: cfg::BlockLink) {
        let (id, ops) = self.current_block.take().unwrap();
        let block = cfg::Block { ops, end, link };
        self.blocks.insert(id, block);
    }
    /// Lowers one function: registers its parameters, then builds the body.
    /// Returns the entry block id, or `None` for a body-less declaration.
    fn build_function(&mut self, f: &t::Function) -> Option<cfg::BlockId> {
        for var in &f.params {
            let param = self.new_var_register(var.name, var.typ.clone());
            self.var_mutability.insert(var.name, var.mutability);
            self.parameters.push(param);
        }
        if let Some(ref body) = f.body {
            let body_block = self.new_block();
            self.start_block(body_block);
            self.build_statement(body);
            // Control fell off the end of the body without a `return`:
            // close the dangling block with an error terminator.
            if self.current_block.is_some() {
                self.end_block(cfg::BlockEnd::Error, cfg::BlockLink::None);
            }
            Some(body_block)
        } else {
            None
        }
    }
    /// Lowers a single typed statement into CFG instructions and blocks.
    ///
    /// May close the current block and open new ones (branches, loops,
    /// `break`/`continue`/`return`); a block is always open on return.
    fn build_statement(&mut self, s: &Spanned<t::Statement>) {
        let span = Spanned::span(s);
        match **s {
            t::Statement::Block(ref stmts) => {
                for stmt in stmts {
                    let span = Spanned::span(stmt);
                    self.emit_instruction(cfg::Instruction::StartStatement, span);
                    self.build_statement(stmt);
                }
                // At the end of the block, drop every `let`-bound variable
                // the block declared, in declaration order.
                for stmt in stmts {
                    if let t::Statement::Let(_, sym, _, _) = **stmt {
                        let span = Spanned::span(stmt);
                        let reg = self.var_registers[&sym];
                        self.emit_instruction(cfg::Instruction::Drop(reg), span);
                    }
                }
            }
            t::Statement::Break => match self.current_loop {
                Some(LoopDescr { after, .. }) => {
                    // Jump past the loop; code after the `break` lands in a
                    // fresh (unreachable) block so building can continue.
                    let new = self.new_block();
                    let link = cfg::BlockLink::Strong(new);
                    self.end_block(cfg::BlockEnd::Jump(after), link);
                    self.start_block(new);
                }
                None => {
                    self.emit_instruction(cfg::Instruction::Error, span);
                    self.ctx
                        .reporter
                        .error("cannot use `break` outside loop", span)
                        .span(span)
                        .build();
                }
            },
            t::Statement::Continue => match self.current_loop {
                Some(LoopDescr { start, .. }) => {
                    // Jump back to the loop head, same dead-block trick.
                    let new = self.new_block();
                    let link = cfg::BlockLink::Strong(new);
                    self.end_block(cfg::BlockEnd::Jump(start), link);
                    self.start_block(new);
                }
                None => {
                    self.emit_instruction(cfg::Instruction::Error, span);
                    self.ctx
                        .reporter
                        .error("cannot use `continue` outside loop", span)
                        .span(span)
                        .build();
                }
            },
            t::Statement::Expr(ref e) => {
                // Evaluate for side effects, then discard the result.
                let expr = self.build_expr(e);
                self.drop_value(&expr, e.span);
            }
            t::Statement::If(ref cond, ref then, Some(ref else_)) => {
                let c = self.build_expr(cond);
                let body = self.new_block();
                let else_body = self.new_block();
                let after = self.new_block();
                let link = cfg::BlockLink::Weak(body);
                self.end_block(
                    cfg::BlockEnd::Branch(Spanned::new(c.as_value(), cond.span), body, else_body),
                    link,
                );
                // The condition temporary is dropped on both branches.
                self.start_block(body);
                self.drop_value(&c, cond.span);
                self.build_statement(then);
                let link = cfg::BlockLink::Weak(else_body);
                self.end_block(cfg::BlockEnd::Jump(after), link);
                self.start_block(else_body);
                self.drop_value(&c, cond.span);
                self.build_statement(else_);
                let link = cfg::BlockLink::Weak(after);
                self.end_block(cfg::BlockEnd::Jump(after), link);
                self.start_block(after);
            }
            t::Statement::If(ref cond, ref then, None) => {
                let c = self.build_expr(cond);
                let body = self.new_block();
                let after = self.new_block();
                let link = cfg::BlockLink::Weak(body);
                self.end_block(
                    cfg::BlockEnd::Branch(Spanned::new(c.as_value(), cond.span), body, after),
                    link,
                );
                self.start_block(body);
                self.drop_value(&c, cond.span);
                self.build_statement(then);
                let link = cfg::BlockLink::Weak(after);
                self.end_block(cfg::BlockEnd::Jump(after), link);
                // The false edge skipped the body, so the condition
                // temporary is also dropped in the join block.
                self.start_block(after);
                self.drop_value(&c, cond.span);
            }
            t::Statement::Let(mutability, name, ref typ, Some(ref value)) => {
                self.var_mutability.insert(*name, mutability);
                let typ = (**typ).clone();
                let var_register = self.new_var_register(*name, typ);
                let built_value = self.build_expr(value);
                self.emit_instruction(
                    cfg::Instruction::Assign(
                        var_register,
                        Spanned::new(built_value.as_value(), value.span),
                    ),
                    span,
                );
                self.drop_value(&built_value, value.span);
            }
            t::Statement::Let(mutability, name, ref typ, None) => {
                self.var_mutability.insert(*name, mutability);
                // give it a register, but don't initialize it
                self.new_var_register(Spanned::into_value(name), (**typ).clone());
            }
            t::Statement::Loop(ref body) => {
                let start = self.new_block();
                let after = self.new_block();
                // Save/restore the enclosing loop so nested loops route
                // their `break`/`continue` correctly.
                let outer_loop = self.current_loop.take();
                self.current_loop = Some(LoopDescr { start, after });
                let link = cfg::BlockLink::Weak(start);
                self.end_block(cfg::BlockEnd::Jump(start), link);
                self.start_block(start);
                self.build_statement(body);
                let link = cfg::BlockLink::Weak(after);
                self.end_block(cfg::BlockEnd::Jump(start), link);
                self.current_loop = outer_loop;
                self.start_block(after);
            }
            t::Statement::Return(ref e) => {
                let value = self.build_expr(e);
                // Statements after `return` go into a fresh dead block.
                let new = self.new_block();
                let link = cfg::BlockLink::Strong(new);
                self.end_block(
                    cfg::BlockEnd::Return(Spanned::new(value.as_value(), e.span)),
                    link,
                );
                self.start_block(new);
            }
            t::Statement::While(ref cond, ref body) => {
                // `start` re-evaluates the condition each iteration; the
                // body jumps back to `start`, failure falls to `after`.
                let start = self.new_block();
                let body_start = self.new_block();
                let after = self.new_block();
                let outer_loop = self.current_loop.take();
                self.current_loop = Some(LoopDescr { start, after });
                let link = cfg::BlockLink::Weak(start);
                self.end_block(cfg::BlockEnd::Jump(start), link);
                self.start_block(start);
                let c = self.build_expr(cond);
                let link = cfg::BlockLink::Weak(body_start);
                self.end_block(
                    cfg::BlockEnd::Branch(Spanned::new(c.as_value(), cond.span), body_start, after),
                    link,
                );
                self.start_block(body_start);
                self.drop_value(&c, cond.span);
                self.build_statement(body);
                let link = cfg::BlockLink::Weak(after);
                self.end_block(cfg::BlockEnd::Jump(start), link);
                self.current_loop = outer_loop;
                self.start_block(after);
                self.drop_value(&c, cond.span);
            }
            t::Statement::Error => {
                // Parser/typechecker already reported; emit a marker op.
                self.emit_instruction(cfg::Instruction::Error, span);
            }
        }
    }
fn build_expr(&mut self, e: &t::TypedExpr) -> RValue {
match *e.expr {
t::Expr::Binary(ref lhs, op, ref rhs) => match Spanned::into_value(op) {
t::BinaryOp::Assign => {
let target = self.build_expr_lvalue(lhs);
let value = self.build_expr(rhs);
self.emit_store(
Spanned::new(target, lhs.span),
Spanned::new(value.as_value(), rhs.span),
e.span,
);
value
}
t::BinaryOp::And => self.build_and(lhs, rhs),
t::BinaryOp::Or => self.build_or(lhs, rhs),
op => {
if let Some(op) = binop_to_instruction(op, &lhs.typ) {
let built_lhs = self.build_expr(lhs);
let built_rhs = self.build_expr(rhs);
let result = self.new_register(e.typ.clone());
self.emit_instruction(
cfg::Instruction::BinaryOp(
result,
op,
Spanned::new(built_lhs.as_value(), lhs.span),
Spanned::new(built_rhs.as_value(), rhs.span),
),
e.span,
);
self.drop_value(&built_lhs, lhs.span);
self.drop_value(&built_rhs, rhs.span);
RValue::Temp(cfg::Value::Reg(result))
} else {
RValue::Temp(cfg::Value::Error)
}
}
},
t::Expr::Call(ref name, ref params) => {
let callee = self.build_expr(name);
let params = params
.iter()
.map(|p| Spanned::new(self.build_expr(p), p.span))
.collect::<Vec<_>>();
let param_values = params
.iter()
.map(|p| Spanned::new(p.as_value(), Spanned::span(p)))
.collect();
let target = self.new_register(e.typ.clone());
let instr = cfg::Instruction::Call(
target,
Spanned::new(callee.as_value(), name.span),
param_values,
);
self.emit_instruction(instr, e.span);
self.drop_value(&callee, name.span);
for param in ¶ms {
self.drop_value(param, name.span);
}
RValue::Temp(cfg::Value::Reg(target))
}
t::Expr::Error => RValue::Temp(cfg::Value::Error),
t::Expr::Field(ref expr, index) => {
let built_expr = self.build_expr(expr);
let target = self.new_register(e.typ.clone());
self.emit_instruction(
cfg::Instruction::UnaryOp(
target,
cfg::UnaryOp::FieldLoad(expr.typ.clone(), vec![Spanned::into_value(index)]),
Spanned::new(built_expr.as_value(), expr.span),
),
e.span,
);
self.drop_value(&built_expr, expr.span);
RValue::Temp(cfg::Value::Reg(target))
}
t::Expr::Literal(ref literal) => RValue::Temp(match *literal {
t::Literal::Unit => cfg::Value::Unit,
t::Literal::Bool(b) => {
if b {
cfg::Value::Int(1, cfg::Size::Bit8)
} else {
cfg::Value::Int(0, cfg::Size::Bit8)
}
}
t::Literal::Char(c) => cfg::Value::Int(u64::from(c), cfg::Size::Bit8),
t::Literal::Number(n) => {
let size = match e.typ {
t::Type::Int(_, size) => size,
t::Type::Error => return RValue::Temp(cfg::Value::Error),
_ => panic!("bad int type"),
};
cfg::Value::Int(n.value, size)
}
t::Literal::Str(ref bytes) => {
let mut bytes = bytes.clone();
// add null terminator
bytes.push(0);
cfg::Value::Bytes(bytes)
}
}),
t::Expr::Name(name, ref type_params) => {
let name = Spanned::into_value(name);
if let Some(reg) = self.var_registers.get(&name).cloned() {
RValue::Var(reg)
} else {
let type_params = type_params.iter().map(Spanned::value).cloned().collect();
RValue::Temp(cfg::Value::Symbol(name, type_params))
}
}
t::Expr::Unary(op, ref expr) => {
let op = Spanned::into_value(op);
if op == t::UnaryOp::AddressOf || op == t::UnaryOp::MutAddressOf {
let value = self.build_expr_lvalue(expr);
let result = self.new_register(e.typ.clone());
self.emit_take_address(
result,
Spanned::new(value, expr.span),
expr.span,
op == t::UnaryOp::MutAddressOf,
);
RValue::Temp(cfg::Value::Reg(result))
} else if let Some(op) = unop_to_instruction(op, &expr.typ) {
let built_expr = self.build_expr(expr);
let result = self.new_register(e.typ.clone());
self.emit_instruction(
cfg::Instruction::UnaryOp(
result,
op,
Spanned::new(built_expr.as_value(), expr.span),
),
e.span,
);
self.drop_value(&built_expr, e.span);
RValue::Temp(cfg::Value::Reg(result))
} else {
RValue::Temp(cfg::Value::Error)
}
}
t::Expr::Cast(ref expr, ref typ) => {
let value = self.build_expr(expr);
let result = self.new_register((**typ).clone());
self.emit_instruction(
cfg::Instruction::CastAssign(result, Spanned::new(value.as_value(), expr.span)),
expr.span,
);
self.drop_value(&value, expr.span);
RValue::Temp(cfg::Value::Reg(result))
}
}
}
    /// Lowers an expression in place (lvalue) position.
    /// Non-place expressions yield `LValue::Invalid`, which the consumer
    /// (`emit_store` / `emit_take_address`) turns into a diagnostic.
    fn build_expr_lvalue(&mut self, e: &t::TypedExpr) -> LValue {
        match *e.expr {
            t::Expr::Binary(_, _, _)
            | t::Expr::Call(_, _)
            | t::Expr::Literal(_)
            | t::Expr::Cast(_, _) => LValue::Invalid,
            t::Expr::Error => LValue::Error,
            t::Expr::Field(ref expr, index) => {
                // Extend the base place's field path.
                let mut lvalue = self.build_expr_lvalue(expr);
                lvalue.add_field(Spanned::into_value(index));
                lvalue
            }
            t::Expr::Name(ref name, _) => {
                let mutability = self.var_mutability[name];
                if let Some(reg) = self.var_registers.get(&**name).cloned() {
                    LValue::Reg(mutability, reg, Vec::new())
                } else {
                    // Global symbols (functions etc.) are not assignable.
                    LValue::Invalid
                }
            }
            t::Expr::Unary(op, ref expr) => match Spanned::into_value(op) {
                t::UnaryOp::Deref => {
                    // `*p`: the pointer itself is evaluated as a value.
                    let ptr = self.build_expr(expr);
                    let mutability = match expr.typ {
                        t::Type::Pointer(mutability, _) => mutability,
                        _ => panic!("cannot deref {:?}", expr.typ),
                    };
                    LValue::Deref(mutability, ptr, expr.typ.clone(), Vec::new())
                }
                _ => LValue::Invalid,
            },
        }
    }
    /// Lowers short-circuiting `lhs && rhs`.
    /// If `lhs` is false, control goes to `reset_block`, which sets the
    /// result to 0 without evaluating `rhs`; otherwise `rhs_block`
    /// evaluates `rhs` and its value becomes the result.
    fn build_and(&mut self, lhs: &t::TypedExpr, rhs: &t::TypedExpr) -> RValue {
        let built_lhs = self.build_expr(lhs);
        let rhs_block = self.new_block();
        let reset_block = self.new_block();
        let after_block = self.new_block();
        let result = self.new_register(t::Type::Bool);
        let link = cfg::BlockLink::Strong(rhs_block);
        self.end_block(
            cfg::BlockEnd::Branch(
                Spanned::new(built_lhs.as_value(), lhs.span),
                rhs_block,
                reset_block,
            ),
            link,
        );
        self.start_block(rhs_block);
        // The lhs temporary is dropped on both branches.
        self.drop_value(&built_lhs, lhs.span);
        let built_rhs = self.build_expr(rhs);
        self.emit_instruction(
            cfg::Instruction::Assign(result, Spanned::new(built_rhs.as_value(), rhs.span)),
            rhs.span,
        );
        self.drop_value(&built_rhs, rhs.span);
        self.end_block(
            cfg::BlockEnd::Jump(after_block),
            cfg::BlockLink::Weak(reset_block),
        );
        self.start_block(reset_block);
        self.drop_value(&built_lhs, lhs.span);
        // Short-circuit: `false && _` is false (booleans are 8-bit 0/1).
        self.emit_instruction(
            cfg::Instruction::Assign(
                result,
                Spanned::new(cfg::Value::Int(0, cfg::Size::Bit8), rhs.span),
            ),
            lhs.span,
        );
        self.end_block(
            cfg::BlockEnd::Jump(after_block),
            cfg::BlockLink::Weak(after_block),
        );
        self.start_block(after_block);
        RValue::Temp(cfg::Value::Reg(result))
    }
    /// Lowers short-circuiting `lhs || rhs` — the mirror image of
    /// `build_and`: a true `lhs` jumps to `reset_block`, which sets the
    /// result to 1 without evaluating `rhs`.
    fn build_or(&mut self, lhs: &t::TypedExpr, rhs: &t::TypedExpr) -> RValue {
        let built_lhs = self.build_expr(lhs);
        let rhs_block = self.new_block();
        let reset_block = self.new_block();
        let after_block = self.new_block();
        let result = self.new_register(t::Type::Bool);
        let link = cfg::BlockLink::Strong(rhs_block);
        self.end_block(
            cfg::BlockEnd::Branch(
                Spanned::new(built_lhs.as_value(), lhs.span),
                reset_block,
                rhs_block,
            ),
            link,
        );
        self.start_block(rhs_block);
        // The lhs temporary is dropped on both branches.
        self.drop_value(&built_lhs, lhs.span);
        let built_rhs = self.build_expr(rhs);
        self.emit_instruction(
            cfg::Instruction::Assign(result, Spanned::new(built_rhs.as_value(), rhs.span)),
            rhs.span,
        );
        self.drop_value(&built_rhs, rhs.span);
        self.end_block(
            cfg::BlockEnd::Jump(after_block),
            cfg::BlockLink::Weak(reset_block),
        );
        self.start_block(reset_block);
        self.drop_value(&built_lhs, lhs.span);
        // Short-circuit: `true || _` is true (booleans are 8-bit 0/1).
        self.emit_instruction(
            cfg::Instruction::Assign(
                result,
                Spanned::new(cfg::Value::Int(1, cfg::Size::Bit8), rhs.span),
            ),
            lhs.span,
        );
        self.end_block(
            cfg::BlockEnd::Jump(after_block),
            cfg::BlockLink::Weak(after_block),
        );
        self.start_block(after_block);
        RValue::Temp(cfg::Value::Reg(result))
    }
}
/// Maps a typed binary operator to its CFG instruction.
/// Arithmetic/comparison operators require an integer operand type and
/// yield `None` otherwise (the type error was reported earlier);
/// equality works on any type. Short-circuit and assignment operators
/// must have been handled by the caller.
fn binop_to_instruction(op: t::BinaryOp, arg_type: &t::Type) -> Option<cfg::BinaryOp> {
    let int = match *arg_type {
        t::Type::Int(sign, size) => Some((sign, size)),
        _ => None,
    };
    match op {
        t::BinaryOp::Add => int.map(|(sign, size)| cfg::BinaryOp::Add(sign, size)),
        t::BinaryOp::Divide => int.map(|(sign, size)| cfg::BinaryOp::Div(sign, size)),
        t::BinaryOp::Greater => int.map(|(sign, size)| cfg::BinaryOp::Greater(sign, size)),
        t::BinaryOp::GreaterEqual => int.map(|(sign, size)| cfg::BinaryOp::GreaterEq(sign, size)),
        t::BinaryOp::Less => int.map(|(sign, size)| cfg::BinaryOp::Less(sign, size)),
        t::BinaryOp::LessEqual => int.map(|(sign, size)| cfg::BinaryOp::LessEq(sign, size)),
        t::BinaryOp::Modulo => int.map(|(sign, size)| cfg::BinaryOp::Mod(sign, size)),
        t::BinaryOp::Multiply => int.map(|(sign, size)| cfg::BinaryOp::Mul(sign, size)),
        t::BinaryOp::Subtract => int.map(|(sign, size)| cfg::BinaryOp::Sub(sign, size)),
        t::BinaryOp::Equal => Some(cfg::BinaryOp::Eq),
        t::BinaryOp::NotEqual => Some(cfg::BinaryOp::Neq),
        t::BinaryOp::And | t::BinaryOp::Or | t::BinaryOp::Assign => {
            panic!("invalid binop");
        }
    }
}
/// Maps a typed unary operator to its CFG instruction. Negation requires
/// an integer operand; address-of and the no-op `+` must have been
/// handled (or eliminated) by the caller.
fn unop_to_instruction(op: t::UnaryOp, arg_type: &t::Type) -> Option<cfg::UnaryOp> {
    let int = match *arg_type {
        t::Type::Int(sign, size) => Some((sign, size)),
        _ => None,
    };
    match op {
        t::UnaryOp::Deref => Some(cfg::UnaryOp::DerefLoad),
        t::UnaryOp::Minus => int.map(|(sign, size)| cfg::UnaryOp::Negate(sign, size)),
        t::UnaryOp::Not => Some(cfg::UnaryOp::Not),
        t::UnaryOp::Plus | t::UnaryOp::AddressOf | t::UnaryOp::MutAddressOf => {
            panic!("invalid unary op")
        }
    }
}
/// Compiles one typed function into its CFG form using a fresh builder.
fn compile_fn(f: &t::Function, ctx: &mut CompileCtx) -> cfg::Function {
    let mut builder = Builder::new(ctx);
    let start_block = builder.build_function(f);
    // build_function must always close its last block.
    debug_assert!(builder.current_block.is_none());
    cfg::Function {
        parameters: builder.parameters,
        complete_span: f.complete_span,
        type_params: f.type_params.clone(),
        registers: builder.registers,
        register_symbols: builder.register_vars,
        out_type: f.return_type.clone(),
        blocks: builder.blocks,
        start_block,
    }
}
/// Lowers the whole typed program into its CFG representation,
/// compiling each function and copying struct definitions through.
pub(crate) fn build_cfg(program: &t::Program, ctx: &mut CompileCtx) -> cfg::Program {
    let structs = program.structs.clone();
    let functions = program
        .functions
        .iter()
        .map(|f| {
            let compiled = compile_fn(f, ctx);
            (f.name, compiled)
        })
        .collect();
    cfg::Program { structs, functions }
}
| true
|
24816b1d4868e89dc10ad1e982999b9db4b21548
|
Rust
|
kadtewsd/mandelbrot
|
/src/pixel/mod.rs
|
UTF-8
| 924
| 3.078125
| 3
|
[] |
no_license
|
// needs pub!!!!
// `pub use` not only imports an item, it also publicly re-exports it under this namespace as part of the non-canonical module graph.
// The path must be absolute, or the item must be made public.
use ::num::Complex;
/// Given the row and column of a pixel in the output image, returns the
/// corresponding point on the complex plane.
///
/// `bounds` is the image size in pixels (width, height); `pixel` is the
/// (column, row) of a pixel; `upper_left` and `lower_right` are the
/// corners of the area of the complex plane the image covers.
///
/// Fix: the fourth parameter was misnamed `lower_left` — the width/height
/// computation and the unit test both treat it as the lower-RIGHT corner
/// (a true lower-left would give a zero width). Renaming a parameter is
/// interface-compatible in Rust, since arguments are positional.
pub fn pixel_to_point(bounds: (usize, usize),
                      pixel: (usize, usize),
                      upper_left: Complex<f64>,
                      lower_right: Complex<f64>) -> Complex<f64> {
    let (width, height) = (lower_right.re - upper_left.re, upper_left.im - lower_right.im);
    Complex {
        re: upper_left.re + pixel.0 as f64 * width / bounds.0 as f64,
        // Subtraction: pixel rows grow downward while the imaginary
        // axis grows upward.
        im: upper_left.im - pixel.1 as f64 * height / bounds.1 as f64
    }
}
#[test]
fn test_pixel_to_point() {
    // Pixel (25, 75) in a 100x100 image spanning (-1+1i)..(1-1i)
    // maps to -0.5 - 0.5i.
    assert_eq!(pixel_to_point((100, 100), (25, 75),
                              Complex {re: -1.0, im: 1.0},
                              Complex {re:1.0, im: -1.0}),
               Complex {re: -0.5, im: -0.5});
}
| true
|
c73f0457fb9cd78823e66cab8d410f52785b5c06
|
Rust
|
orDnans/rust-server
|
/src/main.rs
|
UTF-8
| 6,170
| 3.171875
| 3
|
[] |
no_license
|
use std::io::prelude::*;
use std::io::BufReader;
use std::fs;
use std::net::*;
// use std::collections::BTreeMap;
use serde::{Serialize, Deserialize};
use std::env;
const FILENAME: &str = "config.json";
/// Server configuration, deserialized from the `config.json` file
/// (see `FILENAME`) at startup.
#[derive(Debug, Serialize, Deserialize)]
struct Config {
    ip_address: String,
    // Kept as a string so it can be concatenated into "ip:port" directly.
    port_number: String
}
impl Config {
    /// Constructs a `Config` from an address and port.
    fn new(address: String, port: String) -> Config {
        // The parameters are owned, so they can be moved in directly;
        // the previous `.clone()` calls only produced copies whose
        // originals were immediately dropped.
        Config {
            ip_address: address,
            port_number: port,
        }
    }
    /// Returns the default local development configuration
    /// (127.0.0.1:8080).
    fn default() -> Config {
        Config {
            ip_address: String::from("127.0.0.1"),
            port_number: String::from("8080"),
        }
    }
}
// struct HandlerStruct {
// method: String,
// path: String,
// handler: fn(TcpStream)
// }
/*
should create something like ServerStruct to contain multiple HandlerStruct
maybe B-trees
*/
// No-op handler used as the default for freshly created routing nodes.
fn placeholder(_stream:TcpStream) {}
/// A node in the prefix-based routing tree: a request path/method pair
/// plus the handler to invoke and any more-specific child routes.
struct TreeNode {
    path: String,
    method: String,
    handler: fn(TcpStream),
    children: Vec<Option<Box<TreeNode>>>
}
impl TreeNode {
fn new() -> TreeNode {
let new_tree = TreeNode {
path: String::from("/"),
method: String::from(""),
handler: placeholder,
children: Vec::new()
};
new_tree
}
// fn insert(&mut self, method:String, path:String, handler:fn(TcpStream)) {
// if path.starts_with(&self.path) {
// let mut found : bool = false;
// for &child in self.children {
// match child {
// Some(tree) => if path.starts_with(&tree.path) {
// tree.insert(method, path, handler);
// found = true;
// },
// None => continue,
// }
// }
// if found == false {
// //append new tree in this tree's children vector/list
// new_tree : TreeNode = TreeNode.new()
// self.children.append()
// }
// }
// }
fn insert(&mut self, path:&str, method:&str, handler: fn(TcpStream)) {
if path.eq(&self.path) && method.eq(&self.method){
self.handler = handler;
return;
}
if path.starts_with(&self.path) {
let child_iter = self.children.iter_mut();
let mut found : bool = false;
for child in child_iter {
match child {
Some(child_tree) => if path.starts_with(&child_tree.path) {
child_tree.insert(path, method, handler);
found = true;
},
None => continue,
}
}
if found == false {
let new_tree = TreeNode {
path: path.to_string(),
method: method.to_string(),
handler: handler,
children: Vec::new()
};
self.children.push(Some(Box::new(new_tree)));
}
}
}
//register a GET request on a path with a specific handler
    // Registers a GET handler for the given path.
    fn get(&mut self, get_path:&str, get_handler:fn(TcpStream)) {
        self.insert(get_path, "GET", get_handler);
    }
    // Registers a POST handler for the given path.
    fn post(&mut self, post_path:&str, post_handler:fn(TcpStream)) {
        self.insert(post_path, "POST", post_handler);
    }
fn search(&self, path:&str, method:&str) -> Option<&fn(TcpStream)> {
println!("current path: {}", self.path);
if path.eq(&self.path) {
println!("found!");
return Some(&self.handler);
} else if path.starts_with(&self.path) {
let child_iter = self.children.iter();
for child in child_iter {
match child {
Some(tree_child) => return tree_child.search(path, method),
None => continue,
}
}
}
None
}
fn routing(&self, stream:TcpStream) {
let mut reader = BufReader::new(stream.try_clone().unwrap());
let mut path = String::new();
reader.read_line(&mut path).unwrap();
println!("{}", path);
let mut handler : Option<&fn(TcpStream)> = None;
if path.starts_with("GET") {
println!("finding get");
let route = &path[4..path.len()-2];
println!("path: {}", route);
handler = self.search(route, "GET");
} else if path.starts_with("POST") {
let route = &path[5..path.len()-2];
handler = self.search(route, "POST");
}
match handler {
Some(func) => func(stream),
None => println!("no handler"),
}
}
}
/// Handler for "/": serves `hello.html` as an HTTP 200 response.
///
/// Fixes: `fs::read_to_string(...).unwrap()` panicked (killing the
/// single-threaded server) when the file is missing — now a 500 is sent
/// instead; `stream.write` could silently perform a partial write —
/// `write_all` retries until the whole response is written.
fn hello(mut stream: TcpStream) {
    let response = match fs::read_to_string("hello.html") {
        Ok(contents) => format!(
            "HTTP/1.1 200 OK\r\nContent-Length: {}\r\n\r\n{}",
            contents.len(),
            contents
        ),
        Err(_) => String::from("HTTP/1.1 500 Internal Server Error\r\nContent-Length: 0\r\n\r\n"),
    };
    if let Err(e) = stream.write_all(response.as_bytes()) {
        println!("failed to write response: {}", e);
        return;
    }
    if let Err(e) = stream.flush() {
        println!("failed to flush response: {}", e);
    }
}
/// Entry point: loads `config.json`, binds a listener on the configured
/// address, registers routes, and serves connections sequentially.
fn main() {
    let path = env::current_dir().unwrap();
    println!("The current directory is {}", path.display());
    //read json config file to str
    let json_input = fs::read_to_string(FILENAME).expect("config file not found, aborting");
    //call serde_json::from_str(&input).unwrap() to deserialize
    let server_config: Config = serde_json::from_str(&json_input).unwrap();
    //create ip:port format
    let mut ip_port = server_config.ip_address.clone();
    ip_port.push_str(":");
    ip_port.push_str(&server_config.port_number);
    println!("ip_port: {}", ip_port);
    //create socket
    let socket = TcpListener::bind(ip_port).unwrap();
    //create routing tree
    let mut router = TreeNode::new();
    // NOTE(review): routes are keyed on the raw request-line remainder,
    // so the " HTTP/1.1" suffix is part of the registered path.
    router.get("/ HTTP/1.1", hello);
    //read stream here
    // Connections are handled one at a time on this thread.
    for stream in socket.incoming() {
        //catch errors
        let stream = stream.unwrap();
        router.routing(stream);
    }
}
| true
|
d6c3c632ad3486c0b63a542e17a42f3c77d9c503
|
Rust
|
shironecko/aoc_2020
|
/src/parser_combinator/mod.rs
|
UTF-8
| 11,907
| 3.140625
| 3
|
[
"MIT"
] |
permissive
|
///
/// made with guidance of excellent https://bodil.lol/parser-combinators/
///
use std::iter::once;
/// On success: the remaining input and the parsed value.
/// On failure: the input at the point the parser failed.
pub type ParseResult<'a, Output> = Result<(&'a str, Output), &'a str>;
/// A parser consuming a prefix of `input` and producing an `Output`.
pub trait Parser<'a, Output> {
    fn parse(&self, input: &'a str) -> ParseResult<'a, Output>;
}
// Any `Fn(&str) -> ParseResult` closure is itself a parser, which lets
// combinators below return plain closures as `impl Parser`.
impl<'a, F, Output> Parser<'a, Output> for F
where
    F: Fn(&'a str) -> ParseResult<'a, Output>,
{
    fn parse(&self, input: &'a str) -> ParseResult<'a, Output> {
        self(input)
    }
}
/// Parser that consumes exactly the literal `expected`, producing `()`.
/// `str::get` keeps the prefix slice safe on short or non-boundary input.
#[allow(dead_code)]
pub fn match_literal<'a>(expected: &'static str) -> impl Parser<'a, ()> {
    move |input: &'a str| {
        let prefix = input.get(0..expected.len());
        if prefix == Some(expected) {
            Ok((&input[expected.len()..], ()))
        } else {
            Err(input)
        }
    }
}
/// Applies `parser` repeatedly, requiring at least one success.
/// Fails (with the original input) if the first application fails.
#[allow(dead_code)]
pub fn one_or_more<'a, P, A>(parser: P) -> impl Parser<'a, Vec<A>>
where
    P: Parser<'a, A>,
{
    move |input| {
        // The first item is mandatory.
        let (mut rest, first) = match parser.parse(input) {
            Ok(ok) => ok,
            Err(_) => return Err(input),
        };
        let mut items = vec![first];
        // Then greedily accept as many further items as possible.
        while let Ok((next, item)) = parser.parse(rest) {
            rest = next;
            items.push(item);
        }
        Ok((rest, items))
    }
}
/// Applies `parser` as many times as it succeeds; never fails
/// (an empty `Vec` is a valid result).
#[allow(dead_code)]
pub fn zero_or_more<'a, P, A>(parser: P) -> impl Parser<'a, Vec<A>>
where
    P: Parser<'a, A>,
{
    move |input| {
        let mut rest = input;
        let mut items = Vec::new();
        loop {
            match parser.parse(rest) {
                Ok((next, item)) => {
                    rest = next;
                    items.push(item);
                }
                Err(_) => break,
            }
        }
        Ok((rest, items))
    }
}
/// Applies `parser_1` repeatedly (at least once) until `parser_2`
/// matches at the current position. `parser_2`'s result is discarded
/// and its input is NOT consumed. Fails if `parser_2` matches
/// immediately or `parser_1` fails before `parser_2` matches.
#[allow(dead_code)]
pub fn one_or_more_until<'a, P1, P2, Output, Discard>(
    parser_1: P1,
    parser_2: P2,
) -> impl Parser<'a, Vec<Output>>
where
    P1: Parser<'a, Output>,
    P2: Parser<'a, Discard>,
{
    move |input: &'a str| {
        // The terminator matching right away means "zero items" — an error
        // for the one-or-more variant.
        if let Ok(_) = parser_2.parse(input) {
            return Err(input);
        }
        let (mut input_cursor, first_result) = parser_1.parse(input)?;
        let mut results = vec![first_result];
        while let Err(_) = parser_2.parse(input_cursor) {
            let (next_input, result) = parser_1.parse(input_cursor)?;
            input_cursor = next_input;
            results.push(result);
        }
        Ok((input_cursor, results))
    }
}
/// Applies `parser_1` repeatedly (possibly zero times) until `parser_2`
/// matches at the current position; the terminator is not consumed.
/// Fails only if `parser_1` fails before `parser_2` matches.
#[allow(dead_code)]
pub fn zero_or_more_until<'a, P1, P2, Output, Discard>(
    parser_1: P1,
    parser_2: P2,
) -> impl Parser<'a, Vec<Output>>
where
    P1: Parser<'a, Output>,
    P2: Parser<'a, Discard>,
{
    move |input: &'a str| {
        let mut input_cursor = input;
        let mut results = Vec::new();
        while let Err(_) = parser_2.parse(input_cursor) {
            let (next_input, result) = parser_1.parse(input_cursor)?;
            input_cursor = next_input;
            results.push(result);
        }
        Ok((input_cursor, results))
    }
}
/// Runs `parser_1`, then `parser_2` on the remaining input, yielding
/// both results as a tuple. Fails if either parser fails.
#[allow(dead_code)]
pub fn pair<'a, P1, P2, R1, R2>(parser_1: P1, parser_2: P2) -> impl Parser<'a, (R1, R2)>
where
    P1: Parser<'a, R1>,
    P2: Parser<'a, R2>,
{
    move |input| {
        let (tail, result_1) = parser_1.parse(input)?;
        let (tail, result_2) = parser_2.parse(tail)?;
        Ok((tail, (result_1, result_2)))
    }
}
/// Tries `parser_1`; if it fails, tries `parser_2` on the SAME input.
/// Both alternatives must produce the same output type.
#[allow(dead_code)]
pub fn either<'a, P1, P2, Output>(parser_1: P1, parser_2: P2) -> impl Parser<'a, Output>
where
    P1: Parser<'a, Output>,
    P2: Parser<'a, Output>,
{
    move |input| parser_1.parse(input).or_else(|_| parser_2.parse(input))
}
/// Transforms a parser's output with `map_fn`, leaving consumption and
/// failure behavior untouched.
#[allow(dead_code)]
pub fn map<'a, P, F, A, B>(parser: P, map_fn: F) -> impl Parser<'a, B>
where
    P: Parser<'a, A>,
    F: Fn(A) -> B,
{
    move |input| {
        let (tail, output) = parser.parse(input)?;
        Ok((tail, map_fn(output)))
    }
}
/// Like `map`, but the mapping itself may fail: a `None` from `map_fn`
/// turns the parse into a failure at the ORIGINAL input position.
#[allow(dead_code)]
pub fn map_opt<'a, P, F, A, B>(parser: P, map_fn: F) -> impl Parser<'a, B>
where
    P: Parser<'a, A>,
    F: Fn(A) -> Option<B>,
{
    move |input| {
        let (tail, result) = parser.parse(input)?;
        match map_fn(result) {
            Some(mapped) => Ok((tail, mapped)),
            None => Err(input),
        }
    }
}
/// Runs both parsers in sequence but keeps only the FIRST result
/// (e.g. a value followed by a discarded delimiter).
#[allow(dead_code)]
pub fn left<'a, P1, P2, R1, R2>(parser1: P1, parser2: P2) -> impl Parser<'a, R1>
where
    P1: Parser<'a, R1>,
    P2: Parser<'a, R2>,
{
    map(pair(parser1, parser2), |(left, _right)| left)
}
/// Runs both parsers in sequence but keeps only the SECOND result
/// (e.g. a discarded prefix followed by a value).
#[allow(dead_code)]
pub fn right<'a, P1, P2, R1, R2>(parser1: P1, parser2: P2) -> impl Parser<'a, R2>
where
    P1: Parser<'a, R1>,
    P2: Parser<'a, R2>,
{
    map(pair(parser1, parser2), |(_left, right)| right)
}
/// Consumes exactly one char (UTF-8 aware via `len_utf8`); fails only
/// on empty input.
#[allow(dead_code)]
pub fn any_char<'a>() -> impl Parser<'a, char> {
    move |input: &'a str| {
        if let Some(c) = input.chars().next() {
            Ok((&input[c.len_utf8()..], c))
        } else {
            Err(input)
        }
    }
}
/// Succeeds only when the wrapped parser succeeds AND its result
/// satisfies `predicate`; on a predicate failure the error points at
/// the original input.
#[allow(dead_code)]
pub fn pred<'a, P, R, F>(parser: P, predicate: F) -> impl Parser<'a, R>
where
    P: Parser<'a, R>,
    F: Fn(&R) -> bool,
{
    move |input| {
        let (input_tail, result) = parser.parse(input)?;
        if predicate(&result) {
            Ok((input_tail, result))
        } else {
            Err(input)
        }
    }
}
#[allow(dead_code)]
/// Consumes a single Unicode whitespace character.
pub fn whitespace_char<'a>() -> impl Parser<'a, char> {
    pred(any_char(), |c| c.is_whitespace())
}
#[allow(dead_code)]
/// Consumes one or more whitespace characters; fails when there are none.
pub fn space_1<'a>() -> impl Parser<'a, Vec<char>> {
    one_or_more(whitespace_char())
}
#[allow(dead_code)]
/// Consumes zero or more whitespace characters; never fails.
pub fn space_0<'a>() -> impl Parser<'a, Vec<char>> {
    zero_or_more(whitespace_char())
}
#[allow(dead_code)]
/// Parses a double-quoted string and returns its contents without the
/// quotes. Note: there is no escape handling — the first `"` after the
/// opening quote always terminates the string.
pub fn quoted_string<'a>() -> impl Parser<'a, String> {
    map(
        right(
            match_literal("\""),
            left(
                zero_or_more(pred(any_char(), |c| *c != '"')),
                match_literal("\""),
            ),
        ),
        |chars| chars.into_iter().collect(),
    )
}
#[allow(dead_code)]
/// Parses one or more alphabetic characters into a `String`.
pub fn word<'a>() -> impl Parser<'a, String> {
    map(
        one_or_more(pred(any_char(), |c| c.is_alphabetic())),
        |chars| chars.into_iter().collect(),
    )
}
#[allow(dead_code)]
/// Joins the strings produced by `parser` with `separator`.
pub fn join<'a, P: Parser<'a, Vec<String>>>(
    parser: P,
    separator: &'static str,
) -> impl Parser<'a, String> {
    map(parser, move |vector| vector.join(separator))
}
#[allow(dead_code)]
/// Parses an identifier: an alphabetic first character followed by any
/// number of alphanumerics, `-`, or `_`.
pub fn identifier<'a>() -> impl Parser<'a, String> {
    move |input: &'a str| match input.chars().next() {
        Some(head) if head.is_alphabetic() => {
            let tail = input
                .chars()
                .skip(1)
                .take_while(|&c| c.is_alphanumeric() || ['-', '_'].contains(&c));
            let ident: String = once(head).chain(tail).collect();
            // `ident` was copied contiguously from the start of `input`,
            // so its byte length is a valid split point even when it
            // contains multi-byte characters.
            Ok((&input[ident.len()..], ident))
        }
        _ => Err(input),
    }
}
#[allow(dead_code)]
/// Parses one or more numeric characters (`char::is_numeric`) as a `u32`.
/// If the collected text does not parse into `u32` (e.g. overflow, or a
/// non-ASCII numeric char), `map_opt` turns that into a parse failure.
pub fn number<'a>() -> impl Parser<'a, u32> {
    map_opt(
        one_or_more(pred(any_char(), |c| c.is_numeric())),
        |digits| digits.into_iter().collect::<String>().parse().ok(),
    )
}
// Unit tests for the parser-combinator primitives defined above.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn literal_parser() {
        let parse_joe = match_literal("Hello, Joe!");
        assert_eq!(Ok(("", ())), parse_joe.parse("Hello, Joe!"));
        assert_eq!(
            Ok((" Hello, Robert!", ())),
            parse_joe.parse("Hello, Joe! Hello, Robert!")
        );
        assert_eq!(Err("Hello, Mike!"), parse_joe.parse("Hello, Mike!"));
    }
    #[test]
    fn identifier_parser() {
        assert_eq!(
            Ok(("", "one-identifier_underscore".into())),
            identifier().parse("one-identifier_underscore")
        );
        assert_eq!(
            Ok((" identifiers", "two".into())),
            identifier().parse("two identifiers")
        );
        assert_eq!(
            Err("#no-identifiers"),
            identifier().parse("#no-identifiers")
        );
    }
    #[test]
    fn pair_combinator() {
        let tag_opener = pair(match_literal("<"), identifier());
        assert_eq!(
            Ok(("/>", ((), "my-first-element".into()))),
            tag_opener.parse("<my-first-element/>")
        );
        assert_eq!(Err("oops"), tag_opener.parse("oops"));
        assert_eq!(Err("!oops"), tag_opener.parse("<!oops"));
    }
    #[test]
    fn either_combinator() {
        let parser = either(match_literal("one"), match_literal("two"));
        assert_eq!(Ok((" two", ())), parser.parse("one two"));
        assert_eq!(Ok((" one", ())), parser.parse("two one"));
        assert_eq!(Err("none"), parser.parse("none"));
    }
    #[test]
    fn left_combinator() {
        let tag_opener = left(match_literal("<"), identifier());
        assert_eq!(Ok(("/>", ())), tag_opener.parse("<my-first-element/>"));
        assert_eq!(Err("oops"), tag_opener.parse("oops"));
        assert_eq!(Err("!oops"), tag_opener.parse("<!oops"));
    }
    #[test]
    fn right_combinator() {
        let tag_opener = right(match_literal("<"), identifier());
        assert_eq!(
            Ok(("/>", "my-first-element".into())),
            tag_opener.parse("<my-first-element/>")
        );
        assert_eq!(Err("oops"), tag_opener.parse("oops"));
        assert_eq!(Err("!oops"), tag_opener.parse("<!oops"));
    }
    #[test]
    fn one_or_more_combinator() {
        let parser = one_or_more(match_literal("ha"));
        assert_eq!(Ok(("", vec![(), (), ()])), parser.parse("hahaha"));
        assert_eq!(Err("ahah"), parser.parse("ahah"));
        assert_eq!(Err(""), parser.parse(""));
    }
    #[test]
    fn zero_or_more_combinator() {
        let parser = zero_or_more(match_literal("ha"));
        assert_eq!(Ok(("", vec![(), (), ()])), parser.parse("hahaha"));
        assert_eq!(Ok(("ahah", vec![])), parser.parse("ahah"));
        assert_eq!(Ok(("", vec![])), parser.parse(""));
    }
    #[test]
    fn one_or_more_until_combinator() {
        let parser = one_or_more_until(right(space_0(), word()), match_literal(";"));
        assert_eq!(
            Ok((";", vec!["one".into(), "two".into(), "three".into()])),
            parser.parse("one two three;")
        );
        assert_eq!(Err(";nothing"), parser.parse(";nothing"));
        assert_eq!(Err("!error two;"), parser.parse("one !error two;"));
    }
    #[test]
    fn zero_or_more_until_combinator() {
        let parser = zero_or_more_until(right(space_0(), word()), match_literal(";"));
        assert_eq!(
            Ok((";", vec!["one".into(), "two".into(), "three".into()])),
            parser.parse("one two three;")
        );
        assert_eq!(Ok((";nothing", Vec::new())), parser.parse(";nothing"));
        assert_eq!(Err("!error two;"), parser.parse("one !error two;"));
    }
    #[test]
    fn predicate_combinator() {
        let parser = pred(any_char(), |c| *c == 'o');
        assert_eq!(Ok(("mg", 'o')), parser.parse("omg"));
        assert_eq!(Err("lol"), parser.parse("lol"));
    }
    #[test]
    fn quoted_string_parser() {
        assert_eq!(
            Ok(("", "Hello Joe!".into())),
            quoted_string().parse("\"Hello Joe!\"")
        );
    }
    #[test]
    fn word_parser() {
        assert_eq!(
            Ok((" quick brown fox", "a".into())),
            word().parse("a quick brown fox")
        );
        assert_eq!(Ok((" word", "first".into())), word().parse("first word"));
        assert_eq!(Ok(("", "onlyWord".into())), word().parse("onlyWord"));
        assert_eq!(Err("~no-words"), word().parse("~no-words"));
    }
    #[test]
    fn join_combinator() {
        assert_eq!(
            Ok(("", "one two three".into())),
            join(one_or_more(left(word(), space_0())), " ").parse("one two three")
        );
    }
    #[test]
    fn number_parser() {
        assert_eq!(Ok(("", 42)), number().parse("42"));
        assert_eq!(
            Ok((" and then some", 16746)),
            number().parse("16746 and then some")
        );
        assert_eq!(Err("NaN"), number().parse("NaN"));
    }
}
| true
|
ec5b5a06021adf623efb91dc8f96671473acad61
|
Rust
|
SCdF/weekend
|
/src/ray.rs
|
UTF-8
| 319
| 3.234375
| 3
|
[] |
no_license
|
use vec3::Vec3;
/// A ray defined by a starting point and a direction vector.
#[derive(Debug)]
pub struct Ray {
    pub origin: Vec3,
    pub direction: Vec3,
}
impl Ray {
    /// Returns the point reached after travelling `t` units along the
    /// direction vector from the origin, i.e. `origin + t * direction`.
    pub fn point_at_parameter(&self, t: f32) -> Vec3 {
        let reached = &self.origin + &(t * &self.direction);
        Vec3 {
            x: reached.x,
            y: reached.y,
            z: reached.z,
        }
    }
}
| true
|
c4d4f0ecb523a078cf1a4387c01ab576a8237d99
|
Rust
|
Kostassoid/lethe
|
/src/storage/nix/macos.rs
|
UTF-8
| 5,479
| 2.515625
| 3
|
[
"Apache-2.0"
] |
permissive
|
use ::nix::*;
use anyhow::{Context, Result};
use plist;
use std::fs::{File, OpenOptions};
use std::os::unix::io::*;
use std::path::{Path, PathBuf};
use std::process::Command;
use crate::storage::*;
use std::ffi::CString;
impl System {
    /// Lists all storage devices known to macOS by shelling out to
    /// `diskutil` (see `DiskUtilCli`).
    pub fn enumerate_storage_devices() -> Result<Vec<StorageRef>> {
        DiskUtilCli::default().get_list()
    }
}
/// Opens an existing file (read-only, or read/write when `write_access`
/// is set) and disables the OS page cache for it via `F_NOCACHE`, giving
/// direct, uncached I/O on macOS.
pub fn open_file_direct<P: AsRef<Path>>(file_path: P, write_access: bool) -> Result<File> {
    let handle = OpenOptions::new()
        .read(true)
        .write(write_access)
        .create(false)
        .append(false)
        .truncate(false)
        .open(file_path.as_ref())?;
    // SAFETY: `handle` owns a valid open file descriptor for the duration
    // of this call. The fcntl return value is ignored, matching the
    // original best-effort behavior.
    unsafe {
        let fd = handle.as_raw_fd();
        nix::libc::fcntl(fd, nix::libc::F_NOCACHE, 1);
    }
    Ok(handle)
}
#[allow(dead_code)]
/// Returns the capacity in bytes of the block device behind `fd`,
/// computed as block size * block count via the macOS disk ioctls.
pub fn get_block_device_size(fd: libc::c_int) -> u64 {
    ioctl_read!(dk_get_block_size, b'd', 24, u32); // DKIOCGETBLOCKSIZE
    ioctl_read!(dk_get_block_count, b'd', 25, u64); // DKIOCGETBLOCKCOUNT
    // SAFETY: both ioctls write into properly sized locals. Assumes `fd`
    // refers to a block device — TODO confirm at call sites; the unwraps
    // panic otherwise.
    unsafe {
        let mut block_size: u32 = std::mem::zeroed();
        let mut block_count: u64 = std::mem::zeroed();
        dk_get_block_size(fd, &mut block_size).unwrap();
        dk_get_block_count(fd, &mut block_count).unwrap();
        (block_size as u64) * block_count
    }
}
//todo: remove this common dependency, the current implementation is not relying on StorageRef ctor
#[allow(dead_code)]
/// No-op on macOS: details are fully populated by `DiskUtilCli`, so this
/// platform hook has nothing to add.
pub fn enrich_storage_details<P: AsRef<Path>>(
    _path: P,
    _details: &mut StorageDetails,
) -> Result<()> {
    Ok(())
}
/// Source of the storage-device tree (disks with nested partitions/volumes).
pub trait StorageDeviceEnumerator {
    fn get_list(&self) -> Result<Vec<StorageRef>>;
}
/// Enumerates storage devices by invoking the `diskutil` command-line tool.
pub struct DiskUtilCli {
    // Filesystem location of the diskutil binary.
    path: PathBuf,
}
impl Default for DiskUtilCli {
fn default() -> Self {
DiskUtilCli {
path: "/usr/sbin/diskutil".into(),
}
}
}
/// Partition/volume entry from the `diskutil list -plist` output.
/// PascalCase renaming matches the plist key style.
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
struct DUPartition {
    device_identifier: String,
    // size: u64,
    // volume_name: Option<String>,
    // mount_point: Option<String>,
}
/// Whole-disk entry from `diskutil list -plist`, with its child
/// partitions and APFS volumes (either list may be absent).
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
struct DUDiskAndPartitions {
    device_identifier: String,
    // size: u64,
    partitions: Option<Vec<DUPartition>>,
    a_p_f_s_volumes: Option<Vec<DUPartition>>,
    // volume_name: Option<String>,
    // mount_point: Option<String>,
}
/// Per-device details from `diskutil info -plist <id>`.
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
struct DUDiskInfo {
    size: u64,
    device_block_size: usize,
    removable: bool,
    // False for partitions/volumes, true for whole disks.
    whole_disk: bool,
    volume_name: Option<String>,
    mount_point: Option<String>,
}
/// Top-level payload of `diskutil list -plist`.
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
struct DUList {
    all_disks_and_partitions: Vec<DUDiskAndPartitions>,
}
impl DiskUtilCli {
    /// Queries `diskutil info -plist <id>` and converts the plist payload
    /// into a `StorageDetails`.
    ///
    /// # Errors
    /// Fails when the command cannot be spawned, exits non-zero, or its
    /// output is not a valid `DUDiskInfo` plist.
    fn get_storage_details(&self, id: &str) -> Result<StorageDetails> {
        let mut command = Command::new(&self.path);
        command.arg("info").arg("-plist").arg(id);
        let output = command.output()?;
        if !output.status.success() {
            return Err(anyhow!("Can't run diskutil"));
        };
        let info: DUDiskInfo =
            plist::from_bytes(&output.stdout).context("Unable to parse diskutil info plist")?;
        // Partitions report WholeDisk = false; whole disks are classified
        // by whether macOS marks them removable.
        let storage_type = if !info.whole_disk {
            StorageType::Partition
        } else if info.removable {
            StorageType::Removable
        } else {
            StorageType::Fixed
        };
        // `info` is owned and unused afterwards, so move the strings out
        // instead of cloning them (the original `.to_owned()` calls were
        // redundant allocations).
        Ok(StorageDetails {
            size: info.size,
            block_size: info.device_block_size,
            storage_type,
            mount_point: info.mount_point,
            label: info.volume_name,
        })
    }
}
impl StorageDeviceEnumerator for DiskUtilCli {
    /// Runs `diskutil list -plist` and builds one `StorageRef` per whole
    /// disk, with its partitions and APFS volumes as children. Every entry
    /// is addressed via the raw device node (`/dev/rdiskN...`).
    fn get_list(&self) -> Result<Vec<StorageRef>> {
        let mut command = Command::new(&self.path);
        command.arg("list").arg("-plist");
        let output = command.output()?;
        if !output.status.success() {
            return Err(anyhow!("Can't run diskutil"));
        };
        let info: DUList =
            plist::from_bytes(&output.stdout).context("Unable to parse diskutil info plist")?;
        info.all_disks_and_partitions
            .iter()
            .map(|d| {
                // Partitions and APFS volumes are flattened into a single
                // child list; either may be absent in the plist.
                let children: Result<Vec<StorageRef>> = d
                    .partitions
                    .as_ref()
                    .unwrap_or(&vec![])
                    .iter()
                    .chain(d.a_p_f_s_volumes.as_ref().unwrap_or(&vec![]).iter())
                    .map(|p| {
                        Ok(StorageRef {
                            id: format!("/dev/r{}", p.device_identifier),
                            details: self.get_storage_details(&p.device_identifier)?,
                            children: vec![],
                        })
                    })
                    .collect();
                Ok(StorageRef {
                    id: format!("/dev/r{}", d.device_identifier),
                    details: self.get_storage_details(&d.device_identifier)?,
                    children: children?,
                })
            })
            .collect()
    }
}
/// Force-unmounts the volume mounted at `path`.
///
/// A missing mount point (`ENOENT`) counts as success, making the call
/// idempotent; any other failure is returned with context.
pub fn unmount(path: &str) -> Result<()> {
    let cpath = CString::new(path)?;
    // SAFETY: `cpath` is a valid NUL-terminated C string that outlives
    // the libc call.
    match unsafe { libc::unmount(cpath.as_ptr(), libc::MNT_FORCE) } {
        0 => Ok(()),
        _ if std::io::Error::last_os_error().raw_os_error() == Some(libc::ENOENT) => Ok(()), // not found
        _ => Err(anyhow::Error::new(std::io::Error::last_os_error())
            .context("Failed to unmount a volume")),
    }
}
| true
|
0567920fe11daaf3653a2076cdcc014b2560d8b1
|
Rust
|
candtechsoftware/Jit_Compiler
|
/jit_parser/src/types.rs
|
UTF-8
| 427
| 2.84375
| 3
|
[] |
no_license
|
/// The primitive type tags the JIT knows about.
#[derive(Debug)]
pub enum BuiltInType {
    VOID,
    INT8,
    UINT8,
    INT32,
    UINT32,
    DOUBLE,
    STRUCT,
}
/// A named type: a built-in tag plus (for aggregates) its field types.
#[derive(Debug)]
pub struct Type {
    name: String,
    item_type: BuiltInType,
    // Sub-types of an aggregate; left empty by `new` — presumably filled
    // for STRUCT types elsewhere (not visible here).
    fields: Vec<Type>,
}
impl Type {
pub fn new(name: &str, item_type: BuiltInType) -> Self {
Self {
name: String::from(name),
item_type,
fields: Vec::new(),
}
}
}
| true
|
776e6f7e101ee72a6dc29c370783677b724b1ae9
|
Rust
|
isgasho/RVM1.5
|
/src/memory/mm.rs
|
UTF-8
| 4,606
| 2.90625
| 3
|
[
"MIT"
] |
permissive
|
#![allow(dead_code)]
//! Memory management.
use alloc::collections::btree_map::{BTreeMap, Entry, Values};
use core::fmt::{Debug, Formatter, Result};
use super::addr::{align_down, align_up};
use super::mapper::Mapper;
use super::paging::{GenericPageTable, MemFlags};
use crate::error::HvResult;
/// A contiguous, page-aligned virtual-address range together with its
/// access flags and the mapper used to translate it.
#[derive(Clone)]
pub struct MemoryRegion<VA> {
    pub start: VA,
    pub size: usize,
    pub flags: MemFlags,
    pub(super) mapper: Mapper,
}
/// A set of non-overlapping memory regions kept in sync with a page table.
/// Regions are indexed by start address for O(log n) neighbor queries.
pub struct MemorySet<PT: GenericPageTable>
where
    PT::VA: Ord,
{
    regions: BTreeMap<PT::VA, MemoryRegion<PT::VA>>,
    pt: PT,
}
impl<VA: From<usize> + Into<usize> + Copy> MemoryRegion<VA> {
    /// Builds a region whose start is aligned down and whose size is
    /// aligned up to page granularity.
    pub(super) fn new(start: VA, size: usize, flags: MemFlags, mapper: Mapper) -> Self {
        let aligned_start = align_down(start.into());
        let aligned_size = align_up(size);
        Self {
            start: aligned_start.into(),
            size: aligned_size,
            flags,
            mapper,
        }
    }
    /// Test whether this region is overlap with `other`.
    fn is_overlap_with(&self, other: &Self) -> bool {
        let self_start = self.start.into();
        let self_end = self_start + self.size;
        let other_start = other.start.into();
        let other_end = other_start + other.size;
        // Two half-open ranges intersect iff each starts before the other ends.
        self_start < other_end && other_start < self_end
    }
}
impl<PT: GenericPageTable> MemorySet<PT>
where
    PT::VA: Ord,
{
    /// Creates an empty set backed by a freshly allocated page table.
    pub fn new() -> Self {
        Self {
            regions: BTreeMap::new(),
            pt: PT::new(),
        }
    }

    /// Creates an empty set that reuses an existing page table.
    pub fn new_with_page_table(pt: PT) -> Self {
        Self {
            regions: BTreeMap::new(),
            pt,
        }
    }

    /// Deep-copies the region map and the page table.
    pub fn clone(&self) -> Self {
        Self {
            regions: self.regions.clone(),
            pt: self.pt.clone(),
        }
    }

    /// Number of regions currently in the set.
    pub fn len(&self) -> usize {
        self.regions.len()
    }

    /// Returns `true` when the set contains no regions.
    pub fn is_empty(&self) -> bool {
        self.regions.is_empty()
    }

    /// Returns `true` when `other` overlaps no existing region. Only the
    /// nearest region on each side needs checking because inserted
    /// regions never overlap each other.
    fn test_free_area(&self, other: &MemoryRegion<PT::VA>) -> bool {
        if let Some((_, before)) = self.regions.range(..other.start).last() {
            if before.is_overlap_with(other) {
                return false;
            }
        }
        if let Some((_, after)) = self.regions.range(other.start..).next() {
            if after.is_overlap_with(other) {
                return false;
            }
        }
        true
    }

    /// Add a memory region to this set.
    ///
    /// Maps the region into the page table first; rejects regions that
    /// overlap an existing one with `EINVAL`.
    pub fn insert(&mut self, region: MemoryRegion<PT::VA>) -> HvResult {
        // (The original source contained mojibake "®ion" here — the
        // UTF-8 corruption of "&region" — restored below.)
        if !self.test_free_area(&region) {
            warn!(
                "MemoryRegion overlapped in MemorySet: {:#x?}\n{:#x?}",
                region, self
            );
            return hv_result_err!(EINVAL);
        }
        self.pt.map(&region)?;
        self.regions.insert(region.start, region);
        Ok(())
    }

    /// Find and remove memory region which starts from `start`.
    pub fn delete(&mut self, start: PT::VA) -> HvResult {
        if let Entry::Occupied(e) = self.regions.entry(start) {
            self.pt.unmap(&e.get())?;
            e.remove();
            Ok(())
        } else {
            hv_result_err!(
                EINVAL,
                format!(
                    "MemorySet::delete(): no memory region starts from {:#x?}",
                    start.into()
                )
            )
        }
    }

    /// Unmaps and removes every region. Panics if any unmap fails.
    pub fn clear(&mut self) {
        for region in self.regions.values() {
            self.pt.unmap(region).unwrap();
        }
        self.regions.clear();
    }

    /// Iterates over all regions in start-address order.
    pub fn iter(&self) -> Values<'_, PT::VA, MemoryRegion<PT::VA>> {
        self.regions.values()
    }

    /// Activates the underlying page table.
    ///
    /// # Safety
    /// Switching the active page table affects all subsequent address
    /// translation; the caller must ensure the mappings are valid for the
    /// code that will run under them.
    pub unsafe fn activate(&self) {
        self.pt.activate();
    }

    /// Shared access to the backing page table.
    pub fn page_table(&self) -> &PT {
        &self.pt
    }

    /// Mutable access to the backing page table.
    pub fn page_table_mut(&mut self) -> &mut PT {
        &mut self.pt
    }
}
impl<VA: Into<usize> + Copy> Debug for MemoryRegion<VA> {
    /// Renders the region with its address range expanded for readability.
    fn fmt(&self, f: &mut Formatter) -> Result {
        let start = self.start.into();
        f.debug_struct("MemoryRegion")
            .field("vaddr_range", &(start..start + self.size))
            .field("size", &self.size)
            .field("flags", &self.flags)
            .field("mapper", &self.mapper)
            .finish()
    }
}
impl<PT: GenericPageTable> Debug for MemorySet<PT>
where
    PT::VA: Ord,
{
    /// Renders the regions plus the page-table type name and root address
    /// (the table contents themselves are not dumped).
    fn fmt(&self, f: &mut Formatter) -> Result {
        f.debug_struct("MemorySet")
            .field("regions", &self.regions.values())
            .field("page_table", &core::any::type_name::<PT>())
            .field("page_table_root", &self.pt.root_paddr())
            .finish()
    }
}
impl<PT: GenericPageTable> Drop for MemorySet<PT>
where
    PT::VA: Ord,
{
    /// Unmaps every region on drop so the page table is left clean.
    fn drop(&mut self) {
        debug!("Drop {:#x?}", self);
        self.clear();
    }
}
| true
|
331383d6e3186431d6a272e50f090c70e403f562
|
Rust
|
skyofsmith/servePractice
|
/rust/advanced/traits2/src/main.rs
|
UTF-8
| 504
| 3.40625
| 3
|
[
"MIT"
] |
permissive
|
/// Exposes a borrowed name.
trait GetName{
    fn get_name(&self) -> &String;
}
// Conditionally implement a trait for every type that implements another
// specific trait (a "blanket implementation").
/// Prints the name to stdout.
trait PrintName {
    fn print_name(&self);
}
// Blanket impl: any type with `GetName` gets `PrintName` for free.
impl<T: GetName> PrintName for T {
    fn print_name(&self) {
        println!("name = {}", self.get_name());
    }
}
/// Minimal demo type carrying only a name.
struct Student {
    name: String
}
impl GetName for Student {
    /// Borrows the student's name.
    fn get_name(&self) -> &String {
        &self.name
    }
}
fn main() {
    // `print_name` comes from the blanket impl over `GetName`.
    let student = Student {
        name: String::from("tea"),
    };
    student.print_name();
}
| true
|
dc5c37101d32e7db251f706811d278a05bb01b28
|
Rust
|
MaulingMonkey/lies
|
/crates/example-console/src/main.rs
|
UTF-8
| 1,610
| 3.203125
| 3
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0",
"MIT"
] |
permissive
|
use std::env;
use std::process::exit;
/// CLI entry point: dispatches on the first positional argument and
/// exits with 0 on success, 1 on an unknown subcommand.
fn main() {
    let mut args = env::args();
    let _exe = args.next();
    // No subcommand at all behaves like `help`.
    let command = match args.next() {
        Some(c) => c,
        None => {
            print_usage();
            exit(0);
        }
    };
    match command.as_str() {
        "help" | "--help" => {
            print_usage();
            exit(0);
        }
        "version" | "--version" => {
            print_version();
            exit(0);
        }
        "about" => {
            print_about();
            exit(0);
        }
        "add" => {
            add(args);
            exit(0);
        }
        other => {
            eprintln!("Unrecognized subcommand: {}", other);
            print_usage();
            exit(1);
        }
    }
}
/// Prints the crate version (taken from Cargo at compile time).
fn print_version() {
    println!("example-console v{}", env!("CARGO_PKG_VERSION"));
    println!();
}
/// Prints the usage text defined in `USAGE`.
fn print_usage() {
    println!("{}", USAGE.trim());
    println!();
}
/// Prints version plus the license report generated by the `lies` macro.
fn print_about() {
    print_version();
    println!("Licenses and libraries used in this project:");
    println!();
    println!("{}", lies::licenses_ansi!());
    println!();
    println!();
}
/// Sums the remaining CLI arguments as f64 and prints the total.
/// Panics (via `expect`) on any non-numeric argument, as before.
fn add(args: env::Args) {
    let sum: f64 = args
        .map(|value| {
            value
                .parse::<f64>()
                .expect("\"example-console add\" expected only numerical arguments")
        })
        .sum();
    println!("{}", sum);
}
// Help text shown by `print_usage`; leading/trailing blank lines are
// trimmed before printing.
const USAGE : &'static str = "
Usage: example-console [subcommand]
Subcommands:
    help        Print this help/usage information
    version     Print the current version of example-console
    about       Print the license information of example-console
    add         Add a series of numbers together and print the result
";
| true
|
3ae8ec35a703e416eae8f73ce5a106de4b4fe92d
|
Rust
|
roy-ganz/toql
|
/crates/core/src/join/keyed.rs
|
UTF-8
| 816
| 2.53125
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use super::Join;
use crate::keyed::{Keyed, KeyedMut};
impl<T> Keyed for Join<T>
where
    T: Keyed,
    T::Key: Clone,
{
    type Key = T::Key;
    /// Returns the key: cloned directly for the `Key` variant, or
    /// delegated to the wrapped entity.
    fn key(&self) -> T::Key {
        match self {
            Join::Key(k) => k.clone(),
            Join::Entity(e) => e.key(),
        }
    }
}
// Same as above, but usable through a shared reference.
impl<T> Keyed for &Join<T>
where
    T: Keyed,
    T::Key: Clone,
{
    type Key = T::Key;
    fn key(&self) -> T::Key {
        match self {
            Join::Key(k) => k.clone(),
            Join::Entity(e) => e.key(),
        }
    }
}
impl<T> KeyedMut for Join<T>
where
    T: KeyedMut,
    T::Key: Clone,
{
    /// Sets the key: replaces a bare key in place, or forwards to the
    /// wrapped entity so it can update its own key fields.
    fn set_key(&mut self, key: T::Key) {
        match self {
            Join::Key(_) => {
                *self = Join::Key(key);
            }
            Join::Entity(e) => e.set_key(key),
        }
    }
}
| true
|
3fa401cd4351fc2e4270bbee5032f11285ca0938
|
Rust
|
pksunkara/reign
|
/reign_view/src/parse/parse_stream.rs
|
UTF-8
| 11,543
| 3.265625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use super::{Error, Parse, StringPart};
use regex::Regex;
/// A cursor over the template text being parsed. All positions are byte
/// offsets into `content`.
#[derive(Debug)]
pub struct ParseStream {
    pub content: String,
    pub cursor: usize,
}
// TODO:(perf) This is not really efficient since getting to the
// start point specified in String::get is not a constant
// time operation because the String is UTF8.
//
// What we can do is consume the string as we keep parsing.
impl ParseStream {
    /// Wraps `content` with the cursor at the start.
    pub(super) fn new(content: String) -> Self {
        ParseStream { content, cursor: 0 }
    }
    /// Builds an `Error` recording the current position and `msg`.
    pub(super) fn error(&self, msg: &str) -> Error {
        Error {
            content: self.content.clone(),
            cursor: self.cursor,
            message: msg.to_string(),
        }
    }
    /// Parses a `T` starting at the cursor.
    pub(super) fn parse<T>(&mut self) -> Result<T, Error>
    where
        T: Parse,
    {
        T::parse(self)
    }
    /// Tests (without advancing) whether regex `r` matches at the cursor.
    /// `r` is anchored with `^`; panics if `r` is not a valid regex.
    pub(super) fn is_match(&self, r: &str) -> bool {
        Regex::new(&format!("^{}", r))
            .unwrap()
            .is_match(self.content.get(self.cursor..).unwrap())
    }
    /// Matches anchored regex `r` at the cursor, advances past the match,
    /// and returns the matched text.
    pub(super) fn matched(&mut self, r: &str) -> Result<String, Error> {
        let reg = Regex::new(&format!("^{}", r)).unwrap();
        let mat = reg.find(self.content.get(self.cursor..).unwrap());
        if mat.is_none() {
            return Err(self.error(&format!("unable to match `{}`", r)));
        }
        let mat_end = self.cursor + mat.unwrap().end();
        let sub_string = self.content.get(self.cursor..mat_end);
        if sub_string.is_none() {
            return Err(self.error("out of bounds"));
        }
        self.cursor = mat_end;
        Ok(sub_string.unwrap().to_string())
    }
    /// Matches anchored regex `r` at the cursor and returns capture group
    /// `index`; advances past the whole match (group 0).
    pub(super) fn capture(&mut self, r: &str, index: usize) -> Result<String, Error> {
        let reg = Regex::new(&format!("^{}", r)).unwrap();
        let cap = reg.captures(self.content.get(self.cursor..).unwrap());
        if cap.is_none() {
            return Err(self.error(&format!("unable to match `{}`", r)));
        }
        let captures = cap.unwrap();
        let val = captures.get(index);
        if val.is_none() {
            return Err(self.error(&format!("unable to get capture group {} in `{}`", index, r)));
        }
        self.cursor += captures.get(0).unwrap().as_str().len();
        Ok(val.unwrap().as_str().to_string())
    }
    /// Tests (without advancing) whether literal `sub` appears at the cursor.
    pub(super) fn peek(&self, sub: &str) -> bool {
        let sub_end = self.cursor + sub.len()
        ;
        let sub_string = self.content.get(self.cursor..sub_end);
        if sub_string.is_none() {
            return false;
        }
        sub_string.unwrap() == sub
    }
    /// Consumes literal `sub` at the cursor or errors without advancing.
    pub(super) fn step(&mut self, sub: &str) -> Result<String, Error> {
        let sub_end = self.cursor + sub.len();
        let sub_string = self.content.get(self.cursor..sub_end);
        if sub_string.is_none() {
            return Err(self.error(&format!("out of bounds when trying to find `{}`", sub)));
        }
        if sub_string.unwrap() != sub {
            return Err(self.error(&format!("expected `{}`", sub)));
        }
        self.cursor = sub_end;
        Ok(sub_string.unwrap().to_string())
    }
    /// Returns the absolute offset of the next occurrence of `sub` at or
    /// after the cursor, without advancing.
    pub(super) fn seek(&self, sub: &str) -> Result<usize, Error> {
        let index = self.content.get(self.cursor..).unwrap().find(sub);
        if index.is_none() {
            return Err(self.error(&format!("expected `{}`", sub)));
        }
        Ok(self.cursor + index.unwrap())
    }
    /// Returns the text between the cursor and the next `sub`, moving the
    /// cursor there; if `consume`, also skips over `sub` itself.
    pub(super) fn until(&mut self, sub: &str, consume: bool) -> Result<String, Error> {
        let index = self.seek(sub)?;
        let sub_string = self.content.get(self.cursor..index);
        self.cursor = index;
        if consume {
            self.cursor += sub.len();
        }
        Ok(sub_string.unwrap().to_string())
    }
    /// Advances past any run of whitespace (possibly empty).
    pub(super) fn skip_spaces(&mut self) -> Result<(), Error> {
        self.matched("\\s*")?;
        Ok(())
    }
    // TODO: Move this to Text by making self a RefCell
    /// Parses interpolated text (literal parts and `{{ expr }}` parts).
    pub(super) fn parse_text(&mut self) -> Result<Vec<StringPart>, Error> {
        StringPart::parse(self, &self.content.clone(), false)
    }
}
// Unit tests covering `ParseStream`'s primitives and text parsing.
#[cfg(test)]
mod test {
    use super::super::{Code, Expr};
    use super::{ParseStream, StringPart};
    use proc_macro2::Span;
    use syn::Ident;
    #[test]
    fn test_parse_text_in_the_middle() {
        let mut ps = ParseStream {
            content: "<b>Hello</b>".to_string(),
            cursor: 3,
        };
        let parts = ps.parse_text().unwrap();
        assert_eq!(ps.cursor, 8);
        assert_eq!(parts.len(), 1);
        assert!(if let Some(StringPart::Normal(s)) = parts.get(0) {
            s == "Hello"
        } else {
            false
        });
    }
    #[test]
    fn test_parse_text_at_end() {
        let mut ps = ParseStream::new("text".to_string());
        let parts = ps.parse_text().unwrap();
        assert_eq!(ps.cursor, 4);
        assert_eq!(parts.len(), 1);
        assert!(if let Some(StringPart::Normal(s)) = parts.get(0) {
            s == "text"
        } else {
            false
        });
    }
    #[test]
    fn test_parse_text_escaped_curly_braces() {
        let mut ps = ParseStream::new("\\{{ text }}".to_string());
        let parts = ps.parse_text().unwrap();
        assert_eq!(ps.cursor, 11);
        assert_eq!(parts.len(), 2);
        assert!(if let Some(StringPart::Normal(s)) = parts.get(0) {
            s == "\\{{"
        } else {
            false
        });
        assert!(if let Some(StringPart::Normal(s)) = parts.get(1) {
            s == " text }}"
        } else {
            false
        });
    }
    #[test]
    fn test_parse_text_expr() {
        let mut ps = ParseStream::new("{{ text}}{{u}}".to_string());
        let parts = ps.parse_text().unwrap();
        assert_eq!(ps.cursor, 14);
        assert_eq!(parts.len(), 2);
        assert!(
            if let Some(StringPart::Expr(Code::Expr(Expr::Path(p)))) = parts.get(0) {
                p.path.is_ident(&Ident::new("text", Span::call_site()))
            } else {
                false
            }
        );
        assert!(
            if let Some(StringPart::Expr(Code::Expr(Expr::Path(p)))) = parts.get(1) {
                p.path.is_ident(&Ident::new("u", Span::call_site()))
            } else {
                false
            }
        );
    }
    #[test]
    fn test_parse_text_empty_string() {
        let mut ps = ParseStream::new("".to_string());
        let parts = ps.parse_text().unwrap();
        assert_eq!(ps.cursor, 0);
        assert_eq!(parts.len(), 0);
    }
    #[test]
    fn test_parse_text_incomplete_expr() {
        let mut ps = ParseStream::new("{{ text ".to_string());
        let err = ps.parse_text().unwrap_err();
        assert_eq!(ps.cursor, 2);
        assert_eq!(err.cursor, 2);
        assert_eq!(err.message, "expression incomplete".to_string())
    }
    #[test]
    fn test_is_match() {
        let ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 1,
        };
        assert!(ps.is_match("[a-z]+"));
        assert!(!ps.is_match("[A-Z][a-z]+"));
    }
    #[test]
    fn test_matched() {
        let mut ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 2,
        };
        let val = ps.matched("[a-z]+").unwrap();
        assert_eq!(ps.cursor, 5);
        assert_eq!(val, "llo".to_string());
    }
    #[test]
    fn test_matched_error() {
        let mut ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 1,
        };
        let err = ps.matched("[A-Z]+").unwrap_err();
        assert_eq!(ps.cursor, 1);
        assert_eq!(err.cursor, 1);
        assert_eq!(err.message, "unable to match `[A-Z]+`".to_string());
    }
    #[test]
    fn test_capture() {
        let mut ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 3,
        };
        let val = ps.capture("([a-z])([a-z])", 2).unwrap();
        assert_eq!(ps.cursor, 5);
        assert_eq!(val, "o".to_string());
    }
    #[test]
    fn test_capture_error() {
        let mut ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 1,
        };
        let err = ps.capture("[A-Z]+", 1).unwrap_err();
        assert_eq!(ps.cursor, 1);
        assert_eq!(err.cursor, 1);
        assert_eq!(err.message, "unable to match `[A-Z]+`".to_string());
    }
    #[test]
    fn test_capture_number_error() {
        let mut ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 1,
        };
        let err = ps.capture("([a-z])([a-z])", 3).unwrap_err();
        assert_eq!(ps.cursor, 1);
        assert_eq!(err.cursor, 1);
        assert_eq!(
            err.message,
            "unable to get capture group 3 in `([a-z])([a-z])`".to_string()
        );
    }
    #[test]
    fn test_peek() {
        let ps = ParseStream {
            content: "Hello".to_string(),
            cursor: 1,
        };
        assert!(ps.peek("ello"));
        assert!(!ps.peek("Hello"));
    }
    #[test]
    fn test_step() {
        let mut ps = ParseStream {
            content: "Hello".to_string(),
            cursor: 1,
        };
        let val = ps.step("el").unwrap();
        assert_eq!(ps.cursor, 3);
        assert_eq!(val, "el");
    }
    #[test]
    fn test_step_error() {
        let mut ps = ParseStream {
            content: "Hello".to_string(),
            cursor: 1,
        };
        let err = ps.step("Hel").unwrap_err();
        assert_eq!(ps.cursor, 1);
        assert_eq!(err.cursor, 1);
        assert_eq!(err.message, "expected `Hel`".to_string())
    }
    #[test]
    fn test_step_bounds_error() {
        let mut ps = ParseStream {
            content: "Hello".to_string(),
            cursor: 1,
        };
        let err = ps.step("Hello").unwrap_err();
        assert_eq!(ps.cursor, 1);
        assert_eq!(err.cursor, 1);
        assert_eq!(
            err.message,
            "out of bounds when trying to find `Hello`".to_string(),
        )
    }
    #[test]
    fn test_seek() {
        let ps = ParseStream {
            content: "Hello".to_string(),
            cursor: 1,
        };
        let index = ps.seek("lo").unwrap();
        assert_eq!(ps.cursor, 1);
        assert_eq!(index, 3);
    }
    #[test]
    fn test_seek_error() {
        let ps = ParseStream {
            content: "Hello".to_string(),
            cursor: 1,
        };
        let err = ps.seek("H").unwrap_err();
        assert_eq!(ps.cursor, 1);
        assert_eq!(err.cursor, 1);
        assert_eq!(err.message, "expected `H`".to_string())
    }
    #[test]
    fn test_until() {
        let mut ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 1,
        };
        let val = ps.until("lo", true).unwrap();
        assert_eq!(ps.cursor, 5);
        assert_eq!(val, "el".to_string());
    }
    #[test]
    fn test_until_non_consume() {
        let mut ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 1,
        };
        let val = ps.until("lo", false).unwrap();
        assert_eq!(ps.cursor, 3);
        assert_eq!(val, "el".to_string());
    }
    #[test]
    fn test_until_error() {
        let mut ps = ParseStream {
            content: "Hello World".to_string(),
            cursor: 1,
        };
        let err = ps.until("H", true).unwrap_err();
        assert_eq!(ps.cursor, 1);
        assert_eq!(err.cursor, 1);
        assert_eq!(err.message, "expected `H`".to_string());
    }
}
| true
|
f179feba984d1c5d9f53c75467af855ef6da2767
|
Rust
|
mtfcd/exercism-rust
|
/simple-linked-list/src/lib.rs
|
UTF-8
| 3,793
| 3.8125
| 4
|
[] |
no_license
|
use std::iter::FromIterator;
/// A single list node owning its value and the rest of the chain.
pub struct Node<T> {
    next: Option<Box<Node<T>>>,
    value: T,
}
impl<T> Node<T> {
fn new(value: T) -> Self {
Self { next: None, value }
}
}
/// A singly linked list; `push`/`pop` operate on the tail.
pub struct SimpleLinkedList<T> {
    head: Option<Box<Node<T>>>,
}
impl<T> SimpleLinkedList<T> {
    /// Creates an empty list.
    pub fn new() -> Self {
        Self { head: None }
    }
    // You may be wondering why it's necessary to have is_empty()
    // when it can easily be determined from len().
    // It's good custom to have both because len() can be expensive for some types,
    // whereas is_empty() is almost always cheap.
    // (Also ask yourself whether len() is expensive for SimpleLinkedList)
    /// O(1) emptiness check.
    pub fn is_empty(&self) -> bool {
        self.head.is_none()
    }
    /// Counts nodes by walking the chain — O(n).
    pub fn len(&self) -> usize {
        let mut l = 0;
        let mut next = &self.head;
        while let Some(node) = next {
            l += 1;
            next = &node.next
        }
        l
    }
    /// Appends at the tail (walks to the end first, so O(n)).
    pub fn push(&mut self, _element: T) {
        let mut append_point = &mut self.head;
        while let Some(node) = append_point {
            append_point = &mut node.next
        }
        *append_point = Some(Box::new(Node::new(_element)));
    }
    /// Removes and returns the tail element, or `None` when empty — O(n).
    pub fn pop(&mut self) -> Option<T> {
        if self.head.is_none() {
            return None
        }
        // Walk to the last link; the unwraps are safe because non-emptiness
        // was checked above and the loop condition guards each step.
        let mut next = &mut self.head;
        while next.as_ref().unwrap().next.is_some() {
            next = &mut next
                .as_mut()
                .unwrap()
                .next;
        }
        next.take().map(|n| {
            n.value
        })
    }
    /// Borrows the tail element, or `None` when empty — O(n).
    pub fn peek(&self) -> Option<&T> {
        if self.head.is_none() {
            return None
        }
        let mut next = &self.head;
        while next.as_ref().unwrap().next.is_some() {
            next = &next
                .as_ref()
                .unwrap()
                .next;
        }
        next.as_ref().map(|n| {
            &n.value
        })
    }
    /// Reverses the list by re-pointing each `next` link — O(n), no allocation.
    pub fn rev(mut self) -> SimpleLinkedList<T> {
        let mut prev = None;
        let mut head = self.head;
        // head -> a (value, Some(b)) -> b (value, Some(c)) -> c (value, None)
        // head = &a.next; a.next = prev; prev = &a;
        // a <- prev | head -> b -> c
        // head = &b.next; b.next = prev; prev = &b;
        // a <- b <- prev | head -> c
        // head = &c.next; c.next = prev; prev = &c
        // a <- b <- c <- prev | head
        // head = prev;
        // a <- b <- c <- head
        while let Some(mut a) = head {
            head = a.next;
            a.next = prev;
            prev = Some(a);
        }
        self.head = prev;
        self
    }
}
impl<T> FromIterator<T> for SimpleLinkedList<T> {
    /// Builds a list by pushing every item in iteration order.
    fn from_iter<I: IntoIterator<Item = T>>(_iter: I) -> Self {
        let mut list = Self::new();
        for item in _iter {
            list.push(item);
        }
        list
    }
}
// In general, it would be preferable to implement IntoIterator for SimpleLinkedList<T>
// instead of implementing an explicit conversion to a vector. This is because, together,
// FromIterator and IntoIterator enable conversion between arbitrary collections.
// Given that implementation, converting to a vector is trivial:
//
// let vec: Vec<_> = simple_linked_list.into_iter().collect();
//
// The reason this exercise's API includes an explicit conversion to Vec<T> instead
// of IntoIterator is that implementing that interface is fairly complicated, and
// demands more of the student than we expect at this point in the track.
impl<T> Into<Vec<T>> for SimpleLinkedList<T> {
fn into(self) -> Vec<T> {
let mut v = Vec::new();
let mut head = self.head;
while let Some(a) = head {
head = a.next;
v.push(a.value);
}
v
}
}
| true
|
0004be2833648507cbc7d781defc646bf16cc9c5
|
Rust
|
tuzz/moonlight
|
/src/components/frame/mod.rs
|
UTF-8
| 171
| 2.703125
| 3
|
[] |
no_license
|
use specs::prelude::*;
/// ECS component storing a frame counter (derives specs' `Component`).
#[derive(Component)]
pub struct Frame {
    pub number: u32,
}
impl Frame {
pub fn new(number: u32) -> Self {
Self { number }
}
}
| true
|
fac472da5dfdfb04d0a31a88c745d948381e0d51
|
Rust
|
TianTianForever/Rust-Tutorial
|
/test8.rs
|
UTF-8
| 737
| 3.859375
| 4
|
[] |
no_license
|
use std::f64::consts::PI;
use std::fmt::Debug;
/// Shapes with a computable area.
trait HasArea {
    fn area(&self) -> f64;
}
/// A circle centred at (`x`, `y`) with the given `radius`.
/// NOTE(review): `x` and `y` are never read in this file — confirm they
/// are intentional.
struct Circle {
    x: f64,
    y: f64,
    radius: f64,
}
impl HasArea for Circle {
    /// Area of a circle: pi * r^2 (same evaluation order as before).
    fn area(&self) -> f64 {
        let r = self.radius;
        PI * r * r
    }
}
// Trait constraint: `print_area` accepts any type implementing `HasArea`.
fn print_area<T: HasArea>(shap: T){
    println!("This shap has an area of {}", shap.area());
}
// Multiple trait bounds written inline; the clone() results are
// deliberately discarded — this function only demonstrates the bounds.
fn foo<T: Clone, K: Clone + Debug>(x: T,y: K) {
    x.clone();
    y.clone();
    println!("{:?}",y)
}
// Same bounds as `foo`, expressed with a `where` clause instead.
fn bar<T, K>(x: T, y: K)
    where T: Clone,
    K: Clone + Debug
{
    x.clone();
    y.clone();
    println!("{:?}",y);
}
fn main() {
    // Exercise the trait-bound demos above.
    let circle = Circle { x: 0.0, y: 0.0, radius: 1.0 };
    print_area(circle);
    foo(1, 2);
    bar(1, 2);
}
| true
|
ad6c66dd05047103bc6ee30d1ab4f6de10668800
|
Rust
|
bluenote10/rust-array-stump
|
/benchmarks/src/integration_tests.rs
|
UTF-8
| 3,738
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
//extern crate array_stump;
use std::cmp::Ordering;
use super::helpers::{gen_rand_values_i32, shuffle_clone};
use super::alternatives::splay::SplaySet;
use array_stump::ArrayStump;
// TODO: Similar functionality is in core array_stump. Introduce "testing" feature, if more
// code sharing is needed.
/// For every value in `data`, emits the triple `x - 1, x, x + 1` in order,
/// so lookups can be probed just below, at, and just above each element.
pub fn enrich_with_neighbors(data: &[i32]) -> Vec<i32> {
    let mut test_values = Vec::with_capacity(data.len() * 3);
    for &x in data {
        test_values.extend_from_slice(&[x - 1, x, x + 1]);
    }
    test_values
}
/// Total-order comparator shared by both containers under test.
fn cmp(a: &i32, b: &i32) -> Ordering {
    Ord::cmp(a, b)
}
#[test]
// Randomized differential test: every insert/remove on ArrayStump must
// agree (return value, length, contents) with the SplaySet reference
// implementation, across many array lengths and node capacities.
fn insert_and_remove() {
    let repeats = 8;
    for _ in 0 .. repeats {
        for array_len in 0 .. 64 {
            for cap in 2 .. 64 {
                let values = gen_rand_values_i32(array_len);
                // println!("\nInserting: {:?}", values);
                let mut set_a = ArrayStump::new_explicit(cmp, cap);
                let mut set_b = SplaySet::new(cmp);
                for x in &values {
                    let res_a = set_a.insert(*x);
                    let res_b = set_b.insert(*x);
                    // println!("{} {} {}", x, res_a, res_b);
                    assert_eq!(res_a.is_some(), res_b);
                    assert_eq!(set_a.len(), set_b.len());
                    assert_eq!(set_a.collect(), set_b.collect());
                    // Test for index correctness
                    if let Some(res_a) = res_a {
                        assert_eq!(set_a.get_by_index(res_a), x);
                    }
                }
                let values = shuffle_clone(&values);
                for x in &values {
                    let res_a = set_a.remove(x);
                    let res_b = set_b.remove(x);
                    // println!("{} {} {}", x, res_a, res_b);
                    assert_eq!(res_a, res_b);
                    assert_eq!(set_a.len(), set_b.len());
                    assert_eq!(set_a.collect(), set_b.collect());
                }
            }
        }
    }
}
/// Differential test for lookups: after every insertion, probes both sets
/// at every stored value and its immediate neighbors and asserts that
/// `find` succeeds/fails identically in both implementations.
#[test]
fn find() {
    let repeats = 8;
    for _ in 0 .. repeats {
        for array_len in 0 .. 16 {
            for cap in 2 .. 16 {
                let values = gen_rand_values_i32(array_len);
                let mut set_a = ArrayStump::new_explicit(cmp, cap);
                let mut set_b = SplaySet::new(cmp);
                for x in &values {
                    set_a.insert(*x);
                    set_b.insert(*x);
                    // Probe present values plus v-1/v+1 misses around each of them.
                    let existing_values = set_a.collect();
                    let existing_values_enriched = enrich_with_neighbors(&existing_values);
                    for y in existing_values_enriched {
                        let res_a = set_a.find(&y);
                        let res_b = set_b.find(&y);
                        assert_eq!(res_a.is_some(), res_b.is_some());
                    }
                }
            }
        }
    }
}
/// Differential test for index-based removal: removes each value from
/// `ArrayStump` via `find` + `remove_by_index` and from the reference set
/// via `remove`, asserting the contents stay in sync.
#[test]
fn find_and_remove_by_index() {
    let repeats = 8;
    for _ in 0 .. repeats {
        for array_len in 0 .. 16 {
            for cap in 2 .. 16 {
                let values = gen_rand_values_i32(array_len);
                let mut set_a = ArrayStump::new_explicit(cmp, cap);
                let mut set_b = SplaySet::new(cmp);
                for x in &values {
                    set_a.insert(*x);
                    set_b.insert(*x);
                }
                // Remove in shuffled order to cover arbitrary index positions.
                let values = shuffle_clone(&values);
                for x in &values {
                    set_a.find(x).map(|idx| set_a.remove_by_index(idx));
                    set_b.remove(x);
                    assert_eq!(set_a.collect(), set_b.collect());
                }
            }
        }
    }
}
| true
|
3a948060494d50d6f774401339af65d84e55c934
|
Rust
|
w3aseL/pi-weather-station
|
/src/config.rs
|
UTF-8
| 797
| 3.03125
| 3
|
[] |
no_license
|
use serde::{ Deserialize };
use std::fs::read_to_string;
/// Top-level application configuration deserialized from `Config.toml`.
#[derive(Deserialize, Debug, Clone)]
pub struct Config {
    // Active environment selector; "prod" selects production (see `is_prod_env`).
    pub env: String,
    // Development database settings.
    pub dev: Dev,
    // Production database settings.
    pub prod: Prod
}
/// Development-environment connection settings.
/// NOTE(review): identical in shape to `Prod`; kept separate so the TOML
/// sections `[dev]` and `[prod]` map to distinct fields.
#[derive(Deserialize, Debug, Clone)]
pub struct Dev {
    pub addr: String,     // database host address
    pub username: String, // database user
    pub password: String, // database password
    pub dbname: String    // database name
}
/// Production-environment connection settings (same shape as `Dev`).
#[derive(Deserialize, Debug, Clone)]
pub struct Prod {
    pub addr: String,     // database host address
    pub username: String, // database user
    pub password: String, // database password
    pub dbname: String    // database name
}
impl Config {
    /// Loads the configuration from `Config.toml` in the current working
    /// directory.
    ///
    /// # Panics
    /// Panics with a descriptive message if the file cannot be read or if
    /// its contents do not parse as TOML matching the `Config` schema.
    /// (Previously the parse failure used a bare `unwrap()`, which produced
    /// an unhelpful panic message.)
    pub fn retrieve_config() -> Self {
        let config_str = read_to_string("Config.toml").expect("Failed to open Config.toml");
        toml::from_str(&config_str).expect("Failed to parse Config.toml")
    }

    /// Returns `true` when the configured environment is production.
    pub fn is_prod_env(&self) -> bool {
        self.env == "prod"
    }
}
| true
|
ee90143fa45a85a3f217211e65288c02cd511f4c
|
Rust
|
mr1sunshine/mapbox-maps-rs
|
/src/map/config.rs
|
UTF-8
| 1,148
| 2.6875
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
use std::rc::Rc;
use winit::window::Window;
/// Map configuration: access token, render target window and camera limits.
pub struct Config {
    token: String,        // access token (presumably a Mapbox API token — confirm)
    window: Rc<Window>,   // shared handle to the winit window the map renders into
    min_zoom: f32,        // camera zoom lower bound
    max_zoom: f32,        // camera zoom upper bound
    min_pitch: f32,       // camera pitch lower bound
    max_pitch: f32,       // camera pitch upper bound
    render_world_copies: bool, // NOTE(review): likely "repeat world horizontally" — confirm
}
// The original `impl<'a> Config` declared a lifetime parameter that was never
// used anywhere in the impl; it has been removed.
impl Config {
    /// Builds a new configuration.
    ///
    /// `token` is copied into an owned `String`; the window handle is shared
    /// via `Rc`, so the caller keeps its own reference alive.
    pub fn new(
        token: &str,
        window: Rc<Window>,
        min_zoom: f32,
        max_zoom: f32,
        min_pitch: f32,
        max_pitch: f32,
        render_world_copies: bool,
    ) -> Self {
        Self {
            token: token.to_owned(),
            window,
            min_zoom,
            max_zoom,
            min_pitch,
            max_pitch,
            render_world_copies,
        }
    }

    /// The configured access token.
    pub fn token(&self) -> &str {
        &self.token
    }

    /// Borrow of the render target window.
    pub fn window(&self) -> &Window {
        self.window.as_ref()
    }

    /// Camera zoom lower bound.
    pub fn min_zoom(&self) -> f32 {
        self.min_zoom
    }

    /// Camera zoom upper bound.
    pub fn max_zoom(&self) -> f32 {
        self.max_zoom
    }

    /// Camera pitch lower bound.
    pub fn min_pitch(&self) -> f32 {
        self.min_pitch
    }

    /// Camera pitch upper bound.
    pub fn max_pitch(&self) -> f32 {
        self.max_pitch
    }

    /// Whether world copies should be rendered.
    pub fn render_world_copies(&self) -> bool {
        self.render_world_copies
    }
}
| true
|
52ad050db9625d8037b9ee5edc9623a4bfe11647
|
Rust
|
RUSTools/autograph
|
/tests/linalg.rs
|
UTF-8
| 4,276
| 2.546875
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
#![allow(warnings)]
use approx::assert_relative_eq;
use autograph::backend::Device;
use autograph::tensor::{Dot, Num, Tensor, Tensor2, TensorView2};
use autograph::Result;
use half::bf16;
use ndarray::{linalg::Dot as ArrayDot, Array, Array2, ArrayView2, LinalgScalar};
use num_traits::FromPrimitive;
use std::any::TypeId;
use std::fmt::Debug;
/// Whether a matrix operand is used as-is (`N`) or transposed (`T`)
/// in the dot-product tests below.
#[allow(unused)]
#[derive(Clone, Copy, Debug)]
enum Transpose {
    N, // not transposed
    T, // transposed
}
/// Generates a deterministic `dim[0] x dim[1]` test matrix whose entries are
/// derived from the linear element index and always fall in `1..=100`.
///
/// Fixes: the buffer was declared `mut` but never mutated (unused-mut
/// warning), and `(0..n).into_iter()` was redundant since `Range` is
/// already an iterator.
fn gen_array<T: From<u8>>(dim: [usize; 2]) -> Array2<T> {
    let n = dim[0] * dim[1];
    // `(x + 100) % 100` maps the index into 0..100; `+ 1` shifts to 1..=100,
    // which always fits in a u8.
    let vec: Vec<T> = (0..n)
        .map(|x| T::from((((x + 100) % 100) + 1) as u8))
        .collect();
    Array2::from_shape_vec(dim, vec).unwrap()
}
/// Runs one matmul case both on the CPU (`ndarray`, element type `$t1`) and on
/// the device (`Tensor`, element type `$t2`), honoring the requested operand
/// transpositions, and yields `(cpu_result, device_result)` for comparison.
macro_rules! tensor_dot {
    ($t1:ty, $t2:tt, $device:expr, $args:expr) => {{
        let device = $device;
        let (m, k, n, a_t, b_t) = $args;
        // Allocate operands pre-transposed so that `.t()` yields m x k / k x n views.
        let dim1 = match a_t {
            Transpose::N => [m, k],
            Transpose::T => [k, m],
        };
        let dim2 = match b_t {
            Transpose::N => [k, n],
            Transpose::T => [n, k],
        };
        let a1 = gen_array::<$t1>(dim1);
        let t1 = Tensor2::<$t2>::from_array(device, gen_array(dim1))?;
        let (a1, t1) = match a_t {
            Transpose::N => (a1.view(), t1.view()),
            Transpose::T => (a1.t(), t1.t()),
        };
        let a2 = gen_array::<$t1>(dim2);
        let t2 = Tensor2::<$t2>::from_array(device, gen_array(dim2))?;
        let (a2, t2) = match b_t {
            Transpose::N => (a2.view(), t2.view()),
            Transpose::T => (a2.t(), t2.t()),
        };
        // Reference result on host, candidate result on device (read back).
        let a_true = a1.dot(&a2);
        let t_out = t1.dot(&t2)?;
        let a_out = smol::block_on(t_out.to_array()?)?;
        (a_true, a_out)
    }};
}
/// Compares reference and device results with a tolerance appropriate for the
/// element type: relative for f32, loose absolute+relative for bf16 (converted
/// to f32 first), and exact equality for integer types.
macro_rules! check_arrays {
    (f32 => ($a:expr, $b:expr)) => {
        assert_relative_eq!($a, $b, max_relative = 0.000_1);
    };
    (bf16 => ($a:expr, $b:expr)) => {
        let b = $b.map(|x| x.to_f32());
        assert_relative_eq!($a, b, epsilon = 0.01, max_relative = 0.01);
    };
    ($t:tt => ($a:expr, $b:expr)) => {
        assert_eq!($a, $b);
    };
}
/// Expands each `name => (m, k, n, a_t, b_t)` entry into a `#[test]` that runs
/// the matmul on every available device and checks it against the CPU result.
/// The `bf16` arm computes the reference in f32 and compares with bf16 tolerances.
macro_rules! test_dot {
    (bf16; $($name:ident => $args:expr,)+) => (
        $(
            #[test]
            fn $name () -> Result<()> {
                for device in Device::list() {
                    let (a_true, a_out) = tensor_dot! { f32, bf16, &device, $args };
                    check_arrays!(bf16 => (a_true, a_out));
                }
                Ok(())
            }
        )+
    );
    ($t:tt; $($name:ident => $args:expr,)+) => (
        $(
            #[test]
            fn $name () -> Result<()> {
                for device in Device::list() {
                    let (a_true, a_out) = tensor_dot! { $t, $t, &device, $args };
                    check_arrays!($t => (a_true, a_out));
                }
                Ok(())
            }
        )+
    );
}
use Transpose::*;
// Concrete matmul test matrix: for each element type, one small case plus all
// four transpose combinations of a 121 x 131 x 141 problem (sizes chosen to be
// non-multiples of typical tile sizes).
test_dot!(
    u32;
    tensor_dot_u32_m21_k31_n41_N_N => (21, 31, 41, N, N),
    tensor_dot_u32_m121_k131_n141_N_N => (121, 131, 141, N, N),
    tensor_dot_u32_m121_k131_n141_T_N => (121, 131, 141, T, N),
    tensor_dot_u32_m121_k131_n141_N_T => (121, 131, 141, N, T),
    tensor_dot_u32_m121_k131_n141_T_T => (121, 131, 141, T, T),
);
test_dot!(
    i32;
    tensor_dot_i32_m21_k31_n41_N_N => (21, 31, 41, N, N),
    tensor_dot_i32_m121_k131_n141_N_N => (121, 131, 141, N, N),
    tensor_dot_i32_m121_k131_n141_T_N => (121, 131, 141, T, N),
    tensor_dot_i32_m121_k131_n141_N_T => (121, 131, 141, N, T),
    tensor_dot_i32_m121_k131_n141_T_T => (121, 131, 141, T, T),
);
test_dot!(
    f32;
    tensor_dot_f32_m21_k31_n41_N_N => (21, 31, 41, N, N),
    tensor_dot_f32_m121_k131_n141_N_N => (121, 131, 141, N, N),
    tensor_dot_f32_m121_k131_n141_T_N => (121, 131, 141, T, N),
    tensor_dot_f32_m121_k131_n141_N_T => (121, 131, 141, N, T),
    tensor_dot_f32_m121_k131_n141_T_T => (121, 131, 141, T, T),
);
test_dot!(
    bf16;
    tensor_dot_bf16_m21_k31_n41_N_N => (21, 31, 41, N, N),
    tensor_dot_bf16_m121_k131_n141_N_N => (121, 131, 141, N, N),
    tensor_dot_bf16_m121_k131_n141_T_N => (121, 131, 141, T, N),
    tensor_dot_bf16_m121_k131_n141_N_T => (121, 131, 141, N, T),
    tensor_dot_bf16_m121_k131_n141_T_T => (121, 131, 141, T, T),
);
| true
|
6db4a09c7b46b1339f96e441d0764131c2ec705b
|
Rust
|
udtrokia/dast
|
/queue/src/queue.rs
|
UTF-8
| 327
| 3.578125
| 4
|
[] |
no_license
|
/// A minimal FIFO queue backed by a `Vec`.
#[derive(Debug)]
pub struct Queue<T> {
    qdata: Vec<T>,
}

impl<T> Queue<T> {
    /// Creates an empty queue.
    pub fn new() -> Self {
        Self { qdata: Vec::new() }
    }

    /// Appends `item` to the back of the queue.
    pub fn push(&mut self, item: T) {
        self.qdata.push(item)
    }

    /// Removes and returns the element at the front of the queue.
    ///
    /// Panics if the queue is empty (delegates to `Vec::remove`). Note that
    /// front removal shifts all remaining elements, so this is O(n).
    pub fn pop(&mut self) -> T {
        let front = self.qdata.remove(0);
        front
    }
}
| true
|
0636f1e0842aaf3f1e127c84c2008b2227a701ab
|
Rust
|
josephDunne/ring_test
|
/src/main.rs
|
UTF-8
| 1,432
| 2.703125
| 3
|
[] |
no_license
|
extern crate futures;
extern crate tokio_core;
use tokio_core::reactor::Core;
use tokio_core::channel::{Sender, channel};
use futures::stream::Stream;
use futures::finished;
use std::thread::spawn;
use std::io;
use std::sync::{Arc,Mutex, Barrier};
/// Spawns a worker thread running its own event loop. The worker receives
/// numbers on a fresh channel, increments each by one and forwards it on
/// `trx`. The sender for the fresh channel is published through `result`,
/// and `barrier` guarantees it is published before the caller reads it.
fn spawn_aux(trx: Sender<u32>, result: Arc<Mutex<Option<Sender<u32>>>>, barrier: Arc<Barrier>) {
    spawn(move || {
        let mut aux_loop = Core::new().unwrap();
        let (trx2, rx) = channel::<u32>(&aux_loop.handle()).unwrap();
        // Forward every incoming value, incremented, to the next hop.
        let future = rx.for_each(|s| {
            trx.send(s + 1)
        });
        {
            // Publish this thread's sender so the spawner can chain to it.
            let mut data = result.lock().unwrap();
            *data = Some(trx2);
        }
        // Signal the spawner that the sender is now available.
        barrier.wait();
        aux_loop.run(future)
    });
}
/// Builds a ring of event-loop threads, each incrementing and forwarding a
/// counter to the next, then circulates a value one million times through the
/// ring and prints the final count.
fn main() {
    let mut main_loop = Core::new().unwrap();
    let (mut last_trx, last_rx) = channel::<u32>(&main_loop.handle()).unwrap();
    // Seed the ring with an initial value.
    last_trx.send(1).unwrap();
    let sync_mutex = Arc::new(Mutex::new(None));
    // Chain 7 worker threads; each wait ensures the worker has published its
    // sender before we take it as the next hop.
    for _ in 1..8 {
        let barrier = Arc::new(Barrier::new(2));
        spawn_aux(last_trx, sync_mutex.clone(), barrier.clone());
        barrier.wait();
        last_trx = sync_mutex.lock().unwrap().take().unwrap();
    }
    // Close the ring on the main loop: re-inject each received value until
    // one million messages have passed through.
    let future = last_rx.take(1_000_000).fold(0, |_, num| {
        let num = num + 1;
        last_trx.send(num).unwrap();
        finished::<u32, io::Error>(num)
    });
    let res = main_loop.run(future).unwrap();
    println!("res {}", res);
}
| true
|
a4b387e2b383c6fdfab35b8109a0d5b2716b8b07
|
Rust
|
JulianSchmid/etherparse
|
/etherparse/src/packet_slicing.rs
|
UTF-8
| 56,497
| 3.5
| 4
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::err::LenSource;
use super::*;
/// Packet slice split into multiple slices containing the different headers & payload.
///
/// Everything that could not be parsed is stored in a slice in the field "payload".
///
/// You can use
///
/// * [`SlicedPacket::from_ethernet`]
/// * [`SlicedPacket::from_ether_type`]
/// * [`SlicedPacket::from_ip`]
///
/// depending on your starting header to slice a packet.
///
/// # Examples
///
/// Basic usage:
///
///```
/// # use etherparse::{SlicedPacket, PacketBuilder};
/// # let builder = PacketBuilder::
/// # ethernet2([1,2,3,4,5,6], //source mac
/// # [7,8,9,10,11,12]) //destionation mac
/// # .ipv4([192,168,1,1], //source ip
/// # [192,168,1,2], //desitionation ip
/// # 20) //time to life
/// # .udp(21, //source port
/// # 1234); //desitnation port
/// # //payload of the udp packet
/// # let payload = [1,2,3,4,5,6,7,8];
/// # //get some memory to store the serialized data
/// # let mut packet = Vec::<u8>::with_capacity(
/// # builder.size(payload.len()));
/// # builder.write(&mut packet, &payload).unwrap();
/// match SlicedPacket::from_ethernet(&packet) {
/// Err(value) => println!("Err {:?}", value),
/// Ok(value) => {
/// println!("link: {:?}", value.link);
/// println!("vlan: {:?}", value.vlan);
/// println!("ip: {:?}", value.ip);
/// println!("transport: {:?}", value.transport);
/// }
/// }
/// ```
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct SlicedPacket<'a> {
    /// Ethernet II header if present.
    pub link: Option<LinkSlice<'a>>,
    /// Single or double vlan headers if present.
    pub vlan: Option<VlanSlice<'a>>,
    /// IPv4 or IPv6 header and IP extension headers if present.
    pub ip: Option<InternetSlice<'a>>,
    /// TCP or UDP header if present.
    pub transport: Option<TransportSlice<'a>>,
    /// The payload field points to the rest of the packet that could not be parsed by etherparse.
    ///
    /// Depending on which other fields contain "Some" values the payload contains the corresponding
    /// payload.
    ///
    /// For example if transport field contains Some(Udp(_)) then the payload field points to the udp payload.
    /// On the other hand if the transport field contains None then the payload contains the payload of the
    /// next field containing a Some value (in order of transport, ip, vlan, link).
    pub payload: &'a [u8],
}
impl<'a> SlicedPacket<'a> {
    /// Separates a network packet slice into different slices containing the headers from the ethernet header downwards.
    ///
    /// The result is returned as a [`SlicedPacket`] struct. This function assumes the given data starts
    /// with an ethernet II header.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    ///```
    /// # use etherparse::{SlicedPacket, PacketBuilder};
    /// # let builder = PacketBuilder::
    /// # ethernet2([1,2,3,4,5,6], //source mac
    /// # [7,8,9,10,11,12]) //destionation mac
    /// # .ipv4([192,168,1,1], //source ip
    /// # [192,168,1,2], //desitionation ip
    /// # 20) //time to life
    /// # .udp(21, //source port
    /// # 1234); //desitnation port
    /// # //payload of the udp packet
    /// # let payload = [1,2,3,4,5,6,7,8];
    /// # //get some memory to store the serialized data
    /// # let mut packet = Vec::<u8>::with_capacity(
    /// # builder.size(payload.len()));
    /// # builder.write(&mut packet, &payload).unwrap();
    /// match SlicedPacket::from_ethernet(&packet) {
    ///     Err(value) => println!("Err {:?}", value),
    ///     Ok(value) => {
    ///         println!("link: {:?}", value.link);
    ///         println!("vlan: {:?}", value.vlan);
    ///         println!("ip: {:?}", value.ip);
    ///         println!("transport: {:?}", value.transport);
    ///     }
    /// }
    /// ```
    pub fn from_ethernet(data: &'a [u8]) -> Result<SlicedPacket, err::packet::EthSliceError> {
        CursorSlice::new(data).slice_ethernet2()
    }
    /// Separates a network packet slice into different slices containing the headers using
    /// the given `ether_type` number to identify the first header.
    ///
    /// The result is returned as a [`SlicedPacket`] struct. Currently supported
    /// ether type numbers are:
    ///
    /// * `ether_type::IPV4`
    /// * `ether_type::IPV6`
    /// * `ether_type::VLAN_TAGGED_FRAME`
    /// * `ether_type::PROVIDER_BRIDGING`
    /// * `ether_type::VLAN_DOUBLE_TAGGED_FRAME`
    ///
    /// If an unsupported ether type is given the given slice will be set as payload
    /// and all other fields will be set to `None`.
    ///
    /// # Example
    ///
    /// Basic usage:
    ///
    ///```
    /// # use etherparse::{Ethernet2Header, PacketBuilder};
    /// # let builder = PacketBuilder::
    /// # ethernet2([1,2,3,4,5,6], //source mac
    /// # [7,8,9,10,11,12]) //destionation mac
    /// # .ipv4([192,168,1,1], //source ip
    /// # [192,168,1,2], //desitionation ip
    /// # 20) //time to life
    /// # .udp(21, //source port
    /// # 1234); //desitnation port
    /// # // payload of the udp packet
    /// # let payload = [1,2,3,4,5,6,7,8];
    /// # // get some memory to store the serialized data
    /// # let mut complete_packet = Vec::<u8>::with_capacity(
    /// # builder.size(payload.len())
    /// # );
    /// # builder.write(&mut complete_packet, &payload).unwrap();
    /// #
    /// # // skip ethernet 2 header so we can parse from there downwards
    /// # let packet = &complete_packet[Ethernet2Header::LEN..];
    /// #
    /// use etherparse::{ether_type, SlicedPacket};
    ///
    /// match SlicedPacket::from_ether_type(ether_type::IPV4, packet) {
    ///     Err(value) => println!("Err {:?}", value),
    ///     Ok(value) => {
    ///         println!("link: {:?}", value.link);
    ///         println!("vlan: {:?}", value.vlan);
    ///         println!("ip: {:?}", value.ip);
    ///         println!("transport: {:?}", value.transport);
    ///     }
    /// }
    /// ```
    pub fn from_ether_type(
        ether_type: EtherType,
        data: &'a [u8],
    ) -> Result<SlicedPacket, err::packet::EthSliceError> {
        use ether_type::*;
        match ether_type {
            IPV4 => CursorSlice::new(data).slice_ipv4(),
            IPV6 => CursorSlice::new(data).slice_ipv6(),
            VLAN_TAGGED_FRAME | PROVIDER_BRIDGING | VLAN_DOUBLE_TAGGED_FRAME => {
                CursorSlice::new(data).slice_vlan()
            }
            // Unsupported ether type: leave everything unparsed in `payload`.
            _ => Ok(SlicedPacket {
                link: None,
                vlan: None,
                ip: None,
                transport: None,
                payload: data,
            }),
        }
    }
    /// Separates a network packet slice into different slices containing the headers from the ip header downwards.
    ///
    /// The result is returned as a [`SlicedPacket`] struct. This function assumes the given data starts
    /// with an IPv4 or IPv6 header.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    ///```
    /// # use etherparse::{SlicedPacket, PacketBuilder};
    /// # let builder = PacketBuilder::
    /// # ipv4([192,168,1,1], //source ip
    /// # [192,168,1,2], //desitionation ip
    /// # 20) //time to life
    /// # .udp(21, //source port
    /// # 1234); //desitnation port
    /// # //payload of the udp packet
    /// # let payload = [1,2,3,4,5,6,7,8];
    /// # //get some memory to store the serialized data
    /// # let mut packet = Vec::<u8>::with_capacity(
    /// # builder.size(payload.len()));
    /// # builder.write(&mut packet, &payload).unwrap();
    /// match SlicedPacket::from_ip(&packet) {
    ///     Err(value) => println!("Err {:?}", value),
    ///     Ok(value) => {
    ///         //link & vlan fields are empty when parsing from ip downwards
    ///         assert_eq!(None, value.link);
    ///         assert_eq!(None, value.vlan);
    ///
    ///         //ip & transport (udp or tcp)
    ///         println!("ip: {:?}", value.ip);
    ///         println!("transport: {:?}", value.transport);
    ///     }
    /// }
    /// ```
    pub fn from_ip(data: &'a [u8]) -> Result<SlicedPacket, err::packet::IpSliceError> {
        CursorSlice::new(data).slice_ip()
    }
    /// If the slice in the `payload` field contains an ethernet payload
    /// this method returns the ether type number describing the payload type.
    ///
    /// The ether type number can come from an ethernet II header or a
    /// VLAN header depending on which headers are present.
    ///
    /// In case that `ip` and/or `transport` fields are filled None
    /// is returned, as the payload contents then are defined by a
    /// lower layer protocol described in these fields.
    pub fn payload_ether_type(&self) -> Option<EtherType> {
        if self.ip.is_some() || self.transport.is_some() {
            // Payload belongs to a higher-layer protocol, not directly to eth/vlan.
            None
        } else if let Some(vlan) = &self.vlan {
            // Innermost VLAN header determines the payload type.
            use VlanSlice::*;
            match vlan {
                SingleVlan(s) => Some(s.ether_type()),
                DoubleVlan(d) => Some(d.inner().ether_type()),
            }
        } else if let Some(link) = &self.link {
            use LinkSlice::*;
            match link {
                Ethernet2(eth) => Some(eth.ether_type()),
            }
        } else {
            None
        }
    }
}
/// Internal cursor used while slicing a packet: tracks the still-unparsed
/// remainder of the input, the byte offset from the start of the original
/// slice (used for error reporting) and the partially filled result.
struct CursorSlice<'a> {
    pub slice: &'a [u8],          // unparsed remainder of the input
    pub offset: usize,            // bytes already consumed from the original input
    pub len_source: LenSource,    // what currently bounds the length of `slice`
    pub result: SlicedPacket<'a>, // headers sliced so far
}
impl<'a> CursorSlice<'a> {
    /// Creates a cursor at the start of `slice` with an empty result.
    pub fn new(slice: &'a [u8]) -> CursorSlice<'a> {
        CursorSlice {
            slice,
            offset: 0,
            len_source: LenSource::Slice,
            result: SlicedPacket {
                link: None,
                vlan: None,
                ip: None,
                transport: None,
                payload: slice,
            },
        }
    }
    /// Advances the cursor past `other`, shrinking `self.slice` accordingly
    /// and increasing the consumed-byte offset.
    fn move_by_slice(&mut self, other: &'a [u8]) {
        unsafe {
            // SAFETY assumption: callers pass a slice that is a prefix of
            // `self.slice` (the header slice just parsed from it), so
            // `other.len() <= self.slice.len()` — TODO confirm for all callers.
            use core::slice::from_raw_parts;
            self.slice = from_raw_parts(
                self.slice.as_ptr().add(other.len()),
                self.slice.len() - other.len(),
            );
        }
        self.offset += other.len();
    }
    /// Slices an Ethernet II header off the front and continues with the
    /// header type announced by its ether type field.
    pub fn slice_ethernet2(mut self) -> Result<SlicedPacket<'a>, err::packet::EthSliceError> {
        use err::packet::EthSliceError::*;
        use ether_type::*;
        use LinkSlice::*;
        let result = Ethernet2HeaderSlice::from_slice(self.slice)
            .map_err(|err| Len(err.add_offset(self.offset)))?;
        //cache the ether_type for later
        let ether_type = result.ether_type();
        //set the new data
        self.move_by_slice(result.slice());
        self.result.link = Some(Ethernet2(result));
        //continue parsing (if required)
        match ether_type {
            IPV4 => self.slice_ipv4(),
            IPV6 => self.slice_ipv6(),
            VLAN_TAGGED_FRAME | PROVIDER_BRIDGING | VLAN_DOUBLE_TAGGED_FRAME => self.slice_vlan(),
            _ => Ok(self.slice_payload()),
        }
    }
    /// Slices one or two VLAN headers (double tagging is detected via the
    /// outer header's ether type) and continues with the payload protocol.
    pub fn slice_vlan(mut self) -> Result<SlicedPacket<'a>, err::packet::EthSliceError> {
        use err::packet::EthSliceError::*;
        use ether_type::*;
        use VlanSlice::*;
        let outer = SingleVlanHeaderSlice::from_slice(self.slice)
            .map_err(|err| Len(err.add_offset(self.offset)))?;
        //check if it is a double vlan header
        match outer.ether_type() {
            //in case of a double vlan header continue with the inner
            VLAN_TAGGED_FRAME | PROVIDER_BRIDGING | VLAN_DOUBLE_TAGGED_FRAME => {
                self.move_by_slice(outer.slice());
                let inner = SingleVlanHeaderSlice::from_slice(self.slice)
                    .map_err(|err| Len(err.add_offset(self.offset)))?;
                self.move_by_slice(inner.slice());
                let inner_ether_type = inner.ether_type();
                self.result.vlan = Some(DoubleVlan(DoubleVlanHeaderSlice {
                    // SAFETY: Safe as the length of the slice was previously verified.
                    // (outer and inner are contiguous sub-slices of the input)
                    slice: unsafe {
                        core::slice::from_raw_parts(
                            outer.slice().as_ptr(),
                            outer.slice().len() + inner.slice().len(),
                        )
                    },
                }));
                match inner_ether_type {
                    IPV4 => self.slice_ipv4(),
                    IPV6 => self.slice_ipv6(),
                    _ => Ok(self.slice_payload()),
                }
            }
            value => {
                //set the vlan header and continue the normal parsing
                self.move_by_slice(outer.slice());
                self.result.vlan = Some(SingleVlan(outer));
                match value {
                    IPV4 => self.slice_ipv4(),
                    IPV6 => self.slice_ipv6(),
                    _ => Ok(self.slice_payload()),
                }
            }
        }
    }
    /// Slices an IP header of either version (plus extensions) and continues
    /// with the transport protocol, unless the payload is fragmented.
    pub fn slice_ip(mut self) -> Result<SlicedPacket<'a>, err::packet::IpSliceError> {
        use err::packet::IpSliceError::*;
        // slice header, extension headers and identify payload range
        let ip = InternetSlice::from_ip_slice(self.slice).map_err(|err| {
            use err::ip::SliceError as I;
            match err {
                I::Len(mut err) => {
                    err.layer_start_offset += self.offset;
                    Len(err)
                }
                I::IpHeader(err) => Ip(err),
            }
        })?;
        // safe data needed
        let payload = ip.payload().clone();
        // set the new data
        self.offset += unsafe {
            // SAFETY: The payload is a subslice of self.slice.
            // therefor calculating the offset from it is safe and
            // the result should always be a positive number.
            payload.payload.as_ptr().offset_from(self.slice.as_ptr()) as usize
        };
        self.len_source = payload.len_source;
        self.slice = payload.payload;
        self.result.ip = Some(ip);
        // continue to the lower layers
        if payload.fragmented {
            // fragmented payloads cannot be parsed as a transport header
            Ok(self.slice_payload())
        } else {
            match payload.ip_number {
                ip_number::ICMP => self.slice_icmp4().map_err(Len),
                ip_number::UDP => self.slice_udp().map_err(Len),
                ip_number::TCP => self.slice_tcp().map_err(|err| {
                    use err::tcp::HeaderSliceError as I;
                    match err {
                        I::Len(err) => Len(err),
                        I::Content(err) => Tcp(err),
                    }
                }),
                ip_number::IPV6_ICMP => self.slice_icmp6().map_err(Len),
                value => {
                    use TransportSlice::*;
                    self.result.transport = Some(Unknown(value));
                    Ok(self.slice_payload())
                }
            }
        }
    }
    /// Slices an IPv4 header (plus extensions) and continues with the
    /// transport protocol, unless the payload is fragmented.
    pub fn slice_ipv4(mut self) -> Result<SlicedPacket<'a>, err::packet::EthSliceError> {
        use err::packet::EthSliceError::*;
        // slice ipv4 header & extension headers
        let ipv4 = Ipv4Slice::from_slice(self.slice).map_err(|err| {
            use err::ipv4::SliceError as I;
            match err {
                I::Len(mut err) => {
                    err.layer_start_offset += self.offset;
                    Len(err)
                }
                I::Header(err) => Ipv4(err),
                I::Exts(err) => Ipv4Exts(err),
            }
        })?;
        // safe data needed in following steps
        let payload = ipv4.payload().clone();
        // set the new data
        self.offset += unsafe {
            // SAFETY: The payload is a subslice of self.slice.
            // therefor calculating the offset from it is safe and
            // the result should always be a positive number.
            payload.payload.as_ptr().offset_from(self.slice.as_ptr()) as usize
        };
        self.len_source = payload.len_source;
        self.slice = payload.payload;
        self.result.ip = Some(InternetSlice::Ipv4(ipv4));
        if payload.fragmented {
            Ok(self.slice_payload())
        } else {
            match payload.ip_number {
                ip_number::UDP => self.slice_udp().map_err(Len),
                ip_number::TCP => self.slice_tcp().map_err(|err| {
                    use err::tcp::HeaderSliceError as I;
                    match err {
                        I::Len(err) => Len(err),
                        I::Content(err) => Tcp(err),
                    }
                }),
                ip_number::ICMP => self.slice_icmp4().map_err(Len),
                ip_number::IPV6_ICMP => self.slice_icmp6().map_err(Len),
                value => {
                    use TransportSlice::*;
                    self.result.transport = Some(Unknown(value));
                    Ok(self.slice_payload())
                }
            }
        }
    }
    /// Slices an IPv6 header (plus extensions) and continues with the
    /// transport protocol, unless the payload is fragmented.
    pub fn slice_ipv6(mut self) -> Result<SlicedPacket<'a>, err::packet::EthSliceError> {
        use err::packet::EthSliceError::*;
        let ipv6 = Ipv6Slice::from_slice(self.slice).map_err(|err| {
            use err::ipv6::SliceError as I;
            match err {
                I::Len(mut err) => {
                    err.layer_start_offset += self.offset;
                    Len(err)
                }
                I::Header(err) => Ipv6(err),
                I::Exts(err) => Ipv6Exts(err),
            }
        })?;
        // safe data needed in following steps
        let payload_ip_number = ipv6.payload().ip_number;
        let fragmented = ipv6.payload().fragmented;
        // set the new data
        self.offset += unsafe {
            // SAFETY: The payload is a subslice of self.slice.
            // therefor calculating the offset from it is safe and
            // the result should always be a positive number.
            ipv6.payload()
                .payload
                .as_ptr()
                .offset_from(self.slice.as_ptr()) as usize
        };
        self.len_source = ipv6.payload().len_source;
        self.slice = ipv6.payload().payload;
        self.result.ip = Some(InternetSlice::Ipv6(ipv6));
        // only try to decode the transport layer if the payload
        // is not fragmented
        if fragmented {
            Ok(self.slice_payload())
        } else {
            //parse the data below
            match payload_ip_number {
                ip_number::ICMP => self.slice_icmp4().map_err(Len),
                ip_number::UDP => self.slice_udp().map_err(Len),
                ip_number::TCP => self.slice_tcp().map_err(|err| {
                    use err::tcp::HeaderSliceError as I;
                    match err {
                        I::Len(err) => Len(err),
                        I::Content(err) => Tcp(err),
                    }
                }),
                ip_number::IPV6_ICMP => self.slice_icmp6().map_err(Len),
                value => {
                    use TransportSlice::*;
                    self.result.transport = Some(Unknown(value));
                    Ok(self.slice_payload())
                }
            }
        }
    }
    /// Slices an ICMPv4 message and finishes (ICMP payload stays unparsed).
    pub fn slice_icmp4(mut self) -> Result<SlicedPacket<'a>, err::LenError> {
        use crate::TransportSlice::*;
        let result = Icmpv4Slice::from_slice(self.slice).map_err(|mut err| {
            err.layer_start_offset += self.offset;
            // preserve a more specific length source (e.g. an IP header
            // total-length field) instead of the generic "slice" source
            if LenSource::Slice == err.len_source {
                err.len_source = self.len_source;
            }
            err
        })?;
        //set the new data
        self.move_by_slice(result.slice());
        self.result.transport = Some(Icmpv4(result));
        //done
        Ok(self.slice_payload())
    }
    /// Slices an ICMPv6 message and finishes (ICMP payload stays unparsed).
    pub fn slice_icmp6(mut self) -> Result<SlicedPacket<'a>, err::LenError> {
        use crate::TransportSlice::*;
        let result = Icmpv6Slice::from_slice(self.slice).map_err(|mut err| {
            err.layer_start_offset += self.offset;
            if LenSource::Slice == err.len_source {
                err.len_source = self.len_source;
            }
            err
        })?;
        //set the new data
        self.move_by_slice(result.slice());
        self.result.transport = Some(Icmpv6(result));
        //done
        Ok(self.slice_payload())
    }
    /// Slices a UDP header and finishes with the remaining bytes as payload.
    pub fn slice_udp(mut self) -> Result<SlicedPacket<'a>, err::LenError> {
        use crate::TransportSlice::*;
        let result = UdpHeaderSlice::from_slice(self.slice).map_err(|mut err| {
            err.layer_start_offset += self.offset;
            if LenSource::Slice == err.len_source {
                err.len_source = self.len_source;
            }
            err
        })?;
        //set the new data
        self.move_by_slice(result.slice())
        self.result.transport = Some(Udp(result));
        //done
        Ok(self.slice_payload())
    }
    /// Slices a TCP header and finishes with the remaining bytes as payload.
    pub fn slice_tcp(mut self) -> Result<SlicedPacket<'a>, err::tcp::HeaderSliceError> {
        use crate::TransportSlice::*;
        let result = TcpHeaderSlice::from_slice(self.slice).map_err(|mut err| {
            use err::tcp::HeaderSliceError::Len;
            if let Len(err) = &mut err {
                err.layer_start_offset += self.offset;
                if LenSource::Slice == err.len_source {
                    err.len_source = self.len_source;
                }
            }
            err
        })?;
        //set the new data
        self.move_by_slice(result.slice());
        self.result.transport = Some(Tcp(result));
        //done
        Ok(self.slice_payload())
    }
    /// Stores whatever is left of the input as the unparsed payload and
    /// returns the finished result.
    pub fn slice_payload(mut self) -> SlicedPacket<'a> {
        self.result.payload = self.slice;
        self.result
    }
}
#[cfg(test)]
mod test {
use super::*;
use crate::err::{
packet::{EthSliceError, IpSliceError},
Layer, LenError,
};
use crate::test_packet::TestPacket;
    // All ether type values that announce a (possibly stacked) VLAN header.
    const VLAN_ETHER_TYPES: [EtherType; 3] = [
        ether_type::VLAN_TAGGED_FRAME,
        ether_type::PROVIDER_BRIDGING,
        ether_type::VLAN_DOUBLE_TAGGED_FRAME,
    ];
#[test]
fn from_x_slice() {
// no eth
from_x_slice_vlan_variants(&TestPacket {
link: None,
vlan: None,
ip: None,
transport: None,
});
// eth
{
let eth = Ethernet2Header {
source: [1, 2, 3, 4, 5, 6],
destination: [1, 2, 3, 4, 5, 6],
ether_type: 0.into(),
};
let test = TestPacket {
link: Some(eth.clone()),
vlan: None,
ip: None,
transport: None,
};
// ok ethernet header (with unknown next)
from_x_slice_vlan_variants(&test);
// eth len error
{
let data = test.to_vec(&[]);
for len in 0..data.len() {
let err = LenError {
required_len: eth.header_len(),
len,
len_source: LenSource::Slice,
layer: Layer::Ethernet2Header,
layer_start_offset: 0,
};
from_slice_assert_err(
&test,
&data[..len],
EthSliceError::Len(err.clone()),
IpSliceError::Len(err.clone()),
);
}
}
}
}
    /// For the given base packet, tests all vlan layer variants: no vlan,
    /// single vlan (for every vlan ether type) and double vlan (for every
    /// outer/inner ether type combination), including truncation errors.
    fn from_x_slice_vlan_variants(base: &TestPacket) {
        // none
        from_x_slice_ip_variants(base);
        // single vlan header
        {
            let single = SingleVlanHeader {
                pcp: 1.try_into().unwrap(),
                drop_eligible_indicator: false,
                vlan_id: 2.try_into().unwrap(),
                ether_type: 3.into(),
            };
            for vlan_ether_type in VLAN_ETHER_TYPES {
                let mut test = base.clone();
                test.set_ether_type(vlan_ether_type);
                test.vlan = Some(VlanHeader::Single(single.clone()));
                // ok vlan header
                from_x_slice_ip_variants(&test);
                // len error
                {
                    let data = test.to_vec(&[]);
                    // every truncation of the vlan header must yield a Len error
                    for len in 0..single.header_len() {
                        let base_len = test.len(&[]) - single.header_len();
                        let err = LenError {
                            required_len: single.header_len(),
                            len,
                            len_source: LenSource::Slice,
                            layer: Layer::VlanHeader,
                            layer_start_offset: base_len,
                        };
                        from_slice_assert_err(
                            &test,
                            &data[..base_len + len],
                            EthSliceError::Len(err.clone()),
                            IpSliceError::Len(err.clone()),
                        );
                    }
                }
            }
        }
        // double vlan header
        for outer_vlan_ether_type in VLAN_ETHER_TYPES {
            for inner_vlan_ether_type in VLAN_ETHER_TYPES {
                let double = DoubleVlanHeader {
                    outer: SingleVlanHeader {
                        pcp: 1.try_into().unwrap(),
                        drop_eligible_indicator: false,
                        vlan_id: 2.try_into().unwrap(),
                        ether_type: inner_vlan_ether_type,
                    },
                    inner: SingleVlanHeader {
                        pcp: 1.try_into().unwrap(),
                        drop_eligible_indicator: false,
                        vlan_id: 2.try_into().unwrap(),
                        ether_type: 3.into(),
                    },
                };
                let mut test = base.clone();
                test.set_ether_type(outer_vlan_ether_type);
                test.vlan = Some(VlanHeader::Double(double.clone()));
                // ok double vlan header
                from_x_slice_ip_variants(&test);
                // len error
                {
                    let data = test.to_vec(&[]);
                    // truncations within the inner vlan header
                    for len in 0..SingleVlanHeader::LEN {
                        let base_len = test.len(&[]) - SingleVlanHeader::LEN;
                        let err = LenError {
                            required_len: SingleVlanHeader::LEN,
                            len,
                            len_source: LenSource::Slice,
                            layer: Layer::VlanHeader,
                            layer_start_offset: base_len,
                        };
                        from_slice_assert_err(
                            &test,
                            &data[..base_len + len],
                            EthSliceError::Len(err.clone()),
                            IpSliceError::Len(err.clone()),
                        );
                    }
                }
            }
        }
    }
fn from_x_slice_ip_variants(base: &TestPacket) {
// none
from_x_slice_transport_variants(base);
// ipv4
for fragmented in [false, true] {
let ipv4 = {
let mut ipv4 =
Ipv4Header::new(0, 1, 2.into(), [3, 4, 5, 6], [7, 8, 9, 10]).unwrap();
ipv4.more_fragments = fragmented;
ipv4
};
{
let mut test = base.clone();
test.set_ether_type(ether_type::IPV4);
test.ip = Some(IpHeader::Version4(ipv4.clone(), Default::default()));
test.set_payload_len(0);
// ok ipv4
from_x_slice_transport_variants(&test);
// ipv4 len error
{
let data = test.to_vec(&[]);
for len in 0..ipv4.header_len() {
let base_len = test.len(&[]) - ipv4.header_len();
let err = LenError {
required_len: ipv4.header_len(),
len,
len_source: LenSource::Slice,
layer: Layer::Ipv4Header,
layer_start_offset: base_len,
};
from_slice_assert_err(
&test,
&data[..base_len + len],
EthSliceError::Len(err.clone()),
IpSliceError::Len({
if len < 1 {
let mut err = err.clone();
err.required_len = 1;
err.layer = Layer::IpHeader;
err
} else {
err.clone()
}
}),
);
}
}
// ipv4 content error (ihl lenght too small)
{
let mut data = test.to_vec(&[]);
let ipv4_offset = data.len() - ipv4.header_len();
// set the ihl to 0 to trigger a content error
data[ipv4_offset] = 0b1111_0000 & data[ipv4_offset];
from_slice_assert_err(
&test,
&data,
EthSliceError::Ipv4(
err::ipv4::HeaderError::HeaderLengthSmallerThanHeader { ihl: 0 },
),
IpSliceError::Ip(err::ip::HeaderError::Ipv4HeaderLengthSmallerThanHeader {
ihl: 0,
}),
);
}
// ipv4 content error (total length too small)
{
let mut data = test.to_vec(&[]);
let ipv4_offset = data.len() - ipv4.header_len();
// set the total length to 0 to trigger a content error
data[ipv4_offset + 2] = 0;
data[ipv4_offset + 3] = 0;
let err = LenError {
required_len: ipv4.header_len(),
len: 0,
len_source: LenSource::Ipv4HeaderTotalLen,
layer: Layer::Ipv4Packet,
layer_start_offset: {
test.link.as_ref().map(|h| h.header_len()).unwrap_or(0)
+ test.vlan.as_ref().map(|h| h.header_len()).unwrap_or(0)
},
};
from_slice_assert_err(
&test,
&data,
EthSliceError::Len(err.clone()),
IpSliceError::Len(err.clone()),
);
}
}
// ipv4 extension content error
{
let auth = IpAuthHeader::new(0.into(), 1, 2, &[]).unwrap();
let mut test = base.clone();
test.set_ether_type(ether_type::IPV4);
test.ip = Some(IpHeader::Version4(
{
let mut ipv4 = ipv4.clone();
ipv4.protocol = ip_number::AUTH;
ipv4
},
Ipv4Extensions {
auth: Some(auth.clone()),
},
));
test.set_payload_len(0);
// ok ipv4 & extension
from_x_slice_transport_variants(&test);
// ipv4 extension len error
for len in 0..auth.header_len() {
// set payload length
let mut test = test.clone();
test.set_payload_le_from_ip_on(
-1 * (auth.header_len() as isize) + (len as isize),
);
let data = test.to_vec(&[]);
let base_len = test.len(&[]) - auth.header_len();
let err = LenError {
required_len: auth.header_len(),
len,
len_source: LenSource::Ipv4HeaderTotalLen,
layer: Layer::IpAuthHeader,
layer_start_offset: base_len,
};
from_slice_assert_err(
&test,
&data[..base_len + len],
EthSliceError::Len(err.clone()),
IpSliceError::Len(err.clone()),
);
}
// ipv4 extension content error
{
let mut data = test.to_vec(&[]);
let auth_offset = data.len() - auth.header_len();
// set the icv len too smaller then allowed
data[auth_offset + 1] = 0;
// expect an error
let err = err::ip_auth::HeaderError::ZeroPayloadLen;
from_slice_assert_err(
&test,
&data,
EthSliceError::Ipv4Exts(err.clone()),
IpSliceError::Ip(err::ip::HeaderError::Ipv4Ext(err.clone())),
);
}
}
}
// ipv6
{
let ipv6 = Ipv6Header {
traffic_class: 0,
flow_label: 1.try_into().unwrap(),
payload_length: 2,
next_header: 3.into(),
hop_limit: 4,
source: [0; 16],
destination: [0; 16],
};
// ipv6 header only
{
let mut test = base.clone();
test.set_ether_type(ether_type::IPV6);
test.ip = Some(IpHeader::Version6(ipv6.clone(), Default::default()));
test.set_payload_len(0);
// ok ipv6
from_x_slice_transport_variants(&test);
// header len ipv6
{
let data = test.to_vec(&[]);
for len in 0..ipv6.header_len() {
let base_len = test.len(&[]) - ipv6.header_len();
let err = err::LenError {
required_len: ipv6.header_len(),
len,
len_source: LenSource::Slice,
layer: Layer::Ipv6Header,
layer_start_offset: base_len,
};
from_slice_assert_err(
&test,
&data[..base_len + len],
EthSliceError::Len(err.clone()),
IpSliceError::Len({
if len < 1 {
let mut err = err.clone();
err.required_len = 1;
err.layer = Layer::IpHeader;
err
} else {
err.clone()
}
}),
);
}
}
// content error ipv6
{
let mut data = test.to_vec(&[]);
// inject an invalid ip version
let base_len = data.len() - ipv6.header_len();
data[base_len] = data[base_len] & 0b0000_1111;
from_slice_assert_err(
&test,
&data,
EthSliceError::Ipv6(err::ipv6::HeaderError::UnexpectedVersion {
version_number: 0,
}),
IpSliceError::Ip(err::ip::HeaderError::UnsupportedIpVersion {
version_number: 0,
}),
);
}
}
// ipv6 + extension
for fragment in [false, true] {
let auth = IpAuthHeader::new(ip_number::GGP, 1, 2, &[]).unwrap();
let frag = Ipv6FragmentHeader {
next_header: ip_number::AUTH,
fragment_offset: 0.try_into().unwrap(),
more_fragments: fragment,
identification: 3,
};
let mut test = base.clone();
test.set_ether_type(ether_type::IPV6);
test.ip = Some(IpHeader::Version6(
{
let mut ipv6 = ipv6.clone();
ipv6.next_header = ip_number::IPV6_FRAG;
ipv6
},
{
let mut exts: Ipv6Extensions = Default::default();
exts.fragment = Some(frag.clone());
exts.auth = Some(auth.clone());
exts
},
));
test.set_payload_len(0);
// ok ipv6 & extensions
from_x_slice_transport_variants(&test);
// ipv6 extension len error
for len in 0..auth.header_len() {
// set payload length
let mut test = test.clone();
test.set_payload_le_from_ip_on(
-1 * (auth.header_len() as isize) + (len as isize),
);
let data = test.to_vec(&[]);
let base_len = test.len(&[]) - auth.header_len();
let err = LenError {
required_len: auth.header_len(),
len,
len_source: LenSource::Ipv6HeaderPayloadLen,
layer: Layer::IpAuthHeader,
layer_start_offset: base_len,
};
from_slice_assert_err(
&test,
&data[..base_len + len],
EthSliceError::Len(err.clone()),
IpSliceError::Len(err.clone()),
);
}
// ipv6 extension content error (auth)
{
let mut data = test.to_vec(&[]);
let auth_offset = data.len() - auth.header_len();
// set the icv len too smaller then allowed
data[auth_offset + 1] = 0;
let err = err::ip_auth::HeaderError::ZeroPayloadLen;
from_slice_assert_err(
&test,
&data,
EthSliceError::Ipv6Exts(err::ipv6_exts::HeaderError::IpAuth(err.clone())),
IpSliceError::Ip(err::ip::HeaderError::Ipv6Ext(
err::ipv6_exts::HeaderError::IpAuth(err.clone()),
)),
);
}
// ipv6 extension content error (hop by hop not at start)
{
let mut data = test.to_vec(&[]);
let auth_offset = data.len() - auth.header_len();
// set the next header to be a hop-by-hop header to trigger a "not at start error"
data[auth_offset] = 0;
from_slice_assert_err(
&test,
&data,
EthSliceError::Ipv6Exts(err::ipv6_exts::HeaderError::HopByHopNotAtStart),
IpSliceError::Ip(err::ip::HeaderError::Ipv6Ext(
err::ipv6_exts::HeaderError::HopByHopNotAtStart,
)),
);
}
}
}
}
/// Runs decode tests for every supported transport-layer variant
/// (UDP, TCP, ICMPv4, ICMPv6) layered on top of the given base packet.
///
/// For each variant this checks the happy path plus truncation ("len")
/// errors and, for TCP, a header content error. Length/content errors are
/// only checked when the IP payload is not fragmented, since fragmented
/// payloads skip transport parsing entirely.
fn from_x_slice_transport_variants(base: &TestPacket) {
    // none
    from_x_slice_assert_ok(base);

    // transport can only be set if ip is present
    if let Some(ip) = &base.ip {
        // udp
        {
            let udp = UdpHeader {
                source_port: 1,
                destination_port: 2,
                length: 3,
                checksum: 4,
            };
            let mut test = base.clone();
            test.ip = Some({
                let mut ip = ip.clone();
                ip.set_next_headers(ip_number::UDP);
                ip
            });
            test.transport = Some(TransportHeader::Udp(udp.clone()));
            test.set_payload_len(0);

            // ok decode
            from_x_slice_assert_ok(&test);

            // length error (transport is not parsed when fragmented)
            if false == test.is_ip_payload_fragmented() {
                for len in 0..udp.header_len() {
                    // build new test packet
                    let mut test = test.clone();

                    // set payload length
                    test.set_payload_le_from_ip_on(len as isize);

                    // generate data
                    let data = test.to_vec(&[]);

                    let base_len = test.len(&[]) - udp.header_len();
                    let err = LenError {
                        required_len: udp.header_len(),
                        len,
                        len_source: match test.ip.as_ref().unwrap() {
                            IpHeader::Version4(_, _) => LenSource::Ipv4HeaderTotalLen,
                            IpHeader::Version6(_, _) => LenSource::Ipv6HeaderPayloadLen,
                        },
                        layer: Layer::UdpHeader,
                        layer_start_offset: base_len,
                    };
                    from_slice_assert_err(
                        &test,
                        &data[..base_len + len],
                        EthSliceError::Len(err.clone()),
                        IpSliceError::Len(err.clone()),
                    );
                }
            }
        }
        // tcp
        {
            let tcp = TcpHeader::new(1, 2, 3, 4);
            let mut test = base.clone();
            test.ip = Some({
                let mut ip = ip.clone();
                ip.set_next_headers(ip_number::TCP);
                ip
            });
            test.transport = Some(TransportHeader::Tcp(tcp.clone()));
            test.set_payload_len(0);

            // ok decode
            from_x_slice_assert_ok(&test);

            // error can only occur if ip does not fragment the packet
            if false == test.is_ip_payload_fragmented() {
                // length error
                {
                    for len in 0..(tcp.header_len() as usize) {
                        // set payload length
                        let mut test = test.clone();
                        test.set_payload_le_from_ip_on(len as isize);

                        let data = test.to_vec(&[]);
                        let base_len = test.len(&[]) - (tcp.header_len() as usize);

                        let err = LenError {
                            required_len: tcp.header_len() as usize,
                            len,
                            len_source: match test.ip.as_ref().unwrap() {
                                IpHeader::Version4(_, _) => LenSource::Ipv4HeaderTotalLen,
                                IpHeader::Version6(_, _) => LenSource::Ipv6HeaderPayloadLen,
                            },
                            layer: Layer::TcpHeader,
                            layer_start_offset: base_len,
                        };
                        from_slice_assert_err(
                            &test,
                            &data[..base_len + len],
                            EthSliceError::Len(err.clone()),
                            IpSliceError::Len(err.clone()),
                        );
                    }
                }

                // content error
                {
                    let mut data = test.to_vec(&[]);
                    let base_len = test.len(&[]) - (tcp.header_len() as usize);

                    // set data offset to 0 to trigger an error
                    data[base_len + 12] = data[base_len + 12] & 0b0000_1111;

                    let err = err::tcp::HeaderError::DataOffsetTooSmall { data_offset: 0 };
                    from_slice_assert_err(
                        &test,
                        &data,
                        EthSliceError::Tcp(err.clone()),
                        IpSliceError::Tcp(err.clone()),
                    );
                }
            }
        }
        // icmpv4
        {
            let icmpv4 =
                Icmpv4Header::new(Icmpv4Type::EchoReply(IcmpEchoHeader { id: 1, seq: 2 }));
            let mut test = base.clone();
            test.ip = Some({
                let mut ip = ip.clone();
                ip.set_next_headers(ip_number::ICMP);
                ip
            });
            test.transport = Some(TransportHeader::Icmpv4(icmpv4.clone()));
            test.set_payload_len(0);

            // ok decode
            from_x_slice_assert_ok(&test);

            // length error
            if false == test.is_ip_payload_fragmented() {
                for len in 0..icmpv4.header_len() {
                    // set payload length
                    let mut test = test.clone();
                    test.set_payload_le_from_ip_on(len as isize);

                    let data = test.to_vec(&[]);
                    let base_len = test.len(&[]) - icmpv4.header_len();

                    let err = LenError {
                        required_len: icmpv4.header_len(),
                        len,
                        len_source: match test.ip.as_ref().unwrap() {
                            IpHeader::Version4(_, _) => LenSource::Ipv4HeaderTotalLen,
                            IpHeader::Version6(_, _) => LenSource::Ipv6HeaderPayloadLen,
                        },
                        layer: Layer::Icmpv4,
                        layer_start_offset: base_len,
                    };
                    from_slice_assert_err(
                        &test,
                        &data[..base_len + len],
                        EthSliceError::Len(err.clone()),
                        IpSliceError::Len(err.clone()),
                    );
                }
            }
        }
        // icmpv6
        {
            let icmpv6 =
                Icmpv6Header::new(Icmpv6Type::EchoReply(IcmpEchoHeader { id: 1, seq: 2 }));
            let mut test = base.clone();
            test.ip = Some({
                let mut ip = ip.clone();
                ip.set_next_headers(ip_number::IPV6_ICMP);
                ip
            });
            test.transport = Some(TransportHeader::Icmpv6(icmpv6.clone()));
            test.set_payload_len(0);

            // ok decode
            from_x_slice_assert_ok(&test);

            // length error
            if false == test.is_ip_payload_fragmented() {
                for len in 0..icmpv6.header_len() {
                    // set payload length
                    let mut test = test.clone();
                    test.set_payload_le_from_ip_on(len as isize);

                    let data = test.to_vec(&[]);
                    let base_len = test.len(&[]) - icmpv6.header_len();

                    let err = LenError {
                        required_len: icmpv6.header_len(),
                        len,
                        len_source: match test.ip.as_ref().unwrap() {
                            IpHeader::Version4(_, _) => LenSource::Ipv4HeaderTotalLen,
                            IpHeader::Version6(_, _) => LenSource::Ipv6HeaderPayloadLen,
                        },
                        layer: Layer::Icmpv6,
                        layer_start_offset: base_len,
                    };
                    from_slice_assert_err(
                        &test,
                        &data[..base_len + len],
                        EthSliceError::Len(err.clone()),
                        IpSliceError::Len(err.clone()),
                    );
                }
            }
        }
    }
}
/// Serializes the given test packet (with a fixed 4-byte payload), decodes
/// it via every applicable `SlicedPacket::from_*` entry point and verifies
/// the decoded headers and payload match the input.
fn from_x_slice_assert_ok(test_base: &TestPacket) {
    /// Compares one decode result against the expected test packet.
    fn assert_test_result(
        test: &TestPacket,
        expected_payload: &[u8],
        data: &[u8],
        result: &SlicedPacket,
    ) {
        // check if fragmenting
        let is_fragmented = test.is_ip_payload_fragmented();

        // check headers
        assert_eq!(test.link, result.link.as_ref().map(|e| e.to_header()))
        ;
        assert_eq!(test.vlan, result.vlan.as_ref().map(|e| e.to_header()));
        assert_eq!(
            test.ip,
            result.ip.as_ref().map(|s: &InternetSlice| -> IpHeader {
                match s {
                    InternetSlice::Ipv4(ipv4) => IpHeader::Version4(
                        ipv4.header().to_header(),
                        ipv4.extensions().to_header(),
                    ),
                    InternetSlice::Ipv6(ipv6) => IpHeader::Version6(
                        ipv6.header().to_header(),
                        Ipv6Extensions::from_slice(
                            ipv6.header().next_header(),
                            ipv6.extensions().slice(),
                        )
                        .unwrap()
                        .0,
                    ),
                }
            })
        );

        // check transport header & payload
        if is_fragmented {
            // fragmented payloads must leave the transport layer unparsed
            assert_eq!(result.transport, None);
            let transport_len = test.transport.as_ref().map_or(0, |t| t.header_len());
            assert_eq!(
                result.payload,
                &data[data.len() - expected_payload.len() - transport_len..]
            );
        } else {
            use TransportHeader as H;
            use TransportSlice as S;
            match &result.transport {
                Some(S::Icmpv4(icmpv4)) => {
                    assert_eq!(&test.transport, &Some(H::Icmpv4(icmpv4.header())));
                    assert_eq!(icmpv4.payload(), expected_payload);
                    assert_eq!(result.payload, &[]);
                }
                Some(S::Icmpv6(icmpv6)) => {
                    assert_eq!(&test.transport, &Some(H::Icmpv6(icmpv6.header())));
                    assert_eq!(icmpv6.payload(), expected_payload);
                    assert_eq!(result.payload, &[]);
                }
                Some(S::Udp(s)) => {
                    assert_eq!(&test.transport, &Some(H::Udp(s.to_header())));
                    assert_eq!(result.payload, expected_payload);
                }
                Some(S::Tcp(s)) => {
                    assert_eq!(&test.transport, &Some(H::Tcp(s.to_header())));
                    assert_eq!(result.payload, expected_payload);
                }
                Some(S::Unknown(next_ip_number)) => {
                    // unknown protocol: no transport header expected, but the
                    // reported ip number must match the ip header's next_header
                    assert_eq!(&test.transport, &None);
                    assert_eq!(
                        *next_ip_number,
                        test.ip.as_ref().unwrap().next_header().unwrap()
                    );
                    assert_eq!(result.payload, expected_payload);
                }
                None => {
                    assert_eq!(&test.transport, &None);
                    assert_eq!(result.payload, expected_payload);
                }
            }
        }
    }

    // setup payload
    let payload = [1, 2, 3, 4];

    // set length fields in ip headers
    let test = {
        let mut test = test_base.clone();
        test.set_payload_len(payload.len());
        test
    };

    // write data
    let data = test.to_vec(&payload);

    // from_ethernet
    if test.link.is_some() {
        let result = SlicedPacket::from_ethernet(&data).unwrap();
        assert_test_result(&test, &payload, &data, &result);
    }
    // from_ether_type (vlan at start)
    if test.link.is_none() && test.vlan.is_some() {
        for ether_type in VLAN_ETHER_TYPES {
            let result = SlicedPacket::from_ether_type(ether_type, &data).unwrap();
            assert_test_result(&test, &payload, &data, &result);
        }
    }
    // from_ether_type (ip at start)
    if test.link.is_none() && test.vlan.is_none() {
        if let Some(ip) = &test.ip {
            let result = SlicedPacket::from_ether_type(
                match ip {
                    IpHeader::Version4(_, _) => ether_type::IPV4,
                    IpHeader::Version6(_, _) => ether_type::IPV6,
                },
                &data,
            )
            .unwrap();
            assert_test_result(&test, &payload, &data, &result);
        }
    }
    // from_ip_slice
    if test.link.is_none() && test.vlan.is_none() && test.ip.is_some() {
        let result = SlicedPacket::from_ip(&data).unwrap();
        assert_test_result(&test, &payload, &data, &result);
    }
}
/// Check that the given errors get triggered if presented with the given
/// data.
///
/// `eth_err` is the error expected from the ethernet-based entry points
/// (`from_ethernet` / `from_ether_type`); `ip_err` is the error expected
/// from `from_ip`. Which entry points are exercised depends on which
/// headers the test packet starts with.
fn from_slice_assert_err(
    test: &TestPacket,
    data: &[u8],
    eth_err: EthSliceError,
    ip_err: IpSliceError,
) {
    // from_ethernet_slice
    if test.link.is_some() {
        assert_eq!(
            eth_err.clone(),
            SlicedPacket::from_ethernet(&data).unwrap_err()
        );
    }
    // from_ether_type (vlan at start)
    if test.link.is_none() && test.vlan.is_some() {
        for ether_type in VLAN_ETHER_TYPES {
            assert_eq!(
                eth_err.clone(),
                SlicedPacket::from_ether_type(ether_type, &data).unwrap_err()
            );
        }
    }
    // from_ether_type (ip at start)
    if test.link.is_none() && test.vlan.is_none() {
        if let Some(ip) = &test.ip {
            let err = SlicedPacket::from_ether_type(
                match ip {
                    IpHeader::Version4(_, _) => ether_type::IPV4,
                    IpHeader::Version6(_, _) => ether_type::IPV6,
                },
                &data,
            )
            .unwrap_err();
            assert_eq!(err, eth_err.clone());
        }
    }
    // from_ip_slice
    if test.link.is_none() && test.vlan.is_none() && test.ip.is_some() {
        assert_eq!(ip_err, SlicedPacket::from_ip(&data).unwrap_err());
    }
}
}
| true
|
bb61dc886abaa2ebecdae23131e0e69b54c0b3c3
|
Rust
|
sachinbhutani/svelte-on-rust
|
/src/main.rs
|
UTF-8
| 3,856
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
use rocket::serde::json::{Value, json};
use rocket::serde::{Serialize,Deserialize, json::Json};
use rocket::{Response};
use rocket::fairing::{Fairing, Info, Kind};
use rocket::http::{Header,Status};
use rocket::request::{self, Outcome, Request, FromRequest};
use sha2::{Sha256,Digest};
use chrono::Utc;
use std::convert::Infallible;
#[macro_use] extern crate rocket;
/// Derives a session id by hashing `data` together with the current
/// RFC 3339 timestamp, returning the SHA-256 digest as lowercase hex.
// NOTE(review): `&str` would be the more idiomatic parameter type, but the
// signature is kept unchanged for callers.
fn get_session_id(data: &String) -> String {
    let timestamp = Utc::now().to_rfc3339();
    let mut hasher = Sha256::new();
    hasher.update(data);
    hasher.update(timestamp);
    format!("{:x}", hasher.finalize())
}
/// Root API route — returns a plain-text greeting.
#[get("/")]
fn api() -> String {
    String::from("Hello, from Rust Backend!")
}
/// Health/status endpoint returning app metadata as JSON.
#[get("/message", rank = 1)]
fn json_message() -> Value {
    json!({ "app": "svelte-on-rust", "version": "0.2.0", "status": "ok"})
}
// User struct for login — deserialized from the JSON request body of /login.
#[derive(Serialize, Deserialize)]
struct User {
    username: String,
    // plain-text password as sent by the client; only compared, never stored
    password: String
}
/// Login endpoint. Placeholder logic: a login is "valid" when
/// username == password; on success a freshly hashed token is returned.
#[post("/login", format = "json", data = "<user>")]
fn login_user(user: Json<User>) -> Value{
    // should be replaced with databased/auth service logic to generate tokens
    // issue access token from auth service
    let token = get_session_id(&user.username);
    if user.username == user.password {
        // NOTE(review): key "messge" and value "successfull" are typos, and
        // the key differs from the error branch's "message". Fixing them
        // changes the public JSON contract — confirm with API consumers first.
        json!({ "result" : "success",
                "messge" : "login successfull",
                "token" : token
        })
    }else {
        json!({ "result" : "error",
                "message": "Invalid Username/Password"
        })
    }
}
/// Logout endpoint — currently a stub that always reports success.
#[get("/logout")]
fn logout_user() -> Value{
    // database logic to process logout should be added
    // NOTE(review): "messge"/"successfull" are typos in the public JSON
    // contract — confirm with consumers before correcting.
    json!({ "result" : "success",
            "messge" : "logout successfull"
    })
}
// Request-guard payload: the raw value of the "x-token" header.
struct Token<'r>(&'r str);

#[derive(Debug)]
enum TokenError {
    // the "x-token" header was absent (or the "_" placeholder value)
    Missing,
    // reserved for malformed tokens; not produced by the current guard
    Invalid,
}
#[rocket::async_trait]
impl<'r> FromRequest<'r> for Token<'r> {
    type Error = TokenError;

    /// Extracts the "x-token" header as a request guard.
    ///
    /// Only checks for the header's presence; real server-side validation of
    /// the session id / auth token still needs to be added.
    async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Error> {
        //replace with server side validation of the session id /auth token to respond
        let t = req.headers().get_one("x-token");
        print!("Token recieved:{:?}", t);
        match t {
            // header absent or the "_" placeholder value -> unauthorized
            None | Some("_") => Outcome::Failure((Status::Unauthorized, TokenError::Missing)),
            // any other value is accepted as-is
            Some(key) => Outcome::Success(Token(key)),
            // Bug fix: the original had a trailing
            // `Some(_) => Failure((BadRequest, Invalid))` arm here which was
            // unreachable (the `Some(key)` arm already matches every
            // remaining value); it has been removed.
        }
    }
}
//#[get("/sensitive")]
//fn sensitive(key: ApiKey<'_>) -> &'static str {
// "Sensitive data."
//}
/// Protected route — only reachable when the `Token` request guard succeeds.
/// Echoes the presented token back along with a message.
#[get("/secret")]
fn secret(token: Token<'_>) -> Value {
    json!({"token": token.0, "message":"This is a secret message from server, just for you!"})
}
// Catch-all for OPTIONS requests so CORS preflights get a 200 response
// (the CORS fairing then attaches the Access-Control-* headers to it).
#[options("/<_..>")]
fn all_options() {
}
// Marker type for the CORS fairing below; carries no state.
#[derive(Default, Clone)]
struct CORS {
}
#[rocket::async_trait]
impl Fairing for CORS {
    fn info(&self) -> Info {
        Info {
            name: "Cross-Origin-Resource-Sharing Fairing",
            // only hook into responses; requests pass through untouched
            kind: Kind::Response,
        }
    }

    /// Attaches permissive CORS headers to every outgoing response.
    ///
    /// NOTE(review): `Access-Control-Allow-Origin: *` combined with
    /// `Access-Control-Allow-Credentials: true` is rejected by browsers per
    /// the Fetch/CORS spec (credentialed requests require an explicit
    /// origin). Confirm the intended deployment before relying on
    /// credentials here.
    async fn on_response<'r>(&self, _request: &'r Request<'_>, response: &mut Response<'r>) {
        response.set_header(Header::new("Access-Control-Allow-Origin", "*"));
        response.set_header(Header::new(
            "Access-Control-Allow-Methods",
            "POST, PATCH, PUT, DELETE, HEAD, OPTIONS, GET",
        ));
        response.set_header(Header::new("Access-Control-Allow-Headers", "*"));
        response.set_header(Header::new("Access-Control-Allow-Credentials", "true"));
    }
}
/// Builds the Rocket instance: mounts the public, auth and secured API
/// routes and attaches the CORS fairing.
#[launch]
fn rocket() -> _ {
    rocket::build()
        .mount("/api",routes![api,json_message])
        .mount("/api/auth",routes![login_user,logout_user,all_options])
        .mount("/api/secure",routes![secret,all_options])
        .attach(CORS::default())
}
| true
|
5c420fcc292e94086410f906c02763eee3cd7a90
|
Rust
|
tyu-ru/kyopro_lib
|
/klmacro/src/lib.rs
|
UTF-8
| 893
| 3.109375
| 3
|
[] |
no_license
|
/// `chmin!(x, y)` replaces `x` with `y` when `y < x` and evaluates to `true`
/// iff an update happened. With multiple candidates
/// (`chmin!(x, a, b, c)`) each is tried in order and the result is `true`
/// if any of them lowered `x`.
#[macro_export]
macro_rules! chmin {
    ($x:expr, $y: expr) => {
        if $y < $x {
            $x = $y;
            true
        } else {
            false
        }
    };
    // fold the candidates with `||`; note `||` does NOT short-circuit here
    // in a way that skips updates, because each `chmin!` runs before the `or`
    ($x:expr, $($y:expr),+) => {
        $(chmin!($x, $y)) || *
    };
}
/// `chmax!(x, y)` replaces `x` with `y` when `y > x` and evaluates to `true`
/// iff an update happened. With multiple candidates, each is tried in order
/// and the result is `true` if any of them raised `x`.
#[macro_export]
macro_rules! chmax {
    ($x:expr, $y: expr) => {
        if $y > $x {
            $x = $y;
            true
        } else {
            false
        }
    };
    ($x:expr, $($y:expr),+) => {
        $(chmax!($x, $y)) || *
    };
}
#[cfg(test)]
#[test]
fn test() {
    // chmin: only a strictly smaller candidate updates and returns true
    let mut x = 2;
    assert_eq!(chmin!(x, 4), false);
    assert_eq!(x, 2);
    assert_eq!(chmin!(x, 1), true);
    assert_eq!(x, 1);
    assert_eq!(chmin!(x, 1), false);
    assert_eq!(x, 1);

    // chmax with multiple candidates: true iff any candidate raised the value
    let mut y = 4;
    assert_eq!(chmax!(y, 4, 1, 2, 3), false);
    assert_eq!(y, 4);
    assert_eq!(chmax!(y, 4, 1, 6, 5, 2, 3), true);
    assert_eq!(y, 6);
}
| true
|
71e797e49bb81de6d0995e22a2414f7109531cc2
|
Rust
|
comp590-19s/590-material
|
/lecture/04-the-stack/ex1_refs/src/main.rs
|
UTF-8
| 277
| 3.0625
| 3
|
[] |
no_license
|
/// Prints the stack addresses of two locals and of a reference to one of
/// them, then the referenced value.
fn main() {
    let a = 0u8;
    print_address("&a", &a);

    let b = 1u8;
    print_address("&b", &b);

    let p = &b;
    print_address("p", p);
    println!("{}", p);
}
/// Prints the memory address of `address` followed by the given label.
fn print_address(label: &str, address: &u8) {
    println!("{0:p} - {1}", address, label);
}
| true
|
dd79f1dbbc3a3d3b3979d94cae6e530963071798
|
Rust
|
Kuzirashi/godwoken
|
/crates/db/src/write_batch.rs
|
UTF-8
| 1,754
| 2.703125
| 3
|
[
"MIT"
] |
permissive
|
//! TODO(doc): @quake
use crate::db::cf_handle;
use crate::schema::Col;
use crate::{internal_error, Result};
use rocksdb::{OptimisticTransactionDB, WriteBatch};
use std::sync::Arc;
/// A batch of write operations (puts / deletes / range deletes) that can be
/// applied to the underlying RocksDB instance as a unit.
pub struct RocksDBWriteBatch {
    // handle to the database; needed to resolve column family handles
    pub(crate) db: Arc<OptimisticTransactionDB>,
    // the accumulated rocksdb write batch
    pub(crate) inner: WriteBatch,
}
impl RocksDBWriteBatch {
    /// Returns the number of operations queued in this batch.
    pub fn len(&self) -> usize {
        self.inner.len()
    }

    /// Return WriteBatch serialized size (in bytes).
    pub fn size_in_bytes(&self) -> usize {
        self.inner.size_in_bytes()
    }

    /// Returns `true` when no operations have been queued.
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }

    /// Queues a put of `value` under `key` in column family `col`.
    pub fn put(&mut self, col: Col, key: &[u8], value: &[u8]) -> Result<()> {
        let cf = cf_handle(&self.db, col)?;
        self.inner.put_cf(cf, key, value).map_err(internal_error)
    }

    /// Queues a delete of `key` in column family `col`.
    pub fn delete(&mut self, col: Col, key: &[u8]) -> Result<()> {
        let cf = cf_handle(&self.db, col)?;
        self.inner.delete_cf(cf, key).map_err(internal_error)
    }

    /// Remove database entries from start key to end key.
    ///
    /// Removes the database entries in the range ["begin_key", "end_key"), i.e.,
    /// including "begin_key" and excluding "end_key". It is not an error if no
    /// keys exist in the range ["begin_key", "end_key").
    pub fn delete_range(&mut self, col: Col, from: &[u8], to: &[u8]) -> Result<()> {
        let cf = cf_handle(&self.db, col)?;
        self.inner
            .delete_range_cf(cf, from, to)
            .map_err(internal_error)
    }

    /// Discards all queued operations without applying them.
    pub fn clear(&mut self) -> Result<()> {
        self.inner.clear().map_err(internal_error)
    }
}
| true
|
e7a94b0bd47ac7f5dd2988e8e29bc44d584ecda7
|
Rust
|
oskarbraten/zelda
|
/src/collections.rs
|
UTF-8
| 2,765
| 3.78125
| 4
|
[
"MIT"
] |
permissive
|
use num_traits::{Unsigned, Bounded, AsPrimitive, WrappingAdd, WrappingSub};
/// A ring buffer of sequence numbers and associated values.
/// The ring buffer can have any size, but it should preferably be a power of two.
/// Any other size may result in a __reduction__ in the total number of sequence numbers available.
/// For example:
///
/// With sequence numbers of type _T = u8_ and a size of _7_, the total number of available sequence numbers become:
///
/// _(255 / 7) * 7 == 252_
#[derive(Debug, Clone)]
pub struct SequenceRingBuffer<T, V> {
    // number of slots in `buffer`
    size: T,
    // exclusive upper bound for sequence numbers: the largest multiple of
    // `size` representable in T (keeps seq -> slot mapping stable on wrap)
    max: T,
    // most recently issued sequence number
    current: T,
    // slot storage; slot index = seq % size
    buffer: Vec<Option<V>>
}
impl<T, V> SequenceRingBuffer<T, V> where T: PartialOrd + Unsigned + Bounded + WrappingAdd + WrappingSub + AsPrimitive<usize>, V: Clone {
    /// Creates a new buffer with the specified size.
    /// The size limits the number of active (sequence number, value)-pairs that can be stored.
    /// Once the size is exceeded the oldest pair will be dropped.
    ///
    /// NOTE(review): a `size` of zero makes `T::max_value() / size` divide by
    /// zero — presumably callers always pass a non-zero size; confirm.
    pub fn new(size: T) -> Self {
        Self {
            size,
            max: ((T::max_value() / size) * size),
            current: T::zero(),
            buffer: (0..size.as_()).map(|_| None).collect()
        }
    }

    /// Inserts the value and returns the associated sequence number.
    /// The entry with the oldest sequence number at the time will fall out of the buffer (being overwritten by the new one).
    pub fn insert(&mut self, value: V) -> T {
        // next sequence number, wrapping at `max` (the first issued seq is 1)
        let seq = self.current.wrapping_add(&T::one()) % self.max;
        let index: usize = (seq % self.size).as_();
        self.buffer[index] = Some(value);
        self.current = seq;
        seq
    }

    /// Checks whether the sequence number is within bounds, returning the actual index in the buffer if it is.
    ///
    /// NOTE(review): the second (wrapped-window) branch accepts
    /// `high <= seq < low`, which looks inverted relative to the first
    /// branch's `low < seq <= high` — verify this is the intended window
    /// when `current` has wrapped.
    fn within_bounds(&self, seq: &T) -> Option<T> {
        let high = self.current;
        let low = self.current.wrapping_sub(&self.size);
        if (low <= high && low < *seq && *seq <= high) || (low > high && high <= *seq && *seq < low) {
            Some(*seq % self.size)
        } else {
            None
        }
    }

    /// Removes the value associated with the sequence number, returning the value if it was found.
    pub fn remove(&mut self, seq: &T) -> Option<V> {
        self.within_bounds(seq).and_then(|index| {
            let value = self.buffer[index.as_()].clone();
            self.buffer[index.as_()] = None;
            value
        })
    }

    /// Gets a reference to the value associated with the sequence number, if it exists.
    pub fn get(&self, seq: &T) -> Option<&V> {
        self.within_bounds(seq).and_then(|index| {
            self.buffer.get(index.as_()).and_then(|value| {
                value.as_ref()
            })
        })
    }
}
| true
|
ea4252e6a7b6142dd98f0f4418c0cc7b6a1c6c33
|
Rust
|
rolo/midigrep
|
/src/str_to_note.rs
|
UTF-8
| 3,772
| 2.984375
| 3
|
[] |
no_license
|
use midi::note::Note;
/// Maps a lowercase note name such as "c4" or "a#3" to the corresponding
/// MIDI `Note` value; returns `None` for unknown names.
///
/// NOTE(review): "a0" and the entire "g#" family are absent from this table
/// — possibly because the `midi` crate's `Note` enum does not define them;
/// confirm against the crate before treating the mapping as complete.
fn str_to_note(n: &str) -> Option<Note> {
    match n {
        // A
        "a1" => Some(Note::A1),
        "a2" => Some(Note::A2),
        "a3" => Some(Note::A3),
        "a4" => Some(Note::A4),
        "a5" => Some(Note::A5),
        "a6" => Some(Note::A6),
        "a7" => Some(Note::A7),
        "a8" => Some(Note::A8),
        // A#
        "a#0" => Some(Note::As0),
        "a#1" => Some(Note::As1),
        "a#2" => Some(Note::As2),
        "a#3" => Some(Note::As3),
        "a#4" => Some(Note::As4),
        "a#5" => Some(Note::As5),
        "a#6" => Some(Note::As6),
        "a#7" => Some(Note::As7),
        "a#8" => Some(Note::As8),
        // B
        "b0" => Some(Note::B0),
        "b1" => Some(Note::B1),
        "b2" => Some(Note::B2),
        "b3" => Some(Note::B3),
        "b4" => Some(Note::B4),
        "b5" => Some(Note::B5),
        "b6" => Some(Note::B6),
        "b7" => Some(Note::B7),
        "b8" => Some(Note::B8),
        // C
        "c0" => Some(Note::C0),
        "c1" => Some(Note::C1),
        "c2" => Some(Note::C2),
        "c3" => Some(Note::C3),
        "c4" => Some(Note::C4),
        "c5" => Some(Note::C5),
        "c6" => Some(Note::C6),
        "c7" => Some(Note::C7),
        "c8" => Some(Note::C8),
        // C#
        "c#0" => Some(Note::Cs0),
        "c#1" => Some(Note::Cs1),
        "c#2" => Some(Note::Cs2),
        "c#3" => Some(Note::Cs3),
        "c#4" => Some(Note::Cs4),
        "c#5" => Some(Note::Cs5),
        "c#6" => Some(Note::Cs6),
        "c#7" => Some(Note::Cs7),
        "c#8" => Some(Note::Cs8),
        // D
        "d0" => Some(Note::D0),
        "d1" => Some(Note::D1),
        "d2" => Some(Note::D2),
        "d3" => Some(Note::D3),
        "d4" => Some(Note::D4),
        "d5" => Some(Note::D5),
        "d6" => Some(Note::D6),
        "d7" => Some(Note::D7),
        "d8" => Some(Note::D8),
        // D#
        "d#0" => Some(Note::Ds0),
        "d#1" => Some(Note::Ds1),
        "d#2" => Some(Note::Ds2),
        "d#3" => Some(Note::Ds3),
        "d#4" => Some(Note::Ds4),
        "d#5" => Some(Note::Ds5),
        "d#6" => Some(Note::Ds6),
        "d#7" => Some(Note::Ds7),
        "d#8" => Some(Note::Ds8),
        // E
        "e0" => Some(Note::E0),
        "e1" => Some(Note::E1),
        "e2" => Some(Note::E2),
        "e3" => Some(Note::E3),
        "e4" => Some(Note::E4),
        "e5" => Some(Note::E5),
        "e6" => Some(Note::E6),
        "e7" => Some(Note::E7),
        "e8" => Some(Note::E8),
        // F
        "f0" => Some(Note::F0),
        "f1" => Some(Note::F1),
        "f2" => Some(Note::F2),
        "f3" => Some(Note::F3),
        "f4" => Some(Note::F4),
        "f5" => Some(Note::F5),
        "f6" => Some(Note::F6),
        "f7" => Some(Note::F7),
        "f8" => Some(Note::F8),
        // F#
        "f#0" => Some(Note::Fs0),
        "f#1" => Some(Note::Fs1),
        "f#2" => Some(Note::Fs2),
        "f#3" => Some(Note::Fs3),
        "f#4" => Some(Note::Fs4),
        "f#5" => Some(Note::Fs5),
        "f#6" => Some(Note::Fs6),
        "f#7" => Some(Note::Fs7),
        "f#8" => Some(Note::Fs8),
        // G
        "g0" => Some(Note::G0),
        "g1" => Some(Note::G1),
        "g2" => Some(Note::G2),
        "g3" => Some(Note::G3),
        "g4" => Some(Note::G4),
        "g5" => Some(Note::G5),
        "g6" => Some(Note::G6),
        "g7" => Some(Note::G7),
        "g8" => Some(Note::G8),
        _ => None
    }
}
/// Converts note-name strings into `Note` values.
///
/// Names are lowercased before lookup. Unrecognized names are reported on
/// stdout and skipped; the function always returns `Ok` with the
/// successfully parsed notes.
pub fn to_notes(s: Vec<String>) -> Result<Vec<Note>, ()> {
    let mut notes = Vec::with_capacity(s.len());
    for name in s {
        match str_to_note(&name.to_lowercase()) {
            Some(note) => notes.push(note),
            None => println!("Invalid note: {}", name),
        }
    }
    Ok(notes)
}
| true
|
8499a1a768d8fae78d1c2c6b8bd1887ab43df7ee
|
Rust
|
olFi95/rust_test
|
/src/main.rs
|
UTF-8
| 437
| 2.546875
| 3
|
[] |
no_license
|
use socketcan;
/// Opens the virtual CAN interface `vcan0` and sends a single test frame
/// (id 1, payload [1, 2, 3], flags kept as in the original call).
fn main() {
    println!("Hello, world!");

    // panic with a descriptive message if the interface is unavailable
    // instead of a bare `unwrap`
    let socket = socketcan::CANSocket::open("vcan0")
        .expect("failed to open CAN interface vcan0");

    let payload = [1, 2, 3];
    let frame_result = socketcan::CANFrame::new(1, &payload, false, true);
    if let Ok(frame) = frame_result {
        // Bug fix: the original ignored the result of write_frame and
        // printed "Write success!" unconditionally — the write is now checked.
        match socket.write_frame(&frame) {
            Ok(_) => println!("Write success!"),
            Err(_) => println!("Write failure!"),
        }
    } else {
        println!("Write failure!");
    }
}
| true
|
19650d116b77c47835218cd881e30b6505cf7c7c
|
Rust
|
glium/glium
|
/examples/blitting.rs
|
UTF-8
| 2,422
| 2.734375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#[macro_use]
extern crate glium;
use std::io::Cursor;
use glium::{Surface, Display};
use glutin::surface::WindowSurface;
use support::{ApplicationContext, State};
mod support;
// Per-application state: the source texture (the "OpenGL" image) and the
// destination texture that random regions are blitted into each frame.
struct Application {
    pub opengl_texture: glium::Texture2d,
    pub dest_texture: glium::Texture2d,
}
impl ApplicationContext for Application {
    const WINDOW_TITLE:&'static str = "Glium blitting example";

    fn new(display: &Display<WindowSurface>) -> Self {
        // building a texture with "OpenGL" drawn on it
        let image = image::load(Cursor::new(&include_bytes!("../tests/fixture/opengl.png")[..]),
                        image::ImageFormat::Png).unwrap().to_rgba8();
        let image_dimensions = image.dimensions();
        // flip rows: image data is top-down, OpenGL expects bottom-up
        let image = glium::texture::RawImage2d::from_raw_rgba_reversed(&image.into_raw(), image_dimensions);
        let opengl_texture = glium::Texture2d::new(display, image).unwrap();

        // building a 1024x1024 empty texture, cleared to opaque black
        let dest_texture = glium::Texture2d::empty_with_format(display,
                                               glium::texture::UncompressedFloatFormat::U8U8U8U8,
                                               glium::texture::MipmapsOption::NoMipmap,
                                               1024, 1024).unwrap();
        dest_texture.as_surface().clear_color(0.0, 0.0, 0.0, 1.0);

        Self {
            opengl_texture,
            dest_texture,
        }
    }

    fn draw_frame(&mut self, display: &Display<WindowSurface>) {
        let frame = display.draw();
        // with probability ~1/60 per frame (roughly once a second at 60 fps,
        // presumably), blit the source texture into a random destination rect
        if rand::random::<f64>() <= 0.016666 {
            let (left, bottom, dimensions): (f32, f32, f32) = rand::random();
            let dest_rect = glium::BlitTarget {
                left: (left * self.dest_texture.get_width() as f32) as u32,
                bottom: (bottom * self.dest_texture.get_height().unwrap() as f32) as u32,
                width: (dimensions * self.dest_texture.get_width() as f32) as i32,
                height: (dimensions * self.dest_texture.get_height().unwrap() as f32) as i32,
            };
            self.opengl_texture.as_surface().blit_whole_color_to(&self.dest_texture.as_surface(), &dest_rect,
                glium::uniforms::MagnifySamplerFilter::Linear);
        }
        // stretch the accumulated destination texture over the whole frame
        self.dest_texture.as_surface().fill(&frame, glium::uniforms::MagnifySamplerFilter::Linear);
        frame.finish().unwrap();
    }
}
fn main() {
    // hands control to the shared example event loop (see support module)
    State::<Application>::run_loop();
}
| true
|
498d4f8945560f45c7d9baa4c907ecff1e980f12
|
Rust
|
vypxl/chainreaction
|
/wasm/src/lib.rs
|
UTF-8
| 1,611
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
use wasm_bindgen::prelude::*;
use js_sys::*;
// Imported JS binding for `window.alert` (currently unused in this module).
#[wasm_bindgen]
extern "C" {
    fn alert(s: String);
}
/// Chain-reaction game board exposed to JavaScript via wasm-bindgen.
#[wasm_bindgen]
pub struct World {
    width: usize,
    height: usize,
    // cell values 0..=4 stored flat; see `cell()` for the index formula
    cells: Vec<u8>,
}
#[wasm_bindgen]
impl World {
    /// Creates a `width` x `height` board with every cell set to 0.
    pub fn new(width: usize, height: usize) -> World {
        World {
            width,
            height,
            cells: vec![0; width * height],
        }
    }

    /// Propagates a chain reaction starting at `(x, y)`: a cell holding 4
    /// "explodes" (resets to 0) and increments its four wrapping neighbours,
    /// which may explode recursively in turn.
    pub fn update(&mut self, x: usize, y: usize) {
        if self.cell(x, y) == 4 {
            self.set_cell(x, y, self.cell(x, y) - 4);
            for (nx, ny) in self.neighbours(x, y) {
                self.inc_cell(nx, ny);
                self.update(nx, ny);
            }
        }
    }

    /// Returns the four orthogonal neighbours with toroidal wrap-around.
    ///
    /// Bug fix: the original used `(x - 1) % self.width`, which overflows
    /// `usize` when `x == 0` (panic in debug builds, wrong index in release).
    /// Adding the dimension before subtracting wraps correctly to the far edge.
    fn neighbours(&self, x: usize, y: usize) -> Vec<(usize, usize)> {
        vec![
            ((x + 1) % self.width, y),
            ((x + self.width - 1) % self.width, y),
            (x, (y + 1) % self.height),
            (x, (y + self.height - 1) % self.height),
        ]
    }

    pub fn width(&self) -> usize {
        self.width
    }

    pub fn height(&self) -> usize {
        self.height
    }

    /// Returns a JS typed-array view over the cell buffer.
    ///
    /// SAFETY: `Uint8Array::view` borrows `self.cells` directly; the view is
    /// invalidated by any later mutation/reallocation of the buffer or by
    /// wasm memory growth, so JS callers must copy or consume it immediately.
    pub fn cells(&self) -> Uint8Array {
        unsafe { Uint8Array::view(&self.cells) }
    }

    // NOTE(review): the flat index `width * x + y` is only consistent for
    // square boards; for width != height it can run past the end of `cells`.
    // Left unchanged because the JS side may depend on this layout — confirm
    // before switching to the conventional `y * width + x`.
    pub fn cell(&self, x: usize, y: usize) -> u8 {
        self.cells[self.width * x + y]
    }

    pub fn set_cell(&mut self, x: usize, y: usize, value: u8) {
        self.cells[self.width * x + y] = value;
    }

    /// Increments a cell, saturating at 4 (the "about to explode" state).
    pub fn inc_cell(&mut self, x: usize, y: usize) {
        let idx = self.width * x + y;
        self.cells[idx] += 1;
        if self.cells[idx] > 4 {
            self.cells[idx] = 4;
        }
    }
}
| true
|
fa8edd91d3355eacf5d57638bdce07598a549340
|
Rust
|
adumbidiot/scratch-native
|
/src/scratch/utils.rs
|
UTF-8
| 1,764
| 3.40625
| 3
|
[] |
no_license
|
use std::path::PathBuf;
// Builder-style helper that walks a directory path up/down and creates
// directories / files at the current position, reporting through a logger.
pub struct DirCreater {
    // current working path; mutated by `up`/`down`
    path: PathBuf,
    // receives a callback for every mkdir / file write
    logger: Box<DirCreaterLogger>,
}
impl DirCreater {
    /// Creates a new `DirCreater` rooted at `path` with the default
    /// stdout logger.
    pub fn new(path: PathBuf) -> DirCreater {
        DirCreater {
            path,
            logger: Box::new(DefaultLogger::new()),
        }
    }

    /// Swaps in a custom logger; returns `self` for chaining.
    pub fn with_logger<T: DirCreaterLogger + 'static>(&mut self, logger: T) -> &mut Self {
        self.logger = Box::new(logger);
        self
    }

    /// Moves the current path one level up.
    pub fn up(&mut self) -> &mut Self {
        self.path.pop();
        self
    }

    /// Descends into the child directory `path`.
    pub fn down(&mut self, path: &str) -> &mut Self {
        self.path.push(path);
        self
    }

    /// Creates a directory at the current path, logging the attempt first.
    pub fn mkdir(&mut self) -> Result<&mut Self, std::io::Error> {
        self.logger.log_mkdir(&self.path);
        std::fs::create_dir(&self.path).map(|_| self)
    }

    /// Writes `data` to the file `name` under the current path; the current
    /// path itself is left unchanged afterwards.
    pub fn write_file(&mut self, name: &str, data: &[u8]) -> Result<&mut Self, std::io::Error> {
        self.path.push(name);
        self.logger.log_write_file(&self.path, name, data);
        let result = std::fs::write(&self.path, data);
        self.path.pop();
        result.map(|_| self)
    }
}
// Callback interface used by `DirCreater` to report filesystem actions.
pub trait DirCreaterLogger {
    // constructs the logger; `Sized` bound keeps the trait object-safe
    // while still offering a constructor for concrete types
    fn new() -> Self
    where
        Self: Sized;
    // generic log hook (not currently invoked by `DirCreater`)
    fn log(&mut self);
    // invoked before a directory is created at `path`
    fn log_mkdir(&mut self, path: &PathBuf);
    // invoked before `data` is written to file `name` at `path`
    fn log_write_file(&mut self, path: &PathBuf, name: &str, data: &[u8]);
}
// Logger used when no custom logger is supplied; prints actions to stdout.
struct DefaultLogger;

impl DirCreaterLogger for DefaultLogger {
    fn new() -> DefaultLogger {
        DefaultLogger
    }

    fn log(&mut self) {}

    fn log_mkdir(&mut self, path: &PathBuf) {
        println!("Making dir: {}", path.display());
    }

    fn log_write_file(&mut self, path: &PathBuf, _name: &str, _data: &[u8]) {
        println!("Creating: {}", path.display());
    }
}
| true
|
388422507b6841c403e2923c888e0bda394cadd5
|
Rust
|
justanotherdot/rust-assert-no-alloc
|
/tests/test.rs
|
UTF-8
| 3,418
| 2.71875
| 3
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
use assert_no_alloc::*;
use std::panic::catch_unwind;
#[global_allocator]
static A: AllocDisabler = AllocDisabler;
#[cfg(not(feature = "warn_debug"))]
compile_error!("The test suite requires the warn_debug feature to be enabled. Use `cargo test --features warn_debug`");
// This is only a kludge; what we actually want to check is "will do_alloc() be optimized out?", e.g. due to
// compiler optimizations turned on in --release mode. We can't do that, the closest we can get is to check
// whether debug_assertions are disabled, which coincidentially also happens in release mode.
#[cfg(not(debug_assertions))]
compile_error!("The test suite only works in debug mode. Use `cargo test --features warn_debug`");
#[cfg(feature = "warn_debug")]
fn check_and_reset() -> bool {
    // reports whether any allocation violation happened since the last call,
    // then clears the counter so each test starts from a clean slate
    let result = violation_count() > 0;
    reset_violation_count();
    result
}
// Provide a stub check_and_reset() function if warn_debug is disabled. This will never be compiled due to the
// compile_error!() above, but this stub ensures that the output will not be cluttered with spurious error
// messages.
#[cfg(not(feature = "warn_debug"))]
fn check_and_reset() -> bool { unreachable!() }
// Performs a single heap allocation; used by the tests below to trigger
// (or deliberately not trigger) the allocation detector.
fn do_alloc() {
    let _tmp = Box::<u32>::new(42);
}
// allocating outside of any assert_no_alloc section is always allowed
#[test]
fn ok_noop() {
    assert_eq!(check_and_reset(), false);
    do_alloc();
    assert_eq!(check_and_reset(), false);
}

// an empty forbidden section must not record a violation
#[test]
fn ok_simple() {
    assert_eq!(check_and_reset(), false);

    assert_no_alloc(|| {
    });

    do_alloc();
    assert_eq!(check_and_reset(), false);
}

// nesting empty forbidden sections is also fine
#[test]
fn ok_nested() {
    assert_eq!(check_and_reset(), false);
    assert_no_alloc(|| {
        assert_no_alloc(|| {
        });
    });

    do_alloc();
    assert_eq!(check_and_reset(), false);
}

// allocating inside a forbidden section must be detected
#[test]
fn forbidden_simple() {
    assert_eq!(check_and_reset(), false);
    assert_no_alloc(|| {
        do_alloc();
    });
    assert_eq!(check_and_reset(), true);
}

// ... even when the allocation happens in a nested section
#[test]
fn forbidden_in_nested() {
    assert_eq!(check_and_reset(), false);
    assert_no_alloc(|| {
        assert_no_alloc(|| {
            do_alloc();
        });
    });
    assert_eq!(check_and_reset(), true);
}

// leaving an inner section must not re-enable allocations for the outer one
#[test]
fn forbidden_after_nested() {
    assert_eq!(check_and_reset(), false);
    assert_no_alloc(|| {
        assert_no_alloc(|| {
        });
        do_alloc();
    });
    assert_eq!(check_and_reset(), true);
}
#[test]
fn unwind_ok() {
assert_eq!(check_and_reset(), false);
assert_no_alloc(|| {
let r = catch_unwind(|| {
assert_no_alloc(|| {
panic!();
});
});
assert!(r.is_err());
});
check_and_reset(); // unwinding might have allocated memory; we don't care about that.
do_alloc();
assert_eq!(check_and_reset(), false);
}
#[test]
fn unwind_nested() {
assert_eq!(check_and_reset(), false);
assert_no_alloc(|| {
let r = catch_unwind(|| {
assert_no_alloc(|| {
panic!();
});
});
assert!(r.is_err());
check_and_reset(); // unwinding might have allocated memory; we don't care about that.
do_alloc();
assert_eq!(check_and_reset(), true);
});
}
#[test]
fn unwind_nested2() {
assert_eq!(check_and_reset(), false);
assert_no_alloc(|| {
assert_no_alloc(|| {
let r = catch_unwind(|| {
assert_no_alloc(|| {
assert_no_alloc(|| {
panic!();
});
});
});
assert!(r.is_err());
check_and_reset(); // unwinding might have allocated memory; we don't care about that.
do_alloc();
assert_eq!(check_and_reset(), true);
});
});
check_and_reset(); // unwinding might have allocated memory; we don't care about that.
do_alloc();
assert_eq!(check_and_reset(), false);
}
| true
|
1d2477b49f812fce49920765e64ec0cd0132cb21
|
Rust
|
tim-weis/winit-blit
|
/examples/color_blend.rs
|
UTF-8
| 7,665
| 3.125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use winit::{
event::{ElementState, Event, KeyboardInput, WindowEvent},
event_loop::{ControlFlow, EventLoop},
window::WindowBuilder,
};
use winit_blit::{PixelBufferTyped, BGRA};
fn main() {
let event_loop = EventLoop::new();
let window = WindowBuilder::new()
.with_title("Software rendering example")
.build(&event_loop)
.unwrap();
let red = BGRA::from_rgb(255, 0, 0);
let green = BGRA::from_rgb(0, 255, 0);
let blue = BGRA::from_rgb(0, 0, 255);
let alpha = BGRA::new(0, 0, 0, 255);
let mut blend_mode = BlendMode::Approx;
for i in 0..=255 {
print!("{:x} ", bl(i, 0, 255));
}
event_loop.run(move |event, _, control_flow| {
// println!("{:?}", event);
match event {
Event::WindowEvent {
event: WindowEvent::KeyboardInput{input: KeyboardInput{state: ElementState::Pressed, ..}, ..},
window_id,
} if window_id == window.id() => {
blend_mode = match blend_mode {
BlendMode::Approx => BlendMode::Exact,
BlendMode::Exact => BlendMode::Naive,
BlendMode::Naive => BlendMode::Approx,
};
window.request_redraw();
}
Event::WindowEvent {
event: WindowEvent::CloseRequested,
window_id,
} if window_id == window.id() => *control_flow = ControlFlow::Exit,
Event::RedrawRequested(window_id) => {
if window_id == window.id() {
let (width, height): (u32, u32) = window.inner_size().into();
let mut buffer =
PixelBufferTyped::<BGRA>::new_supported(width, height, &window);
let start = std::time::Instant::now();
for (i, row) in buffer.rows_mut().enumerate() {
let y = ((i as f32 / height as f32) * 255.0).round() as u8;
let t_blend = blend_approx(y, red, green);
let b_blend = blend_approx(y, alpha, blue);
for (j, pixel) in row.into_iter().enumerate() {
// *pixel = x_blend;
let x = ((j as f32 / width as f32) * 255.0).round() as u8;
*pixel = blend_approx(x, t_blend, b_blend);
}
}
let end = std::time::Instant::now();
println!("{:?}", end - start);
buffer.blit(&window).unwrap();
}
}
_ => *control_flow = ControlFlow::Wait,
}
});
}
/// Which channel-mixing implementation the demo uses; cycled through on
/// each key press in `main`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum BlendMode {
    Exact,
    Approx,
    Naive,
}
/// Gamma-aware blend of two colors (float reference implementation).
///
/// Each channel is linearized with a 2.2 exponent, mixed with weight
/// `i` (0 => all `a`, 255 => all `b`), then re-encoded to 8 bits.
fn blend(i: u8, a: BGRA, b: BGRA) -> BGRA {
    let i = i as f32 / 255.0;
    let a_f = 1.0 - i;
    let b_f = i;
    let bl = |a: u8, b: u8| ((a_f * (a as f32 / 255.0).powf(2.2) + b_f * (b as f32 / 255.0).powf(2.2)).powf(1.0/2.2) * 255.0) as u8;
    BGRA {
        r: bl(a.r, b.r),
        g: bl(a.g, b.g),
        // Fixed: previously blended `a.g` (green) into the blue channel.
        b: bl(a.b, b.b),
        a: bl(a.a, b.a),
    }
}
/// Integer approximation of a gamma-aware channel blend.
///
/// `a` and `b` are mapped to a (roughly linear-light) 16-bit range via
/// `POWER_TABLE`, averaged with weight `f` (0 => all `a`, 255 => all
/// `b`), and mapped back through the interpolated correction table.
fn bl(f: u8, a: u8, b: u8) -> u8 {
    let a_linear = POWER_TABLE[a as usize] as u32;//a.pow(3) + 765 * a.pow(2);
    let b_linear = POWER_TABLE[b as usize] as u32;//b.pow(3) + 765 * b.pow(2);
    let f = f as u32;
    let a_f = 255 - f;
    let b_f = f;
    // Weighted average in the linearized space; the weights sum to 255.
    let val = (
        (
            a_f * a_linear +
            b_f * b_linear
        ) / 255
    ) as u16;
    // CORRECTION_TABLE[val as usize]
    // High byte selects the table entry, low byte is the interpolation
    // fraction between adjacent entries.
    interp_correction_table((val >> 8) as u8, val as u8)
}
static POWER_TABLE: &[u16] = &[
0, 1, 2, 4, 7, 11, 17, 24, 32, 42, 53, 65, 79, 94, 111, 129, 148, 169, 192, 216, 242, 270, 299,
330, 362, 396, 432, 469, 508, 549, 591, 635, 681, 729, 779, 830, 883, 938, 995, 1053, 1113,
1175, 1239, 1305, 1373, 1443, 1514, 1587, 1663, 1740, 1819, 1900, 1983, 2068, 2155, 2243, 2334,
2427, 2521, 2618, 2717, 2817, 2920, 3024, 3131, 3240, 3350, 3463, 3578, 3694, 3813, 3934, 4057,
4182, 4309, 4438, 4570, 4703, 4838, 4976, 5115, 5257, 5401, 5547, 5695, 5845, 5998, 6152, 6309,
6468, 6629, 6792, 6957, 7124, 7294, 7466, 7640, 7816, 7994, 8175, 8358, 8543, 8730, 8919, 9111,
9305, 9501, 9699, 9900, 10102, 10307, 10515, 10724, 10936, 11150, 11366, 11585, 11806, 12029,
12254, 12482, 12712, 12944, 13179, 13416, 13655, 13896, 14140, 14386, 14635, 14885, 15138,
15394, 15652, 15912, 16174, 16439, 16706, 16975, 17247, 17521, 17798, 18077, 18358, 18642,
18928, 19216, 19507, 19800, 20095, 20393, 20694, 20996, 21301, 21609, 21919, 22231, 22546,
22863, 23182, 23504, 23829, 24156, 24485, 24817, 25151, 25487, 25826, 26168, 26512, 26858,
27207, 27558, 27912, 28268, 28627, 28988, 29351, 29717, 30086, 30457, 30830, 31206, 31585,
31966, 32349, 32735, 33124, 33514, 33908, 34304, 34702, 35103, 35507, 35913, 36321, 36732,
37146, 37562, 37981, 38402, 38825, 39252, 39680, 40112, 40546, 40982, 41421, 41862, 42306,
42753, 43202, 43654, 44108, 44565, 45025, 45487, 45951, 46418, 46888, 47360, 47835, 48313,
48793, 49275, 49761, 50249, 50739, 51232, 51728, 52226, 52727, 53230, 53736, 54245, 54756,
55270, 55787, 56306, 56828, 57352, 57879, 58409, 58941, 59476, 60014, 60554, 61097, 61642,
62190, 62741, 63295, 63851, 64410, 64971, 65535,
];
static CORRECTION_TABLE: &[u8] = &[
0, 21, 28, 34, 39, 43, 46, 50, 53, 56, 59, 61, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 85,
87, 89, 90, 92, 93, 95, 96, 98, 99, 101, 102, 103, 105, 106, 107, 109, 110, 111, 112, 114, 115,
116, 117, 118, 119, 120, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
136, 137, 138, 139, 140, 141, 142, 143, 144, 144, 145, 146, 147, 148, 149, 150, 151, 151, 152,
153, 154, 155, 156, 156, 157, 158, 159, 160, 160, 161, 162, 163, 164, 164, 165, 166, 167, 167,
168, 169, 170, 170, 171, 172, 173, 173, 174, 175, 175, 176, 177, 178, 178, 179, 180, 180, 181,
182, 182, 183, 184, 184, 185, 186, 186, 187, 188, 188, 189, 190, 190, 191, 192, 192, 193, 194,
194, 195, 195, 196, 197, 197, 198, 199, 199, 200, 200, 201, 202, 202, 203, 203, 204, 205, 205,
206, 206, 207, 207, 208, 209, 209, 210, 210, 211, 212, 212, 213, 213, 214, 214, 215, 215, 216,
217, 217, 218, 218, 219, 219, 220, 220, 221, 221, 222, 223, 223, 224, 224, 225, 225, 226, 226,
227, 227, 228, 228, 229, 229, 230, 230, 231, 231, 232, 232, 233, 233, 234, 234, 235, 235, 236,
236, 237, 237, 238, 238, 239, 239, 240, 240, 241, 241, 242, 242, 243, 243, 244, 244, 245, 245,
246, 246, 247, 247, 248, 248, 249, 249, 249, 250, 250, 251, 251, 252, 252, 253, 253, 254, 254,
255, 255,
];
/// Looks up `CORRECTION_TABLE[index]`, linearly interpolating towards
/// the next entry by fraction `val` for small indices.
///
/// For `index >= 56` the entry is used directly — presumably the curve
/// is flat enough there that interpolation is unneeded (TODO confirm
/// the 56 threshold); this branch also avoids reading `index + 1`.
fn interp_correction_table(index: u8, val: u8) -> u8 {
    if index >= 56 {
        CORRECTION_TABLE[index as usize]
    } else {
        let a = CORRECTION_TABLE[index as usize] as u16;
        let b = CORRECTION_TABLE[index as usize + 1] as u16;
        let f = val as u16;
        let a_f = 255 - f;
        let b_f = f;
        ((a_f * a + b_f * b) / 255) as u8
    }
}
/// Blends two colors channel-by-channel using the table-driven
/// gamma-aware mix `bl`. `f` = 0 yields `a`, 255 yields `b`.
fn blend_approx(f: u8, a: BGRA, b: BGRA) -> BGRA {
    BGRA {
        r: bl(f, a.r, b.r),
        g: bl(f, a.g, b.g),
        b: bl(f, a.b, b.b),
        a: bl(f, a.a, b.a),
    }
}
/// Plain linear (non-gamma-corrected) blend of two 8-bit channel
/// values: `f` = 0 yields `a`, 255 yields `b`.
fn bl_naive(f: u8, a: u8, b: u8) -> u8 {
    let (f, a, b) = (u64::from(f), u64::from(a), u64::from(b));
    (((255 - f) * a + f * b) / 255) as u8
}
/// Blends two colors channel-by-channel with the naive linear mix
/// `bl_naive` (no gamma correction). `f` = 0 yields `a`, 255 yields `b`.
fn blend_naive(f: u8, a: BGRA, b: BGRA) -> BGRA {
    BGRA {
        r: bl_naive(f, a.r, b.r),
        g: bl_naive(f, a.g, b.g),
        b: bl_naive(f, a.b, b.b),
        a: bl_naive(f, a.a, b.a),
    }
}
| true
|
23191f773df0edcb359a4facb0e231b3b51632a0
|
Rust
|
segmentio/analytics-rust
|
/src/http.rs
|
UTF-8
| 1,930
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
//! Low-level HTTP bindings to the Segment tracking API.
use crate::client::Client;
use crate::message::Message;
use failure::Error;
use std::time::Duration;
/// A client which synchronously sends single messages to the Segment tracking
/// API.
///
/// `HttpClient` implements [`Client`](../client/trait.Client.html); see the
/// documentation for `Client` for more on how to send events to Segment.
pub struct HttpClient {
client: reqwest::blocking::Client,
host: String,
}
impl Default for HttpClient {
fn default() -> Self {
HttpClient {
client: reqwest::blocking::Client::builder()
.connect_timeout(Duration::new(10, 0))
.build()
.unwrap(),
host: "https://api.segment.io".to_owned(),
}
}
}
impl HttpClient {
/// Construct a new `HttpClient` from a `reqwest::Client` and a Segment API
/// scheme and host.
///
/// If you don't care to re-use an existing `reqwest::Client`, you can use
/// the `Default::default` value, which will send events to
/// `https://api.segment.io`.
pub fn new(client: reqwest::blocking::Client, host: String) -> HttpClient {
HttpClient { client, host }
}
}
impl Client for HttpClient {
    /// Synchronously sends one message: the API path is chosen from the
    /// message variant, the payload is POSTed as JSON with HTTP basic
    /// auth (write key as username, empty password), and an error is
    /// returned on transport failure or a non-success status code.
    fn send(&self, write_key: &str, msg: &Message) -> Result<(), Error> {
        let path = match msg {
            Message::Identify(_) => "/v1/identify",
            Message::Track(_) => "/v1/track",
            Message::Page(_) => "/v1/page",
            Message::Screen(_) => "/v1/screen",
            Message::Group(_) => "/v1/group",
            Message::Alias(_) => "/v1/alias",
            Message::Batch(_) => "/v1/batch",
        };
        self.client
            .post(&format!("{}{}", self.host, path))
            .basic_auth(write_key, Some(""))
            .json(msg)
            .send()?
            .error_for_status()?;
        Ok(())
    }
}
| true
|
a3db574c59f71128ea4b215a0148d003b8c811af
|
Rust
|
apachecn/kattis
|
/eastereggs_doesntwork/eastereggs.rs
|
UTF-8
| 3,868
| 3.03125
| 3
|
[] |
no_license
|
use std::io::{self, Read};
#[derive(Copy, Clone, Debug)]
struct Point(i32, i32);
// dist^2, blue_i, red_i
#[derive(Copy, Clone)]
struct Edge(u64, usize, usize);
struct Problem {
blue: Vec<Point>,
red: Vec<Point>,
edges: Vec<Edge>,
needed: usize,
}
impl Problem {
    /// Builds every blue-red edge, keyed by squared euclidean distance,
    /// and sorts them ascending. Blue vertices are numbered after the
    /// red ones: blue `i` gets the union-find id `i + red.len()`.
    fn compute_edges(&mut self) {
        for (i, p1) in self.blue.iter().copied().enumerate() {
            for (j, p2) in self.red.iter().copied().enumerate() {
                let distx = (p1.0 - p2.0).abs() as u64;
                let disty = (p1.1 - p2.1).abs() as u64;
                let dist_squared = distx * distx + disty * disty;
                self.edges.push(Edge(dist_squared, i + self.red.len(), j));
            }
        }
        self.edges.sort_unstable_by_key(|edge| edge.0);
    }

    /// Kruskal-style sweep: unions endpoints in increasing distance
    /// order until `maximum` drops below `needed`, returning the
    /// squared distance of the edge that crossed the threshold.
    ///
    /// Fixed: the previous version printed every processed edge to
    /// stdout, which corrupts the judged output — `main` must emit only
    /// the final distance.
    fn solve(self) -> u64 {
        let mut union_find = UnionFind::new(self.red.len(), self.blue.len());
        for edge in self.edges {
            union_find.union(edge.1, edge.2);
            if union_find.maximum < self.needed {
                return edge.0;
            }
        }
        unreachable!()
    }
}
#[derive(Copy, Clone)]
struct Data {
num_red: usize,
num_blue: usize,
}
impl Data {
    /// Larger of this component's red/blue member counts.
    fn max(self) -> usize {
        self.num_red.max(self.num_blue)
    }
}
/// Disjoint-set over red + blue vertices. Besides the parent table it
/// tracks, per component, how many red and blue members it contains,
/// and maintains `maximum` = sum over all components of
/// `max(num_red, num_blue)`.
struct UnionFind {
    root: Vec<usize>,
    data: Vec<Data>,
    maximum: usize,
}
impl UnionFind {
    /// Red vertices take ids `0..red`, blue vertices `red..red + blue`.
    /// Every vertex starts as its own component, so `maximum` starts at
    /// `red + blue`.
    pub fn new(red: usize, blue: usize) -> Self {
        let size = red + blue;
        let mut vec = Vec::with_capacity(size);
        for _ in 0..red {
            vec.push(Data {
                num_red: 1,
                num_blue: 0,
            });
        }
        for _ in 0..blue {
            vec.push(Data {
                num_red: 0,
                num_blue: 1,
            });
        }
        Self {
            root: (0..size).collect(),
            data: vec,
            maximum: size,
        }
    }
    /// Finds the representative of `a`, compressing the path fully.
    fn root(&mut self, a: usize) -> usize {
        let mut root = self.root[a];
        while root != self.root[root] {
            root = self.root[root];
        }
        let mut i = a;
        while i != root {
            let next = self.root[i];
            self.root[i] = root;
            i = next;
        }
        root
    }
    /// Merges the components of `a` and `b`, updating the per-component
    /// red/blue counts and the global `maximum` incrementally.
    pub fn union(&mut self, a: usize, b: usize) {
        let a = self.root(a);
        let b = self.root(b);
        // Fixed: when both endpoints already share a root, the old code
        // read the same Data entry twice and doubled its counts,
        // corrupting `maximum`. Same-component unions are no-ops.
        if a == b {
            return;
        }
        let da = self.data[a];
        let db = self.data[b];
        let data = Data {
            num_red: da.num_red + db.num_red,
            num_blue: da.num_blue + db.num_blue,
        };
        self.maximum += data.max();
        self.maximum -= da.max();
        self.maximum -= db.max();
        self.data[b] = data;
        self.root[a] = b;
    }
}
fn main() {
let mut stdin = io::stdin();
let mut buf = String::new();
stdin.read_to_string(&mut buf).unwrap();
let mut split = buf.split_ascii_whitespace();
let n = split.next().unwrap().parse().unwrap();
let b = split.next().unwrap().parse().unwrap();
let r = split.next().unwrap().parse().unwrap();
let mut p = Problem {
blue: Vec::with_capacity(b),
red: Vec::with_capacity(r),
edges: Vec::with_capacity(b * r),
needed: n,
};
for _ in 0..b {
let x = split.next().unwrap().parse().unwrap();
let y = split.next().unwrap().parse().unwrap();
p.blue.push(Point(x,y));
}
for _ in 0..r {
let x = split.next().unwrap().parse().unwrap();
let y = split.next().unwrap().parse().unwrap();
p.red.push(Point(x,y));
}
p.compute_edges();
let res = p.solve();
println!("{}", (res as f64).sqrt());
}
| true
|
f0f5e93e5a557a466d171341e6d037d7aeb749d9
|
Rust
|
adriensamson/deployer
|
/src/error.rs
|
UTF-8
| 493
| 2.703125
| 3
|
[] |
no_license
|
use core::result;
use std::io;
/// Unified error type for the deployer: wraps I/O failures and carries
/// plain-string runtime and configuration errors.
#[derive(Debug)]
pub enum Error {
    IoError(io::Error),
    RuntimeError(String),
    ConfigError(String),
}
/// Crate-wide result alias over [`Error`].
pub type Result<T> = result::Result<T, Error>;
impl std::convert::From<io::Error> for Error {
    fn from(err: io::Error) -> Error {
        Error::IoError(err)
    }
}
// The original TOML error details are discarded; only a generic
// message survives the conversion.
impl std::convert::From<toml::de::Error> for Error {
    fn from(_err: toml::de::Error) -> Error {
        Error::ConfigError(String::from("Error while parsing config"))
    }
}
| true
|
d22f7baf3e73b4588144fa7add7be3c420b0b2fe
|
Rust
|
kuzyanov/rust-concurrent-hash-map
|
/tests/integration_test.rs
|
UTF-8
| 5,177
| 3.28125
| 3
|
[
"MIT"
] |
permissive
|
use std::collections::hash_map::RandomState;
use concurrent_hash_map::{ConcurrentHashMap, Entry};
#[test]
fn get_test() {
let map = ConcurrentHashMap::new();
let key = 1;
let mut value = "value";
assert!(map.get(&key).is_none());
map.insert(key, value);
assert_eq!(value, *map.get(&key).unwrap());
value = "value2";
map.insert(key, value);
assert_eq!(value, *map.get(&key).unwrap());
}
#[test]
fn get_mut_test() {
let map = ConcurrentHashMap::new();
let key = 1;
let mut value = "value";
assert!(map.get_mut(&key).is_none());
map.insert(key, value);
assert_eq!(value, *map.get_mut(&key).unwrap());
value = "value2";
*map.get_mut(&key).unwrap() = value;
assert_eq!(value, *map.get_mut(&key).unwrap());
}
#[test]
fn contains_key_test() {
let map = ConcurrentHashMap::new();
let key = 1;
let mut value = "value";
assert!(!map.contains_key(&key));
map.insert(key, value);
assert!(map.contains_key(&key));
value = "value2";
map.insert(key, value);
assert!(map.contains_key(&key));
}
#[test]
fn insert_test() {
let map = ConcurrentHashMap::new();
let key = 1;
let value = "value";
assert_eq!(0, map.len());
assert_eq!(None, map.insert(key, value));
assert_eq!(Some(value), map.insert(key, value));
assert_eq!(Some(value), map.insert(key, value));
assert_eq!(1, map.len());
}
#[test]
fn remove_test() {
let map = ConcurrentHashMap::new();
let key = 1;
let value = "value";
assert_eq!(0, map.len());
assert_eq!(None, map.remove(&key));
assert_eq!(None, map.insert(key, value));
assert_eq!(1, map.len());
assert_eq!(Some(value), map.remove(&key));
assert_eq!(0, map.len());
}
#[test]
fn clear_test() {
let map = ConcurrentHashMap::new();
let key1 = 1;
let value1 = "value1";
let key2 = 2;
let value2 = "value2";
assert_eq!(0, map.len());
assert_eq!(None, map.insert(key1, value1));
assert_eq!(None, map.insert(key2, value2));
assert_eq!(2, map.len());
map.clear();
assert_eq!(0, map.len());
}
#[test]
fn len_test() {
let map = ConcurrentHashMap::new();
let key1 = 1;
let value1 = "value1";
let key2 = 2;
let value2 = "value2";
assert_eq!(0, map.len());
assert_eq!(None, map.insert(key1, value1));
assert_eq!(1, map.len());
assert_eq!(None, map.insert(key2, value2));
assert_eq!(Some(value2), map.insert(key2, value2));
assert_eq!(2, map.len());
assert_eq!(Some(value1), map.remove(&key1));
assert_eq!(1, map.len());
assert_eq!(Some(value2), map.remove(&key2));
assert_eq!(0, map.len());
}
#[test]
fn capacity_test() {
let initial_capacity = 32;
let map = ConcurrentHashMap::with_capacity(initial_capacity);
let capacity = map.capacity();
assert!(capacity >= initial_capacity);
for i in 0..capacity {
map.insert(i, i);
}
assert_eq!(capacity, map.capacity());
for i in capacity..capacity * 2 {
map.insert(i, i);
}
assert!(map.capacity() > capacity);
}
#[test]
fn into_iterator_test() {
let map = ConcurrentHashMap::new();
let entry1 = Entry::new(1, "value1");
let entry2 = Entry::new(2, "value2");
map.insert(*entry1.key(), *entry1.value());
map.insert(*entry2.key(), *entry2.value());
let entries: Vec<Entry<i32, &str>> = map.into_iter().collect();
assert_eq!(2, entries.len());
assert!(entries.contains(&entry1));
assert!(entries.contains(&entry2));
}
#[test]
fn clone_test() {
let origin_map = ConcurrentHashMap::new();
let key = 1;
origin_map.insert(key, "value1");
let cloned_map = origin_map.clone();
assert_eq!(origin_map.len(), cloned_map.len());
assert_eq!(origin_map.capacity(), cloned_map.capacity());
assert_eq!(
*origin_map.get(&key).unwrap(),
*cloned_map.get(&key).unwrap()
);
cloned_map.insert(key, "value2");
assert_ne!(
*origin_map.get(&key).unwrap(),
*cloned_map.get(&key).unwrap()
);
}
#[test]
fn with_capacity_and_hasher_test() {
let capacity = 32;
let map: ConcurrentHashMap<i32, i32, RandomState> =
ConcurrentHashMap::with_capacity_and_hasher(capacity, RandomState::new());
assert_eq!(0, map.len());
assert!(map.capacity() >= capacity);
}
#[test]
fn with_capacity_test() {
let capacity = 32;
let map: ConcurrentHashMap<i32, i32, RandomState> = ConcurrentHashMap::with_capacity(capacity);
assert_eq!(0, map.len());
assert!(map.capacity() >= capacity);
}
#[test]
fn with_hasher_test() {
let map: ConcurrentHashMap<i32, i32, RandomState> =
ConcurrentHashMap::with_hasher(RandomState::new());
assert_eq!(0, map.len());
assert!(map.capacity() > 0);
}
#[test]
fn new_test() {
let map: ConcurrentHashMap<i32, i32, RandomState> = ConcurrentHashMap::new();
assert_eq!(0, map.len());
assert!(map.capacity() > 0);
}
#[test]
fn default_test() {
let map: ConcurrentHashMap<i32, &str, RandomState> = Default::default();
assert_eq!(0, map.len());
assert!(map.capacity() > 0);
}
| true
|
f81693bf1961c1662bc330eb06748ffff620b501
|
Rust
|
ShaneQi/simfd
|
/src/main.rs
|
UTF-8
| 11,436
| 2.640625
| 3
|
[
"Apache-2.0"
] |
permissive
|
extern crate clap;
extern crate plist;
extern crate prettytable;
use clap::{App, Arg};
use plist::Plist;
use prettytable::cell::Cell;
use prettytable::row::Row;
use prettytable::Table;
use std::fs::read_dir;
use std::fs::File;
use std::io::BufReader;
use std::vec::Vec;
fn main() {
let matches = App::new("simfd")
.version("1.0")
.author("Shane Qi <qizengtai@gmail.com>")
.about("Find out file location of Xcode simulators.")
.arg(
Arg::with_name("QUERY")
.help("Queries to find app location or simulator device lodation.")
.multiple(true),
)
.arg(
Arg::with_name("device")
.short("d")
.long("device")
.help("Search among devices instead of apps."),
)
.get_matches();
let mut queries: Vec<String> = Vec::new();
if let Some(values) = matches.values_of("QUERY") {
for q in values {
queries.push(q.to_owned());
}
}
let simulator_directory = matches
.value_of("simulator directory")
.map(|a| a.to_string())
.unwrap_or_else(|| {
let mut default_simualtor_directory = std::env::home_dir()
.expect("Failed to find home directory.")
.into_os_string()
.into_string()
.expect("Failed to find home directory.");
default_simualtor_directory += "/Library/Developer/CoreSimulator/Devices";
return default_simualtor_directory;
});
let mut table = Table::new();
for entry in read_dir(simulator_directory).expect("Didn't find simulators directory.") {
if let Ok(entry) = entry {
let mut path = entry.path();
if path.is_dir() {
let mut device_path = path.clone();
path.push("device.plist");
if let Ok(file) = File::open(path) {
let mut reader = BufReader::new(file);
let plist = Plist::from_reader(&mut reader).unwrap();
if let Plist::Dict(dict) = plist {
let name = dict.get("name").and_then(|e| {
if let Plist::String(name) = e {
Some(name.to_owned())
} else {
None
}
});
let runtime = dict.get("runtime").and_then(|e| {
if let Plist::String(name) = e {
name.to_owned()
.split(".")
.last()
.map(|s| s.to_string())
.and_then(|s| {
let mut components = Vec::<String>::new();
for component in s.split("-") {
components.push(component.to_string())
}
if components.len() < 3 {
None
} else {
Some(format!(
"{} {}.{}",
components.get(0).unwrap(),
components.get(1).unwrap(),
components.get(2).unwrap()
))
}
})
} else {
None
}
});
let udid = dict.get("UDID").and_then(|e| {
if let Plist::String(name) = e {
Some(name.to_owned())
} else {
None
}
});
if let (Some(device_name), Some(runtime), Some(udid)) =
(name, runtime, udid)
{
if matches.occurrences_of("device") > 0 {
let mut matched = true;
for q in &queries {
if device_name.to_lowercase().contains(&q.to_lowercase())
|| q.to_lowercase().contains(&device_name.to_lowercase())
|| runtime.to_lowercase().contains(&q.to_lowercase())
|| q.to_lowercase().contains(&runtime.to_lowercase())
|| udid.to_lowercase().contains(&q.to_lowercase())
|| q.to_lowercase().contains(&udid.to_lowercase())
{
} else {
matched = false;
break;
}
}
if matched {
table.add_row(Row::new(vec![
Cell::new(&device_name),
Cell::new(&runtime),
Cell::new(device_path.to_str().unwrap_or("")),
]));
}
} else {
device_path.push("data/Containers/Data/Application");
if let Ok(entries) = read_dir(device_path) {
for entry in entries {
if let Ok(entry) = entry {
let mut path = entry.path();
let path_clone = path.clone();
let app_path = path_clone
.into_os_string()
.into_string()
.unwrap_or("".to_string());
path.push(
".com.apple.mobile_container_manager.metadata.plist",
);
if let Ok(file) = File::open(path) {
let mut reader = BufReader::new(file);
let app_plist =
Plist::from_reader(&mut reader).unwrap();
if let Plist::Dict(dict) = app_plist {
let bundle_id = dict.get(
"MCMMetadataIdentifier",
).and_then(|e| {
if let Plist::String(name) = e {
Some(name.to_owned())
} else {
None
}
});
if let Some(bundle_id) = bundle_id {
let mut matched = true;
for q in &queries {
if bundle_id
.to_lowercase()
.contains(&q.to_lowercase())
|| q.to_lowercase().contains(
&bundle_id.to_lowercase(),
)
|| device_name
.to_lowercase()
.contains(&q.to_lowercase())
|| q.to_lowercase().contains(
&device_name.to_lowercase(),
)
|| runtime
.to_lowercase()
.contains(&q.to_lowercase())
|| q.to_lowercase().contains(
&runtime.to_lowercase(),
)
|| udid.to_lowercase()
.contains(&q.to_lowercase())
|| q.to_lowercase()
.contains(&udid.to_lowercase())
{
} else {
matched = false;
break;
}
}
if matched {
table.add_row(Row::new(vec![
Cell::new(&bundle_id),
Cell::new(&device_name),
Cell::new(&runtime),
Cell::new(&app_path),
]));
}
}
}
}
}
}
}
}
}
}
}
}
}
}
if table.len() > 0 {
table.printstd();
} else {
println!("Didn't find any app that matches queries.")
}
}
| true
|
1d65b5048aa113885845dd4ad787392c61c5df72
|
Rust
|
SilverSoldier/rlock
|
/src/config.rs
|
UTF-8
| 2,763
| 3.078125
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::ffi::{ CStr, CString };
use libc::{
getenv,
};
const config_msg: &'static str = "
Do not edit/remove this line. Change color for each screen by editing only the right hand side of following lines. If file is not parseable, will revert to default config.
";
// Convenience macro: builds a `HashMap` from `key => value` pairs,
// e.g. `map!{ 0 => "black", 1 => "green" }`.
macro_rules! map (
    {$($key:expr => $value:expr), + } => {
        {
            let mut m = HashMap::new();
            $(
                m.insert($key, $value);
            )+
            m
        }
    };
);
/// Returns the current user's login name from the `USER` environment
/// variable.
///
/// Fixed: the previous implementation passed the raw pointer returned
/// by `libc::getenv` straight to `CStr::from_ptr` with no null check —
/// undefined behaviour whenever `USER` is unset. The safe `std::env`
/// API is used instead; a missing or non-Unicode value yields an empty
/// string.
pub fn getusername() -> String {
    std::env::var("USER").unwrap_or_default()
}
/// Builds the screen-state -> color map for the three lock states:
/// key 0 = Init, 1 = Input, 2 = Failed.
fn create_color_map(init: &str, input: &str, failed: &str) -> HashMap<u32, String> {
    let mut colors = HashMap::new();
    colors.insert(0, init.to_string()); // Init
    colors.insert(1, input.to_string()); // Input
    colors.insert(2, failed.to_string()); // Failed
    colors
}
/// Fallback palette used when no config file can be parsed: black while
/// idle, dark green while typing, dark red after a failed attempt.
/// Announces the fallback on stdout.
fn create_default_config() -> HashMap<u32, String> {
    /* Create the default config */
    println!("Used default config");
    create_color_map("black", "#006400", "#8B0000")
}
/// Parses config-file contents into a state -> color map.
///
/// The file begins with the fixed `config_msg` banner; the text after
/// it is whitespace-separated `label value` pairs, of which only the
/// values (every second token) are used. Falls back to the default
/// config when the three colors cannot all be read.
///
/// NOTE(review): `split_off(config_msg.len() - 1)` assumes the banner
/// is present verbatim at the start of the file — confirm against the
/// code that writes the file.
pub fn parse_contents(mut contents: String) -> HashMap<u32, String> {
    /* Remove the message from the file contents and then separate using
     * whitespaces */
    let config = contents.split_off(config_msg.len() - 1);
    let mut iter = config.split_whitespace();
    // Each color is preceded by a label token: skip one, take the next.
    let mut skip_and_take = || {
        iter.next();
        iter.next()
    };
    if let (Some(init_col), Some(inp_col), Some(fail_col)) =
        (skip_and_take(), skip_and_take(), skip_and_take())
    {
        return create_color_map(init_col, inp_col, fail_col);
    }
    create_default_config()
}
/// Loads the per-user color configuration from `~/.rlock_config`,
/// falling back to the default palette when the file is missing or
/// unreadable.
pub fn read_config() -> HashMap<u32, String> {
    let path = format!("/home/{}/.rlock_config", getusername());
    if let Ok(mut file) = File::open(path) {
        println!("Reading from config");
        let mut contents = String::new();
        if file.read_to_string(&mut contents).is_ok() {
            return parse_contents(contents);
        }
    }
    /* TODO: Create file in case it does not exist */
    create_default_config()
}
| true
|
73d7f4dca790139d2b91b01bfa63396a12ebc352
|
Rust
|
J-AugustoManzano/livro_Rust
|
/ExerciciosFixacao/Cap05/c05Exer2B/src/main.rs
|
UTF-8
| 869
| 3.125
| 3
|
[] |
no_license
|
use std::io;
use std::io::prelude::*;
fn potencia(b: u64, e: u64, p: &mut u64) {
*p = 1;
for i in 1 .. e + 1 {
*p *= b;
}
}
fn main() {
let mut base = String::new();
let mut expo = String::new();
let mut resp: u64 = 0;
let bas: u64;
let exp: u64;
print!("Entre o valor da base ......: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut base).unwrap();
bas = base.trim().parse::<u64>().unwrap();
print!("Entre o valor da expoente ..: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut expo).unwrap();
exp = expo.trim().parse::<u64>().unwrap();
potencia(bas, exp, &mut resp);
println!("Resultado = {}", resp);
println!();
print!("Tecle <Enter> para encerrar...");
io::stdout().flush().unwrap();
io::stdin().read(&mut [0u8]).unwrap();
}
| true
|
876321018022ae1c8e1706a87fb6f1cd0e92bc60
|
Rust
|
teuron/prepona
|
/src/algo/cc/cc.rs
|
UTF-8
| 6,134
| 3.359375
| 3
|
[
"MIT"
] |
permissive
|
use crate::algo::{Dfs, DfsListener};
use crate::graph::{Edge, UndirectedEdge};
use crate::provide;
/// Finds connected components of an undirected graph.
///
/// # Examples
/// ```
/// use prepona::prelude::*;
/// use prepona::storage::Mat;
/// use prepona::graph::MatGraph;
/// use prepona::algo::ConnectedComponents;
///
/// // a --- b --- d g
/// // | /
/// // c ___/ e --- f
/// let mut graph = MatGraph::init(Mat::<usize>::init());
/// let a = graph.add_vertex();
/// let b = graph.add_vertex();
/// let c = graph.add_vertex();
/// let d = graph.add_vertex();
/// let e = graph.add_vertex();
/// let f = graph.add_vertex();
/// let g = graph.add_vertex();
///
/// graph.add_edge_unchecked(a, b, 1.into());
/// graph.add_edge_unchecked(a, c, 1.into());
/// graph.add_edge_unchecked(c, b, 1.into());
/// graph.add_edge_unchecked(b, d, 1.into());
/// graph.add_edge_unchecked(e, f, 1.into());
///
/// let ccs = ConnectedComponents::init(&graph).execute(&graph);
///
/// for cc in ccs {
/// match cc.len() {
/// 1 => assert!(cc.contains(&g)),
/// 2 => assert!(vec![e, f].iter().all(|v_id| cc.contains(v_id))),
/// 4 => assert!(vec![a, b, c, d].iter().all(|v_id| cc.contains(v_id))),
/// _ => panic!("Unknown component: {:?}", cc),
/// }
/// }
/// ```
pub struct ConnectedComponents {
current_component: Vec<usize>,
ccs: Vec<Vec<usize>>,
}
impl DfsListener for ConnectedComponents {
fn on_white(&mut self, dfs: &Dfs<Self>, virt_id: usize) {
let real_id = dfs.get_id_map().real_id_of(virt_id);
self.current_component.push(real_id);
}
fn on_finish(&mut self, _: &Dfs<Self>) {
// Every time dfs is finished, a graph component is traversed.
self.ccs.push(self.current_component.clone());
// So next time on_white is called, it will be for a new component.
self.current_component.clear();
}
}
impl ConnectedComponents {
/// Initializes the structure.
pub fn init<G, W, E: Edge<W>>(_: &G) -> Self
where
G: provide::Graph<W, E, UndirectedEdge> + provide::Vertices + provide::Neighbors,
{
ConnectedComponents {
ccs: vec![],
current_component: vec![],
}
}
/// Finds connected components of an undirected graph.
///
/// # Arguments
/// `graph`: Graph to search for its connected components.
///
/// # Returns
/// Connected components of the graph. \
/// Returned value will be vector of vectors. Each vector contains ids of vertices that are in a component.
pub fn execute<G, W, E: Edge<W>>(mut self, graph: &G) -> Vec<Vec<usize>>
where
G: provide::Graph<W, E, UndirectedEdge> + provide::Vertices + provide::Neighbors,
{
let mut dfs = Dfs::init(graph, &mut self);
dfs.execute(graph);
self.ccs
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::graph::MatGraph;
use crate::provide::*;
use crate::storage::Mat;
#[test]
fn empty_graph() {
let graph = MatGraph::init(Mat::<usize>::init());
let ccs = ConnectedComponents::init(&graph).execute(&graph);
assert_eq!(ccs.len(), 0);
}
#[test]
fn graph_with_one_component() {
// a --- b --- d g
// | / | |
// c ___/ '--- e --- f
let mut graph = MatGraph::init(Mat::<usize>::init());
let a = graph.add_vertex();
let b = graph.add_vertex();
let c = graph.add_vertex();
let d = graph.add_vertex();
let e = graph.add_vertex();
let f = graph.add_vertex();
let g = graph.add_vertex();
graph.add_edge_unchecked(a, b, 1.into());
graph.add_edge_unchecked(a, c, 1.into());
graph.add_edge_unchecked(c, b, 1.into());
graph.add_edge_unchecked(b, d, 1.into());
graph.add_edge_unchecked(d, e, 1.into());
graph.add_edge_unchecked(e, f, 1.into());
graph.add_edge_unchecked(f, g, 1.into());
let ccs = ConnectedComponents::init(&graph).execute(&graph);
assert_eq!(ccs.len(), 1);
assert_eq!(ccs[0].len(), 7);
assert!(vec![a, b, c, d, e, f, g]
.iter()
.all(|v_id| ccs[0].contains(v_id)));
}
#[test]
fn trivial_graph() {
// a --- b --- d g
// | /
// c ___/ e --- f
let mut graph = MatGraph::init(Mat::<usize>::init());
let a = graph.add_vertex();
let b = graph.add_vertex();
let c = graph.add_vertex();
let d = graph.add_vertex();
let e = graph.add_vertex();
let f = graph.add_vertex();
let g = graph.add_vertex();
graph.add_edge_unchecked(a, b, 1.into());
graph.add_edge_unchecked(a, c, 1.into());
graph.add_edge_unchecked(c, b, 1.into());
graph.add_edge_unchecked(b, d, 1.into());
graph.add_edge_unchecked(e, f, 1.into());
let ccs = ConnectedComponents::init(&graph).execute(&graph);
for cc in ccs {
match cc.len() {
1 => assert!(cc.contains(&g)),
2 => assert!(vec![e, f].iter().all(|v_id| cc.contains(v_id))),
4 => assert!(vec![a, b, c, d].iter().all(|v_id| cc.contains(v_id))),
_ => panic!("Unknown component: {:?}", cc),
}
}
}
#[test]
fn graph_with_no_edge() {
// a b c
// d e f
let mut graph = MatGraph::init(Mat::<usize>::init());
let a = graph.add_vertex();
let b = graph.add_vertex();
let c = graph.add_vertex();
let d = graph.add_vertex();
let e = graph.add_vertex();
let f = graph.add_vertex();
let ccs = ConnectedComponents::init(&graph).execute(&graph);
assert_eq!(ccs.len(), 6);
for cc in &ccs {
assert_eq!(cc.len(), 1)
}
assert_eq!(ccs.concat(), [a, b, c, d, e, f]);
}
}
| true
|
b098e5f99f85ef022a21a93fd05f0c189ab7333a
|
Rust
|
bonega/chipsand
|
/src/registers.rs
|
UTF-8
| 4,267
| 3.265625
| 3
|
[] |
no_license
|
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum R8 {
A,
F,
B,
C,
D,
E,
H,
L,
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum R16 {
AF,
BC,
DE,
HL,
SP,
}
pub struct Registers {
pub a: u8,
pub b: u8,
pub c: u8,
pub d: u8,
pub e: u8,
h: u8,
l: u8,
pub z_flag: bool,
pub n_flag: bool,
pub h_flag: bool,
pub c_flag: bool,
pub sp: u16,
pub pc: u16,
}
pub trait RegIO<T, V> {
fn write(&mut self, reg: T, v: V);
fn read(&self, reg: T) -> V;
}
impl RegIO<R8, u8> for Registers {
fn write(&mut self, reg: R8, v: u8) {
match reg {
R8::A => self.a = v,
R8::F => unimplemented!(),
R8::B => self.b = v,
R8::C => self.c = v,
R8::D => self.d = v,
R8::E => self.e = v,
R8::H => self.h = v,
R8::L => self.l = v,
}
}
fn read(&self, reg: R8) -> u8 {
match reg {
R8::A => self.a,
R8::F => self.get_reg_f(),
R8::B => self.b,
R8::C => self.c,
R8::D => self.d,
R8::E => self.e,
R8::H => self.h,
R8::L => self.l,
}
}
}
impl RegIO<R16, u16> for Registers {
fn write(&mut self, reg: R16, v: u16) {
match reg {
R16::AF => self.set_reg_af(v),
R16::BC => self.set_reg_bc(v),
R16::DE => self.set_reg_de(v),
R16::HL => self.set_reg_hl(v),
R16::SP => self.sp = v,
}
}
fn read(&self, reg: R16) -> u16 {
match reg {
R16::AF => self.get_reg_af(),
R16::BC => self.get_reg_bc(),
R16::DE => self.get_reg_de(),
R16::HL => self.get_reg_hl(),
R16::SP => self.sp,
}
}
}
impl Registers {
pub fn new() -> Self {
Registers {
a: 0,
b: 0,
c: 0,
d: 0,
e: 0,
h: 0,
l: 0,
z_flag: false,
n_flag: false,
h_flag: false,
c_flag: false,
sp: 0,
pc: 0x100,
}
}
pub fn get_reg_f(&self) -> u8 {
(((self.z_flag as u8) << 3)
+ ((self.n_flag as u8) << 2)
+ ((self.h_flag as u8) << 1)
+ (self.c_flag as u8))
<< 4
}
pub fn get_reg_af(&self) -> u16 {
to_u16(self.a, self.get_reg_f())
}
pub fn set_reg_af(&mut self, v: u16) {
self.a = high_bits(v);
let lower = low_bits(v);
self.z_flag = (lower >> 7) & 1 != 0;
self.n_flag = (lower >> 6) & 1 != 0;
self.h_flag = (lower >> 5) & 1 != 0;
self.c_flag = (lower >> 4) & 1 != 0;
}
pub fn get_reg_bc(&self) -> u16 {
to_u16(self.b, self.c)
}
pub fn set_reg_bc(&mut self, v: u16) {
self.b = high_bits(v);
self.c = low_bits(v);
}
pub fn get_reg_de(&self) -> u16 {
to_u16(self.d, self.e)
}
pub fn set_reg_de(&mut self, v: u16) {
self.d = high_bits(v);
self.e = low_bits(v);
}
pub fn get_reg_hl(&self) -> u16 {
to_u16(self.h, self.l)
}
pub fn set_reg_hl(&mut self, v: u16) {
self.h = high_bits(v);
self.l = low_bits(v);
}
}
fn to_u16(h: u8, l: u8) -> u16 {
(h as u16) << 8 | l as u16
}
fn high_bits(x: u16) -> u8 {
(x >> 8) as u8
}
fn low_bits(x: u16) -> u8 {
(x & 0xFF) as u8
}
#[cfg(test)]
mod tests {
use super::*;
fn mock_registers() -> Registers {
Registers::new()
}
#[test]
fn test_get_reg_af() {
let mut regs = mock_registers();
regs.a = 0xF0;
regs.z_flag = true;
regs.n_flag = false;
regs.h_flag = true;
regs.c_flag = false;
assert_eq!(0b1111000010100000, regs.get_reg_af());
}
#[test]
fn test_set_reg_af() {
let mut regs = mock_registers();
regs.set_reg_af(0b1111000010100000);
assert_eq!(regs.a, 0xF0);
assert_eq!(true, regs.z_flag);
assert_eq!(false, regs.n_flag);
assert_eq!(true, regs.h_flag);
assert_eq!(false, regs.c_flag);
}
}
| true
|
ebdb0bdd7592be29d74364f3d891cbb6524462dc
|
Rust
|
grantlemons/neo4rs
|
/lib/src/lib.rs
|
UTF-8
| 25,206
| 3.140625
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Neo4j driver compatible with neo4j 4.x versions
//!
//! * An implementation of the [bolt protocol][bolt] to interact with Neo4j server
//! * async/await apis using [tokio][tokio]
//! * Supports bolt 4.2 specification
//! * tested with Neo4j versions: 4.0, 4.1, 4.2
//!
//!
//! [bolt]: https://7687.org/
//! [tokio]: https://github.com/tokio-rs/tokio
//!
//!
//! # Examples
//!
//! ```
//! use neo4rs::*;
//! use std::sync::Arc;
//! use std::sync::atomic::{AtomicU32, Ordering};
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let id = Uuid::new_v4().to_string();
//!
//! let graph = Arc::new(Graph::new(&uri, user, pass).await.unwrap());
//! let mut result = graph.run(
//! query("CREATE (p:Person {id: $id})").param("id", id.clone())
//! ).await.unwrap();
//!
//! let mut handles = Vec::new();
//! let mut count = Arc::new(AtomicU32::new(0));
//! for _ in 1..=42 {
//! let graph = graph.clone();
//! let id = id.clone();
//! let count = count.clone();
//! let handle = tokio::spawn(async move {
//! let mut result = graph.execute(
//! query("MATCH (p:Person {id: $id}) RETURN p").param("id", id)
//! ).await.unwrap();
//! while let Ok(Some(row)) = result.next().await {
//! count.fetch_add(1, Ordering::Relaxed);
//! }
//! });
//! handles.push(handle);
//! }
//!
//! futures::future::join_all(handles).await;
//! assert_eq!(count.load(Ordering::Relaxed), 42);
//! }
//! ```
//!
//! ## Configurations
//!
//! Use the config builder to override the default configurations like
//! * `fetch_size` - number of rows to fetch in batches (default is 200)
//! * `max_connections` - maximum size of the connection pool (default is 16)
//! * `db` - the database to connect to (default is `neo4j`)
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//!
//! #[tokio::main]
//! async fn main() {
//! let config = config()
//! .uri("127.0.0.1:7687")
//! .user("neo4j")
//! .password("neo")
//! .db("neo4j")
//! .fetch_size(500)
//! .max_connections(10)
//! .build()
//! .unwrap();
//! let graph = Graph::connect(config).await.unwrap();
//! let mut result = graph.execute(query("RETURN 1")).await.unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let value: i64 = row.get("1").unwrap();
//! assert_eq!(1, value);
//! assert!(result.next().await.unwrap().is_none());
//! }
//! ```
//!
//! ## Nodes
//! A simple example to create a node and consume the created node from the row stream.
//!
//! * [`Graph::run`] just returns [`errors::Result`]`<()>`, usually used for write only queries.
//! * [`Graph::execute`] returns [`errors::Result`]`<`[`RowStream`]`>`
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//!
//! assert!(graph.run(query("RETURN 1")).await.is_ok());
//!
//! let mut result = graph.execute(
//! query( "CREATE (friend:Person {name: $name}) RETURN friend")
//! .param("name", "Mr Mark")
//! ).await.unwrap();
//!
//! while let Ok(Some(row)) = result.next().await {
//! let node: Node = row.get("friend").unwrap();
//! let id = node.id();
//! let labels = node.labels();
//! let name: String = node.get("name").unwrap();
//! assert_eq!(name, "Mr Mark");
//! assert_eq!(labels, vec!["Person"]);
//! assert!(id > 0);
//! }
//! }
//! ```
//!
//! ## Transactions
//!
//! Start a new transaction using [`Graph::start_txn`], which will return a handle [`Txn`] that can
//! be used to [`Txn::commit`] or [`Txn::rollback`] the transaction.
//!
//! Note that the handle takes a connection from the connection pool, which will be released once
//! the Txn is dropped
//!
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//! let txn = graph.start_txn().await.unwrap();
//! let id = Uuid::new_v4().to_string();
//! let result = txn.run_queries(vec![
//! query("CREATE (p:Person {id: $id})").param("id", id.clone()),
//! query("CREATE (p:Person {id: $id})").param("id", id.clone())
//! ]).await;
//!
//! assert!(result.is_ok());
//! txn.commit().await.unwrap();
//! let mut result = graph
//! .execute(query("MATCH (p:Person) WHERE p.id = $id RETURN p.id").param("id", id.clone()))
//! .await
//! .unwrap();
//! # assert!(result.next().await.unwrap().is_some());
//! # assert!(result.next().await.unwrap().is_some());
//! # assert!(result.next().await.unwrap().is_none());
//! }
//!
//! ```
//!
//! ### Streams within a transaction
//!
//! Each [`RowStream`] returned by various execute within the same transaction are well isolated,
//! so you can consume the stream anytime within the transaction using [`RowStream::next`]
//!
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let config = config()
//! .uri("127.0.0.1:7687")
//! .user("neo4j")
//! .password("neo")
//! .fetch_size(1)
//! .build()
//! .unwrap();
//! let graph = Graph::connect(config).await.unwrap();
//! let name = Uuid::new_v4().to_string();
//! let txn = graph.start_txn().await.unwrap();
//!
//! txn.run_queries(vec![
//! query("CREATE (p { name: $name })").param("name", name.clone()),
//! query("CREATE (p { name: $name })").param("name", name.clone()),
//! ])
//! .await
//! .unwrap();
//!
//!
//! //start stream_one
//! let mut stream_one = txn
//! .execute(query("MATCH (p {name: $name}) RETURN p").param("name", name.clone()))
//! .await
//! .unwrap();
//! let row = stream_one.next().await.unwrap().unwrap();
//! assert_eq!(row.get::<Node>("p").unwrap().get::<String>("name").unwrap(), name.clone());
//!
//! //start stream_two
//! let mut stream_two = txn.execute(query("RETURN 1")).await.unwrap();
//! let row = stream_two.next().await.unwrap().unwrap();
//! assert_eq!(row.get::<i64>("1").unwrap(), 1);
//!
//! //stream_one is still active here
//! let row = stream_one.next().await.unwrap().unwrap();
//! assert_eq!(row.get::<Node>("p").unwrap().get::<String>("name").unwrap(), name.clone());
//!
//! //stream_one completes
//! assert!(stream_one.next().await.unwrap().is_none());
//! //stream_two completes
//! assert!(stream_two.next().await.unwrap().is_none());
//! txn.commit().await.unwrap();
//! }
//!
//! ```
//!
//!
//! ### Rollback a transaction
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//!
//! let txn = graph.start_txn().await.unwrap();
//! let id = Uuid::new_v4().to_string();
//! // create a node
//! txn.run(query("CREATE (p:Person {id: $id})").param("id", id.clone()))
//! .await
//! .unwrap();
//! // rollback the changes
//! txn.rollback().await.unwrap();
//!
//! // changes not updated in the database
//! let mut result = graph
//! .execute(query("MATCH (p:Person) WHERE p.id = $id RETURN p.id").param("id", id.clone()))
//! .await
//! .unwrap();
//! assert!(result.next().await.unwrap().is_none());
//! }
//!
//! ```
//!
//! ### Txn vs Graph
//!
//! Everytime you execute a query using [`Graph::run`] or [`Graph::execute`], a new connection is
//! taken from the pool and released immediately.
//!
//! However, when you execute a query on a transaction using [`Txn::run`] or [`Txn::execute`] the
//! same connection will be reused, the underlying connection will be released to the pool in a
//! clean state only after you commit/rollback the transaction and the [`Txn`] handle is dropped.
//!
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//! let txn = graph.start_txn().await.unwrap();
//! let id = Uuid::new_v4().to_string();
//! txn.run(query("CREATE (p:Person {id: $id})").param("id", id.clone()))
//! .await
//! .unwrap();
//! txn.run(query("CREATE (p:Person {id: $id})").param("id", id.clone()))
//! .await
//! .unwrap();
//! // graph.execute(..) will not see the changes done above as the txn is not committed yet
//! let mut result = graph
//! .execute(query("MATCH (p:Person) WHERE p.id = $id RETURN p.id").param("id", id.clone()))
//! .await
//! .unwrap();
//! assert!(result.next().await.unwrap().is_none());
//! txn.commit().await.unwrap();
//!
//! //changes are now seen as the transaction is committed.
//! let mut result = graph
//! .execute(query("MATCH (p:Person) WHERE p.id = $id RETURN p.id").param("id", id.clone()))
//! .await
//! .unwrap();
//! assert!(result.next().await.unwrap().is_some());
//! assert!(result.next().await.unwrap().is_some());
//! assert!(result.next().await.unwrap().is_none());
//! }
//!
//! ```
//!
//! ## Relationships
//!
//! Bounded Relationship between nodes are created using cypher queries and the same can be parsed
//! from the [`RowStream`]
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//! let mut result = graph.execute(
//! query("CREATE (p:Person { name: 'Oliver Stone' })-[r:WORKS_AT {as: 'Engineer'}]->(neo) RETURN r")
//! ).await.unwrap();
//!
//! let row = result.next().await.unwrap().unwrap();
//! let relation: Relation = row.get("r").unwrap();
//! assert!(relation.id() > -1);
//! assert!(relation.start_node_id() > -1);
//! assert!(relation.end_node_id() > -1);
//! assert_eq!(relation.typ(), "WORKS_AT");
//! assert_eq!(relation.get::<String>("as").unwrap(), "Engineer");
//! }
//! ```
//!
//!
//! Similar to bounded relation, an unbounded relation can also be created/parsed.
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//! let mut result = graph.execute(
//! query("MERGE (p1:Person { name: 'Oliver Stone' })-[r:RELATED {as: 'friend'}]-(p2: Person {name: 'Mark'}) RETURN r")
//! ).await.unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let relation: Relation = row.get("r").unwrap();
//! assert!(relation.id() > -1);
//! assert!(relation.start_node_id() > -1);
//! assert!(relation.end_node_id() > -1);
//! assert_eq!(relation.typ(), "RELATED");
//! assert_eq!(relation.get::<String>("as").unwrap(), "friend");
//! }
//!
//! ```
//!
//!
//!
//! ## Points
//!
//! A 2d or 3d point can be represented with the types [`Point2D`] and [`Point3D`]
//!
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//!
//! let mut result = graph
//! .execute(query(
//! "WITH point({ x: 2.3, y: 4.5, crs: 'cartesian' }) AS p1,
//! point({ x: 1.1, y: 5.4, crs: 'cartesian' }) AS p2 RETURN distance(p1,p2) AS dist, p1, p2",
//! ))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let dist: f64 = row.get("dist").unwrap();
//! let p1: Point2D = row.get("p1").unwrap();
//! let p2: Point2D = row.get("p2").unwrap();
//! assert_eq!(1.5, dist);
//! assert_eq!(p1.sr_id(), 7203);
//! assert_eq!(p1.x(), 2.3);
//! assert_eq!(p1.y(), 4.5);
//! assert_eq!(p2.sr_id(), 7203);
//! assert_eq!(p2.x(), 1.1);
//! assert_eq!(p2.y(), 5.4);
//! assert!(result.next().await.unwrap().is_none());
//!
//! let mut result = graph
//! .execute(query(
//! "RETURN point({ longitude: 56.7, latitude: 12.78, height: 8 }) AS point",
//! ))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let point: Point3D = row.get("point").unwrap();
//! assert_eq!(point.sr_id(), 4979);
//! assert_eq!(point.x(), 56.7);
//! assert_eq!(point.y(), 12.78);
//! assert_eq!(point.z(), 8.0);
//! assert!(result.next().await.unwrap().is_none());
//!
//! }
//!
//! ```
//!
//! ## Raw bytes
//!
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//! let mut result = graph
//! .execute(query("RETURN $b as output").param("b", vec![11, 12]))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let b: Vec<u8> = row.get("output").unwrap();
//! assert_eq!(b, &[11, 12]);
//! assert!(result.next().await.unwrap().is_none());
//! }
//!
//! ```
//!
//! ## Durations
//!
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//! let duration = std::time::Duration::new(5259600, 7);
//! let mut result = graph
//! .execute(query("RETURN $d as output").param("d", duration))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let d: std::time::Duration = row.get("output").unwrap();
//! assert_eq!(d.as_secs(), 5259600);
//! assert_eq!(d.subsec_nanos(), 7);
//! assert!(result.next().await.unwrap().is_none());
//! }
//!
//! ```
//! ## Date
//!
//! See [NaiveDate][naive_date] for date abstraction, it captures the date without time component.
//!
//! [naive_date]: https://docs.rs/chrono/0.4.19/chrono/naive/struct.NaiveDate.html
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//! let date = chrono::NaiveDate::from_ymd(1985, 2, 5);
//! let mut result = graph
//! .execute(query("RETURN $d as output").param("d", date))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let d: chrono::NaiveDate = row.get("output").unwrap();
//! assert_eq!(d.to_string(), "1985-02-05");
//! assert!(result.next().await.unwrap().is_none());
//! }
//! ```
//!
//!
//! ## Time
//!
//! * [NaiveTime][naive_time] captures only the time of the day
//! * `tuple`([NaiveTime][naive_time], `Option`<[FixedOffset][fixed_offset]>) captures the time of the day along with the
//! offset
//!
//! [naive_time]: https://docs.rs/chrono/0.4.19/chrono/naive/struct.NaiveTime.html
//! [fixed_offset]: https://docs.rs/chrono/0.4.19/chrono/offset/struct.FixedOffset.html
//!
//!
//! ### Time as param
//!
//! Pass a time as a parameter to the query:
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//!
//! //send time without offset as param
//! let time = chrono::NaiveTime::from_hms_nano(11, 15, 30, 200);
//! let mut result = graph.execute(query("RETURN $d as output").param("d", time)).await.unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let t: (chrono::NaiveTime, Option<chrono::FixedOffset>) = row.get("output").unwrap();
//! assert_eq!(t.0.to_string(), "11:15:30.000000200");
//! assert_eq!(t.1, None);
//! assert!(result.next().await.unwrap().is_none());
//!
//!
//! //send time with offset as param
//! let time = chrono::NaiveTime::from_hms_nano(11, 15, 30, 200);
//! let offset = chrono::FixedOffset::east(3 * 3600);
//! let mut result = graph
//! .execute(query("RETURN $d as output").param("d", (time, offset)))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let t: (chrono::NaiveTime, Option<chrono::FixedOffset>) = row.get("output").unwrap();
//! assert_eq!(t.0.to_string(), "11:15:30.000000200");
//! assert_eq!(t.1, Some(offset));
//! assert!(result.next().await.unwrap().is_none());
//! }
//! ```
//!
//!
//! ### Parsing time from result
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//!
//! //Parse time without offset
//! let mut result = graph
//! .execute(query(
//! " WITH time({hour:10, minute:15, second:30, nanosecond: 200}) AS t RETURN t",
//! ))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let t: (chrono::NaiveTime, Option<chrono::FixedOffset>) = row.get("t").unwrap();
//! assert_eq!(t.0.to_string(), "10:15:30.000000200");
//! assert_eq!(t.1, None);
//! assert!(result.next().await.unwrap().is_none());
//!
//! //Parse time with timezone information
//! let mut result = graph
//! .execute(query(
//! " WITH time({hour:10, minute:15, second:33, nanosecond: 200, timezone: '+01:00'}) AS t RETURN t",
//! ))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let t: (chrono::NaiveTime, Option<chrono::FixedOffset>) = row.get("t").unwrap();
//! assert_eq!(t.0.to_string(), "10:15:33.000000200");
//! assert_eq!(t.1, Some(chrono::FixedOffset::east(1 * 3600)));
//! assert!(result.next().await.unwrap().is_none());
//! }
//!
//! ```
//!
//!
//! ## DateTime
//!
//!
//! * [DateTime][date_time] captures the date and time with offset
//! * [NaiveDateTime][naive_date_time] captures the date time without offset
//! * `tuple`([NaiveDateTime][naive_date_time], String) captures the date/time and the time zone id
//!
//! [date_time]: https://docs.rs/chrono/0.4.19/chrono/struct.DateTime.html
//! [naive_date_time]: https://docs.rs/chrono/0.4.19/chrono/struct.NaiveDateTime.html
//!
//!
//! ### DateTime as param
//!
//! Pass a DateTime as parameter to the query:
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//!
//! //send datetime as parameter in the query
//! let datetime = chrono::DateTime::parse_from_rfc2822("Tue, 01 Jul 2003 10:52:37 +0200").unwrap();
//!
//! let mut result = graph
//! .execute(query("RETURN $d as output").param("d", datetime))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let t: chrono::DateTime<chrono::FixedOffset> = row.get("output").unwrap();
//! assert_eq!(t.to_string(), "2003-07-01 10:52:37 +02:00");
//! assert!(result.next().await.unwrap().is_none());
//!
//! //send NaiveDateTime as parameter in the query
//! let localdatetime = chrono::NaiveDateTime::parse_from_str("2015-07-01 08:55:59.123", "%Y-%m-%d %H:%M:%S%.f").unwrap();
//!
//! let mut result = graph
//! .execute(query("RETURN $d as output").param("d", localdatetime))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let t: chrono::NaiveDateTime = row.get("output").unwrap();
//! assert_eq!(t.to_string(), "2015-07-01 08:55:59.123");
//! assert!(result.next().await.unwrap().is_none());
//!
//! //send NaiveDateTime with timezone id as parameter in the query
//! let datetime = chrono::NaiveDateTime::parse_from_str("2015-07-03 08:55:59.555", "%Y-%m-%d %H:%M:%S%.f").unwrap();
//! let timezone = "Europe/Paris";
//!
//! let mut result = graph
//! .execute(query("RETURN $d as output").param("d", (datetime, timezone)))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let (time, zone): (chrono::NaiveDateTime, String) = row.get("output").unwrap();
//! assert_eq!(time.to_string(), "2015-07-03 08:55:59.555");
//! assert_eq!(zone, "Europe/Paris");
//! assert!(result.next().await.unwrap().is_none());
//!
//! }
//! ```
//!
//! ### Parsing DateTime from result
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//!
//! //Parse NaiveDateTime from result
//! let mut result = graph
//! .execute(query(
//! "WITH localdatetime('2015-06-24T12:50:35.556') AS t RETURN t",
//! ))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let t: chrono::NaiveDateTime = row.get("t").unwrap();
//! assert_eq!(t.to_string(), "2015-06-24 12:50:35.556");
//! assert!(result.next().await.unwrap().is_none());
//!
//! //Parse DateTime from result
//! let mut result = graph
//! .execute(query(
//! "WITH datetime('2015-06-24T12:50:35.777+0100') AS t RETURN t",
//! ))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let t: chrono::DateTime<chrono::FixedOffset> = row.get("t").unwrap();
//! assert_eq!(t.to_string(), "2015-06-24 12:50:35.777 +01:00");
//! assert!(result.next().await.unwrap().is_none());
//!
//!
//! //Parse NaiveDateTime with zone id from result
//! let mut result = graph
//! .execute(query(
//! "WITH datetime({ year:1984, month:11, day:11, hour:12, minute:31, second:14, nanosecond: 645876123, timezone:'Europe/Stockholm' }) AS d return d",
//! ))
//! .await
//! .unwrap();
//! let row = result.next().await.unwrap().unwrap();
//! let (datetime, zone_id): (chrono::NaiveDateTime, String) = row.get("d").unwrap();
//! assert_eq!(datetime.to_string(), "1984-11-11 12:31:14.645876123");
//! assert_eq!(zone_id, "Europe/Stockholm");
//! assert!(result.next().await.unwrap().is_none());
//!
//! }
//!
//! ```
//!
//!
//!
//! ## Path
//!
//! ```
//! use neo4rs::*;
//! use futures::stream::*;
//! use uuid::Uuid;
//!
//! #[tokio::main]
//! async fn main() {
//! let uri = "127.0.0.1:7687";
//! let user = "neo4j";
//! let pass = "neo";
//! let graph = Graph::new(uri, user, pass).await.unwrap();
//! let name = Uuid::new_v4().to_string();
//! graph.run(
//! query("CREATE (p:Person { name: $name })-[r:WORKS_AT]->(n:Company { name: 'Neo'})").param("name", name.clone()),
//! ).await.unwrap();
//!
//! let mut result = graph.execute(
//! query("MATCH p = (person:Person { name: $name })-[r:WORKS_AT]->(c:Company) RETURN p").param("name", name),
//! ).await.unwrap();
//!
//! let row = result.next().await.unwrap().unwrap();
//! let path: Path = row.get("p").unwrap();
//! assert_eq!(path.ids().len(), 2);
//! assert_eq!(path.nodes().len(), 2);
//! assert_eq!(path.rels().len(), 1);
//! assert!(result.next().await.unwrap().is_none());
//! }
//! ```
//!
//!
mod config;
mod connection;
mod convert;
mod errors;
mod graph;
mod messages;
mod pool;
mod query;
mod row;
mod stream;
mod txn;
mod types;
mod version;
pub use crate::config::{config, Config, ConfigBuilder};
pub use crate::errors::*;
pub use crate::graph::{query, Graph};
pub use crate::query::Query;
pub use crate::row::{Node, Path, Point2D, Point3D, Relation, Row, UnboundedRelation};
pub use crate::stream::RowStream;
pub use crate::txn::Txn;
pub use crate::version::Version;
| true
|
d320ba4dead81e1700bb8d05c238195c94e13571
|
Rust
|
dwuid/advent-of-code-2015
|
/aoc_15/src/main.rs
|
UTF-8
| 3,333
| 3.421875
| 3
|
[
"MIT"
] |
permissive
|
use std::cmp::max;
use std::str::from_utf8;
struct Ingredient {
_name: String,
capacity: i32,
durability: i32,
flavor: i32,
texture: i32,
calories: i32
}
// TODO: Make this a proper iterator. Also reduce allocations?
fn sum_permutations(n: usize, k: usize) -> Vec<Vec<usize>> {
match (n, k) {
(0, _) => vec![],
(_, 0) => vec![vec![0; n]],
(1, _) => vec![vec![k; 1]],
_ => {
let mut solutions = Vec::new();
for tail in sum_permutations(n - 1, k) {
let mut current = vec![0; 1];
current.extend(tail);
solutions.push(current);
}
for tail in sum_permutations(n, k - 1) {
let mut current = vec![tail[0] + 1; 1];
current.extend(&tail[1..]);
solutions.push(current);
}
solutions
}
}
}
fn solve(ingredients: &Vec<Ingredient>, teaspoons: usize) -> u32 {
let mut perfect_score = 0;
for distribution in sum_permutations(ingredients.len(), teaspoons) {
let mut scores = [0i32; 4];
let mut calories = 0;
for (amount, ingredient) in distribution.iter()
.zip(ingredients.iter()) {
let amount = *amount as i32;
scores[0] += ingredient.capacity * amount;
scores[1] += ingredient.durability * amount;
scores[2] += ingredient.flavor * amount;
scores[3] += ingredient.texture * amount;
calories += ingredient.calories * amount;
}
if calories != 500 {
continue;
}
let total_score = if scores.iter().any(|i| *i < 0) {
0
} else {
scores.iter().fold(1u32, |acc, i| acc * (*i as u32))
};
perfect_score = max(perfect_score, total_score);
}
perfect_score
}
static PARSE_ERROR: &'static str = "Invalid input format.";
fn main() {
let input = from_utf8(include_bytes!("../input.txt")).unwrap();
let mut ingredients = Vec::new();
for line in input.split('\n').filter(|l| !l.is_empty()) {
let tokens: Vec<_> = line.split(' ').collect();
if tokens.len() < 11 {
panic!(PARSE_ERROR);
}
// String::pop()? This is pretty ugly.
let capacity = &tokens[2][..tokens[2].len() - 1];
let durability = &tokens[4][..tokens[4].len() - 1];
let flavor = &tokens[6][..tokens[6].len() - 1];
let texture = &tokens[8][..tokens[8].len() - 1];
let calories = tokens[10];
let name = tokens[0][..tokens[0].len() - 1].to_string();
let capacity: i32 = capacity.parse().expect(PARSE_ERROR);
let durability: i32 = durability.parse().expect(PARSE_ERROR);
let flavor: i32 = flavor.parse().expect(PARSE_ERROR);
let texture: i32 = texture.parse().expect(PARSE_ERROR);
let calories: i32 = calories.parse().expect(PARSE_ERROR);
ingredients.push(Ingredient {
_name: name, capacity: capacity, durability: durability,
flavor: flavor, texture: texture, calories: calories
});
}
println!("The tastiest 500 calorie cookie is worth {} points.",
solve(&ingredients, 100));
}
| true
|
519f5099c0a46628b3d91255b14782d84c09a4ed
|
Rust
|
antifuchs/htpasswd
|
/htpasswd/src/lib.rs
|
UTF-8
| 7,794
| 3.421875
| 3
|
[
"Apache-2.0"
] |
permissive
|
//! # `htpasswd` - Load & validate credentials against Apache `.htpasswd` files.
//!
//! This crate provides types and functions that are useful for
//! validating credentials stored in
//! [`.htpasswd`](https://httpd.apache.org/docs/2.4/misc/password_encryptions.html)
//! files, as popularized by the Apache web server.
//!
//! ## Compatibility
//!
//! While `.htpasswd` files allow storing credentials in multiple
//! formats, this crate supports only the bcrypt password storage
//! format. Validating credentials against any other scheme (MD5,
//! SHA1, crypt or plaintext) will result in an authentication error
//! indicating that the storage format is insecure.
//!
//! # Example
//!
//! ```rust
//! # fn main() -> Result<(), htpasswd::ParseFailure> {
//! // the password is "secret"
//! let htpasswd_contents = "username:$2y$05$xT4MzeZJQmgv7XQQGYbf/eP.ING1L9m.iOZF/yUQIYKmYnmEYkfme";
//! let db = htpasswd::parse_htpasswd_str(htpasswd_contents)?;
//! assert_eq!(Ok(()), db.validate("username", "secret"));
//! # Ok(())
//! # }
//! ```
//!
use bcrypt;
use nom;
use std::collections::hash_map::HashMap;
use std::error::Error;
use std::fmt;
use std::fs::read_to_string;
use std::io;
use std::io::Read;
use std::ops::Deref;
use std::path::Path;
use std::str;
use std::str::FromStr;
// The type to use as input to parsers in this crate.
pub use nom::types::CompleteStr as Input;
mod errors;
mod parse;
pub use errors::*;
pub use parse::{ParseErrorKind, ParseFailure};
/// Represents a password hashed with a particular method.
///
/// Only the `Bcrypt` variant can actually be verified; the legacy
/// variants are parsed but rejected at validation time (see
/// [`PasswordDB::validate`]).
#[derive(Debug, PartialEq)]
enum PasswordHash {
    Bcrypt(String),
    SHA1(String),
    MD5(String),
    Crypt(String),
}
/// An in-memory representation of a `.htpasswd` file.
///
/// Wraps a map from user name to that user's stored password hash.
#[derive(Debug, PartialEq)]
pub struct PasswordDB(HashMap<String, PasswordHash>);
impl PasswordDB {
    /// Checks the provided username and password against the database
    /// and returns `Ok(())` if both match. Otherwise, returns an
    /// error indicating the problem with the provided or the stored
    /// credentials.
    pub fn validate(&self, user: &str, password: &str) -> Result<(), AuthError> {
        use crate::PasswordHash::*;
        // Unknown user -> NoSuchUser; the trailing `?` converts the
        // `BadCredentials` (and the bcrypt error below) into `AuthError`.
        match self.0.get(user).ok_or_else(|| BadCredentials::NoSuchUser)? {
            Bcrypt(hash) => match bcrypt::verify(password, hash)? {
                true => Ok(()),
                false => Err(BadCredentials::InvalidPassword)?,
            },
            // MD5/SHA1/crypt entries are deliberately refused rather
            // than verified (see the module docs on compatibility).
            _ => Err(BadCredentials::InsecureStorage)?,
        }
    }
}
impl FromStr for PasswordDB {
    type Err = ParseFailure;
    /// Enables `"…".parse::<PasswordDB>()`; delegates to
    /// [`parse_htpasswd_str`].
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        parse_htpasswd_str(s)
    }
}
/// Parses an htpasswd-formatted string and returns the entries in it
/// as a hash table, mapping user names to password hashes.
///
/// # Errors
///
/// Returns a [`ParseFailure`] locating the first malformed entry.
pub fn parse_htpasswd_str(contents: &str) -> Result<PasswordDB, ParseFailure> {
    let entries = parse::parse_entries(contents)?;
    Ok(PasswordDB(entries))
}
/// Error produced while loading a `.htpasswd` database from an input
/// source: either the bytes could not be read, or they did not parse.
#[derive(Debug)]
pub enum LoadFailure {
    /// The input was readable but is not valid htpasswd data.
    Parse(ParseFailure),
    /// The underlying I/O operation failed.
    Io(io::Error),
}
// The two `From` impls below let callers use `?` on both error kinds.
impl From<io::Error> for LoadFailure {
    fn from(f: io::Error) -> Self {
        LoadFailure::Io(f)
    }
}
impl From<ParseFailure> for LoadFailure {
    fn from(f: ParseFailure) -> Self {
        LoadFailure::Parse(f)
    }
}
impl fmt::Display for LoadFailure {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
use LoadFailure::*;
write!(
f,
"loading htpasswd data: {}",
match self {
Parse(pf) => format!("parse failure:{}:{}: {}", pf.line, pf.column, pf.kind),
Io(io) => format!("reading: {}", io),
}
)
}
}
/// Allows loading .htpasswd data from certain types, e.g. `io.Read`
/// and `Path` objects. Note that due to the way the parser is
/// implemented, the entire input stream has to be read before
/// parsing.
pub trait HtpasswdLoad {
    /// Reads self to the end and parses a .htpasswd database from it.
    fn load_htpasswd(&mut self) -> Result<PasswordDB, LoadFailure>;
}
// Blanket impl: any reader (file, socket, byte slice, …) is slurped
// into a String and handed to the parser.
impl<T> HtpasswdLoad for T
where
    T: Read + Sized,
{
    fn load_htpasswd(&mut self) -> Result<PasswordDB, LoadFailure> {
        let mut str = String::new();
        self.read_to_string(&mut str)?;
        Ok(parse_htpasswd_str(&str)?)
    }
}
// NOTE(review): the trait forces `&mut self`, so callers need a
// `&mut Path` even though the path is only read here — confirm whether
// a `&self` (or `AsRef<Path>`-based) API was intended.
impl HtpasswdLoad for Path {
    fn load_htpasswd(&mut self) -> Result<PasswordDB, LoadFailure> {
        let contents = read_to_string(self)?;
        Ok(parse_htpasswd_str(&contents)?)
    }
}
/// Describes some method of keeping a password DB loaded and updated in memory.
///
/// When using authentication middleware in a long-running server,
/// using a regular PasswordDB is not always the easiest or most
/// performant way to keep the database in sync with the database on
/// disk: You might be tempted to load the password DB from disk every
/// time a request is made.
///
/// Instead, you could implement a `PasswordDBSource`, e.g. keeping
/// the actual DB behind a Mutex and updating it whenever the backing
/// file changes.
pub trait PasswordDBSource {
    /// Any error that can occur from attempts to load the
    /// `PasswordDB`. This will typically be `LoadFailure`.
    type Error: Sized + Error;
    /// The type returned by `get`. It's meant to be compatible with
    /// `MutexGuard`, expected to be a RAII type that frees up the
    /// underlying lock/resource when it is dropped.
    type Reference: Sized + Deref<Target = Result<PasswordDB, Self::Error>>;
    /// Return a RAII object that yields a `PasswordDB` if it was
    /// recently refreshed according to the implementation's
    /// criteria. Otherwise, returns the error that the last load
    /// attempt produced.
    fn get(&self) -> Self::Reference;
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Malformed usernames/passwords must be rejected with a precise
    /// error kind and source location.
    #[test]
    fn bad_fields() {
        // Valid first entry, second line with an invalid password field.
        assert_eq!(
            Err(ParseFailure {
                kind: ParseErrorKind::BadPassword,
                offset: 69,
                line: 2,
                column: 5
            }),
            parse_htpasswd_str(
                "asf:$2y$05$6mQlzTSUkBbyHDU7XIwQaO3wOEDZpUdYR4YxRXgM2gqe/nwJSy.96\n___:"
            )
        );
        // Username made of invalid characters.
        assert_eq!(
            Err(ParseFailure {
                kind: ParseErrorKind::BadUsername,
                offset: 0,
                line: 1,
                column: 1
            }),
            parse_htpasswd_str("___")
        );
        // Empty input.
        assert_eq!(
            Err(ParseFailure {
                kind: ParseErrorKind::BadUsername,
                offset: 0,
                line: 1,
                column: 1
            }),
            parse_htpasswd_str("")
        );
        // Separator with neither username nor password. (This exact
        // assertion appeared twice in a row before; the verbatim
        // duplicate has been removed.)
        assert_eq!(
            Err(ParseFailure {
                kind: ParseErrorKind::BadUsername,
                offset: 0,
                line: 1,
                column: 1
            }),
            parse_htpasswd_str(":")
        );
    }
    /// End-to-end: parse a two-user DB, then validate good and bad
    /// credentials against it.
    #[test]
    fn validate() {
        let entries = parse_htpasswd_str(
            "asf:$2y$05$6mQlzTSUkBbyHDU7XIwQaO3wOEDZpUdYR4YxRXgM2gqe/nwJSy.96
bsf:$2y$05$9U5xoWYrBX687.C.MEhsae5LfOrlUqqMSfE2Cpo4K.jyvy3lA.Ijy",
        )
        .unwrap();
        assert_eq!(Ok(()), entries.validate("asf", "oink"));
        assert_eq!(Ok(()), entries.validate("bsf", "areisntoiarnstoanrsit"));
        assert_eq!(
            Err(AuthError::NotAuthenticated(BadCredentials::InvalidPassword)),
            entries.validate("asf", "wrong")
        );
        assert_eq!(
            Err(AuthError::NotAuthenticated(BadCredentials::NoSuchUser)),
            entries.validate("unperson", "unpassword")
        );
    }
}
| true
|
761b6c271ddacac2c28ef4072956d51364f5ec21
|
Rust
|
Simon-Laux/delta-command-api
|
/src/error.rs
|
UTF-8
| 1,041
| 2.8125
| 3
|
[] |
no_license
|
use serde::Serialize;
/// A single error to report back over the command API: a
/// machine-readable kind plus a human-readable message.
#[derive(Debug)]
pub struct ErrorInstance {
    // Category of the failure (the serializable part).
    pub kind: ErrorType,
    // Free-form description of what went wrong.
    pub message: String,
}
/// The category of an [`ErrorInstance`].
#[derive(Serialize, Debug)]
pub enum ErrorType {
    CommandIdMissing,
    CommandNotFound,
    CommandNotImplementedYet,
    CommandParseFailure,
    NoContext,
    /** the command threw an Error */
    Generic,
    DeltaChatSQLError,
    AnyhowError,
}
// Lets `?` convert any anyhow error into an API error response.
impl From<anyhow::Error> for ErrorInstance {
    fn from(err: anyhow::Error) -> ErrorInstance {
        ErrorInstance {
            kind: ErrorType::AnyhowError,
            message: format!("{:?}", err),
        }
    }
}
// Lets `?` convert Delta Chat SQL-layer errors into an API error
// response.
impl From<deltachat::sql::Error> for ErrorInstance {
    fn from(err: deltachat::sql::Error) -> ErrorInstance {
        ErrorInstance {
            kind: ErrorType::DeltaChatSQLError,
            message: format!("SQL error: {:?}", err),
        }
    }
}
/// Builds a `Generic` [`ErrorInstance`] from a string-like expression.
#[macro_export]
macro_rules! genericError {
    ($err:expr) => {
        ErrorInstance {
            kind: ErrorType::Generic,
            message: $err.to_owned(),
        }
    };
}
| true
|
f2b0569ac3ac243755223c4530d4da284ec3ce74
|
Rust
|
stv0g/k66
|
/src/usbhs/hwhost.rs
|
UTF-8
| 2,608
| 2.90625
| 3
|
[
"MIT"
] |
permissive
|
// Machine-generated (svd2rust-style) read-only accessor for the USBHS
// `HWHOST` hardware-parameters register: `read()` takes a one-shot
// snapshot of the register and the per-field structs decode individual
// bit ranges out of that snapshot. Do not hand-edit field offsets —
// they come from the device's SVD description.
#[doc = r" Value read from the register"]
pub struct R {
    bits: u32,
}
impl super::HWHOST {
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
}
#[doc = r" Value of the field"]
pub struct HCR {
    bits: bool,
}
impl HCR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct NPORTR {
    bits: u8,
}
impl NPORTR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r" Value of the field"]
pub struct TTASYR {
    bits: u8,
}
impl TTASYR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r" Value of the field"]
pub struct TTPERR {
    bits: u8,
}
impl TTPERR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 0 - Host Capable"]
    #[inline]
    pub fn hc(&self) -> HCR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        HCR { bits }
    }
    #[doc = "Bits 1:3 - Number of Ports"]
    #[inline]
    pub fn nport(&self) -> NPORTR {
        let bits = {
            const MASK: u8 = 7;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        };
        NPORTR { bits }
    }
    #[doc = "Bits 16:23 - Transaction translator contexts."]
    #[inline]
    pub fn ttasy(&self) -> TTASYR {
        let bits = {
            const MASK: u8 = 255;
            const OFFSET: u8 = 16;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        };
        TTASYR { bits }
    }
    #[doc = "Bits 24:31 - Transaction translator periodic contexts."]
    #[inline]
    pub fn ttper(&self) -> TTPERR {
        let bits = {
            const MASK: u8 = 255;
            const OFFSET: u8 = 24;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        };
        TTPERR { bits }
    }
}
| true
|
6668bdf02973626a0c9d5f067b2e42961ce15351
|
Rust
|
the-eater/lzham
|
/src/lib.rs
|
UTF-8
| 1,635
| 3.21875
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! High-level Rust bindings over the [`lzham_codec`].
//!
//! `lzham` provides high level functions and structures to compress and decompress
//! data based on the LZHAM codec.
//!
//! The crate has not been tested enough yet and some functionality is still unimplemented.
//!
//! ## Examples
//!
//! ```no_run
//! # use lzham::{compress, decompress};
//! let data = String::from("This is a test.");
//!
//! let mut comp = Vec::new();
//! let status = compress(&mut data.as_bytes(), &mut comp);
//!
//! assert!(status.is_success());
//!
//! let mut decomp = Vec::new();
//! let status = decompress(&mut comp.as_slice(), &mut decomp, data.len());
//!
//! assert!(status.is_success());
//! ```
//!
//! [`lzham_codec`]: https://github.com/richgel999/lzham_codec
pub mod compress;
pub mod decompress;
mod low;
#[doc(inline)]
pub use compress::{compress, compress_with_options, CompressionOptions};
#[doc(inline)]
pub use decompress::{decompress, decompress_with_options, DecompressionOptions};
#[doc(inline)]
pub use low::{TableUpdateInterval, TableUpdateRate};
// Compile the tests only under `cargo test`; without `#[cfg(test)]`
// this module is built into the release library as well.
#[cfg(test)]
mod test {
    /// Round-trips a short string through compress/decompress,
    /// checking both status codes and the recovered bytes.
    #[test]
    fn test_compress_and_decompress() {
        use crate::{compress, decompress};
        let data = String::from(
            "This is a test.This is a test.This is a test.\
            1234567This is a test.This is a test.123456",
        );
        let mut comp = Vec::new();
        let status = compress(&mut data.as_bytes(), &mut comp);
        assert!(status.is_success());
        let mut decomp = Vec::new();
        let status = decompress(&mut comp.as_slice(), &mut decomp, data.len());
        assert!(status.is_success());
        // Success status alone is not enough: the decompressed output
        // must reproduce the original input exactly.
        assert_eq!(decomp, data.as_bytes());
    }
}
| true
|
256242cc7dc06f5177678f523d7ed9dbebac31d0
|
Rust
|
nical/vodk.rs
|
/gfx2d/simple_allocator.rs
|
UTF-8
| 9,505
| 2.875
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
use range::Range;
/// Generational handle to a block: `index` addresses a slot in
/// `AllocatorHelper::blocks`, while `gen` guards against stale handles
/// once that slot has been recycled.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct BlockId {
    index: usize,
    gen: u16,
}
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum BlockState {
    Used,
    Unused,
}
// Internal linked-list node describing one contiguous sub-range.
struct AllocatorBlock {
    range: Range,
    prev: Option<usize>,
    next: Option<usize>,
    state: BlockState,
    // Generation counter; 0 marks a freed slot (see `merge_next`).
    gen: u16,
}
/// Allocator over a single contiguous `Range`, kept as a doubly linked
/// list of used/unused blocks stored in a slab (`blocks`) with a
/// free-slot list (`available_slots`) for recycling.
pub struct AllocatorHelper {
    blocks: Vec<AllocatorBlock>,
    available_slots: Vec<usize>,
    first: usize,
    last: usize,
    next_gen: u16,
}
impl AllocatorHelper {
pub fn new(range: Range, state: BlockState) -> AllocatorHelper {
AllocatorHelper {
blocks: vec![AllocatorBlock {
range: range,
prev: None,
next: None,
state: state,
gen: 1,
}],
available_slots: Vec::new(),
first: 0,
last: 0,
next_gen: 2,
}
}
pub fn split(
&mut self,
id: BlockId,
at: u16,
left_state: BlockState,
right_state: BlockState
) -> (BlockId, BlockId) {
assert!(self.contains_block_id(id));
let next = self.blocks[id.index].next;
let first = self.blocks[id.index].range.first;
let new_count = self.blocks[id.index].range.count - at;
let new_index;
let left_gen = self.get_next_gen();
let right_gen = self.get_next_gen();
match self.available_slots.pop() {
Some(idx) => {
self.blocks[idx] = AllocatorBlock {
range: Range { first: first + at, count: new_count },
prev: Some(id.index),
next: next,
state: right_state,
gen: right_gen,
};
new_index = idx;
}
None => {
self.blocks.push(AllocatorBlock {
range: Range { first: first + at, count: new_count },
prev: Some(id.index),
next: next,
state: right_state,
gen: right_gen,
});
new_index = self.blocks.len() - 1;
}
}
self.blocks[id.index].next = Some(new_index);
self.blocks[id.index].range.count = at;
self.blocks[id.index].state = left_state;
self.blocks[id.index].gen = left_gen;
if self.last == id.index { self.last = new_index; }
return (
BlockId { index: id.index, gen: left_gen },
BlockId { index: new_index, gen: right_gen }
);
}
pub fn merge_next(&mut self, id: BlockId, state: BlockState) -> BlockId {
assert!(self.contains_block_id(id));
let next = self.blocks[id.index].next;
let next = next.unwrap();
let next_next = self.blocks[next].next;
self.blocks[id.index].next = next_next;
self.blocks[id.index].range.count += self.blocks[next].range.count;
self.blocks[id.index].state = state;
self.blocks[id.index].gen = self.get_next_gen();
self.blocks[next].gen = 0;
self.blocks[next].range.count = 0;
if self.last == next { self.last = id.index; }
self.available_slots.push(next);
return BlockId {
index: id.index,
gen: self.blocks[id.index].gen
};
}
pub fn clear(&mut self) -> BlockId {
loop {
if self.first == self.last {
break;
}
let first = self.get_first();
self.merge_next(first, BlockState::Unused);
}
return self.get_first();
}
pub fn find_available_block(&self, size: u16) -> Option<BlockId> {
let mut it = self.first;
loop {
if self.blocks[it].state == BlockState::Unused
&& self.blocks[it].range.count >= size {
return Some(BlockId {
index: it,
gen: self.blocks[it].gen,
});
}
match self.blocks[it].next {
Some(next) => { it = next; }
None => { break; }
}
}
return None;
}
pub fn get_first(&self) -> BlockId {
BlockId { index: self.first, gen: self.blocks[self.first].gen }
}
pub fn get_last(&self) -> BlockId {
BlockId { index: self.last, gen: self.blocks[self.last].gen }
}
pub fn get_block_state(&self, id: BlockId) -> BlockState {
assert!(self.contains_block_id(id));
self.blocks[id.index].state
}
pub fn set_block_state(&mut self, id: BlockId, state:BlockState) {
assert!(self.contains_block_id(id));
self.blocks[id.index].state = state;
}
pub fn get_block_range(&self, id: BlockId) -> Range {
assert!(self.contains_block_id(id));
self.blocks[id.index].range
}
pub fn contains_block_id(&self, id: BlockId) -> bool {
id.index < self.blocks.len() && self.blocks[id.index].gen == id.gen
}
fn get_next_gen(&mut self) -> u16 {
self.next_gen += 1;
// Keep 0 as an always invalid generation
if self.next_gen == 0 { self.next_gen = 1; }
return self.next_gen;
}
pub fn get_next(&self, id: BlockId) -> Option<BlockId> {
if let Some(index) = self.blocks[id.index].next {
return Some(BlockId {
index: index,
gen: self.blocks[index].gen
});
}
return None;
}
pub fn get_previous(&self, id: BlockId) -> Option<BlockId> {
if let Some(index) = self.blocks[id.index].prev {
return Some(BlockId {
index: index,
gen: self.blocks[index].gen
});
}
return None;
}
pub fn enclosing_used_range(&self) -> Range {
let mut it = self.first;
let mut first;
loop {
first = self.blocks[it].range.first;
if self.blocks[it].state == BlockState::Used {
break;
}
if let Some(idx) = self.blocks[it].next {
it = idx;
} else {
break;
}
}
let mut it = self.last;
let mut last;
loop {
last = self.blocks[it].range.right_most();
if self.blocks[it].state == BlockState::Used {
break;
}
if let Some(idx) = self.blocks[it].prev {
it = idx;
} else {
break;
}
}
return Range { first: first, count: last - first };
}
pub fn blocks<'l>(&'l mut self) -> BlockIterator<'l> {
return BlockIterator {
allocator: self,
current: Some(self.get_first()),
filter: None,
};
}
pub fn blocks_with_state<'l>(&'l mut self, filter: BlockState) -> BlockIterator<'l> {
return BlockIterator {
allocator: self,
current: Some(self.get_first()),
filter: Some(filter),
};
}
}
/// Iterator over [`BlockId`]s, optionally restricted to one
/// [`BlockState`].
pub struct BlockIterator<'l> {
    allocator: &'l AllocatorHelper,
    current: Option<BlockId>,
    filter: Option<BlockState>,
}
impl<'l> Iterator for BlockIterator<'l> {
    type Item = BlockId;
    fn next(&mut self) -> Option<BlockId> {
        // Advance until a block passes the filter, or the list ends
        // (`current` becomes None, which also terminates the loop).
        loop {
            let current = self.current;
            let mut done = true;
            if let Some(id) = current {
                self.current = self.allocator.get_next(id);
                if let Some(filter) = self.filter {
                    if filter != self.allocator.get_block_state(id) {
                        done = false;
                    }
                }
            }
            if done {
                return current;
            }
        }
    }
    // Lower bound 0 because the filter may reject every block; the slab
    // length is a safe upper bound.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.allocator.blocks.len()))
    }
}
// Exercises split/merge/clear plus generational handle invalidation.
// NOTE(review): the unqualified `FromIterator::from_iter` calls rely on
// the trait being in scope (pre-2021 editions need
// `use std::iter::FromIterator;`) — confirm against the crate edition.
#[test]
fn test_allocator() {
    let mut alloc = AllocatorHelper::new(Range::new(0, 100), BlockState::Unused);
    assert_eq!(alloc.get_first(), alloc.get_last());
    let a0 = alloc.get_first();
    let ids: Vec<BlockId> = FromIterator::from_iter(alloc.blocks());
    assert_eq!(ids, vec![a0]);
    assert!(alloc.contains_block_id(a0));
    assert_eq!(alloc.get_block_state(a0), BlockState::Unused);
    // Splitting invalidates the original handle and yields two halves.
    let (a1, b1) = alloc.split(a0, 50, BlockState::Used, BlockState::Unused);
    assert!(!alloc.contains_block_id(a0));
    assert!(a1 != b1);
    assert_eq!(alloc.get_block_state(a1), BlockState::Used);
    assert_eq!(alloc.get_block_state(b1), BlockState::Unused);
    assert_eq!(alloc.get_block_range(a1), Range::new(0, 50));
    assert_eq!(alloc.get_block_range(b1), Range::new(50, 50));
    let ids: Vec<BlockId> = FromIterator::from_iter(alloc.blocks());
    assert_eq!(ids, vec![a1, b1]);
    let ids: Vec<BlockId> = FromIterator::from_iter(alloc.blocks_with_state(BlockState::Used));
    assert_eq!(ids, vec![a1]);
    let ids: Vec<BlockId> = FromIterator::from_iter(alloc.blocks_with_state(BlockState::Unused));
    assert_eq!(ids, vec![b1]);
    // Merging also invalidates both input handles.
    let a2 = alloc.merge_next(a1, BlockState::Unused);
    assert!(!alloc.contains_block_id(a1));
    assert!(!alloc.contains_block_id(b1));
    let ids: Vec<BlockId> = FromIterator::from_iter(alloc.blocks());
    assert_eq!(ids, vec![a2]);
    assert_eq!(alloc.get_block_range(a2), Range::new(0, 100));
    // `clear` must be idempotent.
    alloc.clear();
    alloc.clear();
}
| true
|
c8a5926524af71ba282d0d033505c7c6a3a5cdac
|
Rust
|
tinco/entity_rust
|
/src/lib.rs
|
UTF-8
| 2,795
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#![feature(type_macros)]
//! # Goal
//!
//! We want to define components, store them in an efficient manner and
//! make them accessible to systems.
//!
//! We want to define events along with their types and make them available
//! to systems.
//!
//! We want to define systems that operate on lists of components, are
//! triggered by other systems through events.
//!
//! # Implementation
//!
//! For each component type there will be a list that is a tuple of an
//! entity ID and the component values. There will also be a map from
//! entity IDs to component list indexes.
//!
//! A system will consist of state, iterators over components its subscribed
//! to and any number of functions that are triggered by events.
//!
//! # Syntax
//!
//! ```
//! component! { Physics, body: physics.RigidBody, physics_id: physics.ID }
//!
//! // event! { GameStarted } // This one is implicitly defined
//! event! { PhysicsTick, dt: u64 }
//! event! { Bump, e1: EntityID, e2: EntityID }
//!
//! system! { PhysicsSystem,
//!
//! state! { world: physics.World }
//!
//! on! { GameStarted, {
//! state.world = physics.World::new(event.name);
//! state.world.on_collision = |e1, e2| {
//! unwrap_entity = |e| { e.user_data.downcast_ref<EntityID>() }
//! trigger! { Bump, unwrap_entity(e1), unwrap_entity(e2) }
//! };
//! }}
//!
//! on! { PhysicsTick, {
//! state.world.step(event.dt);
//! }}
//!
//! component_added! { Physics, {
//! let id = state.world.add_body(component.body);
//! component.physics_id = id;
//! }}
//!
//! component_removed! { Physics, {
//! state.world.remove_body(component.physics_id);
//! }}
//!
//! }
//!
//! system! { BumpSystem, {
//! on! { Bump, {
//! println!("Entity {:?} bumped into entity {:?}!", e1, e2);
//! }}
//! }}
//! ```
#[macro_use]
extern crate lazy_static;
extern crate shared_mutex;
extern crate uuid;
#[macro_use]
pub mod helpers;
#[macro_use]
pub mod components;
pub mod entities;
#[macro_use]
pub mod events;
#[macro_use]
pub mod systems;
use std::thread;
pub use std::time::{ Duration, Instant };
// Declares the built-in `tick` event carrying the fixed timestep.
event!{ tick, step: super::Duration }
/// Runs the engine loop on the spawned ticker thread and blocks the
/// caller by joining it (the ticker loop never returns).
pub fn run(ticks_per_second: u32) {
    let events_thread = ticker(ticks_per_second, true);
    let _ = events_thread.join();
}
/// Spawns a thread that fires `tick` roughly `ticks_per_second` times
/// per second and pumps the event queues in between. With
/// `sleep == false` the loop busy-waits instead of napping.
pub fn ticker(ticks_per_second: u32, sleep: bool) -> thread::JoinHandle<()> {
    let step = Duration::from_secs(1) / ticks_per_second;
    let mut last_tick = Instant::now();
    thread::spawn(move || {
        loop {
            let current_time = Instant::now();
            let next_tick = last_tick + step;
            if next_tick > current_time {
                if sleep {
                    // Coarse 1 ms naps until the next tick is due.
                    thread::sleep(Duration::from_millis(1));
                }
            } else {
                tick::trigger(step);
                events::next_tick();
                // NOTE(review): resetting to `Instant::now()` (rather
                // than `next_tick`) lets tick-processing time drift the
                // schedule over time — confirm this is intended.
                last_tick = Instant::now();
                events::run_events();
            }
        }
    })
}
| true
|
0df274af63db097a1aec49b3b1eb91b8bb769eb9
|
Rust
|
yahaa/rust-up
|
/lists/src/second.rs
|
UTF-8
| 5,980
| 3.796875
| 4
|
[
"MIT"
] |
permissive
|
// in second.rs
// pub says we want people outside this module to be able to use List
/// A singly linked stack: `push`/`pop` operate on the head in O(1).
#[derive(Default)]
pub struct List<T> {
    head: Link<T>,
}

/// A link is just an optional owning pointer to the next node.
type Link<T> = Option<Box<Node<T>>>;

#[derive(Debug)]
struct Node<T> {
    elem: T,
    next: Link<T>,
}

impl<T> List<T> {
    /// Creates an empty list.
    pub fn new() -> Self {
        List { head: None }
    }

    /// Pushes `elem` onto the front of the list.
    pub fn push(&mut self, elem: T) {
        // `take` moves the current head out and leaves `None` behind,
        // so `self` is never left partially moved.
        let old_head = self.head.take();
        self.head = Some(Box::new(Node {
            elem,
            next: old_head,
        }));
    }

    /// Removes and returns the front element, or `None` when empty.
    pub fn pop(&mut self) -> Option<T> {
        match self.head.take() {
            None => None,
            Some(node) => {
                self.head = node.next;
                Some(node.elem)
            }
        }
    }

    /// Returns a shared reference to the front element, if any.
    pub fn peek(&self) -> Option<&T> {
        match self.head.as_ref() {
            None => None,
            Some(node) => Some(&node.elem),
        }
    }

    /// Returns a mutable reference to the front element, if any.
    pub fn peek_mut(&mut self) -> Option<&mut T> {
        match self.head.as_mut() {
            None => None,
            Some(node) => Some(&mut node.elem),
        }
    }

    /// Consumes the list, yielding its elements front-to-back by value.
    pub fn into_iter(self) -> IntoIter<T> {
        IntoIter(self)
    }

    /// Iterates over shared references, front-to-back.
    pub fn iter(&self) -> Iter<T> {
        // `as_deref` turns `&Option<Box<Node<T>>>` into `Option<&Node<T>>`.
        Iter {
            next: self.head.as_deref(),
        }
    }

    /// Iterates over mutable references, front-to-back.
    pub fn iter_mut(&mut self) -> IterMut<T> {
        IterMut {
            next: self.head.as_deref_mut(),
        }
    }
}

/// Shared-reference iterator over a [`List`].
pub struct Iter<'a, T> {
    next: Option<&'a Node<T>>,
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<Self::Item> {
        // `Option<&Node<T>>` is `Copy`, so we can read it directly and
        // advance without `take`.
        let node = self.next?;
        self.next = node.next.as_deref();
        Some(&node.elem)
    }
}

/// By-value iterator; simply pops from the wrapped list.
pub struct IntoIter<T>(List<T>);

impl<T> Iterator for IntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        self.0.pop()
    }
}

/// Mutable-reference iterator over a [`List`].
pub struct IterMut<'a, T> {
    next: Option<&'a mut Node<T>>,
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    fn next(&mut self) -> Option<Self::Item> {
        // Mutable references are not `Copy`; `take` moves the borrow out.
        let node = self.next.take()?;
        self.next = node.next.as_deref_mut();
        Some(&mut node.elem)
    }
}

impl<T> Drop for List<T> {
    /// Iterative drop: unlinks nodes one at a time to avoid the deep
    /// recursive destructor chain a long list would otherwise trigger.
    fn drop(&mut self) {
        let mut link = self.head.take();
        while let Some(mut node) = link {
            link = node.next.take();
        }
    }
}
#[cfg(test)]
mod test {
    use super::List;
    // LIFO push/pop ordering, including interleaved operations.
    #[test]
    fn second_test() {
        let mut list = List::new();
        assert_eq!(list.pop(), None);
        list.push(1);
        list.push(2);
        list.push(3);
        assert_eq!(list.pop(), Some(3));
        assert_eq!(list.pop(), Some(2));
        list.push(4);
        list.push(5);
        assert_eq!(list.pop(), Some(5));
        assert_eq!(list.pop(), Some(4));
        assert_eq!(list.pop(), Some(1));
        assert_eq!(list.pop(), None);
    }
    // `peek`/`peek_mut` expose the head; a write through `peek_mut`
    // must be visible to later reads and pops.
    #[test]
    fn peek() {
        let mut list = List::new();
        assert_eq!(list.peek(), None);
        assert_eq!(list.peek_mut(), None);
        list.push(1);
        list.push(2);
        list.push(3);
        assert_eq!(list.peek(), Some(&3));
        assert_eq!(list.peek_mut(), Some(&mut 3));
        if let Some(value) = list.peek_mut() { *value = 42 }
        assert_eq!(list.peek(), Some(&42));
        assert_eq!(list.pop(), Some(42));
    }
    // By-value iteration consumes the list front-to-back.
    #[test]
    fn into_iter() {
        let mut list = List::new();
        list.push(1);
        list.push(2);
        list.push(3);
        let mut iter = list.into_iter();
        assert_eq!(iter.next(), Some(3));
        assert_eq!(iter.next(), Some(2));
        assert_eq!(iter.next(), Some(1));
        assert_eq!(iter.next(), None);
    }
    // Shared-reference iteration leaves the list intact.
    #[test]
    fn iter() {
        let mut list = List::new();
        list.push(1);
        list.push(2);
        list.push(3);
        let mut iter = list.iter();
        assert_eq!(iter.next(), Some(&3));
        assert_eq!(iter.next(), Some(&2));
        assert_eq!(iter.next(), Some(&1));
    }
    // Mutable-reference iteration yields each element exactly once.
    #[test]
    fn iter_mut() {
        let mut list = List::new();
        list.push(1);
        list.push(2);
        list.push(3);
        let mut iter = list.iter_mut();
        assert_eq!(iter.next(), Some(&mut 3));
        assert_eq!(iter.next(), Some(&mut 2));
        assert_eq!(iter.next(), Some(&mut 1));
    }
}
| true
|
197efc6689396b6493ce673743d0c49e403f1123
|
Rust
|
ANEP-Research/quickn-ps
|
/quickn/P10255.rs
|
UTF-8
| 5,066
| 2.578125
| 3
|
[] |
no_license
|
/*
date : 2020 / 5 / 5
author : quickn (quickn.ga)
email : quickwshell@gmail.com
*/
use std::io::{self, BufWriter, Write};
// Whitespace-token scanner used for fast competitive-programming I/O.
mod scanner {
    use std::{io, str};
    /* https://github.com/EbTech/rust-algorithms */
    /// Same API as Scanner but nearly twice as fast, using horribly unsafe dark arts
    /// **REQUIRES** Rust 1.34 or higher
    pub struct UnsafeScanner<R> {
        reader: R,
        buf_str: Vec<u8>,
        // The iterator actually borrows `buf_str`; the `'static`
        // lifetime is a lie upheld by only refilling `buf_str` once the
        // previous iterator is exhausted (see the transmute below).
        buf_iter: str::SplitAsciiWhitespace<'static>,
    }
    impl<R: io::BufRead> UnsafeScanner<R> {
        pub fn new(reader: R) -> Self {
            Self {
                reader,
                buf_str: Vec::new(),
                buf_iter: "".split_ascii_whitespace(),
            }
        }
        /// This function should be marked unsafe, but noone has time for that in a
        /// programming contest. Use at your own risk!
        pub fn token<T: str::FromStr>(&mut self) -> T {
            loop {
                if let Some(token) = self.buf_iter.next() {
                    return token.parse().ok().expect("Failed parse");
                }
                self.buf_str.clear();
                self.reader
                    .read_until(b'\n', &mut self.buf_str)
                    .expect("Failed read");
                // SAFETY: `buf_str` is only cleared/refilled above,
                // after the previous iterator has been fully drained,
                // so the transmuted iterator never observes a freed or
                // mutated buffer. The bytes are also assumed to be
                // valid UTF-8 (ASCII contest input).
                self.buf_iter = unsafe {
                    let slice = str::from_utf8_unchecked(&self.buf_str);
                    std::mem::transmute(slice.split_ascii_whitespace())
                }
            }
        }
    }
}
/// 2-D integer point/vector used by the segment-vs-rectangle test.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)]
struct Vec2 {
    x: i64,
    y: i64,
}

/// Orientation of one vector relative to another — the sign of the
/// 2-D cross product.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum CCWResult {
    Left,
    Right,
    Same,
}

impl Vec2 {
    fn new(x: i64, y: i64) -> Self {
        Vec2 { x, y }
    }

    /// Counter-clockwise test: classifies the sign of `self × other`.
    fn ccw(&self, other: Self) -> CCWResult {
        let cross = self.x * other.y - self.y * other.x;
        match cross.signum() {
            1 => CCWResult::Left,
            0 => CCWResult::Same,
            _ => CCWResult::Right,
        }
    }
}

use std::ops::{Add, Sub};
use std::collections::BTreeSet;

impl Add for Vec2 {
    type Output = Self;

    fn add(self, rhs: Self) -> Self {
        Vec2::new(self.x + rhs.x, self.y + rhs.y)
    }
}

impl Sub for Vec2 {
    type Output = Self;

    fn sub(self, rhs: Self) -> Self {
        Vec2::new(self.x - rhs.x, self.y - rhs.y)
    }
}

/// Axis-aligned rectangle stored as its lower-left (`ll`) and
/// upper-right (`rr`) corners.
#[derive(Clone, Copy, Debug)]
struct Rectangle {
    ll: Vec2,
    rr: Vec2,
}

impl Rectangle {
    fn new(x_min: i64, y_min: i64, x_max: i64, y_max: i64) -> Self {
        Rectangle {
            ll: Vec2::new(x_min, y_min),
            rr: Vec2::new(x_max, y_max),
        }
    }

    /// Height of the rectangle.
    fn h(&self) -> i64 {
        self.rr.y - self.ll.y
    }

    /// The corner directly above `ll`, i.e. `(ll.x, ll.y + h)`.
    fn lr(&self) -> Vec2 {
        self.ll + Vec2::new(0, self.h())
    }

    /// The corner directly below `rr`, i.e. `(rr.x, rr.y - h)`.
    fn rl(&self) -> Vec2 {
        self.rr - Vec2::new(0, self.h())
    }
}
use std::cmp::{min, max};
// For each test case, reads a rectangle and a segment, then counts how
// the segment interacts with the rectangle's four edges.
// NOTE(review): `sub_res` is never used; the `mut` on x3/y3/x4/y4 and
// `points` is unnecessary; t5..t8 duplicate t1..t4 by value and
// `std::mem::swap` would replace the manual clone-swap.
fn main() {
    let (stdin, stdout) = (io::stdin(), io::stdout());
    let (mut scan, mut sout) = (
        scanner::UnsafeScanner::new(stdin.lock()),
        BufWriter::new(stdout.lock()),
    );
    let t: usize = scan.token();
    for _case in 0..t {
        // Rectangle corners (x1,y1)-(x2,y2), segment (x3,y3)-(x4,y4).
        let (x1, y1, x2, y2, mut x3, mut y3, mut x4, mut y4) = (scan.token(), scan.token(), scan.token(), scan.token(), scan.token(), scan.token(), scan.token(), scan.token());
        let r = Rectangle::new(x1, y1, x2, y2);
        let (mut s1, mut s2) = (Vec2::new(x3, y3), Vec2::new(x4, y4));
        // Normalize the segment so s1 <= s2 lexicographically.
        if x3 > x4 {
            let s = s1.clone();
            s1 = s2;
            s2 = s;
        } else if x3 == x4 {
            if y3 > y4 {
                let s = s1.clone();
                s1 = s2;
                s2 = s;
            }
        }
        // The four rectangle corners in traversal order; consecutive
        // pairs form the edges.
        let mut points = [r.lr(), r.ll, r.rl(), r.rr];
        let mut res: usize = 0;
        let mut sub_res: usize = 0;
        // Corner hits are deduplicated via the set so a crossing at a
        // corner is counted once, not once per adjacent edge.
        let mut q: BTreeSet<Vec2> = BTreeSet::new();
        for i in 0..4 {
            let j = (i + 1)%4;
            let t1 = s1 - points[i];
            let t2 = s2 - points[i];
            let t3 = s1 - points[j];
            let t4 = s2 - points[j];
            let t5 = s1 - points[i];
            let t6 = s1 - points[j];
            let t7 = s2 - points[i];
            let t8 = s2 - points[j];
            let c1 = t1.ccw(t2);
            let c2 = t3.ccw(t4);
            let c3 = t5.ccw(t6);
            let c4 = t7.ccw(t8);
            // Proper/endpoint crossing of edge (i, j): the segment
            // endpoints straddle the edge and vice versa.
            if (c1 != c2) && (c3 != c4) {
                if c1 == CCWResult::Same {
                    q.insert(points[i]);
                } else if c2 == CCWResult::Same {
                    q.insert(points[j]);
                } else {
                    res += 1;
                }
            } else if c1 == CCWResult::Same && c2 == CCWResult::Same && c3 == CCWResult::Same && c4 == CCWResult::Same {
                // Segment collinear with the edge: treat as the
                // maximal answer and stop scanning.
                res = 4;
                break;
            }
        }
        if res != 4 {
            res += q.len();
        }
        writeln!(sout, "{}", res).ok();
    }
}
| true
|
79b1fdd4a28fc484b6e506a06309dcc881eac09f
|
Rust
|
dcreager/expression-problem-rust
|
/src/ch01c_sad_face.rs
|
UTF-8
| 4,194
| 3.359375
| 3
|
[
"Apache-2.0"
] |
permissive
|
// -*- coding: utf-8 -*-
// ------------------------------------------------------------------------------------------------
// Copyright © 2018-2019, Douglas Creager.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing permissions and
// limitations under the License.
// ------------------------------------------------------------------------------------------------
// We've defined the AST representation of our language in another module. Our goal is to add a
// new kind of term to the language — without editing or copying the original definitions.
//
// Note that we're not using a * import so that we can be clear whether we're referring to the new
// Expression, which includes the new kind of term, or the old one, which doesn't.
use crate::ch01a_before;
/// This is the closest we can get: we create a new Expression type, containing each of the new
/// kinds of term, and use a wrapper variant at the end to include all of the old terms without
/// duplicating their definitions.
pub enum Expression {
    /// We can now negate numbers, too!
    Negate(Box<Expression>),
    /// But we want to be able to use all of the existing terms without copying their definitions.
    Existing(ch01a_before::Expression),
}
/// We can wrap any old expression in our new AST type.
impl std::convert::From<ch01a_before::Expression> for Expression {
    fn from(wrapped: ch01a_before::Expression) -> Expression {
        Expression::Existing(wrapped)
    }
}
/// We can implement a new evaluate function that knows what to do with the new kind of term, but
/// which delegates to the existing function for all of the existing kinds of term.
impl Expression {
    pub fn evaluate(&self) -> i64 {
        match self {
            Expression::Negate(nested) => -nested.evaluate(),
            Expression::Existing(existing) => existing.evaluate(),
        }
    }
}
// We can use the existing smart constructors as-is because of how we used From.
// (They return the old Expression, which `From` silently lifts into
// the new one wherever an `Into`-style conversion is applied.)
pub use crate::ch01a_before::add;
pub use crate::ch01a_before::integer_literal;
pub use crate::ch01a_before::subtract;
// And then a smart constructor for the new term
pub fn negate(nested: Expression) -> Expression {
    Expression::Negate(Box::new(nested))
}
#[cfg(test)]
mod tests {
    use super::*;
    // All of the old smart constructors and evaluation rules Just Work:
    // (the explicit `: Expression` annotations drive the From-based
    // conversion from the old AST type into the new one)
    #[test]
    fn can_evaluate_integer_literal() {
        let one: Expression = integer_literal(1);
        assert_eq!(one.evaluate(), 1);
    }
    #[test]
    fn can_evaluate_add() {
        let add: Expression = add(integer_literal(1), integer_literal(2));
        assert_eq!(add.evaluate(), 3);
    }
    #[test]
    fn can_evaluate_subtract() {
        let subtract: Expression = subtract(integer_literal(1), integer_literal(2));
        assert_eq!(subtract.evaluate(), -1);
    }
    #[test]
    fn can_evaluate_nested() {
        let add: Expression = add(
            integer_literal(1),
            subtract(integer_literal(2), integer_literal(3)),
        );
        assert_eq!(add.evaluate(), 0);
    }
    // And so do the new ones:
    #[test]
    fn can_evaluate_negate() {
        let negate: Expression = negate(integer_literal(1));
        assert_eq!(negate.evaluate(), -1);
    }
    #[test]
    fn can_evaluate_nested_negate() {
        let negate: Expression = negate(subtract(integer_literal(2), integer_literal(3)));
        assert_eq!(negate.evaluate(), 1);
    }
    // But! We cannot put a negation inside of any of the old kinds of terms!
    // (kept intentionally as a compile-failure illustration of the
    // expression problem — do not delete)
    /*
    #[test]
    fn cannot_evaluate_negate_inside_add() {
        // This line won't compile!
        let add: Expression = add(integer_literal(1), negate(integer_literal(2)));
        assert_eq!(negate.evaluate(), -1);
    }
    */
}
| true
|
ec78b7377da68de9eb5648f4d13893535973f88a
|
Rust
|
ferrouswheel/rust-sparkline
|
/src/main.rs
|
UTF-8
| 4,581
| 2.8125
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
extern crate sparkline;
extern crate rustc_serialize;
extern crate docopt;
extern crate num;
use sparkline::*;
use std::io;
use std::io::BufRead;
use std::io::Write;
use std::fs::File;
use std::path::Path;
use docopt::Docopt;
const USAGE: &'static str = "
sparkr
Usage:
sparkr [--min=<min>] [--max=<max>] [--theme=<theme>] [--statline] [--gap=<gap>] [--out=<output>] [--file=<file>] [<values>...]
sparkr (-h | --help)
sparkr --version
Options:
-h --help Show this screen.
--version Show version.
--min=<min> Specify minimum value instead of calculating it.
--max=<max> Specify maximum value instead of calculating it.
--gap=<gap> Gap between symbols [default=1]
--statline Show a line of stats after the sparkline, on stderr.
--theme=<theme> What theme to use, 'colour', 'png', or 'classic' (default).
--out=<output> Destination for the sparkline, 'file', 'pipe', 'console' (default).
--file=<file> Filename for output. Implies --out=file. [default=sparkline.EXT]
<values> Just values.
";
#[derive(Debug, RustcDecodable)]
struct Args {
pub flag_min: Option<f64>,
pub flag_max: Option<f64>,
pub flag_gap: Option<usize>,
pub flag_theme: Option<String>,
pub flag_out: Option<types::OutputType>,
pub flag_file: Option<String>,
pub flag_statline: bool,
pub arg_values: Vec<f64>,
}
fn main() {
let mut args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
let mut good_numbers: Vec<_> = args.arg_values;
if good_numbers.len() == 0 {
let mut input_numbers : Vec<String> = vec![];
let stdin = io::stdin();
for line in stdin.lock().lines() {
match line {
Ok(l) => {
input_numbers.extend(
l.split_whitespace()
.filter(|x| !x.is_empty())
.map(|x| x.to_owned()));
},
Err(_) => {
break;
},
};
}
good_numbers = parse_numbers(&input_numbers);
}
let (min, max) = min_max_for_data(&good_numbers, args.flag_min, args.flag_max);
let theme_name : &str = match args.flag_theme {
Some(ref x) if x == "color" => "colour", // 'murica
Some(ref x) if x == "colour" => &*x,
Some(ref x) if x == "classic" => &*x,
Some(ref x) if x == "png" => &*x,
Some(ref x) => { println!("Unknown theme {} falling back to classic", x); "classic" },
_ => "classic",
};
let mut default_fn = "sparkline.".to_owned();
let mut sparky = select_sparkline(theme_name);
default_fn.push_str(&(sparky.file_ext().to_owned()));
//println!("theme name {}", sparky.name());
{
match sparky.validate_output_options(args.flag_out, &args.flag_file) {
false => {
panic!("Bad combination of output type and filename for {}", sparky.name())
},
_ => (),
};
let path = match args.flag_file {
Some(ref x) => {
// Filename specified on command line implies OutputType::File
args.flag_out = Some(types::OutputType::File);
Some(Path::new(x))
},
None => Some(Path::new(&*default_fn)),
};
let output_stream : Box<Write> = match args.flag_out {
Some(types::OutputType::File) => {
println!("Output filename is {:?}", path);
let p = path.unwrap();
Box::new(File::create(p).unwrap())
},
_ =>
Box::new(std::io::stdout())
};
sparky.start(min, max, args.flag_out, output_stream);
let gap_str : String = match args.flag_gap {
Some(x) => std::iter::repeat(" ").take(x).collect(),
None => " ".to_owned(),
};
let length = good_numbers.len();
for (i, num) in good_numbers.iter().enumerate() {
let s = sparky.spark(i, length, *num);
print!("{}", match s {
"" => "",
_ => &*gap_str,
})
}
sparky.end();
}
println!("");
if args.flag_statline {
use std::io::Write;
match writeln!(&mut std::io::stderr(), "min: {}, max: {}, range: {}", min, max, max-min) {
Ok(_) => {},
Err(x) => panic!("Unable to write to stderr: {}", x),
}
}
}
| true
|
ab17f13d48f360a651b93fd55c1cf3495dff3b05
|
Rust
|
blofroth/border
|
/border-py-gym-env/src/act_d.rs
|
UTF-8
| 2,888
| 2.78125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Discrete action for [`super::PyGymEnv`] and [`super::PyVecGymEnv`].
use crate::PyGymEnvActFilter;
use border_core::{
record::{Record, RecordValue},
Act,
};
use pyo3::{IntoPy, PyObject};
use std::default::Default;
use std::fmt::Debug;
/// Represents action.
#[derive(Clone, Debug)]
pub struct PyGymEnvDiscreteAct {
pub act: Vec<i32>,
}
impl PyGymEnvDiscreteAct {
/// Constructs a discrete action.
pub fn new(act: Vec<i32>) -> Self {
Self { act }
}
}
impl Act for PyGymEnvDiscreteAct {}
/// Raw filter for discrete actions.
///
/// No processing is applied to actions.
#[derive(Clone, Debug)]
pub struct PyGymEnvDiscreteActRawFilter {
/// `true` for filters on vectorized environments.
pub vectorized: bool,
}
impl PyGymEnvDiscreteActRawFilter {
/// Returns `true` for filters working with vectorized environments.
pub fn vectorized() -> Self {
Self { vectorized: true }
}
}
impl Default for PyGymEnvDiscreteActRawFilter {
fn default() -> Self {
Self { vectorized: false }
}
}
// TODO: support vecenv
impl PyGymEnvActFilter<PyGymEnvDiscreteAct> for PyGymEnvDiscreteActRawFilter {
fn filt(&mut self, act: PyGymEnvDiscreteAct) -> (PyObject, Record) {
let record = Record::from_slice(&[(
"act",
RecordValue::Array1(act.act.iter().map(|v| *v as f32).collect::<Vec<_>>()),
)]);
let act = if self.vectorized {
pyo3::Python::with_gil(|py| act.act.into_py(py))
} else {
pyo3::Python::with_gil(|py| act.act[0].into_py(py))
};
(act, record)
}
}
/// Defines newtypes of [PyGymEnvDiscreteAct] and [PyGymEnvDiscreteActRawFilter].
///
/// TODO: add example.
#[macro_export]
macro_rules! newtype_act_d {
($struct_:ident) => {
#[derive(Clone, Debug)]
struct $struct_(border_py_gym_env::PyGymEnvDiscreteAct);
impl $struct_ {
fn new(act: Vec<i32>) -> Self {
$struct_(border_py_gym_env::PyGymEnvDiscreteAct::new(act))
}
}
impl border_core::Act for $struct_ {}
};
($struct_:ident, $struct2_:ident) => {
newtype_act_d!($struct_);
struct $struct2_(border_py_gym_env::PyGymEnvDiscreteActRawFilter);
impl border_py_gym_env::PyGymEnvActFilter<$struct_> for $struct2_ {
fn filt(&mut self, act: $struct_) -> (pyo3::PyObject, border_core::record::Record) {
self.0.filt(act.0)
}
}
impl std::default::Default for $struct2_ {
fn default() -> Self {
Self(border_py_gym_env::PyGymEnvDiscreteActRawFilter::default())
}
}
impl $struct2_ {
pub fn vectorized() -> Self {
Self(border_py_gym_env::PyGymEnvDiscreteActRawFilter::vectorized())
}
}
};
}
| true
|
624515ff42514b41d7ef983cd26bc7b7295a248d
|
Rust
|
mfarzamalam/Rust
|
/chap10/p1/src/main.rs
|
UTF-8
| 2,032
| 3.640625
| 4
|
[] |
no_license
|
#[derive(Debug)]
struct aeroplane {
name:String,
engine:String,
country:String,
}
#[derive(Debug)]
struct point <T> {
x:T,
y:T,
}
// impl aeroplane {
// fn new(name:String , engine:String ,country:String ) -> aeroplane {
// aeroplane {
// name,
// engine,
// country,
// }
// }
// fn new_aero (&self) -> String {
// let new_aeroplane_01 = format!("Name : {} Engine : {} Country : {} ", self.name, self.engine, self.country);
// new_aeroplane_01
// }
pub trait information {
fn info (&self) -> String {
"No value".to_string()
}
}
impl information for aeroplane {
fn info(&self) -> String {
let first = format! ("{}" , self.name);
first
}
}
use std::fmt::Display;
fn main () {
let plane = aeroplane {
name: String::from("boing 747"),
engine: String::from("A quality"),
country: String::from("Pakistan"),
};
// println!("{:#?}",plane);
// println!("{:#?}",plane.new_aero());
// println!("{:#?}",plane.info());
// let mut a1 = String::from("B");
// let mut e1 = String::from("B");
// let mut c1 = String::from("B");
// let aeroplane1 = aeroplane::new(a1,e1,c1);
// let aeroplane2 = aeroplane::new(String::from("c"),String::from("c"),String::from("c"));
// println!("{:#?}",aeroplane2);
//////////////////////// Book code practice start ///////////////////////
check_pass_value(32);
check_pass_value("karachi");
let v = vec![1,2,3,5,8,123,2,5,8];
println!("{}",largest_number(&v));
let values = point {x:8 , y:8};
println!("{:#?}",values);
}
fn check_pass_value <T:Display> (x: T) {
println!("{}",x);
}
fn largest_number(num:&[i32]) -> i32 {
let mut largest = num[0];
for &item in num.iter(){
//1 > 2
if item > largest {
largest = item;
}
}
largest
}
| true
|
3e8eb1284bd59b44506f3c2a3c265a4700c1daae
|
Rust
|
dmitmel/sorting-visualization
|
/src/app.rs
|
UTF-8
| 6,436
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
//! The [`App`] struct and some constants.
use graphics::color::{BLACK, TRANSPARENT, WHITE};
use graphics::types::Color;
use opengl_graphics::{GlGraphics, GlyphCache};
use piston::input::*;
use std::thread;
use crate::algorithms::Algorithm;
use crate::array::Array;
use crate::state::*;
/// Color that is used to clear the window before [drawing](App::render).
pub const BACKGROUND_COLOR: Color = BLACK;
/// Color of rectangles that represent the array values.
pub const VALUE_COLOR: Color = WHITE;
/// Color of the values that have been recently accessed.
///
/// _See_ [`State.array_accesses`](State::array_accesses)
pub const ACCESSED_VALUE_COLOR: Color = [1.0, 0.0, 0.0, 1.0];
/// Time in seconds after which array accesses are removed.
///
/// _See_ [`State.array_accesses`](State::array_accesses)
pub const ACCESSED_VALUE_TIMEOUT: f64 = 0.25;
/// Font size of the status text in pixels.
pub const STATUS_TEXT_FONT_SIZE: u32 = 16;
/// Margins between the status text and window borders.
pub const STATUS_TEXT_MARGIN: f64 = 8.0;
/// Factor for increasing / decreasing speed of sorting array
pub const SPEED_FACTOR: f64 = 2.0;
/// This struct contains all [rendering](App::render), [updating](App::update)
/// and [input handling](App::button) logic.
#[derive(Debug)]
pub struct App {
state: SharedState,
algorithm_thread: thread::JoinHandle<()>,
}
impl App {
/// Creates a new app (with a state constructed from the given `array`) and
/// starts an algorithm thread. This function is called `init` instead of
/// `new` because it has side effects.
pub fn init(
algorithm: Box<dyn Algorithm + Send>,
array: Vec<u32>,
speed: f64,
) -> Self {
let array_len = array.len();
let colors = vec![TRANSPARENT; array_len];
let state = SharedState::new(State {
time: 0.0,
speed,
paused: true,
array,
colors,
array_accesses: vec![NO_ARRAY_ACCESS; array_len],
});
let algorithm_state = state.clone();
let algorithm_thread = thread::Builder::new()
.name("algorithm".to_string())
.spawn(move || {
let array = Array::new(algorithm_state);
array.wait(500);
algorithm.sort(array);
})
.unwrap();
Self {
state,
algorithm_thread,
}
}
/// Draws the current [state](State).
pub fn render(
&mut self,
args: RenderArgs,
gl: &mut GlGraphics,
glyphs: &mut GlyphCache<'_>,
) {
gl.draw(args.viewport(), |ctx, gl| {
use graphics::*;
clear(BACKGROUND_COLOR, gl);
// lock the state for the whole rendering cycle so that the algorithm
// thread doesn't change something while the main thread is doing
// rendering
let state = self.state.get();
// transform of the bottom left point of the status text
let status_text_transform = ctx.transform.trans(
STATUS_TEXT_MARGIN,
STATUS_TEXT_MARGIN + f64::from(STATUS_TEXT_FONT_SIZE),
);
let status_text = format!(
"paused = {}, speed = {}%",
state.paused,
state.speed * 100.0
);
// draw the status text
text::Text::new_color(WHITE, STATUS_TEXT_FONT_SIZE)
.draw(
&status_text,
glyphs,
&ctx.draw_state,
status_text_transform,
gl,
)
.unwrap();
let len = state.array.len();
let max_value = *state.array.iter().max().unwrap_or(&0);
// draws a rectangle with a given color which represents a value at a
// specified index
let mut draw_value = |index: usize, color: Color| {
let value = state.array[index];
let window_w: f64 = args.width;
let window_h: f64 = args.height;
let array_y =
STATUS_TEXT_MARGIN * 2.0 + f64::from(STATUS_TEXT_FONT_SIZE);
let array_h = window_h - array_y;
let w = window_w / (len as f64);
let h = f64::from(value) * array_h / f64::from(max_value);
let x = (index as f64) * w;
let y = window_h - h;
rectangle(color, [x, y, w, h], ctx.transform, gl);
};
// draw all values
for index in 0..len {
draw_value(index, VALUE_COLOR);
}
// draw array accesses
for (index, access_time) in state.array_accesses.iter().enumerate() {
if *access_time < 0.0 {
continue;
}
let mut color = ACCESSED_VALUE_COLOR;
let access_age = state.time - access_time;
// map age of this access to the [1.0, 0.0] interval of the alpha (transparency) component
// so that new accesses are opaque and old ones are transparent
let alpha = (1.0 - access_age / ACCESSED_VALUE_TIMEOUT) as f32;
color[color.len() - 1] = alpha;
draw_value(index, color);
}
// draw colored overlays (marks) for some values
for (index, color) in state.colors.iter().enumerate() {
draw_value(index, *color);
}
});
}
/// Advances the [state](State) by a given amount of [time](UpdateArgs::dt).
pub fn update(&mut self, args: UpdateArgs) {
let mut state = self.state.get();
if !state.paused {
state.time += args.dt * state.speed;
}
// time is copied (f64 implements the Copy trait) to a variable because it
// can't be accessed when array_accesses is borrowed mutably
let time = state.time;
for access_time in state.array_accesses.iter_mut() {
if time - *access_time > ACCESSED_VALUE_TIMEOUT {
*access_time = NO_ARRAY_ACCESS;
}
}
}
/// Handles user input and updates the [state](State).
///
/// # Controls
///
/// | Key | Action |
/// | ---------------------- | ------------ |
/// | <kbd>Space</kbd> | pause/resume |
/// | <kbd>↑</kbd> | 2x faster |
/// | <kbd>↓</kbd> | 2x slower |
pub fn button(&mut self, args: ButtonArgs) {
let mut state = self.state.get();
// import commonly used enum values in the current scope
use self::Button::Keyboard;
use self::ButtonState::Press;
match (args.button, args.state) {
(Keyboard(Key::Space), Press) => {
state.paused = !state.paused;
self.algorithm_thread.thread().unpark();
}
(Keyboard(Key::Up), Press) => state.speed *= SPEED_FACTOR,
(Keyboard(Key::Down), Press) => state.speed /= SPEED_FACTOR,
_ => {}
};
}
}
| true
|
cd872e372b5955e1e5f4b7c500a3b5abdb8db491
|
Rust
|
josnelihurt/rust-wasm
|
/src/lib.rs
|
UTF-8
| 4,642
| 2.5625
| 3
|
[
"Unlicense"
] |
permissive
|
extern crate wasm_bindgen;
use wasm_bindgen::prelude::*;
use web_sys::*;
use web_sys::WebGlRenderingContext as Gl;
#[macro_use]
extern crate lazy_static;
mod app_state;
mod common_fns;
mod entities;
mod gl_setup;
mod shaders;
mod webgl_prg;
use entities::Color;
#[wasm_bindgen]
extern {
fn alert(s: &str);
#[wasm_bindgen(js_namespace = console)]
fn log(s: &str);
}
#[wasm_bindgen]
pub fn greet(msg: &str) {
let alert_msg = "message => ".to_owned() + msg;
alert(&alert_msg);
}
#[wasm_bindgen]
#[derive(Debug, Copy, Clone)]
pub struct UpdateData {
pub time: f32,
pub width: f32,
pub height: f32,
pub triangle_color: Color,
pub gradient0: Color,
pub gradient1: Color,
pub gradient2: Color,
pub gradient3: Color,
pub gradient4: Color,
}
#[wasm_bindgen]
impl UpdateData {
#[wasm_bindgen(constructor)]
pub fn new() -> Self{
let defaultColor : Color = Color{R:0,G:0,B:0};
Self{
time: 0.,
width: 0.,
height: 0.,
triangle_color: defaultColor,
gradient0: defaultColor,
gradient1: defaultColor,
gradient2: defaultColor,
gradient3: defaultColor,
gradient4: defaultColor,
}
}
}
#[wasm_bindgen]
pub struct GlClient {
data: UpdateData,
prg_color_2d: webgl_prg::Color2D,
prg_color_2d_gradient: webgl_prg::Color2DGradient,
gl: WebGlRenderingContext,
}
#[wasm_bindgen]
impl GlClient{
#[wasm_bindgen(constructor)]
pub fn new(canvas_name : &str) -> Self{
log(format!("New GlClient for {}", canvas_name).as_str());
console_error_panic_hook::set_once();
let gl = gl_setup::init_webgl_ctx(canvas_name).unwrap();
Self {
data: UpdateData::new(),
prg_color_2d: webgl_prg::Color2D::new(&gl),
prg_color_2d_gradient: webgl_prg::Color2DGradient::new(&gl),
gl: gl,
}
}
pub fn update(&mut self, data: &UpdateData) -> Result<(), JsValue>{
// log(format!("{} update width {} height {}", time, width, height).as_str());
app_state::update_dynamic_data(data.time, data.width, data.height);
self.data = data.clone();
Ok(())
}
pub fn render(&self){
self.gl.clear(Gl::COLOR_BUFFER_BIT | Gl::COLOR_BUFFER_BIT);
let current_state = app_state::get_current_state();
self.prg_color_2d.render(
&self.gl,
current_state.control_bottom,
current_state.control_top,
current_state.control_left,
current_state.control_right,
current_state.canvas_width,
current_state.canvas_height,
self.data.triangle_color,
// Color::new(100,100,100),
);
let gradients: [&Color; 5] = [
&self.data.gradient0,
&self.data.gradient1,
&self.data.gradient2,
&self.data.gradient3,
&self.data.gradient4,
];
self.prg_color_2d_gradient.render(
&self.gl,
current_state.control_bottom,
current_state.control_top,
current_state.control_left,
current_state.control_right,
current_state.canvas_width,
current_state.canvas_height,
gradients,
);
}
pub fn render3d(&self){
self.gl.clear(Gl::COLOR_BUFFER_BIT | Gl::COLOR_BUFFER_BIT);
let current_state = app_state::get_current_state();
self.prg_color_2d.render(
&self.gl,
current_state.control_bottom,
current_state.control_top,
current_state.control_left,
current_state.control_right,
current_state.canvas_width,
current_state.canvas_height,
self.data.triangle_color,
// Color::new(100,100,100),
);
let gradients: [&Color; 5] = [
&self.data.gradient0,
&self.data.gradient1,
&self.data.gradient2,
&self.data.gradient3,
&self.data.gradient4,
];
self.prg_color_2d_gradient.render(
&self.gl,
current_state.control_bottom,
current_state.control_top,
current_state.control_left,
current_state.control_right,
current_state.canvas_width,
current_state.canvas_height,
gradients,
);
}
}
| true
|
1ab1847410250a802d8c09afacba8834fda09645
|
Rust
|
gfan8w/substrate
|
/advance/erc20/lib.rs
|
UTF-8
| 4,995
| 3.203125
| 3
|
[] |
no_license
|
#![cfg_attr(not(feature = "std"), no_std)]
use ink_lang as ink;
/// 一个简单的 ERC20 ink 合约
/// 合约操作地址: https://paritytech.github.io/canvas-ui/#/
#[ink::contract]
mod erc20 {
use ink_storage::{
collections::HashMap,
lazy::Lazy,
};
/// erc20的储存
#[ink(storage)]
pub struct Erc20 {
/// 货币总量
total_supply: Lazy<Balance>,
/// 每个账户余额
balances: HashMap<AccountId, Balance>,
/// 授权,可转账的数量
allowances: HashMap<(AccountId, AccountId), Balance>,
}
/// 转账的信息,包含一个来源账号,一个接收账号,和 转账额
#[ink(event)]
pub struct Transfer {
/// 来源账户,发起账号
#[ink(topic)]
from: Option<AccountId>,
/// 接收账号
to: Option<AccountId>,
/// 转账额
value: Balance,
}
/// 批准信息,包含发起账号,接收账号 和额度
#[ink(event)]
pub struct Approval {
#[ink(topic)]
owner: AccountId,
#[ink(topic)]
spender: AccountId,
value: Balance,
}
#[derive(Debug, PartialEq, Eq, scale::Encode, scale::Decode)]
#[cfg_attr(feature = "std", derive(scale_info::TypeInfo))]
pub enum Error {
/// 没有足够的余额
InsufficientBalance,
/// 没有足够的授权
InsufficientApproval,
}
// 包装一下Error
pub type Result<T> = core::result::Result<T,Error>;
impl Erc20 {
/// Constructor,构造,传入总量
#[ink(constructor)]
pub fn new(supply: Balance) -> Self {
let caller =Self::env().caller();
let mut balances = HashMap::new();
balances.insert(caller,supply);
Self::env().emit_event(Transfer{
from:None,
to: Some(caller),
value: supply,
});
Self {
total_supply: Lazy::new(supply),
balances,
allowances: HashMap::new(),
}
}
/// 获取总的发行量
#[ink(message)]
pub fn total_supply(&self) -> Balance {
ink_env::debug_println!("total_supply: {}",*self.total_supply);
*self.total_supply
}
/// 获取账号对应的余额
#[ink(message)]
pub fn banlance_of(&self, who: AccountId) -> Balance {
self.balances.get(&who).copied().unwrap_or(0)
}
/// 获取可转额度
#[ink(message)]
pub fn allowance(&self, owner: AccountId, spender: AccountId) -> Balance{
self.allowances.get(&(owner,spender)).copied().unwrap_or(0)
}
/// 转账
#[ink(message)]
pub fn transfer(&mut self, to: AccountId, value: Balance) -> Result<()> {
let from = self.env().caller(); // 等价于 Self::env().caller();
self.inner_transfer(from,to,value)?;
Ok(())
}
/// 审批账号的可转额度
#[ink(message)]
pub fn approve(&mut self, to: AccountId, value: Balance) -> Result<()> {
let owner = self.env().caller();
self.allowances.insert((owner,to),value);
self.env().emit_event(Approval{
owner,
spender:to,
value
});
Ok(())
}
/// 从from 账号转账给to
#[ink(message)]
pub fn transfer_from(&mut self,
from: AccountId,
to: AccountId,
value: Balance) -> Result<()> {
let caller = self.env().caller(); // 等价于 Self::env().caller();
let allownance = self.allowance(from,caller);
if allownance < value {
return Err(Error::InsufficientApproval);
}
self.inner_transfer(from,to,value)?;
self.allowances.insert((from,caller), allownance-value);
Ok(())
}
/// 内部转账函数逻辑
pub fn inner_transfer(&mut self, from: AccountId,
to: AccountId,
value: Balance
) -> Result<()> {
let from_balance =self.banlance_of(from);
if from_balance < value {
return Err(Error::InsufficientBalance);
}
self.balances.insert(from, from_balance-value);
ink_env::debug_println!("{:?} lost amount: {}",from,value);
let to_balance = self.banlance_of(to);
self.balances.insert(to,to_balance+value);
ink_env::debug_println!("{:?} get amount: {}",to,value);
self.env().emit_event(Transfer{
from: Some(from),
to: Some(to),
value
});
Ok(())
}
}
}
| true
|
71759371ebbd68af5b4eb6cc8979df4a1a51dd27
|
Rust
|
jonhoo/faktory-rs
|
/src/proto/mod.rs
|
UTF-8
| 8,798
| 2.78125
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::error::{self, Error};
use bufstream::BufStream;
use libc::getpid;
use std::io;
use std::io::prelude::*;
use std::net::TcpStream;
use url::Url;
pub(crate) const EXPECTED_PROTOCOL_VERSION: usize = 2;
mod single;
// commands that users can issue
pub use self::single::{Ack, Fail, Heartbeat, Info, Job, Push, QueueAction, QueueControl};
// responses that users can see
pub use self::single::Hi;
pub(crate) fn get_env_url() -> String {
use std::env;
let var = env::var("FAKTORY_PROVIDER").unwrap_or_else(|_| "FAKTORY_URL".to_string());
env::var(var).unwrap_or_else(|_| "tcp://localhost:7419".to_string())
}
pub(crate) fn host_from_url(url: &Url) -> String {
format!("{}:{}", url.host_str().unwrap(), url.port().unwrap_or(7419))
}
pub(crate) fn url_parse(url: &str) -> Result<Url, Error> {
let url = Url::parse(url).map_err(error::Connect::ParseUrl)?;
if url.scheme() != "tcp" {
return Err(error::Connect::BadScheme {
scheme: url.scheme().to_string(),
}
.into());
}
if url.host_str().is_none() || url.host_str().unwrap().is_empty() {
return Err(error::Connect::MissingHostname.into());
}
Ok(url)
}
/// A stream that can be re-established after failing.
pub trait Reconnect: Sized {
/// Re-establish the stream.
fn reconnect(&self) -> io::Result<Self>;
}
impl Reconnect for TcpStream {
fn reconnect(&self) -> io::Result<Self> {
TcpStream::connect(self.peer_addr().unwrap())
}
}
#[derive(Clone)]
pub(crate) struct ClientOptions {
/// Hostname to advertise to server.
/// Defaults to machine hostname.
pub(crate) hostname: Option<String>,
/// PID to advertise to server.
/// Defaults to process ID.
pub(crate) pid: Option<usize>,
/// Worker ID to advertise to server.
/// Defaults to a GUID.
pub(crate) wid: Option<String>,
/// Labels to advertise to server.
/// Defaults to ["rust"].
pub(crate) labels: Vec<String>,
/// Password to authenticate with
/// Defaults to None,
pub(crate) password: Option<String>,
is_producer: bool,
}
impl Default for ClientOptions {
fn default() -> Self {
ClientOptions {
hostname: None,
pid: None,
wid: None,
labels: vec!["rust".to_string()],
password: None,
is_producer: false,
}
}
}
pub(crate) struct Client<S: Read + Write> {
stream: BufStream<S>,
opts: ClientOptions,
}
impl<S> Client<S>
where
S: Read + Write + Reconnect,
{
pub(crate) fn connect_again(&self) -> Result<Self, Error> {
let s = self.stream.get_ref().reconnect()?;
Client::new(s, self.opts.clone())
}
pub fn reconnect(&mut self) -> Result<(), Error> {
let s = self.stream.get_ref().reconnect()?;
self.stream = BufStream::new(s);
self.init()
}
}
impl<S: Read + Write> Client<S> {
pub(crate) fn new(stream: S, opts: ClientOptions) -> Result<Client<S>, Error> {
let mut c = Client {
stream: BufStream::new(stream),
opts,
};
c.init()?;
Ok(c)
}
pub(crate) fn new_producer(stream: S, pwd: Option<String>) -> Result<Client<S>, Error> {
let opts = ClientOptions {
password: pwd,
is_producer: true,
..Default::default()
};
Self::new(stream, opts)
}
}
impl<S: Read + Write> Client<S> {
fn init(&mut self) -> Result<(), Error> {
let hi = single::read_hi(&mut self.stream)?;
if hi.version != EXPECTED_PROTOCOL_VERSION {
return Err(error::Connect::VersionMismatch {
ours: EXPECTED_PROTOCOL_VERSION,
theirs: hi.version,
}
.into());
}
// fill in any missing options, and remember them for re-connect
let mut hello = single::Hello::default();
if !self.opts.is_producer {
let hostname = self
.opts
.hostname
.clone()
.or_else(|| hostname::get().ok()?.into_string().ok())
.unwrap_or_else(|| "local".to_string());
self.opts.hostname = Some(hostname);
let pid = self
.opts
.pid
.unwrap_or_else(|| unsafe { getpid() } as usize);
self.opts.pid = Some(pid);
let wid = self.opts.wid.clone().unwrap_or_else(|| {
use rand::{thread_rng, Rng};
thread_rng()
.sample_iter(&rand::distributions::Alphanumeric)
.map(char::from)
.take(32)
.collect()
});
self.opts.wid = Some(wid);
hello.hostname = Some(self.opts.hostname.clone().unwrap());
hello.wid = Some(self.opts.wid.clone().unwrap());
hello.pid = Some(self.opts.pid.unwrap());
hello.labels = self.opts.labels.clone();
}
if hi.salt.is_some() {
if let Some(ref pwd) = self.opts.password {
hello.set_password(&hi, pwd);
} else {
return Err(error::Connect::AuthenticationNeeded.into());
}
}
single::write_command_and_await_ok(&mut self.stream, &hello)
}
}
impl<S: Read + Write> Drop for Client<S> {
fn drop(&mut self) {
single::write_command(&mut self.stream, &single::End).unwrap();
}
}
pub struct ReadToken<'a, S: Read + Write>(&'a mut Client<S>);
pub(crate) enum HeartbeatStatus {
Ok,
Terminate,
Quiet,
}
impl<S: Read + Write> Client<S> {
pub(crate) fn issue<FC: self::single::FaktoryCommand>(
&mut self,
c: &FC,
) -> Result<ReadToken<'_, S>, Error> {
single::write_command(&mut self.stream, c)?;
Ok(ReadToken(self))
}
pub(crate) fn heartbeat(&mut self) -> Result<HeartbeatStatus, Error> {
single::write_command(
&mut self.stream,
&Heartbeat::new(&**self.opts.wid.as_ref().unwrap()),
)?;
match single::read_json::<_, serde_json::Value>(&mut self.stream)? {
None => Ok(HeartbeatStatus::Ok),
Some(s) => match s
.as_object()
.and_then(|m| m.get("state"))
.and_then(|s| s.as_str())
{
Some("terminate") => Ok(HeartbeatStatus::Terminate),
Some("quiet") => Ok(HeartbeatStatus::Quiet),
_ => Err(error::Protocol::BadType {
expected: "heartbeat response",
received: format!("{}", s),
}
.into()),
},
}
}
pub(crate) fn fetch<Q>(&mut self, queues: &[Q]) -> Result<Option<Job>, Error>
where
Q: AsRef<str>,
{
self.issue(&single::Fetch::from(queues))?.read_json()
}
}
impl<'a, S: Read + Write> ReadToken<'a, S> {
pub(crate) fn await_ok(self) -> Result<(), Error> {
single::read_ok(&mut self.0.stream)
}
pub(crate) fn read_json<T>(self) -> Result<Option<T>, Error>
where
T: serde::de::DeserializeOwned,
{
single::read_json(&mut self.0.stream)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
// https://github.com/rust-lang/rust/pull/42219
//#[allow_fail]
#[ignore]
fn it_works() {
Client::new(
TcpStream::connect("localhost:7419").unwrap(),
ClientOptions::default(),
)
.unwrap();
}
#[test]
fn correct_env_parsing() {
use std::env;
if env::var_os("FAKTORY_URL").is_some() {
eprintln!("skipping test to avoid messing with user-set FAKTORY_URL");
return;
}
assert_eq!(get_env_url(), "tcp://localhost:7419");
env::set_var("FAKTORY_URL", "tcp://example.com:7500");
assert_eq!(get_env_url(), "tcp://example.com:7500");
env::set_var("FAKTORY_PROVIDER", "URL");
env::set_var("URL", "tcp://example.com:7501");
assert_eq!(get_env_url(), "tcp://example.com:7501");
}
#[test]
fn url_port_default() {
use url::Url;
let url = Url::parse("tcp://example.com").unwrap();
assert_eq!(host_from_url(&url), "example.com:7419");
}
#[test]
fn url_requires_tcp() {
url_parse("foobar").unwrap_err();
}
#[test]
fn url_requires_host() {
url_parse("tcp://:7419").unwrap_err();
}
#[test]
fn url_doesnt_require_port() {
url_parse("tcp://example.com").unwrap();
}
#[test]
fn url_can_take_password_and_port() {
url_parse("tcp://:foobar@example.com:7419").unwrap();
}
}
| true
|
ea60347bc8b52e45e5765fd423ecbf456913ce98
|
Rust
|
proshunsuke/colmsg
|
/src/bin/colmsg/clap_app.rs
|
UTF-8
| 4,631
| 2.796875
| 3
|
[
"MIT"
] |
permissive
|
use clap::{App as ClapApp, Arg, AppSettings};
pub fn build_app() -> ClapApp<'static, 'static> {
ClapApp::new(crate_name!())
.version(crate_version!())
.global_setting(AppSettings::ColoredHelp)
.about(
"A CLI tool for '櫻坂46メッセージ', '日向坂46メッセージ', '乃木坂46メッセージ', and '齋藤飛鳥メッセージ' app.\n\n\
Use '--help' instead of '-h' to see a more detailed version of the help text.",
)
.long_about("A CLI tool for saving messages of '櫻坂46メッセージ', '日向坂46メッセージ', '乃木坂46メッセージ', and '齋藤飛鳥メッセージ' app locally.")
.arg(
Arg::with_name("group")
.long("group")
.short("g")
.multiple(true)
.possible_values(&["sakurazaka", "hinatazaka", "nogizaka", "asukasaito"])
.help("Save messages of specific group.")
.long_help("Save messages of specific group.
If not specified, save messages both of groups")
.takes_value(true),
)
.arg(
Arg::with_name("name")
.long("name")
.short("n")
.help("Save messages of specific members (菅井友香, 佐々木久美, 秋元真夏..)")
.long_help("Save messages of specific members (菅井友香, 佐々木久美, 秋元真夏..)
Name must be a valid full name of kanji.
If not specified, save messages of all members.
e.g. -n 菅井友香 -n 佐々木久美 -n 秋元真夏.")
.multiple(true)
.takes_value(true),
)
.arg(
Arg::with_name("from")
.long("from")
.short("F")
.help("Save messages after the specific date.")
.long_help("Save messages after the specific date.
Date format is %Y/%m/%d %H:%M:%S
e.g. -F '2020/01/01 00:00:00'")
.takes_value(true),
)
.arg(
Arg::with_name("kind")
.long("kind")
.short("k")
.multiple(true)
.possible_values(&["text", "picture", "video", "voice"])
.help("Save specific kind of messages.")
.long_help("Save specific kind of messages.
If not specified, save all kinds of messages.
e.g. -k text -k image")
.takes_value(true),
)
.arg(
Arg::with_name("dir")
.long("dir")
.short("d")
.help("Set the download directory.")
.long_help("Set the download directory.
Use '--download-dir' to confirm the default directory.")
.takes_value(true),
)
.arg(
Arg::with_name("s_refresh_token")
.long("s_refresh_token")
.help("Set the sakurazaka refresh token.")
.long_help("Set the sakurazaka refresh token.")
.takes_value(true),
)
.arg(
Arg::with_name("h_refresh_token")
.long("h_refresh_token")
.help("Set the hinatazaka refresh token.")
.long_help("Set the hinatazaka refresh token.")
.takes_value(true),
)
.arg(
Arg::with_name("n_refresh_token")
.long("n_refresh_token")
.help("Set the nogizaka refresh token.")
.long_help("Set the nogizaka refresh token.")
.takes_value(true),
)
.arg(
Arg::with_name("a_refresh_token")
.long("a_refresh_token")
.help("Set the asukasaito refresh token.")
.long_help("Set the asukasaito refresh token.")
.takes_value(true),
)
.arg(
Arg::with_name("delete")
.long("delete")
.help("Delete all saved messages.")
.long_help("Delete all saved messages.
If you execute command with this option, all saved messages are deleted from your disk.
Please use be careful."),
)
.arg(
Arg::with_name("config-dir")
.long("config-dir")
.help("Show colmsg's default configuration directory.")
)
.arg(
Arg::with_name("download-dir")
.long("download-dir")
.help("Show colmsg's default download directory.")
)
.help_message("Print this help message.")
.version_message("Show version information.")
}
| true
|
ce4df2c43f92657250e9fd4bb18c34437fe1aa73
|
Rust
|
10allday-kai/api-daemon
|
/third-party/hawk/src/lib.rs
|
UTF-8
| 6,098
| 3.171875
| 3
|
[
"MPL-2.0",
"Apache-2.0"
] |
permissive
|
//! The `hawk` crate provides support for [Hawk](https://github.com/hueniverse/hawk)
//! authentictation. It is a low-level crate, used by higher-level crates to integrate with various
//! Rust HTTP libraries. For example `hyper-hawk` integrates Hawk with Hyper.
//!
//! # Examples
//!
//! ## Hawk Client
//!
//! A client can attach a Hawk Authorization header to requests by providing credentials to a
//! Request instance, which will generate the header.
//!
//! ```
//! use hawk::{RequestBuilder, Credentials, Key, SHA256, PayloadHasher};
//! use std::time::{Duration, UNIX_EPOCH};
//!
//! fn main() {
//! // provide the Hawk id and key
//! let credentials = Credentials {
//! id: "test-client".to_string(),
//! key: Key::new(vec![99u8; 32], SHA256).unwrap(),
//! };
//!
//! let payload_hash = PayloadHasher::hash("text/plain", SHA256, "request-body").unwrap();
//!
//! // provide the details of the request to be authorized
//! let request = RequestBuilder::new("POST", "example.com", 80, "/v1/users")
//! .hash(&payload_hash[..])
//! .request();
//!
//! // Get the resulting header, including the calculated MAC; this involves a random
//! // nonce, so the MAC will be different on every request.
//! let header = request.make_header(&credentials).unwrap();
//!
//! // the header would the be attached to the request
//! assert_eq!(header.id.unwrap(), "test-client");
//! assert_eq!(header.mac.unwrap().len(), 32);
//! assert_eq!(header.hash.unwrap().len(), 32);
//! }
//! ```
//!
//! A client that wishes to use a bewit (URL parameter) can do so as follows:
//!
//! ```
//! use hawk::{RequestBuilder, Credentials, Key, SHA256, Bewit};
//! use std::time::Duration;
//! use std::borrow::Cow;
//!
//! let credentials = Credentials {
//! id: "me".to_string(),
//! key: Key::new("tok", SHA256).unwrap(),
//! };
//!
//! let client_req = RequestBuilder::new("GET", "mysite.com", 443, "/resource").request();
//! let client_bewit = client_req
//! .make_bewit_with_ttl(&credentials, Duration::from_secs(10))
//! .unwrap();
//! let request_path = format!("/resource?bewit={}", client_bewit.to_str());
//! // .. make the request
//! ```
//!
//! ## Hawk Server
//!
//! To act as a server, parse the Hawk Authorization header from the request, generate a new
//! Request instance, and use the request to validate the header.
//!
//! ```
//! use hawk::{RequestBuilder, Header, Key, SHA256};
//! use hawk::mac::Mac;
//! use std::time::{Duration, UNIX_EPOCH};
//!
//! fn main() {
//! let mac = Mac::from(vec![7, 22, 226, 240, 84, 78, 49, 75, 115, 144, 70,
//! 106, 102, 134, 144, 128, 225, 239, 95, 132, 202,
//! 154, 213, 118, 19, 63, 183, 108, 215, 134, 118, 115]);
//! // get the header (usually from the received request; constructed directly here)
//! let hdr = Header::new(Some("dh37fgj492je"),
//! Some(UNIX_EPOCH + Duration::new(1353832234, 0)),
//! Some("j4h3g2"),
//! Some(mac),
//! Some("my-ext-value"),
//! Some(vec![1, 2, 3, 4]),
//! Some("my-app"),
//! Some("my-dlg")).unwrap();
//!
//! // build a request object based on what we know
//! let hash = vec![1, 2, 3, 4];
//! let request = RequestBuilder::new("GET", "localhost", 443, "/resource")
//! .hash(&hash[..])
//! .request();
//!
//! let key = Key::new(vec![99u8; 32], SHA256).unwrap();
//! let one_week_in_secs = 7 * 24 * 60 * 60;
//! if !request.validate_header(&hdr, &key, Duration::from_secs(5200 * one_week_in_secs)) {
//! panic!("header validation failed. Is it 2117 already?");
//! }
//! }
//! ```
//!
//! A server which validates bewits looks like this:
//!
//! ```
//! use hawk::{RequestBuilder, Credentials, Key, SHA256, Bewit};
//! use std::time::Duration;
//! use std::borrow::Cow;
//!
//! let credentials = Credentials {
//! id: "me".to_string(),
//! key: Key::new("tok", SHA256).unwrap(),
//! };
//!
//! // simulate the client generation of a bewit
//! let client_req = RequestBuilder::new("GET", "mysite.com", 443, "/resource").request();
//! let client_bewit = client_req
//! .make_bewit_with_ttl(&credentials, Duration::from_secs(10))
//! .unwrap();
//! let request_path = format!("/resource?bewit={}", client_bewit.to_str());
//!
//! let mut maybe_bewit = None;
//! let server_req = RequestBuilder::new("GET", "mysite.com", 443, &request_path)
//! .extract_bewit(&mut maybe_bewit).unwrap()
//! .request();
//! let bewit = maybe_bewit.unwrap();
//! assert_eq!(bewit.id(), "me");
//! assert!(server_req.validate_bewit(&bewit, &credentials.key));
//! ```
//!
//! ## Features
//!
//! By default, the `use_ring` feature is enabled, which means that this crate will
//! use `ring` for all cryptographic operations.
//!
//! Alternatively, one can configure the crate with the `use_openssl`
//! feature to use the `openssl` crate.
//!
//! If no features are enabled, you must provide a custom implementation of the
//! [`hawk::crypto::Cryptographer`] trait to the `set_cryptographer` function, or
//! the cryptographic operations will panic.
//!
//! Attempting to configure both the `use_ring` and `use_openssl` features will
//! result in a build error.
// pretty_assertions is only pulled in for the test suite, giving diff-style
// assertion failure output.
#[cfg(test)]
#[macro_use]
extern crate pretty_assertions;
// Internal modules; their primary types are re-exported flat at the crate
// root so users can write `hawk::Header` etc.
mod header;
pub use crate::header::Header;
mod credentials;
pub use crate::credentials::{Credentials, DigestAlgorithm, Key};
mod request;
pub use crate::request::{Request, RequestBuilder};
mod response;
pub use crate::response::{Response, ResponseBuilder};
mod error;
pub use crate::error::*;
mod payload;
pub use crate::payload::PayloadHasher;
mod bewit;
pub use crate::bewit::Bewit;
pub mod mac;
pub mod crypto;
// Convenience shorthands for the supported digest algorithms.
pub const SHA256: DigestAlgorithm = DigestAlgorithm::Sha256;
pub const SHA384: DigestAlgorithm = DigestAlgorithm::Sha384;
pub const SHA512: DigestAlgorithm = DigestAlgorithm::Sha512;
| true
|
c19c6fd3b74469612354b118f052e9e6b5f3b076
|
Rust
|
pronitdas/wasm_geo_agg
|
/wasm/src/ramp.rs
|
UTF-8
| 1,159
| 2.9375
| 3
|
[
"Apache-2.0"
] |
permissive
|
pub fn ramp(agg: Vec<f32>, log: bool, rgb:bool)-> Vec<u8>{
let mut max = -std::f32::INFINITY;
let mut min = std::f32::INFINITY;
let mut result = Vec::new();
let mut min = 0.0;
for val in agg.iter(){
if *val > max{
max = *val;
}
if *val < min{
min = *val
}
}
if log{
if max > 0.0{
max = max.log10();
}
else{
max = 0.0;
}
if min >0.0{
min = min.log10();
}
else{
min = 0.0;
}
}
let mut count_non_zero = 0;
for val in agg.into_iter(){
if val > 0.0 {
count_non_zero = count_non_zero + 1;
}
let mut mval = val;
if log {
if mval > 0.0 {
mval = mval.log10()
}
else{
mval=0.0
}
}
let r_val = (mval - min)*255.0/(max-min);
//let r_val = 0 as u8;
result.push(r_val as u8);
if rgb {
result.push(r_val as u8);
result.push(r_val as u8);
result.push(255 as u8);
}
}
result
}
| true
|
2c995c6add656a8134dfa076b5496c0521a13067
|
Rust
|
Hsodergren/unpackrars
|
/src/output.rs
|
UTF-8
| 3,577
| 3.078125
| 3
|
[] |
no_license
|
use self::Output::{Done, New, Progress, Visit};
use log::*;
use ncurses;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::mpsc;
use std::sync::mpsc::Sender;
use std::thread;
// Progress/status events emitted by the unrar worker.
#[derive(Debug)]
pub enum Output {
    // A path is being scanned for archives.
    Visit(PathBuf),
    // Extraction of the archive at `path` started; `id` keys later events.
    New { path: PathBuf, id: usize },
    // Extraction identified by `id` finished.
    Done { id: usize },
    // Extraction progress for `id`, in percent.
    Progress { id: usize, procent: u8 },
}
impl From<Output> for RealOutput {
    fn from(o: Output) -> RealOutput {
        RealOutput::Output(o)
    }
}
// Channel protocol: either a real event, or a shutdown request that tells
// the consumer thread to stop.
pub enum RealOutput {
    Exit,
    Output(Output),
}
// Sink for Output events; implementations decide how to present them.
pub trait HandleOutput {
    fn handle(&mut self, o: Output);
}
pub struct Data {
    // Boxed trait object so the concrete handler (log/stdout/ncurses)
    // can be chosen at runtime.
    pub output: Box<HandleOutput + Send>,
}
impl HandleOutput for Data {
    fn handle(&mut self, o: Output) {
        self.output.handle(o);
    }
}
// Spawns a consumer thread that forwards channel events to `handler` until
// RealOutput::Exit arrives (or the channel closes when all senders drop).
// Returns the sending half and the thread's join handle for clean shutdown.
pub fn handle_output(mut handler: Data) -> (Sender<RealOutput>, thread::JoinHandle<()>) {
    let (sender, receiver) = mpsc::channel();
    let handle = thread::spawn(move || {
        for output in receiver {
            match output {
                RealOutput::Exit => break,
                RealOutput::Output(o) => handler.handle(o),
            }
        }
    });
    (sender, handle)
}
// Handler that reports events through the `log` crate (env_logger backend).
pub struct LogHandler {
    // In-flight extraction ids -> archive paths, so Done events can
    // name the archive that just finished.
    working: HashMap<usize, PathBuf>,
}
impl LogHandler {
    pub(crate) fn new() -> LogHandler {
        // NOTE(review): initializes the global logger here; env_logger::init
        // panics if called twice, so only one LogHandler may be constructed
        // per process — confirm callers respect this.
        env_logger::init();
        LogHandler {
            working: HashMap::new(),
        }
    }
}
impl HandleOutput for LogHandler {
    fn handle(&mut self, o: Output) {
        match o {
            Visit(a) => trace!("visiting {:?}", a),
            New { path, id } => {
                info!("unraring {:?}", path);
                self.working.insert(id, path);
            }
            Done { id } => {
                let path = self.working.remove(&id);
                // unwrap: producer invariant — Done only follows a New with the same id.
                info!("done with {:?}", path.unwrap());
            }
            Progress { procent, .. } => info!("progress: {}%", procent),
        }
    }
}
/// Progress reporter that prints plain lines to standard output.
pub struct StdoutHandler {
    // Tracks archives currently being extracted, keyed by extraction id.
    working: HashMap<usize, PathBuf>,
}
impl StdoutHandler {
    /// Creates a handler with no extractions in flight.
    pub fn new() -> StdoutHandler {
        let working = HashMap::new();
        StdoutHandler { working }
    }
}
impl HandleOutput for StdoutHandler {
    fn handle(&mut self, o: Output) {
        match o {
            Visit(visited) => println!("visiting {:?}", visited),
            New { path, id } => {
                // Remember the path so the matching Done can report it.
                println!("unraring {:?}", path);
                self.working.insert(id, path);
            }
            Done { id } => {
                let finished = self.working.remove(&id);
                println!("done with {:?}", finished.unwrap());
            }
            Progress { procent, .. } => println!("progress: {}%", procent),
        }
    }
}
// Handler that renders progress with ncurses, one screen row per extraction.
pub struct FancyHandler {
    working: HashMap<usize, Info>,
}
// Bookkeeping for one in-flight extraction: archive path and screen row.
struct Info {
    path: PathBuf,
    row: usize,
}
impl FancyHandler {
    pub fn new() -> FancyHandler {
        // Enters ncurses mode; the terminal is restored in Drop via endwin().
        ncurses::initscr();
        FancyHandler {
            working: HashMap::new(),
        }
    }
    // Shows the currently visited path on the row below the active extractions.
    fn update_visit(&self, path: PathBuf) {
        self.update(self.working.len() as i32, path.to_str().unwrap());
    }
    // Rewrites screen row `line` with `text` (clears the old content first).
    fn update(&self, line: i32, text: &str) {
        ncurses::mv(line, 0);
        ncurses::clrtoeol();
        ncurses::addstr(text);
    }
}
impl HandleOutput for FancyHandler {
    fn handle(&mut self, o: Output) {
        match o {
            Visit(path) => self.update_visit(path),
            // TODO(review): New/Done/Progress are accepted but not rendered yet.
            New { .. } => {}
            Done { .. } => {}
            Progress { .. } => {}
        }
        ncurses::refresh();
    }
}
impl Drop for FancyHandler {
    fn drop(&mut self) {
        // Leave ncurses mode so the terminal is usable after exit.
        ncurses::endwin();
    }
}
| true
|
d36c2035b455406129b72349199eac24c1bb6261
|
Rust
|
celsworth/apachetop.rs
|
/src/stats.rs
|
UTF-8
| 2,034
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
use crate::prelude::*;
// Running totals for a set of requests: how many, and their summed byte size.
// i64 (not u64) because remove_request may transiently drive values negative
// if adds/removes are unbalanced.
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct Counters {
    pub requests: i64,
    pub bytes: i64,
}
impl Counters {
    // Zeroed counters.
    pub fn empty() -> Self {
        Self {
            requests: 0,
            bytes: 0,
        }
    }
    // Accounts one request entering the tracked window.
    pub fn add_request(&mut self, request: &Request) {
        self.requests += 1;
        self.bytes += request.size;
    }
    // Un-accounts one request leaving the tracked window.
    pub fn remove_request(&mut self, request: &Request) {
        self.requests -= 1;
        self.bytes -= request.size;
    }
}
/// Aggregated request statistics: one global tally plus a per-HTTP-status-class
/// breakdown (indices 1..=5 hold 1xx..=5xx; index 0 collects everything else).
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct Stats {
    pub global: Counters,
    // stats for 200-299 etc
    pub by_status_code: [Counters; 6],
}
impl Stats {
    /// Fresh, all-zero statistics.
    pub fn new() -> Self {
        Self {
            global: Counters::empty(),
            by_status_code: [
                Counters::empty(),
                Counters::empty(),
                Counters::empty(),
                Counters::empty(),
                Counters::empty(),
                Counters::empty(),
            ],
        }
    }
    /// Accounts a request in the global tally and its status-class bucket.
    pub fn add_request(&mut self, request: &Request) {
        self.global.add_request(request);
        self.bucket_for(request).add_request(request);
    }
    /// Removes a request from the global tally and its status-class bucket.
    pub fn remove_request(&mut self, request: &Request) {
        self.global.remove_request(request);
        self.bucket_for(request).remove_request(request);
    }
    /// Borrows the per-class bucket matching the request's status code.
    fn bucket_for(&mut self, request: &Request) -> &mut Counters {
        let idx = Self::status_code_stats_index_for_request(request);
        &mut self.by_status_code[idx]
    }
    fn status_code_stats_index_for_request(request: &Request) -> usize {
        match request.status_code {
            100..=199 => 1,
            200..=299 => 2,
            300..=399 => 3,
            400..=499 => 4,
            500..=599 => 5,
            _ => 0, // FIXME?
        }
    }
}
| true
|
b299ef843b6680ab8be280bb48a9926363a917af
|
Rust
|
Alex6323/Ictarus
|
/ictarus/src/convert/trits.rs
|
UTF-8
| 4,480
| 2.734375
| 3
|
[
"Apache-2.0"
] |
permissive
|
use crate::constants::*;
use crate::convert::bytes::TxBytes;
use crate::convert::luts::*;
use crate::convert::trytes::TxTrytes;
// A single balanced trit: -1, 0, or 1.
pub type Trit = i8;
// A 243-trit buffer (hash-sized).
pub type Trits243 = [i8; 243];
// All trits of a full transaction.
pub type TxTrits = [i8; TRANSACTION_SIZE_TRITS];
// Decodes a full transaction from the "2enc9" packing: every 2 bytes encode
// 3 trytes (9 trits). b0/8 and b1/8 select the first two trytes directly;
// the third tryte is reassembled from the low 3 bits of each byte.
pub fn from_tx_bytes_2enc9(bytes: &[u8]) -> TxTrits {
    let mut trits = [0_i8; TRANSACTION_SIZE_TRITS];
    for i in 0..TRANSACTION_SIZE_TRITS / 9 {
        let b0 = bytes[i * 2] as usize;
        let b1 = bytes[i * 2 + 1] as usize;
        let i9 = i * 9;
        trits[i9..i9 + 3].copy_from_slice(&TRYTE_TO_TRIT_TRIPLET[b0 / 8][..]);
        trits[(i9 + 3)..(i9 + 6)].copy_from_slice(&TRYTE_TO_TRIT_TRIPLET[b1 / 8]);
        trits[(i9 + 6)..(i9 + 9)].copy_from_slice(&TRYTE_TO_TRIT_TRIPLET[b0 % 8 + 8 * (b1 % 8)]);
    }
    trits
}
// Slice variant of the 2enc9 decoder: decodes `len` bytes starting at
// `offset` into 9 trits per byte pair. `len` must be even.
// NOTE(review): duplicates the loop body of from_tx_bytes_2enc9 — keep the
// two in sync if the packing ever changes.
pub fn from_bytes_2enc9(bytes: &[u8], offset: usize, len: usize) -> Vec<Trit> {
    assert!(len % 2 == 0);
    let mut trits = vec![0_i8; len / 2 * 9];
    for i in 0..(trits.len() / 9) {
        let pos = offset + 2 * i;
        let b0 = bytes[pos] as usize;
        let b1 = bytes[pos + 1] as usize;
        let i9 = i * 9;
        trits[i9..i9 + 3].copy_from_slice(&TRYTE_TO_TRIT_TRIPLET[b0 / 8][..]);
        trits[(i9 + 3)..(i9 + 6)].copy_from_slice(&TRYTE_TO_TRIT_TRIPLET[b1 / 8]);
        trits[(i9 + 6)..(i9 + 9)].copy_from_slice(&TRYTE_TO_TRIT_TRIPLET[b0 % 8 + 8 * (b1 % 8)]);
    }
    trits
}
// Converts a transaction-sized tryte string (A-Z, 9) to trits.
// Panics (assert) if the string contains non-tryte characters or has the
// wrong length.
pub fn from_tx_tryte_string(tryte_string: &str) -> TxTrits {
    assert!(IS_TRYTES.is_match(tryte_string));
    let bytes = tryte_string.as_bytes();
    assert_eq!(TRANSACTION_SIZE_TRYTES, bytes.len());
    let mut trits = [0i8; TRANSACTION_SIZE_TRITS];
    bytes.iter().enumerate().for_each(|(i, c)| {
        trits[(i * 3)..(i * 3) + 3]
            .copy_from_slice(&TRYTE_TO_TRIT_TRIPLET[*ASCII_TO_TRYTE.get(&c).unwrap()][..]);
    });
    trits
}
// Converts an arbitrary-length tryte string to trits (3 trits per tryte).
pub fn from_tryte_string(tryte_string: &str) -> Vec<Trit> {
    assert!(IS_TRYTES.is_match(tryte_string));
    let bytes = tryte_string.as_bytes();
    let mut trits = vec![0i8; tryte_string.len() * 3];
    bytes.iter().enumerate().for_each(|(i, c)| {
        trits[(i * 3)..(i * 3) + 3]
            .copy_from_slice(&TRYTE_TO_TRIT_TRIPLET[*ASCII_TO_TRYTE.get(&c).unwrap()][..]);
    });
    trits
}
// Converts transaction trytes (as ASCII bytes) to trits via lookup table.
pub fn from_tx_trytes(trytes: &TxTrytes) -> TxTrits {
    let mut trits = [0_i8; TRANSACTION_SIZE_TRITS];
    trytes.iter().enumerate().for_each(|(i, t)| {
        trits[(i * 3)..(i * 3 + 3)].copy_from_slice(&ASCII_TO_TRIT_TRIPLET.get(t).unwrap()[..]);
    });
    trits
}
// Slice variant of from_tx_trytes for arbitrary lengths.
pub fn from_trytes(trytes: &[u8]) -> Vec<Trit> {
    let mut trits = vec![0_i8; trytes.len() * 3];
    trytes.iter().enumerate().for_each(|(i, t)| {
        trits[(i * 3)..(i * 3 + 3)].copy_from_slice(&ASCII_TO_TRIT_TRIPLET.get(t).unwrap()[..]);
    });
    trits
}
// Unit tests and a reference implementation used for cross-checking.
#[cfg(test)]
mod tests {
    use super::*;
    use rand::prelude::*;
    #[test]
    fn test_from_trytes() {
        let trytes = "HELLO9WORLD";
        let trits = from_tryte_string(trytes);
        println!("{:?}", trits);
        // 11 trytes -> 33 trits (3 trits per tryte).
        assert_eq!(33, trits.len());
    }
    // TODO(review): this test body is commented out and asserts nothing.
    #[test]
    fn test_trits_from_trytes() {
        let trytes = "HELLO9WORLD";
        //let trits = trits_from_trytes(trytes);
    }
    /// Converts bytes representing trit quintuplets to its corresponding trit representation.
    /// Ported from Cfb's Java version. Just for testing purposes.
    fn from_tx_bytes_cfb(bytes: &[u8]) -> [i8; TRANSACTION_SIZE_TRITS] {
        // NOTE: we need to convert &[u8] to &[i8] for the following code to work
        let bytes = &bytes.iter().map(|u| *u as i8).collect::<Vec<i8>>()[0..bytes.len()];
        let mut result = [0_i8; TRANSACTION_SIZE_TRITS];
        let mut offset = 0_usize;
        let mut index: usize;
        let mut count: usize;
        for i in 0..bytes.len() {
            if offset >= TRANSACTION_SIZE_TRITS {
                break;
            }
            // Signed bytes are shifted into 0..243 before indexing the LUT.
            index = if bytes[i] < 0 {
                (bytes[i] as i32 + 243) as usize
            } else {
                bytes[i] as usize
            };
            // The final chunk may hold fewer than 5 trits.
            count = if (TRANSACTION_SIZE_TRITS - offset) < 5 {
                TRANSACTION_SIZE_TRITS - offset
            } else {
                5
            };
            result[offset..offset + count].copy_from_slice(&BYTE_TO_TRITS[index][0..count]);
            offset += 5;
        }
        // unnecessary
        while offset < result.len() {
            result[offset] = 0;
            offset += 1;
        }
        result
    }
}
| true
|
736e2ec21b92f1559724b57756ef1fe5f13b9a9f
|
Rust
|
enterprisey/EnterpriseyBot
|
/redirect-banners/src/main.rs
|
UTF-8
| 13,749
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
use std::{
collections::{HashMap, HashSet},
convert::TryInto,
error::Error,
fmt,
fs::{File, OpenOptions},
io::{Read, Write},
ops::Range,
path,
};
use chrono::{prelude::*, Duration};
use config;
use lazy_static::lazy_static;
use mediawiki::{
api::Api,
page::{Page, PageError},
title::Title,
};
use regex::Regex;
static ISO_8601_FMT: &str = "%Y-%m-%dT%H:%M:%SZ";
static SUMMARY: &str = "[[Wikipedia:Bots/Requests for approval/EnterpriseyBot 10|Bot]] removing the article class assessment";
lazy_static! {
static ref REGEX: Regex = Regex::new(r"(?xs) # enable comments, allow . to match \n
\{\{ # begin template
# capturing group 1: template name (should start with 'wikiproject', case-insensitive)
# note the | at the end
([Ww][Ii][Kk][Ii][Pp][Rr][Oo][Jj][Ee][Cc][Tt][^\|\}]*?)
# capturing groups 2-n are the values of the 'class' parameters
(?:
# a class parameter
(\|\s*class\s*=\s*([^\|\}]+?)\s*)
|
# maybe some other parameters
\|[^\|\}]+?
)+? # must have at least one class parameter
\}\}").expect("invalid regex");
static ref NOVELS_WIKIPROJECT_REGEX: Regex = Regex::new("(?i)NovelsWikiProject").expect("invalid regex");
}
/// Builds an owned String->String map from borrowed key/value pairs
/// (convenience for assembling MediaWiki API parameter sets).
fn make_map(params: &[(&str, &str)]) -> HashMap<String, String> {
    let mut map = HashMap::with_capacity(params.len());
    for &(key, value) in params {
        map.insert(key.to_string(), value.to_string());
    }
    map
}
/// Lightweight string-carrying error for bot-internal failures
/// (bad API responses, malformed titles, and the like).
#[derive(Debug)]
struct BotError(String);
impl BotError {
    /// Wraps any string-convertible message in a BotError.
    fn new(s: impl Into<String>) -> Self {
        Self(s.into())
    }
}
impl fmt::Display for BotError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "BotError({})", self.0)
    }
}
impl Error for BotError {}
/// Returns true if the page with the given title was a redirect one week ago.
///
/// Fetches the newest revision at-or-before one week ago and checks its
/// wikitext for the `#redirect` keyword (case-insensitive). Returns
/// Ok(false) if the page did not exist a week ago; errors if the page is
/// missing entirely or the API response is malformed.
fn check_redirect_age(api: &Api, title: &Title) -> Result<bool, Box<dyn Error>> {
    let one_week_ago = (Utc::now() - Duration::weeks(1)).format(ISO_8601_FMT).to_string();
    let title = title.full_pretty(api).ok_or(BotError::new("Bad title"))?;
    // rvstart + rvlimit=1 yields the single newest revision before the cutoff.
    let res = api.get_query_api_json(&make_map(&[
        ("action", "query"),
        ("prop", "revisions"),
        ("titles", &title),
        ("rvprop", "content"),
        ("rvslots", "main"),
        ("rvlimit", "1"),
        ("rvstart", &one_week_ago),
        ("formatversion", "2"),
    ]))?;
    let page = &res["query"]["pages"][0];
    if page["missing"].as_bool() == Some(true) {
        Err(Box::new(BotError::new(format!("missing page (check_redirect_age), title {}", title))))
    } else if page["revisions"].is_null() {
        Ok(false) // page just didn't exist a week ago
    } else {
        Ok(page["revisions"][0]["slots"]["main"]["content"].as_str()
            .ok_or(BotError::new(format!("bad API response (check_redirect_age): {:?}", res)))?
            .to_ascii_lowercase()
            .contains("#redirect"))
    }
}
/// Gets a list of all templates that redirect to the given set of templates.
/// The inputs should be WITH namespaces; the outputs will be WITHOUT namespaces.
///
/// Output names are lowercased and deduplicated per page (via a HashSet)
/// before being flattened into one Vec.
fn get_template_redirects(api: &Api, templates: Vec<String>) -> Result<Vec<String>, Box<dyn Error>> {
    // linkshere with lhshow=redirect restricted to the Template namespace (10).
    let res = api.get_query_api_json_all(&make_map(&[
        ("action", "query"),
        ("prop", "linkshere"),
        ("titles", &templates.join("|")),
        ("lhprop", "title"),
        ("lhnamespace", /* template */ "10"),
        ("lhshow", "redirect"),
        ("lhlimit", "max"),
        ("formatversion", "2"),
    ]))?;
    // Strip the "template:" prefix from every title and collect per-page sets,
    // short-circuiting on the first malformed page entry.
    res["query"]["pages"].as_array()
        .ok_or(BotError::new(format!("bad API response (get_template_redirects): {:?}", res)))?
        .iter()
        .map(|page| page["linkshere"].as_array()
            .ok_or(BotError::new(format!("bad API response (get_template_redirects): {:?}", res)))
            .map(|linkshere| linkshere
                .iter()
                .map(|val| val["title"].as_str().expect("not a string?")[("template:".len())..].to_ascii_lowercase())
                .collect::<HashSet<_>>().into_iter())).collect::<Result<Vec<_>, _>>()
        .map(|pages| pages.into_iter().flatten().collect())
        .map_err(|e| Box::new(e) as Box<dyn Error>)
}
/// Loads the last processed article title from the progress file.
///
/// Returns Ok(None) when no progress file exists yet (first run);
/// propagates I/O errors otherwise.
fn load_progress(filename: &str) -> Result<Option<String>, Box<dyn Error>> {
    if path::Path::new(filename).exists() {
        // read_to_string replaces the manual open + read_to_string dance.
        Ok(Some(std::fs::read_to_string(filename)?))
    } else {
        Ok(None)
    }
}
/// Persists the title of the last article handed to the editor, truncating
/// any previous contents of the progress file.
fn save_progress(filename: String, article: String) -> Result<(), Box<dyn Error>> {
    // fs::write creates-or-truncates in one call, matching the previous
    // OpenOptions(write/create/truncate) + write_all behavior.
    std::fs::write(filename, article.into_bytes())?;
    Ok(())
}
// Removes |class= parameters from WikiProject banner templates in talk-page
// wikitext, appending an HTML comment recording the former assessment after
// each edited template. Templates named in `banned_templates` (lowercase,
// without namespace) are left untouched. Returns the rewritten text.
pub fn process_text(mut text: String, banned_templates: &Vec<String>) -> String {
    // Edits are collected first and applied in reverse so earlier byte
    // offsets stay valid while later ones are modified.
    #[derive(Debug)]
    enum Edit { Insert(usize, String), Delete(Range<usize>) };
    use Edit::*;
    let mut edits: Vec<Edit> = Vec::new();
    let mut offset = 0;
    let mut locs = REGEX.capture_locations();
    while let Some(_) = REGEX.captures_read_at(&mut locs, &text, offset) {
        //println!("{} groups {}", line!(), (0..2 * locs.len()).map(|idx| locs.get(idx).map(|(start, end)| &text[start..end]).unwrap_or("None")).collect::<Vec<_>>().join(","));
        //println!("{} whole match {}", line!(), &text[locs.get(0).unwrap().0..locs.get(0).unwrap().1]);
        let mut template_name = (&text[locs.get(1).unwrap().0..locs.get(1).unwrap().1]).trim().to_string();
        // Increment the offset to ensure that we keep progressing through the string
        let new_offset = locs.get(1).unwrap().1;
        if new_offset > offset {
            offset = new_offset;
        } else {
            panic!("no progress being made!");
        }
        // Check if template is one of the banned templates
        template_name.make_ascii_lowercase();
        if banned_templates.iter().any(|b| b == &template_name) {
            //println!("banned; continuing");
            continue;
        }
        // Group 2 absent => the template carried no |class= parameter.
        if locs.get(2).is_none() {
            continue;
        }
        // Group 3 is the class value; skip templates with an empty class.
        if let Some((start, end)) = locs.get(3) {
            if (&text[start..end]).trim().is_empty() {
                continue;
            }
        }
        // Schedule edits deleting the class params
        let num_class_params = (locs.len() - 2) / 2;
        for class_param_idx in 0..num_class_params {
            let capturing_group_idx = class_param_idx * 2 + 2;
            edits.push(Delete(locs.get(capturing_group_idx).unwrap().0..locs.get(capturing_group_idx).unwrap().1));
        }
        // Schedule an edit inserting the former class param
        let former = format!("<!-- Formerly assessed as {} -->",
            (0..num_class_params)
                .map(|class_param_idx| {
                    let capturing_group_idx = class_param_idx * 2 + 3;
                    format!("{}{}", &text[locs.get(capturing_group_idx).unwrap().0..locs.get(capturing_group_idx).unwrap().1], "-class")
                })
                .collect::<Vec<String>>()
                .join(", "));
        let idx_of_template_end = locs.get(0).unwrap().1;
        edits.push(Insert(idx_of_template_end, former));
    }
    // Make edits
    for edit in edits.into_iter().rev() {
        match edit {
            Insert(idx, insert_text) => text.insert_str(idx, &insert_text),
            Delete(range) => text.replace_range(range, ""),
        }
    }
    text
}
/// Bot entry point: logs in, walks article-namespace redirects (resuming
/// from the progress file), rewrites their talk-page banner assessments via
/// process_text, saves progress, then commits at most `edits_per_session`
/// edits.
fn main() -> Result<(), Box<dyn Error>> {
    // Credentials and tuning come from settings.* plus APP_* env overrides.
    let mut config = config::Config::default();
    config
        .merge(config::File::with_name("settings"))?
        .merge(config::Environment::with_prefix("APP"))?;
    let username = config.get_str("username")?;
    let password = config.get_str("password")?;
    let num_edits_per_session: usize = config.get_int("edits_per_session")?.try_into()?;
    let progress_filename = config.get_str("progress_file")?;
    let mut api = Api::new("https://en.wikipedia.org/w/api.php")?;
    api.login(username, password)?;
    api.set_user_agent(format!("EnterpriseyBot/redirect-banners-rs/{} (https://en.wikipedia.org/wiki/User:EnterpriseyBot; apersonwiki@gmail.com)", env!("CARGO_PKG_VERSION")));
    let mut params = make_map(&[
        ("action", "query"),
        ("list", "allpages"),
        ("apnamespace", /* article */ "0"),
        ("apfilterredir", "redirects"),
        ("aplimit", "500"),
    ]);
    // Resume the alphabetical walk where the previous session stopped.
    if let Some(starting_title) = load_progress(&progress_filename)? {
        params.insert("apfrom".to_string(), starting_title.trim().to_string());
    }
    let base_banned_templates = config.get_array("banned_templates")?
        .into_iter().map(|val| val.into_str().map(|val| format!("Template:{}", val))).collect::<Result<_, _>>()?;
    let banned_templates = get_template_redirects(&api, base_banned_templates)?;
    let mut edit_list: Vec<(Title, String)> = Vec::new(); // (title, new text)
    // Bug fix: the source had a mojibake "¶ms" here (corrupted "&params"),
    // which does not compile.
    'main_loop: for each_result_set in api.get_query_api_json_limit_iter(&params, None) {
        let each_result_set = each_result_set?;
        let pages = each_result_set["query"]["allpages"].as_array()
            .ok_or(BotError::new(format!("bad API result: {:?}", each_result_set)))?;
        for each_page_obj in pages {
            let mut title = Title::new(each_page_obj["title"].as_str()
                .ok_or(BotError::new(format!("bad API result (title construction): {:?}", each_result_set)))?,
                /* article */ 0);
            // Only touch pages that were already redirects a week ago.
            if !check_redirect_age(&api, &title)? {
                continue;
            }
            title.toggle_talk();
            let page = Page::new(title);
            match page.text(&api) {
                Ok(text) => {
                    let text = NOVELS_WIKIPROJECT_REGEX.replace(&text, "WikiProject Novels").to_string();
                    let new_text = process_text(text.clone(), &banned_templates);
                    if new_text != text {
                        edit_list.push((page.title().clone(), new_text));
                        println!("WILL EDIT {:?}", page.title());
                        if edit_list.len() >= num_edits_per_session {
                            break 'main_loop;
                        }
                    }
                },
                Err(PageError::Missing(_)) => continue,
                Err(e) => return Err(Box::new(e)),
            };
        }
    }
    // Persist progress before editing so a crash mid-commit does not rescan.
    if let Some((title, _)) = edit_list.get(edit_list.len().saturating_sub(1)) {
        save_progress(progress_filename, title.pretty().to_string())?;
    }
    for (title, new_text) in edit_list.into_iter() {
        Page::new(title).edit_text(&mut api, new_text, SUMMARY)?;
    }
    Ok(())
}
// Unit tests for process_text; the multi-line string literals below embed
// raw newlines deliberately, mirroring real talk-page wikitext.
#[cfg(test)]
mod tests {
    use super::*;
    // Asserts that process_text leaves `text` completely unmodified.
    fn assert_unchanged(text: impl Into<String>, banned: &Vec<String>) {
        let text = text.into();
        assert_eq!(process_text(text.clone(), banned), text);
    }
    #[test]
    fn test_process_text() {
        assert_unchanged("{{WikiProject X|importance=}}", &vec![]);
        assert_unchanged("{{WikiProject X|class=|importance=}}", &vec![]);
        assert_unchanged("{{WikiProject X|class= |importance=}}", &vec![]);
        assert_eq!(process_text("{{Wikiproject Cars|class=Foo}}".to_string(), &vec![]),
            "{{Wikiproject Cars}}<!-- Formerly assessed as Foo-class -->");
        assert_eq!(process_text("{{Wikiproject Cars|a|class=Foo}}".to_string(), &vec![]),
            "{{Wikiproject Cars|a}}<!-- Formerly assessed as Foo-class -->");
        assert_eq!(process_text("{{Wikiproject Cars|class=Foo|a}}".to_string(), &vec![]),
            "{{Wikiproject Cars|a}}<!-- Formerly assessed as Foo-class -->");
        assert_eq!(process_text("{{Wikiproject Cars|a|class=Foo|b}}".to_string(), &vec![]),
            "{{Wikiproject Cars|a|b}}<!-- Formerly assessed as Foo-class -->");
    }
    #[test]
    fn test_process_text_more() {
        assert_unchanged("{{WikiProject Astronomy|object=yes|importance=|class=}}\n{{WikiProject Solar System|class=|importance=}}", &vec![]);
    }
    // Banner-shell wrappers must be skipped while their nested banners are edited.
    #[test]
    fn test_process_text_wpbs() {
        let wpbs = vec!["wikiproject banner shell".to_string()];
        assert_eq!(process_text("{{WikiProject banner shell|1=
{{WikiProject New York City |class=redirect |importance=NA}}
{{WikiProject Streetcars |NYPT=yes |class=NA |importance=NA}}
}}".to_string(), &wpbs),
            "{{WikiProject banner shell|1=
{{WikiProject New York City |importance=NA}}<!-- Formerly assessed as redirect-class -->
{{WikiProject Streetcars |NYPT=yes |importance=NA}}<!-- Formerly assessed as NA-class -->
}}");
        assert_eq!(process_text("{{Talk header}}
{{WikiProject banner shell|1=
{{WikiProject New York City |class=redirect |importance=NA}}
{{WikiProject Streetcars |NYPT=yes |class=NA |importance=NA}}
{{WikiProject Buses |NYPT=yes |class=redirect |importance=NA}}
}}
".to_string(), &wpbs),
            "{{Talk header}}
{{WikiProject banner shell|1=
{{WikiProject New York City |importance=NA}}<!-- Formerly assessed as redirect-class -->
{{WikiProject Streetcars |NYPT=yes |importance=NA}}<!-- Formerly assessed as NA-class -->
{{WikiProject Buses |NYPT=yes |importance=NA}}<!-- Formerly assessed as redirect-class -->
}}
");
        assert_eq!(process_text("{{Talk page of redirect}}
{{WikiProject banner shell |1=
{{WikiProject Film|class=Redirect|American=yes|Filmmaking=yes}}
{{WikiProject Michigan|class=Redirect|Detroit=yes}}
}}
".to_string(), &wpbs),
            "{{Talk page of redirect}}
{{WikiProject banner shell |1=
{{WikiProject Film|American=yes|Filmmaking=yes}}<!-- Formerly assessed as Redirect-class -->
{{WikiProject Michigan|Detroit=yes}}<!-- Formerly assessed as Redirect-class -->
}}
");
    }
}
| true
|
e74d1dc1c75eaf180e0d28b8348f34b433f8c11a
|
Rust
|
cbxgyh/Wq
|
/src/main.rs
|
UTF-8
| 3,404
| 2.953125
| 3
|
[] |
no_license
|
#![feature(proc_macro_hygiene)]
#![feature(decl_macro)]
mod config;
#[macro_use]
extern crate rocket;
use std::io;
use std::env;
use rocket::{Request, Handler, Route, Data, Catcher, Response};
use rocket::http::{Status, RawStr};
use rocket::response::{self, Responder, status::Custom};
use rocket::handler::Outcome;
use rocket::outcome::IntoOutcome;
use rocket::http::Method::*;
use std::fs::File;
// Entry point: builds the Rocket instance and blocks on launch.
fn main() {
    //rocket::ignite().launch();
    rocket().launch();
}
// Hand-rolled Rocket handler demonstrating the manual (non-macro) routing API.
#[derive(Clone)]
struct CustomHandler{
    data:&'static str
}
impl CustomHandler{
    // Builds the route list mounting this handler on GET /<id>,
    // capturing `data` to echo back on every request.
    fn new(data:&'static str)->Vec<Route>{
        vec![Route::new(Get,"/<id>",Self{data})]
    }
}
impl Handler for CustomHandler{
    fn handle<'a>(&self,req:&'a Request,data:Data)->Outcome<'a>{
        // Forward to the next matching route when <id> is missing or invalid.
        let id= req.get_param::<&RawStr>(0)
            .and_then(|res|res.ok())
            .or_forward(data)?;
        Outcome::from(req,format!("{}-{}",self.data,id))
    }
}
// Smoke-tests the development configuration profile.
#[test]
fn test_development_config() {
    config::test_config(rocket::config::Environment::Development);
}
// Assembles the Rocket instance from manually constructed routes and catchers.
fn rocket() -> rocket::Rocket{
    // Rank 1 wins over rank 2, so "/" always hits the forwarder first,
    // which then passes the request on to `hi`.
    let always_forward = Route::ranked(1,Get,"/",forward);
    let hello=Route::ranked(2,Get,"/",hi);
    let echo=Route::new(Get,"/echo/<str>",echo_url);
    let name= Route::new(Get,"/<name>",name);
    let post_upload=Route::new(Post,"/",upload);
    let get_upload= Route::new(Get,"/",get_upload);
    let not_found_catcher=Catcher::new(404,not_found_handler);
    rocket::ignite()
        .mount("/",vec![always_forward,hello,echo])
        .mount("/upload",vec![get_upload,post_upload])
        .mount("/hello",vec![name.clone()])
        .mount("/hi",vec![name])
        .mount("/custom",CustomHandler::new("some data"))
        .register(vec![not_found_catcher])
}
// Always forwards to the next matching route (demonstrates route ranking).
fn forward<'a>(_req:&'a Request,data:Data)->Outcome<'a>{
    Outcome::forward(data)
}
// Responds with a static greeting.
fn hi<'a>(req:&'a Request,_:Data)->Outcome<'a>{
    Outcome::from(req,"HELLO")
}
// Echoes the URL-decoded <str> segment; responds 400 if it is absent.
fn echo_url<'a>(req:&'a Request,data:Data)->Outcome<'a>{
    let param= req.get_param::<&RawStr>(1)
        .and_then(|res|res.ok())
        .into_outcome(Status::BadRequest)?;
    Outcome::from(req,RawStr::from_str(param).url_decode())
}
// Echoes the URL-decoded <name> segment, defaulting to "unnamed".
fn name<'a> (req:&'a Request,_:Data)->Outcome<'a>{
    let param = req.get_param::<&'a RawStr>(0)
        .and_then(|res|res.ok())
        .unwrap_or("unnamed".into());
    Outcome::from(req,RawStr::from_str(param).url_decode())
}
fn upload<'a>(req:&'a Request,data:Data)->Outcome<'a>{
if !req.content_type().map_or(false,|ct|ct.is_plain()) {
println!(" => content-type of upload must be text/plain ");
return Outcome::failure(Status::BadRequest);
}
let file = File::create(env::temp_dir().join("upload.txt"));
if let Ok(mut file)=file{
if let Ok(n)= io::copy(&mut data.open(),&mut file){
return Outcome::from(req,format!("ok{},bytes uploaded",n));
}
println!("=> fail copy");
Outcome::failure(Status::InternalServerError)
}else {
println!("=> cot not open file:{:?}",file.unwrap_err());
Outcome::failure(Status::InternalServerError)
}
}
fn get_upload<'a>(req:&'a Request,_:Data)->Outcome<'a>{
Outcome::from(req,File::open(env::temp_dir().join("unload.txt")).ok())
}
fn not_found_handler<'a>(req:&'a Request)->response::Result<'a>{
let res=Custom(Status::NotFound,format!("can not find:{}",req.uri()));
res.respond_to(req)
}
| true
|
2b642b9a6cb2f9bfe1854d41a60d51405d851951
|
Rust
|
guyoung/CaptfEncoder
|
/CaptfEncoder-V3/misc/src/net/port_scan.rs
|
UTF-8
| 1,504
| 2.6875
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use std::fmt::Write;
use super::internal::anyhow::Result;
use crate::MiscResult;
use super::internal::port_scan;
pub fn execute(input: &str, options: Option<HashMap<String, String>>) -> Result<MiscResult> {
let mut timeout:u64 = 50;
let mut thread:i32 = 100;
let mut all_ports = false;
if let Some(options) = options {
if let Some(val) = options.get("timeout") {
timeout = val.parse()?;
}
if let Some(val) = options.get("thread") {
thread = val.parse()?;
if thread > 512 {
thread = 512;
}
}
if let Some(val) = options.get("ports") {
if val == "All ports" {
all_ports = true;
}
}
}
let addr = input.parse()?;
let result = port_scan::port_scan(addr, timeout, thread, all_ports)?;
let mut out = String::new();
writeln!(&mut out, "Scan time {} seconds", result.scan_time.as_secs())?;
writeln!(&mut out, "Discover {} ports open", result.ports.len())?;
writeln!(&mut out, "")?;
for port_info in result.ports {
writeln!(
&mut out,
"{} {} {:?}",
result.ip_addr, port_info.port, port_info.status
)?;
}
let result = MiscResult {
successed: true,
val: out,
message: String::from(""),
};
Ok(result)
}
| true
|
d005da33894d26d563c8a1f473dc6432cbcd8de0
|
Rust
|
GlenDC/AdventOfCode
|
/2021/day12/src/main.rs
|
UTF-8
| 8,583
| 3.1875
| 3
|
[] |
no_license
|
use std::collections::HashMap;
fn main() {
println!("++++++++++++");
println!("PART#1");
println!("++++++++++++");
part1(TEST_INPUT);
println!("-----");
part1(INPUT);
println!("++++++++++++");
println!("PART#2");
println!("++++++++++++");
part2(TEST_INPUT);
println!("-----");
part2(INPUT);
}
fn part1(input: &str) {
// register all points & connections
let mut cave_map = CaveMap::new();
for line in input.split("\n") {
let mut it = line.split("-");
let a: Point = it.next().unwrap().into();
let b: Point = it.next().unwrap().into();
assert!(it.next().is_none());
cave_map.register_conn(a, b);
}
// println!("{:?}", cave_map);
let mut paths_wip = Vec::new();
let mut paths_successful = Vec::new();
// start from start & build up from there
for conn in cave_map.connections_from(&Point::Start).unwrap() {
let mut p = Path::new();
assert!(p.extend(conn));
paths_wip.push(p);
}
// fill all paths, until nothing is left as wip...
loop {
match paths_wip.pop() {
None => break,
Some(path) => match cave_map.connections_from(path.tail()) {
None => {
// path has dead end, dropping it
// println!("path dropped due dead end, path: {:?}", path);
continue;
}
Some(conns) => {
for conn in conns {
let mut path_cloned = path.clone();
match conn {
Point::End => {
assert!(path_cloned.extend(conn));
// println!("successful path: {:?}", path_cloned);
paths_successful.push(path_cloned);
continue;
}
_ => {
if path_cloned.extend(conn) {
paths_wip.push(path_cloned);
continue;
}
// drop path, path cannot be extended due to double crossing of small cave
// println!("path dropped due to double crossing small cave ({:?}), path: {:?}", conn, path_cloned);
}
}
}
}
},
}
}
println!("{}", paths_successful.len());
}
fn part2(input: &str) {
// register all points & connections
let mut cave_map = CaveMap::new();
for line in input.split("\n") {
let mut it = line.split("-");
let a: Point = it.next().unwrap().into();
let b: Point = it.next().unwrap().into();
assert!(it.next().is_none());
cave_map.register_conn(a, b);
}
// println!("{:?}", cave_map);
let mut paths_wip = Vec::new();
let mut paths_successful = Vec::new();
// start from start & build up from there
for conn in cave_map.connections_from(&Point::Start).unwrap() {
let mut p = Path::new_part2();
assert!(p.extend(conn));
paths_wip.push(p);
}
// fill all paths, until nothing is left as wip...
loop {
match paths_wip.pop() {
None => break,
Some(path) => match cave_map.connections_from(path.tail()) {
None => {
// path has dead end, dropping it
// println!("path dropped due dead end, path: {:?}", path);
continue;
}
Some(conns) => {
for conn in conns {
let mut path_cloned = path.clone();
match conn {
Point::End => {
assert!(path_cloned.extend(conn));
// println!("successful path: {:?}", path_cloned);
paths_successful.push(path_cloned);
continue;
}
_ => {
if path_cloned.extend(conn) {
paths_wip.push(path_cloned);
continue;
}
// drop path, path cannot be extended due to double crossing of small cave
// println!("path dropped due to double crossing small cave ({:?}), path: {:?}", conn, path_cloned);
}
}
}
}
},
}
}
println!("{}", paths_successful.len());
}
// NOTE: this can be made more memory-efficient by storing the id of a Small-/Big- Cave
// as an u16, by converting s=[b0,b1] as (b0<<5 | b1). However, for this simple toy challenge it is not worth the effort.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Point {
    Start,
    End,
    SmallCave(String),
    BigCave(String),
}
impl From<&str> for Point {
    /// Classifies a (trimmed) token: the literals "start"/"end" map to
    /// their dedicated variants; otherwise an all-lowercase ASCII name is a
    /// small cave and anything else is a big cave.
    fn from(s: &str) -> Point {
        let token = s.trim();
        if token == "start" {
            Point::Start
        } else if token == "end" {
            Point::End
        } else if token.chars().all(|c| c.is_ascii_lowercase()) {
            Point::SmallCave(token.to_string())
        } else {
            Point::BigCave(token.to_string())
        }
    }
}
#[derive(Debug)]
struct CaveMap {
    // Adjacency list; every edge is stored in both directions.
    connections: HashMap<Point, Vec<Point>>,
}
impl CaveMap {
    /// Creates an empty map.
    pub fn new() -> CaveMap {
        CaveMap {
            connections: HashMap::new(),
        }
    }
    /// Records an undirected edge between `a` and `b`.
    pub fn register_conn(&mut self, a: Point, b: Point) {
        self._add_connection(a.clone(), b.clone());
        self._add_connection(b, a);
    }
    // Adds one directed edge, ignoring duplicates.
    fn _add_connection(&mut self, a: Point, b: Point) {
        // entry().or_default() replaces the old or_insert(Vec::new());
        // Vec::contains replaces the manual iter().any() scan.
        let v = self.connections.entry(a).or_default();
        if !v.contains(&b) {
            v.push(b);
        }
    }
    /// Returns the neighbours of `p`, or `None` when `p` is unknown.
    pub fn connections_from(&self, p: &Point) -> Option<Vec<&Point>> {
        // .map() instead of the old .and_then(|v| Some(..)).
        self.connections.get(p).map(|v| v.iter().collect())
    }
}
// NOTE: this can be probably done without any cloning, by working with something like:
// ```
// struct PathNode<'a> {
// point: Point,
// parent: Option<&'a PathNode<'a>>,
// }
// ```
// To make that work without ever needing to clone however would probably want to use some kind of std::rc::Rc
// smart pointer to be able to work with references and at the same time still making sure the actual memory lives long
// enough without having to store it somewhere explicitly. Could be fun to do, but not my cup of tea for this exercise.
#[derive(Debug, Clone)]
struct Path {
    points: Vec<Point>,
    // Part-2 rule: one small cave may be entered a second time; the flag is
    // spent the first time that happens. Part-1 paths start with it false.
    allow_small_cave_double_visit: bool,
}
impl Path {
    /// Fresh path rooted at `start`, part-1 rules (small caves at most once).
    pub fn new() -> Path {
        Path {
            points: vec![Point::Start],
            allow_small_cave_double_visit: false,
        }
    }
    /// Fresh path rooted at `start`, part-2 rules (one small-cave revisit).
    pub fn new_part2() -> Path {
        Path {
            points: vec![Point::Start],
            allow_small_cave_double_visit: true,
        }
    }
    /// Tries to append `p`; returns `false` (leaving `points` untouched)
    /// when the visit rules forbid it. Big caves may always repeat; `Start`
    /// and `End` may appear only once.
    pub fn extend(&mut self, p: &Point) -> bool {
        match p {
            Point::BigCave(_) => (),
            Point::SmallCave(_) => {
                // Vec::contains replaces the manual iter().any() scans.
                if self.points.contains(p) {
                    if !self.allow_small_cave_double_visit {
                        return false;
                    }
                    // Consume the single allowed revisit.
                    self.allow_small_cave_double_visit = false;
                }
            }
            _ => {
                if self.points.contains(p) {
                    return false;
                }
            }
        }
        self.points.push(p.clone());
        true
    }
    /// The most recently visited point.
    pub fn tail(&self) -> &Point {
        // Every constructor seeds the path with Start, so this cannot fail.
        self.points.last().expect("a Path always contains at least Start")
    }
}
const TEST_INPUT: &'static str = "start-A
start-b
A-c
A-b
b-d
A-end
b-end";
const INPUT: &'static str = "fw-ll
end-dy
tx-fw
tx-tr
dy-jb
ZD-dy
dy-BL
dy-tr
dy-KX
KX-start
KX-tx
fw-ZD
tr-end
fw-jb
fw-yi
ZD-nr
start-fw
tx-ll
ll-jb
yi-jb
yi-ll
yi-start
ZD-end
ZD-jb
tx-ZD";
| true
|
c94824dc1cca4251a15ee69a6d080661babb0043
|
Rust
|
geom3trik/tooro-editor
|
/src/ui/elements/slider.rs
|
UTF-8
| 2,760
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
//! Slider control wrapped in a container with label and value display
use iced::{slider, Column, Container, HorizontalAlignment, Length, Row, Slider, Text};
use crate::messages::Message;
use crate::params::{MultiParameter, SoundParameter};
use crate::style;
/// Builds a labeled slider row for a sound (preset) parameter: a fixed-width
/// caption on the left, the slider in the middle, and the current value
/// (right-aligned) on the right.
pub fn slider_with_labels<'a>(
    label: &'a str,
    state: &'a mut slider::State,
    sound_param: SoundParameter,
    value: i32,
) -> Container<'a, Message> {
    let range = sound_param.get_range();
    let on_change = move |v| Message::SoundParameterChange(sound_param, v);
    let slider = Slider::new(state, range, value, on_change).style(style::Slider);

    let caption = Column::new()
        .push(
            Text::new(label)
                .size(style::PARAM_LABEL_TEXT_SIZE)
                .width(Length::Units(style::PARAM_LABEL_WIDTH)),
        )
        .padding([3, 0, 0, 0]);

    let readout = Column::new()
        .push(
            Text::new(format!("{}", value))
                .size(style::PARAM_LABEL_TEXT_SIZE)
                .horizontal_alignment(HorizontalAlignment::Right)
                .width(Length::Units(style::PARAM_VALUE_WIDTH)),
        )
        .padding([3, 0, 0, 5]);

    Container::new(Row::new().push(caption).push(slider).push(readout))
}
/// Builds a labeled slider row for a multi parameter; layout matches
/// `slider_with_labels` but emits `MultiParameterChange` messages.
pub fn multi_slider_with_labels<'a>(
    label: &'a str,
    state: &'a mut slider::State,
    multi_param: MultiParameter,
    value: i32,
) -> Container<'a, Message> {
    let range = multi_param.get_range();
    let on_change = move |v| Message::MultiParameterChange(multi_param, v);
    let slider = Slider::new(state, range, value, on_change).style(style::Slider);

    // Left caption column, fixed width so sliders align vertically.
    let caption = Column::new()
        .push(
            Text::new(label)
                .size(style::PARAM_LABEL_TEXT_SIZE)
                .width(Length::Units(style::PARAM_LABEL_WIDTH)),
        )
        .padding([3, 0, 0, 0]);

    // Right value column, right-aligned numeric readout.
    let readout = Column::new()
        .push(
            Text::new(format!("{}", value))
                .size(style::PARAM_LABEL_TEXT_SIZE)
                .horizontal_alignment(HorizontalAlignment::Right)
                .width(Length::Units(style::PARAM_VALUE_WIDTH)),
        )
        .padding([3, 0, 0, 5]);

    Container::new(Row::new().push(caption).push(slider).push(readout))
}
| true
|
fb334f68fa9045bc0f762b7dbccae5328dbd3970
|
Rust
|
rcdomigan/atl-rs
|
/src/parser.rs
|
UTF-8
| 8,043
| 3.21875
| 3
|
[] |
no_license
|
use std::error::Error;
use std::fmt;
use std::fmt::Display;
use std::io;
use std::io::Read;
use std::iter::Iterator;
use asts::NestAst;
use types::symbol;
use types::{Any, Ast};
static DELIMS: &'static [u8] = b" \t\n()\"';";
/// Errors produced while parsing s-expressions.
#[derive(Debug)]
pub enum ParseError {
    /// Input ended before a '(' was matched; payload is the line number.
    UnbalancedParens(usize),
    /// The underlying reader failed.
    IOError(io::Error),
}
use self::ParseError::UnbalancedParens;
impl Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
impl Error for ParseError {
    fn description(&self) -> &str {
        match self {
            UnbalancedParens(_line) => "Missing closing parenthesis",
            ParseError::IOError(err) => err.description(),
        }
    }
    fn cause(&self) -> Option<&dyn Error> {
        match self {
            // A parse error has no underlying cause. The previous code
            // returned `Some(self)` here, making the error its own cause and
            // sending error-chain walkers into an infinite loop.
            UnbalancedParens(_) => None,
            ParseError::IOError(ref err) => Some(err),
        }
    }
}
// Similar to asts::SorA, but takes ownership.
/// One top-level item produced by the parser: either a bare atom or a
/// complete parenthesised sub-expression.
#[derive(Debug)]
pub enum Parsed {
    Atom(Any),
    Subex(Ast),
}
/// Streaming s-expression reader over any `io::Read` source.
pub struct Parser<Reader: io::Read> {
    head: Option<u8>, // one-byte lookahead; None = end of stream
    input: io::Bytes<Reader>,
    line: usize, // current line, 1-based, used in error reports
}
impl<Reader: io::Read> Parser<Reader> {
    /// Wraps `reader` with an empty lookahead, starting at line 1.
    pub fn new(reader: Reader) -> Parser<Reader> {
        Parser {
            head: None,
            input: reader.bytes(),
            line: 1,
        }
    }
    /// Advances the one-byte lookahead. `head` becomes `None` at end of
    /// stream; read failures surface as `ParseError::IOError`.
    fn shift(&mut self) -> Result<(), ParseError> {
        match self.input.next() {
            Some(Ok(cc)) => {
                self.head = Some(cc);
                Ok(())
            }
            Some(Err(err)) => Err(ParseError::IOError(err)),
            None => {
                self.head = None;
                Ok(())
            }
        }
    }
    /// Consumes bytes through the next newline (or end of stream); used to
    /// skip `;` line comments.
    fn ignore_to_newline(&mut self) -> Result<(), ParseError> {
        loop {
            self.shift()?;
            match self.head {
                Some(b'\n') => {
                    self.line += 1;
                    return Ok(());
                }
                None => return Ok(()),
                Some(_) => (),
            }
        }
    }
    // Continue parsing a sub-expression (expecting that the opening '(' has
    // already been consumed), pushing items into `ast`. Returns Ok(()) once
    // the matching ')' is found, or UnbalancedParens if the stream ends
    // first. (The old comment claimed it returned Some/None.)
    fn subex(&mut self, mut ast: &mut Ast) -> Result<(), ParseError> {
        loop {
            self.shift()?;
            match self.head {
                Some(cc) => match cc {
                    b'(' => {
                        let nest = NestAst::new(&mut ast);
                        self.subex(nest.ast)?;
                    }
                    b')' => return Ok(()),
                    b'\n' => self.line += 1,
                    b' ' | b'\t' => continue,
                    b';' => self.ignore_to_newline()?,
                    _ => {
                        ast.push(self.atom()?);
                        // atom() stops on a delimiter, which is still in
                        // `head` and must be handled here.
                        match self.head {
                            None => return Err(UnbalancedParens(self.line)),
                            Some(b')') => return Ok(()),
                            Some(b'\n') => self.line += 1,
                            Some(b';') => self.ignore_to_newline()?,
                            // Nothing needs to be done for other delims
                            _ => continue,
                        }
                    }
                },
                None => return Err(UnbalancedParens(self.line)),
            }
        }
    }
    // Parse one atom starting from the current lookahead byte. Stops at the
    // first delimiter (left in `head`) or end of stream; integers become
    // Fixnum, anything else becomes a symbol.
    fn atom(&mut self) -> Result<Any, ParseError> {
        let mut scratch = String::new();
        let to_atom = |buff: &str| {
            if let Ok(num) = buff.parse() {
                Any::Fixnum(num)
            } else {
                symbol(&buff)
            }
        };
        loop {
            match self.head {
                Some(cc) => {
                    if DELIMS.contains(&cc) {
                        return Ok(to_atom(&scratch));
                    } else {
                        scratch.push(cc as char);
                    }
                }
                None => return Ok(to_atom(&scratch)),
            }
            self.shift()?;
        }
    }
    // Skip over comments and whitespace, leaving the first significant byte
    // (if any) in `head`.
    fn strip(&mut self) -> Result<(), ParseError> {
        loop {
            self.shift()?;
            match self.head {
                Some(cc) => match cc {
                    // Space and tab are plain whitespace; only a newline
                    // advances the line counter. (The previous version also
                    // bumped `line` for tabs, skewing reported line numbers.)
                    b' ' | b'\t' => continue,
                    b'\n' => {
                        self.line += 1;
                        continue;
                    }
                    b';' => {
                        self.ignore_to_newline()?;
                        continue;
                    }
                    _ => return Ok(()),
                },
                None => return Ok(()),
            }
        }
    }
}
impl<Reader: io::Read> Iterator for Parser<Reader> {
    type Item = Result<Parsed, ParseError>;
    /// Yields the next top-level form: strips leading whitespace/comments,
    /// then parses either a parenthesised sub-expression (on '(') or a
    /// single atom. Returns `None` at end of input.
    fn next(&mut self) -> Option<Result<Parsed, ParseError>> {
        match self.strip() {
            Ok(()) => self.head.map(|cc| match cc {
                b'(' => {
                    let mut ast = Ast::new();
                    let rval;
                    {
                        // NestAst borrows `ast`; the inner scope ends that
                        // borrow so `ast` can be moved into the result.
                        let mut nest = NestAst::new(&mut ast);
                        rval = self.subex(&mut nest.ast);
                    }
                    match rval {
                        Ok(_) => Ok(Parsed::Subex(ast)),
                        Err(err) => Err(err),
                    }
                }
                _ => self.atom().map(Parsed::Atom),
            }),
            Err(err) => Some(Err(err)),
        }
    }
}
/// Convenience: parses the first top-level form from any reader.
pub fn from_read<T: Read>(input: T) -> Option<Result<Parsed, ParseError>> {
    Parser::new(input).next()
}
/// Convenience: parses the first top-level form from a string slice.
pub fn from_str(input: &str) -> Option<Result<Parsed, ParseError>> {
    from_read(input.as_bytes())
}
#[cfg(test)]
mod tests {
    use parser::{from_str, ParseError::UnbalancedParens, Parsed, Parser};
    use types::{symbol, Any, Ast};
    /// Sub-expression parsing fills an Ast with atoms and nested markers.
    #[test]
    fn parse_ast() {
        {
            let mut ast = Ast::new();
            let mut parser = Parser::new("12 2)".as_bytes());
            parser.subex(&mut ast).unwrap();
            assert_eq!(ast, vec![Any::Fixnum(12), Any::Fixnum(2)]);
        }
        {
            let mut ast = Ast::new();
            let mut parser = Parser::new("1 (2 3) 4)".as_bytes());
            parser.subex(&mut ast).unwrap();
            assert_eq!(
                ast,
                vec![
                    Any::Fixnum(1),
                    Any::AstData(2),
                    Any::Fixnum(2),
                    Any::Fixnum(3),
                    Any::Fixnum(4),
                ]
            );
        }
    }
    /// A missing ')' is reported as UnbalancedParens with the line number.
    #[test]
    fn parse_unbalanced() {
        match from_str("(12 2") {
            None => panic!("Should have result"),
            Some(Ok(_)) => panic!("Should have an error"),
            Some(Err(err)) => match err {
                UnbalancedParens(line) => assert_eq!(1, line),
                // typo fixed: was "UnbalancedParen"
                _ => panic!("Should have been UnbalancedParens"),
            },
        }
    }
    #[test]
    fn parse_int() {
        if let Parsed::Subex(ast) = from_str("(1 (2 3) 4)").unwrap().unwrap() {
            assert_eq!(
                ast,
                vec![
                    Any::AstData(5),
                    Any::Fixnum(1),
                    Any::AstData(2),
                    Any::Fixnum(2),
                    Any::Fixnum(3),
                    Any::Fixnum(4),
                ]
            );
        } else {
            panic!("Expected Some ast!");
        }
    }
    #[test]
    fn parse_symbol() {
        if let Parsed::Atom(atom) = from_str("symbol").unwrap().unwrap() {
            assert_eq!(atom, symbol("symbol"));
        } else {
            // message fixed: was the typo "Expected Somem ast!" (and this
            // branch actually means we did not get an atom)
            panic!("Expected an atom!");
        }
    }
}
| true
|
2001b01fdd2d0915c0a18c40d89574836b10da5d
|
Rust
|
Andy-Python-Programmer/ion
|
/src/logger.rs
|
UTF-8
| 7,590
| 3.109375
| 3
|
[] |
no_license
|
use core::fmt;
use core::fmt::Write;
use font8x8::UnicodeFonts;
use spin::mutex::SpinMutex;
use spin::Once;
use bit_field::BitField;
/// Describes the layout and pixel format of a framebuffer.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct FrameBufferInfo {
    /// The width in pixels.
    pub horizontal_resolution: usize,
    /// The height in pixels.
    pub vertical_resolution: usize,
    /// The color format of each pixel.
    pub pixel_format: PixelFormat,
    /// The number of bits per pixel.
    ///
    /// NOTE(review): `Logger::write_pixel` multiplies byte offsets and slice
    /// lengths by this value as if it were *bytes* per pixel — confirm the
    /// units actually stored here despite the name.
    pub bits_per_pixel: usize,
    /// Number of pixels between the start of a line and the start of the next.
    ///
    /// Some framebuffers use additional padding at the end of a line, so this
    /// value might be larger than `horizontal_resolution`. It is
    /// therefore recommended to use this field for calculating the start address of a line.
    pub stride: usize,
}
/// Color format of pixels in the framebuffer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
#[repr(C)]
pub enum PixelFormat {
    /// One byte red, then one byte green, then one byte blue.
    ///
    /// Length might be larger than 3, check [`bytes_per_pixel`][FrameBufferInfo::bytes_per_pixel]
    /// for this.
    RGB,
    /// One byte blue, then one byte green, then one byte red.
    ///
    /// Length might be larger than 3, check [`bytes_per_pixel`][FrameBufferInfo::bytes_per_pixel]
    /// for this.
    BGR,
}
/// Packed 32-bit color value; byte-order interpretation happens in
/// `Logger::write_pixel`, which splits it into four little-endian bytes.
#[repr(transparent)]
#[derive(Debug, Copy, Clone)]
pub struct Color(u32);
impl Color {
    /// Wraps a raw packed color value (e.g. `0xFFFFFFFF` for full white).
    #[inline]
    pub fn new(hex: u32) -> Self {
        Self(hex)
    }
}
/// The global logger instance used for the `log` crate.
pub static LOGGER: Once<LockedLogger> = Once::new();
/// A [`Logger`] instance protected by a spinlock.
pub struct LockedLogger(SpinMutex<Logger>);
impl LockedLogger {
    /// Create a new instance that logs to the given framebuffer.
    #[inline]
    pub fn new(
        framebuffer: &'static mut [u8],
        backbuffer: &'static mut [u8],
        info: FrameBufferInfo,
    ) -> Self {
        Self(SpinMutex::new(Logger::new(framebuffer, backbuffer, info)))
    }
    /// Force-unlocks the logger to prevent a deadlock.
    ///
    /// ## Safety
    /// This method is not memory safe and should be only used when absolutely necessary.
    pub unsafe fn force_unlock(&self) {
        self.0.force_unlock()
    }
}
impl log::Log for LockedLogger {
    // All log levels are accepted; filtering is done via log::set_max_level.
    #[inline]
    fn enabled(&self, _metadata: &log::Metadata) -> bool {
        true
    }
    /// Writes "LEVEL: message" followed by a newline to the framebuffer.
    #[inline]
    fn log(&self, record: &log::Record) {
        let mut logger = self.0.lock();
        writeln!(logger, "{}: {}", record.level(), record.args()).unwrap();
    }
    // Nothing buffered at this layer; the explicit flush() free function
    // presents the backbuffer instead.
    #[inline]
    fn flush(&self) {}
}
/// Framebuffer text-console state. All drawing goes to `backbuffer`;
/// `flush` copies it into the visible `framebuffer`.
struct Logger {
    framebuffer: &'static mut [u8],
    backbuffer: &'static mut [u8],
    info: FrameBufferInfo,
    x_pos: usize, // cursor position, in pixels
    y_pos: usize, // cursor position, in pixels
    scroll_lock: bool, // when set, '\n' no longer advances y_pos
    fg: Color,
    bg: Color,
}
impl Logger {
    /// Creates a logger drawing white-on-black from the top-left corner.
    #[inline]
    fn new(
        framebuffer: &'static mut [u8],
        backbuffer: &'static mut [u8],
        info: FrameBufferInfo,
    ) -> Self {
        Self {
            framebuffer,
            backbuffer,
            info,
            x_pos: 0x00,
            y_pos: 0x00,
            scroll_lock: false,
            fg: Color::new(u32::MAX), // all bits set: white foreground
            bg: Color::new(u32::MIN), // all bits clear: black background
        }
    }
    /// Draws one character at the cursor, handling '\n'/'\r', horizontal
    /// wrapping, and clearing the whole screen once the bottom is reached.
    fn write_char(&mut self, c: char) {
        match c {
            '\n' => self.new_line(),
            '\r' => self.carriage_return(),
            _ => {
                if self.x_pos >= self.width() {
                    self.new_line();
                }
                // 16px is one text row; out of rows => wipe and restart at
                // the top rather than scroll.
                if self.y_pos >= (self.height() - 16) {
                    self.clear();
                }
                // Panics on glyphs missing from the 8x8 basic font.
                let rendered = font8x8::BASIC_FONTS
                    .get(c)
                    .expect("Character not found in basic font");
                self.write_rendered_char(rendered);
            }
        }
    }
    /// Blits an 8x8 glyph bitmap at the cursor and advances it 8px right.
    fn write_rendered_char(&mut self, rendered: [u8; 8]) {
        for (y, byte) in rendered.iter().enumerate() {
            // Bit N of each row byte selects pixel column N. The previous
            // `(0..8).enumerate()` produced two identical counters (x == bit),
            // so a single loop variable suffices.
            for x in 0..8 {
                let draw = *byte & (1 << x) == 0;
                self.write_pixel(
                    self.x_pos + x,
                    self.y_pos + y,
                    if draw { self.bg } else { self.fg },
                );
            }
        }
        self.x_pos += 8;
    }
    /// Writes one pixel into the backbuffer (call `flush` to present it).
    ///
    /// NOTE(review): `info.bits_per_pixel` is used here as *bytes* per pixel
    /// (byte-offset multiplier and slice length of a 4-byte color array) —
    /// confirm the field really carries bytes despite its name.
    fn write_pixel(&mut self, x: usize, y: usize, color: Color) {
        let pixel_offset = y * self.info.stride + x;
        // Split the packed u32 into little-endian component bytes.
        let color = [
            (color.0.get_bits(0..8) & 255) as u8,
            (color.0.get_bits(8..16) & 255) as u8,
            (color.0.get_bits(16..24) & 255) as u8,
            (color.0.get_bits(24..32) & 255) as u8,
        ];
        let bits_per_pixel = self.info.bits_per_pixel;
        let byte_offset = pixel_offset * bits_per_pixel;
        self.backbuffer[byte_offset..(byte_offset + bits_per_pixel)]
            .copy_from_slice(&color[..bits_per_pixel]);
    }
    /// Blanks the backbuffer and homes the cursor.
    #[inline]
    fn clear(&mut self) {
        self.x_pos = 0;
        self.y_pos = 0;
        self.backbuffer.fill(0x00)
    }
    #[inline]
    fn width(&self) -> usize {
        self.info.horizontal_resolution
    }
    #[inline]
    fn height(&self) -> usize {
        self.info.vertical_resolution
    }
    #[inline]
    fn carriage_return(&mut self) {
        self.x_pos = 0;
    }
    /// Moves to the next 16px text row unless scroll lock is engaged.
    #[inline]
    fn new_line(&mut self) {
        if !self.scroll_lock {
            self.y_pos += 16;
        }
        self.carriage_return();
    }
    /// Presents the backbuffer by copying it into the visible framebuffer.
    fn flush(&mut self) {
        // SAFETY: the two slices are distinct allocations, so the regions
        // cannot overlap, and exactly `framebuffer.len()` bytes are copied.
        // NOTE(review): nothing here checks that the backbuffer is at least
        // as large as the framebuffer — confirm the caller guarantees it.
        unsafe {
            self.backbuffer
                .as_ptr()
                .copy_to_nonoverlapping(self.framebuffer.as_mut_ptr(), self.framebuffer.len());
        }
    }
}
impl fmt::Write for Logger {
    /// Renders every character of `s`. Never returns `Err`: unknown glyphs
    /// panic inside `write_char` instead of surfacing as format errors.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        for c in s.chars() {
            self.write_char(c)
        }
        Ok(())
    }
}
/// This function is responsible for initializing the global logger
/// instance.
///
/// Panics if a global `log` logger has already been registered.
pub fn init(framebuffer: &'static mut [u8], backbuffer: &'static mut [u8], info: FrameBufferInfo) {
    let logger = LOGGER.call_once(move || LockedLogger::new(framebuffer, backbuffer, info));
    log::set_logger(logger).expect("Logger already set");
    log::set_max_level(log::LevelFilter::Trace);
}
/// Prints to the framebuffer console; silently does nothing before `init`.
#[macro_export]
macro_rules! print {
    ($($arg:tt)*) => ($crate::logger::_print(format_args!($($arg)*)));
}
/// Like `print!` but appends a newline.
#[macro_export]
macro_rules! println {
    // NOTE(review): expands through `$crate::prelude::print!` while `print!`
    // itself is exported at the crate root — confirm the prelude re-exports it.
    () => ($crate::prelude::print!("\n"));
    ($($arg:tt)*) => ($crate::prelude::print!("{}\n", format_args!($($arg)*)));
}
/// This function is responsible for clearing the screen.
/// No-op when the logger has not been initialized yet.
pub fn clear() {
    // `if let` instead of Option::map used purely for a side effect.
    if let Some(l) = LOGGER.get() {
        l.0.lock().clear();
    }
}
/// Moves the text cursor to the given pixel position.
/// No-op when the logger has not been initialized yet.
pub fn set_cursor_pos(x: usize, y: usize) {
    if let Some(l) = LOGGER.get() {
        // Acquire the spinlock once for both fields; the old code locked and
        // unlocked twice, which was wasteful and briefly exposed a state
        // with only x_pos updated.
        let mut logger = l.0.lock();
        logger.x_pos = x;
        logger.y_pos = y;
    }
}
/// Runs `f` with the foreground color temporarily set to `color`, restoring
/// the previous color afterwards. The lock is released while `f` runs so
/// that `f` itself may print.
pub fn with_fg<F>(color: Color, f: F)
where
    F: FnOnce(),
{
    if let Some(l) = LOGGER.get() {
        // Swap in the new color inside a scope so the guard drops before f().
        let old = {
            let mut logger = l.0.lock();
            let previous = logger.fg;
            logger.fg = color;
            previous
        };
        f();
        l.0.lock().fg = old;
    }
}
/// Copies the backbuffer to the visible framebuffer.
/// No-op when the logger has not been initialized yet.
pub fn flush() {
    // `if let` instead of Option::map used purely for a side effect.
    if let Some(l) = LOGGER.get() {
        l.0.lock().flush();
    }
}
/// Returns the vertical resolution in pixels.
///
/// Panics when the logger has not been initialized yet.
pub fn display_height() -> usize {
    LOGGER
        .get()
        .map(|l| l.0.lock().height())
        .expect("logger not initialized") // was a bare unwrap()
}
/// Enables/disables scroll lock ('\n' stops advancing the cursor row).
/// No-op when the logger has not been initialized yet.
pub fn set_scroll_lock(lock: bool) {
    // `if let` instead of Option::map used purely for a side effect.
    if let Some(l) = LOGGER.get() {
        l.0.lock().scroll_lock = lock;
    }
}
/// Backend for the `print!`/`println!` macros.
/// No-op when the logger has not been initialized yet.
#[doc(hidden)]
pub fn _print(args: fmt::Arguments) {
    if let Some(l) = LOGGER.get() {
        // Formatting errors are deliberately ignored — there is nowhere to
        // report them from inside the logger itself.
        let _ = l.0.lock().write_fmt(args);
    }
}
| true
|
5ebd3eedc2be660233e2b539885b0335a68be2e4
|
Rust
|
holochain-open-dev/minimal-hdk-unit-test-example
|
/zome/src/goal/crud.rs
|
UTF-8
| 3,387
| 2.53125
| 3
|
[] |
no_license
|
use crate::{
error::Error,
validate_helpers::entry_from_element_create_or_update,
};
use dna_help::WrappedAgentPubKey;
use hdk::prelude::*;
use std::fmt;
// A Goal Card. This is a card on the SoA Tree which can be small or non-small, complete or
// incomplete, certain or uncertain, and contains text content.
// user hash and unix timestamp are included to prevent hash collisions.
#[hdk_entry(id = "goal")]
#[derive(Clone, PartialEq)]
pub struct Goal {
    pub content: String,
    pub user_hash: WrappedAgentPubKey,
    pub user_edit_hash: Option<WrappedAgentPubKey>,
    pub timestamp_created: f64,
    pub timestamp_updated: Option<f64>,
    pub hierarchy: Hierarchy,
    pub status: Status,
    pub tags: Option<Vec<String>>,
    pub description: String,
    pub time_frame: Option<TimeFrame>,
    pub is_imported: bool,
}
// can be updated
impl TryFrom<&Element> for Goal {
    type Error = Error;
    // Delegates to the shared helper, which accepts both create and update
    // elements (hence the comment above).
    fn try_from(element: &Element) -> Result<Self, Self::Error> {
        entry_from_element_create_or_update::<Goal>(element)
    }
}
impl Goal {
    /// Plain field-for-field constructor; no validation is performed here.
    pub fn new(
        content: String,
        user_hash: WrappedAgentPubKey,
        user_edit_hash: Option<WrappedAgentPubKey>,
        timestamp_created: f64,
        timestamp_updated: Option<f64>,
        hierarchy: Hierarchy,
        status: Status,
        tags: Option<Vec<String>>,
        description: String,
        time_frame: Option<TimeFrame>,
        is_imported: bool,
    ) -> Self {
        Self {
            content,
            user_hash,
            user_edit_hash,
            timestamp_created,
            timestamp_updated,
            hierarchy,
            status,
            tags,
            description,
            time_frame,
            is_imported,
        }
    }
}
/// String wrapper used as the serde wire format for the enums below.
#[derive(Debug, Serialize, Deserialize, SerializedBytes, Clone, PartialEq)]
pub struct UIEnum(String);
/// Completion status of a Goal; serialized as a UIEnum variant-name string.
#[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone, PartialEq)]
#[serde(from = "UIEnum")]
#[serde(into = "UIEnum")]
pub enum Status {
    Uncertain,
    Incomplete,
    InProcess,
    Complete,
    InReview,
}
impl From<UIEnum> for Status {
    fn from(ui_enum: UIEnum) -> Self {
        match ui_enum.0.as_str() {
            "Incomplete" => Self::Incomplete,
            "InProcess" => Self::InProcess,
            "Complete" => Self::Complete,
            "InReview" => Self::InReview,
            // Unknown strings deliberately fall back to Uncertain.
            _ => Self::Uncertain,
        }
    }
}
impl From<Status> for UIEnum {
    fn from(status: Status) -> Self {
        Self(status.to_string())
    }
}
impl fmt::Display for Status {
    // Display mirrors Debug so to_string() yields the exact variant name
    // that From<UIEnum> matches on above.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
/// Position of a Goal in the SoA tree; serialized as a UIEnum string.
#[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone, PartialEq)]
#[serde(from = "UIEnum")]
#[serde(into = "UIEnum")]
pub enum Hierarchy {
    Root,
    Trunk,
    Branch,
    Leaf,
    NoHierarchy,
}
impl From<UIEnum> for Hierarchy {
    fn from(ui_enum: UIEnum) -> Self {
        match ui_enum.0.as_str() {
            "Root" => Self::Root,
            "Trunk" => Self::Trunk,
            "Branch" => Self::Branch,
            "Leaf" => Self::Leaf,
            // Unknown strings deliberately fall back to NoHierarchy.
            _ => Self::NoHierarchy,
        }
    }
}
impl From<Hierarchy> for UIEnum {
    fn from(hierarchy: Hierarchy) -> Self {
        Self(hierarchy.to_string())
    }
}
impl fmt::Display for Hierarchy {
    // Display mirrors Debug so to_string() yields the exact variant name
    // that From<UIEnum> matches on above.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
/// A from/to time window for a Goal.
/// NOTE(review): presumably unix timestamps, matching Goal's f64 timestamp
/// fields — confirm against the UI that produces these values.
#[derive(Serialize, Deserialize, Debug, SerializedBytes, Clone, PartialEq)]
pub struct TimeFrame {
    from_date: f64,
    to_date: f64,
}
impl TimeFrame {
    /// Constructs a window; no ordering check between the two dates.
    pub fn new(from_date: f64, to_date: f64) -> Self {
        Self { from_date, to_date }
    }
}
| true
|
a6677454b75d5fab17e6301f25cbb629313078b1
|
Rust
|
coolguy1990/learning-rust
|
/generics/src/main.rs
|
UTF-8
| 1,408
| 3.765625
| 4
|
[] |
no_license
|
use std::cmp::PartialOrd;
// fn largest_i32(list: &[i32]) -> i32 {
// let mut largest = list[0];
// for &num in list.iter() {
// if num > largest {
// largest = num;
// }
// }
// largest
// }
// fn largest_char(list: &[char]) -> char {
// let mut largest = list[0];
// for &num in list.iter() {
// if num > largest {
// largest = num;
// }
// }
// largest
// }
/// Returns the largest element of `list`.
///
/// Panics (index out of bounds) when `list` is empty, exactly like the
/// original index-based version.
fn largest<T: PartialOrd + Copy>(list: &[T]) -> T {
    list.iter()
        .skip(1)
        .fold(list[0], |best, &candidate| {
            if candidate > best {
                candidate
            } else {
                best
            }
        })
}
fn main() {
    // Earlier iterations of this learning exercise are kept below as
    // comments: a hand-rolled loop, then the concrete-typed largest_i32 /
    // largest_char helpers. The generic `largest` now covers both cases.
    // let numbers = vec![20, 30, 34, 550, 100];
    // let mut largest = numbers[0];
    // for num in numbers {
    //     if num > largest {
    //         largest = num;
    //     }
    // }
    // println!("Largest is {:?}", largest);
    // let numbers = vec![20, 40, 1, 99, 100];
    // let result = largest(&numbers);
    // println!("Largest is {:?}", result);
    // let numbers = vec![10, 20, 1, 20, 120, 11];
    // let result = largest_i32(&numbers);
    // println!("Largest is {:?}", result);
    // let chars = vec!['y', 'm', 'a', 'q'];
    // let result = largest_char(&chars);
    // println!("The largest char is {}", result);
    let numbers = vec![10, 20, 1, 20, 120, 11];
    let result = largest(&numbers);
    println!("Largest is {:?}", result);
    let chars = vec!['y', 'm', 'a', 'q'];
    let result = largest(&chars);
    println!("The largest char is {}", result);
}
| true
|
9dc95be06f0e24ec99528746ec669f2967020ca3
|
Rust
|
kennytm/distributed-lock-sample
|
/src/main.rs
|
UTF-8
| 4,077
| 2.765625
| 3
|
[
"Apache-2.0"
] |
permissive
|
use std::thread::{sleep, spawn};
use std::time::{Duration, SystemTime};
use etcd_rs::prelude::*;
use etcd_rs::Client;
use futures::{
sync::{mpsc, oneshot},
Future, Sink, Stream,
};
/// Spawns a background tokio task that renews the lease once per second.
/// The task stops renewing once etcd reports the lease expired (ttl == 0)
/// or a keep-alive request fails.
fn spawn_keep_alive(client: &Client, lease_id: i64) {
    let lease_client = client.lease();
    let task = tokio::timer::Interval::new_interval(Duration::from_secs(1))
        .map_err(|_| ())
        .and_then(move |_| {
            lease_client
                .keep_alive_once(KeepAliveRequest::new(lease_id))
                .then(|r| match r {
                    Ok(resp) => {
                        // ttl == 0 means etcd already expired the lease.
                        if resp.ttl() == 0 {
                            println!("lease expired");
                            Err(())
                        } else {
                            Ok(())
                        }
                    }
                    Err(e) => {
                        println!("failed to keep alive: {:?}", e);
                        Err(())
                    }
                })
        })
        // Returning Err(()) above terminates this for_each stream.
        .for_each(|_| Ok(()));
    tokio::spawn(task);
}
/// Commands sent to the leaser task (see `run_leaser`).
enum LockTask {
    // Acquire the lock; the acquired lock key is sent back on the channel.
    Lock(oneshot::Sender<Vec<u8>>),
    // Release a previously acquired lock key.
    Unlock(Vec<u8>),
    // Revoke the lease itself (used on shutdown).
    Revoke,
}
/// etcd key name used for the shared distributed lock.
const LOCK_NAME: &str = "tikv_importer/prepare_lock";
/// Runs the lease/lock state machine on its own tokio runtime: grants a
/// lease, keeps it alive in the background, then serves Lock/Unlock/Revoke
/// commands from `prepare_lock_recv` until the channel closes.
fn run_leaser(client: Client, prepare_lock_recv: mpsc::Receiver<LockTask>) {
    let least_client = client.lease();
    let task = least_client
        .grant(GrantRequest::new(5)) // <-- keep alive for 5 seconds.
        .map_err(|_| ())
        .and_then(move |resp| {
            let lease_id = resp.id();
            spawn_keep_alive(&client, lease_id);
            let lock_client = client.lock();
            // Each command maps to one boxed future; for_each awaits it
            // before taking the next command, so commands are serialized.
            prepare_lock_recv.for_each(move |task| match task {
                LockTask::Lock(reply) => Box::new(
                    lock_client
                        .lock(LockRequest::new(LOCK_NAME, lease_id))
                        .map(move |lock_resp| reply.send(lock_resp.key().to_owned()).unwrap())
                        .map_err(|e| eprintln!("lock failed: {:?}", e)),
                )
                    as Box<dyn Future<Item = (), Error = ()> + Send>,
                LockTask::Unlock(key) => Box::new(
                    lock_client
                        .unlock(UnlockRequest::new(key))
                        .map(|_| ())
                        .map_err(|e| eprintln!("unlock failed: {:?}", e)),
                )
                    as Box<dyn Future<Item = (), Error = ()> + Send>,
                LockTask::Revoke => Box::new(
                    least_client
                        .revoke(RevokeRequest::new(lease_id))
                        .map(|_| ())
                        .map_err(|e| eprintln!("revoke failed: {:?}", e)),
                )
                    as Box<dyn Future<Item = (), Error = ()> + Send>,
            })
        });
    // Blocks until the command channel closes and all futures complete.
    tokio::run(task);
}
/// Demo driver: acquires the distributed mutex five times, simulating an
/// 8-second task while holding it, then revokes the lease and shuts down.
fn main() {
    let client = Client::builder().add_endpoint("127.0.0.1:2379").build();
    // Capacity 0: each command is handed off to the leaser synchronously.
    let (mut prepare_lock_send, prepare_lock_recv) = mpsc::channel(0);
    let leaser_thread = spawn(move || run_leaser(client, prepare_lock_recv));
    for i in 0..5 {
        println!("{:?} {} - acquire mutex to prepare", SystemTime::now(), i);
        let (lock_reply_send, lock_reply_recv) = oneshot::channel();
        // Send the Lock command, then block until the leaser replies with
        // the etcd lock key we must later pass back to Unlock.
        let key = (&mut prepare_lock_send)
            .send(LockTask::Lock(lock_reply_send))
            .map_err(|_| oneshot::Canceled)
            .and_then(move |_| lock_reply_recv)
            .wait()
            .unwrap();
        println!(
            "{:?} {} - got mutex to prepare - key = {:x?}",
            SystemTime::now(),
            i,
            key
        );
        // simulate do task ....
        sleep(Duration::from_secs(8));
        // simulate do task ....
        println!(
            "{:?} {} - completed prepare, unlocking mutex",
            SystemTime::now(),
            i
        );
        (&mut prepare_lock_send)
            .send(LockTask::Unlock(key))
            .wait()
            .unwrap();
    }
    // Revoking the lease tells the leaser to release etcd resources; closing
    // the channel (drop) lets run_leaser's for_each finish.
    prepare_lock_send.send(LockTask::Revoke).wait().unwrap();
    leaser_thread.join().unwrap();
}
| true
|
696db3fad8d8bc3e0d6501fe9a4d40a7d51a0c4f
|
Rust
|
wvandeun/rust-httpd
|
/src/main.rs
|
UTF-8
| 6,786
| 2.734375
| 3
|
[] |
no_license
|
use std::net::{TcpListener, TcpStream, SocketAddr};
use std::io::{Read, Write, BufReader, BufRead};
use std::thread;
use std::str;
use std::fs::File;
use std::process::Command;
extern crate httparse;
/// Sends a canned 200 response with a "Hello world" HTML body.
fn respond_hello_world(mut stream: TcpStream) {
    let response = b"HTTP/1.1 200 OK\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n<html><body>Hello world</body></html>\r\n";
    // write_all retries on partial writes; a bare write() may send only a
    // prefix of the response.
    stream.write_all(response).expect("Write failed");
}
/// Serves `www/<path>`, falling back to the 404 page when the file cannot
/// be opened. Rejects paths containing a ".." segment so requests cannot
/// escape the www/ root.
fn serve_static_file(mut stream: TcpStream, path: &str) {
    // Security: `path` comes straight from the request URL; without this
    // check "/files/../../etc/passwd" would read outside www/.
    let mut file = if path.split('/').any(|seg| seg == "..") {
        File::open("404.html").expect("404.html file missing!")
    } else {
        match File::open(format!("www/{}", path)) {
            Ok(file) => file,
            Err(_) => File::open("404.html").expect("404.html file missing!"),
        }
    };
    let mut buffer = Vec::new();
    file.read_to_end(&mut buffer).expect("Read failed");
    // write_all: a bare write() may send only part of the body.
    stream.write_all(&buffer).expect("Write failed");
}
/// Runs `cgi/<script>` with CGI meta-variables in its environment and
/// relays its stdout (or stderr when the script exits nonzero) to the
/// client. Responds 500 when the script cannot be executed at all.
fn handle_cgi_script(request: httparse::Request, mut stream: TcpStream, client_addr: SocketAddr, req_path: &str) {
    // "<script>/<path-info>": the first segment names the executable.
    let path_components: Vec<&str> = req_path.splitn(2, "/").collect();
    let default_path = "/";
    let (script_name, path_info) = (path_components.get(0).unwrap(), path_components.get(1).unwrap_or(&default_path));
    let client_ip = client_addr.ip().to_string();
    let meta_variables = build_cgi_meta_vars(&request, &client_ip, script_name, path_info);
    let mut command = Command::new(format!("cgi/{}", script_name));
    println!("{:?}", &meta_variables);
    build_environmental_variables(&mut command, meta_variables);
    match command.output() {
        Ok(output) => {
            // write_all: a bare write() may send only part of the output.
            if output.status.success() {
                stream.write_all(&output.stdout).expect("Command failed");
            } else {
                stream.write_all(&output.stderr).expect("Stderr");
            }
        },
        Err(_) => {
            // Script missing or not executable.
            respond_error(stream);
        }
    }
}
/// Copies the CGI meta-variable pairs onto `command` as environment
/// variables.
fn build_environmental_variables<'a>(command: &'a mut Command, meta_variables: Vec<(&'a str, &'a str)>) {
    // Destructure the pairs directly instead of indexing tup.0 / tup.1;
    // the stray `println!("{:?}", command)` debug line is removed.
    for (name, value) in meta_variables {
        command.env(name, value);
    }
}
/// Builds the CGI meta-variable list (name, value) from the parsed request
/// headers plus client and script information.
fn build_cgi_meta_vars<'a>(request: &'a httparse::Request, client_ip: &'a String, script_name: &'a str, path_info: &'a str) -> Vec<(&'a str, &'a str)> {
    let mut headers = Vec::new();
    // Plain iteration: the old loop used .enumerate() but never used the
    // index, which produced an unused-variable warning.
    for &item in request.headers.iter() {
        match &item.name {
            &"Authorization" => headers.push(("AUTH_TYPE", str::from_utf8(&item.value).unwrap())),
            &"Content-Length" => headers.push(("CONTENT_LENGTH", str::from_utf8(&item.value).unwrap())),
            &"Content-Type" => headers.push(("CONTENT_TYPE", str::from_utf8(&item.value).unwrap())),
            &"Host" => {
                // "host:port" is split into SERVER_NAME / SERVER_PORT.
                let header_value = str::from_utf8(&item.value).unwrap();
                match header_value.find(':') {
                    Some(index) => {
                        headers.push(("SERVER_NAME", &header_value[..(index)]));
                        headers.push(("SERVER_PORT", &header_value[(index + 1)..]));
                    },
                    None => {
                        headers.push(("SERVER_NAME", header_value));
                    }
                }
            },
            _ => {},
        };
    };
    headers.push(("REMOTE_ADDR", &client_ip[..]));
    headers.push(("REMOTE_HOST", &client_ip[..]));
    headers.push(("REQUEST_METHOD", request.method.unwrap()));
    headers.push(("SCRIPT_NAME", script_name));
    // The path-info tail may carry a "?query": split it into PATH_INFO and
    // QUERY_STRING.
    match path_info.find('?') {
        Some(index) => {
            headers.push(("PATH_INFO", &path_info[..(index)]));
            headers.push(("QUERY_STRING", &path_info[(index + 1)..]));
        },
        None => {
            headers.push(("PATH_INFO", path_info));
        }
    };
    headers.push(("SERVER_PROTOCOL", "HTTP 1.1"));
    headers.push(("SERVER_SOFTWARE", "rust-httpd 0.1"));
    return headers;
}
/// Sends a canned 500 Internal Server Error response.
fn respond_error(mut stream: TcpStream) {
    let response = b"HTTP/1.1 500 Internal Server Error\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n<html><body>500 - Server Error</body></html>\r\n";
    // write_all retries on partial writes; a bare write() may truncate.
    stream.write_all(response).expect("Write failed");
}
/// Sends a canned 404 File Not Found response.
fn respond_file_not_found(mut stream: TcpStream) {
    let response = b"HTTP/1.1 404 File Not Found\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n<html><body>404 - File Not Found</body></html>\r\n";
    // write_all retries on partial writes; a bare write() may truncate.
    stream.write_all(response).expect("Write failed");
}
fn request_url(buffer: &[u8]) -> Option<&str> {
let mut headers = [httparse::EMPTY_HEADER; 16];
let mut req = httparse::Request::new(&mut headers);
match req.parse(&buffer) {
Ok(_) => {
match req.path {
Some(ref path) => {
return Some(path);
},
None => {
return None;
}
}
},
Err(_) => {
return None;
}
}
}
/// Reads the request head (request line + headers) from the stream, up to
/// and including the blank "\r\n" line that terminates it.
///
/// Panics on I/O errors (each read is unwrapped); every connection runs on
/// its own thread, so a panic only kills that connection's thread.
fn read_request_head(stream: &TcpStream) -> Vec<u8> {
    let mut reader = BufReader::new(stream);
    let mut buff = Vec::new();
    let mut read_bytes = reader.read_until(b'\n', &mut buff).unwrap();
    while read_bytes > 0 {
        read_bytes = reader.read_until(b'\n', &mut buff).unwrap();
        // A 2-byte "\r\n" line is the empty line separating head from body.
        if read_bytes == 2 && &buff[(buff.len()-2)..] == b"\r\n" {
            break;
        }
    }
    return buff;
}
/// Parses the request head and dispatches on the path prefix:
/// "/files/*" -> static files, "/hello" -> greeting, "/cgi/*" -> CGI
/// script, anything else -> 404; a head with no path gets a 500.
fn handle_request(mut stream: TcpStream, client_addr: SocketAddr) {
    let request_bytes = read_request_head(&stream);
    let mut headers = [httparse::EMPTY_HEADER; 16];
    let mut req = httparse::Request::new(&mut headers);
    // NOTE(review): the parse Result is ignored; a malformed head simply
    // leaves req.path == None and is answered with a 500 below.
    req.parse(&request_bytes);
    println!("{:?}", req.headers);
    // Body length is parsed but not yet consumed — see the commented-out
    // read_request_body() call below; appears to be work in progress.
    let body_length: u32 = match req.headers.iter().find(|&&header| header.name == "Content-Length") {
        Some(header) => str::from_utf8(header.value).unwrap().parse().unwrap(),
        None => 0,
    };
    // let request_body = read_request_body();
    match req.path {
        Some(path) => {
            if path.starts_with("/files") {
                // Skip the "/files/" prefix (7 bytes).
                serve_static_file(stream, &path[7..]);
            } else if path == "/hello" {
                respond_hello_world(stream);
            } else if path.starts_with("/cgi") {
                // Skip the "/cgi/" prefix (5 bytes).
                handle_cgi_script(req, stream, client_addr, &path[5..]);
            } else {
                respond_file_not_found(stream);
            }
        },
        None => {
            respond_error(stream);
        }
    };
}
/// Binds to 127.0.0.1:8888 and serves each accepted connection on its own
/// thread. Panics only if the bind itself fails.
fn main() {
    let listener = TcpListener::bind("127.0.0.1:8888").unwrap();
    loop {
        match listener.accept() {
            Ok((stream, addr)) => {
                thread::spawn(move || handle_request(stream, addr));
            }
            // Accept errors can be transient (e.g. fd exhaustion): log and
            // keep serving. The old code spawned a whole thread just to
            // print this message.
            Err(e) => println!("Connection failed: {:?}", e),
        }
    }
}
| true
|
d2197126e3cb2c73c15868e9d156b840123ca791
|
Rust
|
Joseph-LeGrice/rust-ac-ffmpeg
|
/examples/encoding.rs
|
UTF-8
| 3,930
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
use std::{fs::File, time::Duration};
use ac_ffmpeg::{
codec::{
video::{self, VideoEncoder, VideoFrameMut},
CodecParameters, Encoder,
},
format::{
io::IO,
muxer::{Muxer, OutputFormat},
},
time::{TimeBase, Timestamp},
Error,
};
use clap::{App, Arg};
/// Open a given output file.
///
/// The container format is guessed from the file extension of `path`; the
/// file is created (truncating any existing file), and one elementary
/// stream is registered on the muxer per entry in `elementary_streams`.
///
/// Returns an error if the format cannot be guessed, the file cannot be
/// created, or the muxer rejects a stream.
fn open_output(path: &str, elementary_streams: &[CodecParameters]) -> Result<Muxer<File>, Error> {
    let output_format = OutputFormat::guess_from_file_name(path)
        .ok_or_else(|| Error::new(format!("unable to guess output format for file: {}", path)))?;
    let output = File::create(path)
        .map_err(|err| Error::new(format!("unable to create output file {}: {}", path, err)))?;
    // The muxer needs a seekable sink so it can patch headers on finalize.
    let io = IO::from_seekable_write_stream(output);
    let mut muxer_builder = Muxer::builder();
    for codec_parameters in elementary_streams {
        muxer_builder.add_stream(codec_parameters)?;
    }
    muxer_builder.build(io, output_format)
}
/// Create h264 encoded black video file of a given length and with a given
/// resolution.
///
/// A single black frame is created once and re-submitted with increasing
/// presentation timestamps at 25 fps until `duration` is covered.
fn encode_black_video(
    output: &str,
    width: u32,
    height: u32,
    duration: Duration,
) -> Result<(), Error> {
    let pixel_format = video::frame::get_pixel_format("yuv420p");
    // create a black video frame with a given resolution
    let frame = VideoFrameMut::black(pixel_format, width as _, height as _).freeze();
    // note: it is 1/fps
    let time_base = TimeBase::new(1, 25);
    let mut encoder = VideoEncoder::builder("libx264")?
        .pixel_format(pixel_format)
        .width(width as _)
        .height(height as _)
        .time_base(time_base)
        .build()?;
    let codec_parameters = encoder.codec_parameters().into();
    let mut muxer = open_output(output, &[codec_parameters])?;
    let mut frame_idx = 0;
    let mut frame_timestamp = Timestamp::new(frame_idx, time_base);
    let max_timestamp = Timestamp::from_secs(0) + duration;
    while frame_timestamp < max_timestamp {
        // Each submission reuses the same frozen frame with a new PTS.
        let cloned_frame = frame.clone().with_pts(frame_timestamp);
        encoder.push(cloned_frame)?;
        // Drain every packet the encoder has ready after this push; the
        // encoder may buffer, so `take` can yield zero or more packets.
        while let Some(packet) = encoder.take()? {
            muxer.push(packet.with_stream_index(0))?;
        }
        frame_idx += 1;
        frame_timestamp = Timestamp::new(frame_idx, time_base);
    }
    // Flush signals end-of-stream; drain the remaining buffered packets
    // before finalizing the container.
    encoder.flush()?;
    while let Some(packet) = encoder.take()? {
        muxer.push(packet.with_stream_index(0))?;
    }
    muxer.flush()
}
/// CLI entry point: parse arguments and produce a black video file.
fn main() {
    let matches = App::new("encoding")
        .arg(
            Arg::with_name("output")
                .required(true)
                .takes_value(true)
                .value_name("OUTPUT")
                .help("Output file"),
        )
        .arg(
            Arg::with_name("width")
                .short("w")
                .takes_value(true)
                .value_name("WIDTH")
                .help("width")
                .default_value("640"),
        )
        .arg(
            // NOTE(review): `-h` normally belongs to clap's auto-generated
            // help flag; defining it here may shadow or conflict with help.
            // Confirm against the clap version in use.
            Arg::with_name("height")
                .short("h")
                .takes_value(true)
                .value_name("HEIGHT")
                .help("height")
                .default_value("480"),
        )
        .arg(
            Arg::with_name("duration")
                .short("d")
                .takes_value(true)
                .value_name("DURATION")
                .help("duration in seconds")
                .default_value("10"),
        )
        .get_matches();
    // All options carry defaults, so `unwrap` on the values cannot fail;
    // `parse().unwrap()` still panics on non-numeric user input.
    let output_filename = matches.value_of("output").unwrap();
    let width = matches.value_of("width").unwrap().parse().unwrap();
    let height = matches.value_of("height").unwrap().parse().unwrap();
    let duration = matches.value_of("duration").unwrap().parse().unwrap();
    let duration = Duration::from_secs_f32(duration);
    if let Err(err) = encode_black_video(output_filename, width, height, duration) {
        eprintln!("ERROR: {}", err);
    }
}
| true
|
f59d7116ff05be482ddd382aabafe73f1e79e2d6
|
Rust
|
lilopkins/metar-rs
|
/src/parser/mod.rs
|
UTF-8
| 14,786
| 2.703125
| 3
|
[
"MIT"
] |
permissive
|
use super::types::Data::*;
use super::types::*;
use super::Metar;
use pest::iterators::Pair;
use pest::Parser;
use pest_derive::Parser;
#[derive(Parser)]
#[grammar = "parser/metar.pest"]
pub struct MetarParser;
impl super::MetarError {
fn from_pest_err(e: pest::error::Error<Rule>, data: String) -> Self {
match e.location {
pest::error::InputLocation::Pos(p) => Self {
string: data,
start: p,
length: 0,
variant: e.variant,
},
pest::error::InputLocation::Span((s, end)) => Self {
string: data,
start: s,
length: end - s,
variant: e.variant,
},
}
}
}
pub(crate) fn parse(data: String) -> Result<super::Metar, super::MetarError> {
let res = MetarParser::parse(Rule::metar, &data);
res.map(|mut pairs| {
let metar_pair = pairs.next().unwrap();
metar_pair.into()
})
.map_err(|e| super::MetarError::from_pest_err(e, data))
}
impl<'i> From<Pair<'i, Rule>> for Metar {
    /// Build a `Metar` from a parsed `metar` pair: start from "unknown"
    /// defaults and fill each field as its grammar rule is encountered.
    fn from(pair: Pair<'i, Rule>) -> Self {
        let mut metar = Metar {
            station: "ZZZZ".to_owned(),
            time: Time {
                date: 0,
                hour: 0,
                minute: 0,
            },
            wind: Wind {
                dir: Unknown,
                speed: Unknown,
                varying: None,
                gusting: None,
            },
            visibility: Unknown,
            clouds: Known(Clouds::NoCloudDetected),
            cloud_layers: Vec::new(),
            vert_visibility: None,
            weather: Vec::new(),
            temperature: Unknown,
            dewpoint: Unknown,
            pressure: Unknown,
            remarks: None,
        };
        assert_eq!(pair.as_rule(), Rule::metar);
        for part in pair.into_inner() {
            match part.as_rule() {
                Rule::station => metar.station = part.as_str().to_owned(),
                Rule::observation_time => metar.time = Time::from(part),
                Rule::wind => metar.wind = Wind::from(part),
                Rule::wind_varying => {
                    // Two headings: the wind varies between `from` and `to`.
                    let mut hdgs = part.into_inner();
                    let from = hdgs.next().unwrap().as_str().parse().unwrap();
                    let to = hdgs.next().unwrap().as_str().parse().unwrap();
                    metar.wind.varying = Some((from, to));
                }
                Rule::atmos_condition => {
                    if part.as_str() == "CAVOK" {
                        metar.visibility = Known(Visibility::CAVOK);
                        metar.clouds = Known(Clouds::NoCloudDetected);
                    } else if part.as_str() == "SKC" {
                        metar.clouds = Known(Clouds::NoCloudDetected);
                    } else {
                        for c in part.into_inner() {
                            match c.as_rule() {
                                Rule::visibility_horizontal => {
                                    if c.as_str() == "////" {
                                        continue;
                                    } else if c.as_str().ends_with("SM") {
                                        // Statute miles, possibly a mixed
                                        // number like "1 1/2SM": sum whole
                                        // and fractional parts.
                                        let mut total = 0f32;
                                        let dist = &c.as_str()[..c.as_str().len() - 2];
                                        let parts = dist.split(' ');
                                        for p in parts {
                                            if p.contains('/') {
                                                let mut parts = p.split('/');
                                                let n: f32 = parts.next().unwrap().parse().unwrap();
                                                let d: f32 = parts.next().unwrap().parse().unwrap();
                                                total += n / d;
                                            } else {
                                                total += p.parse::<f32>().unwrap();
                                            }
                                        }
                                        metar.visibility = Known(Visibility::StatuteMiles(total));
                                    } else {
                                        // Metres
                                        metar.visibility =
                                            Known(Visibility::Metres(c.as_str().parse().unwrap()));
                                    }
                                }
                                Rule::visibility_vertical => {
                                    // Strip the "VV" prefix; "///" means
                                    // reduced by an unknown amount.
                                    let data = &c.as_str()[2..];
                                    match data {
                                        "///" => {
                                            metar.vert_visibility =
                                                Some(VertVisibility::ReducedByUnknownAmount)
                                        }
                                        _ => {
                                            metar.vert_visibility = Some(VertVisibility::Distance(
                                                data.parse().unwrap(),
                                            ))
                                        }
                                    }
                                }
                                Rule::wx => metar.weather.push(Weather::from(c)),
                                Rule::cloud => {
                                    metar.clouds = Known(Clouds::CloudLayers);
                                    metar.cloud_layers.push(CloudLayer::from(c));
                                }
                                _ => (),
                            }
                        }
                    }
                }
                Rule::temperatures => {
                    // "M" prefixes negative values; "//" means unknown.
                    let mut temps = part.into_inner();
                    let temp = temps.next().unwrap();
                    let dewp = temps.next().unwrap();
                    metar.temperature = match temp.as_str() {
                        "//" => Unknown,
                        v => {
                            if let Some(stripped) = v.strip_prefix('M') {
                                Known(-stripped.parse::<i32>().unwrap())
                            } else {
                                Known(v.parse().unwrap())
                            }
                        }
                    };
                    metar.dewpoint = match dewp.as_str() {
                        "//" => Unknown,
                        v => {
                            if let Some(stripped) = v.strip_prefix('M') {
                                Known(-stripped.parse::<i32>().unwrap())
                            } else {
                                Known(v.parse().unwrap())
                            }
                        }
                    };
                }
                Rule::pressure => {
                    let s = part.as_str();
                    let data = &s[1..];
                    if data == "////" {
                        // BUG FIX: this was `break`, which aborted the whole
                        // part loop and silently dropped any later parts
                        // (notably `remarks`). An unknown pressure should
                        // only skip this field.
                        continue;
                    }
                    if s.starts_with('Q') {
                        // QNH
                        metar.pressure = Known(Pressure::Hectopascals(data.parse().unwrap()));
                    } else if s.starts_with('A') {
                        // inHg
                        metar.pressure = Known(Pressure::InchesOfMercury(
                            data.parse::<f32>().unwrap() / 100f32,
                        ));
                    } else {
                        unreachable!()
                    }
                }
                Rule::remarks => metar.remarks = Some(part.as_str().to_owned()),
                _ => (),
            }
        }
        metar
    }
}
impl<'i> From<Pair<'i, Rule>> for Time {
    /// Build a `Time` from an `observation_time` pair (day, hour, minute).
    fn from(pair: Pair<'i, Rule>) -> Self {
        assert_eq!(pair.as_rule(), Rule::observation_time);
        let (mut date, mut hour, mut minute) = (0, 0, 0);
        for field in pair.into_inner() {
            let value = field.as_str();
            match field.as_rule() {
                Rule::observation_day => date = value.parse().unwrap(),
                Rule::observation_hour => hour = value.parse().unwrap(),
                Rule::observation_minute => minute = value.parse().unwrap(),
                _ => (),
            }
        }
        Time { date, hour, minute }
    }
}
impl<'i> From<Pair<'i, Rule>> for Wind {
    /// Build a `Wind` from a `wind` pair: direction, speed, optional gusts
    /// and the unit that applies to both speed and gusts.
    fn from(pair: Pair<'i, Rule>) -> Self {
        let mut wind = Wind {
            dir: Unknown,
            speed: Unknown,
            varying: None,
            gusting: None,
        };
        assert_eq!(pair.as_rule(), Rule::wind);
        // "CALM" has no direction/unit parts at all.
        if pair.as_str() == "CALM" {
            wind.speed = Known(WindSpeed::Calm);
            return wind;
        }
        // Speed and gust magnitudes are collected first and only combined
        // with the unit (parsed last) after the loop.
        let mut speed = None;
        let mut gusting = None;
        let mut unit = None;
        for part in pair.into_inner() {
            match part.as_rule() {
                Rule::wind_dir => {
                    wind.dir = match part.as_str() {
                        "///" => Unknown,
                        "VRB" => Known(WindDirection::Variable),
                        v => Known(WindDirection::Heading(v.parse().unwrap())),
                    };
                }
                Rule::wind_speed => {
                    let mut s = part.as_str();
                    // NOTE(review): an unknown speed ("//") breaks out of
                    // the whole part loop, so a trailing unit/gust is never
                    // parsed either — confirm that is intended.
                    if s == "//" {
                        break;
                    }
                    // A "P" prefix ("P99" = more than 99) is dropped.
                    if s.starts_with('P') {
                        s = &s[1..];
                    }
                    speed = Some(s.parse().unwrap());
                }
                Rule::wind_gusts => {
                    // Skip the leading "G" marker.
                    gusting = Some(part.as_str()[1..].parse().unwrap());
                }
                Rule::wind_unit => {
                    // The unit is stored as a zero-magnitude WindSpeed and
                    // later stamped with the real magnitude.
                    let unit_s = part.as_str();
                    unit = match unit_s {
                        "KT" => Some(WindSpeed::Knot(0)),
                        "KPH" => Some(WindSpeed::KilometresPerHour(0)),
                        "MPS" => Some(WindSpeed::MetresPerSecond(0)),
                        _ => unreachable!(),
                    }
                }
                _ => (),
            }
        }
        // NOTE(review): `unit.clone().unwrap().clone_changing_contents(...)`
        // clones twice; `unwrap` also panics if a speed was parsed without
        // a unit (grammar presumably forbids that — confirm).
        if let Some(spd) = speed {
            wind.speed = Known(unit.clone().unwrap().clone_changing_contents(spd));
        }
        if let Some(gust) = gusting {
            wind.gusting = unit.map(|u| u.clone_changing_contents(gust));
        }
        wind
    }
}
impl<'i> From<Pair<'i, Rule>> for Weather {
    /// Build a `Weather` from a `wx` pair: an optional intensity prefix
    /// ("+", "-", "VC") followed by one or more two-letter condition codes.
    /// Intensity defaults to `Moderate` when no prefix is present.
    fn from(pair: Pair<'i, Rule>) -> Self {
        let mut wx = Weather {
            intensity: WeatherIntensity::Moderate,
            conditions: Vec::new(),
        };
        assert_eq!(pair.as_rule(), Rule::wx);
        for part in pair.into_inner() {
            match part.as_rule() {
                Rule::wx_intensity => {
                    wx.intensity = match part.as_str() {
                        "+" => WeatherIntensity::Heavy,
                        "-" => WeatherIntensity::Light,
                        "VC" => WeatherIntensity::InVicinity,
                        _ => unreachable!(),
                    }
                }
                Rule::wx_condition => {
                    // Standard METAR weather abbreviation table; the grammar
                    // guarantees only these codes reach this match.
                    let cond = match part.as_str() {
                        "MI" => WeatherCondition::Shallow,
                        "PR" => WeatherCondition::Partial,
                        "BC" => WeatherCondition::Patches,
                        "DR" => WeatherCondition::LowDrifting,
                        "BL" => WeatherCondition::Blowing,
                        "SH" => WeatherCondition::Showers,
                        "TS" => WeatherCondition::Thunderstorm,
                        "FZ" => WeatherCondition::Freezing,
                        "RA" => WeatherCondition::Rain,
                        "DZ" => WeatherCondition::Drizzle,
                        "SN" => WeatherCondition::Snow,
                        "SG" => WeatherCondition::SnowGrains,
                        "IC" => WeatherCondition::IceCrystals,
                        "PL" => WeatherCondition::IcePellets,
                        "GR" => WeatherCondition::Hail,
                        "GS" => WeatherCondition::SnowPelletsOrSmallHail,
                        "UP" => WeatherCondition::UnknownPrecipitation,
                        "FG" => WeatherCondition::Fog,
                        "VA" => WeatherCondition::VolcanicAsh,
                        "BR" => WeatherCondition::Mist,
                        "HZ" => WeatherCondition::Haze,
                        "DU" => WeatherCondition::WidespreadDust,
                        "FU" => WeatherCondition::Smoke,
                        "SA" => WeatherCondition::Sand,
                        "PY" => WeatherCondition::Spray,
                        "SQ" => WeatherCondition::Squall,
                        "PO" => WeatherCondition::Dust,
                        "DS" => WeatherCondition::Duststorm,
                        "SS" => WeatherCondition::Sandstorm,
                        "FC" => WeatherCondition::FunnelCloud,
                        _ => unreachable!(),
                    };
                    wx.conditions.push(cond);
                }
                _ => (),
            }
        }
        wx
    }
}
impl<'i> From<Pair<'i, Rule>> for CloudLayer {
    /// Build a cloud layer from a `cloud` pair: coverage density, optional
    /// cloud type and optional floor altitude ("///" means unknown).
    fn from(pair: Pair<'i, Rule>) -> Self {
        assert_eq!(pair.as_rule(), Rule::cloud);
        let mut density = "";
        let mut kind = CloudType::Normal;
        let mut floor = None;
        for field in pair.into_inner() {
            match field.as_rule() {
                Rule::cloud_density => density = field.as_str(),
                Rule::cloud_type => {
                    kind = match field.as_str() {
                        "///" => CloudType::Unknown,
                        "CB" => CloudType::Cumulonimbus,
                        "TCU" => CloudType::ToweringCumulus,
                        _ => unreachable!(),
                    };
                }
                Rule::cloud_floor => {
                    floor = match field.as_str() {
                        "///" => None,
                        height => Some(height.parse().unwrap()),
                    };
                }
                _ => (),
            }
        }
        // The density code selects the layer variant; type and floor ride
        // along unchanged.
        match density {
            "///" => CloudLayer::Unknown(kind, floor),
            "FEW" => CloudLayer::Few(kind, floor),
            "SCT" => CloudLayer::Scattered(kind, floor),
            "BKN" => CloudLayer::Broken(kind, floor),
            "OVC" => CloudLayer::Overcast(kind, floor),
            _ => unreachable!(),
        }
    }
}
| true
|
b8849b2ff4eae6236c6e863c2de8749a99fadcb3
|
Rust
|
BlueNebulaDev/rust-version-test
|
/mystr-2.0.0/src/lib.rs
|
UTF-8
| 157
| 3.109375
| 3
|
[] |
no_license
|
/// A borrowed, non-owning view of a byte sequence: a raw pointer to the
/// first byte plus a length.
///
/// NOTE(review): `MyStr` carries no lifetime, so the borrow checker cannot
/// enforce that the source slice outlives it — callers must keep the
/// source alive while the view is in use.
pub struct MyStr {
    pub length: usize,
    pub data: *const u8
}

/// Create a `MyStr` view over `slice`.
///
/// Uses `slice.as_ptr()` rather than `&slice[0]`, so an empty slice yields
/// a valid (non-dereferenceable) pointer instead of panicking on the
/// out-of-bounds index.
pub fn from_slice( slice: &[u8] ) -> MyStr {
    MyStr{ length: slice.len(), data: slice.as_ptr() }
}
| true
|
c5acb9275aecc52801d8d4cdfcb7b8e783fcb697
|
Rust
|
F3kilo/hex_field_playground
|
/src/term_render.rs
|
UTF-8
| 394
| 2.734375
| 3
|
[] |
no_license
|
use crate::app::Render;
/// Accumulates text lines into a single string buffer for terminal output.
pub struct TermRender {
    buf: String,
}

impl TermRender {
    /// Create a renderer with an empty buffer.
    pub fn new() -> Self {
        TermRender { buf: String::new() }
    }

    /// Append `line` to the buffer, prefixed by a newline and " ->> ".
    pub fn add_line(&mut self, line: &str) {
        // The buffer is rebuilt each call, exactly as the original
        // `format!`-into-self implementation did.
        let updated = format!("\n{} ->> {}", self.buf, line);
        self.buf = updated;
    }
}
impl Render for TermRender {
    /// Print the accumulated buffer and reset it for the next round.
    fn render(&mut self) {
        // `take` swaps in a fresh String and prints the old contents —
        // the same observable effect as print-then-clear.
        println!("{}", std::mem::take(&mut self.buf));
    }
}
| true
|
3b10d5cf2e9632913b8e628b9996829999e1b320
|
Rust
|
str4d/i2p_elgamal
|
/src/lib.rs
|
UTF-8
| 1,876
| 2.90625
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Implementation of I2P's ElGamal public-key encryption scheme over the
//! 2048-bit MODP DH group.
//!
//! This implementation is not constant-time (yet).
#[macro_use]
extern crate lazy_static;
extern crate num_bigint;
extern crate num_traits;
extern crate rand;
extern crate sha2;
#[cfg(test)]
extern crate data_encoding;
use std::fmt;
mod constants;
mod elgamal;
mod utils;
pub use elgamal::{Decryptor, Encryptor, KeyPairGenerator};
/// ElGamal errors
///
/// NOTE(review): by the names, `InvalidCiphertext` appears to be the
/// decryption-side failure and `InvalidMessage` the encryption-side one —
/// confirm against the `elgamal` module's `Decryptor`/`Encryptor`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Error {
    InvalidCiphertext,
    InvalidMessage,
}
/// The public component of an ElGamal encryption keypair. Represents only the
/// exponent, not the primes (which are constants).
pub struct PublicKey(pub [u8; 256]);

impl PublicKey {
    /// Copy a 256-byte buffer into a new key.
    fn from_bytes(buf: &[u8; 256]) -> Self {
        let mut bytes = [0u8; 256];
        bytes.copy_from_slice(buf);
        PublicKey(bytes)
    }
}

impl Clone for PublicKey {
    fn clone(&self) -> Self {
        PublicKey::from_bytes(&self.0)
    }
}

impl fmt::Debug for PublicKey {
    /// Debug-formats the key as a byte slice.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        (&self.0[..]).fmt(formatter)
    }
}

impl PartialEq for PublicKey {
    /// Byte-wise equality. Every byte is visited regardless of where the
    /// first mismatch occurs (the crate aims toward constant-time
    /// behaviour, per the module docs).
    fn eq(&self, other: &Self) -> bool {
        self.0
            .iter()
            .zip(other.0.iter())
            .fold(true, |equal, (a, b)| equal & (a == b))
    }
}
/// The private component of an ElGamal encryption keypair.
pub struct PrivateKey(pub [u8; 256]);

impl PrivateKey {
    /// Copy a 256-byte buffer into a new key.
    fn from_bytes(buf: &[u8; 256]) -> Self {
        let mut bytes = [0u8; 256];
        bytes.copy_from_slice(buf);
        PrivateKey(bytes)
    }
}

impl Clone for PrivateKey {
    fn clone(&self) -> Self {
        Self::from_bytes(&self.0)
    }
}

impl fmt::Debug for PrivateKey {
    /// Debug-formats the key as a byte slice.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        (&self.0[..]).fmt(formatter)
    }
}
| true
|
beba86b443f5a3c4f516b88bcf9179603eef47f4
|
Rust
|
dineshadepu/phd_orgmode
|
/notes/rigid_body_dynamics/rbd/src/geometry.rs
|
UTF-8
| 2,253
| 3.015625
| 3
|
[] |
no_license
|
extern crate itertools_num;
use itertools_num::linspace;
/// Build a 2-D grid of coordinates from `xnum` x `ynum` evenly-spaced
/// samples over `[xl, xr]` and `[yl, yr]`.
///
/// Returns parallel `(x, y)` vectors in y-major order: all x values for
/// the first y, then for the next y, and so on.
pub fn grid_linspace(xl: f32, xr: f32, xnum: usize, yl: f32, yr: f32, ynum: usize) -> (Vec<f32>, Vec<f32>) {
    // Axis samples.
    let xs = linspace::<f32>(xl, xr, xnum).collect::<Vec<_>>();
    let ys = linspace::<f32>(yl, yr, ynum).collect::<Vec<_>>();
    // Cartesian product; capacity is known up front.
    let mut grid_x = Vec::with_capacity(xs.len() * ys.len());
    let mut grid_y = Vec::with_capacity(xs.len() * ys.len());
    for &y in &ys {
        for &x in &xs {
            grid_x.push(x);
            grid_y.push(y);
        }
    }
    (grid_x, grid_y)
}
/// Values from `left` (inclusive) to `right` (exclusive) in steps of
/// `step`, built by successive addition — so floating-point drift matches
/// the accumulate-in-a-loop behaviour, not `left + i * step`.
pub fn arange(left: f32, right: f32, step: f32) -> Vec<f32>{
    std::iter::successors(Some(left), |prev| Some(prev + step))
        .take_while(|&v| v < right)
        .collect()
}
// Unit step from 0 to 5 yields the integers 0..=4; the right bound is
// exclusive.
#[test]
fn test_arange() {
    assert_eq!(vec![0., 1., 2., 3., 4.], arange(0., 5., 1.));
}
/// Build a 2-D grid of coordinates from `arange` samples along each axis.
///
/// Returns parallel `(x, y)` vectors in y-major order: every x for the
/// first y, then for the next y, and so on.
pub fn grid_arange(xl: f32, xr: f32, x_spacing: f32, yl: f32, yr: f32, y_spacing: f32) -> (Vec<f32>, Vec<f32>) {
    let xs = arange(xl, xr, x_spacing);
    let ys = arange(yl, yr, y_spacing);
    // Cartesian product; capacity is known up front.
    let mut grid_x = Vec::with_capacity(xs.len() * ys.len());
    let mut grid_y = Vec::with_capacity(xs.len() * ys.len());
    for &y in &ys {
        for &x in &xs {
            grid_x.push(x);
            grid_y.push(y);
        }
    }
    (grid_x, grid_y)
}
/// Build particle coordinates for a tank: walls `layers` particles thick
/// on the left, right and bottom of the region sampled by
/// `grid_arange(xl, xr, x_spacing, yl, yr, y_spacing)`.
///
/// Returns the `(x, y)` coordinates of the retained (wall/floor) points.
///
/// NOTE(review): indexing `x_arange[x_arange.len() - 1]` panics when the
/// x range is empty (e.g. `xl >= xr`) — same as the original; confirm
/// callers never pass an empty range.
pub fn tank(
    xl: f32,
    xr: f32,
    x_spacing: f32,
    yl: f32,
    yr: f32,
    y_spacing: f32,
    layers: usize,
) -> (Vec<f32>, Vec<f32>) {
    let x_arange = arange(xl, xr, x_spacing);
    let (xg, yg) = grid_arange(xl, xr, x_spacing, yl, yr, y_spacing);
    // Cutoffs sit half a spacing beyond the last wall layer, so the strict
    // comparisons keep exactly `layers` columns/rows on each side.
    let x_left_cutoff = xl + (layers - 1) as f32 * x_spacing + x_spacing / 2.;
    let x_right_cutoff =
        x_arange[x_arange.len() - 1] - (layers - 1) as f32 * x_spacing - x_spacing / 2.;
    let y_bottom_cutoff = yl + (layers - 1) as f32 * y_spacing + y_spacing / 2.;
    // Keep a grid point when it lies in the left wall, the right wall, or
    // the floor. (The previously duplicated push branches are merged, and
    // the unused `y_arange` computation is removed.)
    let (mut x, mut y) = (vec![], vec![]);
    for i in 0..xg.len() {
        if xg[i] < x_left_cutoff || xg[i] > x_right_cutoff || yg[i] < y_bottom_cutoff {
            x.push(xg[i]);
            y.push(yg[i]);
        }
    }
    (x, y)
}
| true
|
1d74d18fc42d51eeb9a02ce47169bf343befc7cf
|
Rust
|
withoutboats/mock_io
|
/src/mock_io.rs
|
UTF-8
| 2,627
| 2.875
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::cmp;
use std::io::{self, BufRead, Read, Write};
use std::sync::{Arc, Mutex, MutexGuard};
use Lock;
/// A shared, clonable in-memory byte buffer that mocks an I/O endpoint.
///
/// Clones share the same underlying buffer via `Arc<Mutex<_>>`.
#[derive(Clone)]
pub struct MockIo {
    inner: Arc<Mutex<Vec<u8>>>
}

/// The error reported whenever the shared mutex is poisoned.
fn poison_error() -> io::Error {
    io::Error::new(io::ErrorKind::Other, "Mock IO mutex poisoned")
}

impl MockIo {
    /// Create a mock endpoint with an empty buffer.
    pub fn new() -> MockIo {
        MockIo {
            inner: Arc::new(Mutex::new(Vec::new()))
        }
    }

    /// A snapshot copy of the buffer's current contents.
    pub fn get_data(&self) -> io::Result<Vec<u8>> {
        let guard = self.inner.lock().map_err(|_| poison_error())?;
        Ok(guard.clone())
    }

    /// Replace the buffer's contents with `data`.
    pub fn set_data(&self, data: &[u8]) -> io::Result<()> {
        let mut guard = self.inner.lock().map_err(|_| poison_error())?;
        guard.clear();
        guard.extend_from_slice(data);
        Ok(())
    }
}
impl<'a> Lock<'a> for MockIo {
    type Lock = MockIoLock<'a>;

    /// Acquire the shared buffer's mutex, returning a guard wrapper that
    /// implements `BufRead`/`Read`/`Write` over the locked data.
    ///
    /// Unlike the `Read`/`Write` impls on `MockIo` itself, a poisoned
    /// mutex here panics instead of returning an error.
    fn lock(&'a self) -> MockIoLock<'a> {
        MockIoLock {
            inner: self.inner.lock().expect("Mock IO mutex poisoned")
        }
    }
}
impl Read for MockIo {
    /// Drain up to `buf.len()` bytes from the front of the shared buffer.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match self.inner.lock() {
            Ok(mut guard) => read(&mut guard, buf),
            Err(_) => Err(io::Error::new(io::ErrorKind::Other, "Mock IO mutex poisoned")),
        }
    }
}
impl Write for MockIo {
    /// Append `buf` to the shared buffer.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        match self.inner.lock() {
            Ok(mut guard) => write(&mut guard, buf),
            Err(_) => Err(io::Error::new(io::ErrorKind::Other, "Mock IO mutex poisoned")),
        }
    }

    /// Nothing is buffered beyond the Vec itself; flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> { Ok(()) }
}
/// Holds the mock buffer's mutex for the lifetime of the lock, exposing
/// buffered I/O traits over the locked data.
pub struct MockIoLock<'a> {
    // The live guard over the shared byte buffer.
    inner: MutexGuard<'a, Vec<u8>>,
}
impl<'a> BufRead for MockIoLock<'a> {
    /// Expose the whole remaining buffer as the read-ahead window.
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        Ok(self.inner.as_slice())
    }

    /// Remove the first `amt` bytes; dropping the `Drain` iterator
    /// performs the removal.
    fn consume(&mut self, amt: usize) {
        self.inner.drain(..amt);
    }
}
impl<'a> Read for MockIoLock<'a> {
    /// Drain up to `buf.len()` bytes from the front of the locked buffer.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        read(&mut self.inner, buf)
    }
}
impl<'a> Write for MockIoLock<'a> {
    /// Append `buf` to the locked buffer.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        write(&mut self.inner, buf)
    }
    /// Nothing is buffered beyond the Vec itself; flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> { Ok(()) }
}
/// Move up to `buf.len()` bytes from the front of the mock buffer into
/// `buf`, removing them from the buffer.
///
/// Returns the number of bytes moved (the smaller of the two lengths);
/// never fails.
fn read(mock: &mut MutexGuard<Vec<u8>>, buf: &mut [u8]) -> io::Result<usize> {
    let len = cmp::min(buf.len(), mock.len());
    // Bytes are `Copy`, so a plain memcpy suffices (was `clone_from_slice`).
    buf[..len].copy_from_slice(&mock[..len]);
    // Dropping the `Drain` iterator removes the consumed prefix (was a
    // no-op `fold` used purely to exhaust it).
    mock.drain(..len);
    Ok(len)
}
/// Append `buf` to the mock buffer; always succeeds with the full length.
fn write(mock: &mut MutexGuard<Vec<u8>>, buf: &[u8]) -> io::Result<usize> {
    let written = buf.len();
    mock.extend_from_slice(buf);
    Ok(written)
}
| true
|
1e7e8ad20dadc8ec08460306412ded9a0bd08222
|
Rust
|
chpio/vdom-rs
|
/vdom/src/vdom/node/tag.rs
|
UTF-8
| 5,229
| 2.734375
| 3
|
[] |
no_license
|
use super::*;
/// A virtual-DOM element node: a named tag with attributes and children.
///
/// `D` is the driver, which supplies per-tag storage (`D::TagStore`) for
/// driver-specific bookkeeping.
pub trait Tag<D>
where
    D: Driver,
{
    /// Whether the tag name is `&'static str` (fixed for this type).
    fn is_tag_static(&self) -> bool;
    /// The tag name, e.g. `"div"`.
    fn tag(&self) -> &str;
    /// Walk this tag's child nodes with `visitor`.
    fn visit_children<NV>(&mut self, visitor: &mut NV) -> Result<(), NV::Err>
    where
        NV: NodeVisitor<D>;
    /// Diff this tag's children against `ancestor`'s (presumably the
    /// previous version of this node — see `NodeDiffer`).
    fn diff_children<ND>(&mut self, ancestor: &mut Self, differ: &mut ND) -> Result<(), ND::Err>
    where
        ND: NodeDiffer<D>;
    /// Walk this tag's attributes with `visitor`.
    fn visit_attrs<NV>(&mut self, visitor: &mut NV) -> Result<(), NV::Err>
    where
        NV: AttrVisitor<D>;
    /// Diff this tag's attributes against `ancestor`'s.
    fn diff_attrs<AD>(&mut self, ancestor: &mut Self, differ: &mut AD) -> Result<(), AD::Err>
    where
        AD: AttrDiffer<D>;
    /// Mutable access to the driver's per-tag storage.
    fn driver_store(&mut self) -> &mut D::TagStore;
}
/// A tag whose name is a `&'static str`, fixed at construction.
pub struct TagStatic<D, C, A>
where
    D: Driver,
{
    tag: &'static str,
    children: C,
    attrs: A,
    driver_store: D::TagStore,
}

impl<D, C, A> TagStatic<D, C, A>
where
    D: Driver,
    C: Node<D>,
    A: AttrList<D>,
{
    /// Create a tag with the given name, attribute list and child nodes.
    pub fn new(tag: &'static str, attrs: A, children: C) -> TagStatic<D, C, A> {
        TagStatic {
            tag,
            children,
            attrs,
            driver_store: D::new_tag_store(),
        }
    }
}

impl<D, C, A> Tag<D> for TagStatic<D, C, A>
where
    D: Driver,
    C: Node<D>,
    A: AttrList<D>,
{
    fn is_tag_static(&self) -> bool {
        true
    }
    fn tag(&self) -> &str {
        self.tag
    }
    /// Visit children starting from a fresh sibling index of 0.
    fn visit_children<NV>(&mut self, visitor: &mut NV) -> Result<(), NV::Err>
    where
        NV: NodeVisitor<D>,
    {
        self.children.visit(&mut 0, visitor)
    }
    /// Diff children; both sides start from sibling index 0.
    fn diff_children<ND>(&mut self, ancestor: &mut Self, differ: &mut ND) -> Result<(), ND::Err>
    where
        ND: NodeDiffer<D>,
    {
        self.children
            .diff(&mut 0, &mut 0, &mut ancestor.children, differ)
    }
    fn visit_attrs<AV>(&mut self, visitor: &mut AV) -> Result<(), AV::Err>
    where
        AV: AttrVisitor<D>,
    {
        self.attrs.visit(visitor)
    }
    fn diff_attrs<AD>(&mut self, ancestor: &mut Self, differ: &mut AD) -> Result<(), AD::Err>
    where
        AD: AttrDiffer<D>,
    {
        self.attrs.diff(&mut ancestor.attrs, differ)
    }
    fn driver_store(&mut self) -> &mut D::TagStore {
        &mut self.driver_store
    }
}

impl<D, C, A> Node<D> for TagStatic<D, C, A>
where
    D: Driver,
    C: Node<D>,
    A: AttrList<D>,
{
    /// Report this tag to the visitor, then advance the sibling index.
    fn visit<NV>(&mut self, index: &mut usize, visitor: &mut NV) -> Result<(), NV::Err>
    where
        NV: NodeVisitor<D>,
    {
        visitor.on_tag(*index, self)?;
        *index += 1;
        Ok(())
    }
    fn diff<ND>(
        &mut self,
        curr_index: &mut usize,
        ancestor_index: &mut usize,
        ancestor: &mut Self,
        differ: &mut ND,
    ) -> Result<(), ND::Err>
    where
        ND: NodeDiffer<D>,
    {
        // Static tag names are fixed per construction site, so a mismatch
        // here indicates a tree-shape bug (debug builds only).
        debug_assert_eq!(self.tag, ancestor.tag);
        differ.on_tag(*curr_index, *ancestor_index, self, ancestor)?;
        *curr_index += 1;
        *ancestor_index += 1;
        Ok(())
    }
}
/// A tag whose name is chosen at runtime (owned or `'static` via `Cow`).
pub struct TagDyn<D, C, A>
where
    D: Driver,
{
    tag: Cow<'static, str>,
    children: C,
    attrs: A,
    driver_store: D::TagStore,
}

impl<D, C, A> TagDyn<D, C, A>
where
    D: Driver,
    C: Node<D>,
    A: AttrList<D>,
{
    /// Create a tag with the given name, attribute list and child nodes.
    /// `tag` may be a `&'static str` or an owned `String`.
    pub fn new<T>(tag: T, attrs: A, children: C) -> TagDyn<D, C, A>
    where
        T: Into<Cow<'static, str>>,
    {
        TagDyn {
            tag: tag.into(),
            children,
            attrs,
            driver_store: D::new_tag_store(),
        }
    }
}

impl<D, C, A> Tag<D> for TagDyn<D, C, A>
where
    D: Driver,
    C: Node<D>,
    A: AttrList<D>,
{
    fn is_tag_static(&self) -> bool {
        false
    }
    fn tag(&self) -> &str {
        self.tag.as_ref()
    }
    /// Visit children starting from a fresh sibling index of 0.
    fn visit_children<NV>(&mut self, visitor: &mut NV) -> Result<(), NV::Err>
    where
        NV: NodeVisitor<D>,
    {
        self.children.visit(&mut 0, visitor)
    }
    fn diff_children<ND>(&mut self, ancestor: &mut Self, differ: &mut ND) -> Result<(), ND::Err>
    where
        ND: NodeDiffer<D>,
    {
        self.children
            .diff(&mut 0, &mut 0, &mut ancestor.children, differ)
    }
    fn visit_attrs<AV>(&mut self, visitor: &mut AV) -> Result<(), AV::Err>
    where
        AV: AttrVisitor<D>,
    {
        self.attrs.visit(visitor)
    }
    fn diff_attrs<AD>(&mut self, ancestor: &mut Self, differ: &mut AD) -> Result<(), AD::Err>
    where
        AD: AttrDiffer<D>,
    {
        self.attrs.diff(&mut ancestor.attrs, differ)
    }
    fn driver_store(&mut self) -> &mut D::TagStore {
        &mut self.driver_store
    }
}

impl<D, C, A> Node<D> for TagDyn<D, C, A>
where
    D: Driver,
    C: Node<D>,
    A: AttrList<D>,
{
    /// Report this tag to the visitor, then advance the sibling index.
    fn visit<NV>(&mut self, index: &mut usize, visitor: &mut NV) -> Result<(), NV::Err>
    where
        NV: NodeVisitor<D>,
    {
        visitor.on_tag(*index, self)?;
        *index += 1;
        Ok(())
    }
    /// NOTE(review): unlike `TagStatic::diff`, there is no
    /// `debug_assert_eq!` on the tag names here — presumably because a
    /// dynamic name may legitimately change between renders and the differ
    /// must handle that; confirm against `NodeDiffer::on_tag`.
    fn diff<ND>(
        &mut self,
        curr_index: &mut usize,
        ancestor_index: &mut usize,
        ancestor: &mut Self,
        differ: &mut ND,
    ) -> Result<(), ND::Err>
    where
        ND: NodeDiffer<D>,
    {
        differ.on_tag(*curr_index, *ancestor_index, self, ancestor)?;
        *curr_index += 1;
        *ancestor_index += 1;
        Ok(())
    }
}
| true
|
282ce7b6edc61988efd2732b6e7fb1a077d8ad71
|
Rust
|
Arthurdw/Learning
|
/Personal/Rust/Rust-Learning/tuples.rs
|
UTF-8
| 247
| 3.1875
| 3
|
[] |
no_license
|
/// Demonstrates tuple indexing (including nested tuples) and destructuring.
fn main() {
    // `.5` selects the inner tuple; further indices drill deeper.
    let tuple = (10, 20, 30, 40, "test", (1, 2, (3, 5)));
    println!("{}", ((tuple.5).2).1);
    // Destructuring binds each element to its own name.
    let (a, b, c) = (1, 2, 3);
    println!("a is {}", a);
    println!("b is {}", b);
    println!("c is {}", c);
}
| true
|
f6c87632777f6c07211c62b7fcf0d867503d0f97
|
Rust
|
FreddyWordingham/dia
|
/src/sci/math/geom/dom/tree/construct.rs
|
UTF-8
| 4,630
| 2.796875
| 3
|
[] |
no_license
|
//! Constructor methods.
use crate::{
tree::{Cell, Settings},
Aabb, Bar, Collide, Grp, Mesh, Pos3, Set, SmoothTriangle,
};
impl<'a> Cell<'a> {
    /// Construct a new tree root cell.
    /// Root cell has a depth of zero.
    #[inline]
    #[must_use]
    pub fn new_root(sett: &Settings, surfs: &'a Set<Mesh>) -> Self {
        let mut boundary = Self::init_boundary(surfs);
        boundary.expand(sett.padding());
        // Flatten all meshes into one (group, triangle) candidate list.
        let mut tris = Vec::new();
        for (group, mesh) in surfs.map() {
            tris.reserve(mesh.tris().len());
            for tri in mesh.tris() {
                tris.push((group.as_str(), tri));
            }
        }
        // Progress-bar total = maximum possible leaf count, 8^max_depth.
        let mut pb = Bar::new("Growing tree", 8_u64.pow(sett.max_depth() as u32));
        let children = Self::init_children(sett, &boundary, 1, tris.as_slice(), &mut pb);
        pb.finish_with_message("Tree grown.");
        Self::Root { boundary, children }
    }

    /// Initialise the boundary encompassing all of the mesh vertices.
    /// Component-wise min/max over every mesh's own bounding box.
    #[inline]
    #[must_use]
    fn init_boundary(surfs: &Set<Mesh>) -> Aabb {
        let mut mins = None;
        let mut maxs = None;
        for mesh in surfs.map().values() {
            let (mesh_mins, mesh_maxs) = mesh.boundary().mins_maxs();
            // First mesh seeds the extrema; later meshes widen them.
            if mins.is_none() {
                mins = Some(mesh_mins);
            } else {
                for (grid_min, mesh_min) in mins.as_mut().unwrap().iter_mut().zip(mesh_mins.iter())
                {
                    if mesh_min < grid_min {
                        *grid_min = *mesh_min;
                    }
                }
            }
            if maxs.is_none() {
                maxs = Some(mesh_maxs);
            } else {
                for (grid_max, mesh_max) in maxs.as_mut().unwrap().iter_mut().zip(mesh_maxs.iter())
                {
                    if mesh_max > grid_max {
                        *grid_max = *mesh_max;
                    }
                }
            }
        }
        // Panics if `surfs` is empty — the set must contain at least one mesh.
        Aabb::new(mins.unwrap(), maxs.unwrap())
    }

    /// Initialise the children of a branching cell.
    /// Splits the parent box into its eight octants (n/p = negative/positive
    /// offset along x, y, z respectively).
    #[allow(clippy::similar_names)]
    #[inline]
    #[must_use]
    fn init_children(
        sett: &Settings,
        parent_boundary: &Aabb,
        depth: i32,
        potential_tris: &[(&'a Grp, &'a SmoothTriangle)],
        mut pb: &mut Bar,
    ) -> [Box<Self>; 8] {
        debug_assert!(depth <= sett.max_depth());
        debug_assert!(!potential_tris.is_empty());
        let hws = parent_boundary.half_widths();
        let mut make_child = |min_x: f64, min_y: f64, min_z: f64| {
            let min = Pos3::new(min_x, min_y, min_z);
            Box::new(Self::init_child(
                sett,
                Aabb::new(min, min + hws),
                depth,
                potential_tris,
                &mut pb,
            ))
        };
        let mins = parent_boundary.mins();
        let min_x = mins.x;
        let min_y = mins.y;
        let min_z = mins.z;
        let nnn = make_child(min_x, min_y, min_z);
        let pnn = make_child(min_x + hws.x, min_y, min_z);
        let npn = make_child(min_x, min_y + hws.y, min_z);
        let ppn = make_child(min_x + hws.x, min_y + hws.y, min_z);
        let nnp = make_child(min_x, min_y, min_z + hws.z);
        let pnp = make_child(min_x + hws.x, min_y, min_z + hws.z);
        let npp = make_child(min_x, min_y + hws.y, min_z + hws.z);
        let ppp = make_child(min_x + hws.x, min_y + hws.y, min_z + hws.z);
        [nnn, pnn, npn, ppn, nnp, pnp, npp, ppp]
    }

    /// Initialise a child cell.
    /// The cell becomes Empty (no triangles), a Leaf (few enough triangles
    /// or maximum depth reached), or a Branch that recurses further.
    #[inline]
    #[must_use]
    fn init_child(
        sett: &Settings,
        boundary: Aabb,
        depth: i32,
        potential_tris: &[(&'a Grp, &'a SmoothTriangle)],
        mut pb: &mut Bar,
    ) -> Self {
        debug_assert!(depth <= sett.max_depth());
        // Overlap tests use a padded copy of the boundary so triangles just
        // outside the cell are still captured.
        let mut detection_vol = boundary.clone();
        detection_vol.expand(sett.padding());
        let mut tris = Vec::new();
        for (group, tri) in potential_tris {
            if tri.overlap(&detection_vol) {
                tris.push((*group, *tri));
            }
        }
        // Terminating here skips this subtree's 8^(remaining depth)
        // potential leaves, so credit the progress bar with all of them.
        if tris.is_empty() {
            pb.block(8_u64.pow((sett.max_depth() - depth) as u32));
            return Self::Empty { boundary };
        }
        if (tris.len() <= sett.tar_tris()) || (depth >= sett.max_depth()) {
            pb.block(8_u64.pow((sett.max_depth() - depth) as u32));
            return Self::Leaf { boundary, tris };
        }
        let children = Self::init_children(sett, &boundary, depth + 1, &tris, &mut pb);
        Self::Branch { boundary, children }
    }
}
| true
|
df8834ffc18608029068a88f7a6ce81aa58d0a2d
|
Rust
|
clinuxrulz/sodium-rust-demo
|
/core/src/ecs/ecs_context.rs
|
UTF-8
| 1,374
| 2.921875
| 3
|
[] |
no_license
|
use ecs::Entity;
use ecs::Component;
use ecs::IsComponent;
use std::vec::Vec;
use std::ops::Fn;
/// An entity-component-system context: owns entities and their components
/// and exposes query and mutation primitives.
pub trait EcsContext {
    /// Execute `do_it` against this context as one transaction.
    fn transaction<F>(&mut self, do_it: F)
        where F: FnOnce(&mut Self);

    /// A copy of `entity`'s component of the given kind, if present.
    fn get_component<T: Clone + 'static>(&self, entity: &Entity, component: Component<T>) -> Option<T>;

    /// Run `k` on the component value (when present) and return its result.
    fn with_component<T: Clone + 'static, R, F: FnMut(&Self,&T)->R >(&self, entity: &Entity, component: Component<T>, k: &mut F) -> Option<R> {
        // `map` replaces the hand-rolled Some/None match.
        self.get_component(entity, component).map(|comp| k(self, &comp))
    }

    /// As `with_component`, but callable through a `&mut self` borrow.
    ///
    /// NOTE(review): the body is identical to `with_component` — `k` still
    /// only receives `&Self`. The `&mut self` receiver exists purely for
    /// call sites that hold a mutable borrow; confirm no mutable variant
    /// was intended.
    fn with_component_mut<T: Clone + 'static, R, F: FnMut(&Self,&T)->R >(&mut self, entity: &Entity, component: Component<T>, k: &mut F) -> Option<R> {
        self.get_component(entity, component).map(|comp| k(self, &comp))
    }

    /// Entities that are children of `entity` in the context's hierarchy.
    fn find_children_of(&self, entity: &Entity) -> Vec<Entity>;
    /// Entities currently carrying a component of the given kind.
    fn entities_with_component<T>(&self, component: Component<T>) -> Vec<Entity>;
    /// Allocate a fresh entity.
    fn create_entity(&mut self) -> Entity;
    /// Remove `entity` (presumably along with its components — confirm
    /// against implementations).
    fn destroy_entity(&mut self, entity: &Entity);
    /// Attach or replace `component` on `entity`.
    fn set_component<T: IsComponent + Clone + 'static>(&mut self, entity: &Entity, component: T);
    /// Detach the component of the given kind from `entity`, if present.
    fn unset_component<T>(&mut self, entity: &Entity, component: Component<T>);
}
| true
|
c804c6676e6b2cdf27b5bd60e345e6804369f636
|
Rust
|
Swiftaff/rust_timesnapper_checker
|
/src/settings_popup.rs
|
UTF-8
| 5,971
| 2.640625
| 3
|
[] |
no_license
|
extern crate native_windows_derive as nwd;
extern crate native_windows_gui as nwg;
use crate::config::*;
use nwd::NwgUi;
// Settings dialog for Timesnapper Checker. The `NwgUi` derive builds the
// window from the `#[nwg_*]` attributes; event attributes wire controls to
// the handler methods on the impl below.
#[derive(Default, NwgUi)]
pub struct SettingsPopup {
    //height roughly 30 * rows?
    #[nwg_control(size: (800, 250), position: (600, 600), title: "Timesnapper Checker Settings", flags: "WINDOW|VISIBLE")]
    #[nwg_events( OnWindowClose: [SettingsPopup::close], OnInit: [SettingsPopup::fonty] )]
    window: nwg::Window,
    //#[nwg_resource(size: 6, family: "Comic Sans")]
    //#[nwg_layout_item(layout: grid, row: 0, col: 0)]
    //font: nwg::Font,
    #[nwg_layout(parent: window, spacing: 1)]
    grid: nwg::GridLayout,
    // Invisible label abused as dirty-state storage: non-empty text means
    // there are unsaved changes (see `close`/`save`).
    #[nwg_control(text: "", flags:"NONE")]
    #[nwg_layout_item(layout: grid, row: 1, col: 0)]
    state_is_dirty: nwg::Label,
    //Settings ini path field
    #[nwg_control(text: "Settings.ini")]
    #[nwg_layout_item(layout: grid, row: 1, col: 0, col_span: 1)]
    ini_path_main_label: nwg::Label,
    #[nwg_control(text: &get_path_from_confy(), flags: "VISIBLE|DISABLED")]
    #[nwg_layout_item(layout: grid, row: 1, col: 1, col_span: 4)]
    ini_path: nwg::TextInput,
    #[nwg_resource(title:"Timesnapper Checker - Select Settings.ini",action: nwg::FileDialogAction::Open, filters: "Ini(*.ini)")]
    ini_path_file_dialog: nwg::FileDialog,
    #[nwg_control(text: "Select...",focus: true)]
    #[nwg_layout_item(layout: grid, row: 1, col: 5)]
    #[nwg_events( OnButtonClick: [SettingsPopup::ini_path_selector] )]
    ini_path_button_change: nwg::Button,
    #[nwg_control(text: "Timesnapper Checker needs to know where the Timesnapper 'Settings.ini' file is located.\r\nIt is usually here: C:\\Users\\%USERPROFILE%\\AppData\\Roaming\\TimeSnapper\\Settings.ini")]
    #[nwg_layout_item(layout: grid, row: 2, col: 1, col_span: 4, row_span: 2)]
    ini_path_help_label: nwg::Label,
    //filesize field
    #[nwg_control(text: "Blank filesize")]
    #[nwg_layout_item(layout: grid, row: 4, col: 0, col_span: 1)]
    filesize_main_label: nwg::Label,
    #[nwg_control(text: &get_blank_max_filesize_from_confy(), flags: "VISIBLE")]
    #[nwg_layout_item(layout: grid, row: 4, col: 1, col_span: 1)]
    #[nwg_events( OnTextInput: [SettingsPopup::filesize_dirty] )]
    filesize: nwg::TextInput,
    #[nwg_control(text: "Timesnapper checker identifies blank screengrabs by their filesize - which can vary depending on your settings.\r\nEnter a value such as 80000 = 80Kb")]
    #[nwg_layout_item(layout: grid, row: 5, col: 1, col_span: 5, row_span: 2)]
    filesize_help_label: nwg::Label,
    //save and cancel
    // Disabled until a change marks the dialog dirty.
    #[nwg_control(text: "Save Changes", enabled: false)]
    #[nwg_layout_item(layout: grid, row: 7, col: 4)]
    #[nwg_events( OnButtonClick: [SettingsPopup::save] )]
    button_save: nwg::Button,
    #[nwg_control(text: "Cancel")]
    #[nwg_layout_item(layout: grid, row: 7, col: 5)]
    #[nwg_events( OnButtonClick: [SettingsPopup::cancel] )]
    button_cancel: nwg::Button,
}
impl SettingsPopup {
    /// Build a Segoe UI font of the given point size and weight.
    fn get_font_of_size(&self, size: u32, is_bold: bool) -> nwg::Font {
        let mut font = Default::default();
        nwg::Font::builder()
            .size(size)
            .family("Segoe UI")
            .weight(if is_bold { 700 } else { 400 })
            .build(&mut font)
            .expect("Failed to build font")
        ;
        font
    }
    /// `OnInit` handler: apply fonts to all labels and inputs.
    fn fonty(&self) {
        self.ini_path_main_label
            .set_font(Some(&self.get_font_of_size(18, true)));
        self.ini_path_help_label
            .set_font(Some(&self.get_font_of_size(14, false)));
        self.ini_path
            .set_font(Some(&self.get_font_of_size(18, false)));
        self.ini_path_button_change
            .set_font(Some(&self.get_font_of_size(18, false)));
        //
        self.filesize_main_label
            .set_font(Some(&self.get_font_of_size(18, true)));
        self.filesize_help_label
            .set_font(Some(&self.get_font_of_size(14, false)));
        self.filesize
            .set_font(Some(&self.get_font_of_size(18, false)));
    }
    /// `OnTextInput` handler: mark the dialog dirty and enable Save.
    fn filesize_dirty(&self) {
        self.button_save.set_enabled(true);
        self.state_is_dirty.set_text("filesize changed");
    }
    /// Cancel button: close without saving.
    fn cancel(&self) {
        nwg::stop_thread_dispatch();
    }
    /// Window-close handler: prompt to save when there are unsaved changes.
    ///
    /// NOTE(review): several statements below take `&` of a unit-returning
    /// call (e.g. `&self.state_is_dirty.set_text("")`) and compare via
    /// `&result == &...`; the references are superfluous but harmless.
    fn close(&self) {
        if &self.state_is_dirty.text().len() > &(0 as usize) {
            &self.state_is_dirty.set_text("");
            let p = nwg::MessageParams {
                title: "Do you want to save the changes you made?",
                content: "Your changes will be lost if you don't save them.",
                buttons: nwg::MessageButtons::YesNoCancel,
                icons: nwg::MessageIcons::Warning,
            };
            let result = nwg::message(&p);
            //nwg::simple_message("About Timesnapper Checker", &format!("{:?}", &result));
            if &result == &nwg::MessageChoice::Yes {
                &self.save();
            } else if &result == &nwg::MessageChoice::No {
                nwg::stop_thread_dispatch();
            }
            // MessageChoice::Cancel falls through: the window stays open.
        } else {
            nwg::stop_thread_dispatch();
        }
    }
    /// Save button: persist both settings via confy and close the dialog.
    fn save(&self) {
        let result = save_to_confy(&self.ini_path.text(), &self.filesize.text());
        match result {
            Ok(_) => {
                nwg::simple_message("Timesnapper Checker - Saving settings", "Saved");
            }
            Err(e) => {
                nwg::error_message(
                    "Timesnapper Checker - Saving settings",
                    &format!("NOT saved - error: {:?}", e),
                );
            }
        }
        nwg::stop_thread_dispatch();
    }
    /// Select... button: file-picker for the Settings.ini path; a chosen
    /// file marks the dialog dirty and enables Save.
    fn ini_path_selector(&self) {
        if self.ini_path_file_dialog.run(Some(&self.window)) {
            if let Ok(file) = self.ini_path_file_dialog.get_selected_item() {
                self.ini_path.set_text(&file);
                self.button_save.set_enabled(true);
                self.state_is_dirty.set_text("path changed");
            }
        }
    }
}
| true
|
1747e0fd05b5a742f8c9baedcd7a61886d5346ac
|
Rust
|
bluseking/slitter
|
/src/press.rs
|
UTF-8
| 17,481
| 2.703125
| 3
|
[
"MIT"
] |
permissive
|
//! A `Press` creates new allocations for a given `Class`. The
//! allocations must be such that the `Press` can also map valid
//! addresses back to their `Class`.
//!
//! While each class gets its own press, the latter requirement means
//! that the presses must all implement compatible metadata stashing
//! schemes. This works because `Mill`s all use the same scheme.
//!
//! We enable mostly lock-free operations by guaranteeing that each
//! span and corresponding metadata is immortal once allocated.
#[cfg(any(
all(test, feature = "check_contracts_in_tests"),
feature = "check_contracts"
))]
use contracts::*;
#[cfg(not(any(
all(test, feature = "check_contracts_in_tests"),
feature = "check_contracts"
)))]
use disabled_contracts::*;
use std::alloc::Layout;
use std::ffi::c_void;
use std::mem::MaybeUninit;
use std::num::NonZeroUsize;
use std::ptr::NonNull;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::Mutex;
#[cfg(any(
all(test, feature = "check_contracts_in_tests"),
feature = "check_contracts"
))]
use crate::debug_allocation_map;
#[cfg(any(
all(test, feature = "check_contracts_in_tests"),
feature = "check_contracts"
))]
use crate::debug_arange_map;
#[cfg(any(
all(test, feature = "check_contracts_in_tests"),
feature = "check_contracts"
))]
use crate::debug_type_map;
use crate::linear_ref::LinearRef;
use crate::mill;
use crate::mill::Mill;
use crate::mill::SpanMetadata;
use crate::mill::MAX_SPAN_SIZE;
use crate::Class;
/// We batch-allocate at most this many elements at once. This limit
/// makes it clear that a 64-bit counter will not wraparound.
///
/// In practice, callers ask for one more than the magazine size, at
/// most, and that's less than this limit.
const MAX_ALLOCATION_BATCH: usize = 100;
static_assertions::const_assert!(
(crate::magazine_impl::MAGAZINE_SIZE as usize) < MAX_ALLOCATION_BATCH
);
/// We don't guarantee alignment greater than this value.
pub const MAX_OBJECT_ALIGNMENT: usize = 4096;
static_assertions::const_assert!(MAX_OBJECT_ALIGNMENT <= mill::MAX_SPAN_SIZE);
#[derive(Debug)]
pub struct Press {
    /// The current span that services bump pointer allocation.
    ///
    /// Starts out null (`Default`) until the first span is grabbed
    /// from the mill in `try_replace_span`.
    bump: AtomicPtr<SpanMetadata>,
    /// Writes to the bump itself (i.e., updating the `AtomicPtr`
    /// itself) go through this lock.
    mill: Mutex<&'static Mill>,
    /// Object layout, padded so size is a multiple of alignment
    /// (enforced in `Press::new`).
    layout: Layout,
    /// The allocation class this press produces objects for.
    class: Class,
}
/// Returns Ok if the allocation `address` might have come from a `Press` for `class`.
///
/// # Errors
///
/// Returns Err if the address definitely did not come from that `class`.
#[inline]
pub fn check_allocation(class: Class, address: usize) -> Result<(), &'static str> {
    // Derive the span metadata slot for this address, then compare its
    // recorded class id against the expected one.
    let meta = unsafe { SpanMetadata::from_allocation_address(address).as_mut() }
        .ok_or("Derived a bad metadata address")?;
    match meta.class_id {
        Some(id) if id == class.id() => Ok(()),
        _ => Err("Incorrect class id"),
    }
}
impl Press {
    /// Returns a fresh `Press` for an object `class` with that object
    /// `layout`, and the underlying mapper `mapper_name` (`None` for
    /// the default `Mapper` / `Mill`).
    ///
    /// All presses with the same `mapper_name` share the same `Mill`.
    ///
    /// # Errors
    ///
    /// Returns `Err` when the layout violates the allocator's constraints,
    /// or no mapper can be found for `mapper_name`.
    pub fn new(
        class: Class,
        mut layout: Layout,
        mapper_name: Option<&str>,
    ) -> Result<Self, &'static str> {
        if layout.align() > MAX_OBJECT_ALIGNMENT {
            return Err("slitter only supports alignment up to 4 KB");
        }

        // Pad the size up to the alignment so objects tile a span
        // without gaps.
        layout = layout.pad_to_align();
        assert_eq!(layout.size() % layout.align(), 0);

        if layout.size() > MAX_SPAN_SIZE / 2 {
            Err("Class elements too large (after alignment)")
        } else {
            Ok(Self {
                bump: Default::default(),
                mill: Mutex::new(mill::get_mill(mapper_name)?),
                layout,
                class,
            })
        }
    }

    /// Associates the `count` allocations starting at `begin` with `self.class`.
    /// (Contract-checking builds only.)
    #[cfg(any(
        all(test, feature = "check_contracts_in_tests"),
        feature = "check_contracts"
    ))]
    fn associate_range(&self, begin: usize, count: usize) -> Result<(), &'static str> {
        for i in 0..count {
            debug_type_map::associate_class(self.class, begin + i * self.layout.size())?;
        }

        Ok(())
    }

    /// Checks if the `count` allocations starting at `begin` are associated with `self.class`.
    /// (Contract-checking builds only.)
    #[cfg(any(
        all(test, feature = "check_contracts_in_tests"),
        feature = "check_contracts"
    ))]
    fn is_range_associated_and_free(&self, begin: usize, count: usize) -> Result<(), &'static str> {
        for i in 0..count {
            let address = NonNull::new((begin + i * self.layout.size()) as *mut c_void)
                .ok_or("allocated NULL pointer")?;

            debug_type_map::ptr_is_class(self.class, &address)?;
            debug_allocation_map::can_be_allocated(self.class, &address)?;
        }

        Ok(())
    }

    /// Checks that all `count` allocations starting at `begin` are associated with `self.class`.
    /// (Contract-checking builds only.)
    #[cfg(any(
        all(test, feature = "check_contracts_in_tests"),
        feature = "check_contracts"
    ))]
    fn check_allocation_range(&self, begin: usize, count: usize) -> Result<(), &'static str> {
        for i in 0..count {
            check_allocation(self.class, begin + i * self.layout.size())?;
        }

        Ok(())
    }

    /// Attempts to allocate up to `max_count` consecutive object by
    /// bumping the metadata pointer.
    ///
    /// Returns the address of the first object and the number of
    /// allocations on success.
    #[requires(debug_arange_map::is_metadata(meta as * mut SpanMetadata as usize,
                                             std::mem::size_of::<SpanMetadata>()).is_ok(),
               "The `meta` reference must come from a metadata range.")]
    #[ensures(ret.is_some() -> ret.unwrap().1.get() <= max_count.get(),
              "We never return more than `max_count` allocations.")]
    #[ensures(ret.is_some() -> ret.unwrap().0.get() as usize % self.layout.align() == 0,
              "The base address is correctly aligned.")]
    #[ensures(ret.is_some() -> self.associate_range(ret.unwrap().0.get(), ret.unwrap().1.get()).is_ok(),
              "On success, it must be possible to associate the returned address with `self.class`.")]
    #[ensures(ret.is_some() ->
              debug_arange_map::is_data(ret.unwrap().0.get(), self.layout.size() * ret.unwrap().1.get()).is_ok(),
              "On success, the returned data must come from a data range.")]
    #[ensures(ret.is_some() -> self.check_allocation_range(ret.unwrap().0.get(), ret.unwrap().1.get()).is_ok(),
              "On success, the allocations must all have the class metadata set up.")]
    fn try_allocate_from_span(
        &self,
        meta: &mut SpanMetadata,
        max_count: NonZeroUsize,
    ) -> Option<(NonZeroUsize, NonZeroUsize)> {
        // Never claim more than the batch cap in one bump.
        let desired = max_count.get().clamp(0, MAX_ALLOCATION_BATCH);
        let limit = meta.bump_limit as usize;

        // Optimistically claim `desired` slots; overshoot past `limit`
        // is detected below and simply yields fewer (or zero) objects.
        let allocated_id = meta.bump_ptr.fetch_add(desired, Ordering::Relaxed);
        if allocated_id >= limit {
            return None;
        }

        // This is our actual allocation count: our allocated range
        // starts at `allocated_id`, and stops at `allocated_id +
        // desired` (that's how much we acquired), or at `limit`
        // if we acquired more than the bump limit.
        let actual = (limit - allocated_id).clamp(0, desired);

        // `meta.bump_ptr` is incremented atomically, so
        // we always return fresh addresses.
        //
        // XXX: This expression has to satisfy the `ensures`
        // postconditions; they're checked in
        // `assert_new_bump_is_safe`, including the alignment
        // of `span_begin`.
        Some((
            NonZeroUsize::new(meta.span_begin + allocated_id * self.layout.size())?,
            NonZeroUsize::new(actual)?,
        ))
    }

    /// Asserts that every allocation in `bump` is valid for the
    /// allocation.
    #[cfg(any(
        all(test, feature = "check_contracts_in_tests"),
        feature = "check_contracts"
    ))]
    fn assert_new_bump_is_safe(&self, bump: *mut SpanMetadata) {
        assert!(
            debug_arange_map::is_metadata(bump as usize, std::mem::size_of::<SpanMetadata>())
                .is_ok()
        );

        let meta = unsafe { bump.as_mut() }.expect("must be valid");

        assert_eq!(meta.span_begin % self.layout.align(), 0);

        for i in 0..meta.bump_limit as usize {
            let address = meta.span_begin + i * self.layout.size();

            assert!(debug_arange_map::is_data(address, self.layout.size()).is_ok());
            assert!(check_allocation(self.class, address).is_ok());
        }
    }

    // No-op stand-in when contract checking is compiled out.
    #[cfg(not(any(
        all(test, feature = "check_contracts_in_tests"),
        feature = "check_contracts"
    )))]
    #[inline]
    fn assert_new_bump_is_safe(&self, _bump: *mut SpanMetadata) {}

    /// Attempts to replace our bump pointer with a new one.
    #[ensures(ret.is_ok() ->
              self.bump.load(Ordering::Relaxed) != old(self.bump.load(Ordering::Relaxed)),
              "On success, the bump Span has been updated.")]
    #[ensures(debug_arange_map::is_metadata(self.bump.load(Ordering::Relaxed) as usize,
                                            std::mem::size_of::<SpanMetadata>()).is_ok(),
              "The bump struct must point to a valid metadata range.")]
    fn try_replace_span(&self, expected: *mut SpanMetadata) -> Result<(), i32> {
        if self.bump.load(Ordering::Relaxed) != expected {
            // Someone else made progress.
            return Ok(());
        }

        let mill = self.mill.lock().unwrap();

        // Check again with the lock held, before allocating a new span.
        if self.bump.load(Ordering::Relaxed) != expected {
            return Ok(());
        }

        // Get a new span.  It must have enough bytes for one
        // allocation, but will usually have more (the default desired
        // size, nearly 1 MB).
        let range = mill.get_span(self.layout.size(), None)?;
        let meta: &mut _ = range.meta;

        // We should have a fresh Metadata struct before claiming it as ours.
        assert_eq!(meta.class_id, None);
        meta.class_id = Some(self.class.id());
        meta.bump_limit = (range.data_size / self.layout.size()) as u32;
        assert!(
            meta.bump_limit > 0,
            "layout.size > MAX_SPAN_SIZE, but we check for that in the constructor."
        );
        meta.bump_ptr = AtomicUsize::new(0);
        meta.span_begin = range.data as usize;

        // Make sure allocations in the trail are properly marked as being ours.
        for trailing_meta in range.trail {
            // This Metadata struct must not already be allocated.
            assert_eq!(trailing_meta.class_id, None);
            trailing_meta.class_id = Some(self.class.id());
        }

        // Publish the metadata for our fresh span.
        assert_eq!(self.bump.load(Ordering::Relaxed), expected);
        self.assert_new_bump_is_safe(meta);
        self.bump.store(meta, Ordering::Release);
        Ok(())
    }

    /// Attempts to allocate up to `max_count` objects.  Returns Ok()
    /// if we tried to allocate from the current bump region.
    ///
    /// On allocation success, returns Ok(Some(base_address, object_count))
    ///
    /// # Errors
    ///
    /// Returns `Err` if we failed to grab a new bump region.
    #[ensures(ret.is_ok() && ret.unwrap().is_some() ->
              ret.unwrap().unwrap().1.get() <= max_count.get(),
              "We never overallocate.")]
    #[ensures(ret.is_ok() && ret.unwrap().is_some() ->
              self.is_range_associated_and_free(ret.unwrap().unwrap().0.get(), ret.unwrap().unwrap().1.get()).is_ok(),
              "Successful allocations are fresh, or match the class and avoid double-allocation.")]
    #[ensures(ret.is_ok() && ret.unwrap().is_some() ->
              self.check_allocation_range(ret.unwrap().unwrap().0.get(), ret.unwrap().unwrap().1.get()).is_ok(),
              "Sucessful allocations must have the allocation metadata set correctly.")]
    fn try_allocate_once(
        &self,
        max_count: NonZeroUsize,
    ) -> Result<Option<(NonZeroUsize, NonZeroUsize)>, i32> {
        let meta_ptr: *mut SpanMetadata = self.bump.load(Ordering::Acquire);

        if let Some(meta) = unsafe { meta_ptr.as_mut() } {
            if let Some(result) = self.try_allocate_from_span(meta, max_count) {
                return Ok(Some(result));
            }
        }

        // Either we didn't find any span metadata, or bump
        // allocation failed.  Either way, let's try to put
        // a new span in.
        self.try_replace_span(meta_ptr).map(|_| None)
    }

    /// Tries to allocate up to `max_count` objects.  Only fails on OOM.
    #[ensures(ret.is_some() ->
              ret.unwrap().1.get() <= max_count.get(),
              "We never overallocate.")]
    #[ensures(ret.is_some() ->
              self.is_range_associated_and_free(ret.unwrap().0.get(), ret.unwrap().1.get()).is_ok(),
              "Successful allocations are fresh, or match the class and avoid double-allocation.")]
    #[ensures(ret.is_some() ->
              self.check_allocation_range(ret.unwrap().0.get(), ret.unwrap().1.get()).is_ok(),
              "Sucessful allocations must have the allocation metadata set correctly.")]
    fn try_allocate(&self, max_count: NonZeroUsize) -> Option<(NonZeroUsize, NonZeroUsize)> {
        // Retry until a span yields memory or the mill reports OOM.
        loop {
            match self.try_allocate_once(max_count) {
                Err(_) => return None, // TODO: log
                Ok(Some(result)) => return Some(result),
                _ => continue,
            }
        }
    }

    /// Allocates one object of this press's class, or `None` on OOM.
    #[ensures(ret.is_some() ->
              debug_allocation_map::can_be_allocated(self.class, ret.as_ref().unwrap().get()).is_ok(),
              "Successful allocations are fresh, or match the class and avoid double-allocation.")]
    #[ensures(ret.is_some() ->
              debug_type_map::is_class(self.class, ret.as_ref().unwrap()).is_ok(),
              "On success, the new allocation has the correct type.")]
    #[ensures(ret.is_some() ->
              check_allocation(self.class, ret.as_ref().unwrap().get().as_ptr() as usize).is_ok(),
              "Sucessful allocations must have the allocation metadata set correctly.")]
    pub fn allocate_one_object(&self) -> Option<LinearRef> {
        let (address, _count) = self.try_allocate(NonZeroUsize::new(1).unwrap())?;

        debug_assert_eq!(_count.get(), 1);
        Some(LinearRef::new(unsafe {
            NonNull::new_unchecked(address.get() as *mut c_void)
        }))
    }

    /// Attempts to allocate multiple objects: first the second return
    /// value, and then as many elements in `dst` as possible.
    ///
    /// Returns the number of elements populated in `dst` (starting
    /// at low indices), and an allocated object if possible.
    #[ensures(ret.1.is_some() ->
              debug_allocation_map::can_be_allocated(self.class, ret.1.as_ref().unwrap().get()).is_ok(),
              "Successful allocations are fresh, or match the class and avoid double-allocation.")]
    #[ensures(ret.1.is_some() ->
              debug_type_map::is_class(self.class, ret.1.as_ref().unwrap()).is_ok(),
              "On success, the new allocation has the correct type.")]
    #[ensures(ret.1.is_some() ->
              check_allocation(self.class, ret.1.as_ref().unwrap().get().as_ptr() as usize).is_ok(),
              "Sucessful allocations must have the allocation metadata set correctly.")]
    #[ensures(ret.1.is_none() -> ret.0 == 0,
              "We always try to satisfy the return value first.")]
    // We don't check `dst` because the contract expression would be
    // unsafe, but it's the same as `ret.1.is_some()` for all
    // populated elements.
    //
    // We do check the same invariants in the target `Magazine` via
    // `ClassInfo::refill_magazine`.
    pub fn allocate_many_objects(
        &self,
        dst: &mut [MaybeUninit<LinearRef>],
    ) -> (usize, Option<LinearRef>) {
        let elsize = self.layout.size();

        match self.try_allocate(NonZeroUsize::new(dst.len() + 1).expect("Should not overflow")) {
            Some((base, count)) => {
                let mut address = base.get();

                // Acquires the next element from `base[0..count]`.
                let mut get_ref = || {
                    let ret =
                        LinearRef::new(unsafe { NonNull::new_unchecked(address as *mut c_void) });

                    address += elsize;
                    ret
                };

                let ret = Some(get_ref());
                let mut populated = 0;

                for uninit in dst.iter_mut().take(count.get() - 1) {
                    unsafe { uninit.as_mut_ptr().write(get_ref()) };
                    populated += 1;
                }

                debug_assert!(populated <= count.get());
                (populated, ret)
            }
            None => (0, None),
        }
    }
}
| true
|
f4a4eb85bd512125dc703a21e2e665599e8e9c66
|
Rust
|
chansuke/redmine-rs
|
/src/utils/endpoint.rs
|
UTF-8
| 1,202
| 2.71875
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use anyhow::Result;
use crate::config::Config;
use crate::RmError;
/// Builds the Redmine REST endpoint URL for `sub_command`.
///
/// - `memberships` maps to `<base>/projects/<project>/memberships.json`,
/// - an empty `arg` maps to `<base>/<sub_command>.json`,
/// - otherwise `<base>/<sub_command>/<arg>.json`.
///
/// # Errors
///
/// Returns `RmError` when `REDMINE_BASE_URL` or `REDMINE_PROJECT` is not
/// configured.
pub(crate) fn build_endpoint(sub_command: &str, arg: &str) -> Result<String, RmError> {
    let baseurl = Config::get_env("REDMINE_BASE_URL".to_string())?;
    // NOTE(review): the project is fetched unconditionally (as before),
    // so a missing REDMINE_PROJECT fails even for non-membership calls.
    let project = Config::get_env("REDMINE_PROJECT".to_string())?;

    // Assign through a single expression instead of a deferred `let`.
    let endpoint = if sub_command == "memberships" {
        format!("{}/projects/{}/memberships.json", baseurl, project)
    } else if arg.is_empty() {
        format!("{}/{}.json", baseurl, sub_command)
    } else {
        format!("{}/{}/{}.json", baseurl, sub_command, arg)
    };
    Ok(endpoint)
}
/// Appends the `?key=<REDMINE_API_KEY>` query string to `endpoint`.
///
/// # Errors
///
/// Returns `RmError` when `REDMINE_API_KEY` is not configured.
pub(crate) fn append_apikey_clause(endpoint: &str) -> Result<String, RmError> {
    let apikey = Config::get_env("REDMINE_API_KEY".to_string())?;
    Ok(format!("{}?key={}", endpoint, apikey))
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::env;

    // NOTE(review): this mutates the process-wide environment, so it can
    // race with any other test reading REDMINE_BASE_URL when tests run
    // in parallel.
    #[test]
    fn test_build_endpoint() {
        env::set_var("REDMINE_BASE_URL", "https://test.redmine.org");
        assert!(
            build_endpoint("issues", "199").unwrap() == "https://test.redmine.org/issues/199.json"
        );
    }
}
| true
|
8a233729fcf631908f70bce0ae2ef03bb9832414
|
Rust
|
antonromanov1/dragon-book-compiler
|
/src/symbols.rs
|
UTF-8
| 1,217
| 2.8125
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use crate::ir::*;
use crate::lexer::*;
/// A lexical scope: a symbol table plus a link to the enclosing scope.
pub struct Env {
    // Identifiers declared in this scope, mapped to their IR ids.
    table: HashMap<WordBase, Id>,
    // Enclosing (outer) scope; `None` for the outermost scope.
    pub prev: Option<Box<Env>>,
}
impl Env {
    /// Creates a scope whose parent is `n` (`None` for the outermost scope).
    pub fn new(n: Option<Box<Env>>) -> Env {
        Env {
            table: HashMap::new(),
            prev: n,
        }
    }

    /// Declares identifier `w` with id `i` in this scope.
    pub fn put(&mut self, w: WordBase, i: Id) {
        self.table.insert(w, i);
    }

    /// Looks up `w` in this scope, then in each enclosing scope in turn.
    ///
    /// BUG FIX: the original bound `ptr` once to the immediate parent and
    /// kept re-checking *that same* table while advancing the chain cursor,
    /// so symbols declared in grandparent scopes were never found.  This
    /// version re-checks the table of each scope as it walks outward.
    pub fn get(&self, w: &WordBase) -> Option<Id> {
        let mut env = self;
        loop {
            if let Some(id) = env.table.get(w) {
                return Some(id.clone());
            }
            match &env.prev {
                Some(parent) => env = parent,
                None => return None,
            }
        }
    }
}
| true
|
c1dd7d1f58cbfd55080df3c41f6ef3012131c376
|
Rust
|
sm8082/class-codes-piaic-q3
|
/main.rs
|
UTF-8
| 1,261
| 2.5625
| 3
|
[] |
no_license
|
//#![deny(warnings)]
use std::{convert::Infallible, net::SocketAddr};
use hyper::{Body, Request, Response, Server};
use hyper::service::{make_service_fn, service_fn};
use hyper::{Method, StatusCode};
#[tokio::main]
async fn main() {
    // Serve on localhost:3000.
    let addr = SocketAddr::from(([127, 0, 0, 1], 3000));

    // One `run` service instance per connection.
    let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(run)) });

    if let Err(e) = Server::bind(&addr).serve(make_svc).await {
        eprintln!("server error: {}", e);
    }
}
/// Routes `POST /echo/reverse` (reverses the body bytes) and
/// `POST /echo/count/chars` (returns the byte count); anything else is 404.
async fn run(req: Request<Body>) -> Result<Response<Body>, hyper::Error> {
    let mut response = Response::new(Body::empty());

    match (req.method(), req.uri().path()) {
        (&Method::POST, "/echo/reverse") => {
            let body = hyper::body::to_bytes(req.into_body()).await?;
            // Reverse the raw bytes of the request body.
            let reversed: Vec<u8> = body.iter().rev().copied().collect();
            *response.body_mut() = reversed.into();
        }
        (&Method::POST, "/echo/count/chars") => {
            let body = hyper::body::to_bytes(req.into_body()).await?;
            // `len()` is O(1); the original `iter().count()` walked every byte.
            *response.body_mut() = body.len().to_string().into();
        }
        _ => {
            *response.status_mut() = StatusCode::NOT_FOUND;
        }
    };

    Ok(response)
}
| true
|
6adc0677542a10348d2f71f18c1805d4d6d4275e
|
Rust
|
candtechsoftware/orst
|
/src/bin/bench.rs
|
UTF-8
| 3,147
| 2.890625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use orst::*;
use rand::prelude::*;
use std::cell::Cell;
use std::cmp::Ordering;
use std::rc::Rc;
/// Wraps a sortable value and counts every comparison made through it.
#[derive(Clone)]
struct SortEvaluator<T> {
    // The value actually being sorted.
    t: T,
    // Shared comparison counter; each Eq/Ord comparison bumps it once.
    cmps: Rc<Cell<usize>>,
}
// Each comparison bumps the shared counter exactly once before deferring
// to the wrapped value.  (Do NOT rewrite `partial_cmp` as
// `Some(self.cmp(other))`: that would count twice per comparison.)
impl<T: PartialEq> PartialEq for SortEvaluator<T> {
    fn eq(&self, other: &Self) -> bool {
        self.cmps.set(self.cmps.get() + 1);
        self.t == other.t
    }
}
impl<T: Eq> Eq for SortEvaluator<T> {}
impl<T: PartialOrd> PartialOrd for SortEvaluator<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.cmps.set(self.cmps.get() + 1);
        self.t.partial_cmp(&other.t)
    }
}
impl<T: Ord> Ord for SortEvaluator<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.cmps.set(self.cmps.get() + 1);
        self.t.cmp(&other.t)
    }
}
// Radix-sort support: delegate byte extraction to the wrapped value.
// (Bytify is not a comparison, so it does not touch the counter.)
impl<T> Bytify for SortEvaluator<T>
where
    T: Bytify
{
    fn bytify(&self, level: usize) -> Option<usize>
    {
        return self.t.bytify(level);
    }
}
/// Benchmarks every sorter over random inputs of growing size, printing one
/// whitespace-separated record (`algorithm n comparisons time`) per run.
fn main() {
    let mut rng = rand::thread_rng();
    let counter = Rc::new(Cell::new(0));

    println!("algorithm n comparisons time");
    for &n in &[0, 1, 10, 100, 1000, 10000, 50000] {
        // Fresh random values for this input size; all share one counter.
        let mut values = Vec::with_capacity(n);
        for _ in 0..n {
            values.push(SortEvaluator {
                t: rng.gen::<usize>(),
                cmps: Rc::clone(&counter),
            });
        }

        // Ten shuffled repetitions per size to smooth out noise.
        for _ in 0..10 {
            values.shuffle(&mut rng);

            let report = |name: &str, (cmps, secs): (usize, f64)| {
                println!("{} {} {} {}", name, n, cmps, secs);
            };

            report("bubble", bench(BubbleSort, &values, &counter));
            report("insertion-smart", bench(InsertionSort { smart: true }, &values, &counter));
            report("insertion-dumb", bench(InsertionSort { smart: false }, &values, &counter));
            report("selection", bench(SelectionSort, &values, &counter));
            report("quick", bench(QuickSort, &values, &counter));
            report("radix", bench(RadixSort, &values, &counter));
            report("heap", bench(HeapSort, &values, &counter));
            report("stdstable", bench(StdSorter, &values, &counter));
            report("stdunstable", bench(StdUnstableSorter, &values, &counter));
        }
    }
}
/// Runs `sorter` over a copy of `values`, returning (comparison count,
/// elapsed seconds).  The counter is reset first and read *before* the
/// sortedness check, so verification comparisons are not counted.
fn bench<T: Ord + Clone, S: Sorter<SortEvaluator<T>>>(
    sorter: S,
    values: &[SortEvaluator<T>],
    counter: &Cell<usize>,
) -> (usize, f64) {
    let mut values: Vec<_> = values.to_vec();
    counter.set(0);
    let time = std::time::Instant::now();
    sorter.sort(&mut values);
    let took = time.elapsed();
    let count = counter.get();
    // Verify the output really is sorted (idiomatic `windows` form
    // replaces the original index loop).
    assert!(
        values.windows(2).all(|w| w[0] <= w[1]),
        "sorter produced unsorted output"
    );
    (count, took.as_secs_f64())
}
| true
|
18d20722ccb7555c1d450fe6bd7f43a29124237f
|
Rust
|
Patryk27/janet
|
/libs/database/src/lib.rs
|
UTF-8
| 2,427
| 2.65625
| 3
|
[
"MIT"
] |
permissive
|
#![feature(crate_visibility_modifier)]
pub use self::{config::*, cqrs::*, features::*, id::*};
mod config;
mod cqrs;
mod features;
mod id;
mod migrations;
use anyhow::*;
use sqlx::sqlite::SqliteConnectOptions;
use sqlx::{ConnectOptions, SqliteConnection};
use std::sync::Arc;
use tokio::sync::Mutex;
#[cfg(test)]
mod test_utils;
/// Handle to the application's SQLite database.
///
/// Cloning is cheap: all clones share one connection behind an async mutex.
#[derive(Clone)]
pub struct Database {
    conn: Arc<Mutex<SqliteConnection>>,
}
impl Database {
    /// Opens (or creates) the SQLite database at `config.path`, enables
    /// foreign keys and runs pending migrations.
    ///
    /// Logs a loud warning when the path selects an in-memory database,
    /// since such a database loses all data on restart.
    pub async fn new(config: DatabaseConfig) -> Result<Self> {
        if config.path.contains(":memory:") {
            tracing::warn!("");
            tracing::warn!("!! STARTING WITH AN IN-MEMORY DATABASE !!");
            tracing::warn!("");
            tracing::warn!("When you restart Janet, she'll forget everything.");
            tracing::warn!(
                "To get rid of this warning, please change `database.path` to point at a file."
            );
            tracing::warn!("");
        }

        let options = SqliteConnectOptions::new()
            .filename(&config.path)
            .foreign_keys(true)
            .statement_cache_capacity(0); // Statement cache is too overzealous and makes `DROP TABLE` statements fail

        let mut conn = options
            .connect()
            .await
            .context("Couldn't initialize SQLite")?;

        migrations::run(&mut conn)
            .await
            .context("Couldn't migrate the database")?;

        let conn = Arc::new(Mutex::new(conn));

        Ok(Database { conn })
    }

    /// Returns an in-memory database, intended for tests.
    pub async fn mock() -> Self {
        Self::new(DatabaseConfig {
            path: ":memory:".into(),
        })
        .await
        .unwrap()
    }

    /// Executes a write-side `Command` against this database.
    pub async fn execute<C: Command>(&self, command: C) -> Result<C::Output> {
        command.execute(self).await
    }

    /// Runs `query` and returns every matching model.
    pub async fn get_all<Q: Query>(&self, query: Q) -> Result<Vec<Q::Model>> {
        query.execute(self).await
    }

    /// Runs `query` and returns the first match, failing when none exists.
    pub async fn get_one<Q: Query>(&self, query: Q) -> Result<Q::Model> {
        match self.get_opt(query).await? {
            Some(model) => Ok(model),
            None => bail!("No models match given query"),
        }
    }

    /// Runs `query` and returns the first match, if any.
    pub async fn get_opt<Q: Query>(&self, query: Q) -> Result<Option<Q::Model>> {
        let model = self.get_all(query).await?.into_iter().next();

        Ok(model)
    }

    /// Acquires the shared connection for crate-internal use.
    crate async fn lock(&self) -> tokio::sync::MutexGuard<'_, SqliteConnection> {
        self.conn.lock().await
    }
}
| true
|
fe962404075c1a09df57c955ab5823698fd95884
|
Rust
|
martinregnerlarsen/rust-bf
|
/src/main.rs
|
UTF-8
| 3,653
| 3.4375
| 3
|
[] |
no_license
|
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
const MEMORY_SIZE: usize = 30000;
/// One Brainfuck instruction.
#[derive(PartialEq,Eq)]
enum Token {
    IncDP,     // `>` move data pointer right
    DecDP,     // `<` move data pointer left
    IncMemory, // `+` increment current cell
    DecMemory, // `-` decrement current cell
    Put,       // `.` output current cell as a char
    Read,      // `,` read input into current cell (unimplemented)
    While,     // `[` skip past matching `]` when cell is zero
    EndWhile   // `]` jump back to matching `[` when cell is non-zero
}
/// Builds the bracket jump table: `jumptable[i]` is the index of the
/// bracket matching the `[`/`]` at `i` (0 for non-bracket tokens).
///
/// Uses a single O(n) pass with a stack of open-bracket positions,
/// replacing the original O(n^2) forward scan per `[`.
///
/// # Panics
///
/// Panics on an unmatched `[` or `]` (the original silently left
/// unmatched `]` entries at 0, making the interpreter jump to pc 0).
fn calculate_jumptable(program: &Vec<Token>) -> Vec<usize> {
    let mut jumptable: Vec<usize> = vec![0; program.len()];
    let mut open_brackets: Vec<usize> = Vec::new();

    for (pc, instruction) in program.iter().enumerate() {
        match instruction {
            Token::While => open_brackets.push(pc),
            Token::EndWhile => match open_brackets.pop() {
                Some(open) => {
                    jumptable[open] = pc;
                    jumptable[pc] = open;
                }
                None => panic!("Unmatched ] at pc={}", pc),
            },
            _ => {}
        }
    }

    if let Some(open) = open_brackets.pop() {
        panic!("Unmatched [ at pc={}", open);
    }

    jumptable
}
/// Interprets a parsed Brainfuck `program` over a fixed 30 000-cell tape.
///
/// Cell values and the data pointer wrap on overflow; `.` prints the
/// current cell as a `char`; `,` is unimplemented and panics.
fn simpleinterp(program: &Vec<Token>) {
    let mut memory: Vec<u8> = vec![0; MEMORY_SIZE];
    // Precomputed matching-bracket positions for `[` / `]`.
    let jumptable = calculate_jumptable(&program);
    let mut pc: usize = 0;
    let mut dataptr: usize = 0;
    while pc < program.len() {
        let ref instruction: Token = program[pc];
        match instruction {
            &Token::IncDP => dataptr = dataptr.wrapping_add(1),
            &Token::DecDP => dataptr = dataptr.wrapping_sub(1),
            &Token::IncMemory => memory[dataptr] = memory[dataptr].wrapping_add(1),
            &Token::DecMemory => memory[dataptr] = memory[dataptr].wrapping_sub(1),
            &Token::Read => panic!(", not implemented"),
            &Token::Put => print!("{}", memory[dataptr] as char),
            &Token::While => {
                // Zero cell: jump to the matching `]` (then `pc += 1` below
                // moves past it).
                if memory[dataptr] == 0 {
                    pc = jumptable[pc];
                }
            },
            &Token::EndWhile => {
                // Non-zero cell: jump back to the matching `[`.
                if memory[dataptr] != 0 {
                    pc = jumptable[pc];
                }
            }
        }
        pc += 1;
    }
}
/// Reads the whole of `filename` into a `String`.
///
/// # Errors
///
/// Propagates any I/O error from opening or reading the file.
fn read_file(filename: &str) -> std::io::Result<String> {
    // `fs::read_to_string` replaces the manual File + BufReader +
    // read_to_string dance with one stdlib call.
    std::fs::read_to_string(filename)
}
/// Translates Brainfuck source into tokens, ignoring any character that
/// is not one of the eight instructions.
fn parse(input: &str) -> Vec<Token> {
    input
        .chars()
        .filter_map(|c| match c {
            '>' => Some(Token::IncDP),
            '<' => Some(Token::DecDP),
            '+' => Some(Token::IncMemory),
            '-' => Some(Token::DecMemory),
            '.' => Some(Token::Put),
            ',' => Some(Token::Read),
            '[' => Some(Token::While),
            ']' => Some(Token::EndWhile),
            _ => None, // comment characters are simply skipped
        })
        .collect()
}
/// Reads `filename`, parses it as Brainfuck, and interprets it.
fn open_and_parse(filename: &str) -> std::io::Result<()> {
    let source = read_file(filename)?;
    simpleinterp(&parse(&source));
    Ok(())
}
/// Runs the interpreter on the file named by the first CLI argument,
/// defaulting to `mandelbrot.bf` (the original hard-coded name), and
/// reports the elapsed time.
fn main() {
    use std::time::SystemTime;
    let filename = std::env::args()
        .nth(1)
        .unwrap_or_else(|| "mandelbrot.bf".to_string());
    let time = SystemTime::now();
    match open_and_parse(&filename) {
        Ok(_) => println!("Done!"),
        Err(err) => println!("Cannot read file {}: {}", filename, err),
    };
    println!("Duration: {:?}", time.elapsed().unwrap())
}
| true
|
7d61670a837ffa36c84ccddab6a4b000a9eba2fd
|
Rust
|
acmcarther/cargo-raze-examples
|
/internal/sources/non_cratesio_library/cargo/vendor/env_logger-0.5.5/src/fmt.rs
|
UTF-8
| 15,869
| 4
| 4
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! Formatting for log records.
//!
//! This module contains a [`Formatter`] that can be used to format log records
//! into without needing temporary allocations. Usually you won't need to worry
//! about the contents of this module and can use the `Formatter` like an ordinary
//! [`Write`].
//!
//! # Formatting log records
//!
//! The format used to print log records can be customised using the [`Builder::format`]
//! method.
//! Custom formats can apply different color and weight to printed values using
//! [`Style`] builders.
//!
//! ```
//! use std::io::Write;
//! use env_logger::fmt::Color;
//!
//! let mut builder = env_logger::Builder::new();
//!
//! builder.format(|buf, record| {
//! let mut level_style = buf.style();
//!
//! level_style.set_color(Color::Red).set_bold(true);
//!
//! writeln!(buf, "{}: {}",
//! level_style.value(record.level()),
//! record.args())
//! });
//! ```
//!
//! [`Formatter`]: struct.Formatter.html
//! [`Style`]: struct.Style.html
//! [`Builder::format`]: ../struct.Builder.html#method.format
//! [`Write`]: https://doc.rust-lang.org/stable/std/io/trait.Write.html
use std::io::prelude::*;
use std::{io, fmt};
use std::rc::Rc;
use std::cell::RefCell;
use std::time::SystemTime;
use termcolor::{ColorSpec, ColorChoice, Buffer, BufferWriter, WriteColor};
use atty;
use humantime::format_rfc3339_seconds;
pub use termcolor::Color;
/// A formatter to write logs into.
///
/// `Formatter` implements the standard [`Write`] trait for writing log records.
/// It also supports terminal colors, through the [`style`] method.
///
/// # Examples
///
/// Use the [`writeln`] macro to easily format a log record:
///
/// ```
/// use std::io::Write;
///
/// let mut builder = env_logger::Builder::new();
///
/// builder.format(|buf, record| writeln!(buf, "{}: {}", record.level(), record.args()));
/// ```
///
/// [`Write`]: https://doc.rust-lang.org/stable/std/io/trait.Write.html
/// [`writeln`]: https://doc.rust-lang.org/stable/std/macro.writeln.html
/// [`style`]: #method.style
pub struct Formatter {
    // Output buffer, shared via `Rc<RefCell<_>>` with `Style` values
    // (which hold the same `Rc`) so styled output lands in one place.
    buf: Rc<RefCell<Buffer>>,
    // How eagerly to emit terminal styling escapes.
    write_style: WriteStyle,
}
/// A set of styles to apply to the terminal output.
///
/// Call [`Formatter::style`] to get a `Style` and use the builder methods to
/// set styling properties, like [color] and [weight].
/// To print a value using the style, wrap it in a call to [`value`] when the log
/// record is formatted.
///
/// # Examples
///
/// Create a bold, red colored style and use it to print the log level:
///
/// ```
/// use std::io::Write;
/// use env_logger::fmt::Color;
///
/// let mut builder = env_logger::Builder::new();
///
/// builder.format(|buf, record| {
/// let mut level_style = buf.style();
///
/// level_style.set_color(Color::Red).set_bold(true);
///
/// writeln!(buf, "{}: {}",
/// level_style.value(record.level()),
/// record.args())
/// });
/// ```
///
/// Styles can be re-used to output multiple values:
///
/// ```
/// use std::io::Write;
/// use env_logger::fmt::Color;
///
/// let mut builder = env_logger::Builder::new();
///
/// builder.format(|buf, record| {
/// let mut bold = buf.style();
///
/// bold.set_bold(true);
///
/// writeln!(buf, "{}: {} {}",
/// bold.value(record.level()),
/// bold.value("some bold text"),
/// record.args())
/// });
/// ```
///
/// [`Formatter::style`]: struct.Formatter.html#method.style
/// [color]: #method.set_color
/// [weight]: #method.set_bold
/// [`value`]: #method.value
#[derive(Clone)]
pub struct Style {
    // Same buffer as the owning `Formatter`, so styled values write in place.
    buf: Rc<RefCell<Buffer>>,
    // Accumulated color/weight settings to apply when a value is printed.
    spec: ColorSpec,
}
/// A value that can be printed using the given styles.
///
/// It is the result of calling [`Style::value`].
///
/// [`Style::value`]: struct.Style.html#method.value
pub struct StyledValue<'a, T> {
    // Borrowed style to wrap around the value's formatted output.
    style: &'a Style,
    // The value to print.
    value: T,
}
/// An [RFC3339] formatted timestamp.
///
/// The timestamp implements [`Display`] and can be written to a [`Formatter`].
///
/// [RFC3339]: https://www.ietf.org/rfc/rfc3339.txt
/// [`Display`]: https://doc.rust-lang.org/stable/std/fmt/trait.Display.html
/// [`Formatter`]: struct.Formatter.html
pub struct Timestamp(SystemTime);
/// Log target, either `stdout` or `stderr`.
// `Default` is implemented manually below and selects `Stderr`.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum Target {
    /// Logs will be sent to standard output.
    Stdout,
    /// Logs will be sent to standard error.
    Stderr,
}
impl Default for Target {
fn default() -> Self {
Target::Stderr
}
}
/// Whether or not to print styles to the target.
// `Default` is implemented manually below and selects `Auto`.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum WriteStyle {
    /// Try to print styles, but don't force the issue.
    Auto,
    /// Try very hard to print styles.
    Always,
    /// Never print styles.
    Never,
}
impl Default for WriteStyle {
fn default() -> Self {
WriteStyle::Auto
}
}
/// A terminal target with color awareness.
pub(crate) struct Writer {
    // Buffered, color-capable writer for the chosen stream.
    inner: BufferWriter,
    // Whether styles should be emitted when printing.
    write_style: WriteStyle,
}
impl Writer {
    /// The style choice this writer was built with.
    pub(crate) fn write_style(&self) -> WriteStyle {
        self.write_style
    }
}
/// A builder for a terminal writer.
///
/// The target and style choice can be configured before building.
pub(crate) struct Builder {
    // Which standard stream the writer will print to.
    target: Target,
    // Whether styles should be written (auto/always/never).
    write_style: WriteStyle,
}
impl Builder {
    /// Initialize the writer builder with defaults
    /// (`Target::Stderr`, `WriteStyle::Auto`).
    pub fn new() -> Self {
        Builder {
            target: Target::default(),
            write_style: WriteStyle::default(),
        }
    }
    /// Set the target to write to.
    pub fn target(&mut self, target: Target) -> &mut Self {
        self.target = target;
        self
    }
    /// Parse a style choice string.
    ///
    /// See the [Disabling colors] section for more details.
    ///
    /// [Disabling colors]: ../index.html#disabling-colors
    pub fn parse(&mut self, write_style: &str) -> &mut Self {
        self.write_style(parse_write_style(write_style))
    }
    /// Whether or not to print style characters when writing.
    pub fn write_style(&mut self, write_style: WriteStyle) -> &mut Self {
        self.write_style = write_style;
        self
    }
    /// Build a terminal writer for the configured target and style choice.
    pub fn build(&mut self) -> Writer {
        let stream = match self.target {
            Target::Stderr => atty::Stream::Stderr,
            Target::Stdout => atty::Stream::Stdout,
        };
        // `Auto` only keeps colors when the target is attached to a TTY;
        // `Always`/`Never` skip the TTY probe entirely.
        let choice = match self.write_style {
            WriteStyle::Always => ColorChoice::Always,
            WriteStyle::Never => ColorChoice::Never,
            WriteStyle::Auto if atty::is(stream) => ColorChoice::Auto,
            WriteStyle::Auto => ColorChoice::Never,
        };
        let inner = match self.target {
            Target::Stderr => BufferWriter::stderr(choice),
            Target::Stdout => BufferWriter::stdout(choice),
        };
        Writer {
            inner,
            write_style: self.write_style,
        }
    }
}
impl Default for Builder {
fn default() -> Self {
Builder::new()
}
}
impl Style {
    /// Set the text color.
    ///
    /// Returns `&mut Style` so calls can be chained, e.g.
    /// `style.set_color(Color::Red).set_bold(true)`.
    pub fn set_color(&mut self, color: Color) -> &mut Style {
        self.spec.set_fg(Some(color));
        self
    }
    /// Set the text weight.
    ///
    /// If `yes` is true then text will be written in bold.
    /// If `yes` is false then text will be written in the default weight.
    /// Returns `&mut Style` for chaining.
    pub fn set_bold(&mut self, yes: bool) -> &mut Style {
        self.spec.set_bold(yes);
        self
    }
    /// Set the text intensity.
    ///
    /// If `yes` is true then text will be written in a brighter color.
    /// If `yes` is false then text will be written in the default color.
    /// Returns `&mut Style` for chaining.
    pub fn set_intense(&mut self, yes: bool) -> &mut Style {
        self.spec.set_intense(yes);
        self
    }
    /// Set the background color.
    ///
    /// Returns `&mut Style` for chaining.
    pub fn set_bg(&mut self, color: Color) -> &mut Style {
        self.spec.set_bg(Some(color));
        self
    }
    /// Wrap a value in the style.
    ///
    /// The same `Style` can be used to print multiple different values.
    ///
    /// # Examples
    ///
    /// Create a bold, red colored style and use it to print the log level:
    ///
    /// ```
    /// use std::io::Write;
    /// use env_logger::fmt::Color;
    ///
    /// let mut builder = env_logger::Builder::new();
    ///
    /// builder.format(|buf, record| {
    ///     let mut style = buf.style();
    ///
    ///     style.set_color(Color::Red).set_bold(true);
    ///
    ///     writeln!(buf, "{}: {}",
    ///         style.value(record.level()),
    ///         record.args())
    /// });
    /// ```
    pub fn value<T>(&self, value: T) -> StyledValue<T> {
        // `self` is already `&Style`; the previous `&self` produced a
        // `&&Style` that only compiled thanks to deref coercion
        // (clippy::needless_borrow).
        StyledValue {
            style: self,
            value,
        }
    }
}
impl Formatter {
    /// Wrap a fresh buffer from `writer`, remembering its style choice.
    pub(crate) fn new(writer: &Writer) -> Self {
        let buf = Rc::new(RefCell::new(writer.inner.buffer()));
        Formatter {
            buf,
            write_style: writer.write_style(),
        }
    }
    /// The style choice this formatter was created with.
    pub(crate) fn write_style(&self) -> WriteStyle {
        self.write_style
    }
    /// Begin a new [`Style`] backed by this formatter's buffer.
    ///
    /// The returned style starts from an empty `ColorSpec`; configure it
    /// with the `set_*` builder methods and then wrap values in it with
    /// [`Style::value`] when formatting a record.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::io::Write;
    /// use env_logger::fmt::Color;
    ///
    /// let mut builder = env_logger::Builder::new();
    ///
    /// builder.format(|buf, record| {
    ///     let mut level_style = buf.style();
    ///
    ///     level_style.set_color(Color::Red).set_bold(true);
    ///
    ///     writeln!(buf, "{}: {}",
    ///         level_style.value(record.level()),
    ///         record.args())
    /// });
    /// ```
    ///
    /// [`Style`]: struct.Style.html
    /// [`Style::value`]: struct.Style.html#method.value
    pub fn style(&self) -> Style {
        Style {
            buf: Rc::clone(&self.buf),
            spec: ColorSpec::new(),
        }
    }
    /// Capture the current date and time as a [`Timestamp`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::io::Write;
    ///
    /// let mut builder = env_logger::Builder::new();
    ///
    /// builder.format(|buf, record| {
    ///     let ts = buf.timestamp();
    ///
    ///     writeln!(buf, "{}: {}: {}", ts, record.level(), record.args())
    /// });
    /// ```
    ///
    /// [`Timestamp`]: struct.Timestamp.html
    pub fn timestamp(&self) -> Timestamp {
        Timestamp(SystemTime::now())
    }
    /// Flush the buffered record to the terminal writer.
    pub(crate) fn print(&self, writer: &Writer) -> io::Result<()> {
        writer.inner.print(&self.buf.borrow())
    }
    /// Discard any buffered output so the formatter can be reused.
    pub(crate) fn clear(&mut self) {
        self.buf.borrow_mut().clear()
    }
}
impl Write for Formatter {
    // Both operations delegate to the shared in-memory buffer.
    fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
        self.buf.borrow_mut().write(bytes)
    }
    fn flush(&mut self) -> io::Result<()> {
        self.buf.borrow_mut().flush()
    }
}
impl<'a, T> StyledValue<'a, T> {
    /// Run `f` with the style's color spec applied to the shared buffer,
    /// then reset the terminal style afterwards.
    fn write_fmt<F>(&self, f: F) -> fmt::Result
    where
        F: FnOnce() -> fmt::Result,
    {
        self.style
            .buf
            .borrow_mut()
            .set_color(&self.style.spec)
            .map_err(|_| fmt::Error)?;
        // Always attempt the reset, even when the write itself failed,
        // and report the first failure of the two.
        let wrote = f();
        let reset = self.style.buf.borrow_mut().reset().map_err(|_| fmt::Error);
        wrote.and(reset)
    }
}
impl fmt::Debug for Timestamp {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Adapter that renders the timestamp via its `Display` impl so the
        // debug output reads `Timestamp(<rfc3339>)` rather than raw fields.
        struct DisplayAdapter<'a>(&'a Timestamp);
        impl<'a> fmt::Debug for DisplayAdapter<'a> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                fmt::Display::fmt(self.0, f)
            }
        }
        f.debug_tuple("Timestamp")
            .field(&DisplayAdapter(self))
            .finish()
    }
}
impl fmt::Debug for Writer {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Only the type name is printed; the fields are omitted.
        f.debug_struct("Writer").finish()
    }
}
impl fmt::Debug for Formatter {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Only the type name is printed; the fields are omitted.
        f.debug_struct("Formatter").finish()
    }
}
impl fmt::Debug for Builder {
    /// Debug-format the builder, showing its target and style choice.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Previously this printed the struct name as "Logger", which was
        // misleading in debug output; report the actual type name.
        f.debug_struct("Builder")
            .field("target", &self.target)
            .field("write_style", &self.write_style)
            .finish()
    }
}
impl fmt::Debug for Style {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Show only the color spec; the shared buffer handle is omitted.
        f.debug_struct("Style").field("spec", &self.spec).finish()
    }
}
// Forward each listed formatting trait from `StyledValue<T>` to the wrapped
// value, bracketing the write with the style's set/reset sequence via
// `StyledValue::write_fmt`.
macro_rules! impl_styled_value_fmt {
    ($($fmt_trait:path),*) => {
        $(
            impl<'a, T: $fmt_trait> $fmt_trait for StyledValue<'a, T> {
                fn fmt(&self, f: &mut fmt::Formatter)->fmt::Result {
                    self.write_fmt(|| T::fmt(&self.value, f))
                }
            }
        )*
    };
}
// Styled values support every standard formatting trait their inner value does.
impl_styled_value_fmt!(
    fmt::Debug,
    fmt::Display,
    fmt::Pointer,
    fmt::Octal,
    fmt::Binary,
    fmt::UpperHex,
    fmt::LowerHex,
    fmt::UpperExp,
    fmt::LowerExp);
impl fmt::Display for Timestamp {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Render as RFC3339 with second precision.
        let rendered = format_rfc3339_seconds(self.0);
        rendered.fmt(f)
    }
}
/// Map a style-choice string to a `WriteStyle`.
///
/// Unrecognized values fall back to the default, which is `Auto`
/// (see `impl Default for WriteStyle`), so "auto" folds into the
/// catch-all arm.
fn parse_write_style(spec: &str) -> WriteStyle {
    match spec {
        "always" => WriteStyle::Always,
        "never" => WriteStyle::Never,
        _ => WriteStyle::Auto,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn parse_write_style_valid() {
        // Each recognized spec maps to its matching variant.
        let cases = [
            ("auto", WriteStyle::Auto),
            ("always", WriteStyle::Always),
            ("never", WriteStyle::Never),
        ];
        for &(input, expected) in &cases {
            assert_eq!(expected, parse_write_style(input));
        }
    }
    #[test]
    fn parse_write_style_invalid() {
        // Anything unrecognized falls back to `Auto`.
        for input in &["", "true", "false", "NEVER!!"] {
            assert_eq!(WriteStyle::Auto, parse_write_style(input));
        }
    }
}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.