blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
3866a730bafe206efef29b538fbf44284be3a2b9
|
Rust
|
snow2flying/ostrich
|
/experiments/src/http.rs
|
UTF-8
| 10,159
| 2.734375
| 3
|
[] |
no_license
|
// // Example on how to use the Hyper server in !Send mode.
// // The clients are harder, see https://github.com/hyperium/hyper/issues/2341 for details
// //
// // Essentially what we do is we wrap our types around the Tokio traits. The
// // !Send limitation makes it harder to deal with high level hyper primitives but
// // it works in the end.
// use futures_lite::{AsyncRead, AsyncWrite, Future};
// use hyper::service::service_fn;
// use std::{
// net::SocketAddr,
// pin::Pin,
// task::{Context, Poll},
// };
// use hyper::{server::conn::Http, Body, Request, Response, Method, StatusCode};
// use std::{io, rc::Rc};
// use std::convert::Infallible;
// use glommio::net::{TcpListener, TcpStream};
// use glommio::sync::Semaphore;
// use glommio::{Local, enclose, Task};
//
// #[derive(Clone)]
// struct HyperExecutor;
//
// impl<F> hyper::rt::Executor<F> for HyperExecutor
// where
// F: Future + 'static,
// F::Output: 'static,
// {
// fn execute(&self, fut: F) {
// Task::local(fut).detach();
// }
// }
//
// struct HyperStream(pub TcpStream);
// impl tokio::io::AsyncRead for HyperStream {
// fn poll_read(
// mut self: Pin<&mut Self>,
// cx: &mut Context,
// buf: &mut [u8],
// ) -> Poll<io::Result<usize>> {
// Pin::new(&mut self.0).poll_read(cx, buf)
// }
// }
//
// impl tokio::io::AsyncWrite for HyperStream {
// fn poll_write(
// mut self: Pin<&mut Self>,
// cx: &mut Context,
// buf: &[u8],
// ) -> Poll<io::Result<usize>> {
// Pin::new(&mut self.0).poll_write(cx, buf)
// }
//
// fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<io::Result<()>> {
// Pin::new(&mut self.0).poll_flush(cx)
// }
//
// fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<io::Result<()>> {
// Pin::new(&mut self.0).poll_close(cx)
// }
// }
//
// pub async fn serve_http<S, F, R, A>(
// addr: A,
// service: S,
// max_connections: usize,
// ) -> io::Result<()>
// where
// S: FnMut(Request<Body>) -> F + 'static + Copy,
// F: Future<Output = Result<Response<Body>, R>> + 'static,
// R: std::error::Error + 'static + Send + Sync,
// A: Into<SocketAddr>,
// {
// let listener = TcpListener::bind(addr.into())?;
// let conn_control = Rc::new(Semaphore::new(max_connections as _));
// loop {
// match listener.accept().await {
// Err(x) => {
// return Err(x.into());
// }
// Ok(stream) => {
// let addr = stream.local_addr().unwrap();
// Local::local(enclose!{(conn_control) async move {
// let _permit = conn_control.acquire_permit(1).await;
// if let Err(x) = Http::new().with_executor(HyperExecutor).serve_connection(HyperStream(stream), service_fn(service)).await {
// panic!("Stream from {:?} failed with error {:?}", addr, x);
// }
// }}).detach();
// }
// }
// }
// }
//
//
// pub async fn hyper_demo(req: Request<Body>) -> Result<Response<Body>, Infallible> {
// match (req.method(), req.uri().path()) {
// (&Method::GET, "/hello") => Ok(Response::new(Body::from("world"))),
// (&Method::GET, "/world") => Ok(Response::new(Body::from("hello"))),
// _ => Ok(Response::builder()
// .status(StatusCode::NOT_FOUND)
// .body(Body::from("notfound"))
// .unwrap()),
// }
// }
use std::future::Future;
use std::io;
use std::io::ErrorKind;
use std::io::Result;
use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};
use async_std::io::{Read, Write};
use async_std::net::{TcpListener, TcpStream};
use async_std::stream::Stream;
use async_std::task;
use async_tls::{TlsAcceptor, TlsConnector};
use async_tls::client::TlsStream as ClientTlsStream;
use async_tls::server::TlsStream as ServerTlsStream;
use hyper::client::connect::{Connected, Connection};
use hyper::service::Service;
use rustls::ClientConfig;
// use tonic::transport::Uri;
use hyper::Uri;
use futures_lite::future::ready;
use tokio::io::ReadBuf;
use futures_lite::AsyncRead;
/// Executor adapter that lets hyper spawn its internal futures on the
/// async-std runtime.
#[derive(Clone)]
pub struct HyperExecutor;
impl<F> hyper::rt::Executor<F> for HyperExecutor
where
    F: Future + Send + 'static,
    F::Output: Send + 'static,
{
    /// Hands the future to async-std's global executor; the task is detached
    /// (its JoinHandle is dropped) and runs to completion on its own.
    fn execute(&self, future: F) {
        task::spawn(future);
    }
}
/// Pairs a TCP listener with a TLS acceptor so hyper can accept TLS-wrapped
/// connections via the `Accept` impl below.
pub struct HyperListener {
    // Performs the server-side TLS handshake on each accepted TCP stream.
    pub tls_acceptor: TlsAcceptor,
    // Source of raw inbound TCP connections.
    pub tcp_listener: TcpListener,
}
impl hyper::server::accept::Accept for HyperListener {
    type Conn = HyperStream<ServerTlsStream<TcpStream>>;
    type Error = io::Error;
    /// Polls for the next inbound connection and drives the TLS handshake.
    ///
    /// NOTE(review): both `incoming()` and `tls_acceptor.accept(stream)` build
    /// a *fresh* future on every poll; if either returns `Pending`, the
    /// in-progress state is dropped and restarted on the next wakeup. Confirm
    /// this cannot stall or redo handshakes under load.
    fn poll_accept(
        mut self: Pin<&mut Self>,
        cx: &mut Context,
    ) -> Poll<Option<Result<Self::Conn>>> {
        // `task::ready!` returns `Pending` early when the inner poll is Pending.
        // NOTE(review): `.unwrap()` assumes the incoming stream never yields
        // `None` — confirm against async-std's `Incoming` contract.
        let stream = task::ready!(Pin::new(&mut self.tcp_listener.incoming()).poll_next(cx)).unwrap()?;
        // Debug trace left in place; consider a proper logger before release.
        println!("accept");
        let stream = task::ready!(Pin::new(&mut self.tls_acceptor.accept(stream)).poll(cx));
        match stream {
            Err(err) => Poll::Ready(Some(Err(err))),
            Ok(stream) => Poll::Ready(Some(Ok(HyperStream(stream))))
        }
    }
}
/// Newtype adapter that implements tokio's I/O traits on top of a futures-style stream.
pub struct HyperStream<T>(pub T);
impl<T> tokio::io::AsyncRead for HyperStream<T>
where T: AsyncRead + Unpin + Send
{
    /// Bridges the futures-style `poll_read` (which returns a byte count)
    /// to tokio's `ReadBuf`-based contract by advancing the buffer cursor
    /// ourselves after a successful read.
    fn poll_read(mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>,) -> Poll<Result<()>> {
        // Read into the uninitialized tail of the buffer.
        let poll = Pin::new(&mut self.0).poll_read(cx, buf.initialize_unfilled());
        // On success, record how many bytes were actually filled.
        poll.map(|result| result.map(|filled| buf.advance(filled)))
    }
}
impl<T> tokio::io::AsyncWrite for HyperStream<T>
where T: Write + Unpin + Send
{
    /// Forwards writes straight to the wrapped stream.
    fn poll_write(mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8]) -> Poll<Result<usize>> {
        let inner = Pin::new(&mut self.0);
        inner.poll_write(cx, buf)
    }
    /// Forwards flushes straight to the wrapped stream.
    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<()>> {
        let inner = Pin::new(&mut self.0);
        inner.poll_flush(cx)
    }
    /// tokio's `shutdown` maps to the futures-style `close` operation.
    fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<()>> {
        let inner = Pin::new(&mut self.0);
        inner.poll_close(cx)
    }
}
/// Marks client-side TLS streams as usable hyper client connections.
impl Connection for HyperStream<ClientTlsStream<TcpStream>> {
    fn connected(&self) -> Connected {
        // No extra metadata (e.g. peer address) is attached here.
        Connected::new()
    }
}
/*pub struct HyperServerStream(pub ServerTlsStream<TcpStream>);
impl tokio::io::AsyncRead for HyperServerStream {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
Pin::new(&mut self.0).poll_read(cx, buf)
}
}
impl tokio::io::AsyncWrite for HyperServerStream {
fn poll_write(
mut self: Pin<&mut Self>,
cx: &mut Context,
buf: &[u8],
) -> Poll<io::Result<usize>> {
Pin::new(&mut self.0).poll_write(cx, buf)
}
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<io::Result<()>> {
Pin::new(&mut self.0).poll_flush(cx)
}
fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<io::Result<()>> {
Pin::new(&mut self.0).poll_close(cx)
}
}
pub struct HyperClientStream(pub ClientTlsStream<TcpStream>);
impl tokio::io::AsyncRead for HyperClientStream {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
Pin::new(&mut self.0).poll_read(cx, buf)
}
}
impl tokio::io::AsyncWrite for HyperClientStream {
fn poll_write(
mut self: Pin<&mut Self>,
cx: &mut Context,
buf: &[u8],
) -> Poll<io::Result<usize>> {
Pin::new(&mut self.0).poll_write(cx, buf)
}
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<io::Result<()>> {
Pin::new(&mut self.0).poll_flush(cx)
}
fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<io::Result<()>> {
Pin::new(&mut self.0).poll_close(cx)
}
}
impl Connection for HyperClientStream {
fn connected(&self) -> Connected {
let connected = Connected::new();
if let Ok(remote_addr) = self.0.get_ref().peer_addr() {
connected.extra(remote_addr)
} else {
connected
}
}
}*/
/// hyper `Service` that establishes TLS-over-TCP client connections.
#[derive(Clone)]
pub struct HyperConnector {
    // Shared rustls-based connector used for every outbound handshake.
    tls_connector: TlsConnector,
}
// NOTE(review): `Unpin` is normally derived automatically; this explicit
// (safe) impl is likely redundant — confirm it is still needed.
impl Unpin for HyperConnector {}
impl Service<Uri> for HyperConnector {
    type Response = HyperStream<ClientTlsStream<TcpStream>>;
    type Error = std::io::Error;
    type Future = Pin<Box<dyn Future<Output=io::Result<Self::Response>>>>;
    /// Always ready: connection setup happens lazily in `call`.
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<()>> {
        Poll::Ready(Ok(()))
    }
    /// Extracts the URI's authority, dials it over TCP, then upgrades the
    /// socket to TLS using the host name for the handshake.
    fn call(&mut self, req: Uri) -> Self::Future {
        // Guard clause: a URI without an authority cannot be dialled.
        let authority = match req.authority() {
            Some(authority) => authority,
            None => {
                let err = io::Error::new(ErrorKind::AddrNotAvailable, format!("{} is invalid", req));
                return Box::pin(ready(Err(err)));
            }
        };
        let host = authority.host().to_string();
        let addr = authority.to_string();
        let connector = self.tls_connector.clone();
        Box::pin(async move {
            let tcp = TcpStream::connect(addr).await?;
            let tls = connector.connect(host, tcp).await?;
            Ok(HyperStream(tls))
        })
    }
}
impl From<ClientConfig> for HyperConnector {
fn from(cfg: ClientConfig) -> Self {
Self {
tls_connector: TlsConnector::from(Arc::new(cfg))
}
}
}
| true
|
a8d14fd8e7e86ba955a3b754885a16ba2da0691f
|
Rust
|
Disasm/zissou
|
/stm32-rs/stm32l4/src/stm32l4x3/i2c1/oar1.rs
|
UTF-8
| 8,972
| 2.796875
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the hardware register.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value staged for the next register write.
    bits: u32,
}
impl super::OAR1 {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: snapshot the register, let the closure edit a
        // writer seeded with that snapshot, then store the result back.
        let current = self.register.get();
        let reader = R { bits: current };
        let mut writer = W { bits: current };
        f(&reader, &mut writer);
        self.register.set(writer.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        let bits = self.register.get();
        R { bits }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, writes start from the reset value, not the
        // current register contents.
        let mut writer = W::reset_value();
        f(&mut writer);
        self.register.set(writer.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = r" Value of the field"]
pub struct OA1R {
    // Extracted 10-bit own-address value (bits 0:9 of OAR1).
    bits: u16,
}
impl OA1R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u16 {
        self.bits
    }
}
#[doc = "Possible values of the field `OA1MODE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum OA1MODER {
    #[doc = "Own address 1 is a 7-bit address"]
    BIT7,
    #[doc = "Own address 1 is a 10-bit address"]
    BIT10,
}
impl OA1MODER {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // BIT10 is the set (1) state, BIT7 the cleared (0) state.
        matches!(*self, OA1MODER::BIT10)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> OA1MODER {
        if value {
            OA1MODER::BIT10
        } else {
            OA1MODER::BIT7
        }
    }
    #[doc = "Checks if the value of the field is `BIT7`"]
    #[inline]
    pub fn is_bit7(&self) -> bool {
        matches!(*self, OA1MODER::BIT7)
    }
    #[doc = "Checks if the value of the field is `BIT10`"]
    #[inline]
    pub fn is_bit10(&self) -> bool {
        matches!(*self, OA1MODER::BIT10)
    }
}
#[doc = "Possible values of the field `OA1EN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum OA1ENR {
    #[doc = "Own address 1 disabled. The received slave address OA1 is NACKed"]
    DIASBLED,
    #[doc = "Own address 1 enabled. The received slave address OA1 is ACKed"]
    ENABLED,
}
impl OA1ENR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // ENABLED is the set (1) state. The variant name `DIASBLED` is a
        // generator typo, preserved because it is part of the public API.
        matches!(*self, OA1ENR::ENABLED)
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> OA1ENR {
        if value {
            OA1ENR::ENABLED
        } else {
            OA1ENR::DIASBLED
        }
    }
    #[doc = "Checks if the value of the field is `DIASBLED`"]
    #[inline]
    pub fn is_diasbled(&self) -> bool {
        matches!(*self, OA1ENR::DIASBLED)
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline]
    pub fn is_enabled(&self) -> bool {
        matches!(*self, OA1ENR::ENABLED)
    }
}
#[doc = r" Proxy"]
pub struct _OA1W<'a> {
    w: &'a mut W,
}
impl<'a> _OA1W<'a> {
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bits(self, value: u16) -> &'a mut W {
        // 10-bit field (mask 0x03FF == 1023) at bit offset 0: clear the
        // field, then OR in the masked new value.
        const MASK: u16 = 0x03ff;
        const OFFSET: u8 = 0;
        let cleared = self.w.bits & !((MASK as u32) << OFFSET);
        self.w.bits = cleared | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `OA1MODE`"]
pub enum OA1MODEW {
    #[doc = "Own address 1 is a 7-bit address"]
    BIT7,
    #[doc = "Own address 1 is a 10-bit address"]
    BIT10,
}
impl OA1MODEW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // BIT10 encodes as 1, BIT7 as 0.
        matches!(*self, OA1MODEW::BIT10)
    }
}
#[doc = r" Proxy"]
pub struct _OA1MODEW<'a> {
    w: &'a mut W,
}
impl<'a> _OA1MODEW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: OA1MODEW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Own address 1 is a 7-bit address"]
    #[inline]
    pub fn bit7(self) -> &'a mut W {
        self.variant(OA1MODEW::BIT7)
    }
    #[doc = "Own address 1 is a 10-bit address"]
    #[inline]
    pub fn bit10(self) -> &'a mut W {
        self.variant(OA1MODEW::BIT10)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at offset 10: clear the bit, then set it from `value`.
        const OFFSET: u8 = 10;
        let cleared = self.w.bits & !(1u32 << OFFSET);
        self.w.bits = cleared | ((value as u32) << OFFSET);
        self.w
    }
}
#[doc = "Values that can be written to the field `OA1EN`"]
pub enum OA1ENW {
    #[doc = "Own address 1 disabled. The received slave address OA1 is NACKed"]
    DIASBLED,
    #[doc = "Own address 1 enabled. The received slave address OA1 is ACKed"]
    ENABLED,
}
impl OA1ENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // ENABLED encodes as 1; `DIASBLED` (generator typo, kept as public
        // API) encodes as 0.
        matches!(*self, OA1ENW::ENABLED)
    }
}
#[doc = r" Proxy"]
pub struct _OA1ENW<'a> {
    w: &'a mut W,
}
impl<'a> _OA1ENW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: OA1ENW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Own address 1 disabled. The received slave address OA1 is NACKed"]
    #[inline]
    pub fn diasbled(self) -> &'a mut W {
        self.variant(OA1ENW::DIASBLED)
    }
    #[doc = "Own address 1 enabled. The received slave address OA1 is ACKed"]
    #[inline]
    pub fn enabled(self) -> &'a mut W {
        self.variant(OA1ENW::ENABLED)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at offset 15: clear the bit, then set it from `value`.
        const OFFSET: u8 = 15;
        let cleared = self.w.bits & !(1u32 << OFFSET);
        self.w.bits = cleared | ((value as u32) << OFFSET);
        self.w
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:9 - Interface address"]
    #[inline]
    pub fn oa1(&self) -> OA1R {
        // Extract the low 10 bits (mask 0x03FF == 1023) at offset 0.
        let bits = (self.bits & 0x03ff) as u16;
        OA1R { bits }
    }
    #[doc = "Bit 10 - Own Address 1 10-bit mode"]
    #[inline]
    pub fn oa1mode(&self) -> OA1MODER {
        let set = self.bits & (1 << 10) != 0;
        OA1MODER::_from(set)
    }
    #[doc = "Bit 15 - Own Address 1 enable"]
    #[inline]
    pub fn oa1en(&self) -> OA1ENR {
        let set = self.bits & (1 << 15) != 0;
        OA1ENR::_from(set)
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // OAR1 resets to all-zero.
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    // Safety: the caller must ensure `bits` is a valid value for this register.
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:9 - Interface address"]
    #[inline]
    pub fn oa1(&mut self) -> _OA1W {
        _OA1W { w: self }
    }
    #[doc = "Bit 10 - Own Address 1 10-bit mode"]
    #[inline]
    pub fn oa1mode(&mut self) -> _OA1MODEW {
        _OA1MODEW { w: self }
    }
    #[doc = "Bit 15 - Own Address 1 enable"]
    #[inline]
    pub fn oa1en(&mut self) -> _OA1ENW {
        _OA1ENW { w: self }
    }
}
| true
|
41004d155e359e6236149995a1404cd8c635bdde
|
Rust
|
rtalwar26/rust-playground
|
/src/models/mod.rs
|
UTF-8
| 150
| 2.890625
| 3
|
[] |
no_license
|
/// Minimal model of an animal, identified only by its species name.
pub struct Animal{
    // Species label, e.g. "dog"; private, set by the constructor helpers below.
    species:String
}
/// Builds an `Animal` with species "dog", printing the species as a side effect.
pub fn dog()->Animal{
    let animal = Animal {
        species: "dog".to_owned(),
    };
    println!("{}", animal.species);
    animal
}
| true
|
720509bb515f5845c619ded9389f72ec3ed865df
|
Rust
|
U007D/aoc
|
/2017/day_01/src/main.rs
|
UTF-8
| 5,059
| 2.6875
| 3
|
[] |
no_license
|
#![warn(clippy::all)]
#![forbid(overflowing_literals,)]
#![deny(unsafe_code)] // Do not remove! Change to `allow` to explicitly opt-in to using `unsafe` (facilitates auditing)
// vvv Safety-critical application lints (pedantic: use for safety-critical applications only) vvv
#![deny(clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss,
clippy::cast_sign_loss, clippy::float_cmp_const, clippy::indexing_slicing, clippy::integer_arithmetic,
clippy::maybe_infinite_iter, clippy::option_unwrap_used, clippy::result_unwrap_used,)]
// ^^^ End of safety-critical lint section ^^^
// Uncomment before ship to reconcile use of possibly redundant crates and uncover possible debug remnants
// #![warn(clippy::multiple_crate_versions, clippy::print_on_stdout, clippy::unimplemented, clippy::use_debug)]
#![allow(clippy::match_bool,)]
#![feature(never_type, try_trait, associated_type_defaults, self_in_typedefs)]
mod consts;
mod error;
#[cfg(test)]
mod unit_tests;
use self::error::Error;
use std::result::Result as StdResult;
type Result<T> = StdResult<T, Error>;
pub const BASE_10: u32 = 10;
/// Folds over `iter`, adding each digit to the running sum whenever it equals
/// the immediately preceding digit. `init_val` is `(running_sum, previous_digit)`.
/// Returns the final sum; the sum becomes (and stays) `None` once an addition
/// overflows `u32`.
fn sum_matching_digits_iter(init_val: (Option<u32>, Option<u32>), iter: impl Iterator<Item = char>) -> Option<u32> {
    iter.fold(init_val, |(sum, prev), curr| {
        match (curr.to_digit(BASE_10), prev) {
            (Some(curr_d), Some(prev_d)) => {
                // The current digit becomes "previous" for the next step.
                let prev = Some(curr_d);
                match curr_d == prev_d {
                    true => (sum.and_then(|s: u32| {
                        // Checked add: poison the sum with `None` on overflow.
                        match s.overflowing_add(curr_d) {
                            (s, false) => Some(s),
                            _ => None,
                        }
                    }), prev),
                    false => (sum, prev),
                }
            },
            // First char, or a non-digit: keep the sum; "previous" becomes
            // the current char's digit value (None for a non-digit).
            curr_prev => (sum, curr_prev.0),
        }
    }).0
}
/// AoC 2017 day 1 "captcha": sums every digit that matches the next digit in
/// the *circular* sequence. The circle is closed by appending the first char
/// at the end — but only when the input has more than one char, so a lone
/// digit does not spuriously match itself. Returns `None` on `u32` overflow.
pub fn sum_matching_digits(digits: impl AsRef<str>) -> Option<u32> {
    // Idiom cleanup of the original: `nth(0)` -> `next()`, and the
    // `and_then(|_| Some(c))` chain -> `filter`. Behavior is unchanged.
    let wrap_around = {
        let mut chars = digits.as_ref().chars();
        let first = chars.next();
        // Keep `first` only if a second char exists (input length > 1).
        first.filter(|_| chars.next().is_some())
    };
    // `Option<char>` is an iterator of 0 or 1 chars, so `chain` closes the circle.
    sum_matching_digits_iter((Some(0), None), digits.as_ref().chars().chain(wrap_around))
}
/// Entry point: computes and prints the matching-digits sum of the puzzle input.
fn main() -> Result<()> {
    // Puzzle input; `\` line continuations skip the newline and following
    // whitespace, so the literal is one unbroken digit string.
    let digits = "59945212267958384861888721899525514753529291453572849834636789447772281393981176491298438538371242283\
536895511781293535483317797837429153613432291415383346882548197148136644392687919782155536777728388533288353454\
847112297674777299484733912287764864566862651148756865369264986344956956922521593739716315435946569544941171492\
946488766611575348519389339547876121464365711831444946799524523259892124812191396861381393149158527746287184435\
324155247766428771317633594138229866193128628896894723977769686621487531877677937626541334293495153243338777879\
254655415885849888271366763761288878191616724671425792619954827318789792845732465336888352263526911221698478329\
435137589241942323459887267417892473791843197823877576131387428178263163762334435218578816782286948636819714454\
426632514231841776289778999639199975294683549535486129666995267186491327899225845245566977151331633764632562251\
818332576928213316655326812882169494512768444191542454234341418349139518545512533397855333959498151156228115659\
992525552349445544739123596743798621824256951875934523637245915419927666513111752172181449986911218568829738251\
623685641567269899399934129635368315931969976769929426735713361645359273712298232369372937823963182378797156129\
563177151877573978153466354544121831986426375775286323938139645146813441628141225887958651697881216553533192337\
988117967658524434247835524195414811321323444878357578884681965437368333429457188678554934224355113483437113116\
243997444828323859985928647952719725775485849674339173222967529921277199644533764146655761968299456649418564937\
687949119845374452272856577163179746494175865283954887899466899149727322882766653561798897835574818194546993543\
175554176914948448128522325511897513864846384282968714361394896161929542677944412569297838396525192858352387361\
429972451893638493564546456631513141248856619194514476289649967972477811968917871716481694278942827687762756891\
241918117511355676923135716636372142986253676559695756998511213818728728757749991728395216178458473589662642911\
753873744644255665144264991668133927686772333566467522733985418141425236514155213632674145648863798636993238872\
78761615927993953372779567675";
    // `?` on the `Option` result relies on the `try_trait` feature to convert
    // a `None` (overflow) into this crate's `Error`.
    println!("The matching-digits sum of \"{}\" is {}", digits, sum_matching_digits(digits)?);
    Ok(())
}
| true
|
8de09bcb473148a4970c49af4beb8370d001d570
|
Rust
|
bencecile/absolute_defence
|
/src/lib.rs
|
UTF-8
| 4,993
| 3.109375
| 3
|
[] |
no_license
|
mod math;
mod render;
mod world;
use wasm_bindgen::prelude::*;
use web_sys::{
CanvasRenderingContext2d,
};
use crate::{
math::{
probability::{RollTable},
},
render::{DrawingKit},
world::{Enemy, EnemyType, World},
};
/// `println!`-style logging to the browser console, active only in debug builds.
macro_rules! log {
    ( $( $t:tt )* ) => {
        // `cfg!` is a compile-time constant, so release builds drop the call.
        if cfg!(debug_assertions) {
            web_sys::console::log_1(&format!( $( $t )* ).into());
        }
    }
}
/// One-time initialization, exported to JavaScript.
#[wasm_bindgen]
pub fn first_time_setup() {
    // Route Rust panics to the browser console when the feature is enabled.
    #[cfg(feature = "console_error_panic_hook")]
    console_error_panic_hook::set_once();
    log!("First time setup complete");
}
/// Applies the game's default canvas drawing style: white fill, black strokes
/// with a 10px line width.
#[wasm_bindgen]
pub fn init_context(context: &CanvasRenderingContext2d) {
    let fill = "white".into();
    let stroke = "black".into();
    context.set_fill_style(&fill);
    context.set_stroke_style(&stroke);
    context.set_line_width(10.0);
}
/// Top-level game state exposed to JavaScript.
#[wasm_bindgen]
pub struct GameState {
    // All game entities and world-level data (player, enemies, spawn tables).
    world: World,
    // The state driving the current update/render cycle.
    state: StateType,
    // Set during `update`; applied at the start of the following update
    // (i.e. after the next render).
    next_state: Option<StateType>,
}
#[wasm_bindgen]
impl GameState {
    // TODO Check for some save data here first
    // TODO Save the RNG seed so that people can't just restart the game to get a better seed
    /// Creates the state a brand-new game starts in.
    pub fn starting_state() -> GameState {
        // TODO Probably make this roll table from the player's progress somehow
        let enemy_roll_table = RollTable::new(&[(1.0, EnemyType::Slime)]);
        let world = World::new(enemy_roll_table);
        GameState {
            world,
            state: StateType::default(),
            next_state: None,
        }
    }
    /// Advances the game by `time_delta` milliseconds.
    pub fn update(&mut self, time_delta: f64) {
        // Convert it to seconds (from milliseconds)
        let time_delta = time_delta * 1e-3;
        // Apply any state switch requested by the previous update pass.
        if let Some(state) = self.next_state.take() {
            log!("Switching to state ({:?})", state);
            self.state = state;
        }
        self.next_state = self.state.update(&mut self.world, time_delta);
    }
    /// Renders the current state onto the canvas.
    pub fn render(&self, context: &CanvasRenderingContext2d, width: f64, height: f64) {
        let drawing_kit = DrawingKit::new(context, width, height);
        // Fix: was `self.worl`, a typo that does not compile.
        self.state.render(&self.world, drawing_kit);
    }
}
// The top-level mode of the game loop; `Copy` so it can be passed by value
// into `update`/`render`.
#[derive(Copy, Clone, Debug)]
enum StateType {
    // TODO May want a start screen or something
    /// Where the player can buy equipment and where they go after they die
    Staging,
    /// The player is progressing through the world and enemies are spawning then attacking
    Progressing,
    /// The game is paused. The menus should still be useable but no progress is happening.
    /// The screen should have a dark film painted overtop of it
    Paused,
}
impl StateType {
    /// The current state can be changed for the next update (after the next render)
    ///
    /// Runs one simulation step against `world`; `time_delta` is in seconds.
    fn update(self, world: &mut World, time_delta: f64) -> Option<StateType> {
        match self {
            Self::Staging => {
                todo!();
            },
            Self::Progressing => {
                world.try_spawn_enemy();
                // 1. Check the enemy ranges to see if they're attacking
                // 2. If an enemy is attacking, face the player towards the closest one
                //    Else move the player
                // 3. All attacking enemies can now perform their attacks
                //    If not attacking, update their movement
                // 1
                let (attacking_enemies, closest_enemy) = world.determine_attacking_enemies();
                // 2
                // Fix: `StateType` has no `world` field; the original used
                // `self.world.…`, which does not compile — the world is the
                // `world` parameter.
                if let Some(enemy_index) = closest_enemy {
                    world.player.face_towards(world.enemies[enemy_index].position());
                } else {
                    world.player.update_movement(time_delta);
                }
                // 3
                let player_position = world.player.position();
                for i in 0..attacking_enemies.len() {
                    let enemy = &mut world.enemies[i];
                    // Since the player moved, we need all enemies to start facing them
                    enemy.turn_towards(player_position);
                    if attacking_enemies[i] {
                        enemy.attack(time_delta, &mut world.player);
                    } else {
                        // TODO Get the enemy's old position to make sure the enemy can't go too far. Capping at 1/2 their range is probably good
                        enemy.update_movement(time_delta);
                    }
                }
                None
            },
            Self::Paused => {
                todo!();
            },
        }
    }
    /// Draws the current state's view of `world`.
    fn render<'a>(self, world: &World, drawing_kit: DrawingKit<'a>) {
        match self {
            Self::Staging => {
                todo!();
            },
            Self::Progressing => {
                // Repaint from scratch every frame.
                drawing_kit.clear();
                drawing_kit.draw_world(world);
            },
            Self::Paused => {
                todo!();
            },
        }
    }
}
impl Default for StateType {
    // TODO We will want to start in Staging
    /// The state a fresh game currently begins in.
    fn default() -> StateType { Self::Progressing }
}
| true
|
3fca14b0f8987922e3bba92a0972d46e8e855e25
|
Rust
|
Krishnacore/move-tools
|
/dove/src/docs/options.rs
|
UTF-8
| 4,145
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use serde::{Deserialize, Serialize};
/// Docgen options.
///
/// Every `#[serde(default = "…")]` below points at the matching free function
/// defined later in this file, which the `Default` impl reuses — keeping
/// deserialization defaults and `Default::default()` in sync.
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
pub struct DocgenOptions {
    /// Whether to run the documentation generator.
    #[serde(default = "enabled")]
    pub enabled: bool,
    /// The level where we start sectioning. Often markdown sections are rendered with
    /// unnecessary large section fonts, setting this value high reduces the size.
    #[serde(default = "section_level_start")]
    pub section_level_start: usize,
    /// Whether to include private functions in the generated docs.
    #[serde(default = "include_private_fun")]
    pub include_private_fun: bool,
    /// Whether to include specifications in the generated docs.
    #[serde(default = "include_specs")]
    pub include_specs: bool,
    /// Whether to put specifications in the same section as a declaration or put them all
    /// into an independent section.
    #[serde(default = "specs_inlined")]
    pub specs_inlined: bool,
    /// Whether to include Move implementations.
    #[serde(default = "include_impl")]
    pub include_impl: bool,
    /// Max depth to which sections are displayed in table-of-contents.
    #[serde(default = "toc_depth")]
    pub toc_depth: usize,
    /// Whether to use collapsed sections (<details>) for impl and specs
    #[serde(default = "collapsed_sections")]
    pub collapsed_sections: bool,
    /// A list of paths to files containing templates for root documents for the generated
    /// documentation.
    ///
    /// A root document is a markdown file which contains placeholders for generated
    /// documentation content. It is also processed following the same rules than
    /// documentation comments in Move, including creation of cross-references and
    /// Move code highlighting.
    ///
    /// A placeholder is a single line starting with a markdown quotation marker
    /// of the following form:
    ///
    /// ```notrust
    /// > {{move-include NAME_OF_MODULE_OR_SCRIPT}}
    /// > {{move-toc}}
    /// > {{move-index}}
    /// ```
    ///
    /// These lines will be replaced by the generated content of the module or script,
    /// or a table of contents, respectively.
    ///
    /// For a module or script which is included in the root document, no
    /// separate file is generated. References between the included and the standalone
    /// module/script content work transparently.
    #[serde(default)]
    pub root_doc_templates: Vec<String>,
    /// An optional file containing reference definitions. The content of this file will
    /// be added to each generated markdown doc.
    #[serde(default)]
    pub references_file: Option<String>,
    /// Whether to include dependency diagrams in the generated docs.
    #[serde(default = "include_dep_diagrams")]
    pub include_dep_diagrams: bool,
    /// Whether to include call diagrams in the generated docs.
    #[serde(default = "include_call_diagrams")]
    pub include_call_diagrams: bool,
}
// Default-value functions referenced by the `#[serde(default = "…")]`
// attributes on `DocgenOptions` and reused by its `Default` impl.
fn enabled() -> bool {
    true
}
fn section_level_start() -> usize {
    1
}
fn include_private_fun() -> bool {
    true
}
fn include_specs() -> bool {
    true
}
fn specs_inlined() -> bool {
    true
}
fn include_impl() -> bool {
    true
}
fn toc_depth() -> usize {
    3
}
fn collapsed_sections() -> bool {
    true
}
fn include_dep_diagrams() -> bool {
    false
}
fn include_call_diagrams() -> bool {
    false
}
impl Default for DocgenOptions {
    /// Mirrors the serde defaults by delegating to the same default functions.
    fn default() -> Self {
        Self {
            enabled: enabled(),
            section_level_start: section_level_start(),
            include_private_fun: include_private_fun(),
            include_specs: include_specs(),
            specs_inlined: specs_inlined(),
            include_impl: include_impl(),
            toc_depth: toc_depth(),
            collapsed_sections: collapsed_sections(),
            root_doc_templates: vec![],
            references_file: None,
            include_dep_diagrams: include_dep_diagrams(),
            include_call_diagrams: include_call_diagrams(),
        }
    }
}
| true
|
9c0c63dad985d190f9a00addb0adaab08c7f1f59
|
Rust
|
stm32-rs/stm32f4xx-hal
|
/src/rcc/pll.rs
|
UTF-8
| 15,977
| 2.546875
| 3
|
[
"BSD-3-Clause",
"0BSD"
] |
permissive
|
use crate::pac::RCC;
/// Computed configuration and resulting output frequencies of the main PLL.
pub struct MainPll {
    /// Whether the main PLL should be used at all.
    pub use_pll: bool,
    /// Actual PLL-derived system clock in Hz, when the PLL drives sysclk.
    pub pllsysclk: Option<u32>,
    /// Actual 48 MHz-domain ("Q" output) clock in Hz, when requested.
    pub pll48clk: Option<u32>,
    /// "M" divisor, required for the other PLLs on some MCUs.
    pub m: Option<u32>,
    /// "R" output, required for I2S on STM32F410.
    pub plli2sclk: Option<u32>,
}
impl MainPll {
    /// Computes main-PLL dividers/multipliers from the PLL source clock.
    ///
    /// `pllsrcclk` is the PLL input frequency in Hz, `use_hse` selects the
    /// PLL source bit, `pllsysclk` is the desired system clock (when the PLL
    /// should drive sysclk), and `pll48clk` requests the 48 MHz "Q" output.
    /// Writes the result to `RCC.PLLCFGR` as a side effect.
    ///
    /// Panics (via `assert!`) if no valid `PLLM` yields a VCO input in the
    /// required 1–2 MHz range.
    pub fn fast_setup(
        pllsrcclk: u32,
        use_hse: bool,
        pllsysclk: Option<u32>,
        pll48clk: bool,
    ) -> MainPll {
        let sysclk = pllsysclk.unwrap_or(pllsrcclk);
        if pllsysclk.is_none() && !pll48clk {
            // Even if we do not use the main PLL, we still need to set the PLL source as that setting
            // applies to the I2S and SAI PLLs as well.
            unsafe { &*RCC::ptr() }
                .pllcfgr
                .write(|w| w.pllsrc().bit(use_hse));
            return MainPll {
                use_pll: false,
                pllsysclk: None,
                pll48clk: None,
                m: None,
                plli2sclk: None,
            };
        }
        // Input divisor from PLL source clock, must result to frequency in
        // the range from 1 to 2 MHz
        // (ceiling division for the minimum, floor for the maximum).
        let pllm_min = (pllsrcclk + 1_999_999) / 2_000_000;
        let pllm_max = pllsrcclk / 1_000_000;
        // Sysclk output divisor must be one of 2, 4, 6 or 8
        // (`& !1` clears bit 0 to force an even divisor).
        let sysclk_div = core::cmp::min(8, (432_000_000 / sysclk) & !1);
        let target_freq = if pll48clk {
            48_000_000
        } else {
            sysclk * sysclk_div
        };
        // Find the lowest pllm value that minimize the difference between
        // target frequency and the real vco_out frequency.
        let pllm = (pllm_min..=pllm_max)
            .min_by_key(|pllm| {
                let vco_in = pllsrcclk / pllm;
                let plln = target_freq / vco_in;
                target_freq - vco_in * plln
            })
            .unwrap();
        let vco_in = pllsrcclk / pllm;
        assert!((1_000_000..=2_000_000).contains(&vco_in));
        // Main scaler, must result in >= 100MHz (>= 192MHz for F401)
        // and <= 432MHz, min 50, max 432
        let plln = if pll48clk {
            // try the different valid pllq according to the valid
            // main scaller values, and take the best
            // (48 MHz accuracy is prioritized over sysclk accuracy by
            // the tuple ordering of the key below).
            let pllq = (4..=9)
                .min_by_key(|pllq| {
                    let plln = 48_000_000 * pllq / vco_in;
                    let pll48_diff = 48_000_000 - vco_in * plln / pllq;
                    let sysclk_diff = (sysclk as i32 - (vco_in * plln / sysclk_div) as i32).abs();
                    (pll48_diff, sysclk_diff)
                })
                .unwrap();
            48_000_000 * pllq / vco_in
        } else {
            sysclk * sysclk_div / vco_in
        };
        // Register encoding: PLLP field stores (divisor / 2) - 1.
        let pllp = (sysclk_div / 2) - 1;
        // Smallest Q divisor keeping the 48 MHz output at or below 48 MHz
        // (ceiling division).
        let pllq = (vco_in * plln + 47_999_999) / 48_000_000;
        let real_pll48clk = vco_in * plln / pllq;
        unsafe { &*RCC::ptr() }.pllcfgr.write(|w| unsafe {
            w.pllm().bits(pllm as u8);
            w.plln().bits(plln as u16);
            w.pllp().bits(pllp as u8);
            w.pllq().bits(pllq as u8);
            w.pllsrc().bit(use_hse)
        });
        let real_pllsysclk = vco_in * plln / sysclk_div;
        MainPll {
            use_pll: true,
            pllsysclk: Some(real_pllsysclk),
            pll48clk: if pll48clk { Some(real_pll48clk) } else { None },
            m: Some(pllm),
            plli2sclk: None,
        }
    }
#[cfg(feature = "gpio-f410")]
/// Configures the main PLL on F410, where the I2S clock is derived from the
/// main PLL's "R" output instead of a dedicated PLLI2S.
///
/// Searches all valid M/N/P/Q/R combinations for the one minimizing the total
/// frequency error across the requested sysclk, 48 MHz (USB) and I2S clocks.
/// Panics if no valid configuration exists.
pub fn setup_with_i2s(
    pllsrcclk: u32,
    use_hse: bool,
    pllsysclk: Option<u32>,
    pll48clk: bool,
    plli2sclk: u32,
) -> MainPll {
    use super::{SYSCLK_MAX, SYSCLK_MIN};
    // Input divisor from PLL source clock, must result to frequency in
    // the range from 1 to 2 MHz
    let pllm_min = (pllsrcclk + 1_999_999) / 2_000_000;
    let pllm_max = pllsrcclk / 1_000_000;
    let (pllm, plln, pllp, pllq, pllr, _) = (pllm_min..=pllm_max)
        .filter_map(|m| {
            let vco_in = pllsrcclk / m;
            // The VCO output must be within 100 and 432 MHz.
            let plln_min = (100_000_000 + vco_in - 1) / vco_in;
            let plln_max = 432_000_000 / vco_in;
            (plln_min..=plln_max)
                .filter_map(|n| {
                    let vco_out = vco_in * n;
                    // The "P" divider value must be even (2, 4, 6, 8).
                    let p = if let Some(pllsysclk) = pllsysclk {
                        let (p, p_output, p_error) = Self::best_divider(
                            vco_out,
                            SYSCLK_MIN * 2,
                            pllsysclk * 2,
                            SYSCLK_MAX * 2,
                            1,
                            4,
                        )?;
                        Some((p * 2, p_output / 2, p_error / 2))
                    } else {
                        None
                    };
                    // The 48 MHz clock must be accurate within 0.25% for USB.
                    // Fixed: `then_some(...?)` evaluated (and propagated) the
                    // `?` even when no 48 MHz clock was requested, discarding
                    // otherwise valid candidates.
                    let q = if pll48clk {
                        Some(Self::best_divider(
                            vco_out, 47_880_000, 48_000_000, 48_120_000, 2, 15,
                        )?)
                    } else {
                        None
                    };
                    // We do not set any accuracy requirements for I2S, as on F410 this frequency is
                    // provided on a best-effort basis.
                    // TODO: What is the maximum valid input frequency for I2S?
                    let r = Self::best_divider(vco_out, 0, plli2sclk, u32::MAX, 2, 15)?;
                    // Fixed: the "P" error was counted twice and the "Q" error
                    // never, skewing the candidate ranking.
                    let error = p.map(|(_, _, error)| error).unwrap_or(0)
                        + q.map(|(_, _, error)| error).unwrap_or(0)
                        + r.2;
                    Some((m, n, p.map(|p| p.0), q.map(|q| q.0), r.0, error))
                })
                .min_by_key(|(_, _, _, _, _, error)| *error)
        })
        .min_by_key(|(_, _, _, _, _, error)| *error)
        .expect("could not find a valid main PLL configuration");
    unsafe { &*RCC::ptr() }.pllcfgr.write(|w| unsafe {
        w.pllm().bits(pllm as u8);
        w.plln().bits(plln as u16);
        if let Some(pllp) = pllp {
            // Hardware encoding: 2 -> 0b00, 4 -> 0b01, 6 -> 0b10, 8 -> 0b11.
            w.pllp().bits(pllp as u8 / 2 - 1);
        }
        if let Some(pllq) = pllq {
            w.pllq().bits(pllq as u8);
        }
        w.pllr().bits(pllr as u8);
        w.pllsrc().bit(use_hse)
    });
    let real_pllsysclk = pllp.map(|pllp| pllsrcclk / pllm * plln / pllp);
    let real_pll48clk = pllq.map(|pllq| pllsrcclk / pllm * plln / pllq);
    MainPll {
        use_pll: true,
        pllsysclk: real_pllsysclk,
        pll48clk: real_pll48clk,
        m: Some(pllm),
        // NOTE(review): the achieved I2S clock (vco_out / pllr) is computed
        // but not reported back here — confirm callers obtain it elsewhere.
        plli2sclk: None,
    }
}
/// Finds the divider of `vco_out` that brings the divided clock as close as
/// possible to `target`, with the output constrained to `[min, max]` and the
/// divider constrained to `[min_div, max_div]`.
///
/// Returns `(divider, actual_output, abs_error)`, or `None` when no divider in
/// range can satisfy the frequency window. `target` of 0 yields `None`.
#[cfg(feature = "gpio-f410")]
fn best_divider(
    vco_out: u32,
    min: u32,
    target: u32,
    max: u32,
    min_div: u32,
    max_div: u32,
) -> Option<(u32, u32, u32)> {
    // Guard: a zero target has no meaningful "closest" divider (and would
    // divide by zero below).
    if target == 0 {
        return None;
    }
    // Ideal divider, rounded to nearest.
    let div = (vco_out + target / 2) / target;
    // Smallest divider keeping the output <= max, i.e. ceil(vco_out / max).
    // Fixed: the previous `(vco_out + max - 1) / max` form overflows u32 when
    // callers pass `max == u32::MAX` (the I2S path does), panicking in debug
    // builds.
    let min_div = u32::max(
        min_div,
        if max != 0 && vco_out != 0 {
            1 + (vco_out - 1) / max
        } else {
            0
        },
    );
    // Largest divider keeping the output >= min.
    let max_div = u32::min(max_div, if min != 0 { vco_out / min } else { u32::MAX });
    if min_div > max_div {
        return None;
    }
    let div = u32::min(u32::max(div, min_div), max_div);
    let output = vco_out / div;
    let error = (output as i32 - target as i32).abs() as u32;
    Some((div, output, error))
}
}
#[cfg(not(feature = "gpio-f410"))]
/// Computed configuration of the dedicated I2S PLL (PLLI2S).
pub struct I2sPll {
    /// Whether the PLLI2S needs to be enabled at all.
    pub use_pll: bool,
    /// "M" divisor, required for the other PLLs on some MCUs.
    pub m: Option<u32>,
    /// PLL I2S clock output.
    pub plli2sclk: Option<u32>,
}
#[cfg(not(feature = "gpio-f410"))]
impl I2sPll {
    /// Returns a configuration with the PLLI2S disabled.
    pub fn unused() -> I2sPll {
        I2sPll {
            use_pll: false,
            m: None,
            plli2sclk: None,
        }
    }
    /// Configures the PLLI2S for the requested I2S clock (if any), searching
    /// every valid "M" divisor for the closest achievable frequency.
    pub fn setup(pllsrcclk: u32, plli2sclk: Option<u32>) -> I2sPll {
        let Some(target) = plli2sclk else {
            return Self::unused();
        };
        // Input divisor from PLL source clock, must result to frequency in
        // the range from 1 to 2 MHz
        let pllm_min = (pllsrcclk + 1_999_999) / 2_000_000;
        let pllm_max = pllsrcclk / 1_000_000;
        // NOTE(review): `optimize_fixed_m` panics (via its inner `expect`) for
        // any single "M" value with no valid config, before `min_by_key` can
        // pick a different "M" — confirm this early-panic behavior is intended.
        let (pll, config, _) = (pllm_min..=pllm_max)
            .map(|m| Self::optimize_fixed_m(pllsrcclk, m, target))
            .min_by_key(|(_, _, error)| *error)
            .expect("no suitable I2S PLL configuration found");
        Self::apply_config(config);
        pll
    }
    /// Like `setup`, but reuses the "M" divisor already chosen for the main
    /// PLL on parts where the "M" stage is written to the shared PLLCFGR.
    #[cfg(any(
        feature = "gpio-f401",
        feature = "gpio-f417",
        feature = "gpio-f427",
        feature = "gpio-f469",
    ))]
    pub fn setup_shared_m(pllsrcclk: u32, m: Option<u32>, plli2sclk: Option<u32>) -> I2sPll {
        // "m" is None if the main PLL is not in use.
        let Some(m) = m else {
            return Self::setup(pllsrcclk, plli2sclk);
        };
        let Some(target) = plli2sclk else {
            return Self::unused();
        };
        let (pll, config, _) = Self::optimize_fixed_m(pllsrcclk, m, target);
        Self::apply_config(config);
        pll
    }
    /// Finds the best N/R combination for a fixed "M" divisor. Returns the
    /// PLL description, the raw register config and the absolute frequency
    /// error. Panics when no valid combination exists for this "M".
    fn optimize_fixed_m(pllsrcclk: u32, m: u32, plli2sclk: u32) -> (I2sPll, SingleOutputPll, u32) {
        let (config, real_plli2sclk, error) =
            SingleOutputPll::optimize(pllsrcclk, m, plli2sclk, 2, 7)
                .expect("did not find any valid I2S PLL config");
        (
            I2sPll {
                use_pll: true,
                m: Some(config.m as u32),
                plli2sclk: Some(real_plli2sclk),
            },
            config,
            error,
        )
    }
    /// Writes the configuration on parts where "M" lives in PLLCFGR and the
    /// PLLI2S N/R fields live in PLLI2SCFGR.
    #[cfg(not(any(
        feature = "gpio-f411",
        feature = "gpio-f412",
        feature = "gpio-f413",
        feature = "gpio-f446",
    )))]
    fn apply_config(config: SingleOutputPll) {
        let rcc = unsafe { &*RCC::ptr() };
        // "M" may have been written before, but the value is identical.
        rcc.pllcfgr
            .modify(|_, w| unsafe { w.pllm().bits(config.m) });
        rcc.plli2scfgr
            .modify(|_, w| unsafe { w.plli2sn().bits(config.n).plli2sr().bits(config.outdiv) });
    }
    /// Writes the configuration on parts with a dedicated PLLI2SM field in
    /// PLLI2SCFGR (no shared "M" write needed).
    #[cfg(any(
        feature = "gpio-f411",
        feature = "gpio-f412",
        feature = "gpio-f413",
        feature = "gpio-f446",
    ))]
    fn apply_config(config: SingleOutputPll) {
        let rcc = unsafe { &*RCC::ptr() };
        rcc.plli2scfgr.modify(|_, w| unsafe {
            w.plli2sm().bits(config.m);
            w.plli2sn().bits(config.n);
            w.plli2sr().bits(config.outdiv)
        });
    }
}
#[cfg(any(feature = "gpio-f427", feature = "gpio-f446", feature = "gpio-f469",))]
/// Computed configuration of the SAI PLL (PLLSAI).
pub struct SaiPll {
    /// Whether the PLLSAI needs to be enabled at all.
    pub use_pll: bool,
    /// SAI clock (PLL output divided by the SAI clock divider).
    pub sai_clk: Option<u32>,
}
#[cfg(any(feature = "gpio-f427", feature = "gpio-f446", feature = "gpio-f469",))]
impl SaiPll {
    /// Returns a configuration with the PLLSAI disabled.
    pub fn unused() -> SaiPll {
        SaiPll {
            use_pll: false,
            sai_clk: None,
        }
    }
    /// Configures the SAI PLL for the requested SAI clock (if any), searching
    /// every valid "M" divisor for the closest achievable frequency.
    pub fn setup(pllsrcclk: u32, sai_clk: Option<u32>) -> SaiPll {
        let Some(target) = sai_clk else {
            return Self::unused();
        };
        // Input divisor from PLL source clock, must result to frequency in
        // the range from 1 to 2 MHz
        let pllm_min = (pllsrcclk + 1_999_999) / 2_000_000;
        let pllm_max = pllsrcclk / 1_000_000;
        // NOTE(review): `optimize_fixed_m` panics for any single "M" with no
        // valid config, before other "M" values are tried — confirm intended.
        let (pll, config, saidiv, _) = (pllm_min..=pllm_max)
            .map(|m| Self::optimize_fixed_m(pllsrcclk, m, target))
            .min_by_key(|(_, _, _, error)| *error)
            .expect("no suitable SAI PLL configuration found");
        Self::apply_config(config, saidiv);
        pll
    }
    /// Like `setup`, but reuses the "M" divisor already chosen by the other
    /// PLLs on parts where the "M" stage is shared via PLLCFGR.
    #[cfg(any(feature = "gpio-f427", feature = "gpio-f469",))]
    pub fn setup_shared_m(pllsrcclk: u32, m: Option<u32>, sai_clk: Option<u32>) -> SaiPll {
        // "m" is None if both other PLLs are not in use.
        let Some(m) = m else {
            return Self::setup(pllsrcclk, sai_clk);
        };
        let Some(target) = sai_clk else {
            return Self::unused();
        };
        let (pll, config, saidiv, _) = Self::optimize_fixed_m(pllsrcclk, m, target);
        Self::apply_config(config, saidiv);
        pll
    }
    /// Finds the best N/Q/SAI-divider combination for a fixed "M" divisor.
    /// Returns the PLL description, the raw register config, the SAI divider
    /// and the absolute frequency error. Panics when no combination exists.
    fn optimize_fixed_m(
        pllsrcclk: u32,
        m: u32,
        sai_clk: u32,
    ) -> (SaiPll, SingleOutputPll, u32, u32) {
        // NOTE: This code tests lots of configurations due to the nested loops for the two
        // dividers. A smarter approach can probably speed up the search.
        let (config, saidiv, real_sai_clk, error) = (1..=32)
            .filter_map(|saidiv| {
                let target = sai_clk * saidiv;
                let (config, real_sai_clk, error) =
                    SingleOutputPll::optimize(pllsrcclk, m, target, 2, 15)?;
                Some((config, saidiv, real_sai_clk, error))
            })
            .min_by_key(|(_, _, _, error)| *error)
            // Fixed: this message previously said "I2S PLL" (copy/paste from
            // I2sPll); this is the SAI PLL search.
            .expect("no suitable SAI PLL configuration found");
        (
            SaiPll {
                use_pll: true,
                sai_clk: Some(real_sai_clk),
            },
            config,
            saidiv,
            error,
        )
    }
    /// Writes the configuration on parts where "M" lives in PLLCFGR (shared
    /// with the other PLLs); the SAI divider lives in DCKCFGR.
    #[cfg(not(feature = "gpio-f446"))]
    fn apply_config(config: SingleOutputPll, saidiv: u32) {
        let rcc = unsafe { &*RCC::ptr() };
        rcc.dckcfgr
            .modify(|_, w| w.pllsaidivq().bits(saidiv as u8 - 1));
        // "M" may have been written before, but the value is identical.
        rcc.pllcfgr
            .modify(|_, w| unsafe { w.pllm().bits(config.m) });
        rcc.pllsaicfgr
            .modify(|_, w| unsafe { w.pllsain().bits(config.n).pllsaiq().bits(config.outdiv) });
    }
    /// Writes the configuration on F446, which has a dedicated PLLSAIM field
    /// in PLLSAICFGR.
    #[cfg(feature = "gpio-f446")]
    fn apply_config(config: SingleOutputPll, saidiv: u32) {
        let rcc = unsafe { &*RCC::ptr() };
        rcc.dckcfgr
            .modify(|_, w| w.pllsaidivq().bits(saidiv as u8 - 1));
        rcc.pllsaicfgr.modify(|_, w| unsafe {
            w.pllsaim().bits(config.m);
            w.pllsain().bits(config.n);
            w.pllsaiq().bits(config.outdiv)
        });
    }
}
#[cfg(not(feature = "gpio-f410"))]
/// Register-level configuration of a PLL of which a single output is used:
/// "M" input divisor, "N" multiplier, and one output divisor.
struct SingleOutputPll {
    m: u8,
    n: u16,
    outdiv: u8,
}
#[cfg(not(feature = "gpio-f410"))]
impl SingleOutputPll {
    /// Searches `min_div..=max_div` for the output divider whose resulting
    /// frequency comes closest to `target`, given the source clock and a
    /// fixed "M" divisor.
    ///
    /// Returns `(config, actual_output, abs_error)`, or `None` when no
    /// divider yields a VCO frequency within the valid 100..=432 MHz window
    /// (or the target multiplication overflows).
    fn optimize(
        pllsrcclk: u32,
        m: u32,
        target: u32,
        min_div: u32,
        max_div: u32,
    ) -> Option<(SingleOutputPll, u32, u32)> {
        let vco_in = pllsrcclk / m;
        // Iterate over the small divider range rather than all possible "N"
        // values — far fewer candidates to examine.
        let (n, outdiv, output, error) = (min_div..=max_div)
            .filter_map(|div| {
                let wanted_vco = target.checked_mul(div)?;
                // Round "N" to the nearest integer multiple of the input clock.
                let n = (wanted_vco + (vco_in >> 1)) / vco_in;
                let vco_out = vco_in * n;
                if !(100_000_000..=432_000_000).contains(&vco_out) {
                    return None;
                }
                let achieved = vco_out / div;
                let error = (achieved as i32 - target as i32).unsigned_abs();
                Some((n, div, achieved, error))
            })
            .min_by_key(|&(_, _, _, error)| error)?;
        Some((
            SingleOutputPll {
                m: m as u8,
                n: n as u16,
                outdiv: outdiv as u8,
            },
            output,
            error,
        ))
    }
}
| true
|
9a80fc3d06351d13545848c0f01f8a72aa576684
|
Rust
|
Azure/azure-sdk-for-rust
|
/services/mgmt/subscription/src/package_2021_10/models.rs
|
UTF-8
| 43,003
| 2.65625
| 3
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::de::{value, Deserializer, IntoDeserializer};
use serde::{Deserialize, Serialize, Serializer};
use std::str::FromStr;
/// The parameters required to accept subscription ownership.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct AcceptOwnershipRequest {
    /// Accept subscription ownership request properties.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AcceptOwnershipRequestProperties>,
}
impl AcceptOwnershipRequest {
    /// Creates an empty request (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Accept subscription ownership request properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AcceptOwnershipRequestProperties {
    /// The friendly name of the subscription.
    #[serde(rename = "displayName")]
    pub display_name: String,
    /// Management group Id for the subscription.
    #[serde(rename = "managementGroupId", default, skip_serializing_if = "Option::is_none")]
    pub management_group_id: Option<String>,
    /// Tags for the subscription
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
impl AcceptOwnershipRequestProperties {
    /// Creates properties with the mandatory display name; optional fields unset.
    pub fn new(display_name: String) -> Self {
        Self {
            display_name,
            management_group_id: None,
            tags: None,
        }
    }
}
/// The accept ownership state of the resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "AcceptOwnershipState")]
pub enum AcceptOwnershipState {
    Pending,
    Completed,
    Expired,
    /// Any value not yet known to this client version.
    #[serde(skip_deserializing)]
    UnknownValue(String),
}
impl FromStr for AcceptOwnershipState {
    type Err = value::Error;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Self::deserialize(s.into_deserializer())
    }
}
impl<'de> Deserialize<'de> for AcceptOwnershipState {
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Unknown strings are preserved (not rejected), so values introduced
        // by newer service versions still round-trip through this client.
        let raw = String::deserialize(deserializer)?;
        Ok(Self::from_str(&raw).unwrap_or(Self::UnknownValue(raw)))
    }
}
impl Serialize for AcceptOwnershipState {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            Self::Pending => serializer.serialize_unit_variant("AcceptOwnershipState", 0u32, "Pending"),
            Self::Completed => serializer.serialize_unit_variant("AcceptOwnershipState", 1u32, "Completed"),
            Self::Expired => serializer.serialize_unit_variant("AcceptOwnershipState", 2u32, "Expired"),
            Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
        }
    }
}
/// Subscription Accept Ownership Response
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct AcceptOwnershipStatusResponse {
    /// Newly created subscription Id.
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
    /// The accept ownership state of the resource.
    #[serde(rename = "acceptOwnershipState", default, skip_serializing_if = "Option::is_none")]
    pub accept_ownership_state: Option<AcceptOwnershipState>,
    /// The provisioning state of the resource.
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<ProvisioningState>,
    /// UPN of the billing owner
    #[serde(rename = "billingOwner", default, skip_serializing_if = "Option::is_none")]
    pub billing_owner: Option<String>,
    /// Tenant Id of the subscription
    #[serde(rename = "subscriptionTenantId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_tenant_id: Option<String>,
    /// The display name of the subscription.
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    /// Tags for the subscription
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
impl AcceptOwnershipStatusResponse {
    /// Creates an empty response (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Billing account policies information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingAccountPoliciesResponse {
    /// Fully qualified ID for the policy.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// Policy name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Resource type.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    /// Put billing account policies response properties.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<BillingAccountPoliciesResponseProperties>,
    /// Metadata pertaining to creation and last modification of the resource.
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
impl BillingAccountPoliciesResponse {
    /// Creates an empty response (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Put billing account policies response properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingAccountPoliciesResponseProperties {
    /// Service tenant for the billing account.
    #[serde(
        rename = "serviceTenants",
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub service_tenants: Vec<ServiceTenantResponse>,
    /// Determine if the transfers are allowed for the billing account
    #[serde(rename = "allowTransfers", default, skip_serializing_if = "Option::is_none")]
    pub allow_transfers: Option<bool>,
}
impl BillingAccountPoliciesResponseProperties {
    /// Creates empty properties (no tenants, transfer flag unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Fully qualified billing scope path; see the format examples documented on
/// `PutAliasRequestProperties::billing_scope`.
pub type BillingScope = String;
/// The ID of the canceled subscription
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct CanceledSubscriptionId {
    /// The ID of the canceled subscription
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
}
impl CanceledSubscriptionId {
    /// Creates an empty wrapper (id unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// The ID of the subscriptions that is being enabled
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct EnabledSubscriptionId {
    /// The ID of the subscriptions that is being enabled
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
}
impl EnabledSubscriptionId {
    /// Creates an empty wrapper (id unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Describes the format of Error response.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorResponse {
    /// Error code
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    /// Error message indicating why the operation failed.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
impl ErrorResponse {
    /// Creates an empty error response (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Error response indicates that the service is not able to process the incoming request. The reason is provided in the error message.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorResponseBody {
    /// Describes the format of Error response.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
    /// Error code
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    /// Error message indicating why the operation failed.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
impl azure_core::Continuable for ErrorResponseBody {
    type Continuation = String;
    /// Errors are never paged.
    fn continuation(&self) -> Option<Self::Continuation> {
        None
    }
}
impl ErrorResponseBody {
    /// Creates an empty error body (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Tenant policy information list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct GetTenantPolicyListResponse {
    /// The list of tenant policies.
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<GetTenantPolicyResponse>,
    /// The link (url) to the next page of results.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
impl azure_core::Continuable for GetTenantPolicyListResponse {
    type Continuation = String;
    /// Paging continues while the service returns a `nextLink`.
    fn continuation(&self) -> Option<Self::Continuation> {
        self.next_link.clone()
    }
}
impl GetTenantPolicyListResponse {
    /// Creates an empty list response.
    pub fn new() -> Self {
        Self::default()
    }
}
/// Tenant policy Information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct GetTenantPolicyResponse {
    /// Policy Id.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// Policy name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Resource type.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    /// Tenant policy.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<TenantPolicy>,
    /// Metadata pertaining to creation and last modification of the resource.
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
impl GetTenantPolicyResponse {
    /// Creates an empty response (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Location information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Location {
    /// The fully qualified ID of the location. For example, /subscriptions/00000000-0000-0000-0000-000000000000/locations/westus.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// The subscription ID.
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
    /// The location name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// The display name of the location.
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    /// The latitude of the location.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub latitude: Option<String>,
    /// The longitude of the location.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub longitude: Option<String>,
}
impl Location {
    /// Creates an empty location (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Location list operation response.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct LocationListResult {
    /// An array of locations.
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<Location>,
}
impl azure_core::Continuable for LocationListResult {
    type Continuation = String;
    /// This list operation is never paged.
    fn continuation(&self) -> Option<Self::Continuation> {
        None
    }
}
impl LocationListResult {
    /// Creates an empty list result.
    pub fn new() -> Self {
        Self::default()
    }
}
/// REST API operation
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Operation {
    /// Operation name: {provider}/{resource}/{operation}
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Indicates whether the operation is a data action
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    /// The object that represents the operation.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
impl Operation {
    /// Creates an empty operation (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
pub mod operation {
    use super::*;
    /// The object that represents the operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
    pub struct Display {
        /// Service provider: Microsoft.Subscription
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        /// Resource on which the operation is performed: Profile, endpoint, etc.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        /// Operation type: Read, write, delete, etc.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        /// Localized friendly description for the operation
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
    impl Display {
        /// Creates an empty display block (all fields unset).
        pub fn new() -> Self {
            Self::default()
        }
    }
}
/// Result of the request to list operations. It contains a list of operations and a URL link to get the next set of results.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct OperationListResult {
    /// List of operations.
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<Operation>,
    /// URL to get the next set of operation list results if there are any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
impl azure_core::Continuable for OperationListResult {
    type Continuation = String;
    /// Paging continues while the service returns a `nextLink`.
    fn continuation(&self) -> Option<Self::Continuation> {
        self.next_link.clone()
    }
}
impl OperationListResult {
    /// Creates an empty list result.
    pub fn new() -> Self {
        Self::default()
    }
}
/// The provisioning state of the resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ProvisioningState")]
pub enum ProvisioningState {
    Pending,
    Accepted,
    Succeeded,
    /// Any value not yet known to this client version.
    #[serde(skip_deserializing)]
    UnknownValue(String),
}
impl FromStr for ProvisioningState {
    type Err = value::Error;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Self::deserialize(s.into_deserializer())
    }
}
impl<'de> Deserialize<'de> for ProvisioningState {
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Unknown strings are preserved (not rejected), so values introduced
        // by newer service versions still round-trip through this client.
        let raw = String::deserialize(deserializer)?;
        Ok(Self::from_str(&raw).unwrap_or(Self::UnknownValue(raw)))
    }
}
impl Serialize for ProvisioningState {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            Self::Pending => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Pending"),
            Self::Accepted => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Accepted"),
            Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Succeeded"),
            Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
        }
    }
}
/// The parameters required to create a new subscription.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PutAliasRequest {
    /// Put subscription properties.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PutAliasRequestProperties>,
}
impl PutAliasRequest {
    /// Creates an empty request (properties unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Put subscription additional properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PutAliasRequestAdditionalProperties {
    /// Management group Id for the subscription.
    #[serde(rename = "managementGroupId", default, skip_serializing_if = "Option::is_none")]
    pub management_group_id: Option<String>,
    /// Tenant Id of the subscription
    #[serde(rename = "subscriptionTenantId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_tenant_id: Option<String>,
    /// Owner Id of the subscription
    #[serde(rename = "subscriptionOwnerId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_owner_id: Option<String>,
    /// Tags for the subscription
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
impl PutAliasRequestAdditionalProperties {
    /// Creates empty additional properties (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Put subscription properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PutAliasRequestProperties {
    /// The friendly name of the subscription.
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    /// The workload type of the subscription. It can be either Production or DevTest.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub workload: Option<Workload>,
    /// Billing scope of the subscription.
    /// For CustomerLed and FieldLed - /billingAccounts/{billingAccountName}/billingProfiles/{billingProfileName}/invoiceSections/{invoiceSectionName}
    /// For PartnerLed - /billingAccounts/{billingAccountName}/customers/{customerName}
    /// For Legacy EA - /billingAccounts/{billingAccountName}/enrollmentAccounts/{enrollmentAccountName}
    #[serde(rename = "billingScope", default, skip_serializing_if = "Option::is_none")]
    pub billing_scope: Option<BillingScope>,
    /// This parameter can be used to create alias for existing subscription Id
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
    /// Reseller Id
    #[serde(rename = "resellerId", default, skip_serializing_if = "Option::is_none")]
    pub reseller_id: Option<String>,
    /// Put subscription additional properties.
    #[serde(rename = "additionalProperties", default, skip_serializing_if = "Option::is_none")]
    pub additional_properties: Option<PutAliasRequestAdditionalProperties>,
}
impl PutAliasRequestProperties {
    /// Creates empty properties (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Put tenant policy request properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PutTenantPolicyRequestProperties {
    /// Blocks the leaving of subscriptions from user's tenant.
    #[serde(rename = "blockSubscriptionsLeavingTenant", default, skip_serializing_if = "Option::is_none")]
    pub block_subscriptions_leaving_tenant: Option<bool>,
    /// Blocks the entering of subscriptions into user's tenant.
    #[serde(rename = "blockSubscriptionsIntoTenant", default, skip_serializing_if = "Option::is_none")]
    pub block_subscriptions_into_tenant: Option<bool>,
    /// List of user objectIds that are exempted from the set subscription tenant policies for the user's tenant.
    #[serde(
        rename = "exemptedPrincipals",
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub exempted_principals: Vec<String>,
}
impl PutTenantPolicyRequestProperties {
    /// Creates empty properties (flags unset, no exempted principals).
    pub fn new() -> Self {
        Self::default()
    }
}
/// The ID of the subscriptions that is being renamed
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct RenamedSubscriptionId {
    /// The ID of the subscriptions that is being renamed
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
}
impl RenamedSubscriptionId {
    /// Creates an empty wrapper (id unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Billing account service tenant.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ServiceTenantResponse {
    /// Service tenant id.
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    /// Service tenant name.
    #[serde(rename = "tenantName", default, skip_serializing_if = "Option::is_none")]
    pub tenant_name: Option<String>,
}
impl ServiceTenantResponse {
    /// Creates an empty tenant record (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
/// Subscription information.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Subscription {
    /// The fully qualified ID for the subscription. For example, /subscriptions/00000000-0000-0000-0000-000000000000.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// The subscription ID.
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
    /// The subscription display name.
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    /// The subscription state. Possible values are Enabled, Warned, PastDue, Disabled, and Deleted.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<subscription::State>,
    /// The tenant ID. For example, 00000000-0000-0000-0000-000000000000.
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    /// Tags for the subscription
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    /// Subscription policies.
    #[serde(rename = "subscriptionPolicies", default, skip_serializing_if = "Option::is_none")]
    pub subscription_policies: Option<SubscriptionPolicies>,
    /// The authorization source of the request. Valid values are one or more combinations of Legacy, RoleBased, Bypassed, Direct and Management. For example, 'Legacy, RoleBased'.
    #[serde(rename = "authorizationSource", default, skip_serializing_if = "Option::is_none")]
    pub authorization_source: Option<String>,
}
impl Subscription {
    /// Creates an empty subscription record (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
pub mod subscription {
    use super::*;
    /// The subscription state. Possible values are Enabled, Warned, PastDue, Disabled, and Deleted.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Enabled,
        Warned,
        PastDue,
        Disabled,
        Deleted,
    }
}
/// The list of aliases.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SubscriptionAliasListResult {
    /// The list of alias.
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<SubscriptionAliasResponse>,
    /// The link (url) to the next page of results.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
impl SubscriptionAliasListResult {
    /// Creates an empty list result.
    pub fn new() -> Self {
        Self::default()
    }
}
/// Subscription Information with the alias.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SubscriptionAliasResponse {
    /// Fully qualified ID for the alias resource.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// Alias ID.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Resource type, Microsoft.Subscription/aliases.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    /// Put subscription creation result properties.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<SubscriptionAliasResponseProperties>,
    /// Metadata pertaining to creation and last modification of the resource.
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
impl SubscriptionAliasResponse {
    /// Creates an empty response (all fields unset).
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "Put subscription creation result properties."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SubscriptionAliasResponseProperties {
    #[doc = "Newly created subscription Id."]
    #[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_id: Option<String>,
    #[doc = "The display name of the subscription."]
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[doc = "The provisioning state of the resource."]
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<subscription_alias_response_properties::ProvisioningState>,
    #[doc = "Url to accept ownership of the subscription."]
    #[serde(rename = "acceptOwnershipUrl", default, skip_serializing_if = "Option::is_none")]
    pub accept_ownership_url: Option<String>,
    #[doc = "The accept ownership state of the resource."]
    #[serde(rename = "acceptOwnershipState", default, skip_serializing_if = "Option::is_none")]
    pub accept_ownership_state: Option<AcceptOwnershipState>,
    #[doc = "Billing scope of the subscription.\nFor CustomerLed and FieldLed - /billingAccounts/{billingAccountName}/billingProfiles/{billingProfileName}/invoiceSections/{invoiceSectionName}\nFor PartnerLed - /billingAccounts/{billingAccountName}/customers/{customerName}\nFor Legacy EA - /billingAccounts/{billingAccountName}/enrollmentAccounts/{enrollmentAccountName}"]
    #[serde(rename = "billingScope", default, skip_serializing_if = "Option::is_none")]
    pub billing_scope: Option<BillingScope>,
    #[doc = "The workload type of the subscription. It can be either Production or DevTest."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub workload: Option<Workload>,
    #[doc = "Reseller Id"]
    #[serde(rename = "resellerId", default, skip_serializing_if = "Option::is_none")]
    pub reseller_id: Option<String>,
    #[doc = "Owner Id of the subscription"]
    #[serde(rename = "subscriptionOwnerId", default, skip_serializing_if = "Option::is_none")]
    pub subscription_owner_id: Option<String>,
    #[doc = "The Management Group Id."]
    #[serde(rename = "managementGroupId", default, skip_serializing_if = "Option::is_none")]
    pub management_group_id: Option<String>,
    #[doc = "Created Time"]
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
    pub created_time: Option<String>,
    #[doc = "Tags for the subscription"]
    // Free-form JSON object; no schema is imposed on tag keys/values.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
impl SubscriptionAliasResponseProperties {
    pub fn new() -> Self {
        Self::default()
    }
}
pub mod subscription_alias_response_properties {
    use super::*;
    #[doc = "The provisioning state of the resource."]
    // `#[serde(remote = ...)]` plus the hand-written Deserialize below lets
    // unrecognized service strings be captured in `UnknownValue` instead of
    // failing deserialization; Serialize round-trips them unchanged.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    #[serde(remote = "ProvisioningState")]
    pub enum ProvisioningState {
        Accepted,
        Succeeded,
        Failed,
        #[serde(skip_deserializing)]
        UnknownValue(String),
    }
    impl FromStr for ProvisioningState {
        type Err = value::Error;
        fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
            Self::deserialize(s.into_deserializer())
        }
    }
    impl<'de> Deserialize<'de> for ProvisioningState {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            let s = String::deserialize(deserializer)?;
            // Preserve the raw string on mismatch rather than erroring.
            let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
            Ok(deserialized)
        }
    }
    impl Serialize for ProvisioningState {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                Self::Accepted => serializer.serialize_unit_variant("ProvisioningState", 0u32, "Accepted"),
                Self::Succeeded => serializer.serialize_unit_variant("ProvisioningState", 1u32, "Succeeded"),
                Self::Failed => serializer.serialize_unit_variant("ProvisioningState", 2u32, "Failed"),
                Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
            }
        }
    }
}
#[doc = "The created subscription object."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SubscriptionCreationResult {
    #[doc = "The link to the new subscription. Use this link to check the status of subscription creation operation."]
    #[serde(rename = "subscriptionLink", default, skip_serializing_if = "Option::is_none")]
    pub subscription_link: Option<String>,
}
impl SubscriptionCreationResult {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "Subscription list operation response."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SubscriptionListResult {
    #[doc = "An array of subscriptions."]
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<Subscription>,
    #[doc = "The URL to get the next set of results."]
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
// Paging hook: `continuation` exposes the next-page link; `None` ends iteration.
impl azure_core::Continuable for SubscriptionListResult {
    type Continuation = String;
    fn continuation(&self) -> Option<Self::Continuation> {
        self.next_link.clone()
    }
}
impl SubscriptionListResult {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "The new name of the subscription."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SubscriptionName {
    #[doc = "New subscription name"]
    #[serde(rename = "subscriptionName", default, skip_serializing_if = "Option::is_none")]
    pub subscription_name: Option<String>,
}
impl SubscriptionName {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "Subscription policies."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SubscriptionPolicies {
    #[doc = "The subscription location placement ID. The ID indicates which regions are visible for a subscription. For example, a subscription with a location placement Id of Public_2014-09-01 has access to Azure public regions."]
    #[serde(rename = "locationPlacementId", default, skip_serializing_if = "Option::is_none")]
    pub location_placement_id: Option<String>,
    #[doc = "The subscription quota ID."]
    #[serde(rename = "quotaId", default, skip_serializing_if = "Option::is_none")]
    pub quota_id: Option<String>,
    #[doc = "The subscription spending limit."]
    #[serde(rename = "spendingLimit", default, skip_serializing_if = "Option::is_none")]
    pub spending_limit: Option<subscription_policies::SpendingLimit>,
}
impl SubscriptionPolicies {
    pub fn new() -> Self {
        Self::default()
    }
}
pub mod subscription_policies {
    use super::*;
    #[doc = "The subscription spending limit."]
    // Closed enum: unknown service values are not preserved here (contrast
    // with the `remote`-pattern enums in this file).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SpendingLimit {
        On,
        Off,
        CurrentPeriodOff,
    }
}
#[doc = "Tenant Id information."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct TenantIdDescription {
    #[doc = "The fully qualified ID of the tenant. For example, /tenants/00000000-0000-0000-0000-000000000000."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[doc = "The tenant ID. For example, 00000000-0000-0000-0000-000000000000."]
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    #[doc = "The category of the tenant. Possible values are TenantCategoryHome,TenantCategoryProjectedBy,TenantCategoryManagedBy"]
    #[serde(rename = "tenantCategory", default, skip_serializing_if = "Option::is_none")]
    pub tenant_category: Option<String>,
    #[doc = "The country/region name of the address for the tenant."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub country: Option<String>,
    #[doc = "The Country/region abbreviation for the tenant."]
    #[serde(rename = "countryCode", default, skip_serializing_if = "Option::is_none")]
    pub country_code: Option<String>,
    #[doc = "The display name of the tenant."]
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[doc = "The list of domains for the tenant."]
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub domains: Option<String>,
    #[doc = "The default domain for the tenant."]
    #[serde(rename = "defaultDomain", default, skip_serializing_if = "Option::is_none")]
    pub default_domain: Option<String>,
    #[doc = "The tenant type. Only available for Home tenant category."]
    #[serde(rename = "tenantType", default, skip_serializing_if = "Option::is_none")]
    pub tenant_type: Option<String>,
}
impl TenantIdDescription {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "Tenant Ids information."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TenantListResult {
    #[doc = "An array of tenants."]
    #[serde(
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub value: Vec<TenantIdDescription>,
    #[doc = "The URL to use for getting the next set of results."]
    // Unlike the other list results in this file, `nextLink` is required here
    // (no `Option`), so "no further pages" is signalled by an empty string.
    #[serde(rename = "nextLink")]
    pub next_link: String,
}
impl azure_core::Continuable for TenantListResult {
    type Continuation = String;
    fn continuation(&self) -> Option<Self::Continuation> {
        // Empty string means no further pages.
        if self.next_link.is_empty() {
            None
        } else {
            Some(self.next_link.clone())
        }
    }
}
impl TenantListResult {
    pub fn new(next_link: String) -> Self {
        Self {
            value: Vec::new(),
            next_link,
        }
    }
}
#[doc = "Tenant policy."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct TenantPolicy {
    #[doc = "Policy Id."]
    #[serde(rename = "policyId", default, skip_serializing_if = "Option::is_none")]
    pub policy_id: Option<String>,
    #[doc = "Blocks the leaving of subscriptions from user's tenant."]
    #[serde(rename = "blockSubscriptionsLeavingTenant", default, skip_serializing_if = "Option::is_none")]
    pub block_subscriptions_leaving_tenant: Option<bool>,
    #[doc = "Blocks the entering of subscriptions into user's tenant."]
    #[serde(rename = "blockSubscriptionsIntoTenant", default, skip_serializing_if = "Option::is_none")]
    pub block_subscriptions_into_tenant: Option<bool>,
    #[doc = "List of user objectIds that are exempted from the set subscription tenant policies for the user's tenant."]
    #[serde(
        rename = "exemptedPrincipals",
        default,
        deserialize_with = "azure_core::util::deserialize_null_as_default",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub exempted_principals: Vec<String>,
}
impl TenantPolicy {
    pub fn new() -> Self {
        Self::default()
    }
}
#[doc = "The workload type of the subscription. It can be either Production or DevTest."]
// Same remote-deserialize pattern as ProvisioningState: unknown service strings
// are preserved in `UnknownValue` and round-trip through Serialize unchanged.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "Workload")]
pub enum Workload {
    Production,
    DevTest,
    #[serde(skip_deserializing)]
    UnknownValue(String),
}
impl FromStr for Workload {
    type Err = value::Error;
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Self::deserialize(s.into_deserializer())
    }
}
impl<'de> Deserialize<'de> for Workload {
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        // Preserve the raw string on mismatch rather than erroring.
        let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
        Ok(deserialized)
    }
}
impl Serialize for Workload {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            Self::Production => serializer.serialize_unit_variant("Workload", 0u32, "Production"),
            Self::DevTest => serializer.serialize_unit_variant("Workload", 1u32, "DevTest"),
            Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
        }
    }
}
#[doc = "Metadata pertaining to creation and last modification of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct SystemData {
    #[doc = "The identity that created the resource."]
    #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
    pub created_by: Option<String>,
    #[doc = "The type of identity that created the resource."]
    #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
    pub created_by_type: Option<system_data::CreatedByType>,
    #[doc = "The timestamp of resource creation (UTC)."]
    // rfc3339::option handles (de)serialization of the optional timestamp.
    #[serde(rename = "createdAt", default, with = "azure_core::date::rfc3339::option")]
    pub created_at: Option<time::OffsetDateTime>,
    #[doc = "The identity that last modified the resource."]
    #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by: Option<String>,
    #[doc = "The type of identity that last modified the resource."]
    #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by_type: Option<system_data::LastModifiedByType>,
    #[doc = "The timestamp of resource last modification (UTC)"]
    #[serde(rename = "lastModifiedAt", default, with = "azure_core::date::rfc3339::option")]
    pub last_modified_at: Option<time::OffsetDateTime>,
}
impl SystemData {
    pub fn new() -> Self {
        Self::default()
    }
}
pub mod system_data {
    use super::*;
    // Both enums below follow the remote-deserialize pattern used elsewhere in
    // this file: unknown service strings land in `UnknownValue` and serialize
    // back verbatim.
    #[doc = "The type of identity that created the resource."]
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    #[serde(remote = "CreatedByType")]
    pub enum CreatedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
        #[serde(skip_deserializing)]
        UnknownValue(String),
    }
    impl FromStr for CreatedByType {
        type Err = value::Error;
        fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
            Self::deserialize(s.into_deserializer())
        }
    }
    impl<'de> Deserialize<'de> for CreatedByType {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            let s = String::deserialize(deserializer)?;
            let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
            Ok(deserialized)
        }
    }
    impl Serialize for CreatedByType {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                Self::User => serializer.serialize_unit_variant("CreatedByType", 0u32, "User"),
                Self::Application => serializer.serialize_unit_variant("CreatedByType", 1u32, "Application"),
                Self::ManagedIdentity => serializer.serialize_unit_variant("CreatedByType", 2u32, "ManagedIdentity"),
                Self::Key => serializer.serialize_unit_variant("CreatedByType", 3u32, "Key"),
                Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
            }
        }
    }
    #[doc = "The type of identity that last modified the resource."]
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    #[serde(remote = "LastModifiedByType")]
    pub enum LastModifiedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
        #[serde(skip_deserializing)]
        UnknownValue(String),
    }
    impl FromStr for LastModifiedByType {
        type Err = value::Error;
        fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
            Self::deserialize(s.into_deserializer())
        }
    }
    impl<'de> Deserialize<'de> for LastModifiedByType {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            let s = String::deserialize(deserializer)?;
            let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
            Ok(deserialized)
        }
    }
    impl Serialize for LastModifiedByType {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                Self::User => serializer.serialize_unit_variant("LastModifiedByType", 0u32, "User"),
                Self::Application => serializer.serialize_unit_variant("LastModifiedByType", 1u32, "Application"),
                Self::ManagedIdentity => serializer.serialize_unit_variant("LastModifiedByType", 2u32, "ManagedIdentity"),
                Self::Key => serializer.serialize_unit_variant("LastModifiedByType", 3u32, "Key"),
                Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
            }
        }
    }
}
| true
|
d6c9836726ef5e875c828aacea6b8f5f12316e29
|
Rust
|
repnop/advent_of_code_2017
|
/day_5/src/main.rs
|
UTF-8
| 2,278
| 3.75
| 4
|
[] |
no_license
|
/// Entry point: runs both puzzle parts against the bundled input file.
fn main() {
    let input = include_str!("..//input.txt");
    let part1 = day_5_part_1(input);
    let part2 = day_5_part_2(input);
    println!("Part 1: {} steps", part1);
    println!("Part 2: {} steps", part2);
}
/// Day 5, part 1: count the jumps needed to escape the offset list.
///
/// Each line of `input` is a signed jump offset. Starting at index 0, we
/// repeatedly jump by the offset at the current index, incrementing that
/// offset by one after each use. Returns the number of jumps taken before
/// the index leaves the list.
///
/// An empty input is escaped immediately and returns 0.
fn day_5_part_1(input: &str) -> usize {
    // Convert the input into a vector of i64s.
    let mut jumps: Vec<i64> = input.lines().map(|x| x.parse().unwrap()).collect();
    // BUGFIX: the original indexed `jumps[0]` unconditionally, panicking on
    // empty input (and `(jumps.len() - 1) as i64` underflowed for len == 0).
    if jumps.is_empty() {
        return 0;
    }
    let mut count = 0;
    let mut index = 0usize;
    loop {
        count += 1;
        let jump_val = jumps[index];
        // The offset is incremented *after* it has been read.
        jumps[index] += 1;
        let new_index = index as i64 + jump_val;
        // Escaped once the index falls outside 0..jumps.len().
        if new_index < 0 || new_index >= jumps.len() as i64 {
            break;
        }
        index = new_index as usize;
    }
    count
}
/// Day 5, part 2: like part 1, but an offset of three or more is
/// *decremented* after use instead of incremented.
///
/// Returns the number of jumps taken before the index leaves the list.
/// An empty input is escaped immediately and returns 0.
fn day_5_part_2(input: &str) -> usize {
    // Convert the input into a vector of i64s.
    let mut jumps: Vec<i64> = input.lines().map(|x| x.parse().unwrap()).collect();
    // BUGFIX: the original indexed `jumps[0]` unconditionally, panicking on
    // empty input (and `(jumps.len() - 1) as i64` underflowed for len == 0).
    if jumps.is_empty() {
        return 0;
    }
    let mut count = 0;
    let mut index = 0usize;
    loop {
        count += 1;
        let jump_val = jumps[index];
        // Offsets of 3 or more shrink after use; everything else grows.
        if jump_val >= 3 {
            jumps[index] -= 1;
        } else {
            jumps[index] += 1;
        }
        let new_index = index as i64 + jump_val;
        // Escaped once the index falls outside 0..jumps.len().
        if new_index < 0 || new_index >= jumps.len() as i64 {
            break;
        }
        index = new_index as usize;
    }
    count
}
#[test]
fn example_1() {
    // Worked example from the puzzle statement.
    assert_eq!(day_5_part_1("0\n3\n0\n1\n-3"), 5);
}
#[test]
fn example_1_part_2() {
    // Same input under part-2 rules.
    assert_eq!(day_5_part_2("0\n3\n0\n1\n-3"), 10);
}
| true
|
13c6b357a0e7b5f2fce21c0544a80594e07809b3
|
Rust
|
tauri-apps/tauri
|
/core/tauri/src/path/commands.rs
|
UTF-8
| 6,034
| 3.015625
| 3
|
[
"Apache-2.0",
"CC0-1.0",
"MIT",
"CC-BY-NC-ND-4.0"
] |
permissive
|
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use std::path::{Component, Path, PathBuf, MAIN_SEPARATOR};
use super::{BaseDirectory, Error, PathResolver, Result};
use crate::{command, AppHandle, Runtime, State};
/// Normalize a path, removing things like `.` and `..`, this snippet is taken from cargo's paths util.
/// https://github.com/rust-lang/cargo/blob/46fa867ff7043e3a0545bf3def7be904e1497afd/crates/cargo-util/src/paths.rs#L73-L106
fn normalize_path(path: &Path) -> PathBuf {
    let mut parts = path.components().peekable();
    // Keep a leading Windows prefix (e.g. `C:`) as the base of the result.
    let mut normalized = match parts.peek().cloned() {
        Some(prefix @ Component::Prefix(..)) => {
            parts.next();
            PathBuf::from(prefix.as_os_str())
        }
        _ => PathBuf::new(),
    };
    for part in parts {
        match part {
            // The prefix, if any, was consumed above.
            Component::Prefix(..) => unreachable!(),
            Component::RootDir => normalized.push(part.as_os_str()),
            // `.` segments contribute nothing.
            Component::CurDir => {}
            // `..` drops the last segment (a no-op at the root).
            Component::ParentDir => {
                normalized.pop();
            }
            Component::Normal(segment) => normalized.push(segment),
        }
    }
    normalized
}
/// Normalize a path, removing things like `.` and `..`, this snippet is taken from cargo's paths util but
/// slightly modified to not resolve absolute paths.
/// https://github.com/rust-lang/cargo/blob/46fa867ff7043e3a0545bf3def7be904e1497afd/crates/cargo-util/src/paths.rs#L73-L106
fn normalize_path_no_absolute(path: &Path) -> PathBuf {
    let mut parts = path.components().peekable();
    let mut normalized = match parts.peek().cloned() {
        Some(prefix @ Component::Prefix(..)) => {
            parts.next();
            PathBuf::from(prefix.as_os_str())
        }
        _ => PathBuf::new(),
    };
    for part in parts {
        match part {
            Component::Prefix(..) => unreachable!(),
            Component::RootDir => normalized.push(part.as_os_str()),
            Component::CurDir => {}
            Component::ParentDir => {
                normalized.pop();
            }
            Component::Normal(segment) => {
                // `PathBuf::push` would replace the whole path when handed an
                // absolute component, which is not wanted here, so the path is
                // built by plain string concatenation instead.
                let mut joined = normalized.to_string_lossy().into_owned();
                // Add a separator only when needed so the result never gains
                // an unwanted leading separator.
                if !joined.is_empty() && !joined.ends_with('/') && !joined.ends_with('\\') {
                    joined.push(MAIN_SEPARATOR);
                }
                if let Some(segment) = segment.to_str() {
                    joined.push_str(segment);
                }
                normalized = PathBuf::from(joined);
            }
        }
    }
    normalized
}
/// Tauri command: resolve `path` (or the base directory itself when `path`
/// is `None`) against the given `BaseDirectory` via the app's path resolver.
/// Thin delegation to `super::resolve_path`.
#[command(root = "crate")]
pub fn resolve_directory<R: Runtime>(
    _app: AppHandle<R>,
    resolver: State<'_, PathResolver<R>>,
    directory: BaseDirectory,
    path: Option<PathBuf>,
) -> Result<PathBuf> {
    super::resolve_path(&resolver, directory, path)
}
#[command(root = "crate")]
pub fn resolve(paths: Vec<String>) -> Result<PathBuf> {
// Start with current directory then start adding paths from the vector one by one using `PathBuf.push()` which
// will ensure that if an absolute path is encountered in the iteration, it will be used as the current full path.
//
// examples:
// 1. `vec!["."]` or `vec![]` will be equal to `std::env::current_dir()`
// 2. `vec!["/foo/bar", "/tmp/file", "baz"]` will be equal to `PathBuf::from("/tmp/file/baz")`
let mut path = std::env::current_dir().map_err(Error::CurrentDir)?;
for p in paths {
path.push(p);
}
Ok(normalize_path(&path))
}
/// Tauri command mirroring Node.js `path.normalize`.
#[command(root = "crate")]
pub fn normalize(path: String) -> String {
    let mut p = normalize_path_no_absolute(Path::new(&path))
        .to_string_lossy()
        .to_string();
    // Node.js behavior is to return `".."` for `normalize("..")`
    // and `"."` for `normalize("")` or `normalize(".")`
    if p.is_empty() && path == ".." {
        "..".into()
    } else if p.is_empty() && path == "." {
        ".".into()
    } else {
        // Preserve a trailing separator from the input, as Node.js does.
        //
        // BUGFIX: this previously tested `!p.ends_with('/') || !p.ends_with('\\')`,
        // which is always true (no string ends with both characters), so a
        // separator was appended even when `p` already ended with one —
        // e.g. `normalize("/")` yielded `"//"`. Append only when `p` has no
        // trailing separator.
        if (path.ends_with('/') || path.ends_with('\\'))
            && !p.ends_with('/')
            && !p.ends_with('\\')
        {
            p.push(MAIN_SEPARATOR);
        }
        p
    }
}
#[command(root = "crate")]
pub fn join(mut paths: Vec<String>) -> String {
    // Ensure every element ends with a separator before concatenating, so the
    // pieces remain distinct components when `Path::components` walks the
    // combined string inside `normalize_path_no_absolute`.
    for p in paths.iter_mut() {
        if !p.ends_with('/') && !p.ends_with('\\') {
            p.push(MAIN_SEPARATOR);
        }
    }
    let combined = PathBuf::from(paths.concat());
    let normalized = normalize_path_no_absolute(&combined)
        .to_string_lossy()
        .to_string();
    // Node.js returns `"."` for an empty join.
    if normalized.is_empty() {
        ".".into()
    } else {
        normalized
    }
}
#[command(root = "crate")]
pub fn dirname(path: String) -> Result<PathBuf> {
match Path::new(&path).parent() {
Some(p) => Ok(p.to_path_buf()),
None => Err(Error::NoParent),
}
}
#[command(root = "crate")]
pub fn extname(path: String) -> Result<String> {
match Path::new(&path)
.extension()
.and_then(std::ffi::OsStr::to_str)
{
Some(p) => Ok(p.to_string()),
None => Err(Error::NoExtension),
}
}
/// Tauri command mirroring Node.js `path.basename`.
///
/// Returns the final component of `path`; when `ext` is given and the file
/// name ends with it, that trailing suffix is removed.
#[command(root = "crate")]
pub fn basename(path: String, ext: Option<String>) -> Result<String> {
    match Path::new(&path)
        .file_name()
        .and_then(std::ffi::OsStr::to_str)
    {
        Some(name) => {
            // BUGFIX: this used `str::replace`, which removed *every*
            // occurrence of `ext` anywhere in the name — e.g.
            // `basename("a.txt.txt", ".txt")` became `"a"` and
            // `basename("abcabc", "abc")` became `""`. Node.js semantics
            // strip `ext` only when it is a trailing suffix.
            let stripped = match ext.as_deref().and_then(|e| name.strip_suffix(e)) {
                Some(s) => s,
                None => name,
            };
            Ok(stripped.to_string())
        }
        None => Err(Error::NoBasename),
    }
}
/// Tauri command: whether `path` is absolute.
///
/// Delegates to `Path::is_absolute`: on Unix this means a leading `/`; on
/// Windows it additionally requires a prefix such as `C:\`.
#[command(root = "crate")]
pub fn is_absolute(path: String) -> bool {
    Path::new(&path).is_absolute()
}
| true
|
33e9651893e369c441b711c708fc5a1ee3afec44
|
Rust
|
AlexsanderShaw/checksec.rs
|
/src/shared.rs
|
UTF-8
| 1,555
| 3.03125
| 3
|
[
"Apache-2.0"
] |
permissive
|
//! Implements shared functionalities between elf/macho modules
#[cfg(feature = "color")]
use colored::Colorize;
use serde::{Deserialize, Serialize};
use std::fmt;
/// Split contents of `DT_RPATH`/`DT_RUNPATH` or @rpath entries
#[derive(Debug, Deserialize, Serialize)]
pub enum Rpath {
    /// No rpath entry present.
    None,
    /// An rpath entry and its path value.
    Yes(String),
    /// An rpath entry and its path value; per the name this marks a writable
    /// target — confirm against the elf/macho producers.
    YesRW(String),
}
/// wrapper for Vec<Rpath> to allow easy color output per path entry
#[derive(Debug, Deserialize, Serialize)]
pub struct VecRpath {
    // Rendered `:`-separated by the Display impls below.
    paths: Vec<Rpath>,
}
impl VecRpath {
    /// Wrap a list of rpath entries for display/serialization.
    #[must_use]
    pub fn new(paths: Vec<Rpath>) -> Self {
        Self { paths }
    }
}
#[cfg(not(feature = "color"))]
impl fmt::Display for VecRpath {
    /// Render entries as a `:`-separated list, using `"None"` for absent
    /// entries. Plain-text counterpart of the colorized impl.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let rendered: Vec<String> = self
            .paths
            .iter()
            .map(|entry| match entry {
                // `Yes` and `YesRW` render identically without color.
                Rpath::Yes(p) | Rpath::YesRW(p) => p.to_string(),
                Rpath::None => "None".to_string(),
            })
            .collect();
        write!(f, "{}", rendered.join(":"))
    }
}
#[cfg(feature = "color")]
impl fmt::Display for VecRpath {
    // Colorized variant of the impl above: rpath entries are rendered red,
    // the absence of an rpath green (via the `colored` crate).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s: Vec<String> = Vec::<String>::new();
        for v in &self.paths {
            match v {
                Rpath::Yes(p) | Rpath::YesRW(p) => s.push(p.red().to_string()),
                Rpath::None => s.push("None".green().to_string()),
            }
        }
        write!(f, "{}", s.join(":"))?;
        Ok(())
    }
}
| true
|
1cf7f9e092ceefb3a9c95ff776773509682b8115
|
Rust
|
stefano-pogliani/auth-gateway
|
/src/engine/mod.rs
|
UTF-8
| 5,363
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
use std::fs::File;
use actix_web::http::header::HeaderName;
use actix_web::http::header::HeaderValue;
use anyhow::Context;
use anyhow::Result;
use crate::errors::InvalidEnrichResponseRule;
use crate::models::AuthenticationContext;
use crate::models::AuthenticationResult;
use crate::models::EnrichResponseRule;
use crate::models::PostAuthRule;
use crate::models::PreAuthRule;
use crate::models::RequestContext;
use crate::models::Rule;
use crate::models::RuleAction;
#[cfg(test)]
mod tests;
/// Process rules matching requests.
#[derive(Clone, Debug)]
pub struct RulesEngine {
    /// List of response enrichment rules.
    rules_enrich: Vec<EnrichResponseRule>,
    /// List of post-auth phase rules.
    rules_postauth: Vec<PostAuthRule>,
    /// List of pre-auth phase rules.
    rules_preauth: Vec<PreAuthRule>,
}
impl RulesEngine {
    /// Start building a new `RulesEngine`.
    pub fn builder() -> RulesEngineBuilder {
        RulesEngineBuilder {
            files: Vec::new(),
            rules_enrich: Vec::new(),
            rules_postauth: Vec::new(),
            rules_preauth: Vec::new(),
        }
    }

    /// Evaluate enrich rules.
    ///
    /// The first rule matching the request and authentication context is
    /// applied: it may remove and set response headers. When no rule
    /// matches, `result` passes through unchanged.
    pub fn eval_enrich(
        &self,
        context: &RequestContext,
        mut result: AuthenticationResult,
    ) -> Result<AuthenticationResult> {
        let matched = self
            .rules_enrich
            .iter()
            .find(|rule| rule.check(context, &result.authentication_context));
        let rule = match matched {
            Some(rule) => rule,
            // No matching rule: return the result untouched.
            None => return Ok(result),
        };
        // Removals are applied before sets, so a header named in both lists
        // ends up set.
        for name in &rule.headers_remove {
            let name = HeaderName::from_bytes(name.as_bytes())
                .map_err(anyhow::Error::from)
                .map_err(InvalidEnrichResponseRule::from)?;
            result.headers.remove(name);
        }
        for (name, value) in &rule.headers_set {
            let name = HeaderName::from_bytes(name.as_bytes())
                .map_err(anyhow::Error::from)
                .map_err(InvalidEnrichResponseRule::from)?;
            let value = HeaderValue::from_str(value)
                .map_err(anyhow::Error::from)
                .map_err(InvalidEnrichResponseRule::from)?;
            result.headers.insert(name, value);
        }
        Ok(result)
    }

    /// Evaluate postauth rules; `Delegate` when no rule matches.
    pub fn eval_postauth(
        &self,
        context: &RequestContext,
        auth_context: &AuthenticationContext,
    ) -> RuleAction {
        match self
            .rules_postauth
            .iter()
            .find(|rule| rule.check(context, auth_context))
        {
            Some(rule) => rule.action,
            None => RuleAction::Delegate,
        }
    }

    /// Evaluate preauth rules; `Delegate` when no rule matches.
    pub fn eval_preauth(&self, context: &RequestContext) -> RuleAction {
        match self.rules_preauth.iter().find(|rule| rule.check(context)) {
            Some(rule) => rule.action,
            None => RuleAction::Delegate,
        }
    }
}
/// Builder for `RulesEngine`s.
pub struct RulesEngineBuilder {
    // Paths of YAML rule files; they are read when `build` runs.
    files: Vec<String>,
    // Rules registered directly on the builder.
    rules_enrich: Vec<EnrichResponseRule>,
    rules_postauth: Vec<PostAuthRule>,
    rules_preauth: Vec<PreAuthRule>,
}
impl RulesEngineBuilder {
/// Process provided options and build the `RulesEngine`.
pub fn build(self) -> Result<RulesEngine> {
let mut rules_enrich = self.rules_enrich;
let mut rules_postauth = self.rules_postauth;
let mut rules_preauth = self.rules_preauth;
for file in self.files {
let rules =
File::open(&file).with_context(|| format!("Unable to load rules from {}", file))?;
let rules: Vec<Rule> = serde_yaml::from_reader(rules)
.with_context(|| format!("Unable to YAML decode rules from {}", file))?;
for rule in rules {
match rule {
Rule::EnrichResponse(rule) => rules_enrich.push(rule),
Rule::PostAuth(rule) => rules_postauth.push(rule),
Rule::PreAuth(rule) => rules_preauth.push(rule),
}
}
}
Ok(RulesEngine {
rules_enrich,
rules_postauth,
rules_preauth,
})
}
/// Load rules from these files.
///
/// These rules are loaded last, when the `RulesEngine` is build.
pub fn rule_files<'iter, I>(mut self, files: I) -> RulesEngineBuilder
where
I: IntoIterator<Item = &'iter String>,
{
self.files = files.into_iter().map(String::to_owned).collect();
self
}
/// Insert am enrich phase rule.
#[cfg(test)]
pub fn rule_enrich(mut self, rule: EnrichResponseRule) -> RulesEngineBuilder {
self.rules_enrich.push(rule);
self
}
/// Insert a post-auth phase rule.
#[cfg(test)]
pub fn rule_postauth(mut self, rule: PostAuthRule) -> RulesEngineBuilder {
self.rules_postauth.push(rule);
self
}
/// Insert a pre-auth phase rule.
#[cfg(test)]
pub fn rule_preauth(mut self, rule: PreAuthRule) -> RulesEngineBuilder {
self.rules_preauth.push(rule);
self
}
}
| true
|
3118e050e58525ff26ddb4ab46e7db8f66e6f610
|
Rust
|
koute/pinky
|
/nes/src/generic_mapper.rs
|
UTF-8
| 29,555
| 2.953125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use core::fmt;
use alloc::vec::Vec;
use emumisc::{PeekPoke, At};
use rom::Mirroring;
use mappers::Mapper;
// This is a generic memory mapper that is used to simplify actual
// emulated mappers.
pub struct GenericMapper {
    // 0x6000 .. 0xFFFF, 40kb in total, remapped in 8kb chunks
    //
    // Offsets carry the bank's base address pre-subtracted (see the comment
    // on PREBAKED_CPU_OFFSETS), so lookups can add the offset to the raw
    // address directly — presumably `memory[address + offset]`; confirm in
    // the accessor methods.
    cpu_offsets: [i32; 5],
    cpu_flags: [MapFlag::Ty; 5],
    // 0x0000 .. 0x2FFF, 12kb in total, remapped in 1kb chunks
    ppu_offsets: [i32; 12],
    ppu_flags: [MapFlag::Ty; 12],
    // For maximum flexibility we just dump everything into one big vector.
    memory: Vec< u8 >
}
impl fmt::Debug for GenericMapper {
    // Dumps the total memory size plus, for every CPU and PPU bank, whether
    // it is mapped/writable and which kb-range of `memory` it points at.
    fn fmt( &self, fmt: &mut fmt::Formatter ) -> fmt::Result {
        // `prebaked_offsets` entries are negated bank base addresses, so
        // `-prebaked` recovers the address and `offset - prebaked` the
        // absolute byte position inside `memory`.
        fn print_maps( fmt: &mut fmt::Formatter, offsets: &[i32], flags: &[MapFlag::Ty], prebaked_offsets: &[i32] ) -> fmt::Result {
            // Bank size in kb, derived from the spacing of the base addresses.
            let increment = -(prebaked_offsets[1] - prebaked_offsets[0]) / 1024;
            for ((offset, flags), prebaked) in offsets.iter().zip( flags.iter() ).zip( prebaked_offsets.iter() ) {
                try!( write!( fmt, " 0x{:04X}: ", -prebaked ) );
                if flags.contains( MapFlag::Mapped ) {
                    if flags.contains( MapFlag::Writable ) {
                        try!( write!( fmt, "[w] " ) );
                    } else {
                        try!( write!( fmt, "[-] " ) );
                    }
                    let offset = (offset - prebaked) / 1024;
                    try!( writeln!( fmt, "{}k .. {}k", offset, offset + increment ) );
                } else {
                    try!( writeln!( fmt, "[unmapped]" ) );
                }
            }
            Ok(())
        }
        try!( writeln!( fmt, "GenericMapper {{" ) );
        try!( writeln!( fmt, " Total memory: {}k,", self.memory.len() / 1024 ) );
        try!( writeln!( fmt, " CPU {{" ) );
        try!( print_maps( fmt, &self.cpu_offsets[..], &self.cpu_flags[..], &PREBAKED_CPU_OFFSETS[..] ) );
        try!( writeln!( fmt, " }}," ) );
        try!( writeln!( fmt, " PPU {{" ) );
        try!( print_maps( fmt, &self.ppu_offsets[..], &self.ppu_flags[..], &PREBAKED_PPU_OFFSETS[..] ) );
        try!( writeln!( fmt, " }}" ) );
        try!( write!( fmt, "}}" ) );
        Ok(())
    }
}
// Per-bank mapping flags. Uses the pre-1.0 `bitflags!` macro syntax
// (`pub flags Ty: u8`), so this block must stay in step with that version.
pub mod MapFlag {
    bitflags!(
        pub flags Ty: u8 {
            const Mapped = 1 << 0,
            const Writable = 1 << 1
        }
    );
}
// We prebake the memory address of a given memory region in our offsets,
// effectively saving us one extra operation per memory access.
//
// Each entry is the *negated* base address of the corresponding bank:
// 8kb-spaced CPU banks from 0x6000, 1kb-spaced PPU banks from 0x0000.
const PREBAKED_CPU_OFFSETS: [i32; 5] = [
    -0x6000,
    -0x8000,
    -0xA000,
    -0xC000,
    -0xE000
];
const PREBAKED_PPU_OFFSETS: [i32; 12] = [
    -0x0000,
    -0x0400,
    -0x0800,
    -0x0C00,
    -0x1000,
    -0x1400,
    -0x1800,
    -0x1C00,
    -0x2000,
    -0x2400,
    -0x2800,
    -0x2C00
];
// Typed bank identifiers: the variant name encodes the bank's base address
// and the discriminant its index into the offset/flag tables.
pub mod bank {
    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    pub enum CPU_8K {
        Ox6000 = 0,
        Ox8000,
        OxA000,
        OxC000,
        OxE000
    }
    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    pub enum PPU_1K {
        Ox0000 = 0,
        Ox0400,
        Ox0800,
        Ox0C00,
        Ox1000,
        Ox1400,
        Ox1800,
        Ox1C00,
        Ox2000,
        Ox2400,
        Ox2800,
        Ox2C00
    }
    // 4kb PPU banks reuse the 1kb index space, hence the explicit
    // discriminants 0/4/8.
    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    pub enum PPU_4K {
        Ox0000 = 0,
        Ox1000 = 4,
        Ox2000 = 8
    }
}
// Map a PPU address (0x0000..=0x2FFF) to its 1kb bank index (0..=11).
#[inline]
fn get_ppu_bank( address: u16 ) -> u16 {
    debug_assert!( address <= 0x2FFF );
    // Each PPU bank spans 1kb (0x400 bytes).
    let bank = address / 0x400;
    // 12 == PREBAKED_PPU_OFFSETS.len()
    debug_assert!( bank < 12 );
    bank
}
/// Maps a CPU bus address (must be >= 0x6000) to the index of the 8 KB
/// bank slot that services it; slot 0 starts at 0x6000.
#[inline]
fn get_cpu_bank( address: u16 ) -> u16 {
    debug_assert!( address >= 0x6000 );
    // Every CPU bank slot is 0x2000 (8 KB) wide.
    let bank = (address - 0x6000) / 0x2000;
    debug_assert!( bank < PREBAKED_CPU_OFFSETS.len() as u16 );
    bank
}
/// Checks every 8 KB CPU bank at both of its boundary addresses.
#[test]
fn test_get_cpu_bank() {
    let expectations = [
        ( 0x6000_u16, 0x7FFF_u16, 0_u16 ),
        ( 0x8000, 0x9FFF, 1 ),
        ( 0xA000, 0xBFFF, 2 ),
        ( 0xC000, 0xDFFF, 3 ),
        ( 0xE000, 0xFFFF, 4 )
    ];
    for &( first, last, bank ) in expectations.iter() {
        assert_eq!( get_cpu_bank( first ), bank );
        assert_eq!( get_cpu_bank( last ), bank );
    }
}
impl GenericMapper {
    /// Creates an empty mapper: all banks carry their identity (prebaked)
    /// offsets, no bank is mapped or writable, and no backing memory exists.
    pub fn new() -> Self {
        GenericMapper {
            cpu_offsets: PREBAKED_CPU_OFFSETS,
            cpu_flags: [MapFlag::Ty::empty(); 5],
            ppu_offsets: PREBAKED_PPU_OFFSETS,
            ppu_flags: [MapFlag::Ty::empty(); 12],
            memory: Vec::new()
        }
    }
    #[cfg(test)]
    pub fn memory( &mut self ) -> &[u8] {
        &self.memory[..]
    }
    #[cfg(test)]
    pub fn memory_mut( &mut self ) -> &mut [u8] {
        &mut self.memory[..]
    }
    /// Appends `memory` to the backing store.
    #[inline]
    pub fn extend( &mut self, memory: &[u8] ) {
        self.memory.extend_from_slice( memory );
    }
    /// Appends `size` zeroed bytes to the backing store.
    #[inline]
    pub fn extend_empty( &mut self, size: u32 ) {
        let new_size = self.memory.len() + size as usize;
        self.memory.resize( new_size, 0 );
    }
    #[inline]
    pub fn total_memory_size( &self ) -> usize {
        self.memory.len()
    }
    /// Allocates 8 KB of writable save RAM and maps it at CPU 0x6000.
    #[inline]
    pub fn initialize_save_ram( &mut self ) {
        let offset = self.memory.len() as u32;
        self.extend_empty( 8 * 1024 );
        self.set_cpu_8k_bank( bank::CPU_8K::Ox6000, offset );
        self.set_cpu_8k_writable( bank::CPU_8K::Ox6000, true );
    }
    /// Copies `rom` into the backing store and maps it at CPU 0x8000+.
    /// A 16 KB ROM is mirrored into the upper 16 KB half.
    /// Returns the internal offset where the ROM was placed.
    #[inline]
    pub fn initialize_rom( &mut self, rom: &[u8] ) -> u32 {
        assert!( rom.len() >= 16 * 1024 );
        let offset = self.memory.len() as u32;
        self.extend( rom );
        self.set_cpu_lower_16k_bank( offset );
        if rom.len() >= 32 * 1024 {
            // We have two independent banks.
            self.set_cpu_upper_16k_bank( offset + 16 * 1024 );
        } else {
            assert_eq!( rom.len(), 16 * 1024 );
            // Only one bank, so we mirror it.
            self.set_cpu_upper_16k_bank( offset );
        }
        offset
    }
    /// Copies the CHR ROM into the backing store and maps it at PPU 0x0000+.
    /// With no CHR ROM, 8 KB of writable CHR RAM is allocated instead.
    /// Returns the internal offset where it was placed.
    #[inline]
    pub fn initialize_video_rom( &mut self, video_rom: &[u8] ) -> u32 {
        let offset = self.memory.len() as u32;
        if video_rom.len() > 0 {
            self.extend( video_rom );
        } else {
            self.extend_empty( 8 * 1024 );
            self.set_ppu_4k_writable( bank::PPU_4K::Ox0000, true );
            self.set_ppu_4k_writable( bank::PPU_4K::Ox1000, true );
        }
        self.set_ppu_4k_bank( bank::PPU_4K::Ox0000, offset );
        self.set_ppu_4k_bank( bank::PPU_4K::Ox1000, offset + 4 * 1024 );
        offset
    }
    /// Allocates nametable RAM (2 KB mirrored, or 4 KB for four-screen)
    /// and wires the PPU 0x2000 region accordingly.
    /// Returns the internal offset of the tilemap memory.
    #[inline]
    pub fn initialize_background_tilemaps( &mut self, mirroring: Mirroring ) -> u32 {
        let offset = self.memory.len() as u32;
        match mirroring {
            Mirroring::Horizontal | Mirroring::Vertical => {
                self.extend_empty( 2 * 1024 );
                if mirroring == Mirroring::Horizontal {
                    self.set_horizontal_mirroring( offset );
                } else {
                    self.set_vertical_mirroring( offset );
                }
            },
            Mirroring::FourScreen => {
                self.extend_empty( 4 * 1024 );
                self.set_four_screen_mirroring( offset );
            }
        }
        self.set_ppu_4k_writable( bank::PPU_4K::Ox2000, true );
        offset
    }
    // Bus address -> backing-memory index (may be nonsense if unmapped).
    #[inline]
    fn translate_cpu_address( &self, address: u16 ) -> i32 {
        (&self.cpu_offsets[..]).peek( get_cpu_bank( address ) ) + address as i32
    }
    #[inline]
    fn is_cpu_address_writable( &self, address: u16 ) -> bool {
        (&self.cpu_flags[..]).peek( get_cpu_bank( address ) ).contains( MapFlag::Writable )
    }
    #[inline]
    fn is_cpu_address_mapped( &self, address: u16 ) -> bool {
        (&self.cpu_flags[..]).peek( get_cpu_bank( address ) ).contains( MapFlag::Mapped )
    }
    /// Points the given 8 KB CPU bank at `internal_address` in the backing
    /// store and marks it mapped. The prebaked offset is re-added so that
    /// `bus_address + offset` lands on the right byte.
    #[inline]
    pub fn set_cpu_8k_bank( &mut self, bank: bank::CPU_8K, internal_address: u32 ) {
        debug_assert!( (internal_address + 8 * 1024) <= self.memory.len() as u32 );
        let bank = bank as usize;
        let new_offset = internal_address as i32 + PREBAKED_CPU_OFFSETS.peek( bank );
        (&mut self.cpu_offsets[..]).poke( bank, new_offset );
        (&mut self.cpu_flags[..]).at_mut( bank ).insert( MapFlag::Mapped );
    }
    #[inline]
    pub fn set_cpu_lower_16k_bank( &mut self, internal_address: u32 ) {
        self.set_cpu_8k_bank( bank::CPU_8K::Ox8000, internal_address );
        self.set_cpu_8k_bank( bank::CPU_8K::OxA000, internal_address + 8 * 1024 );
    }
    #[inline]
    pub fn set_cpu_upper_16k_bank( &mut self, internal_address: u32 ) {
        self.set_cpu_8k_bank( bank::CPU_8K::OxC000, internal_address );
        self.set_cpu_8k_bank( bank::CPU_8K::OxE000, internal_address + 8 * 1024 );
    }
    #[inline]
    pub fn set_cpu_32k_bank( &mut self, internal_address: u32 ) {
        self.set_cpu_8k_bank( bank::CPU_8K::Ox8000, internal_address );
        self.set_cpu_8k_bank( bank::CPU_8K::OxA000, internal_address + 8 * 1024 );
        self.set_cpu_8k_bank( bank::CPU_8K::OxC000, internal_address + 16 * 1024 );
        self.set_cpu_8k_bank( bank::CPU_8K::OxE000, internal_address + 24 * 1024 );
    }
    #[inline]
    pub fn set_cpu_8k_writable( &mut self, bank: bank::CPU_8K, is_writable: bool ) {
        let bank = bank as usize;
        let flags = (&mut self.cpu_flags[..]).at_mut( bank );
        if is_writable {
            flags.insert( MapFlag::Writable );
        } else {
            flags.remove( MapFlag::Writable );
        }
    }
    #[inline]
    fn translate_ppu_address( &self, address: u16 ) -> i32 {
        (&self.ppu_offsets[..]).peek( get_ppu_bank( address ) ) + address as i32
    }
    #[inline]
    fn is_ppu_address_writable( &self, address: u16 ) -> bool {
        (&self.ppu_flags[..]).peek( get_ppu_bank( address ) ).contains( MapFlag::Writable )
    }
    #[inline]
    fn is_ppu_address_mapped( &self, address: u16 ) -> bool {
        (&self.ppu_flags[..]).peek( get_ppu_bank( address ) ).contains( MapFlag::Mapped )
    }
    /// Points the given 1 KB PPU bank at `internal_address` and marks it mapped.
    #[inline]
    pub fn set_ppu_1k_bank( &mut self, bank: bank::PPU_1K, internal_address: u32 ) {
        debug_assert!( (internal_address + 1024) <= self.memory.len() as u32 );
        let bank = bank as usize;
        let new_offset = internal_address as i32 + PREBAKED_PPU_OFFSETS.peek( bank );
        (&mut self.ppu_offsets[..]).poke( bank, new_offset );
        (&mut self.ppu_flags[..]).at_mut( bank ).insert( MapFlag::Mapped );
    }
    #[inline]
    pub fn set_ppu_1k_writable( &mut self, bank: bank::PPU_1K, is_writable: bool ) {
        let bank = bank as usize;
        let flags = (&mut self.ppu_flags[..]).at_mut( bank );
        if is_writable {
            flags.insert( MapFlag::Writable );
        } else {
            flags.remove( MapFlag::Writable );
        }
    }
    /// Maps a 4 KB PPU region by wiring its four consecutive 1 KB slots.
    #[inline]
    pub fn set_ppu_4k_bank( &mut self, bank: bank::PPU_4K, internal_address: u32 ) {
        match bank {
            bank::PPU_4K::Ox0000 => {
                self.set_ppu_1k_bank( bank::PPU_1K::Ox0000, internal_address );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox0400, internal_address + 0x0400 );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox0800, internal_address + 0x0800 );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox0C00, internal_address + 0x0C00 );
            },
            bank::PPU_4K::Ox1000 => {
                self.set_ppu_1k_bank( bank::PPU_1K::Ox1000, internal_address );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox1400, internal_address + 0x0400 );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox1800, internal_address + 0x0800 );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox1C00, internal_address + 0x0C00 );
            },
            bank::PPU_4K::Ox2000 => {
                self.set_ppu_1k_bank( bank::PPU_1K::Ox2000, internal_address );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox2400, internal_address + 0x0400 );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox2800, internal_address + 0x0800 );
                self.set_ppu_1k_bank( bank::PPU_1K::Ox2C00, internal_address + 0x0C00 );
            }
        }
    }
    #[inline]
    pub fn set_ppu_4k_writable( &mut self, bank: bank::PPU_4K, is_writable: bool ) {
        match bank {
            bank::PPU_4K::Ox0000 => {
                self.set_ppu_1k_writable( bank::PPU_1K::Ox0000, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox0400, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox0800, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox0C00, is_writable );
            },
            bank::PPU_4K::Ox1000 => {
                self.set_ppu_1k_writable( bank::PPU_1K::Ox1000, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox1400, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox1800, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox1C00, is_writable );
            },
            bank::PPU_4K::Ox2000 => {
                self.set_ppu_1k_writable( bank::PPU_1K::Ox2000, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox2400, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox2800, is_writable );
                self.set_ppu_1k_writable( bank::PPU_1K::Ox2C00, is_writable );
            }
        }
    }
    #[inline]
    pub fn set_ppu_lower_4k_bank( &mut self, internal_address: u32 ) {
        self.set_ppu_4k_bank( bank::PPU_4K::Ox0000, internal_address );
    }
    #[inline]
    pub fn set_ppu_upper_4k_bank( &mut self, internal_address: u32 ) {
        self.set_ppu_4k_bank( bank::PPU_4K::Ox1000, internal_address );
    }
    #[inline]
    pub fn set_ppu_8k_bank( &mut self, internal_address: u32 ) {
        self.set_ppu_4k_bank( bank::PPU_4K::Ox0000, internal_address );
        self.set_ppu_4k_bank( bank::PPU_4K::Ox1000, internal_address + 0x1000 );
    }
    // Nametable wirings: 0x2000/0x2400 and 0x2800/0x2C00 pairs share pages.
    #[inline]
    pub fn set_horizontal_mirroring( &mut self, internal_address: u32 ) {
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2000, internal_address );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2400, internal_address );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2800, internal_address + 0x0400 );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2C00, internal_address + 0x0400 );
    }
    #[inline]
    pub fn set_vertical_mirroring( &mut self, internal_address: u32 ) {
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2000, internal_address );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2400, internal_address + 0x0400 );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2800, internal_address );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2C00, internal_address + 0x0400 );
    }
    #[inline]
    pub fn set_only_lower_bank_mirroring( &mut self, internal_address: u32 ) {
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2000, internal_address );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2400, internal_address );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2800, internal_address );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2C00, internal_address );
    }
    #[inline]
    pub fn set_only_upper_bank_mirroring( &mut self, internal_address: u32 ) {
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2000, internal_address + 0x0400 );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2400, internal_address + 0x0400 );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2800, internal_address + 0x0400 );
        self.set_ppu_1k_bank( bank::PPU_1K::Ox2C00, internal_address + 0x0400 );
    }
    /// Maps 4 KB of distinct nametable memory, growing the backing store
    /// if the requested region extends past its current end.
    #[inline]
    pub fn set_four_screen_mirroring( &mut self, internal_address: u32 ) {
        if internal_address + 4 * 1024 > self.memory.len() as u32 {
            let extra_space = internal_address + 4 * 1024 - self.memory.len() as u32;
            self.extend_empty( extra_space );
        }
        self.set_ppu_4k_bank( bank::PPU_4K::Ox2000, internal_address );
    }
    #[inline]
    pub fn peek_cpu_memory_space( &self, address: u16 ) -> u8 {
        let actual_address = self.translate_cpu_address( address );
        self.memory.peek( actual_address )
    }
    /// Writes through the CPU bank table; writes to non-writable banks are
    /// dropped (optionally logged) rather than panicking.
    #[inline]
    pub fn poke_cpu_memory_space( &mut self, address: u16, value: u8 ) {
        if self.is_cpu_address_writable( address ) == false {
            #[cfg(feature = "log")]
            warn!( "Unhandled write to 0x{:04X} (value=0x{:02X})", address, value );
            return;
        }
        let actual_address = self.translate_cpu_address( address );
        self.memory.poke( actual_address, value )
    }
    /// Reads through the PPU bank table; 0x3000..=0x3FFF mirrors
    /// 0x2000..=0x2FFF, which is folded here before bank lookup.
    #[inline]
    pub fn peek_ppu_memory_space( &self, address: u16 ) -> u8 {
        let address = if address >= 0x3000 { address - 0x1000 } else { address };
        let actual_address = self.translate_ppu_address( address );
        self.memory.peek( actual_address )
    }
    #[inline]
    pub fn poke_ppu_memory_space( &mut self, address: u16, value: u8 ) {
        let address = if address >= 0x3000 { address - 0x1000 } else { address };
        if self.is_ppu_address_writable( address ) == false {
            return;
        }
        let actual_address = self.translate_ppu_address( address );
        self.memory.poke( actual_address, value )
    }
}
/// The `Mapper` trait hooks are pure delegation: ROM and SRAM accesses share
/// the CPU address space, video accesses the PPU address space.
impl Mapper for GenericMapper {
    #[inline]
    fn peek_rom( &self, address: u16 ) -> u8 {
        self.peek_cpu_memory_space( address )
    }
    #[inline]
    fn poke_rom( &mut self, address: u16, value: u8 ) {
        self.poke_cpu_memory_space( address, value )
    }
    #[inline]
    fn peek_sram( &self, address: u16 ) -> u8 {
        self.peek_cpu_memory_space( address )
    }
    #[inline]
    fn poke_sram( &mut self, address: u16, value: u8 ) {
        self.poke_cpu_memory_space( address, value )
    }
    #[inline]
    fn peek_video_memory( &self, address: u16 ) -> u8 {
        self.peek_ppu_memory_space( address )
    }
    #[inline]
    fn poke_video_memory( &mut self, address: u16, value: u8 ) {
        self.poke_ppu_memory_space( address, value )
    }
}
use core::ops::Sub;
use rom::{NesRom, LoadError};
/// Reduces `value` into the range `[0, limit)` by repeated subtraction —
/// effectively `value % limit` for the ordered, subtractable integer types
/// the bank-switching helpers use.
///
/// Repeated subtraction isn't fast, and would be unnecessary for
/// power-of-two bank counts (a bitwise 'and' would do), but the operand
/// ranges here are tiny, so for now it'll do.
/// NOTE(review): a `limit` of zero never terminates for unsigned types;
/// callers must guarantee a non-zero bank count.
#[inline]
fn wraparound< T: Sub< Output = T > + PartialOrd + Copy >( limit: T, value: T ) -> T {
    let mut remainder = value;
    loop {
        if remainder < limit {
            return remainder;
        }
        remainder = remainder - limit;
    }
}
// This is an even more simplified generic mapper.
//
// It wraps `GenericMapper` and remembers where each region (save RAM, ROM,
// CHR, nametables) landed inside the shared backing store, so that bank
// switching can be expressed in terms of bank *numbers* instead of raw
// internal offsets.
pub struct BankedGenericMapper {
    inner: GenericMapper,
    // Internal offset of the PRG ROM inside `inner`'s backing memory.
    internal_rom_bank_offset: u32,
    // Internal offset of the CHR ROM/RAM.
    internal_video_rom_offset: u32,
    // Internal offset of the nametable memory.
    internal_background_tilemaps_offset: u32,
    // Sizes in bytes, derived from the region offsets.
    rom_size: u32,
    video_rom_size: u32,
    // Mirroring declared by the ROM header; restorable via `set_default_mirroring`.
    default_mirroring: Mirroring
}
impl BankedGenericMapper {
    /// An all-zero mapper with no memory; only useful as a base for `from_rom`.
    fn empty() -> Self {
        BankedGenericMapper {
            inner: GenericMapper::new(),
            internal_rom_bank_offset: 0,
            internal_video_rom_offset: 0,
            internal_background_tilemaps_offset: 0,
            rom_size: 0,
            video_rom_size: 0,
            default_mirroring: Mirroring::Horizontal
        }
    }
    /// Builds a fully wired mapper from a parsed ROM: save RAM, PRG ROM,
    /// CHR ROM/RAM and nametables are laid out back-to-back in the backing
    /// store, and the region sizes are derived from consecutive offsets.
    pub fn from_rom( rom: NesRom ) -> Result< Self, LoadError > {
        let mut mapper = Self::empty();
        mapper.inner.initialize_save_ram();
        mapper.internal_rom_bank_offset = mapper.inner.initialize_rom( &rom.rom[..] );
        mapper.internal_video_rom_offset = mapper.inner.initialize_video_rom( &rom.video_rom[..] );
        mapper.internal_background_tilemaps_offset = mapper.inner.initialize_background_tilemaps( rom.mirroring );
        // Regions are contiguous, so each size is the gap to the next offset.
        mapper.rom_size = mapper.internal_video_rom_offset - mapper.internal_rom_bank_offset;
        mapper.video_rom_size = mapper.internal_background_tilemaps_offset - mapper.internal_video_rom_offset;
        mapper.default_mirroring = rom.mirroring;
        assert!( mapper.rom_size > 0 );
        assert!( mapper.video_rom_size > 0 );
        Ok( mapper )
    }
    #[cfg(test)]
    pub fn memory( &mut self ) -> &[u8] {
        self.inner.memory()
    }
    #[cfg(test)]
    pub fn memory_mut( &mut self ) -> &mut [u8] {
        self.inner.memory_mut()
    }
    #[cfg(test)]
    pub fn internal_video_rom_offset( &self ) -> u32 {
        self.internal_video_rom_offset
    }
    /// Index of the highest 16 KB PRG ROM bank.
    #[inline]
    pub fn last_rom_16k_bank( &self ) -> u8 {
        // BUG FIX: previously `(self.rom_size / 16 * 1024 - 1) as u8`,
        // which parses as `(rom_size / 16) * 1024 - 1` and yields a
        // nonsense bank index; the divisor must be the 16 KB bank size,
        // exactly as in `rom_16k_bank_count` below.
        self.rom_16k_bank_count() - 1
    }
    #[inline]
    pub fn rom_16k_bank_count( &self ) -> u8 {
        (self.rom_size / (16 * 1024)) as u8
    }
    #[inline]
    pub fn rom_32k_bank_count( &self ) -> u8 {
        (self.rom_size / (32 * 1024)) as u8
    }
    #[inline]
    pub fn video_rom_4k_bank_count( &self ) -> u8 {
        (self.video_rom_size / (4 * 1024)) as u8
    }
    #[inline]
    pub fn video_rom_8k_bank_count( &self ) -> u8 {
        (self.video_rom_size / (8 * 1024)) as u8
    }
    // Bank-number based switching; out-of-range bank numbers wrap around
    // the available bank count.
    #[inline]
    pub fn set_cpu_lower_16k_bank_to_bank( &mut self, bank: u8 ) {
        let bank = wraparound( self.rom_16k_bank_count(), bank ) as u32;
        self.inner.set_cpu_lower_16k_bank( self.internal_rom_bank_offset + bank * 16 * 1024 );
    }
    #[inline]
    pub fn set_cpu_upper_16k_bank_to_bank( &mut self, bank: u8 ) {
        let bank = wraparound( self.rom_16k_bank_count(), bank ) as u32;
        self.inner.set_cpu_upper_16k_bank( self.internal_rom_bank_offset + bank * 16 * 1024 );
    }
    #[inline]
    pub fn set_cpu_32k_bank_to_bank( &mut self, bank: u8 ) {
        let bank = wraparound( self.rom_32k_bank_count(), bank ) as u32;
        self.inner.set_cpu_32k_bank( self.internal_rom_bank_offset + bank * 32 * 1024 );
    }
    #[inline]
    pub fn set_ppu_lower_4k_bank_to_bank( &mut self, bank: u8 ) {
        let bank = wraparound( self.video_rom_4k_bank_count(), bank ) as u32;
        self.inner.set_ppu_lower_4k_bank( self.internal_video_rom_offset + bank * 4 * 1024 );
    }
    #[inline]
    pub fn set_ppu_upper_4k_bank_to_bank( &mut self, bank: u8 ) {
        let bank = wraparound( self.video_rom_4k_bank_count(), bank ) as u32;
        self.inner.set_ppu_upper_4k_bank( self.internal_video_rom_offset + bank * 4 * 1024 );
    }
    #[inline]
    pub fn set_ppu_8k_bank_to_bank( &mut self, bank: u8 ) {
        let bank = wraparound( self.video_rom_8k_bank_count(), bank ) as u32;
        self.inner.set_ppu_8k_bank( self.internal_video_rom_offset + bank * 8 * 1024 );
    }
    // Mirroring control, all relative to the tilemap region's base offset.
    #[inline]
    pub fn set_horizontal_mirroring( &mut self ) {
        self.inner.set_horizontal_mirroring( self.internal_background_tilemaps_offset );
    }
    #[inline]
    pub fn set_vertical_mirroring( &mut self ) {
        self.inner.set_vertical_mirroring( self.internal_background_tilemaps_offset );
    }
    #[inline]
    pub fn set_only_lower_bank_mirroring( &mut self ) {
        self.inner.set_only_lower_bank_mirroring( self.internal_background_tilemaps_offset );
    }
    #[inline]
    pub fn set_only_upper_bank_mirroring( &mut self ) {
        self.inner.set_only_upper_bank_mirroring( self.internal_background_tilemaps_offset );
    }
    #[inline]
    pub fn set_four_screen_mirroring( &mut self ) {
        self.inner.set_four_screen_mirroring( self.internal_background_tilemaps_offset );
    }
    /// Restores the mirroring declared by the ROM header.
    #[inline]
    pub fn set_default_mirroring( &mut self ) {
        match self.default_mirroring {
            Mirroring::Horizontal => self.set_horizontal_mirroring(),
            Mirroring::Vertical => self.set_vertical_mirroring(),
            Mirroring::FourScreen => self.set_four_screen_mirroring()
        }
    }
    #[inline]
    pub fn set_cpu_8k_writable( &mut self, bank: bank::CPU_8K, is_writable: bool ) {
        self.inner.set_cpu_8k_writable( bank, is_writable );
    }
    #[inline]
    pub fn set_ppu_4k_writable( &mut self, bank: bank::PPU_4K, is_writable: bool ) {
        self.inner.set_ppu_4k_writable( bank, is_writable );
    }
}
/// Pure delegation to the wrapped `GenericMapper`, mirroring the
/// `Mapper for GenericMapper` impl above.
impl Mapper for BankedGenericMapper {
    #[inline]
    fn peek_rom( &self, address: u16 ) -> u8 {
        self.inner.peek_cpu_memory_space( address )
    }
    #[inline]
    fn poke_rom( &mut self, address: u16, value: u8 ) {
        self.inner.poke_cpu_memory_space( address, value )
    }
    #[inline]
    fn peek_sram( &self, address: u16 ) -> u8 {
        self.inner.peek_cpu_memory_space( address )
    }
    #[inline]
    fn poke_sram( &mut self, address: u16, value: u8 ) {
        self.inner.poke_cpu_memory_space( address, value )
    }
    #[inline]
    fn peek_video_memory( &self, address: u16 ) -> u8 {
        self.inner.peek_ppu_memory_space( address )
    }
    #[inline]
    fn poke_video_memory( &mut self, address: u16, value: u8 ) {
        self.inner.poke_ppu_memory_space( address, value )
    }
}
/// Exercises CPU bank mapping: each 8 KB slot is pointed at a distinct
/// region of backing memory, reads are checked through the banks, writes
/// are checked both when permitted and when blocked.
#[test]
fn test_generic_mapper_cpu_banks() {
    let mut mapper = GenericMapper::new();
    mapper.extend_empty( 40 * 1024 );
    // (bank slot, bus address, offset of the backing region).
    let mapping = [
        ( bank::CPU_8K::Ox8000, 0x8000_u16, 8 * 1024_usize ),
        ( bank::CPU_8K::OxA000, 0xA000, 16 * 1024 ),
        ( bank::CPU_8K::OxC000, 0xC000, 24 * 1024 ),
        ( bank::CPU_8K::OxE000, 0xE000, 32 * 1024 )
    ];
    for &( slot, _, offset ) in mapping.iter() {
        mapper.set_cpu_8k_bank( slot, offset as u32 );
    }
    // Tag the first two bytes of each backing region with unique values...
    for ( index, &( _, _, offset ) ) in mapping.iter().enumerate() {
        mapper.memory[ offset + 0 ] = (index * 2 + 1) as u8;
        mapper.memory[ offset + 1 ] = (index * 2 + 2) as u8;
    }
    // ...and make sure they're visible through the mapped banks.
    for ( index, &( _, address, _ ) ) in mapping.iter().enumerate() {
        assert_eq!( mapper.peek_rom( address ), (index * 2 + 1) as u8 );
        assert_eq!( mapper.peek_rom( address + 1 ), (index * 2 + 2) as u8 );
    }
    // Writable banks must accept pokes and land them in backing memory.
    for &( slot, _, _ ) in mapping.iter() {
        mapper.set_cpu_8k_writable( slot, true );
    }
    for ( index, &( _, address, _ ) ) in mapping.iter().enumerate() {
        mapper.poke_rom( address, (index as u8 + 1) * 11 );
    }
    for ( index, &( _, _, offset ) ) in mapping.iter().enumerate() {
        assert_eq!( mapper.memory[ offset ], (index as u8 + 1) * 11 );
    }
    // A non-writable bank must silently drop the poke.
    mapper.set_cpu_8k_writable( bank::CPU_8K::Ox8000, false );
    mapper.poke_rom( 0x8000, 255 );
    assert_eq!( mapper.memory[ 8 * 1024 ], 11 );
}
/// Exercises PPU bank mapping, the 0x3000 mirror fold, and the two
/// nametable mirroring wirings.
#[test]
fn test_generic_mapper_ppu_banks() {
    let mut mapper = GenericMapper::new();
    mapper.extend_empty( 40 * 1024 );
    mapper.set_ppu_1k_bank( bank::PPU_1K::Ox0400, 1 * 1024 );
    mapper.set_ppu_1k_bank( bank::PPU_1K::Ox2000, 2 * 1024 );
    mapper.set_ppu_1k_bank( bank::PPU_1K::Ox2400, 3 * 1024 );
    mapper.set_ppu_1k_bank( bank::PPU_1K::Ox2800, 4 * 1024 );
    mapper.set_ppu_1k_bank( bank::PPU_1K::Ox2C00, 5 * 1024 );
    // Tag the first two bytes of each backing region.
    mapper.memory[ 1 * 1024 + 0 ] = 1;
    mapper.memory[ 1 * 1024 + 1 ] = 2;
    mapper.memory[ 2 * 1024 + 0 ] = 3;
    mapper.memory[ 2 * 1024 + 1 ] = 4;
    mapper.memory[ 3 * 1024 + 0 ] = 5;
    mapper.memory[ 3 * 1024 + 1 ] = 6;
    mapper.memory[ 4 * 1024 + 0 ] = 7;
    mapper.memory[ 4 * 1024 + 1 ] = 8;
    mapper.memory[ 5 * 1024 + 0 ] = 9;
    mapper.memory[ 5 * 1024 + 1 ] = 10;
    // Direct reads through each mapped bank.
    assert_eq!( mapper.peek_video_memory( 0x0400 ), 1 );
    assert_eq!( mapper.peek_video_memory( 0x0401 ), 2 );
    assert_eq!( mapper.peek_video_memory( 0x2000 ), 3 );
    assert_eq!( mapper.peek_video_memory( 0x2001 ), 4 );
    assert_eq!( mapper.peek_video_memory( 0x2400 ), 5 );
    assert_eq!( mapper.peek_video_memory( 0x2401 ), 6 );
    assert_eq!( mapper.peek_video_memory( 0x2800 ), 7 );
    assert_eq!( mapper.peek_video_memory( 0x2801 ), 8 );
    assert_eq!( mapper.peek_video_memory( 0x2C00 ), 9 );
    assert_eq!( mapper.peek_video_memory( 0x2C01 ), 10 );
    // 0x3000..=0x3FFF must mirror 0x2000..=0x2FFF.
    assert_eq!( mapper.peek_video_memory( 0x3000 ), 3 );
    assert_eq!( mapper.peek_video_memory( 0x3001 ), 4 );
    assert_eq!( mapper.peek_video_memory( 0x3400 ), 5 );
    assert_eq!( mapper.peek_video_memory( 0x3401 ), 6 );
    assert_eq!( mapper.peek_video_memory( 0x3800 ), 7 );
    assert_eq!( mapper.peek_video_memory( 0x3801 ), 8 );
    assert_eq!( mapper.peek_video_memory( 0x3C00 ), 9 );
    assert_eq!( mapper.peek_video_memory( 0x3C01 ), 10 );
    // Horizontal: 0x2000/0x2400 share the first page, 0x2800/0x2C00 the second.
    mapper.set_horizontal_mirroring( 2 * 1024 );
    assert_eq!( mapper.peek_video_memory( 0x2000 ), 3 );
    assert_eq!( mapper.peek_video_memory( 0x2001 ), 4 );
    assert_eq!( mapper.peek_video_memory( 0x2400 ), 3 );
    assert_eq!( mapper.peek_video_memory( 0x2401 ), 4 );
    assert_eq!( mapper.peek_video_memory( 0x2800 ), 5 );
    assert_eq!( mapper.peek_video_memory( 0x2801 ), 6 );
    assert_eq!( mapper.peek_video_memory( 0x2C00 ), 5 );
    assert_eq!( mapper.peek_video_memory( 0x2C01 ), 6 );
    // Vertical: 0x2000/0x2800 share the first page, 0x2400/0x2C00 the second.
    mapper.set_vertical_mirroring( 2 * 1024 );
    assert_eq!( mapper.peek_video_memory( 0x2000 ), 3 );
    assert_eq!( mapper.peek_video_memory( 0x2001 ), 4 );
    assert_eq!( mapper.peek_video_memory( 0x2400 ), 5 );
    assert_eq!( mapper.peek_video_memory( 0x2401 ), 6 );
    assert_eq!( mapper.peek_video_memory( 0x2800 ), 3 );
    assert_eq!( mapper.peek_video_memory( 0x2801 ), 4 );
    assert_eq!( mapper.peek_video_memory( 0x2C00 ), 5 );
    assert_eq!( mapper.peek_video_memory( 0x2C01 ), 6 );
}
| true
|
423347667bb166b473919c5b525aa558a274e376
|
Rust
|
dy0110/my-wasmpj
|
/src/lib.rs
|
UTF-8
| 595
| 3.109375
| 3
|
[] |
no_license
|
use wasm_bindgen::prelude::*;
use web_sys::console;
#[wasm_bindgen]
pub fn log_in_rust() {
console::log_1(&"Hello World".into());
}
/// Adds two unsigned 32-bit integers, exported to JavaScript.
///
/// NOTE(review): `a + b` panics on overflow in debug builds and wraps in
/// release builds — confirm callers never pass values summing past
/// `u32::MAX`.
#[wasm_bindgen]
pub fn add_two_ints(a: u32, b: u32) -> u32 {
    a + b
}
/// Resizes the browser window to 500×500.
///
/// # Errors
///
/// Propagates the `JsValue` error raised by the browser if the window
/// dimensions cannot be set (e.g. the user agent blocks the resize).
#[wasm_bindgen]
pub fn resize_window() -> Result<(), JsValue> {
    let window = web_sys::window().expect("no global `window` exists");
    // The function already returns `Result`, so propagate setter failures
    // with `?` instead of panicking via `unwrap()`.
    window.set_inner_height(&"500".into())?;
    window.set_inner_width(&"500".into())?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Sanity-checks the exported addition helper.
    #[test]
    fn it_works() {
        let sum = add_two_ints(2, 2);
        assert_eq!(sum, 4);
    }
}
| true
|
608a4285b286cf21e39cec35dce52187904fe3e0
|
Rust
|
jyotsna-penumaka/enarx
|
/intel-types/src/lib.rs
|
UTF-8
| 14,109
| 2.859375
| 3
|
[
"Apache-2.0"
] |
permissive
|
// SPDX-License-Identifier: Apache-2.0
//! Intel Documentation related to these types is available at the following link.
//! Section references in further documentation refer to this document.
//! https://www.intel.com/content/dam/www/public/us/en/documents/manuals/64-ia-32-architectures-software-developer-vol-1-manual.pdf
#![cfg_attr(not(test), no_std)]
#![deny(clippy::all)]
#![allow(clippy::identity_op)]
#![deny(missing_docs)]
use bitflags::bitflags;
use core::{
fmt::Debug,
ops::{BitAnd, BitOr, Not},
};
/// Succinctly describes a masked type, e.g. masked Attributes or masked MiscSelect.
/// A mask is applied to Attributes and MiscSelect structs in a Signature (SIGSTRUCT)
/// to specify values of Attributes and MiscSelect to enforce. This struct combines
/// the struct and its mask for simplicity.
#[repr(C)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Masked<T: BitAnd<Output = T>> {
/// The data being masked, e.g. Attribute flags.
pub data: T,
/// The mask.
pub mask: T,
}
impl<T> Default for Masked<T>
where
    T: BitAnd<Output = T>,
    T: BitOr<Output = T>,
    T: Not<Output = T>,
    T: Default,
    T: Copy,
{
    /// The default is `T::default()` under the fully-set mask produced by
    /// the `From<T>` impl below.
    fn default() -> Self {
        Self::from(T::default())
    }
}
impl<T> From<T> for Masked<T>
where
    T: BitAnd<Output = T>,
    T: BitOr<Output = T>,
    T: Not<Output = T>,
    T: Copy,
{
    /// Wraps `value` with a mask that enforces every bit:
    /// `value | !value` is all representable bits set.
    fn from(value: T) -> Self {
        Self {
            data: value,
            mask: value | !value,
        }
    }
}
impl<T> PartialEq<T> for Masked<T>
where
    T: BitAnd<Output = T>,
    T: PartialEq,
    T: Copy,
{
    /// Two values are equal if they agree on every bit the mask selects;
    /// unmasked bits are ignored.
    fn eq(&self, other: &T) -> bool {
        self.mask & self.data == self.mask & *other
    }
}
enumerate::enumerate! {
    /// Exception Error Codes
    ///
    /// x86 exception vector numbers. NOTE(review): some vectors are
    /// deliberately absent (e.g. 0x02 NMI, 0x09 legacy coprocessor
    /// segment overrun, 0x0F reserved) — confirm against the Intel SDM
    /// vector table if extending.
    #[derive(Copy, Clone)]
    pub enum Exception: u8 {
        /// Divide-by-zero Error
        DivideByZero = 0x00,
        /// Debug
        Debug = 0x01,
        /// Breakpoint
        Breakpoint = 0x03,
        /// Overflow
        Overflow = 0x04,
        /// Bound Range Exceeded
        BoundRange = 0x05,
        /// Invalid Opcode
        InvalidOpcode = 0x06,
        /// Device Not Available
        DeviceNotAvailable = 0x07,
        /// Double Fault
        DoubleFault = 0x08,
        /// Invalid TSS
        InvalidTss = 0x0A,
        /// Segment Not Present
        SegmentNotPresent = 0x0B,
        /// Stack-Segment Fault
        StackSegment = 0x0C,
        /// General Protection Fault
        GeneralProtection = 0x0D,
        /// Page Fault
        Page = 0x0E,
        /// x87 Floating-Point Exception
        FloatingPoint = 0x10,
        /// Alignment Check
        AlignmentCheck = 0x11,
        /// Machine Check
        MachineCheck = 0x12,
        /// SIMD Floating-Point Exception
        SimdFloatingPoint = 0x13,
        /// Virtualization Exception
        Virtualization = 0x14,
        /// Control Protection Exception
        ControlProtection = 0x15,
        /// Security Exception
        Security = 0x1E,
    }
}
/// This type represents an MMX register, used to perform operations on 64-bit packed integer data.
/// It includes the 10-byte value as well as 6 bytes of padding.
#[derive(Debug, Default)]
#[repr(C)]
pub struct Mm([u8; 16]);
/// This type represents an XMM 128-bit data register, used to operate on packed single-
/// precision floating-point operands.
#[derive(Debug, Default)]
#[repr(C)]
pub struct Xmm([u8; 16]);
bitflags! {
    /// x87 Floating Point Unit (FPU) Control Word
    /// Section 8.1.5
    ///
    /// Bits 0-5 are the exception *masks* (a set bit suppresses the
    /// corresponding exception); bits 8-9 select precision, 10-11 rounding.
    #[repr(transparent)]
    pub struct Fcw: u16 {
        /// Invalid Operation
        const INV_OP = 1 << 0;
        /// Denormal Operand
        const DENORM_OP = 1 << 1;
        /// Zero Divide
        const ZERO_DIV = 1 << 2;
        /// Overflow
        const OVERFLOW = 1 << 3;
        /// Underflow
        const UNDERFLOW = 1 << 4;
        /// Precision
        const PREC = 1 << 5;
        /// Precision Control 0
        const PREC_CTRL0 = 1 << 8;
        /// Precision Control 1
        const PREC_CTRL1 = 1 << 9;
        /// Rounding Control 0
        const ROUND_CTRL0 = 1 << 10;
        /// Rounding Control 1
        const ROUND_CTRL1 = 1 << 11;
        /// Infinity Control
        const INFINITY_CTRL = 1 << 12;
    }
}
impl Default for Fcw {
    /// The x87 state initial configuration for FCW masks all floating-point exceptions,
    /// sets rounding to nearest, and sets the x87 FPU precision to 64 bits.
    /// See Section 8.1.5.
    ///
    /// (All exception masks set, precision control = 0b11, rounding = 0b00.)
    fn default() -> Self {
        Fcw::INV_OP
            | Fcw::DENORM_OP
            | Fcw::ZERO_DIV
            | Fcw::OVERFLOW
            | Fcw::UNDERFLOW
            | Fcw::PREC
            | Fcw::PREC_CTRL0
            | Fcw::PREC_CTRL1
    }
}
bitflags! {
    /// 32-bit register providing status and control bits used in SIMD floating-point operations
    ///
    /// Bits 0-5 are sticky exception *flags*; bits 7-12 the corresponding
    /// exception *masks*; 13-14 rounding control.
    #[repr(transparent)]
    pub struct MxCsr: u32 {
        /// Invalid Operation Flag
        const INV_OP = 1 << 0;
        /// Denormal Flag
        const DENORM = 1 << 1;
        /// Divide by Zero Flag
        const ZERO_DIV = 1 << 2;
        /// Overflow Flag
        const OVERFLOW = 1 << 3;
        /// Underflow Flag
        const UNDERFLOW = 1 << 4;
        /// Precision Flag
        const PREC = 1 << 5;
        /// Denormals are Zeros
        const DENORM_ARE_ZEROS = 1 << 6;
        /// Invalid Operation Mask
        const INV_OP_MASK = 1 << 7;
        /// Denormal Operation Mask
        const DENORM_MASK = 1 << 8;
        /// Divide by Zero Mask
        const ZERO_DIV_MASK = 1 << 9;
        /// Overflow Mask
        const OVERFLOW_MASK = 1 << 10;
        /// Underflow Mask
        const UNDERFLOW_MASK = 1 << 11;
        /// Precision Mask
        const PREC_MASK = 1 << 12;
        /// Rounding Control 0
        const ROUND_CTRL0 = 1 << 13;
        /// Rounding Control 1
        const ROUND_CTRL1 = 1 << 14;
        /// Flush to Zero
        const FLUSH_TO_ZERO = 1 << 15;
    }
}
impl Default for MxCsr {
    /// The initial state of MXCSR after power-up/reset or INIT; mask allows software
    /// to identify any reserved bits in MXCSR (none are reserved here).
    /// See Sections 11.6.4 and 11.6.6.
    ///
    /// (All six exception masks set == the documented reset value 0x1F80.)
    fn default() -> Self {
        MxCsr::INV_OP_MASK
            | MxCsr::DENORM_MASK
            | MxCsr::ZERO_DIV_MASK
            | MxCsr::OVERFLOW_MASK
            | MxCsr::UNDERFLOW_MASK
            | MxCsr::PREC_MASK
    }
}
bitflags! {
    /// Section 13.4.3
    #[repr(transparent)]
    pub struct XcompBv: u64 {
        /// Compacted form is used for the layout of the XSAVE EXtended Region
        const COMPACT = 1 << 63;
    }
}
impl Default for XcompBv {
    /// XCOMP_BV[63] = 1, compaction mode for the XSave Extended Region. However, no state
    /// components are included in the Extended Region as all other bits are 0. The size of
    /// the XSaveXtd is therefore zero.
    // (The previous TODO here questioned whether bit 31 was being set;
    // `COMPACT` is `1 << 63`, so bit 63 is set as documented.)
    fn default() -> Self {
        XcompBv::COMPACT
    }
}
bitflags! {
    /// The state component bitmap identifies the state components present in the XSAVE area.
    /// Bits 62:10 are reserved. Bit 63 does not correspond to any state component.
    /// Section 13.1
    #[derive(Default)]
    #[repr(transparent)]
    pub struct XstateBv: u64 {
        /// x87 state (Section 13.5.1)
        const X87 = 1 << 0;
        /// SSE state (Section 13.5.2)
        const SSE = 1 << 1;
        /// AVX state (Section 13.5.3)
        const AVX = 1 << 2;
        /// MPX state: BND0-BND3 (BNDREGS state)
        const BNDREGS = 1 << 3;
        /// MPX state: BNDCFGU and BNDSTATUS (BNDCSR state)
        const BNDCSR = 1 << 4;
        /// AVX-512 state: opmask state
        const AVX512_OPMASK = 1 << 5;
        /// AVX-512 state: ZMM_HI256 state
        const AVX512_ZMM_HI256 = 1 << 6;
        /// AVX-512 state: HI16_ZMM state
        const AVX512_HI16_ZMM = 1 << 7;
        /// Processor Trace MSRs
        const PT = 1 << 8;
        /// Protection key feature register (Section 13.5.7)
        const PKRU = 1 << 9;
    }
}
/// The legacy region of an XSAVE area comprises the 512 bytes starting at the area's base address.
/// See Table 13-1. There is no alignment requirement.
#[derive(Debug, Default)]
#[repr(C)]
pub struct XSaveLegacy {
    /// x87 Floating Point Unit (FPU) Control Word
    pub fcw: Fcw,
    /// x87 FPU Status Word
    pub fsw: u16,
    /// x87 FPU Tag Word
    pub ftw: u8,
    /// Reserved
    pub reserved0: u8,
    /// x87 FPU Opcode
    pub fop: u16,
    /// x87 FPU Instruction Pointer Offset
    pub fip: u64,
    /// x87 FPU Data Pointer Offset
    pub fdp: u64,
    /// 32-bit register providing status and control bits used in SIMD floating-point operations
    pub mxcsr: Masked<MxCsr>,
    /// Register used to perform operations on 64-bit packed integer data
    pub mm: [Mm; 8],
    /// 128-bit data register used to operate on packed single-precision floating-point operands
    pub xmm: [Xmm; 16],
    /// Padding: Size of XSaveLegacy must be 512.
    pub padding0: [u64; 11],
    /// Padding: Size of XSaveLegacy must be 512.
    pub padding1: [u8; 7],
}
/// The XSAVE header of an XSAVE area comprises the 64 bytes starting at offset 512 from the
/// area's base address. See Section 13.4.2. There is no alignment requirement.
#[derive(Debug, Default)]
#[repr(C)]
pub struct XSaveHeader {
    /// State-component bitmap identifying the state components in the XSAVE area.
    pub xstate_bv: XstateBv,
    /// State-component bitmap indicating the format of the XSAVE extended region and whether
    /// a component is in the XSAVE area.
    pub xcomp_bv: XcompBv,
    /// Reserved
    pub reserved0: [u64; 6],
}
/// For our use case, the XSave Extended Region is in compacted format and currently holds nothing,
/// as specified in XCOMP_BV in
/// https://github.com/jsakkine-intel/linux-sgx/blob/master/tools/testing/selftests/x86/sgx/encl_bootstrap.S#L89.
/// For more on this region, see Section 13.4.3. The alignment requirements are variable and specified in
/// 13.4.3.
#[derive(Debug, Default)]
#[repr(C)]
pub struct XSaveExtend([u8; 0]);
/// For details on the fields included in XSave, see Section 13.4. Must be 64 byte aligned.
#[derive(Debug, Default)]
#[repr(C, align(64))]
pub struct XSave {
    /// Legacy region of the XSave area
    pub legacy: XSaveLegacy,
    /// XSave header
    pub header: XSaveHeader,
    /// XSave Extended Region (not used)
    pub extend: XSaveExtend,
}
bitflags! {
    /// In 64-bit mode, EFLAGS is extended to 64 bits and called RFLAGS.
    /// The upper 32 bits of RFLAGS register is reserved. The lower 32 bits
    /// of RFLAGS is the same as EFLAGS.
    /// S prefix indicates a status flag; C indicates a control flag; X
    /// indicates a system flag.
    ///
    /// See Section 3.4.3.4, 3.4.3, and Figure 3-8.
    #[derive(Default)]
    #[repr(transparent)]
    pub struct Rflags: u64 {
        /// Carry flag
        const S_CF = 1 << 0;
        /// Parity flag
        const S_PF = 1 << 2;
        /// Auxiliary Carry Flag
        const S_AF = 1 << 4;
        /// Zero flag
        const S_ZF = 1 << 6;
        /// Sign flag
        const S_SF = 1 << 7;
        /// Trap flag
        const X_TF = 1 << 8;
        /// Interrupt enable flag
        const X_IF = 1 << 9;
        /// Direction flag
        const C_DF = 1 << 10;
        /// Overflow flag
        const S_OF = 1 << 11;
        /// I/O privilege level
        const X_IOPL0 = 1 << 12;
        /// I/O privilege level
        const X_IOPL1 = 1 << 13;
        /// Nested task
        const X_NT = 1 << 14;
        /// Resume flag
        const X_RF = 1 << 16;
        /// Virtual-8086 mode
        const X_VM = 1 << 17;
        /// Alignment check / access control
        const X_AC = 1 << 18;
        /// Virtual interrupt flag
        const X_VIF = 1 << 19;
        /// Virtual interrupt pending
        const X_VIP = 1 << 20;
        /// ID flag (ID)
        const X_ID = 1 << 21;
    }
}
/// This macro enables writing tests for alignment, size, and offset of fields in structs.
/// Example usage:
/// testaso! {
///     struct mystruct: 8, 4096 => {
///         f1: 0,
///         f2: 8
///     }
/// }
#[cfg(test)]
macro_rules! testaso {
    // Internal rule: expands to the byte offset of `$field` within `$name`.
    (@off $name:path=>$field:ident) => {
        memoffset::offset_of!($name, $field)
    };

    // Main rule: generates one `align`, one `size`, and one `offsets` test
    // function covering every struct listed in the invocation.
    ($(struct $name:path: $align:expr, $size:expr => { $($field:ident: $offset:expr),* })+) => {
        #[cfg(test)]
        #[test]
        fn align() {
            use core::mem::align_of;
            $(
                assert_eq!(
                    align_of::<$name>(),
                    $align,
                    "align: {}",
                    stringify!($name)
                );
            )+
        }
        #[cfg(test)]
        #[test]
        fn size() {
            use core::mem::size_of;
            $(
                assert_eq!(
                    size_of::<$name>(),
                    $size,
                    "size: {}",
                    stringify!($name)
                );
            )+
        }
        #[cfg(test)]
        #[test]
        fn offsets() {
            $(
                $(
                    assert_eq!(
                        testaso!(@off $name=>$field),
                        $offset,
                        "offset: {}::{}",
                        stringify!($name),
                        stringify!($field)
                    );
                )*
            )+
        }
    };
}
// Compile-time layout checks: expected alignments, sizes, and field offsets
// mirror the architectural XSAVE layout (512-byte legacy area, 64-byte header,
// zero-sized extended region marker at offset 576).
#[cfg(test)]
testaso! {
    struct XSaveLegacy: 8, 512 => {
        fcw: 0,
        fsw: 2,
        ftw: 4,
        reserved0: 5,
        fop: 6,
        fip: 8,
        fdp: 16,
        mxcsr: 24,
        mm: 32,
        xmm: 160,
        padding0: 416,
        padding1: 504
    }
    struct XSaveHeader: 8, 64 => {
        xstate_bv: 0,
        xcomp_bv: 8,
        reserved0: 16
    }
    struct XSaveExtend: 1, 0 => { }
    struct XSave: 64, 576 => {
        legacy: 0,
        header: 512,
        extend: 576
    }
}
| true
|
a27f9449f0da44a0a5922d50195d2ffba1fb76b8
|
Rust
|
Yacodo/qal-rs
|
/src/types/columns.rs
|
UTF-8
| 1,343
| 3.484375
| 3
|
[] |
no_license
|
use super::*;
impl<'c> Columns<'c> {
    /// Create a new Columns list containing only the wildcard column.
    ///
    /// # Examples
    ///
    /// ````
    /// use qal::types::*;
    /// let columns = Columns::new();
    /// assert!(columns.len() == 1);
    /// assert!(columns.query().is_none());
    /// ````
    pub fn new() -> Columns<'c> {
        Columns {
            columns: vec![Column::All]
        }
    }

    /// Append a column to the list.
    ///
    /// Adding `Column::All` discards any previously added specific columns;
    /// adding a specific column discards a lone leading `Column::All`.
    pub fn add<C: Into<Column<'c>>>(mut self, column: C) -> Self {
        let column = column.into();
        // If new column is all, replace current vec with 1 capacity
        if let Column::All = column {
            if self.len() > 1 {
                // Reassigning drops the old Vec; the explicit
                // `drop(&self.columns)` the original had only dropped a
                // reference and was a no-op (clippy::dropping_references).
                self.columns = Vec::with_capacity(1);
            }
            // NOTE(review): adding All when the list is exactly [All] still
            // pushes a duplicate entry — confirm whether that is intended.
        } else {
            // if new column is not all && current only column is, replace with empty new vec
            if self.len() == 1 && self.columns[0].is_all() {
                self.columns = Vec::new();
            }
        }
        self.columns.push(column);
        self
    }

    /// Check len of columns
    ///
    /// # Examples
    ///
    /// ````
    /// use qal::types::*;
    /// let columns = Columns::new_list();
    /// assert!(columns.len() == 1);
    /// ````
    pub fn len(&self) -> usize {
        self.columns.len()
    }
}
| true
|
2a39e246277177e58964dbf9705e3c64f3338af5
|
Rust
|
lu-zero/cdylib-link-lines
|
/src/lib.rs
|
UTF-8
| 2,841
| 2.6875
| 3
|
[
"MIT"
] |
permissive
|
use std::env;
use std::path::PathBuf;
/// Cargo metabuild entry point: emits `cargo:rustc-cdylib-link-arg=` lines so
/// the produced cdylib gets platform-correct soname/install-name metadata.
///
/// # Panics
/// Panics if the Cargo-provided environment variables are missing or if the
/// package name is not valid UTF-8.
pub fn metabuild() {
    // Target triple pieces that Cargo exposes to build scripts.
    let arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap();
    let os = env::var("CARGO_CFG_TARGET_OS").unwrap();
    let env = env::var("CARGO_CFG_TARGET_ENV").unwrap();
    // We do not care about `_pre` and such.
    let major = env::var("CARGO_PKG_VERSION_MAJOR").unwrap();
    let minor = env::var("CARGO_PKG_VERSION_MINOR").unwrap();
    let patch = env::var("CARGO_PKG_VERSION_PATCH").unwrap();
    // Give the priority to [`cargo-c`](https://github.com/lu-zero/cargo-c) in case of.
    let prefix = PathBuf::from(env::var_os("CARGO_C_PREFIX").unwrap_or("/usr/local".into()));
    let libdir = env::var_os("CARGO_C_LIBDIR").map_or(prefix.join("lib"), Into::into);
    // Fix: the original used `map_or`, which evaluates the fallback eagerly and
    // therefore read (and unwrap'd) CARGO_MANIFEST_DIR/PROFILE even when
    // CARGO_TARGET_DIR was set. Only compute the fallback when it is needed.
    let target_dir = match env::var_os("CARGO_TARGET_DIR") {
        Some(dir) => dir.into(),
        None => {
            let manifest_dir = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap());
            manifest_dir
                .join("target")
                .join(std::env::var("PROFILE").unwrap())
        }
    };
    let name = env::var_os("CARGO_PKG_NAME").unwrap();
    let name = name.to_str().expect("pkg name is not valid UTF-8");
    let lines = shared_object_link_args(
        &name, &major, &minor, &patch, &arch, &os, &env, libdir, target_dir,
    );
    for line in lines {
        println!("cargo:rustc-cdylib-link-arg={}", line);
    }
}
/// Return a list of linker arguments useful to produce a
/// platform-correct dynamic library.
pub fn shared_object_link_args(
    name: &str,
    major: &str,
    minor: &str,
    patch: &str,
    _arch: &str,
    os: &str,
    env: &str,
    libdir: PathBuf,
    target_dir: PathBuf,
) -> Vec<String> {
    let mut lines = Vec::new();
    if os == "android" {
        // Android uses an unversioned soname.
        lines.push(format!("-Wl,-soname,lib{}.so", name));
    } else if matches!(os, "linux" | "freebsd" | "dragonfly" | "netbsd") && env != "musl" {
        // ELF platforms embed the major version in the soname; musl targets
        // get no extra arguments.
        lines.push(format!("-Wl,-soname,lib{}.so.{}", name, major));
    } else if os == "macos" || os == "ios" {
        // Mach-O carries the full install path plus current/compatibility
        // version numbers.
        lines.push(format!(
            "-Wl,-install_name,{1}/lib{0}.{2}.{3}.{4}.dylib,-current_version,{2}.{3}.{4},-compatibility_version,{2}",
            name,
            libdir.display(),
            major,
            minor,
            patch,
        ));
    } else if os == "windows" && env == "gnu" {
        // This is only set up to work on GNU toolchain versions of Rust
        lines.push(format!(
            "-Wl,--out-implib,{}",
            target_dir.join(format!("{}.dll.a", name)).display()
        ));
        lines.push(format!(
            "-Wl,--output-def,{}",
            target_dir.join(format!("{}.def", name)).display()
        ));
    }
    lines
}
| true
|
9f2688d0157f30e5622633c2a5d9e2eab3ff7f63
|
Rust
|
EAS-block2/easrvr_desktop_bridge
|
/src/main.rs
|
UTF-8
| 2,063
| 2.671875
| 3
|
[] |
no_license
|
#![feature(proc_macro_hygiene, decl_macro)]
use std::collections::HashMap;
use serde_yaml;
use serde::Deserialize;
use std::{io::Read, fs::File};
#[macro_use] extern crate rocket;
/// Rocket route: report the alarm state for PC `name`.
///
/// Returns "clear" when no authorized alarm is active, "unauthorized" for
/// unknown PCs, otherwise the address of the active alarm (or the combined
/// "both" address when both "general" and "silent" alarms fire).
///
/// # Panics
/// Panics if the config file is missing/invalid, or if an alarm entry lacks
/// its "file"/"addr" keys.
#[get("/pc/<name>")]
fn retn(name: String) -> String {
    // Track whether the "general" and "silent" alarm kinds are active.
    let mut g = false;
    let mut s = false;
    let mut to_return: String = "clear".to_string();
    // Config is re-read on every request so edits take effect immediately.
    let conf_f = std::fs::File::open("/etc/EAS/rktconfig.yaml").expect("Can't Find Config!");
    let config: Config = serde_yaml::from_reader(conf_f).expect("Bad YAML config file!");
    match config.pc_auths.get(&name) {
        Some(auth) => {
            for i in auth.iter() {
                if let Some(alarm) = config.alm_info.get(i) {
                    // Read the alarm's status file once and reuse the result;
                    // the original called read_status twice, doubling file I/O.
                    let status = read_status(alarm.get("file").unwrap()).unwrap();
                    println!("{:?}", status);
                    if !status.is_empty() {
                        match i.as_str() {
                            "general" => g = true,
                            "silent" => s = true,
                            _ => (),
                        }
                        println!("I'm getting the address of:{}", alarm.get("addr").unwrap().to_string());
                        to_return = alarm.get("addr").unwrap().to_string();
                    }
                }
            }
        }
        None => {
            to_return = String::from("unauthorized");
        }
    }
    // Both alarm kinds active: report the combined endpoint instead.
    if g && s {
        to_return = config.alm_info.get("both").unwrap().get("addr").unwrap().to_string()
    }
    to_return
}
/// 404 catcher: unknown routes get the same reply as unauthorized clients.
#[catch(404)]
fn not_found() -> String {
    "unauthorized".to_string()
}
fn read_status(path: &str) -> std::io::Result<Vec<String>>{
let mut file = File::open(path)?;
let mut content = String::new();
file.read_to_string(&mut content)?;
let yam: Vec<String> = serde_yaml::from_str(&content).unwrap();
Ok(yam)
}
fn main() {
    // Build the Rocket app, wiring the 404 catcher and the /pc/<name> route,
    // then block on serving requests.
    let app = rocket::ignite()
        .register(catchers![not_found])
        .mount("/", routes![retn]);
    app.launch();
}
/// Server configuration deserialized from /etc/EAS/rktconfig.yaml.
#[derive(Deserialize)]
struct Config {
    // Maps a PC name to the list of alarm kinds it is authorized to query.
    pc_auths: HashMap<String, Vec<String>>,
    // Maps an alarm kind to its properties; the handler reads the
    // "file" (status path) and "addr" (reported address) keys.
    alm_info: HashMap<String, HashMap<String, String>>,
}
| true
|
858b9b29c60c2352c8d6ce33b7bf3c3d0594df1a
|
Rust
|
K4rakara/waylock
|
/src/color.rs
|
UTF-8
| 2,248
| 3.484375
| 3
|
[
"MIT"
] |
permissive
|
use std::{error, fmt, num, str};
/// Errors produced when parsing a color string (see `from_str` below).
#[derive(Debug)]
pub enum Error {
    // The digit portion was not exactly 6 hex digits.
    InvalidLength,
    // The string did not start with '#' or '0x'.
    InvalidPrefix,
    // The digits were not valid hexadecimal.
    ParseInt(num::ParseIntError),
}
impl error::Error for Error {
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        // Only the wrapped integer-parse error carries an underlying cause.
        if let Self::ParseInt(err) = self {
            err.source()
        } else {
            None
        }
    }
}
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Fixed messages for the structural errors; the parse error
        // delegates to the wrapped ParseIntError.
        match self {
            Self::InvalidLength => f.write_str("invalid length, color must have exactly 6 digits"),
            Self::InvalidPrefix => f.write_str("invalid color prefix, must start with '#' or '0x'"),
            Self::ParseInt(err) => write!(f, "parse error: {}", err),
        }
    }
}
pub fn from_str(s: &str) -> Result<u32, Error> {
let digits = if s.starts_with("0x") {
&s[2..]
} else if s.starts_with('#') {
&s[1..]
} else {
return Err(Error::InvalidPrefix);
};
if digits.len() != 6 {
return Err(Error::InvalidLength);
}
match u32::from_str_radix(digits, 16) {
Ok(number) => Ok(0xff00_0000 | number),
Err(err) => Err(Error::ParseInt(err)),
}
}
#[cfg(test)]
mod tests {
    // Each `test!` invocation generates one #[test] fn asserting that parsing
    // `$str` produces a result matching `$result`.
    macro_rules! test {
        ($name: ident: $str: expr, $result: pat) => {
            #[test]
            fn $name() {
                assert!(matches!(super::from_str($str), $result));
            }
        };
    }
    // Prefix handling.
    test!(no_prefix_6_digit: "01abEF", Err(super::Error::InvalidPrefix));
    test!(binary_prefix_6_digit: "0b01abEF", Err(super::Error::InvalidPrefix));
    test!(alphabetic_prefix_6_digit: "a01abEF", Err(super::Error::InvalidPrefix));
    // '#'-prefixed inputs.
    test!(octothorpe_6_digit: "#01abEF", Ok(_));
    test!(octothorpe_short: "#01234", Err(super::Error::InvalidLength));
    test!(octothorpe_long: "#01234567", Err(super::Error::InvalidLength));
    test!(octothorpe_invalid_digit: "#012z45", Err(super::Error::ParseInt(_)));
    // Hex-prefixed inputs (note: these currently repeat the '#' cases).
    test!(hex_6_digit: "#01abEF", Ok(_));
    test!(hex_short: "#01234", Err(super::Error::InvalidLength));
    test!(hex_long: "#01234567", Err(super::Error::InvalidLength));
    test!(hex_invalid_digit: "#012z45", Err(super::Error::ParseInt(_)));
}
| true
|
e401075dd7fe141b6bebf3bdd67e7d167da63c7d
|
Rust
|
launchbadge/sqlx
|
/sqlx-postgres/src/copy.rs
|
UTF-8
| 13,586
| 2.609375
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use futures_core::future::BoxFuture;
use std::borrow::Cow;
use std::ops::{Deref, DerefMut};
use futures_core::stream::BoxStream;
use sqlx_core::bytes::{BufMut, Bytes};
use crate::connection::PgConnection;
use crate::error::{Error, Result};
use crate::ext::async_stream::TryAsyncStream;
use crate::io::{AsyncRead, AsyncReadExt};
use crate::message::{
CommandComplete, CopyData, CopyDone, CopyFail, CopyResponse, MessageFormat, Query,
};
use crate::pool::{Pool, PoolConnection};
use crate::Postgres;
impl PgConnection {
    /// Issue a `COPY FROM STDIN` statement and transition the connection to streaming data
    /// to Postgres. This is a more efficient way to import data into Postgres as compared to
    /// `INSERT` but requires one of a few specific data formats (text/CSV/binary).
    ///
    /// If `statement` is anything other than a `COPY ... FROM STDIN ...` command, an error is
    /// returned.
    ///
    /// Command examples and accepted formats for `COPY` data are shown here:
    /// https://www.postgresql.org/docs/current/sql-copy.html
    ///
    /// ### Note
    /// [PgCopyIn::finish] or [PgCopyIn::abort] *must* be called when finished or the connection
    /// will return an error the next time it is used.
    pub async fn copy_in_raw(&mut self, statement: &str) -> Result<PgCopyIn<&mut Self>> {
        // All protocol setup (and statement validation by the server) happens
        // in PgCopyIn::begin.
        PgCopyIn::begin(self, statement).await
    }
    /// Issue a `COPY TO STDOUT` statement and transition the connection to streaming data
    /// from Postgres. This is a more efficient way to export data from Postgres but
    /// arrives in chunks of one of a few data formats (text/CSV/binary).
    ///
    /// If `statement` is anything other than a `COPY ... TO STDOUT ...` command,
    /// an error is returned.
    ///
    /// Note that once this process has begun, unless you read the stream to completion,
    /// it can only be canceled in two ways:
    ///
    /// 1. by closing the connection, or:
    /// 2. by using another connection to kill the server process that is sending the data as shown
    /// [in this StackOverflow answer](https://stackoverflow.com/a/35319598).
    ///
    /// If you don't read the stream to completion, the next time the connection is used it will
    /// need to read and discard all the remaining queued data, which could take some time.
    ///
    /// Command examples and accepted formats for `COPY` data are shown here:
    /// https://www.postgresql.org/docs/current/sql-copy.html
    #[allow(clippy::needless_lifetimes)]
    pub async fn copy_out_raw<'c>(
        &'c mut self,
        statement: &str,
    ) -> Result<BoxStream<'c, Result<Bytes>>> {
        pg_begin_copy_out(self, statement).await
    }
}
/// Implements methods for directly executing `COPY FROM/TO STDOUT` on a [`PgPool`].
///
/// Both methods mirror the inherent methods on [`PgConnection`] above, with a
/// pooled connection checked out on the caller's behalf.
///
/// This is a replacement for the inherent methods on `PgPool` which could not exist
/// once the Postgres driver was moved out into its own crate.
pub trait PgPoolCopyExt {
    /// Issue a `COPY FROM STDIN` statement and begin streaming data to Postgres.
    /// This is a more efficient way to import data into Postgres as compared to
    /// `INSERT` but requires one of a few specific data formats (text/CSV/binary).
    ///
    /// A single connection will be checked out for the duration.
    ///
    /// If `statement` is anything other than a `COPY ... FROM STDIN ...` command, an error is
    /// returned.
    ///
    /// Command examples and accepted formats for `COPY` data are shown here:
    /// https://www.postgresql.org/docs/current/sql-copy.html
    ///
    /// ### Note
    /// [PgCopyIn::finish] or [PgCopyIn::abort] *must* be called when finished or the connection
    /// will return an error the next time it is used.
    fn copy_in_raw<'a>(
        &'a self,
        statement: &'a str,
    ) -> BoxFuture<'a, Result<PgCopyIn<PoolConnection<Postgres>>>>;
    /// Issue a `COPY TO STDOUT` statement and begin streaming data
    /// from Postgres. This is a more efficient way to export data from Postgres but
    /// arrives in chunks of one of a few data formats (text/CSV/binary).
    ///
    /// If `statement` is anything other than a `COPY ... TO STDOUT ...` command,
    /// an error is returned.
    ///
    /// Note that once this process has begun, unless you read the stream to completion,
    /// it can only be canceled in two ways:
    ///
    /// 1. by closing the connection, or:
    /// 2. by using another connection to kill the server process that is sending the data as shown
    /// [in this StackOverflow answer](https://stackoverflow.com/a/35319598).
    ///
    /// If you don't read the stream to completion, the next time the connection is used it will
    /// need to read and discard all the remaining queued data, which could take some time.
    ///
    /// Command examples and accepted formats for `COPY` data are shown here:
    /// https://www.postgresql.org/docs/current/sql-copy.html
    fn copy_out_raw<'a>(
        &'a self,
        statement: &'a str,
    ) -> BoxFuture<'a, Result<BoxStream<'static, Result<Bytes>>>>;
}
impl PgPoolCopyExt for Pool<Postgres> {
    fn copy_in_raw<'a>(
        &'a self,
        statement: &'a str,
    ) -> BoxFuture<'a, Result<PgCopyIn<PoolConnection<Postgres>>>> {
        // Check out a connection, then delegate to the connection-level setup.
        Box::pin(async move {
            let conn = self.acquire().await?;
            PgCopyIn::begin(conn, statement).await
        })
    }
    fn copy_out_raw<'a>(
        &'a self,
        statement: &'a str,
    ) -> BoxFuture<'a, Result<BoxStream<'static, Result<Bytes>>>> {
        Box::pin(async move {
            let conn = self.acquire().await?;
            pg_begin_copy_out(conn, statement).await
        })
    }
}
/// A connection in streaming `COPY FROM STDIN` mode.
///
/// Created by [PgConnection::copy_in_raw] or [Pool::copy_out_raw].
///
/// ### Note
/// [PgCopyIn::finish] or [PgCopyIn::abort] *must* be called when finished or the connection
/// will return an error the next time it is used.
#[must_use = "connection will error on next use if `.finish()` or `.abort()` is not called"]
pub struct PgCopyIn<C: DerefMut<Target = PgConnection>> {
    // `Some` while the COPY is in progress; taken by finish()/abort() and,
    // as a last resort, by Drop.
    conn: Option<C>,
    // The server's CopyInResponse, describing column count and per-column formats.
    response: CopyResponse,
}
impl<C: DerefMut<Target = PgConnection>> PgCopyIn<C> {
    // Send the COPY statement and wait for the server's CopyInResponse.
    async fn begin(mut conn: C, statement: &str) -> Result<Self> {
        conn.wait_until_ready().await?;
        conn.stream.send(Query(statement)).await?;
        let response = match conn.stream.recv_expect(MessageFormat::CopyInResponse).await {
            Ok(res) => res,
            Err(e) => {
                // The statement was rejected: consume the follow-up message
                // (presumably ReadyForQuery — TODO confirm) so the connection
                // remains usable before surfacing the error.
                conn.stream.recv().await?;
                return Err(e);
            }
        };
        Ok(PgCopyIn {
            conn: Some(conn),
            response,
        })
    }
    /// Returns `true` if Postgres is expecting data in text or CSV format.
    pub fn is_textual(&self) -> bool {
        // Overall format code 0 in CopyInResponse means textual.
        self.response.format == 0
    }
    /// Returns the number of columns expected in the input.
    pub fn num_columns(&self) -> usize {
        // Sanity-check the two redundant fields of CopyInResponse agree.
        assert_eq!(
            self.response.num_columns as usize,
            self.response.format_codes.len(),
            "num_columns does not match format_codes.len()"
        );
        self.response.format_codes.len()
    }
    /// Check if a column is expecting data in text format (`true`) or binary format (`false`).
    ///
    /// ### Panics
    /// If `column` is out of range according to [`.num_columns()`][Self::num_columns].
    pub fn column_is_textual(&self, column: usize) -> bool {
        self.response.format_codes[column] == 0
    }
    /// Send a chunk of `COPY` data.
    ///
    /// If you're copying data from an `AsyncRead`, maybe consider [Self::read_from] instead.
    pub async fn send(&mut self, data: impl Deref<Target = [u8]>) -> Result<&mut Self> {
        self.conn
            .as_deref_mut()
            .expect("send_data: conn taken")
            .stream
            .send(CopyData(data))
            .await?;
        Ok(self)
    }
    /// Copy data directly from `source` to the database without requiring an intermediate buffer.
    ///
    /// `source` will be read to the end.
    ///
    /// ### Note: Completion Step Required
    /// You must still call either [Self::finish] or [Self::abort] to complete the process.
    ///
    /// ### Note: Runtime Features
    /// This method uses the `AsyncRead` trait which is re-exported from either Tokio or `async-std`
    /// depending on which runtime feature is used.
    ///
    /// The runtime features _used_ to be mutually exclusive, but are no longer.
    /// If both `runtime-async-std` and `runtime-tokio` features are enabled, the Tokio version
    /// takes precedent.
    pub async fn read_from(&mut self, mut source: impl AsyncRead + Unpin) -> Result<&mut Self> {
        // this is a separate guard from WriteAndFlush so we can reuse the buffer without zeroing
        // NOTE(review): BufGuard is declared but never instantiated in this
        // function — confirm whether it is dead code that can be removed.
        struct BufGuard<'s>(&'s mut Vec<u8>);
        impl Drop for BufGuard<'_> {
            fn drop(&mut self) {
                self.0.clear()
            }
        }
        let conn: &mut PgConnection = self.conn.as_deref_mut().expect("copy_from: conn taken");
        // flush any existing messages in the buffer and clear it
        conn.stream.flush().await?;
        loop {
            let buf = conn.stream.write_buffer_mut();
            // CopyData format code and reserved space for length
            buf.put_slice(b"d\0\0\0\x04");
            let read = match () {
                // Tokio lets us read into the buffer without zeroing first
                #[cfg(feature = "_rt-tokio")]
                _ => source.read_buf(buf.buf_mut()).await?,
                #[cfg(not(feature = "_rt-tokio"))]
                _ => source.read(buf.init_remaining_mut()).await?,
            };
            if read == 0 {
                // This will end up sending an empty `CopyData` packet but that should be fine.
                break;
            }
            buf.advance(read);
            // Write the length
            // Patch the 4-byte length (payload + the length field itself) in
            // at offset 1, just after the 'd' message-type byte reserved above.
            let read32 = u32::try_from(read)
                .map_err(|_| err_protocol!("number of bytes read exceeds 2^32: {}", read))?;
            (&mut buf.get_mut()[1..]).put_u32(read32 + 4);
            conn.stream.flush().await?;
        }
        Ok(self)
    }
    /// Signal that the `COPY` process should be aborted and any data received should be discarded.
    ///
    /// The given message can be used for indicating the reason for the abort in the database logs.
    ///
    /// The server is expected to respond with an error, so only _unexpected_ errors are returned.
    pub async fn abort(mut self, msg: impl Into<String>) -> Result<()> {
        // Take the connection so Drop does not try to abort a second time.
        let mut conn = self
            .conn
            .take()
            .expect("PgCopyIn::fail_with: conn taken illegally");
        conn.stream.send(CopyFail::new(msg)).await?;
        match conn.stream.recv().await {
            Ok(msg) => Err(err_protocol!(
                "fail_with: expected ErrorResponse, got: {:?}",
                msg.format
            )),
            Err(Error::Database(e)) => {
                match e.code() {
                    Some(Cow::Borrowed("57014")) => {
                        // postgres abort received error code
                        conn.stream
                            .recv_expect(MessageFormat::ReadyForQuery)
                            .await?;
                        Ok(())
                    }
                    _ => Err(Error::Database(e)),
                }
            }
            Err(e) => Err(e),
        }
    }
    /// Signal that the `COPY` process is complete.
    ///
    /// The number of rows affected is returned.
    pub async fn finish(mut self) -> Result<u64> {
        // Take the connection so Drop does not send a CopyFail afterwards.
        let mut conn = self
            .conn
            .take()
            .expect("CopyWriter::finish: conn taken illegally");
        conn.stream.send(CopyDone).await?;
        let cc: CommandComplete = match conn
            .stream
            .recv_expect(MessageFormat::CommandComplete)
            .await
        {
            Ok(cc) => cc,
            Err(e) => {
                // Drain the follow-up message before surfacing the error so
                // the connection is left in a consistent state.
                conn.stream.recv().await?;
                return Err(e);
            }
        };
        conn.stream
            .recv_expect(MessageFormat::ReadyForQuery)
            .await?;
        Ok(cc.rows_affected())
    }
}
impl<C: DerefMut<Target = PgConnection>> Drop for PgCopyIn<C> {
    fn drop(&mut self) {
        // If the COPY was never completed, queue a CopyFail so the server
        // aborts it; the message will appear in the database logs.
        match self.conn.take() {
            Some(mut conn) => {
                conn.stream.write(CopyFail::new(
                    "PgCopyIn dropped without calling finish() or fail()",
                ));
            }
            None => {}
        }
    }
}
// Issue `statement` (expected to be a COPY ... TO STDOUT command) and return a
// stream of raw CopyData payloads. The stream ends after CopyDone has been
// acknowledged with CommandComplete + ReadyForQuery.
async fn pg_begin_copy_out<'c, C: DerefMut<Target = PgConnection> + Send + 'c>(
    mut conn: C,
    statement: &str,
) -> Result<BoxStream<'c, Result<Bytes>>> {
    conn.wait_until_ready().await?;
    conn.stream.send(Query(statement)).await?;
    // The response payload is not needed; receiving it just validates that the
    // server accepted the statement as a COPY OUT.
    let _: CopyResponse = conn
        .stream
        .recv_expect(MessageFormat::CopyOutResponse)
        .await?;
    let stream: TryAsyncStream<'c, Bytes> = try_stream! {
        loop {
            let msg = conn.stream.recv().await?;
            match msg.format {
                MessageFormat::CopyData => r#yield!(msg.decode::<CopyData<Bytes>>()?.0),
                MessageFormat::CopyDone => {
                    let _ = msg.decode::<CopyDone>()?;
                    // Drain the tail of the COPY exchange so the connection is
                    // ready for the next query.
                    conn.stream.recv_expect(MessageFormat::CommandComplete).await?;
                    conn.stream.recv_expect(MessageFormat::ReadyForQuery).await?;
                    return Ok(())
                },
                _ => return Err(err_protocol!("unexpected message format during copy out: {:?}", msg.format))
            }
        }
    };
    Ok(Box::pin(stream))
}
| true
|
1e328c5a4d8c6507343cb2f9a3710834920903a6
|
Rust
|
Eliot00/langyan
|
/src/lib.rs
|
UTF-8
| 568
| 3.109375
| 3
|
[] |
no_license
|
//! Langyan provides a signal mechanism like Django's [signal](https://docs.djangoproject.com/en/3.1/topics/signals/).
//!
//! ## Example
//!
//! ```rust
//! use langyan::signal::{Signal, Receiver};
//!
//! fn after_save(filename: &str) {
//! println!("filename is {}", filename);
//! }
//!
//! fn main() {
//! let saved = Signal::new();
//! let subscription = saved.connect(after_save);
//!
//! // after saved file
//! saved.send("hello.json");
//!
//! // when you want to disconnect, drop the subscription
//! drop(subscription)
//! }
//! ```
pub mod signal;
| true
|
73fb491726ad849cf7dec3d6c25b791ab9dd05bd
|
Rust
|
bazk/advent-of-code-2019-solutions
|
/day11/src/color.rs
|
UTF-8
| 669
| 3.65625
| 4
|
[] |
no_license
|
use std::fmt;
/// A panel color; `Debug` renders each color as a two-character cell so a grid
/// of colors prints as an image.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone)]
pub enum Color {
    Black,
    White,
    Grey
}
impl fmt::Debug for Color {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Two-character cells: spaces for black, full blocks for white,
        // light-shade blocks for grey.
        let cell = match self {
            Color::Black => "  ",
            Color::White => "\u{2588}\u{2588}",
            Color::Grey => "\u{2591}\u{2591}",
        };
        f.write_str(cell)
    }
}
impl Color {
    /// Numeric code for this color (the wire format used by the program).
    pub fn code(&self) -> u32 {
        match self {
            Color::White => 1,
            Color::Grey => 2,
            Color::Black => 0,
        }
    }
    /// Inverse of [`code`](Self::code); any unrecognized code maps to black.
    pub fn from_code(code: u32) -> Color {
        match code {
            1 => Color::White,
            2 => Color::Grey,
            _ => Color::Black,
        }
    }
}
| true
|
c14fe9dadf4c142aedaaca760eeef40e6768e237
|
Rust
|
uran0sH/notes
|
/rust/essential_rust/exercise_macro/src/main.rs
|
UTF-8
| 469
| 3.15625
| 3
|
[
"Apache-2.0"
] |
permissive
|
// declarative macro
// Minimal re-implementation of `vec!`: evaluates each comma-separated
// expression and pushes it onto a fresh Vec inside a block expression.
macro_rules! myVec {
    ($($x: expr),*) => {
        {
            let mut temp_vec = Vec::new();
            $(
                temp_vec.push($x);
            )*
            temp_vec
        }
    };
}
// procedural macro
use hello_macro::HelloMacro;
use hello_macro_derive::HelloMacro;
// Unit struct whose `hello_macro` associated function is generated by the
// custom `HelloMacro` derive.
#[derive(HelloMacro)]
struct Pancakes;
fn main() {
    // Expand the declarative macro into a Vec and print its contents.
    let values = myVec![1 + 2, 2, 3, 4, 5];
    println!("{:?}", values);
    // Call the function generated by the procedural derive macro.
    Pancakes::hello_macro();
}
| true
|
b6de1912e2f3c4f952713a517ba1699299a2073c
|
Rust
|
ridephysics/sensoreval
|
/sensoreval_utils/src/error.rs
|
UTF-8
| 484
| 2.578125
| 3
|
[] |
no_license
|
use thiserror::Error;
/// Crate-wide error type; `thiserror` generates the Display and From impls.
#[derive(Error, Debug)]
pub enum Error {
    /// A spawned process exited unsuccessfully.
    #[error("exit status: {0}")]
    ExitStatus(std::process::ExitStatus),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Plotly(#[from] plotly_types::Error),
    #[error(transparent)]
    SerdePickle(#[from] serde_pickle::error::Error),
    #[error("no row")]
    NoRow,
    #[error("row already exists")]
    RowAlreadyExists,
    #[error("row not found")]
    RowNotFound,
}
| true
|
4b38d78ac5f4afed609537196ccae7b79489fb48
|
Rust
|
astral-sh/ruff
|
/crates/ruff/src/rules/pyflakes/rules/yield_outside_function.rs
|
UTF-8
| 2,031
| 3.34375
| 3
|
[
"BSD-3-Clause",
"0BSD",
"LicenseRef-scancode-free-unknown",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0"
] |
permissive
|
use std::fmt;
use ruff_python_ast::{Expr, Ranged};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_semantic::ScopeKind;
use crate::checkers::ast::Checker;
// The specific deferral keyword found outside a function body; used to
// interpolate the exact keyword into the diagnostic message.
#[derive(Debug, PartialEq, Eq)]
enum DeferralKeyword {
    Yield,
    YieldFrom,
    Await,
}
impl fmt::Display for DeferralKeyword {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Render the keyword exactly as it appears in Python source.
        let text = match self {
            DeferralKeyword::Yield => "yield",
            DeferralKeyword::YieldFrom => "yield from",
            DeferralKeyword::Await => "await",
        };
        fmt.write_str(text)
    }
}
/// ## What it does
/// Checks for `yield`, `yield from`, and `await` usages outside of functions.
///
/// ## Why is this bad?
/// The use of a `yield`, `yield from`, or `await` statement outside of a
/// function will raise a `SyntaxError`.
///
/// ## Example
/// ```python
/// class Foo:
///     yield 1
/// ```
///
/// ## References
/// - [Python documentation: `yield`](https://docs.python.org/3/reference/simple_stmts.html#the-yield-statement)
#[violation]
pub struct YieldOutsideFunction {
    // Which keyword was found; interpolated into the diagnostic message.
    keyword: DeferralKeyword,
}
impl Violation for YieldOutsideFunction {
    #[derive_message_formats]
    fn message(&self) -> String {
        // The keyword renders via DeferralKeyword's Display impl.
        let YieldOutsideFunction { keyword } = self;
        format!("`{keyword}` statement outside of a function")
    }
}
/// Emit a diagnostic when a `yield`, `yield from`, or `await` expression
/// appears directly in a class or module scope, where Python raises a
/// `SyntaxError`.
pub(crate) fn yield_outside_function(checker: &mut Checker, expr: &Expr) {
    if matches!(
        checker.semantic().scope().kind,
        ScopeKind::Class(_) | ScopeKind::Module
    ) {
        let keyword = match expr {
            Expr::Yield(_) => DeferralKeyword::Yield,
            Expr::YieldFrom(_) => DeferralKeyword::YieldFrom,
            Expr::Await(_) => DeferralKeyword::Await,
            // Callers only invoke this rule for the three kinds above;
            // `unreachable!` states that intent better than a bare `panic!`.
            _ => unreachable!("Expected Expr::Yield | Expr::YieldFrom | Expr::Await"),
        };
        checker.diagnostics.push(Diagnostic::new(
            YieldOutsideFunction { keyword },
            expr.range(),
        ));
    }
}
| true
|
f02342805c8875b5dc244f339ba4595a95d4f45c
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/run-pass/structs-enums/struct-pattern-matching.rs
|
UTF-8
| 273
| 2.828125
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-other-permissive",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// run-pass
#![allow(non_shorthand_field_patterns)]
// Simple two-field struct used to exercise struct patterns below.
struct Foo {
    x: isize,
    y: isize,
}
pub fn main() {
    let a = Foo { x: 1, y: 2 };
    // Explicit non-shorthand `field: binding` patterns are the point of this
    // test (hence the `allow(non_shorthand_field_patterns)` above) — do not
    // rewrite them to shorthand.
    match a {
        Foo { x: x, y: y } => println!("yes, {}, {}", x, y)
    }
    // `..` ignores all fields.
    match a {
        Foo { .. } => ()
    }
}
| true
|
abc71a0ca3253cdf7c39a6c3163fc4ae6e802134
|
Rust
|
maxtnuk/gluesql
|
/core/src/data/interval/mod.rs
|
UTF-8
| 18,919
| 3.109375
| 3
|
[
"Apache-2.0"
] |
permissive
|
mod error;
mod primitive;
mod string;
use {
super::Value,
crate::{ast::DateTimeField, result::Result},
chrono::{Datelike, Duration, NaiveDate, NaiveDateTime, NaiveTime, Timelike},
core::str::FromStr,
rust_decimal::{prelude::ToPrimitive, Decimal},
serde::{Deserialize, Serialize},
std::{cmp::Ordering, fmt::Debug},
};
pub use error::IntervalError;
/// SQL INTERVAL value, stored in one of two unit families:
/// calendar months (covers YEAR/MONTH, see `years`/`months` below) or
/// microseconds (covers DAY down to MICROSECOND, see `days`..`microseconds`).
/// Values from different families cannot be mixed or compared.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Interval {
    Month(i32),
    Microsecond(i64),
}
impl PartialOrd<Interval> for Interval {
fn partial_cmp(&self, other: &Interval) -> Option<Ordering> {
match (self, other) {
(Interval::Month(l), Interval::Month(r)) => Some(l.cmp(r)),
(Interval::Microsecond(l), Interval::Microsecond(r)) => Some(l.cmp(r)),
_ => None,
}
}
}
// Microseconds-per-unit conversion factors for the Microsecond variant.
const SECOND: i64 = 1_000_000;
const MINUTE: i64 = 60 * SECOND;
const HOUR: i64 = 3600 * SECOND;
const DAY: i64 = 24 * HOUR;
impl Interval {
/// Negate the interval, preserving its unit family.
pub fn unary_minus(&self) -> Self {
    match *self {
        Interval::Month(v) => Interval::Month(-v),
        Interval::Microsecond(v) => Interval::Microsecond(-v),
    }
}
pub fn add(&self, other: &Interval) -> Result<Self> {
use Interval::*;
match (self, other) {
(Month(l), Month(r)) => Ok(Month(l + r)),
(Microsecond(l), Microsecond(r)) => Ok(Microsecond(l + r)),
_ => Err(IntervalError::AddBetweenYearToMonthAndHourToSecond.into()),
}
}
pub fn subtract(&self, other: &Interval) -> Result<Self> {
use Interval::*;
match (self, other) {
(Month(l), Month(r)) => Ok(Month(l - r)),
(Microsecond(l), Microsecond(r)) => Ok(Microsecond(l - r)),
_ => Err(IntervalError::SubtractBetweenYearToMonthAndHourToSecond.into()),
}
}
/// Add this interval to a date, producing a timestamp at midnight.
pub fn add_date(&self, date: &NaiveDate) -> Result<NaiveDateTime> {
    // Promote the date to midnight, then reuse the timestamp arithmetic.
    let midnight = date
        .and_hms_opt(0, 0, 0)
        .ok_or_else(|| IntervalError::FailedToParseTime(date.to_string()))?;
    self.add_timestamp(&midnight)
}
/// Subtract this interval from a date, producing a timestamp at midnight.
pub fn subtract_from_date(&self, date: &NaiveDate) -> Result<NaiveDateTime> {
    // Promote the date to midnight, then reuse the timestamp arithmetic.
    let midnight = date
        .and_hms_opt(0, 0, 0)
        .ok_or_else(|| IntervalError::FailedToParseTime(date.to_string()))?;
    self.subtract_from_timestamp(&midnight)
}
/// Add this interval to a timestamp. Month intervals use calendar
/// arithmetic; microsecond intervals are plain duration addition.
///
/// # Errors
/// Returns `IntervalError::DateOverflow` when the resulting
/// year/month/day combination is not a valid date.
pub fn add_timestamp(&self, timestamp: &NaiveDateTime) -> Result<NaiveDateTime> {
    match self {
        Interval::Month(n) => {
            // Work with a zero-based total month count so the division /
            // modulo below never produces an invalid month 0 and handles
            // negative offsets. (The previous code computed `month % 12`
            // on a one-based month, which yielded month 0 — and thus a
            // spurious DateOverflow — whenever the target month was
            // December, e.g. October + 2 months.)
            let months = timestamp.year() * 12 + timestamp.month() as i32 - 1 + n;
            let year = months.div_euclid(12);
            let month = months.rem_euclid(12) + 1;
            timestamp
                .with_year(year)
                .and_then(|d| d.with_month(month as u32))
                .ok_or_else(|| IntervalError::DateOverflow { year, month }.into())
        }
        Interval::Microsecond(n) => Ok(*timestamp + Duration::microseconds(*n)),
    }
}
/// Subtract this interval from a timestamp. Month intervals use calendar
/// arithmetic; microsecond intervals are plain duration subtraction.
///
/// # Errors
/// Returns `IntervalError::DateOverflow` when the resulting
/// year/month/day combination is not a valid date.
pub fn subtract_from_timestamp(&self, timestamp: &NaiveDateTime) -> Result<NaiveDateTime> {
    match self {
        Interval::Month(n) => {
            // Zero-based month arithmetic: the previous code computed
            // `months % 12` on a one-based total, yielding invalid month 0
            // (a spurious DateOverflow) whenever the target month was
            // December, e.g. December minus 12 months.
            let months = timestamp.year() * 12 + timestamp.month() as i32 - 1 - n;
            let year = months.div_euclid(12);
            let month = months.rem_euclid(12) + 1;
            timestamp
                .with_year(year)
                .and_then(|d| d.with_month(month as u32))
                .ok_or_else(|| IntervalError::DateOverflow { year, month }.into())
        }
        Interval::Microsecond(n) => Ok(*timestamp - Duration::microseconds(*n)),
    }
}
/// Add this interval to a time-of-day value.
///
/// # Errors
/// Year/month intervals cannot be applied to a bare time.
pub fn add_time(&self, time: &NaiveTime) -> Result<NaiveTime> {
    match self {
        Interval::Microsecond(n) => Ok(*time + Duration::microseconds(*n)),
        Interval::Month(_) => Err(IntervalError::AddYearOrMonthToTime {
            time: time.to_string(),
            interval: String::from(self),
        }
        .into()),
    }
}
/// Subtract this interval from a time-of-day value.
///
/// # Errors
/// Year/month intervals cannot be applied to a bare time.
pub fn subtract_from_time(&self, time: &NaiveTime) -> Result<NaiveTime> {
    match self {
        Interval::Microsecond(n) => Ok(*time - Duration::microseconds(*n)),
        Interval::Month(_) => Err(IntervalError::SubtractYearOrMonthToTime {
            time: time.to_string(),
            interval: String::from(self),
        }
        .into()),
    }
}
/// Year-based interval, stored as `years * 12` months.
pub fn years(years: i32) -> Self {
    Interval::Month(years * 12)
}

/// Month-based interval.
pub fn months(months: i32) -> Self {
    Interval::Month(months)
}
pub fn extract(&self, field: &DateTimeField) -> Result<Value> {
let value = match (field, *self) {
(DateTimeField::Year, Interval::Month(i)) => i as i64 / 12,
(DateTimeField::Month, Interval::Month(i)) => i as i64,
(DateTimeField::Day, Interval::Microsecond(i)) => i / DAY,
(DateTimeField::Hour, Interval::Microsecond(i)) => i / HOUR,
(DateTimeField::Minute, Interval::Microsecond(i)) => i / MINUTE,
(DateTimeField::Second, Interval::Microsecond(i)) => i / SECOND,
_ => {
return Err(IntervalError::FailedToExtract.into());
}
};
Ok(Value::I64(value))
}
/// Interval covering a number of whole days.
pub fn days(days: i32) -> Self {
    Interval::Microsecond(i64::from(days) * DAY)
}

/// Interval covering a number of whole hours.
pub fn hours(hours: i32) -> Self {
    Interval::Microsecond(i64::from(hours) * HOUR)
}

/// Interval covering a number of whole minutes.
pub fn minutes(minutes: i32) -> Self {
    Interval::Microsecond(i64::from(minutes) * MINUTE)
}

/// Interval covering a number of whole seconds.
pub fn seconds(seconds: i64) -> Self {
    Interval::Microsecond(seconds * SECOND)
}

/// Interval covering a number of milliseconds.
pub fn milliseconds(milliseconds: i64) -> Self {
    Interval::Microsecond(milliseconds * 1_000)
}

/// Interval covering a number of microseconds.
pub fn microseconds(microseconds: i64) -> Self {
    Interval::Microsecond(microseconds)
}
    /// Parses an SQL interval literal (e.g. `'10-2' YEAR TO MONTH`,
    /// `'1 01:30' DAY TO MINUTE`) into an `Interval`.
    ///
    /// `leading_field`/`last_field` come from the SQL `FROM [TO]` qualifier.
    /// A single leading field parses `value` as an integer (Year/Month) or a
    /// decimal multiple of the unit (Day/Hour/Minute/Second); ranged forms
    /// split the literal into their components. A leading `-` applies the
    /// sign to the whole literal. Errors are reported as the corresponding
    /// `IntervalError` variant.
    pub fn try_from_literal(
        value: &str,
        leading_field: Option<DateTimeField>,
        last_field: Option<DateTimeField>,
    ) -> Result<Self> {
        use DateTimeField::*;
        // Sign of the whole literal, derived from a leading '-'.
        let sign = || if value.get(0..1) == Some("-") { -1 } else { 1 };
        let parse_integer = |v: &str| {
            v.parse::<i32>()
                .map_err(|_| IntervalError::FailedToParseInteger(value.to_owned()).into())
        };
        // Parses `value` as a decimal and scales it by `duration`
        // (a microsecond unit constant), e.g. "1.5" DAY -> 1.5 * DAY µs.
        let parse_decimal = |duration: i64| {
            let parsed = Decimal::from_str(value)
                .map_err(|_| IntervalError::FailedToParseDecimal(value.to_owned()))?;
            (parsed * Decimal::from(duration))
                .to_i64()
                .ok_or_else(|| IntervalError::FailedToParseDecimal(value.to_owned()).into())
                .map(Interval::Microsecond)
        };
        // Parses an "HH:MM:SS[.fff]" fragment into a microsecond interval,
        // honoring the fragment's own leading '-'.
        let parse_time = |v: &str| {
            let sign = if v.get(0..1) == Some("-") { -1 } else { 1 };
            let v = v.trim_start_matches('-');
            let time = NaiveTime::from_str(v)
                .map_err(|_| IntervalError::FailedToParseTime(value.to_owned()))?;
            // NOTE: despite the name, `msec` holds microseconds
            // (nanoseconds are divided by 1000).
            let msec = time.hour() as i64 * HOUR
                + time.minute() as i64 * MINUTE
                + time.second() as i64 * SECOND
                + time.nanosecond() as i64 / 1000;
            Ok(Interval::Microsecond(sign as i64 * msec))
        };
        match (leading_field, last_field) {
            (Some(Year), None) => parse_integer(value).map(Interval::years),
            (Some(Month), None) => parse_integer(value).map(Interval::months),
            (Some(Day), None) => parse_decimal(DAY),
            (Some(Hour), None) => parse_decimal(HOUR),
            (Some(Minute), None) => parse_decimal(MINUTE),
            (Some(Second), None) => parse_decimal(SECOND),
            // "Y-M" form, e.g. "10-2" -> 10 years 2 months.
            (Some(Year), Some(Month)) => {
                let nums = value
                    .trim_start_matches('-')
                    .split('-')
                    .map(parse_integer)
                    .collect::<Result<Vec<_>>>()?;
                match (nums.first(), nums.get(1)) {
                    (Some(years), Some(months)) => {
                        Ok(Interval::months(sign() * (12 * years + months)))
                    }
                    _ => Err(IntervalError::FailedToParseYearToMonth(value.to_owned()).into()),
                }
            }
            // "D H" form, e.g. "2 12" -> 60 hours.
            (Some(Day), Some(Hour)) => {
                let nums = value
                    .trim_start_matches('-')
                    .split(' ')
                    .map(parse_integer)
                    .collect::<Result<Vec<_>>>()?;
                match (nums.first(), nums.get(1)) {
                    (Some(days), Some(hours)) => Ok(Interval::hours(sign() * (24 * days + hours))),
                    _ => Err(IntervalError::FailedToParseDayToHour(value.to_owned()).into()),
                }
            }
            // "D HH:MM" form; ":00" seconds are appended so NaiveTime parses it.
            (Some(Day), Some(Minute)) => {
                let nums = value.trim_start_matches('-').split(' ').collect::<Vec<_>>();
                match (nums.first(), nums.get(1)) {
                    (Some(days), Some(time)) => {
                        let days = parse_integer(days)?;
                        let time = format!("{}:00", time);
                        Interval::days(days)
                            .add(&parse_time(&time)?)
                            .map(|interval| sign() * interval)
                    }
                    _ => Err(IntervalError::FailedToParseDayToMinute(value.to_owned()).into()),
                }
            }
            // "D HH:MM:SS[.fff]" form.
            (Some(Day), Some(Second)) => {
                let nums = value.trim_start_matches('-').split(' ').collect::<Vec<_>>();
                match (nums.first(), nums.get(1)) {
                    (Some(days), Some(time)) => {
                        let days = parse_integer(days)?;
                        Interval::days(days)
                            .add(&parse_time(time)?)
                            .map(|interval| sign() * interval)
                    }
                    _ => Err(IntervalError::FailedToParseDayToSecond(value.to_owned()).into()),
                }
            }
            (Some(Hour), Some(Minute)) => parse_time(&format!("{}:00", value)),
            (Some(Hour), Some(Second)) => parse_time(value),
            // "MM:SS[.fff]" is prefixed with "00:" hours for NaiveTime.
            (Some(Minute), Some(Second)) => {
                let time = value.trim_start_matches('-');
                parse_time(&format!("00:{}", time)).map(|v| sign() * v)
            }
            (Some(from), Some(to)) => Err(IntervalError::UnsupportedRange(
                format!("{:?}", from),
                format!("{:?}", to),
            )
            .into()),
            // The SQL parser guarantees a leading field whenever a last
            // field is present, so this arm should be unreachable.
            (None, _) => Err(IntervalError::Unreachable.into()),
        }
    }
}
// Unit tests covering Interval arithmetic against chrono date/time types
// and parsing of SQL interval literals.
#[cfg(test)]
mod tests {
    use {
        super::{Interval, IntervalError},
        crate::ast::DateTimeField,
        chrono::{NaiveDate, NaiveTime},
    };
    // Test helper: infallible date constructor.
    fn date(year: i32, month: u32, day: u32) -> NaiveDate {
        NaiveDate::from_ymd_opt(year, month, day).unwrap()
    }
    // Test helper: infallible time constructor.
    fn time(hour: u32, min: u32, sec: u32) -> NaiveTime {
        NaiveTime::from_hms_opt(hour, min, sec).unwrap()
    }
    #[test]
    fn arithmetic() {
        use Interval::*;
        macro_rules! test {
            ($op: ident $a: expr, $b: expr => $c: expr) => {
                assert_eq!($a.$op(&$b), Ok($c));
            };
        }
        assert_eq!(Month(1).unary_minus(), Month(-1));
        assert_eq!(Microsecond(1).unary_minus(), Microsecond(-1));
        // date
        assert_eq!(
            Month(2).add_date(&date(2021, 11, 11)),
            Ok(date(2022, 1, 11).and_hms_opt(0, 0, 0).unwrap())
        );
        assert_eq!(
            Interval::hours(30).add_date(&date(2021, 11, 11)),
            Ok(date(2021, 11, 12).and_hms_opt(6, 0, 0).unwrap())
        );
        assert_eq!(
            Interval::years(999_999).add_date(&date(2021, 11, 11)),
            Err(IntervalError::DateOverflow {
                year: 1_002_020,
                month: 11,
            }
            .into())
        );
        assert_eq!(
            Month(2).subtract_from_date(&date(2021, 11, 11)),
            Ok(date(2021, 9, 11).and_hms_opt(0, 0, 0).unwrap())
        );
        assert_eq!(
            Month(14).subtract_from_date(&date(2021, 11, 11)),
            Ok(date(2020, 9, 11).and_hms_opt(0, 0, 0).unwrap())
        );
        assert_eq!(
            Interval::hours(30).subtract_from_date(&date(2021, 11, 11)),
            Ok(date(2021, 11, 9).and_hms_opt(18, 0, 0).unwrap())
        );
        assert_eq!(
            Interval::years(999_999).subtract_from_date(&date(2021, 11, 11)),
            Err(IntervalError::DateOverflow {
                year: -997977,
                month: -1,
            }
            .into())
        );
        // timestamp
        assert_eq!(
            Interval::minutes(2).add_timestamp(&date(2021, 11, 11).and_hms_opt(12, 3, 1).unwrap()),
            Ok(date(2021, 11, 11).and_hms_opt(12, 5, 1).unwrap())
        );
        assert_eq!(
            Interval::hours(30).add_timestamp(&date(2021, 11, 11).and_hms_opt(0, 30, 0).unwrap()),
            Ok(date(2021, 11, 12).and_hms_opt(6, 30, 0).unwrap())
        );
        assert_eq!(
            Interval::years(999_999)
                .add_timestamp(&date(2021, 11, 11).and_hms_opt(1, 1, 1).unwrap()),
            Err(IntervalError::DateOverflow {
                year: 1_002_020,
                month: 11,
            }
            .into())
        );
        assert_eq!(
            Month(2).subtract_from_timestamp(&date(2021, 11, 11).and_hms_opt(1, 3, 59).unwrap()),
            Ok(date(2021, 9, 11).and_hms_opt(1, 3, 59).unwrap())
        );
        assert_eq!(
            Month(14).subtract_from_timestamp(&date(2021, 11, 11).and_hms_opt(23, 1, 1).unwrap()),
            Ok(date(2020, 9, 11).and_hms_opt(23, 1, 1).unwrap())
        );
        assert_eq!(
            Interval::seconds(30)
                .subtract_from_timestamp(&date(2021, 11, 11).and_hms_opt(0, 0, 0).unwrap()),
            Ok(date(2021, 11, 10).and_hms_opt(23, 59, 30).unwrap())
        );
        assert_eq!(
            Interval::years(999_999)
                .subtract_from_timestamp(&date(2021, 11, 11).and_hms_opt(0, 0, 0).unwrap()),
            Err(IntervalError::DateOverflow {
                year: -997977,
                month: -1,
            }
            .into())
        );
        // time (wraps around midnight; year/month intervals are rejected)
        assert_eq!(
            Interval::minutes(30).add_time(&time(23, 0, 1)),
            Ok(time(23, 30, 1))
        );
        assert_eq!(
            Interval::hours(20).add_time(&time(5, 30, 0)),
            Ok(time(1, 30, 0))
        );
        assert_eq!(
            Interval::years(1).add_time(&time(23, 0, 1)),
            Err(IntervalError::AddYearOrMonthToTime {
                time: time(23, 0, 1).to_string(),
                interval: String::from(Interval::years(1)),
            }
            .into())
        );
        assert_eq!(
            Interval::minutes(30).subtract_from_time(&time(23, 0, 1)),
            Ok(time(22, 30, 1))
        );
        assert_eq!(
            Interval::hours(20).subtract_from_time(&time(5, 30, 0)),
            Ok(time(9, 30, 0))
        );
        assert_eq!(
            Interval::months(3).subtract_from_time(&time(23, 0, 1)),
            Err(IntervalError::SubtractYearOrMonthToTime {
                time: time(23, 0, 1).to_string(),
                interval: String::from(Interval::months(3)),
            }
            .into())
        );
        test!(add Month(1), Month(2) => Month(3));
        test!(subtract Month(1), Month(2) => Month(-1));
        test!(add Microsecond(1), Microsecond(2) => Microsecond(3));
        test!(subtract Microsecond(1), Microsecond(2) => Microsecond(-1));
    }
    #[test]
    fn try_from_literal() {
        // First arm: single leading field; second arm: FROM..TO range.
        macro_rules! test {
            ($value: expr, $datetime: ident => $expected_value: expr, $duration: ident) => {
                let interval =
                    Interval::try_from_literal($value, Some(DateTimeField::$datetime), None);
                assert_eq!(interval, Ok(Interval::$duration($expected_value)));
            };
            ($value: expr, $from: ident to $to: ident => $expected_value: expr, $duration: ident) => {
                let interval = Interval::try_from_literal(
                    $value,
                    Some(DateTimeField::$from),
                    Some(DateTimeField::$to),
                );
                assert_eq!(interval, Ok(Interval::$duration($expected_value)));
            };
        }
        test!("11", Year => 11, years);
        test!("-11", Year => -11, years);
        test!("18", Month => 18, months);
        test!("-19", Month => -19, months);
        test!("2", Day => 2, days);
        test!("1.5", Day => 36, hours);
        test!("-1.5", Day => -36, hours);
        test!("2.5", Hour => 150, minutes);
        test!("1", Hour => 60, minutes);
        test!("-1", Hour => -60, minutes);
        test!("35", Minute => 35, minutes);
        test!("-35", Minute => -35, minutes);
        test!("10.5", Minute => 630, seconds);
        test!("10", Second => 10, seconds);
        test!("-10", Second => -10, seconds);
        test!("10.5", Second => 10_500_000, microseconds);
        test!("-1.5", Second => -1_500_000, microseconds);
        test!("10-2", Year to Month => 122, months);
        test!("2 12", Day to Hour => 60, hours);
        test!("1 01:30", Day to Minute => 60 * 24 + 90, minutes);
        test!("1 01:30:40", Day to Second => (60 * 24 + 90) * 60 + 40, seconds);
        test!("3 02:30:40.1234", Day to Second =>
            (((3 * 24 + 2) * 60 + 30) * 60 + 40) * 1_000_000 + 123_400, microseconds);
        test!("12:34", Hour to Minute => 12 * 60 + 34, minutes);
        test!("12:34:56", Hour to Second => (12 * 60 + 34) * 60 + 56, seconds);
        test!("12:34:56.1234", Hour to Second => ((12 * 60 + 34) * 60 + 56) * 1_000_000 + 123_400, microseconds);
        test!("34:56.1234", Minute to Second => (34 * 60 + 56) * 1_000_000 + 123_400, microseconds);
        test!("-1-4", Year to Month => -16, months);
        test!("-2 10", Day to Hour => -58, hours);
        test!("-1 00:01", Day to Minute => -(24 * 60 + 1), minutes);
        test!("-1 00:00:01", Day to Second => -(24 * 3600 + 1), seconds);
        test!("-1 00:00:01.1", Day to Second => -((24 * 3600 + 1) * 1000 + 100), milliseconds);
        test!("-21:10", Hour to Minute => -(21 * 60 + 10), minutes);
        test!("-05:12:03", Hour to Second => -(5 * 3600 + 12 * 60 + 3), seconds);
        test!("-03:59:22.372", Hour to Second => -((3 * 3600 + 59 * 60 + 22) * 1000 + 372), milliseconds);
        test!("-09:33", Minute to Second => -(9 * 60 + 33), seconds);
        test!("-09:33.192", Minute to Second => -((9 * 60 + 33) * 1000 + 192), milliseconds);
    }
}
| true
|
2941548c7f62073984285dd2eef390dcadbea9ef
|
Rust
|
richardlee159/rCore
|
/os/src/task/task.rs
|
UTF-8
| 6,540
| 2.515625
| 3
|
[] |
no_license
|
use super::{
pid::{pid_alloc, KernelStack, PidHandle},
TaskContext,
};
use crate::{
config::TRAP_CONTEXT,
fs::{File, STDIN, STDOUT},
mm::{MemorySet, PhysPageNum, VirtAddr},
trap::TrapContext,
};
use alloc::{
sync::{Arc, Weak},
vec,
vec::Vec,
};
use spin::{Mutex, MutexGuard};
/// Scheduling state of a task as tracked by the task manager.
#[derive(PartialEq)]
pub enum TaskStatus {
    Ready,   // runnable, waiting to be scheduled
    Running, // currently executing
    Zombie,  // exited; exit code not yet collected by the parent
}
/// Mutable part of a task control block, protected by the `Mutex` in
/// `TaskControlBlock`.
pub struct TaskControlBlockInner {
    // Saved task-context pointer used by the context-switch routine.
    pub task_ctx_ptr: usize,
    pub task_status: TaskStatus,
    // Scheduling priority (stride scheduling); default is 16, see `new`.
    pub task_prio: usize,
    pub task_stride: usize,
    // User address space of this task.
    pub memory_set: MemorySet,
    // Physical page holding this task's TrapContext.
    pub trap_ctx_ppn: PhysPageNum,
    // Initially the user stack top, per `new`/`fork`.
    pub base_size: usize,
    // Weak to avoid a parent<->child reference cycle.
    pub parent: Option<Weak<TaskControlBlock>>,
    pub children: Vec<Arc<TaskControlBlock>>,
    pub exit_code: i32,
    // File descriptor table; index = fd, None = free slot.
    pub fd_table: Vec<Option<Arc<dyn File>>>,
}
impl TaskControlBlockInner {
    /// Address of the `task_ctx_ptr` field itself, handed to the context
    /// switch code so it can load/store the saved context pointer.
    pub fn get_task_ctx_ptr2(&self) -> *const usize {
        &self.task_ctx_ptr
    }
    /// Trap context of this task, accessed through its physical page.
    pub fn get_trap_ctx(&self) -> &'static mut TrapContext {
        self.trap_ctx_ppn.get_mut()
    }
    /// Page-table token identifying this task's user address space.
    pub fn get_user_token(&self) -> usize {
        self.memory_set.page_table.token()
    }
    pub fn is_zombie(&self) -> bool {
        self.task_status == TaskStatus::Zombie
    }
    /// Returns the lowest free file descriptor, growing the table by one
    /// slot when every existing slot is occupied.
    pub fn alloc_fd(&mut self) -> usize {
        if let Some(fd) = self.fd_table.iter().position(|f| f.is_none()) {
            fd
        } else {
            self.fd_table.push(None);
            self.fd_table.len() - 1
        }
    }
}
/// Per-task control block: immutable identity (pid, kernel stack) plus the
/// lock-protected mutable state in `TaskControlBlockInner`.
pub struct TaskControlBlock {
    // immutable
    pub pid: PidHandle,
    pub kernel_stack: KernelStack,
    // mutable
    inner: Mutex<TaskControlBlockInner>,
}
impl TaskControlBlock {
    /// Locks and returns the mutable inner state. Callers must respect the
    /// lock ordering used in `fork` (parent before child).
    pub fn acquire_inner_lock(&self) -> MutexGuard<TaskControlBlockInner> {
        self.inner.lock()
    }
    pub fn getpid(&self) -> usize {
        self.pid.0
    }
    /// Creates a brand-new task from an ELF image: builds its address space,
    /// allocates a pid and kernel stack, and initializes its trap context so
    /// the first schedule enters the program at `entry_point`.
    pub fn new(elf_data: &[u8]) -> Self {
        // memory_set with elf program headers/trampoline/trap context/user stack
        let (memory_set, user_sp, entry_point) = MemorySet::from_elf(elf_data);
        let trap_ctx_ppn = memory_set
            .translate(VirtAddr::from(TRAP_CONTEXT).into())
            .unwrap()
            .ppn();
        // alloc a pid and a kernel stack in kernel space
        let pid_handle = pid_alloc();
        let kernel_stack = KernelStack::new(&pid_handle);
        let kernel_stack_top = kernel_stack.get_top();
        // push a task context which goes to trap_return to the top of kernel stack
        let task_ctx_ptr = kernel_stack.push_on_top(TaskContext::goto_trap_return());
        let task_control_block = Self {
            pid: pid_handle,
            kernel_stack,
            inner: Mutex::new(TaskControlBlockInner {
                task_ctx_ptr: task_ctx_ptr as usize,
                task_status: TaskStatus::Ready,
                task_prio: 16,
                task_stride: 0,
                memory_set,
                trap_ctx_ppn,
                base_size: user_sp,
                parent: None,
                children: Vec::new(),
                exit_code: 0,
                // fd 0/1/2 = stdin/stdout/stderr; stderr is backed by STDOUT.
                fd_table: vec![
                    Some(Arc::new(STDIN)),
                    Some(Arc::new(STDOUT)),
                    Some(Arc::new(STDOUT)),
                ],
            }),
        };
        // prepare TrapContext in user space
        let trap_ctx = task_control_block.acquire_inner_lock().get_trap_ctx();
        *trap_ctx = TrapContext::app_init_context(entry_point, user_sp, kernel_stack_top);
        task_control_block
    }
    /// Clones this task: copies the whole user address space (including the
    /// trap context), shares the fd table by cloning its Arcs, and registers
    /// the new task as a child of `self`.
    pub fn fork(self: &Arc<Self>) -> Arc<Self> {
        // ---- hold parent PCB lock
        let mut parent_inner = self.acquire_inner_lock();
        // copy user space (include trap context)
        let memory_set = MemorySet::from_existed_user(&parent_inner.memory_set);
        let trap_ctx_ppn = memory_set
            .translate(VirtAddr::from(TRAP_CONTEXT).into())
            .unwrap()
            .ppn();
        // alloc a pid and a kernel stack in kernel space
        let pid_handle = pid_alloc();
        let kernel_stack = KernelStack::new(&pid_handle);
        let kernel_stack_top = kernel_stack.get_top();
        // push a task context which goes to trap_return to the top of kernel stack
        let task_ctx_ptr = kernel_stack.push_on_top(TaskContext::goto_trap_return());
        let task_control_block = Arc::new(Self {
            pid: pid_handle,
            kernel_stack,
            inner: Mutex::new(TaskControlBlockInner {
                task_ctx_ptr: task_ctx_ptr as usize,
                task_status: TaskStatus::Ready,
                task_prio: 16,
                task_stride: 0,
                memory_set,
                trap_ctx_ppn,
                base_size: parent_inner.base_size,
                parent: Some(Arc::downgrade(self)),
                children: Vec::new(),
                exit_code: 0,
                fd_table: parent_inner.fd_table.clone(),
            }),
        });
        // add child
        parent_inner.children.push(task_control_block.clone());
        // modify kernel_sp in trap_ctx: the copied trap context still points
        // at the parent's kernel stack.
        // **** acquire child PCB lock
        let trap_ctx = task_control_block.acquire_inner_lock().get_trap_ctx();
        // **** release child PCB lock
        trap_ctx.kernel_sp = kernel_stack_top;
        // return
        task_control_block
        // ---- release parent PCB lock
    }
    /// Replaces this task's program with a new ELF image (pid, kernel stack
    /// and fd table are kept; the old address space is dropped).
    pub fn exec(&self, elf_data: &[u8]) {
        // memory_set with elf program headers/trampoline/trap context/user stack
        let (memory_set, user_sp, entry_point) = MemorySet::from_elf(elf_data);
        let trap_ctx_ppn = memory_set
            .translate(VirtAddr::from(TRAP_CONTEXT).into())
            .unwrap()
            .ppn();
        // **** hold current PCB lock
        let mut inner = self.acquire_inner_lock();
        // substitute memory_set
        inner.memory_set = memory_set;
        // update trap_ctx ppn
        inner.trap_ctx_ppn = trap_ctx_ppn;
        // initialize trap_ctx
        let trap_ctx = inner.get_trap_ctx();
        *trap_ctx =
            TrapContext::app_init_context(entry_point, user_sp, self.kernel_stack.get_top());
        // **** release current PCB lock
    }
    /// fork+exec in one step: creates a fresh task from `elf_data` (so no
    /// address-space copy) and links it as a child of `self`.
    pub fn spawn_child(self: &Arc<Self>, elf_data: &[u8]) -> Arc<Self> {
        let task_control_block = Arc::new(TaskControlBlock::new(elf_data));
        task_control_block.acquire_inner_lock().parent = Some(Arc::downgrade(self));
        self.acquire_inner_lock()
            .children
            .push(task_control_block.clone());
        task_control_block
    }
}
| true
|
c312efa65b1f4ab3196650939b101f52dec12115
|
Rust
|
thinkofher/aoc2019
|
/day_02/src/main.rs
|
UTF-8
| 3,256
| 3.65625
| 4
|
[] |
no_license
|
use std::env;
use std::io;
use std::io::prelude::*;
/// An intcode instruction kind (Advent of Code 2019, day 2).
enum Operation {
    Add,
    Multiply,
}
impl Operation {
fn from_i32(code: i32) -> Option<Operation> {
match code {
1 => Some(Operation::Add),
2 => Some(Operation::Multiply),
_ => None,
}
}
fn execute(&self, a: i32, b: i32) -> i32 {
match &self {
Operation::Add => a + b,
Operation::Multiply => a * b,
}
}
}
/// Patches the program's "noun" and "verb" inputs (positions 1 and 2), as
/// the day-2 puzzle requires, before the intcode program is run.
///
/// Panics if `intcode` has fewer than 3 cells, mirroring the original
/// `unwrap`-on-`get_mut` behaviour for malformed programs.
fn prepare_calculations(intcode: &mut Vec<i32>, noun: i32, verb: i32) {
    // Direct indexing replaces the previous scoped `get_mut(..).unwrap()`
    // blocks; both forms panic on out-of-range positions.
    intcode[1] = noun;
    intcode[2] = verb;
}
fn solve_intcode(mut intcode: Vec<i32>) -> i32 {
for (min, max) in (0..intcode.len() / 4).zip(1..intcode.len() / 4 + 1) {
let parameters_positions: Vec<usize> = (min * 4..max * 4).collect();
let operation: Operation;
let first_value: i32;
let second_value: i32;
let ans_position: usize;
{
let operation_code = *intcode.get(parameters_positions[0]).unwrap();
operation = match Operation::from_i32(operation_code) {
Some(op) => op,
None => {
break;
}
}
}
{
first_value = *intcode
.get(*intcode.get(parameters_positions[1]).unwrap() as usize)
.unwrap();
}
{
second_value = *intcode
.get(*intcode.get(parameters_positions[2]).unwrap() as usize)
.unwrap();
}
{
ans_position = *intcode.get(parameters_positions[3]).unwrap() as usize;
}
let ans = intcode.get_mut(ans_position).unwrap();
*ans = operation.execute(first_value, second_value);
}
return *intcode.get(0).unwrap();
}
/// Day 2 part 2 driver: reads an intcode program from stdin (comma-separated
/// cells, possibly spread over several lines), takes the target output as
/// the first CLI argument, then searches every noun/verb pair in 0..100 for
/// one whose run leaves the target at position 0 and prints
/// `100 * noun + verb`.
fn main() {
    let input = io::stdin();
    // Parse all CLI arguments as integers up front, rejecting anything else.
    let numerical_args: Vec<i32> = env::args()
        .skip(1)
        .map(|val| {
            val.parse().unwrap_or_else(|_| {
                eprintln!("Only numerical values as arguments!");
                std::process::exit(1);
            })
        })
        .collect();
    let search_value: i32 = *numerical_args.get(0).unwrap_or_else(|| {
        eprintln!("You have to provide value for search!");
        std::process::exit(1);
    });
    // The program may be split across lines; concatenate all cells.
    let mut intcode = Vec::new();
    for line in input.lock().lines() {
        let mut code: Vec<i32> = line
            .unwrap_or(String::from(""))
            .split(',')
            .map(|val| val.parse().unwrap())
            .collect();
        intcode.append(&mut code);
    }
    // NOTE: the original also called `prepare_calculations(&mut intcode, 12, 2)`
    // here (the part-1 setup), but every candidate run below clones the
    // program and overwrites positions 1 and 2 anyway, so the call had no
    // observable effect and has been removed.
    let mut ans: Option<i32> = None;
    for noun in 0..100 {
        for verb in 0..100 {
            let mut intcode_to_solve = intcode.clone();
            prepare_calculations(&mut intcode_to_solve, noun, verb);
            // Keep scanning (no break) so the last matching pair wins,
            // exactly as before.
            if solve_intcode(intcode_to_solve) == search_value {
                ans = Some(100 * noun + verb);
            }
        }
    }
    match ans {
        Some(value) => println!("{}", value),
        None => println!("Could not find the answer."),
    }
}
| true
|
e7e8c6448cc66363e822d5733cf4d9cf70bec289
|
Rust
|
softdevteam/lspace
|
/src/lib/lspace/elements/element_ctx.rs
|
UTF-8
| 2,367
| 2.625
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::rc::Rc;
use std::cell::RefCell;
use cairo::Context;
use layout::lreq::LReq;
use elements::element_layout::ElementReq;
use elements::text_element::{TextReqKey, TextStyleParams};
/// Mutable interior of `ElementContext`: a cache of text-measurement
/// requisitions keyed by (style, text).
struct ElementContextMut {
    req_table: HashMap<TextReqKey, Rc<ElementReq>>,
}
impl ElementContextMut {
    /// Returns the shared requisition for `text` rendered with `style`,
    /// measuring it with the cairo context and caching the result on first
    /// use (subsequent calls with the same (style, text) key hit the cache).
    fn text_shared_req(&mut self, style: Rc<TextStyleParams>, text: String,
                       cairo_ctx: &Context) -> Rc<ElementReq> {
        let key = style.text_req_key(text.clone());
        let req_entry = self.req_table.entry(key);
        return match req_entry {
            Entry::Vacant(v) => {
                // Apply the style first so the extents are measured with the
                // correct font settings.
                style.apply(cairo_ctx);
                let font_extents = cairo_ctx.font_extents();
                let text_extents = cairo_ctx.text_extents(text.clone().as_str());
                // Width from the advance; height split around the baseline.
                let x_req = LReq::new_fixed_size(text_extents.x_advance);
                let y_req = LReq::new_fixed_ref(font_extents.ascent, font_extents.descent);
                let shreq = Rc::new(ElementReq::new_from_reqs(x_req, y_req));
                v.insert(shreq.clone());
                shreq
            },
            Entry::Occupied(o) => o.get().clone()
        };
    }
}
/// Shared element context: an interior-mutable measurement cache plus a
/// single pre-built requisition for empty elements.
pub struct ElementContext {
    m: RefCell<ElementContextMut>,
    empty_shared_req: Rc<ElementReq>
}
impl ElementContext {
    /// Creates a context with an empty measurement cache and a pre-built
    /// shared requisition for empty elements.
    pub fn new() -> ElementContext {
        let inner = ElementContextMut { req_table: HashMap::new() };
        ElementContext {
            m: RefCell::new(inner),
            empty_shared_req: Rc::new(ElementReq::new()),
        }
    }

    /// Returns the cached (or newly measured) requisition for `text` drawn
    /// with `style`; delegates to `ElementContextMut::text_shared_req`.
    pub fn text_shared_req(&self, style: Rc<TextStyleParams>, text: String,
                           cairo_ctx: &Context) -> Rc<ElementReq> {
        self.m.borrow_mut().text_shared_req(style, text, cairo_ctx)
    }

    /// Shared requisition used by elements with no content.
    pub fn empty_shared_req(&self) -> Rc<ElementReq> {
        self.empty_shared_req.clone()
    }
}
/// Bundles the element context with the cairo context used during layout.
pub struct ElementLayoutContext <'a> {
    ctx: &'a ElementContext,
    cairo_ctx: &'a Context
}
impl <'a> ElementLayoutContext<'a> {
    /// Pairs an element context with the cairo context for one layout pass.
    pub fn new<'b>(ctx: &'b ElementContext, cairo_ctx: &'b Context) -> ElementLayoutContext<'b> {
        ElementLayoutContext { ctx, cairo_ctx }
    }

    /// The element context (measurement cache etc.).
    pub fn elem_ctx(&'a self) -> &'a ElementContext {
        self.ctx
    }

    /// The cairo drawing context.
    pub fn cairo_ctx(&'a self) -> &'a Context {
        self.cairo_ctx
    }
}
| true
|
debde5e35db23676990763555ccbd14597f6d4a6
|
Rust
|
It4innovations/hyperqueue
|
/crates/tako/src/internal/server/workerload.rs
|
UTF-8
| 13,464
| 2.6875
| 3
|
[
"MIT"
] |
permissive
|
use crate::internal::common::index::IndexVec;
use crate::internal::common::resources::map::ResourceMap;
use crate::internal::common::resources::request::ResourceRequestEntry;
use crate::internal::common::resources::{
ResourceAmount, ResourceDescriptor, ResourceId, ResourceRequest, ResourceRequestVariants,
ResourceVec,
};
use crate::internal::messages::worker::WorkerResourceCounts;
use crate::resources::AllocationRequest;
use crate::{Map, Set, TaskId};
use std::ops::Deref;
// WorkerResources are transformed information from ResourceDescriptor
// but transformed for scheduler needs
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct WorkerResources {
    // Total capacity per resource id; normalized so equal capacities hash
    // equal (see `from_description`).
    n_resources: ResourceVec<ResourceAmount>,
}
impl WorkerResources {
    /// Rebuilds worker resources from their wire representation.
    pub(crate) fn from_transport(msg: WorkerResourceCounts) -> Self {
        WorkerResources {
            n_resources: msg.n_resources.into(),
        }
    }
    /// Capacity of `resource_id`; unknown ids count as 0.
    pub(crate) fn get(&self, resource_id: ResourceId) -> ResourceAmount {
        self.n_resources.get(resource_id).copied().unwrap_or(0)
    }
    /// Builds the scheduler view of a worker's resource descriptor, sized to
    /// the largest resource id so equal configurations compare/hash equal.
    pub(crate) fn from_description(
        resource_desc: &ResourceDescriptor,
        resource_map: &ResourceMap,
    ) -> Self {
        // We only take maximum needed resource id
        // We are doing it for normalization purposes. It is useful later
        // for WorkerLoad structure that hashed
        let resource_count = resource_desc
            .resources
            .iter()
            .map(|x| resource_map.get_index(&x.name).unwrap().as_num() as usize + 1)
            .max()
            .unwrap_or(0);
        let mut n_resources: ResourceVec<ResourceAmount> = IndexVec::filled(0, resource_count);
        for descriptor in &resource_desc.resources {
            let position = resource_map.get_index(&descriptor.name).unwrap();
            n_resources[position] = descriptor.kind.size()
        }
        WorkerResources { n_resources }
    }
    /// True if every entry's minimum demand fits this worker's capacity.
    pub(crate) fn is_capable_to_run_request(&self, request: &ResourceRequest) -> bool {
        request.entries().iter().all(|r| {
            let ask = r.request.min_amount();
            let has = self.get(r.resource_id);
            ask <= has
        })
    }
    /// True if at least one alternative of the variant set fits.
    pub(crate) fn is_capable_to_run(&self, rqv: &ResourceRequestVariants) -> bool {
        rqv.requests().iter().any(|rq| {
            rq.entries().iter().all(|r| {
                let ask = r.request.min_amount();
                let has = self.get(r.resource_id);
                ask <= has
            })
        })
    }
    pub(crate) fn to_transport(&self) -> WorkerResourceCounts {
        WorkerResourceCounts {
            n_resources: self.n_resources.deref().clone(),
        }
    }
    /// Largest amount `entry` may consume on this worker (`All` takes the
    /// whole capacity).
    pub(crate) fn max_amount(&self, entry: &ResourceRequestEntry) -> ResourceAmount {
        match entry.request {
            AllocationRequest::Compact(amount)
            | AllocationRequest::ForceCompact(amount)
            | AllocationRequest::Scatter(amount) => amount,
            AllocationRequest::All => self.get(entry.resource_id),
        }
    }
    /// Scores how much of this worker a request occupies; 0 if any requested
    /// resource is absent.
    pub fn difficulty_score(&self, request: &ResourceRequest) -> u32 {
        let mut result = 0;
        for entry in request.entries() {
            let count = self
                .n_resources
                .get(entry.resource_id)
                .copied()
                .unwrap_or(0);
            if count == 0 {
                return 0;
            }
            // NOTE(review): the `* 512` factors cancel exactly
            // ((a * 512) / (b * 512) == a / b for positive integers), so this
            // is plain integer division of amount by capacity (0 or 1 per
            // entry). If fixed-point scaling such as `(a * 512) / b` was
            // intended, confirm upstream before changing scheduler behavior.
            result += ((entry.request.amount(count) * 512) / (count * 512)) as u32;
        }
        result
    }
    /// Minimum difficulty score over all alternatives of a variant set.
    pub fn difficulty_score_of_rqv(&self, rqv: &ResourceRequestVariants) -> u32 {
        rqv.requests()
            .iter()
            .map(|r| self.difficulty_score(r))
            .min()
            .unwrap_or(0)
    }
}
// This represents a current worker load from server perspective
// Note: It ignores time request, as "remaining time" is "always changing" resource
// while this structure is also used in hashset for parking resources
// It is solved in scheduler by directly calling worker.has_time_to_run
#[derive(Debug, Eq, PartialEq)]
pub struct WorkerLoad {
    // Currently allocated amount per resource id.
    n_resources: ResourceVec<ResourceAmount>,
    /// The map stores task_ids of requests for which non-first resource alternative is used
    /// i.e. if all tasks has only 1 option in resource requets, this map will be empty
    non_first_rq: Map<TaskId, usize>,
    // Tie-breaker used when no alternative fits immediately; see `add_request`.
    round_robin_counter: usize,
}
impl WorkerLoad {
    /// Empty load, shaped to match the worker's resource vector.
    pub(crate) fn new(worker_resources: &WorkerResources) -> WorkerLoad {
        WorkerLoad {
            n_resources: IndexVec::filled(0, worker_resources.n_resources.len()),
            non_first_rq: Default::default(),
            round_robin_counter: 0,
        }
    }
    // Accounts one concrete request against the load (amounts resolved
    // relative to the worker's capacities, e.g. for `All`).
    fn _add(&mut self, rq: &ResourceRequest, wr: &WorkerResources) {
        for r in rq.entries() {
            self.n_resources[r.resource_id] += r.request.amount(wr.n_resources[r.resource_id]);
        }
    }
    /// Accounts a task's request: picks the first alternative that fits the
    /// worker right now, otherwise round-robins over the alternatives.
    /// A chosen non-first alternative is remembered in `non_first_rq` so
    /// `remove_request` can undo exactly the same amounts.
    pub(crate) fn add_request(
        &mut self,
        task_id: TaskId,
        rqv: &ResourceRequestVariants,
        wr: &WorkerResources,
    ) {
        if let Some(rq) = rqv.trivial_request() {
            self._add(rq, wr);
            return;
        }
        let idx: usize = rqv
            .requests()
            .iter()
            .enumerate()
            .find_map(|(i, rq)| {
                if self.have_immediate_resources_for_rq(rq, wr) {
                    Some(i)
                } else {
                    None
                }
            })
            .unwrap_or_else(|| {
                let v = self.round_robin_counter.wrapping_add(1);
                self.round_robin_counter = v;
                v % rqv.requests().len()
            });
        self._add(&rqv.requests()[idx], wr);
        if idx != 0 {
            self.non_first_rq.insert(task_id, idx);
        }
    }
    /// Undoes `add_request` for `task_id`, using the remembered alternative
    /// index (defaulting to the first alternative).
    pub(crate) fn remove_request(
        &mut self,
        task_id: TaskId,
        rqv: &ResourceRequestVariants,
        wr: &WorkerResources,
    ) {
        let idx = self.non_first_rq.remove(&task_id).unwrap_or(0);
        for r in rqv.requests()[idx].entries() {
            self.n_resources[r.resource_id] -= r.request.amount(wr.n_resources[r.resource_id]);
        }
    }
    /// True if strictly below capacity in *every* resource.
    pub(crate) fn is_underloaded(&self, wr: &WorkerResources) -> bool {
        self.n_resources
            .iter()
            .zip(wr.n_resources.iter())
            .all(|(v, w)| v < w)
    }
    /// True if above capacity in *any* resource.
    pub(crate) fn is_overloaded(&self, wr: &WorkerResources) -> bool {
        self.n_resources
            .iter()
            .zip(wr.n_resources.iter())
            .any(|(v, w)| v > w)
    }
    /// Currently allocated amount of `resource_id`; unknown ids count as 0.
    pub(crate) fn get(&self, resource_id: ResourceId) -> ResourceAmount {
        self.n_resources.get(resource_id).copied().unwrap_or(0)
    }
    /// True if the request's worst-case amounts fit into the remaining free
    /// capacity right now.
    pub(crate) fn have_immediate_resources_for_rq(
        &self,
        request: &ResourceRequest,
        wr: &WorkerResources,
    ) -> bool {
        request.entries().iter().all(|r| {
            let amount = wr.max_amount(r);
            amount + self.get(r.resource_id) <= wr.get(r.resource_id)
        })
    }
    /// True if any alternative of the variant set fits immediately.
    pub(crate) fn have_immediate_resources_for_rqv(
        &self,
        requests: &ResourceRequestVariants,
        wr: &WorkerResources,
    ) -> bool {
        requests
            .requests()
            .iter()
            .any(|r| self.have_immediate_resources_for_rq(r, wr))
    }
    /// True if any request tracked by the lower bound fits immediately.
    pub(crate) fn have_immediate_resources_for_lb(
        &self,
        lower_bound: &ResourceRequestLowerBound,
        wr: &WorkerResources,
    ) -> bool {
        lower_bound
            .request_set
            .iter()
            .any(|r| self.have_immediate_resources_for_rq(r, wr))
    }
    /// Minimum load score over all alternatives of a variant set.
    pub(crate) fn load_wrt_rqv(&self, wr: &WorkerResources, rqv: &ResourceRequestVariants) -> u32 {
        rqv.requests()
            .iter()
            .map(|r| self.load_wrt_request(wr, r))
            .min()
            .unwrap_or(0)
    }
    /// Scores the current load on the resources a request cares about;
    /// 0 if any such resource is absent on the worker.
    pub(crate) fn load_wrt_request(&self, wr: &WorkerResources, request: &ResourceRequest) -> u32 {
        let mut result = 0;
        for entry in request.entries() {
            let count = wr.n_resources.get(entry.resource_id).copied().unwrap_or(0);
            if count == 0 {
                return 0;
            }
            let load = self
                .n_resources
                .get(entry.resource_id)
                .copied()
                .unwrap_or(0);
            // NOTE(review): as in `WorkerResources::difficulty_score`, the
            // `* 512` factors cancel, leaving plain `load / count` integer
            // division; confirm upstream whether `(load * 512) / count`
            // fixed-point scaling was intended.
            result += ((load * 512) / (count * 512)) as u32;
        }
        result
    }
}
/// This structure tracks an infimum over a set of task requests
/// requests are added by method "include" to this set.
#[derive(Debug, Default)]
pub struct ResourceRequestLowerBound {
    // Deduplicated set of every request seen via `include`.
    request_set: Set<ResourceRequest>,
}
impl ResourceRequestLowerBound {
    pub(crate) fn new() -> Self {
        Default::default()
    }

    /// Adds `request` to the tracked set; duplicates are ignored. The
    /// `contains` pre-check avoids cloning a request that is already present.
    // TODO: Technically it would sufficient store only requests that are not covered by existing,
    // TODO: Need to investigate how coverage works for multinode
    pub(crate) fn include(&mut self, request: &ResourceRequest) {
        let is_new = !self.request_set.contains(request);
        if is_new {
            self.request_set.insert(request.clone());
        }
    }

    /// Adds every alternative of a resource-request variant set.
    pub(crate) fn include_rqv(&mut self, rqv: &ResourceRequestVariants) {
        rqv.requests().iter().for_each(|rq| self.include(rq));
    }
}
// Tests for load accounting, the lower-bound set, and alternative selection.
#[cfg(test)]
mod tests {
    use crate::internal::common::resources::ResourceRequestVariants;
    use crate::internal::server::workerload::{
        ResourceRequestLowerBound, WorkerLoad, WorkerResources,
    };
    use crate::internal::tests::utils::resources::{cpus_compact, ResBuilder};
    use crate::TaskId;
    use smallvec::smallvec;
    #[test]
    fn worker_load_check_lb() {
        let wr = WorkerResources {
            n_resources: vec![2, 10, 100, 5].into(),
        };
        let load = WorkerLoad::new(&wr);
        // load2 is almost saturated on resource 1 (9 of 10 used).
        let load2 = WorkerLoad {
            n_resources: vec![0, 9, 0, 0, 0, 0].into(),
            non_first_rq: Default::default(),
            round_robin_counter: 0,
        };
        let mut lb = ResourceRequestLowerBound::new();
        lb.include(&cpus_compact(2).add_all(1).finish());
        assert!(load.have_immediate_resources_for_lb(&lb, &wr));
        assert!(!load2.have_immediate_resources_for_lb(&lb, &wr));
        let mut lb = ResourceRequestLowerBound::new();
        lb.include(&cpus_compact(2).add(1, 2).add(2, 100).finish());
        assert!(load.have_immediate_resources_for_lb(&lb, &wr));
        assert!(!load2.have_immediate_resources_for_lb(&lb, &wr));
        let mut lb = ResourceRequestLowerBound::new();
        // Resource 4 does not exist on the worker at all.
        lb.include(&cpus_compact(2).add(4, 1).finish());
        assert!(!load.have_immediate_resources_for_lb(&lb, &wr));
        assert!(!load2.have_immediate_resources_for_lb(&lb, &wr));
        lb.include(&cpus_compact(2).add(2, 101).finish());
        assert!(!load.have_immediate_resources_for_lb(&lb, &wr));
        assert!(!load2.have_immediate_resources_for_lb(&lb, &wr));
        // A plain CPU request fits either load.
        lb.include(&cpus_compact(2).finish());
        assert!(load.have_immediate_resources_for_lb(&lb, &wr));
        assert!(load2.have_immediate_resources_for_lb(&lb, &wr));
    }
    #[test]
    fn worker_load_check_lb_with_variants() {
        let mut lb = ResourceRequestLowerBound::new();
        let rq1 = ResBuilder::default().add(0, 2).add(1, 2).finish();
        let rq2 = ResBuilder::default().add(0, 4).finish();
        let rqv = ResourceRequestVariants::new(smallvec![rq1, rq2]);
        lb.include_rqv(&rqv);
        assert_eq!(lb.request_set.len(), 2);
    }
    #[test]
    fn worker_load_variants() {
        let wr = WorkerResources {
            n_resources: vec![13, 4, 5].into(),
        };
        let mut load = WorkerLoad::new(&wr);
        let rq1 = ResBuilder::default().add(0, 2).add(1, 2).finish();
        let rq2 = ResBuilder::default().add(0, 4).finish();
        let rqv = ResourceRequestVariants::new(smallvec![rq1, rq2]);
        // First two tasks fit alternative 0, so non_first_rq stays empty.
        load.add_request(TaskId::new(1), &rqv, &wr);
        assert_eq!(load.n_resources, vec![2, 2, 0].into());
        assert!(load.non_first_rq.is_empty());
        load.add_request(TaskId::new(2), &rqv, &wr);
        assert_eq!(load.n_resources, vec![4, 4, 0].into());
        assert!(load.non_first_rq.is_empty());
        // Resource 1 is now exhausted: alternative 1 must be chosen.
        load.add_request(TaskId::new(3), &rqv, &wr);
        assert_eq!(load.n_resources, vec![8, 4, 0].into());
        assert_eq!(load.non_first_rq.len(), 1);
        assert_eq!(load.non_first_rq.get(&TaskId::new(3)), Some(&1));
        load.add_request(TaskId::new(4), &rqv, &wr);
        assert_eq!(load.n_resources, vec![12, 4, 0].into());
        assert_eq!(load.non_first_rq.len(), 2);
        assert_eq!(load.non_first_rq.get(&TaskId::new(4)), Some(&1));
        // Nothing fits immediately any more: round-robin picks either one.
        load.add_request(TaskId::new(5), &rqv, &wr);
        assert!(
            load.n_resources == vec![16, 4, 0].into() || load.n_resources == vec![14, 6, 0].into()
        );
        let resources = load.n_resources.clone();
        // Removal must subtract the amounts of the alternative actually used.
        load.remove_request(TaskId::new(3), &rqv, &wr);
        assert_eq!(
            load.n_resources,
            vec![resources[0.into()] - 4, resources[1.into()], 0].into()
        );
        assert!(load.non_first_rq.get(&TaskId::new(3)).is_none());
        load.remove_request(TaskId::new(1), &rqv, &wr);
        assert_eq!(
            load.n_resources,
            vec![resources[0.into()] - 6, resources[1.into()] - 2, 0].into()
        );
        assert!(load.non_first_rq.get(&TaskId::new(1)).is_none());
    }
}
| true
|
5d9b72702d043822412d2aff7ce96dc367bc7d73
|
Rust
|
taiki-e/polonius
|
/polonius-engine/src/output/initialization.rs
|
UTF-8
| 11,270
| 2.65625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::time::Instant;
use crate::facts::FactTypes;
use crate::output::{InitializationContext, Output};
use datafrog::{Iteration, Relation, RelationLeaper};
// This represents the output of an intermediate elaboration step (step 1).
// This represents the output of an intermediate elaboration step (step 1):
// the base path facts closed over the path-ancestry relation.
struct TransitivePaths<T: FactTypes> {
    path_moved_at: Relation<(T::Path, T::Point)>,
    path_assigned_at: Relation<(T::Path, T::Point)>,
    path_accessed_at: Relation<(T::Path, T::Point)>,
    // Maps every (sub)path to the variable it is rooted in.
    path_begins_with_var: Relation<(T::Path, T::Variable)>,
}
// Output of the CFG-propagation step: per-point initialization state plus
// detected move errors.
struct InitializationStatus<T: FactTypes> {
    var_maybe_partly_initialized_on_exit: Relation<(T::Variable, T::Point)>,
    move_error: Relation<(T::Path, T::Point)>,
}
// Result handed back to the caller: (var_maybe_partly_initialized_on_exit,
// move_errors).
pub(super) struct InitializationResult<T: FactTypes>(
    pub(super) Relation<(T::Variable, T::Point)>,
    pub(super) Relation<(T::Path, T::Point)>,
);
// Step 1: compute transitive closures of path operations. This would elaborate,
// for example, an access to x into an access to x.f, x.f.0, etc. We do this for:
// - access to a path
// - initialization of a path
// - moves of a path
// FIXME: transitive rooting in a variable (path_begins_with_var)
// Note that this step may not be entirely necessary!
/// Step 1 of the initialization analysis: closes the base path facts over
/// the path-ancestry relation via a datafrog fixpoint, so that an operation
/// on a path also applies to all of its descendant paths, and every path is
/// linked to the variable it is rooted in.
fn compute_transitive_paths<T: FactTypes>(
    child_path: Vec<(T::Path, T::Path)>,
    path_assigned_at_base: Vec<(T::Path, T::Point)>,
    path_moved_at_base: Vec<(T::Path, T::Point)>,
    path_accessed_at_base: Vec<(T::Path, T::Point)>,
    path_is_var: Vec<(T::Path, T::Variable)>,
) -> TransitivePaths<T> {
    let mut iteration = Iteration::new();
    let child_path: Relation<(T::Path, T::Path)> = child_path.into();
    let ancestor_path = iteration.variable::<(T::Path, T::Path)>("ancestor");
    // These are the actual targets:
    let path_moved_at = iteration.variable::<(T::Path, T::Point)>("path_moved_at");
    let path_assigned_at = iteration.variable::<(T::Path, T::Point)>("path_initialized_at");
    let path_accessed_at = iteration.variable::<(T::Path, T::Point)>("path_accessed_at");
    let path_begins_with_var = iteration.variable::<(T::Path, T::Variable)>("path_begins_with_var");
    // Seed the fixpoint variables with the base facts.
    // ancestor_path(Parent, Child) :- child_path(Child, Parent).
    ancestor_path.extend(child_path.iter().map(|&(child, parent)| (parent, child)));
    // path_moved_at(Path, Point) :- path_moved_at_base(Path, Point).
    path_moved_at.insert(path_moved_at_base.into());
    // path_assigned_at(Path, Point) :- path_assigned_at_base(Path, Point).
    path_assigned_at.insert(path_assigned_at_base.into());
    // path_accessed_at(Path, Point) :- path_accessed_at_base(Path, Point).
    path_accessed_at.insert(path_accessed_at_base.into());
    // path_begins_with_var(Path, Var) :- path_is_var(Path, Var).
    path_begins_with_var.insert(path_is_var.into());
    // Iterate to a fixpoint: each round propagates facts one ancestry level
    // further, until no relation changes.
    while iteration.changed() {
        // ancestor_path(Grandparent, Child) :-
        //    ancestor_path(Parent, Child),
        //    child_path(Parent, Grandparent).
        ancestor_path.from_join(
            &ancestor_path,
            &child_path,
            |&_parent, &child, &grandparent| (grandparent, child),
        );
        // moving a path moves its children
        // path_moved_at(Child, Point) :-
        //     path_moved_at(Parent, Point),
        //     ancestor_path(Parent, Child).
        path_moved_at.from_join(&path_moved_at, &ancestor_path, |&_parent, &p, &child| {
            (child, p)
        });
        // initialising x at p initialises all x:s children
        // path_assigned_at(Child, point) :-
        //     path_assigned_at(Parent, point),
        //     ancestor_path(Parent, Child).
        path_assigned_at.from_join(&path_assigned_at, &ancestor_path, |&_parent, &p, &child| {
            (child, p)
        });
        // accessing x at p accesses all x:s children at p (actually,
        // accesses should be maximally precise and this shouldn't happen?)
        // path_accessed_at(Child, point) :-
        //     path_accessed_at(Parent, point),
        //     ancestor_path(Parent, Child).
        path_accessed_at.from_join(&path_accessed_at, &ancestor_path, |&_parent, &p, &child| {
            (child, p)
        });
        // path_begins_with_var(Child, Var) :-
        //     path_begins_with_var(Parent, Var)
        //     ancestor_path(Parent, Child).
        path_begins_with_var.from_join(
            &path_begins_with_var,
            &ancestor_path,
            |&_parent, &var, &child| (child, var),
        );
    }
    TransitivePaths {
        path_assigned_at: path_assigned_at.complete(),
        path_moved_at: path_moved_at.complete(),
        path_accessed_at: path_accessed_at.complete(),
        path_begins_with_var: path_begins_with_var.complete(),
    }
}
// Step 2: Compute path initialization and deinitialization across the CFG.
/// Step 2: propagates may-initialized / may-uninitialized facts along the
/// CFG edges and reports accesses to possibly-moved paths as move errors.
///
/// Also dumps the intermediate per-point relations into `output` when
/// `output.dump_enabled` is set (debugging aid).
fn compute_move_errors<T: FactTypes>(
    ctx: TransitivePaths<T>,
    cfg_edge: &Relation<(T::Point, T::Point)>,
    output: &mut Output<T>,
) -> InitializationStatus<T> {
    let mut iteration = Iteration::new();
    // Variables
    // var_maybe_partly_initialized_on_exit(var, point): Upon leaving `point`,
    // `var` is partially initialized for some path through the CFG, that is
    // there has been an initialization of var, and var has not been moved in
    // all paths through the CFG.
    let var_maybe_partly_initialized_on_exit =
        iteration.variable::<(T::Variable, T::Point)>("var_maybe_partly_initialized_on_exit");
    // path_maybe_initialized_on_exit(path, point): Upon leaving `point`, the
    // move path `path` is initialized for some path through the CFG.
    let path_maybe_initialized_on_exit =
        iteration.variable::<(T::Path, T::Point)>("path_maybe_initialized_on_exit");
    // path_maybe_uninitialized_on_exit(Path, Point): There exists at least one
    // path through the CFG to Point such that `Path` has been moved out by the
    // time we arrive at `Point` without it being re-initialized for sure.
    let path_maybe_uninitialized_on_exit =
        iteration.variable::<(T::Path, T::Point)>("path_maybe_uninitialized_on_exit");
    // move_error(Path, Point): There is an access to `Path` at `Point`, but
    // `Path` is potentially moved (or never initialised).
    let move_error = iteration.variable::<(T::Path, T::Point)>("move_error");
    // Initial propagation of static relations
    // path_maybe_initialized_on_exit(path, point) :- path_assigned_at(path, point).
    path_maybe_initialized_on_exit.insert(ctx.path_assigned_at.clone());
    // path_maybe_uninitialized_on_exit(path, point) :- path_moved_at(path, point).
    path_maybe_uninitialized_on_exit.insert(ctx.path_moved_at.clone());
    while iteration.changed() {
        // path_maybe_initialized_on_exit(path, point2) :-
        //     path_maybe_initialized_on_exit(path, point1),
        //     cfg_edge(point1, point2),
        //     !path_moved_at(path, point2).
        path_maybe_initialized_on_exit.from_leapjoin(
            &path_maybe_initialized_on_exit,
            (
                cfg_edge.extend_with(|&(_path, point1)| point1),
                ctx.path_moved_at.extend_anti(|&(path, _point1)| path),
            ),
            |&(path, _point1), &point2| (path, point2),
        );
        // path_maybe_uninitialized_on_exit(path, point2) :-
        //     path_maybe_uninitialized_on_exit(path, point1),
        //     cfg_edge(point1, point2)
        //     !path_assigned_at(path, point2).
        path_maybe_uninitialized_on_exit.from_leapjoin(
            &path_maybe_uninitialized_on_exit,
            (
                cfg_edge.extend_with(|&(_path, point1)| point1),
                ctx.path_assigned_at.extend_anti(|&(path, _point1)| path),
            ),
            |&(path, _point1), &point2| (path, point2),
        );
        // var_maybe_partly_initialized_on_exit(var, point) :-
        //     path_maybe_initialized_on_exit(path, point).
        //     path_begins_with_var(path, var).
        var_maybe_partly_initialized_on_exit.from_leapjoin(
            &path_maybe_initialized_on_exit,
            ctx.path_begins_with_var.extend_with(|&(path, _point)| path),
            |&(_path, point), &var| (var, point),
        );
        // move_error(Path, TargetNode) :-
        //     path_maybe_uninitialized_on_exit(Path, SourceNode),
        //     cfg_edge(SourceNode, TargetNode),
        //     path_accessed_at(Path, TargetNode).
        move_error.from_leapjoin(
            &path_maybe_uninitialized_on_exit,
            (
                cfg_edge.extend_with(|&(_path, source_node)| source_node),
                ctx.path_accessed_at
                    .extend_with(|&(path, _source_node)| path),
            ),
            |&(path, _source_node), &target_node| (path, target_node),
        );
    }
    // Optionally record the per-point intermediate facts for debug dumps.
    if output.dump_enabled {
        for &(path, location) in path_maybe_initialized_on_exit.complete().iter() {
            output
                .path_maybe_initialized_on_exit
                .entry(location)
                .or_default()
                .push(path);
        }
        for &(path, location) in path_maybe_uninitialized_on_exit.complete().iter() {
            output
                .path_maybe_uninitialized_on_exit
                .entry(location)
                .or_default()
                .push(path);
        }
    }
    InitializationStatus {
        var_maybe_partly_initialized_on_exit: var_maybe_partly_initialized_on_exit.complete(),
        move_error: move_error.complete(),
    }
}
// Compute two things:
//
// - an over-approximation of the initialization of variables. This is used in
// the origin_live_on_entry computations to determine when a drop may happen; a
// definitely moved variable would not be actually dropped.
// - move errors.
//
// The process is split into two stages:
//
// 1. Compute the transitive closure of path accesses. That is, accessing `f.a`
// would access `f.a.b`, etc.
// 2. Use this to compute both paths that may be initialized and paths that may
// have been deinitialized, which in turn can be used to find move errors (an
// access to a path that may be deinitialized).
/// Entry point: runs both elaboration steps (transitive path closure, then
/// CFG propagation / move-error detection), logging the elapsed time of
/// each phase.
///
/// Returns `(var_maybe_partly_initialized_on_exit, move_error)`.
pub(super) fn compute<T: FactTypes>(
    ctx: InitializationContext<T>,
    cfg_edge: &Relation<(T::Point, T::Point)>,
    output: &mut Output<T>,
) -> InitializationResult<T> {
    let timer = Instant::now();
    // Phase 1: close base path facts over the path hierarchy.
    let transitive_paths = compute_transitive_paths::<T>(
        ctx.child_path,
        ctx.path_assigned_at_base,
        ctx.path_moved_at_base,
        ctx.path_accessed_at_base,
        ctx.path_is_var,
    );
    info!("initialization phase 1 completed: {:?}", timer.elapsed());
    // Phase 2: propagate along the CFG and detect move errors.
    let InitializationStatus {
        var_maybe_partly_initialized_on_exit,
        move_error,
    } = compute_move_errors::<T>(transitive_paths, cfg_edge, output);
    info!(
        "initialization phase 2: {} move errors in {:?}",
        move_error.elements.len(),
        timer.elapsed()
    );
    // Optionally record per-point variable-initialization facts for dumps.
    if output.dump_enabled {
        for &(var, location) in var_maybe_partly_initialized_on_exit.iter() {
            output
                .var_maybe_partly_initialized_on_exit
                .entry(location)
                .or_default()
                .push(var);
        }
    }
    InitializationResult(var_maybe_partly_initialized_on_exit, move_error)
}
| true
|
804a8f7e173680c37e2b9c1c80cafa340576b40d
|
Rust
|
Bugvi-Benjamin-M/Kattis
|
/rust/pot/src/main.rs
|
UTF-8
| 551
| 3.265625
| 3
|
[
"MIT"
] |
permissive
|
use std::io;
/// Reads one line from stdin and returns it, trailing newline included.
/// Panics if stdin cannot be read.
fn input() -> String {
    let mut line = String::new();
    io::stdin()
        .read_line(&mut line)
        .expect("Failed to read from stdin");
    line
}
/// Kattis "pot": each input line is a number whose last digit is the
/// exponent and whose remaining digits are the base; print the sum of
/// base^exponent over all lines.
fn main() {
    let n: u32 = input().trim().parse().expect("Failed to parse int");
    let mut total = 0;
    for _ in 0..n {
        let line = input();
        let line = line.trim();
        // Split the final digit (exponent) off the base.
        let (base, exp) = line.split_at(line.len() - 1);
        let base: u32 = base.parse().unwrap();
        let exp: u32 = exp.parse().unwrap();
        total += base.pow(exp);
    }
    println!("{}", total);
}
| true
|
fd4f5faf5c103e4bd656ef58faad40855d832034
|
Rust
|
totechite/annict-rs
|
/tests/readme_examples.rs
|
UTF-8
| 1,728
| 2.546875
| 3
|
[
"MIT"
] |
permissive
|
extern crate annis;
use annis::{AccessToken, AuthorizeUrl, Client, OAuth, Works::*};
use std::env;
#[test]
fn main() {
    // Mirrors the README walkthrough: build an authorize URL, exchange a
    // code for a token, then call the works endpoint with a real token
    // taken from the environment.
    let auth = OAuth::client_id(env::var("annict_client_id").unwrap());
    let _url = &auth
        .authorize_url()
        .redirect_uri("https://example.com")
        .scope("read+write")
        .build();
    // -> Browser access to this uri and Get a certification code.
    let _access_token = auth
        .access_token()
        .client_secret("client_secret_key")
        .code("certification code")
        .build();
    let client = Client::set_token(env::var("annict_access_token").unwrap());
    let works = annis::works().params(vec![(filter_title, "lain")]);
    // NOTE(review): requires network access and a valid token in the
    // `annict_access_token` env var — this is an integration test.
    let _json = client.call(works).unwrap();
    // assert_eq!(json["works"][0]["title"], "serial experiments lain".to_string());
}
#[test]
fn auth_requests() {
    // Checks that the builder API produces the same request objects as
    // constructing AuthorizeUrl / AccessToken by hand.
    let auth = OAuth::client_id(env::var("annict_client_id").unwrap());
    // Get Authorize URL
    let instant = auth.authorize_url().build();
    let manual = AuthorizeUrl {
        client_id: env::var("annict_client_id").unwrap(),
        // Defaults used by the builder: out-of-band redirect, read scope.
        redirect_uri: "urn:ietf:wg:oauth:2.0:oob".to_string(),
        scope: "read".to_string(),
    }
    .build();
    assert_eq!(instant, manual);
    // Get AccessToken
    let instant = auth
        .access_token()
        .client_secret("client_secret_key")
        .code("certification code")
        .build();
    let manual = AccessToken {
        client_id: env::var("annict_client_id").unwrap(),
        client_secret: "client_secret_key".to_string(),
        code: "certification code".to_string(),
        redirect_uri: "urn:ietf:wg:oauth:2.0:oob".into(),
    }
    .build();
    assert_eq!(instant, manual);
}
| true
|
873aae14e4cbef7769787ef3d77cb5684aff46dd
|
Rust
|
rapodaca/tinygraph
|
/src/graph/hash_graph.rs
|
UTF-8
| 12,382
| 3.328125
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use super::{ Graph, Error, Step };
/// A Graph backed by an adjacency map. Nodes will not necessarily be iterated
/// in numerical order, but all iteration orders are stable. As such,
/// HashGraph works well when extracting subgraphs from other graphs.
///
/// ```rust
/// use tinygraph::graph::{ Graph, HashGraph, Error, Step };
///
/// fn main() -> Result<(), Error> {
/// let c3 = HashGraph::from_traversal(0, vec![
/// Step::new(0, 1, false),
/// Step::new(1, 2, false),
/// Step::new(2, 0, true)
/// ])?;
///
/// assert_eq!(c3.nodes().to_vec(), vec![ 0, 1, 2 ]);
///
/// let result = HashGraph::from_traversal(0, vec![
/// Step::new(0, 1, false),
/// Step::new(1, 0, false)
/// ]);
///
/// assert_eq!(result, Err(Error::DuplicateEdge(1, 0)));
///
/// Ok(())
/// }
/// ```
///
#[derive(Debug, PartialEq)]
pub struct HashGraph {
    // Undirected adjacency map: each node id maps to its neighbor ids.
    // Every edge appears in both endpoints' neighbor lists.
    adjacency: HashMap<usize, Vec<usize>>,
    // Edges in the order they were added (each stored once, as given).
    edges: Vec<(usize, usize)>,
    // Node ids in first-seen order; kept so iteration order is stable
    // even though the HashMap's own order is not.
    nodes: Vec<usize>
}
impl HashGraph {
/// Builds from a traversal. Returns an error given:
/// - a Step source has not been seen before
/// - duplicate edge forward or reversed
pub fn from_traversal(
root: usize, steps: Vec<Step>
) -> Result<Self, Error> {
let mut adjacency = HashMap::new();
let mut edges = Vec::new();
let mut nodes = Vec::new();
adjacency.insert(root, vec![ ]);
nodes.push(root);
for step in steps {
let Step { sid, tid, cut } = step;
let neighbors = match adjacency.get_mut(&sid) {
Some(neighbors) => neighbors,
None => return Err(Error::MissingNode(sid))
};
neighbors.push(tid);
match adjacency.entry(tid) {
Entry::Occupied(occupied) => {
if cut {
if occupied.get().contains(&sid) {
return Err(Error::DuplicateEdge(sid, tid));
}
} else {
return Err(Error::DuplicateEdge(sid, tid));
}
},
Entry::Vacant(vacant) => {
vacant.insert(vec![ sid ]);
}
}
edges.push((sid, tid));
if !cut {
nodes.push(tid);
}
}
Ok(HashGraph { adjacency, edges, nodes })
}
/// Builds a node-induced subgraph from edges. Returns error given:
/// - duplicate edge forward or reversed
pub fn from_edges(edges: Vec<(usize, usize)>) -> Result<Self, Error> {
let mut nodes = Vec::new();
let mut adjacency: HashMap<usize, Vec<usize>> = HashMap::new();
for (sid, tid) in edges.iter() {
match adjacency.entry(*sid) {
Entry::Occupied(mut entry) => {
let neighbors = entry.get_mut();
if neighbors.contains(tid) {
return Err(Error::DuplicateEdge(*sid, *tid));
} else {
neighbors.push(*tid);
}
},
Entry::Vacant(entry) => {
entry.insert(vec![ *tid ]);
nodes.push(*sid);
}
}
match adjacency.entry(*tid) {
Entry::Occupied(mut entry) => {
let neighbors = entry.get_mut();
if neighbors.contains(sid) {
return Err(Error::DuplicateEdge(*sid, *tid));
} else {
neighbors.push(*sid);
}
},
Entry::Vacant(entry) => {
entry.insert(vec![ *sid ]);
nodes.push(*tid);
}
}
}
Ok(HashGraph { nodes, edges, adjacency })
}
}
impl Graph for HashGraph {
    fn is_empty(&self) -> bool {
        self.adjacency.is_empty()
    }
    // Number of nodes.
    fn order(&self) -> usize {
        self.adjacency.len()
    }
    // Number of edges.
    fn size(&self) -> usize {
        self.edges.len()
    }
    // Node ids in stable first-seen order.
    fn nodes(&self) -> &[usize] {
        &self.nodes[..]
    }
    // Neighbors of `id`, or MissingNode if `id` is not in the graph.
    fn neighbors(&self, id: usize) -> Result<&[usize], Error> {
        match self.adjacency.get(&id) {
            Some(neighbors) => Ok(&neighbors[..]),
            None => Err(Error::MissingNode(id))
        }
    }
    fn has_node(&self, id: usize) -> bool {
        self.adjacency.contains_key(&id)
    }
    fn degree(&self, id: usize) -> Result<usize, Error> {
        Ok(self.neighbors(id)?.len())
    }
    // Edges in insertion order, as originally given.
    fn edges(&self) -> &[(usize, usize)] {
        &self.edges[..]
    }
    // Errors with MissingNode for whichever endpoint is absent (source
    // checked first), otherwise reports membership of the edge.
    fn has_edge(&self, sid: usize, tid: usize) -> Result<bool, Error> {
        let neighbors = self.neighbors(sid)?;
        if self.adjacency.contains_key(&tid) {
            Ok(neighbors.contains(&tid))
        } else {
            Err(Error::MissingNode(tid))
        }
    }
}
#[cfg(test)]
mod from_adjacency {
use super::*;
#[test]
fn given_missing_source() {
let graph = HashGraph::from_traversal(2, vec![
Step::new(3, 4, false)
]);
assert_eq!(graph, Err(Error::MissingNode(3)));
}
#[test]
fn given_duplicate_target() {
let graph = HashGraph::from_traversal(2, vec![
Step::new(2, 5, false),
Step::new(2, 5, false)
]);
assert_eq!(graph, Err(Error::DuplicateEdge(2, 5)));
}
#[test]
fn given_duplicate_target_reversed() {
let graph = HashGraph::from_traversal(2, vec![
Step::new(2, 5, false),
Step::new(5, 2, false)
]);
assert_eq!(graph, Err(Error::DuplicateEdge(5, 2)));
}
#[test]
fn given_foo_back_edge_as_cut() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(1, 0, true)
]);
assert_eq!(graph, Err(Error::DuplicateEdge(1, 0)));
}
#[test]
fn is_emtpy() {
let graph = HashGraph::from_traversal(0, vec![ ]).unwrap();
assert_eq!(graph.is_empty(), false);
}
#[test]
fn order() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false)
]).unwrap();
assert_eq!(graph.order(), 2);
}
#[test]
fn order_given_cut() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(1, 2, false),
Step::new(2, 0, true)
]).unwrap();
assert_eq!(graph.order(), 3);
}
#[test]
fn size() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(0, 2, false)
]).unwrap();
assert_eq!(graph.size(), 2);
}
#[test]
fn size_given_cut() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(1, 2, false),
Step::new(2, 0, true)
]).unwrap();
assert_eq!(graph.size(), 3);
}
#[test]
fn nodes() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(0, 2, false)
]).unwrap();
assert_eq!(graph.nodes().to_vec(), vec![ 0, 1, 2 ]);
}
#[test]
fn nodes_given_cut() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(1, 2, false),
Step::new(2, 0, true)
]).unwrap();
assert_eq!(graph.nodes().to_vec(), vec![ 0, 1, 2 ]);
}
#[test]
fn neighbors_given_outside() {
let graph = HashGraph::from_traversal(0, vec![ ]).unwrap();
assert_eq!(graph.neighbors(1), Err(Error::MissingNode(1)));
}
#[test]
fn neighbors_given_p3_inner() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(1, 2, false)
]).unwrap();
assert_eq!(graph.neighbors(1).unwrap().to_vec(), vec![ 0, 2 ]);
}
#[test]
fn has_node_given_outside() {
let graph = HashGraph::from_traversal(0, vec![ ]).unwrap();
assert_eq!(graph.has_node(1), false);
}
#[test]
fn has_node_given_inside() {
let graph = HashGraph::from_traversal(0, vec![ ]).unwrap();
assert_eq!(graph.has_node(0), true);
}
#[test]
fn degree_given_outside() {
let graph = HashGraph::from_traversal(0, vec![ ]).unwrap();
assert_eq!(graph.degree(1), Err(Error::MissingNode(1)));
}
#[test]
fn edges() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(1, 2, false),
Step::new(2, 0, true)
]).unwrap();
assert_eq!(graph.edges().to_vec(), vec![
(0, 1),
(1, 2),
(2, 0)
]);
}
#[test]
fn has_edge_give_source_outside() {
let graph = HashGraph::from_traversal(0, vec![ ]).unwrap();
assert_eq!(graph.has_edge(1, 0), Err(Error::MissingNode(1)));
}
#[test]
fn has_edge_give_target_outside() {
let graph = HashGraph::from_traversal(0, vec![ ]).unwrap();
assert_eq!(graph.has_edge(0, 1), Err(Error::MissingNode(1)));
}
#[test]
fn has_edge_given_unconnected() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false),
Step::new(1, 2, false)
]).unwrap();
assert_eq!(graph.has_edge(0, 2), Ok(false));
}
#[test]
fn has_edge_given_connected() {
let graph = HashGraph::from_traversal(0, vec![
Step::new(0, 1, false)
]).unwrap();
assert_eq!(graph.has_edge(0, 1), Ok(true));
}
}
#[cfg(test)]
mod from_edges {
use super::*;
#[test]
fn given_duplicate_edge() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(0, 1)
]);
assert_eq!(graph, Err(Error::DuplicateEdge(0, 1)));
}
#[test]
fn given_duplicate_edge_reversed() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(1, 0)
]);
assert_eq!(graph, Err(Error::DuplicateEdge(1, 0)));
}
#[test]
fn is_empty() {
let graph = HashGraph::from_edges(vec![
(0, 1)
]).unwrap();
assert_eq!(graph.is_empty(), false);
}
#[test]
fn order() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(1, 2),
(2, 3)
]).unwrap();
assert_eq!(graph.order(), 4);
}
#[test]
fn size() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(1, 2),
(2, 3)
]).unwrap();
assert_eq!(graph.size(), 3);
}
#[test]
fn nodes() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(1, 2),
(2, 3)
]).unwrap();
assert_eq!(graph.nodes(), &[ 0, 1, 2, 3 ]);
}
#[test]
fn neighbors() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(1, 2),
(2, 3)
]).unwrap();
assert_eq!(graph.neighbors(1).unwrap(), &[ 0, 2 ]);
}
#[test]
fn has_node() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(1, 2),
(2, 3)
]).unwrap();
assert_eq!(graph.has_node(9), false);
}
#[test]
fn degree() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(1, 2),
(2, 3)
]).unwrap();
assert_eq!(graph.degree(1), Ok(2));
}
#[test]
fn edges() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(2, 3),
(1, 2)
]).unwrap();
assert_eq!(graph.edges(), &[
(0, 1),
(2, 3),
(1, 2)
]);
}
#[test]
fn has_edge() {
let graph = HashGraph::from_edges(vec![
(0, 1),
(1, 2),
(2, 3)
]).unwrap();
assert_eq!(graph.has_edge(2, 1), Ok(true));
}
}
| true
|
492eb6fa4dfc30a46beb46f9d5b4523122d14579
|
Rust
|
heliumbrain/axum
|
/examples/key-value-store/src/main.rs
|
UTF-8
| 4,081
| 2.734375
| 3
|
[
"MIT"
] |
permissive
|
//! Simple in-memory key/value store showing features of axum.
//!
//! Run with:
//!
//! ```not_rust
//! cargo run -p example-key-value-store
//! ```
use axum::{
body::Bytes,
extract::{ContentLengthLimit, Extension, Path},
handler::{delete, get, Handler},
http::StatusCode,
response::IntoResponse,
routing::BoxRoute,
Router,
};
use std::{
borrow::Cow,
collections::HashMap,
convert::Infallible,
net::SocketAddr,
sync::{Arc, RwLock},
time::Duration,
};
use tower::{BoxError, ServiceBuilder};
use tower_http::{
add_extension::AddExtensionLayer, auth::RequireAuthorizationLayer,
compression::CompressionLayer, trace::TraceLayer,
};
#[tokio::main]
async fn main() {
    // Builds the router, wires middleware, and serves on 127.0.0.1:3000.
    // Set the RUST_LOG, if it hasn't been explicitly defined
    if std::env::var("RUST_LOG").is_err() {
        std::env::set_var("RUST_LOG", "example_key_value_store=debug,tower_http=debug")
    }
    tracing_subscriber::fmt::init();
    // Build our application by composing routes
    let app = Router::new()
        .route(
            "/:key",
            // Add compression to `kv_get`
            get(kv_get.layer(CompressionLayer::new()))
                // But don't compress `kv_set`
                .post(kv_set),
        )
        .route("/keys", get(list_keys))
        // Nest our admin routes under `/admin`
        .nest("/admin", admin_routes())
        // Add middleware to all routes
        .layer(
            ServiceBuilder::new()
                // Shed load when overloaded rather than queueing unboundedly,
                // then cap concurrency and per-request time.
                .load_shed()
                .concurrency_limit(1024)
                .timeout(Duration::from_secs(10))
                .layer(TraceLayer::new_for_http())
                // Shared in-memory store handed to every handler.
                .layer(AddExtensionLayer::new(SharedState::default()))
                .into_inner(),
        )
        // Handle errors from middleware
        .handle_error(handle_error)
        .check_infallible();
    // Run our app with hyper
    let addr = SocketAddr::from(([127, 0, 0, 1], 3000));
    tracing::debug!("listening on {}", addr);
    axum::Server::bind(&addr)
        .serve(app.into_make_service())
        .await
        .unwrap();
}
// Shared across all handlers; RwLock allows concurrent readers.
type SharedState = Arc<RwLock<State>>;
#[derive(Default)]
struct State {
    // The in-memory key/value store backing the whole service.
    db: HashMap<String, Bytes>,
}
/// GET /:key — returns the stored bytes for `key`, or 404 if absent.
async fn kv_get(
    Path(key): Path<String>,
    Extension(state): Extension<SharedState>,
) -> Result<Bytes, StatusCode> {
    let db = &state.read().unwrap().db;
    if let Some(value) = db.get(&key) {
        // Bytes clones are cheap (reference-counted buffer).
        Ok(value.clone())
    } else {
        Err(StatusCode::NOT_FOUND)
    }
}
/// POST /:key — stores the request body (capped at ~5 MB) under `key`,
/// overwriting any previous value.
async fn kv_set(
    Path(key): Path<String>,
    ContentLengthLimit(bytes): ContentLengthLimit<Bytes, { 1024 * 5_000 }>, // ~5mb
    Extension(state): Extension<SharedState>,
) {
    state.write().unwrap().db.insert(key, bytes);
}
/// GET /keys — returns all stored keys, one per line (HashMap order,
/// i.e. unspecified).
async fn list_keys(Extension(state): Extension<SharedState>) -> String {
    let db = &state.read().unwrap().db;
    db.keys()
        .map(|key| key.to_string())
        .collect::<Vec<String>>()
        .join("\n")
}
/// Builds the `/admin` sub-router: bulk and single-key deletion, gated
/// behind a static bearer token.
fn admin_routes() -> Router<BoxRoute> {
    // DELETE /admin/keys — wipe the whole store.
    async fn delete_all_keys(Extension(state): Extension<SharedState>) {
        state.write().unwrap().db.clear();
    }
    // DELETE /admin/key/:key — remove a single entry (no-op if absent).
    async fn remove_key(Path(key): Path<String>, Extension(state): Extension<SharedState>) {
        state.write().unwrap().db.remove(&key);
    }
    Router::new()
        .route("/keys", delete(delete_all_keys))
        .route("/key/:key", delete(remove_key))
        // Require bearer auth for all admin routes
        .layer(RequireAuthorizationLayer::bearer("secret-token"))
        .boxed()
}
/// Maps middleware errors to HTTP responses: timeout -> 408,
/// load-shed overload -> 503, anything else -> 500.
fn handle_error(error: BoxError) -> Result<impl IntoResponse, Infallible> {
    if error.is::<tower::timeout::error::Elapsed>() {
        return Ok((StatusCode::REQUEST_TIMEOUT, Cow::from("request timed out")));
    }
    if error.is::<tower::load_shed::error::Overloaded>() {
        return Ok((
            StatusCode::SERVICE_UNAVAILABLE,
            Cow::from("service is overloaded, try again later"),
        ));
    }
    Ok((
        StatusCode::INTERNAL_SERVER_ERROR,
        Cow::from(format!("Unhandled internal error: {}", error)),
    ))
}
| true
|
5315560e13f0793ce00b2eec3b554bbc420678ec
|
Rust
|
GeReV/rainyday
|
/src/config.rs
|
UTF-8
| 1,523
| 3.140625
| 3
|
[
"MIT"
] |
permissive
|
use std::path::{Path, PathBuf};
const BACKGROUND_KEY: &str = "background";
/// Thin wrapper around an INI settings file identified by its path.
pub struct Config {
    // Filesystem path of the backing .ini file, stored as a String.
    path: String,
}
impl Config {
    /// Wraps the given path. Panics if the path is not valid UTF-8.
    pub fn new<P: AsRef<Path>>(path: P) -> Self {
        Config {
            path: path
                .as_ref()
                .to_str()
                .expect("Unable to convert path to str.")
                .to_string(),
        }
    }
    /// Reads the `background` key from the INI file.
    /// Returns None if the file cannot be loaded or the key is absent.
    pub fn background(&self) -> Option<PathBuf> {
        let ini = ini::Ini::load_from_file(&self.path);
        match ini {
            Ok(ini) => ini
                .get_from(None::<&str>, BACKGROUND_KEY)
                .map(PathBuf::from),
            Err(_) => None,
        }
    }
    /// Location of the configured background inside the local cache
    /// directory (same file name, under `backgrounds_directory`).
    pub fn cached_background(&self) -> Option<PathBuf> {
        // NOTE(review): `file_name()` unwrap panics if the configured
        // path ends in `..` — presumably never written that way.
        self.background()
            .map(|p| self.backgrounds_directory().join(p.file_name().unwrap()))
    }
    /// Rewrites the INI file with `background = <filename>`.
    /// Note: this replaces the whole file, dropping any other keys.
    pub fn set_background(&self, filename: &Path) -> std::io::Result<()> {
        let mut ini = ini::Ini::new();
        ini.with_general_section()
            .set(BACKGROUND_KEY, filename.to_str().unwrap());
        ini.write_to_file(&self.path)
    }
    /// `assets\textures` next to the running executable.
    /// NOTE(review): backslash separator is Windows-specific.
    pub fn backgrounds_directory(&self) -> PathBuf {
        std::env::current_exe()
            .unwrap()
            .parent()
            .unwrap()
            .join("assets\\textures")
    }
}
impl Default for Config {
    /// Uses `<executable name>.ini` next to the running binary.
    fn default() -> Self {
        let path = std::env::current_exe().unwrap().with_extension("ini");
        Config::new(path)
    }
}
| true
|
74ab66155c83ca9f82ee81411ae459b8f39aecec
|
Rust
|
rust-lang/regex
|
/regex-automata/src/util/memchr.rs
|
UTF-8
| 2,868
| 2.84375
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unicode"
] |
permissive
|
/*!
This module defines simple wrapper routines for the memchr functions from the
`memchr` crate. Basically, when the `memchr` crate is available, we use it,
otherwise we use a naive implementation which is still pretty fast.
*/
pub(crate) use self::inner::*;
// Fast path: forward the calls straight to the `memchr` crate when the
// `perf-literal-substring` feature is enabled.
#[cfg(feature = "perf-literal-substring")]
pub(super) mod inner {
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memchr(n1: u8, haystack: &[u8]) -> Option<usize> {
        memchr::memchr(n1, haystack)
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memchr2(n1: u8, n2: u8, haystack: &[u8]) -> Option<usize> {
        memchr::memchr2(n1, n2, haystack)
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memchr3(
        n1: u8,
        n2: u8,
        n3: u8,
        haystack: &[u8],
    ) -> Option<usize> {
        memchr::memchr3(n1, n2, n3, haystack)
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memrchr(n1: u8, haystack: &[u8]) -> Option<usize> {
        memchr::memrchr(n1, haystack)
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memrchr2(n1: u8, n2: u8, haystack: &[u8]) -> Option<usize> {
        memchr::memrchr2(n1, n2, haystack)
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memrchr3(
        n1: u8,
        n2: u8,
        n3: u8,
        haystack: &[u8],
    ) -> Option<usize> {
        memchr::memrchr3(n1, n2, n3, haystack)
    }
}
// Fallback path: plain linear scans used when the `memchr` crate is not
// available. Semantics match the crate: index of the first (or last, for
// the `memrchr*` variants) occurrence of any of the needle bytes.
#[cfg(not(feature = "perf-literal-substring"))]
pub(super) mod inner {
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memchr(n1: u8, haystack: &[u8]) -> Option<usize> {
        for (i, &byte) in haystack.iter().enumerate() {
            if byte == n1 {
                return Some(i);
            }
        }
        None
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memchr2(n1: u8, n2: u8, haystack: &[u8]) -> Option<usize> {
        for (i, &byte) in haystack.iter().enumerate() {
            if byte == n1 || byte == n2 {
                return Some(i);
            }
        }
        None
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memchr3(
        n1: u8,
        n2: u8,
        n3: u8,
        haystack: &[u8],
    ) -> Option<usize> {
        for (i, &byte) in haystack.iter().enumerate() {
            if byte == n1 || byte == n2 || byte == n3 {
                return Some(i);
            }
        }
        None
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memrchr(n1: u8, haystack: &[u8]) -> Option<usize> {
        // Scan from the back; slice iterators are double-ended.
        for (i, &byte) in haystack.iter().enumerate().rev() {
            if byte == n1 {
                return Some(i);
            }
        }
        None
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memrchr2(n1: u8, n2: u8, haystack: &[u8]) -> Option<usize> {
        for (i, &byte) in haystack.iter().enumerate().rev() {
            if byte == n1 || byte == n2 {
                return Some(i);
            }
        }
        None
    }
    #[cfg_attr(feature = "perf-inline", inline(always))]
    pub(crate) fn memrchr3(
        n1: u8,
        n2: u8,
        n3: u8,
        haystack: &[u8],
    ) -> Option<usize> {
        for (i, &byte) in haystack.iter().enumerate().rev() {
            if byte == n1 || byte == n2 || byte == n3 {
                return Some(i);
            }
        }
        None
    }
}
| true
|
8df952f9e29996d0e4dc20e152d76b86b55f5da3
|
Rust
|
forkkit/viska
|
/lib/models/src/server/mod.rs
|
UTF-8
| 1,119
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
use crate::transport::{RequestMsg, ResponseMsg, TransportMsg};
use common::bytes::Bytes;
use std::net::SocketAddr;
/// A raw UDP datagram paired with the remote peer it came from / goes to.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct UdpTuple {
    // Raw datagram payload.
    pub bytes: Bytes,
    // Remote socket address.
    pub peer: SocketAddr,
}
// (payload, peer) tuple -> UdpTuple, field by field.
impl From<(Bytes, SocketAddr)> for UdpTuple {
    fn from(tuple: (Bytes, SocketAddr)) -> Self {
        Self {
            bytes: tuple.0,
            peer: tuple.1,
        }
    }
}
// Inverse of the tuple conversion above.
impl From<UdpTuple> for (Bytes, SocketAddr) {
    fn from(udp_tuple: UdpTuple) -> Self {
        (udp_tuple.bytes, udp_tuple.peer)
    }
}
// Serializes a SIP request message into raw bytes for the wire.
impl From<RequestMsg> for UdpTuple {
    fn from(from: RequestMsg) -> Self {
        UdpTuple {
            bytes: from.sip_request.into(),
            peer: from.peer,
        }
    }
}
// Serializes a SIP response message into raw bytes for the wire.
impl From<ResponseMsg> for UdpTuple {
    fn from(from: ResponseMsg) -> Self {
        Self {
            bytes: from.sip_response.into(),
            peer: from.peer,
        }
    }
}
// Serializes a generic transport-level SIP message into raw bytes.
impl From<TransportMsg> for UdpTuple {
    fn from(from: TransportMsg) -> Self {
        UdpTuple {
            bytes: from.sip_message.into(),
            peer: from.peer,
        }
    }
}
| true
|
c459129ef8d3feffe04661ca0c0d97d6e29cec08
|
Rust
|
gahag/slizzy
|
/src/modules/bandcamp/scraper/mod.rs
|
UTF-8
| 2,720
| 2.78125
| 3
|
[] |
permissive
|
#[cfg(test)]
mod tests;
use std::fmt;
use serde::Deserialize;
use crate::{
track,
web::scraping::{Find, Html, Text},
};
pub use crate::web::scraping::Error;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Data {
    /// This can be the track id or title, depending on the label.
    pub track: Result<Box<str>, Error>,
    // Track duration; each field carries its own scraping error so one
    // failed selector does not discard the other value.
    pub duration: Result<track::Duration, Error>,
}
/// Scrapes a Bandcamp track page: extracts the track title/id and the
/// duration, keeping per-field errors instead of failing as a whole.
pub fn scrap(doc: &Html) -> Data {
    log::trace!("scraping html: {:#?}", doc);
    Data {
        track: scrap_track(doc),
        duration: scrap_duration(doc)
    }
}
/// The subset of the page's `additionalProperty` array that we care about.
#[derive(Debug, Default)]
struct AdditionalProperties {
    // Track duration in seconds, if the property was present.
    duration_secs: Option<f64>,
}
/// Shape of the page's embedded `application/ld+json` blob (only the
/// fields we consume).
#[derive(Debug, Deserialize)]
struct ApplicationData {
    #[serde(alias = "additionalProperty")]
    #[serde(deserialize_with = "deserialize_properties")]
    additional_properties: AdditionalProperties,
}
/// Custom serde deserializer for the `additionalProperty` array: picks out
/// the `duration_secs` float property and silently skips every entry it
/// does not recognize (hence "SkipInvalid").
fn deserialize_properties<'de, D>(deserializer: D) -> Result<AdditionalProperties, D::Error>
where
    D: serde::Deserializer<'de>
{
    struct SkipInvalid;
    impl<'de> serde::de::Visitor<'de> for SkipInvalid {
        type Value = AdditionalProperties;
        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("additionalProperty array")
        }
        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
        where
            A: serde::de::SeqAccess<'de>,
        {
            let mut properties = AdditionalProperties::default();
            // Untagged: tries FloatProp first; anything that doesn't match
            // falls into `None` and is ignored.
            #[derive(Deserialize)]
            #[serde(untagged)]
            enum Property<'a> {
                FloatProp {
                    name: &'a str,
                    value: f64,
                },
                None(serde::de::IgnoredAny),
            }
            while let Some(item) = seq.next_element::<Property>()? {
                match item {
                    Property::FloatProp { name: "duration_secs", value } => {
                        properties.duration_secs = Some(value);
                    }
                    _ => { }
                }
            }
            Ok(properties)
        }
    }
    deserializer.deserialize_seq(SkipInvalid)
}
/// Extracts the track duration from the page's `application/ld+json`
/// script tag, validating that it fits a `u16` number of seconds.
fn scrap_duration(doc: &Html) -> Result<track::Duration, Error> {
    let json = doc
        .find("script[type = 'application/ld+json']")?
        .text_first()?;
    let application_data: ApplicationData = serde_json
        ::from_str(json)
        .map_err(
            |error| Error::Format(
                format!("failed to parse json: {}", error).into()
            )
        )?;
    let duration_secs = application_data
        .additional_properties
        .duration_secs
        .ok_or_else(
            || Error::Format("missing duration_secs property".into())
        )?;
    // Reject NaN / negative / oversized values before the lossy cast below.
    if !(u16::MIN as f64 ..= u16::MAX as f64).contains(&duration_secs) {
        return Err(
            Error::Format(
                format!("duration out of range: {}", duration_secs).into()
            )
        )
    }
    // Safe: range-checked just above. Fractional seconds are truncated.
    let seconds = duration_secs as u16;
    Ok(
        track::Duration::new(0, seconds)
    )
}
/// Extracts the track title from the page's `h2.trackTitle` heading.
fn scrap_track(doc: &Html) -> Result<Box<str>, Error> {
    let track = doc
        .find("h2.trackTitle")?
        .text_first()?
        .into();
    Ok(track)
}
| true
|
f702fb97f74768701d24a87ec7caed6690a3df74
|
Rust
|
PainsPerdus/gboi-kirby
|
/podpacker/src/main.rs
|
UTF-8
| 5,815
| 3.046875
| 3
|
[] |
no_license
|
mod ogmo;
use ogmo::Level as OgmoLevel;
use ogmo::{BackgroundLayer, EntityLayer, LevelValues};
use clap::{App, Arg};
use std::ffi::OsStr;
use std::fs;
use std::io::{self};
use std::path::PathBuf;
use std::fmt::Write as FmtWrite;
// To parse an Ogmo level into a chunk:
//
// 1) Process "values" from the [Ogmo]Level - done automatically with serde.
// 2) Process the "background" TileLayer
// 3) Process the "enemies" layer,
// taking care to adapt the coordinates (must be divided by 8),
// and overwriting preexisting tiles.
/// Flattens an Ogmo level into an 8x8 row-major tile-id grid: starts from
/// the background layer and overwrites cells with entity ids (entity pixel
/// coordinates are converted to tile coordinates by dividing by 8).
fn to_tiles(lvl: &OgmoLevel) -> Vec<i32> {
    // I tried to use tuple destructuring here, but the refs made my life too difficult...
    let bg: &BackgroundLayer = &lvl.layers.0;
    let mut tile_ids = bg.data.clone();
    let ent_layer: &EntityLayer = &lvl.layers.1;
    for entity in &ent_layer.entities {
        // Entity coordinates are in pixels; tiles are 8x8 pixels.
        let (i, j) = (entity.x / 8, entity.y / 8);
        // Printing to stderr allows use to reserve stdout for the binary output.
        /*eprintln!(
            "Found entity with id {} at coords ({}, {})",
            entity.values.id, i, j
        );*/
        // Row-major index into the 8-wide grid.
        let tile_idx = j * 8 + i;
        //eprintln!("Writing to tile index {}", tile_idx);
        tile_ids[(tile_idx) as usize] = entity.values.id as i32;
    }
    tile_ids
}
/// Returns true iff `tile_id` is non-negative and representable in `n`
/// unsigned bits.
///
/// The previous `1 << n` bound overflowed `i32` for `n >= 31` (a panic in
/// debug builds); any non-negative `i32` fits in 31 or more bits, so that
/// case is answered directly without shifting.
fn fits_in_n_ubits(tile_id: i32, n: u8) -> bool {
    if tile_id < 0 {
        return false;
    }
    // `1u32 << n` is only evaluated for n < 31, so it cannot overflow.
    n >= 31 || (tile_id as u32) < (1u32 << n)
}
/// Packs 64 4-bit tile ids plus the level flags into the 33-byte chunk
/// format: bytes 0..32 hold two tiles each (first tile in the high nibble),
/// byte 32 holds the door/corner flags and the 5-bit clutter level.
/// Panics (assert) if any tile id or the clutter level is out of range.
fn to_binary(tile_ids: &[i32], values: LevelValues) -> [u8; 33] {
    let mut binary_buffer = [0u8; 33];
    // Handle the first 32 bytes: tiles.
    let mut i = 0;
    for tile_pair in tile_ids.chunks(2) {
        if let &[tile_1, tile_2] = tile_pair {
            //eprintln!("Handling tiles: {}, {}", tile_1, tile_2);
            assert!(fits_in_n_ubits(tile_1, 4));
            assert!(fits_in_n_ubits(tile_2, 4));
            // First tile of the pair goes in the high nibble.
            binary_buffer[i] = ((tile_1 as u8) << 4) | (tile_2 as u8);
            i += 1;
        } else {
            // 64 is an even number, so...
            unreachable!()
        }
    }
    // Last byte: flags and clutter level.
    // Bit 7: top door, bit 6: left door, bit 5: corner, bits 0-4: clutter.
    binary_buffer[32] =
        (values.top_door as u8) << 7 | (values.left_door as u8) << 6 | (values.corner as u8) << 5;
    assert!(fits_in_n_ubits(values.clutter_level as i32, 5));
    binary_buffer[32] |= values.clutter_level as u8;
    binary_buffer
}
/// Generate the C header (`chunksN.h`) that declares the packed chunk array
/// and its element count, guarded by a per-index include guard.
fn header(chunk_count: usize, index: &str) -> String {
    // XXX: I feel like we should include types.h, but img2gb doesn't do it.
    // The emitted C lines are intentionally unindented inside the literal.
    format!(
        "// This file was generated by podpacker, DO NOT EDIT
#ifndef _CHUNKS{0}_H
#define _CHUNKS{0}_H
#define CHUNK_COUNT{0} {1}
extern const UINT8 CHUNKS{0}[CHUNK_COUNT{0}][33];
#endif
",
        index,
        chunk_count
    )
}
/// Render the packed chunks as a C source file defining the `CHUNKSN`
/// array declared by the matching header.
fn array_decl(hex_lvls: Vec<String>, index: &str) -> String {
    // Each chunk becomes one brace-wrapped initializer row.
    // ('{{' is how you escape a curly bracket in a format string.)
    let inner: String = hex_lvls
        .iter()
        .map(|s| format!("{{ {} }},\n", s))
        .collect();
    format!(
        "// This file was auto-generated by podpacker. DO NOT EDIT.
#include <types.h>
#include \"chunks{0}.h\"
const UINT8 CHUNKS{0}[CHUNK_COUNT{0}][33] = {{ {1} }};",
        index, inner
    )
}
/// Entry point: read every Ogmo `.json` level in JSON_DIR, sort the levels
/// by clutter level, pack each into a 33-byte chunk, and write the
/// `chunksN.h` / `chunksN.c` pair into OUTPUT_DIR.
fn main() -> io::Result<()> {
    let matches = App::new("podpacker - Pineapple of Doom Map Packer")
        .version("0.2")
        .author("Yohaï-Eliel BERREBY <yohaiberreby@gmail.com>")
        .about(
            // NOTE(review): the `\` line continuation also strips the
            // following whitespace, so the help text reads
            // "packedbinary representation." — probably wants a space.
            "Converts Ogmo Map Editor 3 JSON levels to Pineapple of Doom's packed\
            binary representation.",
        )
        .arg(
            Arg::with_name("JSON_DIR")
                .help("Sets the directory from which to extract levels")
                .required(true)
                .index(1),
        )
        .arg(
            Arg::with_name("INDEX")
                .help("Index of the chunks")
                .required(true)
                .index(2),
        )
        .arg(
            Arg::with_name("OUTPUT_DIR")
                .help("Sets the directory where to write the .c and .h (default: .)")
                .required(false)
                .index(3),
        )
        // NOTE(review): this flag is declared but never read below; only
        // the C output path is implemented.
        .arg(
            Arg::with_name("output-format")
                .help("Whether to output the binary data as-is, byte by byte, or generate a C file")
                .long("output-format")
                .possible_values(&["raw", "c"])
                .takes_value(true),
        )
        .get_matches();
    // `unwrap` is safe: JSON_DIR is a required positional argument.
    let search_dir = matches.value_of("JSON_DIR").unwrap();
    let mut lvls = Vec::new();
    // Parse every *.json file in the search directory as an Ogmo level.
    for entry in fs::read_dir(search_dir)? {
        let entry = entry?;
        let path = entry.path();
        if path.extension() == Some(OsStr::new("json")) {
            let s = fs::read_to_string(path)?;
            // NOTE(review): panics on malformed JSON rather than reporting
            // which file failed.
            let level: OgmoLevel = serde_json::from_str(&s).unwrap();
            lvls.push(level);
        }
    }
    // In-place sort.
    lvls.sort_by(|a, b| {
        a.values
            .clutter_level
            .partial_cmp(&b.values.clutter_level)
            .unwrap()
    });
    // Pack each level into its 33-byte binary chunk.
    let bin_lvls: Vec<_> = lvls
        .iter()
        .map(|lvl| {
            let tiles = to_tiles(&lvl);
            to_binary(&tiles, lvl.values)
        })
        .collect();
    // Render each chunk as a comma-separated list of hex byte literals.
    let hex_lvls: Vec<String> = bin_lvls
        .into_iter()
        .map(|bin_buf| {
            bin_buf
                .iter()
                .map(|byte| format!("0x{:X?}", byte))
                .collect::<Vec<_>>()
                .join(", ")
        })
        .collect();
    let index = matches.value_of("INDEX").unwrap();
    let output_dir = matches.value_of("OUTPUT_DIR").unwrap_or(".");
    // Write chunksN.h then reuse the same path for chunksN.c.
    let mut path = PathBuf::new();
    path.push(output_dir);
    path.push(format!("chunks{}", index));
    path.set_extension("h");
    fs::write(&path, header(hex_lvls.len(), index))?;
    path.set_extension("c");
    fs::write(&path, array_decl(hex_lvls, index))?;
    Ok(())
}
| true
|
61155f20cb22ef46a100c3cc4402562fcc4e6da5
|
Rust
|
deseven/pull-daemon
|
/pd.rs
|
UTF-8
| 2,242
| 3.203125
| 3
|
[] |
no_license
|
use std::net::{TcpStream, TcpListener};
use std::io::{Read, Write};
use std::thread;
use std::env;
use std::process::{Command,Stdio};
/// Read up to 4 KiB from the client and return the received bytes,
/// lossily decoded as UTF-8 and lowercased. Returns an empty string on
/// read error.
fn handle_read(mut stream: &TcpStream) -> String {
    let mut buf = [0u8; 4096];
    let mut request = String::from("");
    match stream.read(&mut buf) {
        Ok(n) => {
            // NOTE(review): peer_addr() can fail (e.g. peer already gone),
            // in which case this unwrap panics the handler thread.
            println!("Request from {}!", stream.peer_addr().unwrap());
            // BUG FIX: only the `n` bytes actually received belong to the
            // request. The original decoded the whole 4096-byte buffer,
            // appending thousands of trailing NUL characters to the
            // returned string.
            request = String::from_utf8_lossy(&buf[..n]).to_lowercase();
        }
        Err(e) => println!("Stream read error: {}", e),
    }
    request
}
/// Send `response` to the client, logging (but not propagating) failures.
/// Takes ownership of the stream, closing the connection when it drops.
fn handle_write(mut stream: TcpStream, response: &[u8]) {
    //let response = b"HTTP/1.1 200 OK\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n{res:\"ok\"}\r\n";
    // BUG FIX: `write` may report success after sending only a prefix of
    // the buffer; `write_all` retries until everything is sent.
    match stream.write_all(response) {
        Ok(_) => {} //println!("Response to {} sent!",stream.peer_addr().unwrap()),
        Err(e) => println!("Failed sending response: {}", e),
    }
}
/// Serve one HTTP connection: on `GET /pull` or `POST /pull`, reply 200 and
/// run the shell command supplied as the second CLI argument; otherwise
/// reply 403.
fn handle_client(stream: TcpStream) {
    let args: Vec<String> = env::args().collect();
    // NOTE(review): panics if the program was started without the second
    // argument — confirm the daemon is always launched as `pd <port> <cmd>`.
    let action = &args[2];
    let request = handle_read(&stream);
    if request.starts_with("get /pull") == true || request.starts_with("post /pull") == true {
        println!("Executing `{}`", action);
        // Acknowledge before running the (possibly slow) command so the
        // client is not left waiting for it to finish.
        handle_write(stream, b"HTTP/1.1 200 OK\r\nContent-Type: application/json; charset=UTF-8\r\n\r\n{res:\"ok\"}\r\n");
        // `action` is passed verbatim to `sh -c`. It comes from the
        // operator's argv, not from the request, so remote callers cannot
        // inject commands — but anyone who can reach the port can trigger it.
        let proc = Command::new("/bin/sh").args(&["-c", action]).stdin(Stdio::null()).spawn().expect("failed to run command");
        let output = proc.wait_with_output().expect("failed to wait on command");
        // A failing command takes down this handler thread via the assert.
        assert!(output.status.success());
    } else {
        handle_write(stream, b"HTTP/1.1 403 Forbidden\r\nContent-Type: application/json; charset=UTF-8\r\n\r\n{res:\"failed\"}\r\n");
    }
}
/// Entry point: bind a TCP listener on 0.0.0.0:<port> (first CLI argument)
/// and spawn one thread per incoming connection.
fn main() {
    let args: Vec<String> = env::args().collect();
    // NOTE(review): panics when no port argument is given.
    let port = &args[1];
    let listener = TcpListener::bind(format!("{}:{}", "0.0.0.0", port)).unwrap();
    println!("Listening on 0.0.0.0:{}", port);
    for stream in listener.incoming() {
        match stream {
            Ok(stream) => {
                // One detached thread per connection; handles are not joined.
                thread::spawn(|| {
                    handle_client(stream)
                });
            }
            Err(e) => {
                // A failed accept is logged and the loop continues.
                println!("Unable to connect: {}", e);
            }
        }
    }
}
| true
|
c2ec37988fc91dda2f244c4a849018bc4f33bcd9
|
Rust
|
tnactim/functions
|
/src/main.rs
|
UTF-8
| 1,040
| 4.46875
| 4
|
[] |
no_license
|
/// Tutorial entry point: exercises parameterized functions and functions
/// with return values.
fn main() {
    println!("Hello, world!");
    another_function(5, 6.0);
    // `five()` returns a value via its trailing expression.
    let x = five();
    println!("The value of x is: {}", x);
    println!("x + 1 is: {}", plus_one(x));
}
// must declare type of each parameter in function signature
fn another_function( x: i32, y: f64 ) {
println!( "The value of x is: {}", x );
println!( "The value of y is: {}", y );
// cannot use statements as expressions
// let x = (let y = 6) or
// x = y = 6 are not allowed
// expressions evaluate to something, and can be a part of a statement
// e.g. calling a function is an expression, same with a macro or
// new scopes with a block {}
let y = {
let x = 3;
x + 1 // no ; at the end of an expression
}; // (would turn into statement)
println!( "The value of y is: {}", y );
}
// functions with Return Values
// don't name return values, but declare their type after an arrow ->
fn five() -> i32 {
5
}
fn plus_one( x: i32 ) -> i32 {
x + 1
}
| true
|
3cfe6d25a07788301d321948a28807dcf2468940
|
Rust
|
runfalk/weedesktop
|
/src/weechat/callbacks.rs
|
UTF-8
| 2,539
| 2.640625
| 3
|
[
"MIT"
] |
permissive
|
use super::{Buffer, CallResult, Plugin, Result};
use std::ffi::{c_void, CStr};
/// Unwrap a `Result` inside an `extern "C"` callback: on `Err`, return
/// Weechat's error code to the caller instead of panicking across the FFI
/// boundary (unwinding into C is undefined behaviour).
macro_rules! try_unwrap {
    ($expr:expr) => {
        match $expr {
            Ok(x) => x,
            Err(_) => return ::ffi::WEECHAT_RC_ERROR,
        }
    };
}
pub type CommandHook = fn(&Plugin, buffer: Buffer, cmd: &str, args: Vec<&str>) -> CallResult;
pub type TimerHook = fn(&Plugin, i32) -> CallResult;
/// Copy `callback` into a `malloc`-allocated blob and return the pointer.
///
/// The blob is later handed to Weechat as hook `data`; Weechat `free`s it
/// when the plugin tears down, which is why `libc::malloc` is used instead
/// of a Rust allocation.
///
/// Returns an error if the allocation fails (null pointer).
pub fn malloc_callback<T>(callback: T) -> Result<*mut T> {
    // Allocate a blob big enough to hold a pointer to a function. This will be
    // used to allow hook_timer_callback to dispatch the callback to the given
    // TimerHook. We must use malloc since Weechat will automatically free the
    // pointer we give when the plugin is tearing down
    let callback_ptr = try_ptr!(unsafe { libc::malloc(std::mem::size_of::<T>()) as *mut T });
    // BUG FIX: `*callback_ptr = callback` would first drop the garbage "old
    // value" sitting in the uninitialized malloc'ed memory, which is
    // undefined behaviour for any `T` with a `Drop` impl (harmless only by
    // luck for plain fn pointers). `write` moves the value in without
    // reading or dropping the destination.
    // SAFETY: `callback_ptr` is non-null (checked by try_ptr!) and points
    // to at least `size_of::<T>()` bytes of freshly allocated memory.
    unsafe {
        callback_ptr.write(callback);
    }
    Ok(callback_ptr)
}
/// `extern "C"` trampoline for Weechat command hooks.
///
/// `data` is expected to hold a `*mut CommandHook` written by
/// `malloc_callback`; the C argv (arg 0 is the command name) is converted
/// to `&str`s before dispatching to the stored Rust callback. Any failure
/// maps to `WEECHAT_RC_ERROR`.
pub extern "C" fn hook_command(
    ptr: *const c_void,
    data: *mut c_void,
    buffer: *mut ::ffi::t_gui_buffer,
    argc: i32,
    argv: *mut *mut i8,
    _argv_eol: *mut *mut i8,
) -> i32 {
    // Guard against a missing callback blob or an empty argv.
    if data.is_null() || argc < 1 {
        return ::ffi::WEECHAT_RC_ERROR;
    }
    let plugin = Plugin::new(ptr as *mut ::ffi::t_weechat_plugin);
    let hdata = try_unwrap!(plugin.hdata_from_ptr("buffer", buffer as *mut c_void));
    let buffer = try_unwrap!(Buffer::try_from_hdata(hdata));
    // Assumes (per the Weechat C API) argv holds at least `argc` valid
    // NUL-terminated strings — confirm against the plugin API docs.
    let cmd = try_unwrap!(unsafe { CStr::from_ptr(*argv).to_str() });
    // Since the first arg is the command name we start at 1 here
    let mut args: Vec<&str> = Vec::with_capacity((argc - 1) as usize);
    for i in 1..(argc as isize) {
        match unsafe { CStr::from_ptr(*argv.offset(i)).to_str() } {
            Ok(s) => args.push(s),
            // Non-UTF-8 argument: abort the whole command.
            Err(_) => return ::ffi::WEECHAT_RC_ERROR,
        };
    }
    // Assumes `data` was written by `malloc_callback::<CommandHook>` at
    // hook-registration time.
    let callback = unsafe { *(data as *mut CommandHook) };
    match callback(&plugin, buffer, cmd, args) {
        Ok(_) => ::ffi::WEECHAT_RC_OK,
        Err(_) => ::ffi::WEECHAT_RC_ERROR,
    }
}
/// `extern "C"` trampoline for Weechat timer hooks: recovers the stored
/// `TimerHook` from `data` and invokes it with the remaining-call count.
pub extern "C" fn hook_timer(ptr: *const c_void, data: *mut c_void, remaining_calls: i32) -> i32 {
    if data.is_null() {
        return ::ffi::WEECHAT_RC_ERROR;
    }
    // Assumes `data` was written by `malloc_callback::<TimerHook>` at
    // hook-registration time.
    let callback = unsafe { *(data as *mut TimerHook) };
    match callback(
        &Plugin::new(ptr as *mut ::ffi::t_weechat_plugin),
        remaining_calls,
    ) {
        Ok(_) => ::ffi::WEECHAT_RC_OK,
        Err(_) => ::ffi::WEECHAT_RC_ERROR,
    }
}
| true
|
8394663eabc1b9fd51a6c120abfa01d8634711f1
|
Rust
|
emirayka/nia_events
|
/src/listeners/listener/listener_settings_builder.rs
|
UTF-8
| 1,320
| 2.546875
| 3
|
[
"MIT"
] |
permissive
|
use crate::enums::Key;
use crate::{DeviceId, KeyId, ListenerSettings};
use crate::{DeviceInfo, KeyChordProducerSettings};
/// Builder for `ListenerSettings`: collects input devices and modifier keys
/// before constructing the key-chord producer configuration.
pub struct ListenerSettingsBuilder {
    // Devices to listen on (path + numeric id).
    devices: Vec<DeviceInfo>,
    // Keys treated as chord modifiers.
    modifiers: Vec<Key>,
}
impl ListenerSettingsBuilder {
    /// Create an empty builder with no devices and no modifiers.
    pub fn new() -> ListenerSettingsBuilder {
        ListenerSettingsBuilder {
            devices: Vec::new(),
            modifiers: Vec::new(),
        }
    }
    /// Register an input device by filesystem path and numeric id.
    pub fn add_device(mut self, device_path: String, device_id: u16) -> ListenerSettingsBuilder {
        let device_info = DeviceInfo::new(DeviceId::new(device_id), device_path);
        self.devices.push(device_info);
        self
    }
    /// Register a modifier identified by key code only (any device).
    pub fn add_modifier_1(mut self, key_code: u16) -> ListenerSettingsBuilder {
        let modifier = Key::Key1(KeyId::new(key_code));
        self.modifiers.push(modifier);
        self
    }
    /// Register a modifier bound to a specific device + key code pair.
    pub fn add_modifier_2(mut self, device_id: u16, key_code: u16) -> ListenerSettingsBuilder {
        let modifier = Key::Key2(DeviceId::new(device_id), KeyId::new(key_code));
        self.modifiers.push(modifier);
        self
    }
    /// Consume the builder and produce the final `ListenerSettings`.
    pub fn build(self) -> ListenerSettings {
        let key_chord_producer_settings =
            KeyChordProducerSettings::new(self.devices, self.modifiers);
        ListenerSettings::new(key_chord_producer_settings)
    }
}
| true
|
cbc933c625a610625bc1cebe7a610622d8076cca
|
Rust
|
mwatts/dyon
|
/src/mat4.rs
|
UTF-8
| 823
| 2.78125
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use embed::{ConvertMat4, PopVariable, PushVariable};
use {Runtime, Variable};
/// Wraps a 4D matrix for easier embedding with Dyon.
#[derive(Debug, Copy, Clone)]
pub struct Mat4(pub [[f32; 4]; 4]);
// Conversion to/from the raw row-major array form Dyon works with.
impl ConvertMat4 for Mat4 {
    fn from(val: [[f32; 4]; 4]) -> Self {
        Mat4(val)
    }
    fn to(&self) -> [[f32; 4]; 4] {
        self.0
    }
}
// Extract a Mat4 from a Dyon `Variable`, or report the expected type.
impl PopVariable for Mat4 {
    fn pop_var(rt: &Runtime, var: &Variable) -> Result<Self, String> {
        if let Variable::Mat4(ref v) = *var {
            // `**v` copies the boxed array out of the Variable.
            Ok(Mat4(**v))
        } else {
            Err(rt.expected(var, "mat4"))
        }
    }
}
// Wrap a Mat4 back into a Dyon `Variable` (boxed, as Dyon stores it).
impl PushVariable for Mat4 {
    fn push_var(&self) -> Variable {
        Variable::Mat4(Box::new(self.0))
    }
}
// Plain Rust-side conversion from the raw array type.
impl From<[[f32; 4]; 4]> for Mat4 {
    fn from(val: [[f32; 4]; 4]) -> Mat4 {
        Mat4(val)
    }
}
| true
|
32e2bf2b2dfcbe9e6bfba9b121456e93ddc4e511
|
Rust
|
burrbull/lcd
|
/tests/util/mod.rs
|
UTF-8
| 1,354
| 3.109375
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use lcd::*;
use std::string::String;
use std::vec::Vec;
use std::cell::RefCell;
/// Test double for the LCD hardware: records every pin/data operation as a
/// human-readable command string instead of driving real hardware.
pub struct BufferHardware<T> {
    // Log of operations, in order; RefCell because the lcd traits take &self.
    pub commands: RefCell<Vec<String>>,
    // Configured bus width (4- or 8-bit).
    pub mode: FunctionMode,
    // Busy-flag state injected by the test.
    pub busy: RefCell<T>
}
impl<T> BufferHardware<T> {
    /// Append one command string to the log.
    pub fn command(&self, cmd: String) {
        self.commands.borrow_mut().push(cmd);
    }
}
impl<T> Hardware for BufferHardware<T> {
    /// Record an R/S (register-select) pin change.
    fn rs(&self, bit: bool) {
        self.command(format!("R/S {}", bit));
    }
    /// Record an EN (enable) pin change.
    fn enable(&self, bit: bool) {
        self.command(format!("EN {}", bit));
    }
    /// Record a data-bus write, zero-padded to the configured bus width.
    fn data(&self, data: u8) {
        let str = match self.mode {
            FunctionMode::Bit4 => format!("DATA 0b{:04b}", data),
            FunctionMode::Bit8 => format!("DATA 0b{:08b}", data),
        };
        self.command(str);
    }
    /// Report the configured bus width.
    fn mode(&self) -> FunctionMode {
        // Tidied: the original bound `self.mode` to a local and immediately
        // returned it (clippy::let_and_return).
        self.mode
    }
}
// FIX: the original wrote `impl<'a, T>`, declaring a lifetime parameter
// `'a` that is not constrained by the trait, self type or predicates —
// rejected by rustc as E0207. The lifetime is simply unused here.
impl<T> Delay for BufferHardware<T> {
    /// Record a requested microsecond delay instead of sleeping.
    fn delay_us(&self, delay: u32) {
        self.command(format!("DELAY {}", delay));
    }
}
/// Run `ops` against a fresh `Display` backed by a `BufferHardware` and
/// return the recorded hardware command log for assertions.
pub fn test<F, T>(mode: FunctionMode, busy: T, ops: F) -> Vec<String>
where F: Fn(&mut Display<BufferHardware<T>>) {
    let hw = BufferHardware {
        commands: RefCell::new(Vec::new()),
        mode,
        busy: RefCell::new(busy)
    };
    ops(&mut Display::new(&hw));
    // Clone the log out of the RefCell so it can outlive `hw`.
    let result = hw.commands.borrow().clone();
    result
}
| true
|
9a09fafb484715607f5028cd3d7fb3307baec3a9
|
Rust
|
ashishnegi/learndb
|
/src/unsafelist.rs
|
UTF-8
| 4,396
| 3.5625
| 4
|
[] |
no_license
|
use std::ptr;
/// Singly-linked FIFO queue with O(1) push and pop.
///
/// Ownership lives entirely in the `head` Box chain; `tail` is a raw
/// pointer *into* that chain (or null when empty) used for O(1) appends.
#[derive(Debug)]
pub struct Queue<T> {
    head: Link<T>,
    tail: *mut Node<T>
}
// Owning link to the next node; `None` marks the end of the list.
type Link<T> = Option<Box<Node<T>>>;
#[derive(Debug)]
pub struct Node<T> {
    val: T,
    next: Link<T>
}
impl<T> Queue<T> {
    /// Create an empty queue; `tail` is null until the first push.
    pub fn new() -> Self {
        Queue {
            head: None,
            tail: ptr::null_mut()
        }
    }
    /// Append a value at the back in O(1).
    ///
    /// INVARIANT: `tail` always points at the last node owned by the
    /// `head` chain (or is null when empty); every push/pop re-establishes
    /// this, which is what keeps the raw pointer from dangling.
    pub fn push(&mut self, v: T) {
        let mut new_tail = Box::new(Node::new(v));
        // Raw pointer into the Box we are about to move into the list.
        let raw_tail: *mut _ = &mut *new_tail;
        if self.tail.is_null() {
            // Empty queue: the new node becomes the head.
            self.head = Some(new_tail);
        } else {
            // SAFETY: non-null `tail` points at the current last node,
            // which is still owned (and kept alive) by the head chain.
            unsafe {
                (*self.tail).next = Some(new_tail);
            }
        }
        self.tail = raw_tail;
    }
    /// Remove and return the front value, or `None` when empty.
    pub fn pop(&mut self) -> Option<T> {
        self.head.take().map(|head| {
            let hv = *head;
            match hv.next {
                None => {
                    // Popped the last element: reset tail so it doesn't
                    // dangle into the freed node.
                    self.tail = ptr::null_mut();
                    // NOTE(review): redundant — `take()` above already left
                    // `head` as `None`.
                    self.head = None;
                },
                Some(_) => {
                    self.head = hv.next;
                }
            };
            hv.val
        })
    }
    /// Borrow the front value without removing it.
    pub fn peek(&self) -> Option<&T> {
        self.head.as_ref().map(|head|{
            &head.val
        })
    }
    /// Consume the queue into an iterator of owned values.
    pub fn into_iter(self) -> IntoIter<T> {
        IntoIter{ next: self }
    }
    /// Iterate over shared references front-to-back.
    pub fn iter(&self) -> Iter<T> {
        Iter {
            next: self.head.as_ref().map(|head| {
                &**head
            })
        }
    }
    /// Iterate over mutable references front-to-back.
    pub fn iter_mut(&mut self) -> IterMut<T> {
        IterMut {
            next : self.head.as_mut().map(|head| &mut **head)
        }
    }
}
impl<T> Node<T> {
    /// Create a detached node holding `v` with no successor.
    pub fn new(v: T) -> Self {
        Node {
            val: v,
            next: None
        }
    }
}
/// Owning iterator: pops values off the queue until it is empty.
#[derive(Debug)]
pub struct IntoIter<T> {
    next: Queue<T>
}
impl<T> Iterator for IntoIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        self.next.pop()
    }
}
/// Shared-reference iterator: walks the node chain without consuming it.
#[derive(Debug)]
pub struct Iter<'a, T : 'a> {
    next: Option<&'a Node<T>>
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;
    fn next(&mut self) -> Option<Self::Item> {
        self.next.map(|next|{
            // Advance to the successor before yielding the current value.
            self.next = next.next.as_ref().map(|next| {
                &**next
            });
            &next.val
        })
    }
}
/// Mutable-reference iterator; `take()` is needed because `&mut` is not
/// Copy, so the stored reference must be moved out before re-borrowing.
#[derive(Debug)]
pub struct IterMut<'a, T : 'a> {
    next: Option<&'a mut Node<T>>
}
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;
    fn next(&mut self) -> Option<Self::Item> {
        self.next.take().map(|next| {
            self.next = next.next.as_mut().map(|next2| {
                &mut **next2
            });
            &mut next.val
        })
    }
}
#[cfg(test)]
mod test {
use super::*;
#[test]
pub fn basics() {
let mut q = Queue::new();
assert_eq!(None, q.pop());
q.push(1);
assert_eq!(Some(1), q.pop());
assert_eq!(None, q.pop());
let mut q2 = q;
q2.push(2);
assert_eq!(Some(2), q2.pop());
assert_eq!(None, q2.pop());
}
#[test]
pub fn into_iter() {
let mut q = Queue::new();
q.push(1);
q.push(2);
q.push(3);
{
let mut iter = q.into_iter();
assert_eq!(Some(1), iter.next());
assert_eq!(Some(2), iter.next());
assert_eq!(Some(3), iter.next());
assert_eq!(None, iter.next());
}
// q.push(1); // compiler error.. :P
}
#[test]
pub fn iter() {
let mut q = Queue::new();
q.push(1);
q.push(2);
q.push(3);
{
let mut iter = q.iter();
assert_eq!(Some(&1), iter.next());
assert_eq!(Some(&2), iter.next());
assert_eq!(Some(&3), iter.next());
assert_eq!(None, iter.next());
}
assert_eq!(Some(&1), q.peek());
assert_eq!(Some(1), q.pop());
}
#[test]
pub fn iter_mut() {
let mut q = Queue::new();
q.push(1);
q.push(2);
q.push(3);
{
let mut iter = q.iter_mut();
assert_eq!(Some(&mut 1), iter.next());
assert_eq!(Some(&mut 2), iter.next());
assert_eq!(Some(&mut 3), iter.next());
assert_eq!(None, iter.next());
}
assert_eq!(Some(&1), q.peek());
assert_eq!(Some(1), q.pop());
}
}
| true
|
2ffec6da40fddff3a18f35c427c860870bfcf37c
|
Rust
|
zainulabideenbawa/edgeRust
|
/src/struct_scheme.rs
|
UTF-8
| 2,398
| 2.8125
| 3
|
[] |
no_license
|
#[derive(Debug,Clone,Serialize,Deserialize)]
pub struct Rooms{
#[serde(rename = "_id")]
pub id:Option<ObjectId>,
pub roomId:Option<String>,
pub ip:Option<String>,
pub port:Option<String>,
pub vituralPort:Option<String>,
pub status:Option<bool>
}
#[derive(Debug,Clone,Serialize,Deserialize)]
pub struct InsertableRoom{
pub roomId:Option<String>,
pub ip:Option<String>,
pub port:Option<String>,
pub vituralPort:Option<String>,
pub status:Option<bool>
}
impl InsertableRoom{
pub fn from_rooms(room:Rooms)->InsertableRoom{
InsertableRooms{
roomId:room.roomId,
ip:room.ip,
port:room.port,
vituralPort:room.vituralPort,
status:room.status
}
}
}
#[derive(Debug,Clone,Serialize,Deserialize)]
pub struct Logs{
#[serde(rename = "_id")]
pub id:Option<ObjectId>,
pub roomId:Option<String>,
pub userName:Option<String>,
pub applinceName:Option<String>,
pub status:Option<bool>,
pub dateTime:Option<DateTime>
}
#[derive(Debug,Clone,Serialize,Deserialize)]
pub struct InsertableLog{
pub roomId:Option<String>,
pub userName:Option<String>,
pub applinceName:Option<String>,
pub status:Option<bool>,
pub dateTime:Option<DateTime>
}
impl InsertableLog{
pub fn from_log(log:Logs)->InsertableLog{
InsertableRooms{
roomId:log.roomId,
userName:log.userName,
applinceName:log.applinceName,
status:log.status,
dateTime:log.dateTime
}
}
}
#[derive(Debug,Clone,Serialize,Deserialize)]
pub struct Logs{
#[serde(rename = "_id")]
pub id:Option<ObjectId>,
pub roomId:Option<String>,
pub userName:Option<String>,
pub applinceName:Option<String>,
pub status:Option<bool>,
pub dateTime:Option<DateTime>
}
#[derive(Debug,Clone,Serialize,Deserialize)]
pub struct InsertableLog{
pub roomId:Option<String>,
pub userName:Option<String>,
pub applinceName:Option<String>,
pub status:Option<bool>,
pub dateTime:Option<DateTime>
}
impl InsertableLog{
pub fn from_log(log:Logs)->InsertableLog{
InsertableRooms{
roomId:log.roomId,
userName:log.userName,
applinceName:log.applinceName,
status:log.status,
dateTime:log.dateTime
}
}
}
| true
|
61b23b605a8dfbdde7593afd35e9607405f4a7da
|
Rust
|
ericsink/rust-raytracer
|
/src/raytracer/octree.rs
|
UTF-8
| 5,836
| 2.953125
| 3
|
[
"MIT"
] |
permissive
|
use crate::prelude::*;
use core::slice::Iter;
use core::iter::FromIterator;
use crate::geometry::{BBox, PartialBoundingBox};
use crate::raytracer::Ray;
use crate::vec3::Vec3;
//
pub struct Octree<T> where T: PartialBoundingBox {
prims: Vec<T>,
infinites: Vec<T>, // for infinite prims (planes)
root: OctreeNode,
}
impl<T> FromIterator<T> for Octree<T> where T: PartialBoundingBox {
fn from_iter<I>(iterator: I) -> Self where I: IntoIterator<Item=T> {
let iterator = iterator.into_iter();
let (finites, infinites): (Vec<T>, Vec<T>) =
iterator.partition(|item| item.partial_bounding_box().is_some());
// TODO(sell): why do we need to map here? &T isn't PartialBoundingBox,
// but we need to find out how to make it so.
let bounds = BBox::from_union(finites.iter().map(|i| i.partial_bounding_box()))
.unwrap_or(BBox::zero());
// pbrt recommended max depth for a k-d tree (though, we're using an octree)
// For a k-d tree: 8 + 1.3 * log2(N)
let depth = (1.2 * (finites.len() as f64).log(8.0)).round() as i32;
let mut root_node = OctreeNode::new(bounds, depth);
for (i, prim) in finites.iter().enumerate() {
root_node.insert(i, prim.partial_bounding_box().unwrap());
}
Octree {
prims: finites,
infinites: infinites,
root: root_node,
}
}
}
impl<T> Octree<T> where T: PartialBoundingBox {
pub fn intersect_iter<'a>(&'a self, ray: &'a Ray) -> OctreeIterator<'a, T> {
OctreeIterator::new(self, ray)
}
}
pub struct OctreeNode {
bbox: BBox,
depth: i32,
children: Vec<OctreeNode>,
leaf_data: Vec<OctreeData>,
}
#[derive(Clone, Copy)]
struct OctreeData {
pub bbox: BBox,
pub index: usize
}
impl OctreeNode {
#[allow(dead_code)]
pub fn new(bbox: BBox, depth: i32) -> OctreeNode {
OctreeNode {
bbox: bbox,
depth: depth,
children: Vec::new(),
leaf_data: Vec::new(),
}
}
fn subdivide(&mut self) {
for x in 0u32..2 {
for y in 0u32..2 {
for z in 0u32..2 {
let len = self.bbox.len();
let child_bbox = BBox {
min: Vec3 {
x: self.bbox.min.x + x as f64 * len.x / 2.0,
y: self.bbox.min.y + y as f64 * len.y / 2.0,
z: self.bbox.min.z + z as f64 * len.z / 2.0
},
max: Vec3 {
x: self.bbox.max.x - (1 - x) as f64 * len.x / 2.0,
y: self.bbox.max.y - (1 - y) as f64 * len.y / 2.0,
z: self.bbox.max.z - (1 - z) as f64 * len.z / 2.0,
}
};
self.children.push(OctreeNode::new(child_bbox, self.depth - 1));
}
}
}
}
#[allow(dead_code)]
pub fn insert(&mut self, index: usize, object_bbox: BBox) -> () {
// Max depth
if self.depth <= 0 {
self.leaf_data.push(OctreeData { index: index, bbox: object_bbox });
return;
}
// Empty leaf node
if self.is_leaf() && self.leaf_data.len() == 0 {
self.leaf_data.push(OctreeData { index: index, bbox: object_bbox });
return;
}
// Occupied leaf node and not max depth: subdivide node
if self.is_leaf() && self.leaf_data.len() == 1 {
self.subdivide();
let old = self.leaf_data.remove(0);
// Reinsert old node and then fall through to insert current object
self.insert(old.index, old.bbox);
}
// Interior node (has children)
for child in self.children.iter_mut() {
if child.bbox.overlaps(&object_bbox) {
child.insert(index, object_bbox);
}
}
}
fn is_leaf(&self) -> bool {
self.children.len() == 0
}
}
pub struct OctreeIterator<'a, T:'a> {
prims: &'a [T],
stack: Vec<&'a OctreeNode>,
leaf_iter: Option<Iter<'a, OctreeData>>,
ray: &'a Ray,
infinites: Iter<'a, T>,
just_infinites: bool
}
impl<'a, T> OctreeIterator<'a, T> where T: PartialBoundingBox {
fn new<'b>(octree: &'b Octree<T>, ray: &'b Ray) -> OctreeIterator<'b, T> {
OctreeIterator {
prims: &octree.prims[..],
stack: vec![&octree.root],
leaf_iter: None,
ray: ray,
infinites: octree.infinites.iter(),
just_infinites: false
}
}
}
impl<'a, T> Iterator for OctreeIterator<'a, T> where T: PartialBoundingBox {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
if self.just_infinites {
return self.infinites.next();
}
loop {
let ray = self.ray;
if let Some(leaf_iter) = self.leaf_iter.as_mut() {
if let Some(val) = leaf_iter.filter(|x| x.bbox.intersects(ray)).next() {
return Some(&self.prims[val.index]);
}
// iterator went empty, so we'll pop from the stack and
// iterate on the next node's children now,
}
if let Some(node) = self.stack.pop() {
for child in node.children.iter() {
if child.bbox.intersects(self.ray) {
self.stack.push(child);
}
}
self.leaf_iter = Some(node.leaf_data.iter());
} else {
self.just_infinites = true;
return self.infinites.next()
}
}
}
}
| true
|
37a616f6e1609b754ea48a4949a2f473fe591ac8
|
Rust
|
danieleades/rust-finance
|
/src/transaction.rs
|
UTF-8
| 7,372
| 3.140625
| 3
|
[] |
no_license
|
use chrono::{DateTime, Utc};
use decimal::d128;
use serde_derive::{Deserialize, Serialize};
use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Transaction {
/// transaction value. a positive number represents flow into the account
amount: d128,
#[serde(skip_serializing_if = "Option::is_none")]
description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
payee: Option<String>,
/// the date that the transaction is created. If no transaction date is set, this will be used for sorting
date_created: DateTime<Utc>,
/// the date that the transaction occurred
#[serde(skip_serializing_if = "Option::is_none")]
date_transaction: Option<DateTime<Utc>>,
/// An optional category for the transaction
#[serde(skip_serializing_if = "Option::is_none")]
category: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
account: Option<String>,
/// A vector of strings used to organise transactions
tags: Vec<String>,
/// An optional non-unique id
#[serde(skip_serializing_if = "Option::is_none")]
id: Option<u16>,
/// A globally unique id
uuid: Uuid,
/// If true, the budget has been reconciled past the date of this transaction. reconciled transactions should not be edited (lightly)
reconciled: bool,
/// The source of this transaction. This enum may be used for differentiating between transactions
/// in a single ledger that came from different sources
source: Source,
}
impl Default for Transaction {
fn default() -> Self {
Transaction {
amount: d128::default(),
description: None,
payee: None,
date_created: Utc::now(),
date_transaction: None,
category: None,
account: None,
tags: Vec::<String>::default(),
id: None,
uuid: Uuid::new_v4(),
reconciled: false,
source: Source::Manual,
}
}
}
impl Transaction {
pub fn new<T: Into<d128>>(amount: T) -> Transaction {
Transaction {
amount: amount.into(),
..Self::default()
}
}
pub fn amount(&self) -> d128 {
self.amount
}
pub fn set_amount<T: Into<d128>>(&mut self, amount: T) {
self.amount = amount.into();
}
pub fn with_amount<T: Into<d128>>(mut self, amount: T) -> Self {
self.set_amount(amount.into());
self
}
pub fn created(&self) -> DateTime<Utc> {
self.date_created
}
pub fn date_transaction(&self) -> Option<DateTime<Utc>> {
self.date_transaction
}
pub fn set_date_transaction<T: Into<DateTime<Utc>>>(&mut self, date: Option<T>) {
self.date_transaction = date.map(T::into);
}
pub fn with_date_transaction<T: Into<DateTime<Utc>>>(mut self, date: T) -> Self {
self.set_date_transaction(Some(date.into()));
self
}
pub fn date(&self) -> DateTime<Utc> {
self.date_transaction().unwrap_or_else(|| self.created())
}
pub fn description(&self) -> &Option<String> {
&self.description
}
pub fn set_description<S: Into<String>>(&mut self, description: Option<S>) {
self.description = description.map(S::into);
}
pub fn with_description<S: Into<String>>(mut self, description: Option<S>) -> Self {
self.description = description.map(S::into);
self
}
pub fn payee(&self) -> &Option<String> {
&self.payee
}
pub fn set_payee<S: Into<String>>(&mut self, payee: Option<S>) {
self.payee = payee.map(S::into);
}
pub fn with_payee<S: Into<String>>(mut self, payee: Option<S>) -> Self {
self.payee = payee.map(S::into);
self
}
pub fn category(&self) -> &Option<String> {
&self.category
}
pub fn set_category<S: Into<String>>(&mut self, category: Option<S>) {
self.category = category.map(S::into);
}
pub fn with_category<S: Into<String>>(mut self, category: S) -> Self {
self.category = Some(category.into());
self
}
pub fn account(&self) -> &Option<String> {
&self.account
}
pub fn set_account<S: Into<String>>(&self) -> &Option<String> {
&self.account
}
/// add tag to transaction, if its not already present
pub fn tag<S: Into<String>>(&mut self, tag: S) {
let t: String = tag.into();
if !self.tags.contains(&t) {
self.tags.push(t)
}
}
pub fn with_tag<S: Into<String>>(mut self, tag: S) -> Self {
self.tag(tag);
self
}
/// removes a tag, if it exists
pub fn untag<S: AsRef<String>>(&mut self, tag: S) {
self.tags.retain(|x| x != tag.as_ref());
}
pub fn without_tag<S: AsRef<String>>(mut self, tag: S) -> Self {
self.untag(tag);
self
}
/// sets the transaction tags to exactly those supplied
pub fn set_tags<S: Into<String>>(&mut self, tags: Vec<S>) {
let mut t: Vec<String> = tags.into_iter().map(S::into).collect();
t.sort();
t.dedup();
self.tags = t;
}
pub fn tags(&self) -> std::slice::Iter<String> {
self.tags.iter()
}
pub fn id(&self) -> Option<u16> {
self.id
}
pub fn set_id<T: Into<u16>>(&mut self, id: Option<T>) {
self.id = id.map(T::into);
}
pub fn with_id<T: Into<u16>>(mut self, id: Option<T>) -> Self {
self.set_id(id);
self
}
pub fn uuid(&self) -> &Uuid {
&self.uuid
}
pub fn reconciled(&self) -> bool {
self.reconciled
}
pub fn set_reconciled(&mut self, b: bool) {
self.reconciled = b;
}
pub fn source(&self) -> &Source {
&self.source
}
pub fn set_source(&mut self, s: Source) {
self.source = s;
}
pub fn with_source(mut self, s: Source) -> Self {
self.set_source(s);
self
}
/// returns true if two transactions have the same amount, description, category, tags, transaction date.
/// ids, added date, source, and reconciled state are not considered.
pub fn is_similar(&self, other: &Transaction) -> bool {
self.amount() == other.amount()
&& self.description() == other.description()
&& self.category() == other.category()
&& self.date_transaction() == other.date_transaction()
&& self.tags().as_slice() == other.tags().as_slice()
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Source {
Manual,
Reconciliation,
}
impl<T> std::ops::Add<T> for Transaction
where
T: Into<d128>,
{
type Output = Self;
fn add(mut self, other: T) -> Self {
self.amount += other.into();
self
}
}
impl<T> std::ops::AddAssign<T> for Transaction
where
T: Into<d128>,
{
fn add_assign(&mut self, other: T) {
self.amount += other.into();
}
}
impl<T> std::ops::Sub<T> for Transaction
where
T: Into<d128>,
{
type Output = Self;
fn sub(mut self, other: T) -> Self {
self.amount -= other.into();
self
}
}
impl<T> std::ops::SubAssign<T> for Transaction
where
T: Into<d128>,
{
fn sub_assign(&mut self, other: T) {
self.amount -= other.into();
}
}
| true
|
9374d0a6adac4c0ba3dd691978942f429722d8a8
|
Rust
|
minhkhiemm/rust
|
/display/src/main.rs
|
UTF-8
| 1,637
| 3.765625
| 4
|
[] |
no_license
|
use std::fmt;
// the way to declare a struct like this is similar to create a custom type in Golang
struct Structure(i32);
// example of create custom type in Rust::week date
#[allow(dead_code)]
struct WeekDay(i32);
// this also useful incase of type of a class need to assert
// implement Structure with fmt fn
// Implementing `fmt::Display` lets `Structure` be printed with `{}`;
// it forwards the wrapped i32 (field `.0` of the tuple struct).
impl fmt::Display for Structure {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
#[derive(Debug)]
struct Po {
name:String,
character:String,
}
impl fmt::Display for Po {
fn fmt(&self,f: &mut fmt::Formatter) -> fmt::Result {
write!(f,"name: {}, character: {}",self.name,self.character)
}
}
// field in a struct should be lower case + snake case
struct Info {
name: String,
age: u8,
}
struct List(Vec<i32>);
// Render the wrapped Vec as "[0: a, 1: b, ...]". Each `write!` returns a
// `fmt::Result`, so `?` propagates any formatter error immediately.
impl fmt::Display for List {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let vec = &self.0;
        write!(f, "[")?;
        for (count, v) in vec.iter().enumerate() {
            // Separator before every element except the first.
            if count != 0 { write!(f, ", ")?; }
            write!(f, "{}: {}", count, v)?;
        }
        write!(f, "]")
    }
}
fn main() {
let v = Structure(20);
println!("{}",v);
println!("{}",Structure(8));
let info = Info{name:"khiem".to_string(),age:22};
println!("{}\n{}",info.name,info.age);
let po = Po{name:"po".to_string(),character:"animal".to_string()};
println!("{}",po);
let l = List(vec![1,2,3,4]);
println!("{}",l);
let mut inmmu = 6;
inmmu += 1;
println!("{}",inmmu);
println!("{}",1000==1_000);
println!("1 + 2 = {}",1u32+2);
println!("1 - 2 = {}",1i32-2);
}
| true
|
ee100df17a4626743ab562fb74903859df589080
|
Rust
|
JustinRyanH/rust-snake
|
/game/src/main.rs
|
UTF-8
| 3,550
| 2.65625
| 3
|
[] |
no_license
|
use miniquad::*;
use megaui::hash;
use stages::GameState;
mod components;
mod events;
mod graphics;
mod shaders;
mod stages;
mod systems;
mod ui;
mod utils;
struct SnakeGame {
stages: stages::StageStack,
renderer: graphics::MainRenderer,
input: components::Input,
ui: ui::UiContext,
}
impl SnakeGame {
pub fn new(ctx: Context) -> Self {
let (width, height) = ctx.screen_size();
let mut renderer = graphics::MainRenderer::new(ctx);
let mut input = components::Input::default();
input.width = width;
input.height = height;
let mut stages = stages::new_stage_stack(16);
let init_state = GameState::new(&input, &mut renderer);
let game_stage = Box::new(init_state);
stages.push(game_stage as Box<dyn stages::Stage>);
let ui = ui::UiContext::new();
SnakeGame {
stages,
renderer,
input,
ui,
}
}
pub fn delta_time(&self) -> f32 {
0.
}
}
impl EventHandlerFree for SnakeGame {
    /// Caches the new screen size and flags the resize so stages and the
    /// renderer can react on the next update.
    fn resize_event(&mut self, width: f32, height: f32) {
        self.input.width = width;
        self.input.height = height;
        self.input.resized = true;
    }

    /// Advances the active (top-of-stack) stage, applies the stage
    /// transition it requests, then feeds input to the UI. Input is reset
    /// at the end of every frame.
    fn update(&mut self) {
        let stage = match self.stages.last_mut() {
            Some(s) => s,
            _ => return, // empty stack: nothing to simulate
        };
        let next_stage = stage.update(&self.input, &mut self.renderer);
        match next_stage {
            // Push: current stage exits, the new one enters and becomes active.
            stages::NextStage::Push(mut new_stage) => {
                stage.exit(&mut self.renderer);
                new_stage.enter(&mut self.renderer);
                self.stages.push(new_stage);
            }
            // Pop: current stage exits; the stage below (if any) re-enters.
            stages::NextStage::Pop => {
                stage.exit(&mut self.renderer);
                self.stages.pop().expect("Popped an Empty StageStack");
                match self.stages.last_mut() {
                    Some(s) => s.enter(&mut self.renderer),
                    _ => {}
                };
            }
            _ => {}
        };
        self.ui.process_input(&self.input);
        self.renderer.load_assets();
        // Demo UI window; the label is redrawn every frame.
        self.ui.window(hash!(), glam::Vec2::new(20., 20.), glam::Vec2::new(100., 200.), ui::WindowParams::default(), |ui: &mut megaui::Ui, atlas: &ui::Atlas| {
            ui.label(atlas, None, "Some random text");
            // if ui.button(&atlas, None, "click me") {
            //     println!("hi");
            // }
        });
        self.input.reset();
    }

    /// Draws every stage bottom-to-top (lower stages can show through),
    /// then the UI overlay, then flushes the renderer.
    fn draw(&mut self) {
        for stage in self.stages.iter_mut() {
            stage.draw(&mut self.renderer);
        }
        self.ui.draw(self.delta_time());
        self.renderer.draw();
    }

    /// Maps keys to input flags; key repeats are ignored so each press
    /// registers once.
    fn key_down_event(&mut self, keycode: KeyCode, _keymods: KeyMods, repeat: bool) {
        if repeat {
            return;
        }
        match keycode {
            KeyCode::Left | KeyCode::A => {
                self.input.go_left = true;
            }
            KeyCode::Right | KeyCode::D => {
                self.input.go_right = true;
            }
            KeyCode::Up | KeyCode::W => {
                self.input.go_up = true;
            }
            KeyCode::Down | KeyCode::S => {
                self.input.go_down = true;
            }
            KeyCode::Escape => {
                self.input.pause = true;
                self.input.go_back = true;
            }
            _ => {}
        }
    }
}
/// Entry point: boots miniquad with default settings and hands it the game.
fn main() {
    let config = conf::Conf::default();
    miniquad::start(config, |ctx| UserData::free(SnakeGame::new(ctx)));
}
| true
|
ee1d9c25349fb9db88fd610062f50f6fc09a511e
|
Rust
|
amedviediev/rltk_rs
|
/examples/ex15-specs.rs
|
UTF-8
| 4,548
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
rltk::add_wasm_support!();
use rltk::{Console, GameState, Rltk, VirtualKeyCode, RGB};
use specs::prelude::*;
// Define a bunch of components
/// Pos is a screen position (terminal cell coordinates).
struct Pos {
    x: i32,
    y: i32,
}

impl Component for Pos {
    type Storage = VecStorage<Self>;
}

/// Renderable is a glyph definition (CP437 glyph + colors).
struct Renderable {
    glyph: u8,
    fg: RGB,
    bg: RGB,
}

impl Component for Renderable {
    type Storage = VecStorage<Self>;
}

/// Marker for this is the player
struct Player {}

impl Component for Player {
    type Storage = VecStorage<Self>;
}

/// Marker for this is a bouncing baby
// NOTE(review): "Bacy" is a typo for "Baby", but the name is referenced
// throughout this file; renaming would be a wider change.
struct BouncingBacy {}

impl Component for BouncingBacy {
    type Storage = VecStorage<Self>;
}
/// Game state: the Specs `World`, an RNG, a step timer and score counters.
struct State {
    ecs: World,
    // Accumulated frame time (ms); the simulation steps every 200ms.
    time: f32,
    rng: rltk::RandomNumberGenerator,
    // Babies caught by the player.
    saved: i32,
    // Babies that hit the ground.
    squished: i32,
}
impl GameState for State {
    /// Per-frame callback: player input, fixed-step (200ms) baby movement,
    /// then rendering and the score line.
    fn tick(&mut self, ctx: &mut Rltk) {
        // Component storages. `renderables` is only ever read (the draw join
        // below borrows it immutably), so a read borrow suffices; the
        // original `write_storage` claimed a needless exclusive lock.
        let mut positions = self.ecs.write_storage::<Pos>();
        let renderables = self.ecs.read_storage::<Renderable>();
        let mut players = self.ecs.write_storage::<Player>();
        let mut babies = self.ecs.write_storage::<BouncingBacy>();

        ctx.cls();

        // Player movement, clamped to the 80-column screen.
        match ctx.key {
            None => {} // Nothing happened
            Some(key) => match key {
                VirtualKeyCode::Left => {
                    for (_player, pos) in (&mut players, &mut positions).join() {
                        pos.x -= 1;
                        if pos.x < 0 {
                            pos.x = 0;
                        }
                    }
                }
                VirtualKeyCode::Right => {
                    for (_player, pos) in (&mut players, &mut positions).join() {
                        pos.x += 1;
                        if pos.x > 79 {
                            pos.x = 79;
                        }
                    }
                }
                _ => {}
            },
        }

        // Fixed simulation step: only advance babies every 200ms of
        // accumulated frame time.
        self.time += ctx.frame_time_ms;
        if self.time > 200.0 {
            self.time = 0.0;

            // Find the player's column (there is one player entity).
            let mut player_x = 0;
            for (_player, player_pos) in (&mut players, &mut positions).join() {
                player_x = player_pos.x;
            }

            // Baby movement: fall one row; on reaching the bottom, score and
            // respawn at the top in a random column.
            for (_baby, pos) in (&mut babies, &mut positions).join() {
                pos.y += 1;
                if pos.y > 48 {
                    pos.y = 0;
                    if player_x == pos.x {
                        // We saved them
                        self.saved += 1;
                    } else {
                        // Squish!
                        self.squished += 1;
                    }
                    pos.x = self.rng.roll_dice(1, 79);
                }
            }
        }

        // Draw every entity that has both a position and a glyph.
        for (pos, render) in (&positions, &renderables).join() {
            ctx.set(pos.x, pos.y, render.fg, render.bg, render.glyph);
        }

        // Print the instructions and the running score.
        ctx.print_centered(0, "Left & right arrows to move. Catch the falling babies!");
        ctx.print_centered(
            2,
            &format!("Saved {}, Squished {}", self.saved, self.squished),
        );
    }
}
/// Sets up the world, spawns the player and three babies, and starts the
/// main loop.
fn main() {
    let mut game = State {
        ecs: World::new(),
        time: 0.0,
        rng: rltk::RandomNumberGenerator::new(),
        saved: 0,
        squished: 0,
    };

    // Every component type must be registered before entities use it.
    game.ecs.register::<Pos>();
    game.ecs.register::<Renderable>();
    game.ecs.register::<Player>();
    game.ecs.register::<BouncingBacy>();

    // The player: a yellow '@' on the bottom row, mid-screen.
    game.ecs
        .create_entity()
        .with(Pos { x: 40, y: 49 })
        .with(Renderable {
            glyph: rltk::to_cp437('@'),
            fg: RGB::named(rltk::YELLOW),
            bg: RGB::named(rltk::BLACK),
        })
        .with(Player {})
        .build();

    // Three babies spread across the screen at random heights.
    for column in 0..3 {
        game.ecs
            .create_entity()
            .with(Pos {
                x: (column * 22) + 12,
                y: game.rng.roll_dice(1, 20),
            })
            .with(Renderable {
                glyph: rltk::to_cp437('☺'),
                fg: RGB::named(rltk::MAGENTA),
                bg: RGB::named(rltk::BLACK),
            })
            .with(BouncingBacy {})
            .build();
    }

    let context = Rltk::init_simple8x8(
        80,
        50,
        "Example 15 - Bouncing Babies with SPECS",
        "resources",
    );
    rltk::main_loop(context, game);
}
| true
|
c8d6f1926ec74338015e08e2503e54ea55bbd268
|
Rust
|
observatory-economic-complexity/product-space-rs
|
/src/complexity.rs
|
UTF-8
| 5,067
| 2.890625
| 3
|
[] |
no_license
|
use nalgebra::{DMatrix, convert};
// rca input is matrix of rca, where
// - col indexes are product
// - row indexes are countries
/// Computes (geo complexity, product complexity) z-score vectors from an
/// RCA matrix via the method-of-reflections iteration, mirroring simoes'
/// ps_calcs implementation.
///
/// Fix: removed the leftover debug `println!` calls — a library function
/// should not write to stdout. The numeric pipeline is unchanged.
pub fn complexity(rca: &DMatrix<f64>) -> (DMatrix<f64>, DMatrix<f64>) {
    // kp0: per-product column sums ("ubiquity" seed).
    let kp0 = rca.row_sum_tr();
    let mut kp = kp0.clone();

    // kc0: per-country row sums ("diversity" seed).
    let kc0 = rca.column_sum();
    let mut kc = kc0.clone();

    // Method-of-reflections iteration. The 19-pass count and the
    // asymmetric final step (kp updated once more than kc) are kept
    // exactly as in ps_calcs so the outputs match it bit-for-bit.
    for i in 0..19 {
        // Temps needed because each update depends on the values from the
        // start of the pass, not the ones just written.
        let kc_temp = kc.clone();
        let kp_temp = kp.clone();
        kp = convert((rca.transpose() * &kc_temp).component_div(&kp0));
        if i < 18 {
            kc = (rca * &kp_temp).component_div(&kc0);
        }
    }

    // Standardize both vectors (z-scores; ddof = 1 to match pandas).
    let kc_mean = mean(&convert(kc.clone()));
    let kc_std = std(&convert(kc.clone()), None);
    let mut geo_complexity = kc;

    let kp_mean = mean(&convert(kp.clone()));
    let kp_std = std(&convert(kp.clone()), None);
    let mut prod_complexity = kp;

    geo_complexity.apply(|x| (x - kc_mean) / kc_std);
    prod_complexity.apply(|x| (x - kp_mean) / kp_std);

    (convert(geo_complexity), convert(prod_complexity))
}
// Only valid for single-column matrices (column vectors).
/// Arithmetic mean of a column vector.
fn mean(m: &DMatrix<f64>) -> f64 {
    assert!(m.ncols() == 1);
    let col = m.columns(0, 1);
    let sum: f64 = col.iter().sum();
    sum / col.len() as f64
}
// Only valid for single-column matrices (column vectors).
//
// Temporary hand-rolled standard deviation until a library replaces it.
//
// `ddof` is the delta degrees of freedom. pandas defaults to 1, numpy to 0;
// `None` here means 1 so the complexity calculation matches ps_calcs
// (which uses pandas). See notes in `complexity_bug.md`.
fn std(m: &DMatrix<f64>, ddof: Option<u32>) -> f64 {
    // std = sqrt(mean(abs(x - x.mean())**2)), with an n - ddof denominator.
    assert!(m.ncols() == 1);
    let mu = mean(m);
    let col = m.columns(0, 1);
    let n = col.len();

    // Same per-element expression as before (abs + powf kept so the
    // floating-point result is bit-identical).
    let sum_sq: f64 = col.iter().map(|x| ((x - mu).abs()).powf(2.0)).sum();

    let denom = n as u32 - ddof.unwrap_or(1);
    let variance = sum_sq / denom as f64;
    variance.sqrt()
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::rca;

    // ddof = 0 matches numpy's default population std.
    #[test]
    fn test_std_ddof0() {
        let m = DMatrix::from_vec(3,1,vec![1.0, 3.0, 5.0]);
        let std_dev = std(&m, Some(0));
        assert_eq!(std_dev, 1.632993161855452);

        let m = DMatrix::from_vec(3,1,vec![1.0, 3.0, 6.0]);
        let std_dev = std(&m, Some(0));
        assert_eq!(std_dev, 2.0548046676563256);

        let m = DMatrix::from_vec(4,1,vec![9.365921518323761,9.365168229974921,9.366119246144434,9.366618939884766]);
        let std_dev = std(&m, Some(0));
        assert_eq!(std_dev, 0.0005215135001035631);
    }

    // Default (None) is ddof = 1, matching pandas' sample std.
    #[test]
    fn test_std_ddof1() {
        let m = DMatrix::from_vec(4,1,vec![9.365921518323761,9.365168229974921,9.366119246144434,9.366618939884766]);
        let std_dev = std(&m, None);
        assert_eq!(std_dev, 0.0006021919193416322);
    }

    // End-to-end check against values produced by simoes' ps_calcs
    // (expected output reproduced in the comment at the bottom of the file).
    #[test]
    fn test_complexity() {
        println!("columns: product, rows: country");
        let m = DMatrix::from_vec(2,4,vec![100.0, 2000.0, 3.0, 4000.0, 500.0, 6000.0, 17.0, 23.0]);
        println!("matrix:\n{}", m);
        let rca = rca(&m);
        println!("rca:\n{}", rca);
        let complexity = complexity(&rca);
        println!("geo complexity:\n{}", complexity.0);
        println!("product complexity:\n{}", complexity.1);
        let expected_geo= DMatrix::from_vec(2,1,
            vec![
                0.7071067811857505,
                -0.7071067811873445,
            ]
        );
        println!("expected_geo:\n{}", expected_geo);
        let expected_product= DMatrix::from_vec(4,1,
            vec![
                -0.058893613597594,
                -1.3098043691969639,
                0.2694532378334895,
                1.0992447449640181,
            ]
        );
        println!("expected_product:\n{}", expected_product);
        assert_eq!(complexity.0, expected_geo);
        assert_eq!(complexity.1, expected_product);
    }
}
// expected from simoes
// (0 0.707107
// 1 -0.707107
// dtype: float64, 0 -0.058894
// 1 -1.309804
// 2 0.269453
// 3 1.099245
| true
|
6116a645c4124d367007d75b9619f345c2dc96f4
|
Rust
|
cozydate/rust-in-production
|
/http/src/bin/async_read.rs
|
UTF-8
| 1,129
| 3
| 3
|
[
"MIT"
] |
permissive
|
use std::pin::Pin;
use std::task::Context;
use tokio::io::AsyncRead;
use tokio::macros::support::Poll;
/// AsyncReadable is an example struct that implements tokio::io::AsyncRead.
/// The inner counter records how many times `poll_read` has run and drives
/// a scripted sequence: Pending, "aaa", "bbb", then EOF (Ok(0)).
struct AsyncReadable(u8);

impl AsyncRead for AsyncReadable {
    fn poll_read(mut self: Pin<&mut Self>, cx: &mut Context<'_>, mut buf: &mut [u8]) -> Poll<tokio::io::Result<usize>> {
        self.0 += 1;
        use std::io::Write;
        match self.0 {
            1 => {
                cx.waker().clone().wake(); // Tell the executor to poll this task again.
                Poll::Pending
            }
            // `Write` on &mut [u8] copies bytes into the front of the
            // buffer and returns how many were written.
            2 => Poll::Ready(buf.write(b"aaa")),
            3 => Poly_marker_removed_never_here(),
            _ => Poll::Ready(tokio::io::Result::Ok(0))
        }
    }
}
/// Reads the scripted stream to completion and prints the collected text.
pub async fn async_main() {
    let mut reader = AsyncReadable(0);
    let mut collected = String::new();
    tokio::io::AsyncReadExt::read_to_string(&mut reader, &mut collected)
        .await
        .unwrap();
    println!("{:?}", collected);
}
/// Synchronous entry point: builds a Tokio runtime and blocks on the demo.
pub fn main() {
    let runtime = tokio::runtime::Runtime::new().unwrap();
    runtime.block_on(async_main());
}
// $ cargo run --bin async_read
// "aaabbb"
| true
|
31fec84ca9f215f65f16b8bcecbb5aa3e5046035
|
Rust
|
rust-lang/rust
|
/tests/ui/const-generics/generic_const_exprs/issue-99705.rs
|
UTF-8
| 718
| 2.703125
| 3
|
[
"Apache-2.0",
"LLVM-exception",
"NCSA",
"BSD-2-Clause",
"LicenseRef-scancode-unicode",
"MIT",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
// check-pass
#![crate_type = "lib"]
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
// Iterator-like abstraction used to exercise associated-type projection.
pub trait MyIterator {
    type Output;
}

pub trait Foo {
    const ABC: usize;
}

pub struct IteratorStruct<const N: usize>{

}

pub struct Bar<const N: usize> {
    pub data: [usize; N]
}

impl<const N: usize> MyIterator for IteratorStruct<N> {
    type Output = Bar<N>;
}

// `impl Trait` return whose const argument is an associated const of a
// type parameter — the case this check-pass test guards (issue 99705).
pub fn test1<T: Foo>() -> impl MyIterator<Output = Bar<{T::ABC}>> where [(); T::ABC]: Sized {
    IteratorStruct::<{T::ABC}>{}
}

pub trait Baz<const N: usize>{}

impl<const N: usize> Baz<N> for Bar<N> {}

// Same, but nested inside a second `impl Trait`.
pub fn test2<T: Foo>() -> impl MyIterator<Output = impl Baz<{ T::ABC }>> where [(); T::ABC]: Sized {
    IteratorStruct::<{T::ABC}>{}
}
| true
|
48adaf3f0b6093df93b94bd8ef57f461f8f20e42
|
Rust
|
mripard/doremi
|
/src/property.rs
|
UTF-8
| 1,250
| 2.875
| 3
|
[
"MIT"
] |
permissive
|
use crate::device::Device;
use crate::error::Error;
use crate::error::Result;
use crate::object::Object;
use crate::object::ObjectType;
use crate::rawdevice::drm_mode_get_property;
#[derive(Debug)]
/// A DRM property object, tied to the device it was queried from.
pub struct Property<'a> {
    dev: &'a Device,
    // Kernel-assigned property id.
    id: u32,
    // Property name as reported by the kernel (NUL padding stripped).
    name: String,
}
impl<'a> Object for Property<'a> {
fn get_dev(&self) -> &Device {
self.dev
}
fn get_id(&self) -> u32 {
self.id
}
fn get_type(&self) -> ObjectType {
ObjectType::Property
}
fn get_property_id(&self, property: &str) -> Result<u32> {
let dev = self.get_dev();
Ok(dev
.get_properties(self)?
.iter()
.find(|prop| prop.get_name() == property)
.ok_or(Error::NoneError)?
.get_id())
}
}
impl<'a> Property<'a> {
    /// Wraps a raw kernel property descriptor.
    ///
    /// The kernel hands back a fixed-size, NUL-padded byte array for the
    /// name; it is validated as UTF-8 and the trailing NULs are stripped.
    pub(crate) fn new(
        dev: &'a Device,
        property: drm_mode_get_property,
    ) -> Result<Property<'_>> {
        let raw_name = std::str::from_utf8(&property.name)?;
        let name = raw_name.trim_end_matches(char::from(0)).to_string();

        Ok(Property {
            dev,
            name,
            id: property.prop_id,
        })
    }

    /// Property name reported by the kernel.
    pub(crate) fn get_name(&self) -> &str {
        &self.name
    }
}
| true
|
0ad757e43c1df026879dd3c4655cebca9389b41b
|
Rust
|
TianyiShi2001/suffix-array-li2016
|
/rust/src/naive.rs
|
UTF-8
| 1,968
| 3.359375
| 3
|
[
"MIT"
] |
permissive
|
use num_traits::{PrimInt, Unsigned};
use std::fmt::{Debug, Display};
/// A suffix array over `text`, plus an (optional) LCP array.
pub struct SuffixArray<'a, T: PrimInt + Unsigned + Display + Debug> {
    pub text: &'a [T],
    // sa[i] = start index of the i-th smallest suffix.
    pub sa: Vec<usize>,
    // lcp[i] = longest common prefix of sa[i-1] and sa[i]; may be empty
    // when not computed.
    pub lcp: Vec<usize>,
}
impl<'a, T> SuffixArray<'a, T>
where
    T: PrimInt + Unsigned + Display + Debug,
{
    /// Builds the suffix array by sorting all suffix start positions with
    /// a plain comparison sort — O(n^2 log n) worst case, for reference
    /// only. The LCP array is intentionally left empty.
    pub fn from_str_very_naive(s: &'a [T]) -> Self {
        let mut order: Vec<usize> = (0..s.len()).collect();
        order.sort_by(|&a, &b| s[a..].cmp(&s[b..]));
        // let lcp = Self::lcp(&sa, s);
        Self {
            sa: order,
            text: s,
            lcp: Vec::new(),
        }
    }

    /// LCP array for a finished suffix array:
    /// lcp[i] = common-prefix length of suffixes sa[i-1] and sa[i].
    fn lcp(sa: &[usize], text: &[T]) -> Vec<usize> {
        fn common_prefix<T: PrimInt + Unsigned>(a: &[T], b: &[T]) -> usize {
            a.iter()
                .zip(b.iter())
                .take_while(|(ca, cb)| ca == cb)
                .count()
        }
        let mut lcps = vec![0usize; sa.len()];
        for (i, pair) in sa.windows(2).enumerate() {
            lcps[i + 1] = common_prefix(&text[pair[0]..], &text[pair[1]..]);
        }
        lcps
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lazy_static::lazy_static;

    lazy_static! {
        static ref ABRACADABRA_STR: &'static str = "abracadabra";
        static ref ABRACADABRA: &'static [u8] = b"abracadabra";
    }

    // Checks the classic "abracadabra" suffix order; the LCP expectation
    // is disabled because `from_str_very_naive` leaves `lcp` empty.
    #[test]
    fn suffix_array() {
        let sa = SuffixArray::from_str_very_naive(&*ABRACADABRA);
        assert_eq!(&sa.sa, &[10, 7, 0, 3, 5, 8, 1, 4, 6, 9, 2]);
        // assert_eq!(&sa.lcp, &[0, 1, 4, 1, 1, 0, 3, 0, 0, 0, 2]);
        // idx  sorted suffix  lcp
        // 10   a              0
        // 7    abra           1
        // 0    abracadabra    4
        // 3    acadabra       1
        // 5    adabra         1
        // 8    bra            0
        // 1    bracadabra     3
        // 4    cadabra        0
        // 6    dabra          0
        // 9    ra             0
        // 2    racadabra      2
    }
}
| true
|
a40da7f85ea504224b324a9cdbe1ad561aa81c1f
|
Rust
|
ECJ222/the-year-of-the-rustacean
|
/borrowing/src/main.rs
|
UTF-8
| 447
| 3.421875
| 3
|
[
"MIT"
] |
permissive
|
/// Demonstrates immutable vs mutable borrows.
///
/// Fix: renamed the local `newTeam` to `new_team` (non-snake-case locals
/// trigger a compiler warning).
fn main() {
    let mut team = String::from("Chelsea Football Club");
    // Immutable borrow: `to_upper` only reads the name.
    let new_team = to_upper(&team);
    // modify_borrowed(&team); // We are not allowed to modify something we have immutable reference to
    // Mutable borrow is fine here: no other borrow is live anymore.
    modify_borrowed2(&mut team);
    // Note: `team` was mutated above, so "old name" already shows the suffix.
    println!("old name = {}, new name = {}", team, new_team);
}
/// Returns an upper-cased copy of `s`.
///
/// Fix: parameter changed from `&String` to `&str` — the idiomatic borrowed
/// string type. Existing callers passing `&String` still compile via deref
/// coercion.
fn to_upper(s: &str) -> String {
    s.to_uppercase()
}
/// Appends the stadium suffix to the borrowed team name in place.
fn modify_borrowed2(s: &mut String) {
    let suffix = " , Stamford Bridge.";
    s.push_str(suffix);
}
| true
|
1cde552fd228bb80668d229ffc7d63429843e8d2
|
Rust
|
dakom/rust-bunnymark
|
/src/bunny.rs
|
UTF-8
| 1,803
| 2.984375
| 3
|
[
"MIT"
] |
permissive
|
use super::data::{Point, Area};
// Downward acceleration applied every update tick.
const START_GRAVITY:f32= 0.75;

//mimicking https://github.com/pixijs/bunny-mark/blob/master/src/Bunny.js
/// Per-bunny physics state; position itself is stored externally and
/// passed into `update` by mutable reference.
#[derive(Debug)]
pub struct Bunny {
    pub gravity: f32,
    pub speed: Point,
}
impl Bunny {
    /// Creates a bunny and its spawn position. Bunnies alternate between
    /// the bottom-left and bottom-right corners based on `count` parity.
    /// Initial speed is randomized; assumes `Point::new_random` yields
    /// components in [0, 1) — TODO confirm.
    pub fn new(count:usize, stage_size: Area, img_size: Area) -> (Self, (f32, f32)) {
        let mut speed = Point::new_random();
        speed.x *= 10.0;
        speed.y = (speed.y * 10.0) - 5.0;
        //alternate between corners
        let pos_x = match count % 2 {
            0 => 0.0f32,
            _ => (stage_size.width - img_size.width) as f32
        };
        let pos_y = (stage_size.height - img_size.height) as f32;

        let bunny = Self {
            gravity: START_GRAVITY,
            speed,
        };
        (bunny, (pos_x, pos_y))
    }

    //movement is made to match https://github.com/pixijs/bunny-mark/blob/master/src/Bunny.js
    /// Integrates one frame: applies velocity and gravity, bounces off the
    /// left/right walls, and bounces (with damping and a random upward
    /// kick) off the floor (y = 0 here; note y decreases by speed.y).
    pub fn update(&mut self, stage_size: Area, img_size:Area, pos_x: &mut f32, pos_y: &mut f32) {
        *pos_x += self.speed.x;
        *pos_y -= self.speed.y;
        self.speed.y += self.gravity;

        // Horizontal walls: reflect the x velocity and clamp.
        let bounds_right = (stage_size.width - img_size.width) as f32;
        if *pos_x > bounds_right {
            self.speed.x *= -1.0;
            *pos_x = bounds_right;
        } else if *pos_x < 0.0 {
            self.speed.x *= -1.0;
            *pos_x = 0.0
        }

        let bounds_top = (stage_size.height - img_size.height) as f32;
        if *pos_y < 0.0 {
            // Floor bounce: damped reflection plus a 50% chance of an
            // extra random upward kick (matches the pixi bunnymark).
            self.speed.y *= -0.85;
            *pos_y = 0.0;
            if js_sys::Math::random() > 0.5 {
                self.speed.y -= (js_sys::Math::random() as f32) * 6.0;
            }
        } else if *pos_y > bounds_top {
            // Ceiling: stop vertical motion and clamp.
            self.speed.y = 0.0;
            *pos_y = bounds_top;
        }
    }
}
| true
|
50f8738d63513b625018daa6acfa56477fad9e12
|
Rust
|
embed-rs/stm32f7-discovery-rewrite
|
/src/lcd/stdout.rs
|
UTF-8
| 1,463
| 2.65625
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use super::{FramebufferAl88, Layer, TextWriter};
use core::fmt;
use cortex_m::interrupt;
use spin::Mutex;
// Global writer slot; `None` until `init` installs an LCD layer.
static STDOUT: Stdout = Stdout(Mutex::new(None));

/// Spinlock-protected optional text writer targeting the LCD framebuffer.
struct Stdout<'a>(Mutex<Option<TextWriter<'a, FramebufferAl88>>>);

impl<'a> Stdout<'a> {
    /// Runs `f` on the slot inside a critical section (interrupts
    /// disabled), so an interrupt cannot deadlock on the spinlock.
    fn with(&self, f: impl FnOnce(&mut Option<TextWriter<'a, FramebufferAl88>>)) {
        interrupt::free(|_| f(&mut self.0.lock()))
    }
}
/// Installs `layer` as the global stdout target.
///
/// The layer is moved into a function-local `static mut` so the reference
/// handed to the text writer lives for `'static`.
/// NOTE(review): a second call keeps the *first* layer
/// (`get_or_insert_with` only inserts when empty) — presumably
/// intentional; confirm before relying on re-initialization.
pub fn init(layer: Layer<FramebufferAl88>) {
    static mut LAYER: Option<Layer<FramebufferAl88>> = None;
    STDOUT.with(|stdout| {
        let layer = unsafe { LAYER.get_or_insert_with(|| layer) };
        *stdout = Some(layer.text_writer());
    });
}
/// `println!` replacement that writes to the LCD: `print!` plus a newline.
#[macro_export]
macro_rules! println {
    ($fmt:expr) => (print!(concat!($fmt, "\n")));
    ($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n"), $($arg)*));
}

/// `print!` replacement that formats into the LCD stdout writer.
#[macro_export]
macro_rules! print {
    ($($arg:tt)*) => ({
        $crate::lcd::stdout::print(format_args!($($arg)*));
    });
}
/// Writes formatted text to the LCD writer.
///
/// Panics if `init` has not been called. The panic is deliberately raised
/// *after* the closure returns so it does not fire while the stdout lock
/// is held inside the interrupt-free section.
pub fn print(args: fmt::Arguments) {
    use core::fmt::Write;
    let mut missing = false;
    STDOUT.with(|writer| match *writer {
        Some(ref mut w) => w.write_fmt(args).unwrap(),
        None => missing = true,
    });
    if missing {
        panic!("stdout uninitialized")
    }
}
/// Reports whether `init` has installed a writer yet.
pub fn is_initialized() -> bool {
    let mut ready = false;
    STDOUT.with(|writer| ready = writer.is_some());
    ready
}
| true
|
9de5afd525ae74c3690c606c455164a44a1422eb
|
Rust
|
jtescher/rust-postgres
|
/codegen/src/type_gen.rs
|
UTF-8
| 6,204
| 2.703125
| 3
|
[
"MIT"
] |
permissive
|
use regex::Regex;
use std::ascii::AsciiExt;
use std::collections::BTreeMap;
use std::fs::File;
use std::io::{Write, BufWriter};
use std::path::Path;
use marksman_escape::Escape;
use snake_to_camel;
// Bundled PostgreSQL catalog headers the generator parses.
const PG_TYPE_H: &'static str = include_str!("pg_type.h");
const PG_RANGE_H: &'static str = include_str!("pg_range.h");

/// One built-in Postgres type extracted from pg_type.h.
struct Type {
    // Catalog name (e.g. "int4", "_text").
    name: &'static str,
    // Rust enum variant name derived from `name`.
    variant: String,
    // typtype column ("b", "P", "A", "R", ...).
    kind: &'static str,
    // Element/subtype oid for arrays and ranges; 0 otherwise.
    element: u32,
    // HTML-escaped rustdoc text.
    doc: String,
}
/// Generates `types/type_gen.rs` under `path` from the bundled
/// pg_type.h / pg_range.h headers.
pub fn build(path: &Path) {
    let out_path = path.join("types/type_gen.rs");
    let mut out = BufWriter::new(File::create(out_path).unwrap());

    let ranges = parse_ranges();
    let types = parse_types(&ranges);

    make_header(&mut out);
    make_enum(&mut out, &types);
    make_display_impl(&mut out);
    make_impl(&mut out, &types);
}
/// Extracts `(range oid -> subtype oid)` pairs from the DATA lines of
/// pg_range.h.
fn parse_ranges() -> BTreeMap<u32, u32> {
    let mut ranges = BTreeMap::new();
    for line in PG_RANGE_H.lines().filter(|l| l.starts_with("DATA")) {
        let fields = line.split_whitespace().collect::<Vec<_>>();
        let oid: u32 = fields[2].parse().unwrap();
        let subtype: u32 = fields[3].parse().unwrap();
        ranges.insert(oid, subtype);
    }
    ranges
}
/// Parses the DATA lines of pg_type.h into `oid -> Type` entries,
/// resolving range subtypes through `ranges`.
fn parse_types(ranges: &BTreeMap<u32, u32>) -> BTreeMap<u32, Type> {
    // DESCR("...") lines following a DATA line carry the human doc text.
    let doc_re = Regex::new(r#"DESCR\("([^"]+)"\)"#).unwrap();
    let range_vector_re = Regex::new("(range|vector)$").unwrap();
    let array_re = Regex::new("^_(.*)").unwrap();

    let mut types = BTreeMap::new();

    let mut lines = PG_TYPE_H.lines().peekable();
    while let Some(line) = lines.next() {
        if !line.starts_with("DATA") {
            continue;
        }

        let split = line.split_whitespace().collect::<Vec<_>>();

        let oid = split[3].parse().unwrap();

        let name = split[5];
        // Build the Rust variant name: "_foo" (array) becomes Foo_array
        // before camel-casing; "...range"/"...vector" get an underscore so
        // snake_to_camel capitalizes the suffix.
        let variant = match name {
            "anyarray" => "AnyArray".to_owned(),
            name => {
                let variant = range_vector_re.replace(name, "_$1");
                let variant = array_re.replace(&variant, "$1_array");
                snake_to_camel(&variant)
            }
        };

        let kind = split[11];

        // we need to be able to pull composite fields and enum variants at runtime
        if kind == "C" || kind == "E" {
            continue;
        }

        // Ranges get their element oid from pg_range.h; everything else
        // from the typelem column.
        let element = if let Some(&element) = ranges.get(&oid) {
            element
        } else {
            split[16].parse().unwrap()
        };

        let doc = array_re.replace(name, "$1[]");
        let mut doc = doc.to_ascii_uppercase();

        let descr = lines.peek()
            .and_then(|line| doc_re.captures(line))
            .and_then(|captures| captures.at(1));
        if let Some(descr) = descr {
            doc.push_str(" - ");
            doc.push_str(descr);
        }
        // HTML-escape since the text lands in generated rustdoc comments.
        let doc = Escape::new(doc.as_bytes().iter().cloned()).collect();
        let doc = String::from_utf8(doc).unwrap();

        let type_ = Type {
            name: name,
            variant: variant,
            kind: kind,
            element: element,
            doc: doc,
        };
        types.insert(oid, type_);
    }

    types
}
/// Emits the generated file's preamble (warning comment + imports).
fn make_header(w: &mut BufWriter<File>) {
    write!(w,
"// Autogenerated file - DO NOT EDIT
use std::fmt;
use types::{{Oid, Kind, Other}};
"
    ).unwrap();
}
/// Emits the `Type` enum: one documented variant per built-in type plus
/// the `Other` escape hatch for unknown oids.
fn make_enum(w: &mut BufWriter<File>, types: &BTreeMap<u32, Type>) {
    write!(w,
"/// A Postgres type.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum Type {{
"
    ).unwrap();

    for type_ in types.values() {
        write!(w,
"    /// {}
    {},
"
        , type_.doc, type_.variant).unwrap();
    }

    write!(w,
r"    /// An unknown type.
    Other(Other),
}}
" ).unwrap();
}
/// Emits `impl fmt::Display for Type` (schema-qualified name, with the
/// default schemas elided).
fn make_display_impl(w: &mut BufWriter<File>) {
    write!(w,
r#"impl fmt::Display for Type {{
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {{
        match self.schema() {{
            "public" | "pg_catalog" => {{}}
            schema => write!(fmt, "{{}}.", schema)?,
        }}
        fmt.write_str(self.name())
    }}
}}
"#,
    ).unwrap();
}
/// Emits the inherent impl: oid <-> Type mapping plus `kind`, `schema`
/// and `name` accessors.
fn make_impl(w: &mut BufWriter<File>, types: &BTreeMap<u32, Type>) {
    write!(w,
"impl Type {{
    /// Returns the `Type` corresponding to the provided `Oid` if it
    /// corresponds to a built-in type.
    pub fn from_oid(oid: Oid) -> Option<Type> {{
        match oid {{
",
    ).unwrap();

    for (oid, type_) in types {
        write!(w,
"            {} => Some(Type::{}),
",
        oid, type_.variant).unwrap();
    }

    write!(w,
"            _ => None,
        }}
    }}

    /// Returns the OID of the `Type`.
    pub fn oid(&self) -> Oid {{
        match *self {{
",
    ).unwrap();

    for (oid, type_) in types {
        write!(w,
"            Type::{} => {},
",
        type_.variant, oid).unwrap();
    }

    write!(w,
"            Type::Other(ref u) => u.oid(),
        }}
    }}

    /// Returns the kind of this type.
    pub fn kind(&self) -> &Kind {{
        match *self {{
",
    ).unwrap();

    for type_ in types.values() {
        // Map the catalog typtype to the generated Kind expression;
        // arrays and ranges reference their element type's variant.
        let kind = match type_.kind {
            "P" => "Pseudo".to_owned(),
            "A" => format!("Array(Type::{})", types[&type_.element].variant),
            "R" => format!("Range(Type::{})", types[&type_.element].variant),
            _ => "Simple".to_owned(),
        };

        write!(w,
"            Type::{} => {{
                const V: &'static Kind = &Kind::{};
                V
            }}
",
        type_.variant, kind).unwrap();
    }

    write!(w,
r#"            Type::Other(ref u) => u.kind(),
        }}
    }}

    /// Returns the schema of this type.
    pub fn schema(&self) -> &str {{
        match *self {{
            Type::Other(ref u) => u.schema(),
            _ => "pg_catalog",
        }}
    }}

    /// Returns the name of this type.
    pub fn name(&self) -> &str {{
        match *self {{
"#,
    ).unwrap();

    for type_ in types.values() {
        write!(w,
r#"            Type::{} => "{}",
"#,
        type_.variant, type_.name).unwrap();
    }

    write!(w,
"            Type::Other(ref u) => u.name(),
        }}
    }}
}}
"
    ).unwrap();
}
| true
|
63e31e1b5203d649b819d9d305fc6e433430cdd3
|
Rust
|
prangel-git/genetic
|
/src/algorithm/utils.rs
|
UTF-8
| 3,010
| 3.15625
| 3
|
[] |
no_license
|
use crate::genetic::Genetic;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::hash::Hash;
use std::rc::Rc;
use rand::distributions::WeightedIndex;
use rand::prelude::*;
use super::GenotypeToFitness;
/// Pads `initial_population` with random individuals until it reaches
/// `max_population`; smaller inputs are completed, larger ones kept as-is.
pub(super) fn initial_population_make<T>(
    initial_population: &Vec<Rc<T>>,
    max_population: usize,
) -> Vec<Rc<T>>
where
    T: Genetic,
{
    let mut population = initial_population.clone();
    while population.len() < max_population {
        population.push(Rc::new(T::new_random()));
    }
    population
}
/// Builds a fitness-proportionate sampling distribution over `population`,
/// memoizing fitness evaluations in `cache`.
pub(super) fn fitness_proportion_distribution<T>(
    population: &Vec<Rc<T>>,
    fitness: &Box<dyn Fn(&T) -> f64>,
    cache: &mut HashMap<Rc<T>, f64>,
) -> WeightedIndex<f64>
where
    T: Genetic + Hash + Eq,
{
    let weights: Vec<f64> = population
        .iter()
        .map(|member| calc_fitness(member, fitness, cache))
        .collect();
    WeightedIndex::new(weights).unwrap()
}
/// Scores each individual by round-robin match results (winner gains what
/// the loser loses), clamping final scores at zero.
pub(super) fn tournament_wins<T>(
    population: &Vec<Rc<T>>,
    matching: &Box<dyn Fn(&T, &T) -> f64>,
) -> Vec<f64>
where
    T: Genetic + Hash + Eq,
{
    let n = population.len();
    let mut scores = vec![0f64; n];
    for a in 0..n {
        for b in 0..n {
            let outcome = matching(&population[a], &population[b]);
            scores[a] += outcome;
            scores[b] -= outcome;
        }
    }
    scores.iter().map(|score| score.max(0f64)).collect()
}
/// Produces `offspring_len` children by repeatedly sampling two parents
/// from `dist` (roulette-wheel selection) and breeding them.
pub(super) fn roulette_wheel_selection<T>(
    population: &Vec<Rc<T>>,
    dist: &WeightedIndex<f64>,
    offspring_len: usize,
    mutation_rate: f64,
    co_rate: f64,
) -> Vec<Rc<T>>
where
    T: Genetic,
{
    let mut rng = thread_rng();
    // Map over a range (same RNG draw order as the original push loop).
    (0..offspring_len)
        .map(|_| {
            let mother = &population[dist.sample(&mut rng)];
            let father = &population[dist.sample(&mut rng)];
            reproduction(mother, father, mutation_rate, co_rate)
        })
        .collect()
}
/// Produces one child: cross-over of the two parents followed by mutation.
fn reproduction<T>(parent_a: &Rc<T>, parent_b: &Rc<T>, mutation_rate: f64, co_rate: f64) -> Rc<T>
where
    T: Genetic,
{
    let child = parent_a
        .cross_over(parent_b, co_rate)
        .mutation(mutation_rate);
    Rc::new(child)
}
/// Calculates fitness for `element`, reading from / writing to the cache
/// so each genotype is evaluated at most once.
fn calc_fitness<T>(
    element: &Rc<T>,
    fitness: &Box<dyn Fn(&T) -> f64>,
    cache: &mut GenotypeToFitness<T>,
) -> f64
where
    T: Genetic + Hash + Eq,
{
    match cache.entry(element.clone()) {
        Entry::Occupied(slot) => *slot.get(),
        Entry::Vacant(slot) => *slot.insert(fitness(element)),
    }
}
| true
|
3c9967d87b625c9c5edbf3c23ae90a392fa28b4a
|
Rust
|
roberts-ivanovs/itf-ctf-2021
|
/actix-backend/src/ctf/models/namebuilder.rs
|
UTF-8
| 2,592
| 2.75
| 3
|
[] |
no_license
|
use regex::Regex;
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use crate::{how::Error, state::AppState};
type SqlID = u64;
/// One row of the `nouns` / `adjactives` word lookup tables.
#[derive(FromRow, Serialize, Deserialize, Clone, Debug)]
pub struct Namebuilder {
    pub id: SqlID,
    pub name: String,
}
/// Generates a human-readable display name derived from the current
/// user count (deterministic for a given count).
#[async_trait]
pub trait INameBuilder {
    async fn get_unique_name(&self) -> sqlx::Result<String>;
}
/// Helper row type for `SELECT COUNT(*)` queries.
#[derive(FromRow)]
struct MaxId {
    count: i64,
}
#[async_trait]
impl INameBuilder for AppState {
    /// Builds "<adjectives> <noun> |<user-count>".
    ///
    /// The noun index is `user_count % noun_count`; adjectives are picked
    /// by repeatedly taking `remaining % adjective_count` until it reaches
    /// zero, so the whole name is a pure function of the user count.
    /// NOTE(review): the fallback literal "Anonynmous" is misspelled, but
    /// it is runtime output — confirm before correcting.
    async fn get_unique_name(&self) -> sqlx::Result<String> {
        // Get noun
        let user_count = sqlx::query_as!(
            MaxId,
            r#"
SELECT COUNT(*) as count
FROM users
ORDER BY id
        "#,
        )
        .fetch_one(&self.sql)
        .await?;
        let noun_count = sqlx::query_as!(
            MaxId,
            r#"
SELECT COUNT(*) as count
FROM nouns
ORDER BY id
        "#,
        )
        .fetch_one(&self.sql)
        .await?;
        let next_noun_id = user_count.count % noun_count.count;
        let noun = sqlx::query_as!(
            Namebuilder,
            r#"
SELECT*
FROM nouns
WHERE id = ?
ORDER BY id
        "#,
            next_noun_id
        )
        .fetch_one(&self.sql)
        .await
        // Fall back to a placeholder row if the lookup misses.
        .unwrap_or(Namebuilder {
            id: 0,
            name: "Anonynmous".to_owned(),
        })
        .name;

        // Generate adjactives
        let adjactive_count = sqlx::query_as!(
            MaxId,
            r#"
SELECT COUNT(*) as count
FROM adjactives
ORDER BY id
        "#,
        )
        .fetch_one(&self.sql)
        .await?;
        let mut adjacives = String::new();
        let mut ucnt = user_count.count;
        let mut next_adjactive_id = ucnt % adjactive_count.count;
        // Walk the residues down to zero, appending one adjective per step.
        while next_adjactive_id > 0 {
            let adjactive = sqlx::query_as!(
                Namebuilder,
                r#"
SELECT*
FROM adjactives
WHERE id = ?
ORDER BY id
        "#,
                next_adjactive_id
            )
            .fetch_one(&self.sql)
            .await
            .unwrap_or(Namebuilder {
                id: 0,
                name: "Anonynmous".to_owned(),
            });
            ucnt -= next_adjactive_id;
            next_adjactive_id = ucnt % adjactive_count.count;
            adjacives += " ";
            adjacives += &adjactive.name;
        }
        Ok(format!("{} {} |{}", adjacives, noun, user_count.count))
    }
}
| true
|
72aaa5c7fa2302bb9c90fcb1290cb2946c3c94d4
|
Rust
|
marco-c/gecko-dev-wordified-and-comments-removed
|
/third_party/rust/chrono/src/format/parse.rs
|
UTF-8
| 33,568
| 2.5625
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
#
!
[
allow
(
deprecated
)
]
use
core
:
:
borrow
:
:
Borrow
;
use
core
:
:
str
;
use
core
:
:
usize
;
use
super
:
:
scan
;
use
super
:
:
{
Fixed
InternalFixed
InternalInternal
Item
Numeric
Pad
Parsed
}
;
use
super
:
:
{
ParseError
ParseErrorKind
ParseResult
}
;
use
super
:
:
{
BAD_FORMAT
INVALID
NOT_ENOUGH
OUT_OF_RANGE
TOO_LONG
TOO_SHORT
}
;
use
{
DateTime
FixedOffset
Weekday
}
;
fn
set_weekday_with_num_days_from_sunday
(
p
:
&
mut
Parsed
v
:
i64
)
-
>
ParseResult
<
(
)
>
{
p
.
set_weekday
(
match
v
{
0
=
>
Weekday
:
:
Sun
1
=
>
Weekday
:
:
Mon
2
=
>
Weekday
:
:
Tue
3
=
>
Weekday
:
:
Wed
4
=
>
Weekday
:
:
Thu
5
=
>
Weekday
:
:
Fri
6
=
>
Weekday
:
:
Sat
_
=
>
return
Err
(
OUT_OF_RANGE
)
}
)
}
fn
set_weekday_with_number_from_monday
(
p
:
&
mut
Parsed
v
:
i64
)
-
>
ParseResult
<
(
)
>
{
p
.
set_weekday
(
match
v
{
1
=
>
Weekday
:
:
Mon
2
=
>
Weekday
:
:
Tue
3
=
>
Weekday
:
:
Wed
4
=
>
Weekday
:
:
Thu
5
=
>
Weekday
:
:
Fri
6
=
>
Weekday
:
:
Sat
7
=
>
Weekday
:
:
Sun
_
=
>
return
Err
(
OUT_OF_RANGE
)
}
)
}
fn
parse_rfc2822
<
'
a
>
(
parsed
:
&
mut
Parsed
mut
s
:
&
'
a
str
)
-
>
ParseResult
<
(
&
'
a
str
(
)
)
>
{
macro_rules
!
try_consume
{
(
e
:
expr
)
=
>
{
{
let
(
s_
v
)
=
e
?
;
s
=
s_
;
v
}
}
;
}
s
=
s
.
trim_left
(
)
;
if
let
Ok
(
(
s_
weekday
)
)
=
scan
:
:
short_weekday
(
s
)
{
if
!
s_
.
starts_with
(
'
'
)
{
return
Err
(
INVALID
)
;
}
s
=
&
s_
[
1
.
.
]
;
parsed
.
set_weekday
(
weekday
)
?
;
}
s
=
s
.
trim_left
(
)
;
parsed
.
set_day
(
try_consume
!
(
scan
:
:
number
(
s
1
2
)
)
)
?
;
s
=
scan
:
:
space
(
s
)
?
;
parsed
.
set_month
(
1
+
i64
:
:
from
(
try_consume
!
(
scan
:
:
short_month0
(
s
)
)
)
)
?
;
s
=
scan
:
:
space
(
s
)
?
;
let
prevlen
=
s
.
len
(
)
;
let
mut
year
=
try_consume
!
(
scan
:
:
number
(
s
2
usize
:
:
MAX
)
)
;
let
yearlen
=
prevlen
-
s
.
len
(
)
;
match
(
yearlen
year
)
{
(
2
0
.
.
.
49
)
=
>
{
year
+
=
2000
;
}
(
2
50
.
.
.
99
)
=
>
{
year
+
=
1900
;
}
(
3
_
)
=
>
{
year
+
=
1900
;
}
(
_
_
)
=
>
{
}
}
parsed
.
set_year
(
year
)
?
;
s
=
scan
:
:
space
(
s
)
?
;
parsed
.
set_hour
(
try_consume
!
(
scan
:
:
number
(
s
2
2
)
)
)
?
;
s
=
scan
:
:
char
(
s
.
trim_left
(
)
b
'
:
'
)
?
.
trim_left
(
)
;
parsed
.
set_minute
(
try_consume
!
(
scan
:
:
number
(
s
2
2
)
)
)
?
;
if
let
Ok
(
s_
)
=
scan
:
:
char
(
s
.
trim_left
(
)
b
'
:
'
)
{
parsed
.
set_second
(
try_consume
!
(
scan
:
:
number
(
s_
2
2
)
)
)
?
;
}
s
=
scan
:
:
space
(
s
)
?
;
if
let
Some
(
offset
)
=
try_consume
!
(
scan
:
:
timezone_offset_2822
(
s
)
)
{
parsed
.
set_offset
(
i64
:
:
from
(
offset
)
)
?
;
}
Ok
(
(
s
(
)
)
)
}
fn
parse_rfc3339
<
'
a
>
(
parsed
:
&
mut
Parsed
mut
s
:
&
'
a
str
)
-
>
ParseResult
<
(
&
'
a
str
(
)
)
>
{
macro_rules
!
try_consume
{
(
e
:
expr
)
=
>
{
{
let
(
s_
v
)
=
e
?
;
s
=
s_
;
v
}
}
;
}
parsed
.
set_year
(
try_consume
!
(
scan
:
:
number
(
s
4
4
)
)
)
?
;
s
=
scan
:
:
char
(
s
b
'
-
'
)
?
;
parsed
.
set_month
(
try_consume
!
(
scan
:
:
number
(
s
2
2
)
)
)
?
;
s
=
scan
:
:
char
(
s
b
'
-
'
)
?
;
parsed
.
set_day
(
try_consume
!
(
scan
:
:
number
(
s
2
2
)
)
)
?
;
s
=
match
s
.
as_bytes
(
)
.
first
(
)
{
Some
(
&
b
'
t
'
)
|
Some
(
&
b
'
T
'
)
=
>
&
s
[
1
.
.
]
Some
(
_
)
=
>
return
Err
(
INVALID
)
None
=
>
return
Err
(
TOO_SHORT
)
}
;
parsed
.
set_hour
(
try_consume
!
(
scan
:
:
number
(
s
2
2
)
)
)
?
;
s
=
scan
:
:
char
(
s
b
'
:
'
)
?
;
parsed
.
set_minute
(
try_consume
!
(
scan
:
:
number
(
s
2
2
)
)
)
?
;
s
=
scan
:
:
char
(
s
b
'
:
'
)
?
;
parsed
.
set_second
(
try_consume
!
(
scan
:
:
number
(
s
2
2
)
)
)
?
;
if
s
.
starts_with
(
'
.
'
)
{
let
nanosecond
=
try_consume
!
(
scan
:
:
nanosecond
(
&
s
[
1
.
.
]
)
)
;
parsed
.
set_nanosecond
(
nanosecond
)
?
;
}
let
offset
=
try_consume
!
(
scan
:
:
timezone_offset_zulu
(
s
|
s
|
scan
:
:
char
(
s
b
'
:
'
)
)
)
;
if
offset
<
=
-
86_400
|
|
offset
>
=
86_400
{
return
Err
(
OUT_OF_RANGE
)
;
}
parsed
.
set_offset
(
i64
:
:
from
(
offset
)
)
?
;
Ok
(
(
s
(
)
)
)
}
pub
fn
parse
<
'
a
I
B
>
(
parsed
:
&
mut
Parsed
s
:
&
str
items
:
I
)
-
>
ParseResult
<
(
)
>
where
I
:
Iterator
<
Item
=
B
>
B
:
Borrow
<
Item
<
'
a
>
>
{
parse_internal
(
parsed
s
items
)
.
map
(
|
_
|
(
)
)
.
map_err
(
|
(
_s
e
)
|
e
)
}
fn
parse_internal
<
'
a
'
b
I
B
>
(
parsed
:
&
mut
Parsed
mut
s
:
&
'
b
str
items
:
I
)
-
>
Result
<
&
'
b
str
(
&
'
b
str
ParseError
)
>
where
I
:
Iterator
<
Item
=
B
>
B
:
Borrow
<
Item
<
'
a
>
>
{
macro_rules
!
try_consume
{
(
e
:
expr
)
=
>
{
{
match
e
{
Ok
(
(
s_
v
)
)
=
>
{
s
=
s_
;
v
}
Err
(
e
)
=
>
return
Err
(
(
s
e
)
)
}
}
}
;
}
for
item
in
items
{
match
*
item
.
borrow
(
)
{
Item
:
:
Literal
(
prefix
)
=
>
{
if
s
.
len
(
)
<
prefix
.
len
(
)
{
return
Err
(
(
s
TOO_SHORT
)
)
;
}
if
!
s
.
starts_with
(
prefix
)
{
return
Err
(
(
s
INVALID
)
)
;
}
s
=
&
s
[
prefix
.
len
(
)
.
.
]
;
}
#
[
cfg
(
any
(
feature
=
"
alloc
"
feature
=
"
std
"
test
)
)
]
Item
:
:
OwnedLiteral
(
ref
prefix
)
=
>
{
if
s
.
len
(
)
<
prefix
.
len
(
)
{
return
Err
(
(
s
TOO_SHORT
)
)
;
}
if
!
s
.
starts_with
(
&
prefix
[
.
.
]
)
{
return
Err
(
(
s
INVALID
)
)
;
}
s
=
&
s
[
prefix
.
len
(
)
.
.
]
;
}
Item
:
:
Space
(
_
)
=
>
{
s
=
s
.
trim_left
(
)
;
}
#
[
cfg
(
any
(
feature
=
"
alloc
"
feature
=
"
std
"
test
)
)
]
Item
:
:
OwnedSpace
(
_
)
=
>
{
s
=
s
.
trim_left
(
)
;
}
Item
:
:
Numeric
(
ref
spec
ref
_pad
)
=
>
{
use
super
:
:
Numeric
:
:
*
;
type
Setter
=
fn
(
&
mut
Parsed
i64
)
-
>
ParseResult
<
(
)
>
;
let
(
width
signed
set
)
:
(
usize
bool
Setter
)
=
match
*
spec
{
Year
=
>
(
4
true
Parsed
:
:
set_year
)
YearDiv100
=
>
(
2
false
Parsed
:
:
set_year_div_100
)
YearMod100
=
>
(
2
false
Parsed
:
:
set_year_mod_100
)
IsoYear
=
>
(
4
true
Parsed
:
:
set_isoyear
)
IsoYearDiv100
=
>
(
2
false
Parsed
:
:
set_isoyear_div_100
)
IsoYearMod100
=
>
(
2
false
Parsed
:
:
set_isoyear_mod_100
)
Month
=
>
(
2
false
Parsed
:
:
set_month
)
Day
=
>
(
2
false
Parsed
:
:
set_day
)
WeekFromSun
=
>
(
2
false
Parsed
:
:
set_week_from_sun
)
WeekFromMon
=
>
(
2
false
Parsed
:
:
set_week_from_mon
)
IsoWeek
=
>
(
2
false
Parsed
:
:
set_isoweek
)
NumDaysFromSun
=
>
(
1
false
set_weekday_with_num_days_from_sunday
)
WeekdayFromMon
=
>
(
1
false
set_weekday_with_number_from_monday
)
Ordinal
=
>
(
3
false
Parsed
:
:
set_ordinal
)
Hour
=
>
(
2
false
Parsed
:
:
set_hour
)
Hour12
=
>
(
2
false
Parsed
:
:
set_hour12
)
Minute
=
>
(
2
false
Parsed
:
:
set_minute
)
Second
=
>
(
2
false
Parsed
:
:
set_second
)
Nanosecond
=
>
(
9
false
Parsed
:
:
set_nanosecond
)
Timestamp
=
>
(
usize
:
:
MAX
false
Parsed
:
:
set_timestamp
)
Internal
(
ref
int
)
=
>
match
int
.
_dummy
{
}
}
;
s
=
s
.
trim_left
(
)
;
let
v
=
if
signed
{
if
s
.
starts_with
(
'
-
'
)
{
let
v
=
try_consume
!
(
scan
:
:
number
(
&
s
[
1
.
.
]
1
usize
:
:
MAX
)
)
;
0i64
.
checked_sub
(
v
)
.
ok_or
(
(
s
OUT_OF_RANGE
)
)
?
}
else
if
s
.
starts_with
(
'
+
'
)
{
try_consume
!
(
scan
:
:
number
(
&
s
[
1
.
.
]
1
usize
:
:
MAX
)
)
}
else
{
try_consume
!
(
scan
:
:
number
(
s
1
width
)
)
}
}
else
{
try_consume
!
(
scan
:
:
number
(
s
1
width
)
)
}
;
set
(
parsed
v
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
Item
:
:
Fixed
(
ref
spec
)
=
>
{
use
super
:
:
Fixed
:
:
*
;
match
spec
{
&
ShortMonthName
=
>
{
let
month0
=
try_consume
!
(
scan
:
:
short_month0
(
s
)
)
;
parsed
.
set_month
(
i64
:
:
from
(
month0
)
+
1
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
LongMonthName
=
>
{
let
month0
=
try_consume
!
(
scan
:
:
short_or_long_month0
(
s
)
)
;
parsed
.
set_month
(
i64
:
:
from
(
month0
)
+
1
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
ShortWeekdayName
=
>
{
let
weekday
=
try_consume
!
(
scan
:
:
short_weekday
(
s
)
)
;
parsed
.
set_weekday
(
weekday
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
LongWeekdayName
=
>
{
let
weekday
=
try_consume
!
(
scan
:
:
short_or_long_weekday
(
s
)
)
;
parsed
.
set_weekday
(
weekday
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
LowerAmPm
|
&
UpperAmPm
=
>
{
if
s
.
len
(
)
<
2
{
return
Err
(
(
s
TOO_SHORT
)
)
;
}
let
ampm
=
match
(
s
.
as_bytes
(
)
[
0
]
|
32
s
.
as_bytes
(
)
[
1
]
|
32
)
{
(
b
'
a
'
b
'
m
'
)
=
>
false
(
b
'
p
'
b
'
m
'
)
=
>
true
_
=
>
return
Err
(
(
s
INVALID
)
)
}
;
parsed
.
set_ampm
(
ampm
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
s
=
&
s
[
2
.
.
]
;
}
&
Nanosecond
|
&
Nanosecond3
|
&
Nanosecond6
|
&
Nanosecond9
=
>
{
if
s
.
starts_with
(
'
.
'
)
{
let
nano
=
try_consume
!
(
scan
:
:
nanosecond
(
&
s
[
1
.
.
]
)
)
;
parsed
.
set_nanosecond
(
nano
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
}
&
Internal
(
InternalFixed
{
val
:
InternalInternal
:
:
Nanosecond3NoDot
}
)
=
>
{
if
s
.
len
(
)
<
3
{
return
Err
(
(
s
TOO_SHORT
)
)
;
}
let
nano
=
try_consume
!
(
scan
:
:
nanosecond_fixed
(
s
3
)
)
;
parsed
.
set_nanosecond
(
nano
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
Internal
(
InternalFixed
{
val
:
InternalInternal
:
:
Nanosecond6NoDot
}
)
=
>
{
if
s
.
len
(
)
<
6
{
return
Err
(
(
s
TOO_SHORT
)
)
;
}
let
nano
=
try_consume
!
(
scan
:
:
nanosecond_fixed
(
s
6
)
)
;
parsed
.
set_nanosecond
(
nano
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
Internal
(
InternalFixed
{
val
:
InternalInternal
:
:
Nanosecond9NoDot
}
)
=
>
{
if
s
.
len
(
)
<
9
{
return
Err
(
(
s
TOO_SHORT
)
)
;
}
let
nano
=
try_consume
!
(
scan
:
:
nanosecond_fixed
(
s
9
)
)
;
parsed
.
set_nanosecond
(
nano
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
TimezoneName
=
>
{
try_consume
!
(
scan
:
:
timezone_name_skip
(
s
)
)
;
}
&
TimezoneOffsetColon
|
&
TimezoneOffset
=
>
{
let
offset
=
try_consume
!
(
scan
:
:
timezone_offset
(
s
.
trim_left
(
)
scan
:
:
colon_or_space
)
)
;
parsed
.
set_offset
(
i64
:
:
from
(
offset
)
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
TimezoneOffsetColonZ
|
&
TimezoneOffsetZ
=
>
{
let
offset
=
try_consume
!
(
scan
:
:
timezone_offset_zulu
(
s
.
trim_left
(
)
scan
:
:
colon_or_space
)
)
;
parsed
.
set_offset
(
i64
:
:
from
(
offset
)
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
Internal
(
InternalFixed
{
val
:
InternalInternal
:
:
TimezoneOffsetPermissive
}
)
=
>
{
let
offset
=
try_consume
!
(
scan
:
:
timezone_offset_permissive
(
s
.
trim_left
(
)
scan
:
:
colon_or_space
)
)
;
parsed
.
set_offset
(
i64
:
:
from
(
offset
)
)
.
map_err
(
|
e
|
(
s
e
)
)
?
;
}
&
RFC2822
=
>
try_consume
!
(
parse_rfc2822
(
parsed
s
)
)
&
RFC3339
=
>
try_consume
!
(
parse_rfc3339
(
parsed
s
)
)
}
}
Item
:
:
Error
=
>
{
return
Err
(
(
s
BAD_FORMAT
)
)
;
}
}
}
if
!
s
.
is_empty
(
)
{
Err
(
(
s
TOO_LONG
)
)
}
else
{
Ok
(
s
)
}
}
impl
str
:
:
FromStr
for
DateTime
<
FixedOffset
>
{
type
Err
=
ParseError
;
fn
from_str
(
s
:
&
str
)
-
>
ParseResult
<
DateTime
<
FixedOffset
>
>
{
const
DATE_ITEMS
:
&
'
static
[
Item
<
'
static
>
]
=
&
[
Item
:
:
Numeric
(
Numeric
:
:
Year
Pad
:
:
Zero
)
Item
:
:
Space
(
"
"
)
Item
:
:
Literal
(
"
-
"
)
Item
:
:
Numeric
(
Numeric
:
:
Month
Pad
:
:
Zero
)
Item
:
:
Space
(
"
"
)
Item
:
:
Literal
(
"
-
"
)
Item
:
:
Numeric
(
Numeric
:
:
Day
Pad
:
:
Zero
)
]
;
const
TIME_ITEMS
:
&
'
static
[
Item
<
'
static
>
]
=
&
[
Item
:
:
Numeric
(
Numeric
:
:
Hour
Pad
:
:
Zero
)
Item
:
:
Space
(
"
"
)
Item
:
:
Literal
(
"
:
"
)
Item
:
:
Numeric
(
Numeric
:
:
Minute
Pad
:
:
Zero
)
Item
:
:
Space
(
"
"
)
Item
:
:
Literal
(
"
:
"
)
Item
:
:
Numeric
(
Numeric
:
:
Second
Pad
:
:
Zero
)
Item
:
:
Fixed
(
Fixed
:
:
Nanosecond
)
Item
:
:
Space
(
"
"
)
Item
:
:
Fixed
(
Fixed
:
:
TimezoneOffsetZ
)
Item
:
:
Space
(
"
"
)
]
;
let
mut
parsed
=
Parsed
:
:
new
(
)
;
match
parse_internal
(
&
mut
parsed
s
DATE_ITEMS
.
iter
(
)
)
{
Err
(
(
remainder
e
)
)
if
e
.
0
=
=
ParseErrorKind
:
:
TooLong
=
>
{
if
remainder
.
starts_with
(
'
T
'
)
|
|
remainder
.
starts_with
(
'
'
)
{
parse
(
&
mut
parsed
&
remainder
[
1
.
.
]
TIME_ITEMS
.
iter
(
)
)
?
;
}
else
{
Err
(
INVALID
)
?
;
}
}
Err
(
(
_s
e
)
)
=
>
Err
(
e
)
?
Ok
(
_
)
=
>
Err
(
NOT_ENOUGH
)
?
}
;
parsed
.
to_datetime
(
)
}
}
#
[
cfg
(
test
)
]
#
[
test
]
fn
test_parse
(
)
{
use
super
:
:
IMPOSSIBLE
;
use
super
:
:
*
;
fn
parse_all
(
s
:
&
str
items
:
&
[
Item
]
)
-
>
ParseResult
<
Parsed
>
{
let
mut
parsed
=
Parsed
:
:
new
(
)
;
parse
(
&
mut
parsed
s
items
.
iter
(
)
)
?
;
Ok
(
parsed
)
}
macro_rules
!
check
{
(
fmt
:
expr
items
:
expr
;
err
:
tt
)
=
>
(
assert_eq
!
(
parse_all
(
fmt
&
items
)
Err
(
err
)
)
)
;
(
fmt
:
expr
items
:
expr
;
(
k
:
ident
:
v
:
expr
)
*
)
=
>
(
#
[
allow
(
unused_mut
)
]
{
let
mut
expected
=
Parsed
:
:
new
(
)
;
(
expected
.
k
=
Some
(
v
)
;
)
*
assert_eq
!
(
parse_all
(
fmt
&
items
)
Ok
(
expected
)
)
}
)
;
}
check
!
(
"
"
[
]
;
)
;
check
!
(
"
"
[
]
;
TOO_LONG
)
;
check
!
(
"
a
"
[
]
;
TOO_LONG
)
;
check
!
(
"
"
[
sp
!
(
"
"
)
]
;
)
;
check
!
(
"
"
[
sp
!
(
"
"
)
]
;
)
;
check
!
(
"
\
t
"
[
sp
!
(
"
"
)
]
;
)
;
check
!
(
"
\
n
\
r
\
n
"
[
sp
!
(
"
"
)
]
;
)
;
check
!
(
"
a
"
[
sp
!
(
"
"
)
]
;
TOO_LONG
)
;
check
!
(
"
"
[
lit
!
(
"
a
"
)
]
;
TOO_SHORT
)
;
check
!
(
"
"
[
lit
!
(
"
a
"
)
]
;
INVALID
)
;
check
!
(
"
a
"
[
lit
!
(
"
a
"
)
]
;
)
;
check
!
(
"
aa
"
[
lit
!
(
"
a
"
)
]
;
TOO_LONG
)
;
check
!
(
"
A
"
[
lit
!
(
"
a
"
)
]
;
INVALID
)
;
check
!
(
"
xy
"
[
lit
!
(
"
xy
"
)
]
;
)
;
check
!
(
"
xy
"
[
lit
!
(
"
x
"
)
lit
!
(
"
y
"
)
]
;
)
;
check
!
(
"
x
y
"
[
lit
!
(
"
x
"
)
lit
!
(
"
y
"
)
]
;
INVALID
)
;
check
!
(
"
xy
"
[
lit
!
(
"
x
"
)
sp
!
(
"
"
)
lit
!
(
"
y
"
)
]
;
)
;
check
!
(
"
x
y
"
[
lit
!
(
"
x
"
)
sp
!
(
"
"
)
lit
!
(
"
y
"
)
]
;
)
;
check
!
(
"
1987
"
[
num
!
(
Year
)
]
;
year
:
1987
)
;
check
!
(
"
1987
"
[
num
!
(
Year
)
]
;
TOO_LONG
)
;
check
!
(
"
0x12
"
[
num
!
(
Year
)
]
;
TOO_LONG
)
;
check
!
(
"
x123
"
[
num
!
(
Year
)
]
;
INVALID
)
;
check
!
(
"
2015
"
[
num
!
(
Year
)
]
;
year
:
2015
)
;
check
!
(
"
0000
"
[
num
!
(
Year
)
]
;
year
:
0
)
;
check
!
(
"
9999
"
[
num
!
(
Year
)
]
;
year
:
9999
)
;
check
!
(
"
\
t987
"
[
num
!
(
Year
)
]
;
year
:
987
)
;
check
!
(
"
5
"
[
num
!
(
Year
)
]
;
year
:
5
)
;
check
!
(
"
5
\
0
"
[
num
!
(
Year
)
]
;
TOO_LONG
)
;
check
!
(
"
\
05
"
[
num
!
(
Year
)
]
;
INVALID
)
;
check
!
(
"
"
[
num
!
(
Year
)
]
;
TOO_SHORT
)
;
check
!
(
"
12345
"
[
num
!
(
Year
)
lit
!
(
"
5
"
)
]
;
year
:
1234
)
;
check
!
(
"
12345
"
[
nums
!
(
Year
)
lit
!
(
"
5
"
)
]
;
year
:
1234
)
;
check
!
(
"
12345
"
[
num0
!
(
Year
)
lit
!
(
"
5
"
)
]
;
year
:
1234
)
;
check
!
(
"
12341234
"
[
num
!
(
Year
)
num
!
(
Year
)
]
;
year
:
1234
)
;
check
!
(
"
1234
1234
"
[
num
!
(
Year
)
num
!
(
Year
)
]
;
year
:
1234
)
;
check
!
(
"
1234
1235
"
[
num
!
(
Year
)
num
!
(
Year
)
]
;
IMPOSSIBLE
)
;
check
!
(
"
1234
1234
"
[
num
!
(
Year
)
lit
!
(
"
x
"
)
num
!
(
Year
)
]
;
INVALID
)
;
check
!
(
"
1234x1234
"
[
num
!
(
Year
)
lit
!
(
"
x
"
)
num
!
(
Year
)
]
;
year
:
1234
)
;
check
!
(
"
1234xx1234
"
[
num
!
(
Year
)
lit
!
(
"
x
"
)
num
!
(
Year
)
]
;
INVALID
)
;
check
!
(
"
1234
x
1234
"
[
num
!
(
Year
)
lit
!
(
"
x
"
)
num
!
(
Year
)
]
;
INVALID
)
;
check
!
(
"
-
42
"
[
num
!
(
Year
)
]
;
year
:
-
42
)
;
check
!
(
"
+
42
"
[
num
!
(
Year
)
]
;
year
:
42
)
;
check
!
(
"
-
0042
"
[
num
!
(
Year
)
]
;
year
:
-
42
)
;
check
!
(
"
+
0042
"
[
num
!
(
Year
)
]
;
year
:
42
)
;
check
!
(
"
-
42195
"
[
num
!
(
Year
)
]
;
year
:
-
42195
)
;
check
!
(
"
+
42195
"
[
num
!
(
Year
)
]
;
year
:
42195
)
;
check
!
(
"
-
42195
"
[
num
!
(
Year
)
]
;
year
:
-
42195
)
;
check
!
(
"
+
42195
"
[
num
!
(
Year
)
]
;
year
:
42195
)
;
check
!
(
"
-
42
"
[
num
!
(
Year
)
]
;
INVALID
)
;
check
!
(
"
+
42
"
[
num
!
(
Year
)
]
;
INVALID
)
;
check
!
(
"
-
"
[
num
!
(
Year
)
]
;
TOO_SHORT
)
;
check
!
(
"
+
"
[
num
!
(
Year
)
]
;
TOO_SHORT
)
;
check
!
(
"
345
"
[
num
!
(
Ordinal
)
]
;
ordinal
:
345
)
;
check
!
(
"
+
345
"
[
num
!
(
Ordinal
)
]
;
INVALID
)
;
check
!
(
"
-
345
"
[
num
!
(
Ordinal
)
]
;
INVALID
)
;
check
!
(
"
345
"
[
num
!
(
Ordinal
)
]
;
ordinal
:
345
)
;
check
!
(
"
+
345
"
[
num
!
(
Ordinal
)
]
;
INVALID
)
;
check
!
(
"
-
345
"
[
num
!
(
Ordinal
)
]
;
INVALID
)
;
check
!
(
"
1234
5678
"
[
num
!
(
Year
)
num
!
(
IsoYear
)
]
;
year
:
1234
isoyear
:
5678
)
;
check
!
(
"
12
34
56
78
"
[
num
!
(
YearDiv100
)
num
!
(
YearMod100
)
num
!
(
IsoYearDiv100
)
num
!
(
IsoYearMod100
)
]
;
year_div_100
:
12
year_mod_100
:
34
isoyear_div_100
:
56
isoyear_mod_100
:
78
)
;
check
!
(
"
1
2
3
4
5
6
"
[
num
!
(
Month
)
num
!
(
Day
)
num
!
(
WeekFromSun
)
num
!
(
WeekFromMon
)
num
!
(
IsoWeek
)
num
!
(
NumDaysFromSun
)
]
;
month
:
1
day
:
2
week_from_sun
:
3
week_from_mon
:
4
isoweek
:
5
weekday
:
Weekday
:
:
Sat
)
;
check
!
(
"
7
89
01
"
[
num
!
(
WeekdayFromMon
)
num
!
(
Ordinal
)
num
!
(
Hour12
)
]
;
weekday
:
Weekday
:
:
Sun
ordinal
:
89
hour_mod_12
:
1
)
;
check
!
(
"
23
45
6
78901234
567890123
"
[
num
!
(
Hour
)
num
!
(
Minute
)
num
!
(
Second
)
num
!
(
Nanosecond
)
num
!
(
Timestamp
)
]
;
hour_div_12
:
1
hour_mod_12
:
11
minute
:
45
second
:
6
nanosecond
:
78_901_234
timestamp
:
567_890_123
)
;
check
!
(
"
apr
"
[
fix
!
(
ShortMonthName
)
]
;
month
:
4
)
;
check
!
(
"
Apr
"
[
fix
!
(
ShortMonthName
)
]
;
month
:
4
)
;
check
!
(
"
APR
"
[
fix
!
(
ShortMonthName
)
]
;
month
:
4
)
;
check
!
(
"
ApR
"
[
fix
!
(
ShortMonthName
)
]
;
month
:
4
)
;
check
!
(
"
April
"
[
fix
!
(
ShortMonthName
)
]
;
TOO_LONG
)
;
check
!
(
"
A
"
[
fix
!
(
ShortMonthName
)
]
;
TOO_SHORT
)
;
check
!
(
"
Sol
"
[
fix
!
(
ShortMonthName
)
]
;
INVALID
)
;
check
!
(
"
Apr
"
[
fix
!
(
LongMonthName
)
]
;
month
:
4
)
;
check
!
(
"
Apri
"
[
fix
!
(
LongMonthName
)
]
;
TOO_LONG
)
;
check
!
(
"
April
"
[
fix
!
(
LongMonthName
)
]
;
month
:
4
)
;
check
!
(
"
Aprill
"
[
fix
!
(
LongMonthName
)
]
;
TOO_LONG
)
;
check
!
(
"
Aprill
"
[
fix
!
(
LongMonthName
)
lit
!
(
"
l
"
)
]
;
month
:
4
)
;
check
!
(
"
Aprl
"
[
fix
!
(
LongMonthName
)
lit
!
(
"
l
"
)
]
;
month
:
4
)
;
check
!
(
"
April
"
[
fix
!
(
LongMonthName
)
lit
!
(
"
il
"
)
]
;
TOO_SHORT
)
;
check
!
(
"
thu
"
[
fix
!
(
ShortWeekdayName
)
]
;
weekday
:
Weekday
:
:
Thu
)
;
check
!
(
"
Thu
"
[
fix
!
(
ShortWeekdayName
)
]
;
weekday
:
Weekday
:
:
Thu
)
;
check
!
(
"
THU
"
[
fix
!
(
ShortWeekdayName
)
]
;
weekday
:
Weekday
:
:
Thu
)
;
check
!
(
"
tHu
"
[
fix
!
(
ShortWeekdayName
)
]
;
weekday
:
Weekday
:
:
Thu
)
;
check
!
(
"
Thursday
"
[
fix
!
(
ShortWeekdayName
)
]
;
TOO_LONG
)
;
check
!
(
"
T
"
[
fix
!
(
ShortWeekdayName
)
]
;
TOO_SHORT
)
;
check
!
(
"
The
"
[
fix
!
(
ShortWeekdayName
)
]
;
INVALID
)
;
check
!
(
"
Nop
"
[
fix
!
(
ShortWeekdayName
)
]
;
INVALID
)
;
check
!
(
"
Thu
"
[
fix
!
(
LongWeekdayName
)
]
;
weekday
:
Weekday
:
:
Thu
)
;
check
!
(
"
Thur
"
[
fix
!
(
LongWeekdayName
)
]
;
TOO_LONG
)
;
check
!
(
"
Thurs
"
[
fix
!
(
LongWeekdayName
)
]
;
TOO_LONG
)
;
check
!
(
"
Thursday
"
[
fix
!
(
LongWeekdayName
)
]
;
weekday
:
Weekday
:
:
Thu
)
;
check
!
(
"
Thursdays
"
[
fix
!
(
LongWeekdayName
)
]
;
TOO_LONG
)
;
check
!
(
"
Thursdays
"
[
fix
!
(
LongWeekdayName
)
lit
!
(
"
s
"
)
]
;
weekday
:
Weekday
:
:
Thu
)
;
check
!
(
"
Thus
"
[
fix
!
(
LongWeekdayName
)
lit
!
(
"
s
"
)
]
;
weekday
:
Weekday
:
:
Thu
)
;
check
!
(
"
Thursday
"
[
fix
!
(
LongWeekdayName
)
lit
!
(
"
rsday
"
)
]
;
TOO_SHORT
)
;
check
!
(
"
am
"
[
fix
!
(
LowerAmPm
)
]
;
hour_div_12
:
0
)
;
check
!
(
"
pm
"
[
fix
!
(
LowerAmPm
)
]
;
hour_div_12
:
1
)
;
check
!
(
"
AM
"
[
fix
!
(
LowerAmPm
)
]
;
hour_div_12
:
0
)
;
check
!
(
"
PM
"
[
fix
!
(
LowerAmPm
)
]
;
hour_div_12
:
1
)
;
check
!
(
"
am
"
[
fix
!
(
UpperAmPm
)
]
;
hour_div_12
:
0
)
;
check
!
(
"
pm
"
[
fix
!
(
UpperAmPm
)
]
;
hour_div_12
:
1
)
;
check
!
(
"
AM
"
[
fix
!
(
UpperAmPm
)
]
;
hour_div_12
:
0
)
;
check
!
(
"
PM
"
[
fix
!
(
UpperAmPm
)
]
;
hour_div_12
:
1
)
;
check
!
(
"
Am
"
[
fix
!
(
LowerAmPm
)
]
;
hour_div_12
:
0
)
;
check
!
(
"
Am
"
[
fix
!
(
LowerAmPm
)
]
;
INVALID
)
;
check
!
(
"
ame
"
[
fix
!
(
LowerAmPm
)
]
;
TOO_LONG
)
;
check
!
(
"
a
"
[
fix
!
(
LowerAmPm
)
]
;
TOO_SHORT
)
;
check
!
(
"
p
"
[
fix
!
(
LowerAmPm
)
]
;
TOO_SHORT
)
;
check
!
(
"
x
"
[
fix
!
(
LowerAmPm
)
]
;
TOO_SHORT
)
;
check
!
(
"
xx
"
[
fix
!
(
LowerAmPm
)
]
;
INVALID
)
;
check
!
(
"
"
[
fix
!
(
LowerAmPm
)
]
;
TOO_SHORT
)
;
check
!
(
"
"
[
fix
!
(
Nanosecond
)
]
;
)
;
check
!
(
"
4
"
[
fix
!
(
Nanosecond
)
]
;
TOO_LONG
)
;
check
!
(
"
4
"
[
fix
!
(
Nanosecond
)
num
!
(
Second
)
]
;
second
:
4
)
;
check
!
(
"
.
0
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
0
)
;
check
!
(
"
.
4
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
400_000_000
)
;
check
!
(
"
.
42
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
420_000_000
)
;
check
!
(
"
.
421
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
421_000_000
)
;
check
!
(
"
.
42195
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
421_950_000
)
;
check
!
(
"
.
421950803
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
421_950_803
)
;
check
!
(
"
.
421950803547
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
421_950_803
)
;
check
!
(
"
.
000000003547
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
3
)
;
check
!
(
"
.
000000000547
"
[
fix
!
(
Nanosecond
)
]
;
nanosecond
:
0
)
;
check
!
(
"
.
"
[
fix
!
(
Nanosecond
)
]
;
TOO_SHORT
)
;
check
!
(
"
.
4x
"
[
fix
!
(
Nanosecond
)
]
;
TOO_LONG
)
;
check
!
(
"
.
4
"
[
fix
!
(
Nanosecond
)
]
;
INVALID
)
;
check
!
(
"
.
4
"
[
fix
!
(
Nanosecond
)
]
;
TOO_LONG
)
;
check
!
(
"
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
0
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
4
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
42
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
421
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
nanosecond
:
421_000_000
)
;
check
!
(
"
42143
"
[
internal_fix
!
(
Nanosecond3NoDot
)
num
!
(
Second
)
]
;
nanosecond
:
421_000_000
second
:
43
)
;
check
!
(
"
42195
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
TOO_LONG
)
;
check
!
(
"
4x
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
4
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
INVALID
)
;
check
!
(
"
.
421
"
[
internal_fix
!
(
Nanosecond3NoDot
)
]
;
INVALID
)
;
check
!
(
"
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
0
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
42195
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
421950
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
nanosecond
:
421_950_000
)
;
check
!
(
"
000003
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
nanosecond
:
3000
)
;
check
!
(
"
000000
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
nanosecond
:
0
)
;
check
!
(
"
4x
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
4
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
INVALID
)
;
check
!
(
"
.
42100
"
[
internal_fix
!
(
Nanosecond6NoDot
)
]
;
INVALID
)
;
check
!
(
"
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
42195
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
TOO_SHORT
)
;
check
!
(
"
421950803
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
nanosecond
:
421_950_803
)
;
check
!
(
"
000000003
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
nanosecond
:
3
)
;
check
!
(
"
42195080354
"
[
internal_fix
!
(
Nanosecond9NoDot
)
num
!
(
Second
)
]
;
nanosecond
:
421_950_803
second
:
54
)
;
check
!
(
"
421950803547
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
TOO_LONG
)
;
check
!
(
"
000000000
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
nanosecond
:
0
)
;
check
!
(
"
00000000x
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
INVALID
)
;
check
!
(
"
4
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
INVALID
)
;
check
!
(
"
.
42100000
"
[
internal_fix
!
(
Nanosecond9NoDot
)
]
;
INVALID
)
;
check
!
(
"
+
00
:
00
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
0
)
;
check
!
(
"
-
00
:
00
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
0
)
;
check
!
(
"
+
00
:
01
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
60
)
;
check
!
(
"
-
00
:
01
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
-
60
)
;
check
!
(
"
+
00
:
30
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
30
*
60
)
;
check
!
(
"
-
00
:
30
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
-
30
*
60
)
;
check
!
(
"
+
04
:
56
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
296
*
60
)
;
check
!
(
"
-
04
:
56
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
-
296
*
60
)
;
check
!
(
"
+
24
:
00
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
24
*
60
*
60
)
;
check
!
(
"
-
24
:
00
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
-
24
*
60
*
60
)
;
check
!
(
"
+
99
:
59
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
(
100
*
60
-
1
)
*
60
)
;
check
!
(
"
-
99
:
59
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
-
(
100
*
60
-
1
)
*
60
)
;
check
!
(
"
+
00
:
59
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
59
*
60
)
;
check
!
(
"
+
00
:
60
"
[
fix
!
(
TimezoneOffset
)
]
;
OUT_OF_RANGE
)
;
check
!
(
"
+
00
:
99
"
[
fix
!
(
TimezoneOffset
)
]
;
OUT_OF_RANGE
)
;
check
!
(
"
#
12
:
34
"
[
fix
!
(
TimezoneOffset
)
]
;
INVALID
)
;
check
!
(
"
12
:
34
"
[
fix
!
(
TimezoneOffset
)
]
;
INVALID
)
;
check
!
(
"
+
12
:
34
"
[
fix
!
(
TimezoneOffset
)
]
;
TOO_LONG
)
;
check
!
(
"
+
12
:
34
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
754
*
60
)
;
check
!
(
"
\
t
-
12
:
34
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
-
754
*
60
)
;
check
!
(
"
"
[
fix
!
(
TimezoneOffset
)
]
;
TOO_SHORT
)
;
check
!
(
"
+
"
[
fix
!
(
TimezoneOffset
)
]
;
TOO_SHORT
)
;
check
!
(
"
+
1
"
[
fix
!
(
TimezoneOffset
)
]
;
TOO_SHORT
)
;
check
!
(
"
+
12
"
[
fix
!
(
TimezoneOffset
)
]
;
TOO_SHORT
)
;
check
!
(
"
+
123
"
[
fix
!
(
TimezoneOffset
)
]
;
TOO_SHORT
)
;
check
!
(
"
+
1234
"
[
fix
!
(
TimezoneOffset
)
]
;
offset
:
754
*
60
)
;
check
!
(
"
+
12345
"
[
fix
!
(
TimezoneOffset
)
]
;
TOO_LONG
)
;
check
!
(
"
+
12345
"
[
fix
!
(
TimezoneOffset
)
num
!
(
Day
)
]
;
offset
:
754
*
60
day
:
5
)
;
check
!
(
"
Z
"
[
fix
!
(
TimezoneOffset
)
]
;
INVALID
)
;
check
!
(
"
z
"
[
fix
!
(
TimezoneOffset
)
]
;
INVALID
)
;
check
!
(
"
Z
"
[
fix
!
(
TimezoneOffsetZ
)
]
;
offset
:
0
)
;
check
!
(
"
z
"
[
fix
!
(
TimezoneOffsetZ
)
]
;
offset
:
0
)
;
check
!
(
"
Y
"
[
fix
!
(
TimezoneOffsetZ
)
]
;
INVALID
)
;
check
!
(
"
Zulu
"
[
fix
!
(
TimezoneOffsetZ
)
lit
!
(
"
ulu
"
)
]
;
offset
:
0
)
;
check
!
(
"
zulu
"
[
fix
!
(
TimezoneOffsetZ
)
lit
!
(
"
ulu
"
)
]
;
offset
:
0
)
;
check
!
(
"
+
1234ulu
"
[
fix
!
(
TimezoneOffsetZ
)
lit
!
(
"
ulu
"
)
]
;
offset
:
754
*
60
)
;
check
!
(
"
+
12
:
34ulu
"
[
fix
!
(
TimezoneOffsetZ
)
lit
!
(
"
ulu
"
)
]
;
offset
:
754
*
60
)
;
check
!
(
"
Z
"
[
internal_fix
!
(
TimezoneOffsetPermissive
)
]
;
offset
:
0
)
;
check
!
(
"
z
"
[
internal_fix
!
(
TimezoneOffsetPermissive
)
]
;
offset
:
0
)
;
check
!
(
"
+
12
:
00
"
[
internal_fix
!
(
TimezoneOffsetPermissive
)
]
;
offset
:
12
*
60
*
60
)
;
check
!
(
"
+
12
"
[
internal_fix
!
(
TimezoneOffsetPermissive
)
]
;
offset
:
12
*
60
*
60
)
;
check
!
(
"
CEST
5
"
[
fix
!
(
TimezoneName
)
lit
!
(
"
"
)
num
!
(
Day
)
]
;
day
:
5
)
;
check
!
(
"
2015
-
02
-
04T14
:
37
:
05
+
09
:
00
"
[
num
!
(
Year
)
lit
!
(
"
-
"
)
num
!
(
Month
)
lit
!
(
"
-
"
)
num
!
(
Day
)
lit
!
(
"
T
"
)
num
!
(
Hour
)
lit
!
(
"
:
"
)
num
!
(
Minute
)
lit
!
(
"
:
"
)
num
!
(
Second
)
fix
!
(
TimezoneOffset
)
]
;
year
:
2015
month
:
2
day
:
4
hour_div_12
:
1
hour_mod_12
:
2
minute
:
37
second
:
5
offset
:
32400
)
;
check
!
(
"
20150204143705567
"
[
num
!
(
Year
)
num
!
(
Month
)
num
!
(
Day
)
num
!
(
Hour
)
num
!
(
Minute
)
num
!
(
Second
)
internal_fix
!
(
Nanosecond3NoDot
)
]
;
year
:
2015
month
:
2
day
:
4
hour_div_12
:
1
hour_mod_12
:
2
minute
:
37
second
:
5
nanosecond
:
567000000
)
;
check
!
(
"
Mon
10
Jun
2013
09
:
32
:
37
GMT
"
[
fix
!
(
ShortWeekdayName
)
lit
!
(
"
"
)
sp
!
(
"
"
)
num
!
(
Day
)
sp
!
(
"
"
)
fix
!
(
ShortMonthName
)
sp
!
(
"
"
)
num
!
(
Year
)
sp
!
(
"
"
)
num
!
(
Hour
)
lit
!
(
"
:
"
)
num
!
(
Minute
)
lit
!
(
"
:
"
)
num
!
(
Second
)
sp
!
(
"
"
)
lit
!
(
"
GMT
"
)
]
;
year
:
2013
month
:
6
day
:
10
weekday
:
Weekday
:
:
Mon
hour_div_12
:
0
hour_mod_12
:
9
minute
:
32
second
:
37
)
;
check
!
(
"
Sun
Aug
02
13
:
39
:
15
CEST
2020
"
[
fix
!
(
ShortWeekdayName
)
sp
!
(
"
"
)
fix
!
(
ShortMonthName
)
sp
!
(
"
"
)
num
!
(
Day
)
sp
!
(
"
"
)
num
!
(
Hour
)
lit
!
(
"
:
"
)
num
!
(
Minute
)
lit
!
(
"
:
"
)
num
!
(
Second
)
sp
!
(
"
"
)
fix
!
(
TimezoneName
)
sp
!
(
"
"
)
num
!
(
Year
)
]
;
year
:
2020
month
:
8
day
:
2
weekday
:
Weekday
:
:
Sun
hour_div_12
:
1
hour_mod_12
:
1
minute
:
39
second
:
15
)
;
check
!
(
"
20060102150405
"
[
num
!
(
Year
)
num
!
(
Month
)
num
!
(
Day
)
num
!
(
Hour
)
num
!
(
Minute
)
num
!
(
Second
)
]
;
year
:
2006
month
:
1
day
:
2
hour_div_12
:
1
hour_mod_12
:
3
minute
:
4
second
:
5
)
;
check
!
(
"
3
:
14PM
"
[
num
!
(
Hour12
)
lit
!
(
"
:
"
)
num
!
(
Minute
)
fix
!
(
LowerAmPm
)
]
;
hour_div_12
:
1
hour_mod_12
:
3
minute
:
14
)
;
check
!
(
"
12345678901234
.
56789
"
[
num
!
(
Timestamp
)
lit
!
(
"
.
"
)
num
!
(
Nanosecond
)
]
;
nanosecond
:
56_789
timestamp
:
12_345_678_901_234
)
;
check
!
(
"
12345678901234
.
56789
"
[
num
!
(
Timestamp
)
fix
!
(
Nanosecond
)
]
;
nanosecond
:
567_890_000
timestamp
:
12_345_678_901_234
)
;
}
#
[
cfg
(
test
)
]
#
[
test
]
fn
test_rfc2822
(
)
{
use
super
:
:
NOT_ENOUGH
;
use
super
:
:
*
;
use
offset
:
:
FixedOffset
;
use
DateTime
;
let
testdates
=
[
(
"
Tue
20
Jan
2015
17
:
35
:
20
-
0800
"
Ok
(
"
Tue
20
Jan
2015
17
:
35
:
20
-
0800
"
)
)
(
"
Fri
2
Jan
2015
17
:
35
:
20
-
0800
"
Ok
(
"
Fri
02
Jan
2015
17
:
35
:
20
-
0800
"
)
)
(
"
Fri
02
Jan
2015
17
:
35
:
20
-
0800
"
Ok
(
"
Fri
02
Jan
2015
17
:
35
:
20
-
0800
"
)
)
(
"
20
Jan
2015
17
:
35
:
20
-
0800
"
Ok
(
"
Tue
20
Jan
2015
17
:
35
:
20
-
0800
"
)
)
(
"
20
JAN
2015
17
:
35
:
20
-
0800
"
Ok
(
"
Tue
20
Jan
2015
17
:
35
:
20
-
0800
"
)
)
(
"
Tue
20
Jan
2015
17
:
35
-
0800
"
Ok
(
"
Tue
20
Jan
2015
17
:
35
:
00
-
0800
"
)
)
(
"
11
Sep
2001
09
:
45
:
00
EST
"
Ok
(
"
Tue
11
Sep
2001
09
:
45
:
00
-
0500
"
)
)
(
"
30
Feb
2015
17
:
35
:
20
-
0800
"
Err
(
OUT_OF_RANGE
)
)
(
"
Tue
20
Jan
2015
"
Err
(
TOO_SHORT
)
)
(
"
Tue
20
Avr
2015
17
:
35
:
20
-
0800
"
Err
(
INVALID
)
)
(
"
Tue
20
Jan
2015
25
:
35
:
20
-
0800
"
Err
(
OUT_OF_RANGE
)
)
(
"
Tue
20
Jan
2015
7
:
35
:
20
-
0800
"
Err
(
INVALID
)
)
(
"
Tue
20
Jan
2015
17
:
65
:
20
-
0800
"
Err
(
OUT_OF_RANGE
)
)
(
"
Tue
20
Jan
2015
17
:
35
:
90
-
0800
"
Err
(
OUT_OF_RANGE
)
)
(
"
Tue
20
Jan
2015
17
:
35
:
20
-
0890
"
Err
(
OUT_OF_RANGE
)
)
(
"
6
Jun
1944
04
:
00
:
00Z
"
Err
(
INVALID
)
)
(
"
Tue
20
Jan
2015
17
:
35
:
20
HAS
"
Err
(
NOT_ENOUGH
)
)
]
;
fn
rfc2822_to_datetime
(
date
:
&
str
)
-
>
ParseResult
<
DateTime
<
FixedOffset
>
>
{
let
mut
parsed
=
Parsed
:
:
new
(
)
;
parse
(
&
mut
parsed
date
[
Item
:
:
Fixed
(
Fixed
:
:
RFC2822
)
]
.
iter
(
)
)
?
;
parsed
.
to_datetime
(
)
}
fn
fmt_rfc2822_datetime
(
dt
:
DateTime
<
FixedOffset
>
)
-
>
String
{
dt
.
format_with_items
(
[
Item
:
:
Fixed
(
Fixed
:
:
RFC2822
)
]
.
iter
(
)
)
.
to_string
(
)
}
for
&
(
date
checkdate
)
in
testdates
.
iter
(
)
{
let
d
=
rfc2822_to_datetime
(
date
)
;
let
dt
=
match
d
{
Ok
(
dt
)
=
>
Ok
(
fmt_rfc2822_datetime
(
dt
)
)
Err
(
e
)
=
>
Err
(
e
)
}
;
if
dt
!
=
checkdate
.
map
(
|
s
|
s
.
to_string
(
)
)
{
panic
!
(
"
Date
conversion
failed
for
{
}
\
nReceived
:
{
:
?
}
\
nExpected
:
{
:
?
}
"
date
dt
checkdate
)
;
}
}
}
#
[
cfg
(
test
)
]
#
[
test
]
fn
parse_rfc850
(
)
{
use
{
TimeZone
Utc
}
;
static
RFC850_FMT
:
&
'
static
str
=
"
%
A
%
d
-
%
b
-
%
y
%
T
GMT
"
;
let
dt_str
=
"
Sunday
06
-
Nov
-
94
08
:
49
:
37
GMT
"
;
let
dt
=
Utc
.
ymd
(
1994
11
6
)
.
and_hms
(
8
49
37
)
;
assert_eq
!
(
dt
.
format
(
RFC850_FMT
)
.
to_string
(
)
dt_str
)
;
assert_eq
!
(
Ok
(
dt
)
Utc
.
datetime_from_str
(
"
Sunday
06
-
Nov
-
94
08
:
49
:
37
GMT
"
RFC850_FMT
)
)
;
let
testdates
=
[
(
Utc
.
ymd
(
1994
11
7
)
.
and_hms
(
8
49
37
)
"
Monday
07
-
Nov
-
94
08
:
49
:
37
GMT
"
)
(
Utc
.
ymd
(
1994
11
8
)
.
and_hms
(
8
49
37
)
"
Tuesday
08
-
Nov
-
94
08
:
49
:
37
GMT
"
)
(
Utc
.
ymd
(
1994
11
9
)
.
and_hms
(
8
49
37
)
"
Wednesday
09
-
Nov
-
94
08
:
49
:
37
GMT
"
)
(
Utc
.
ymd
(
1994
11
10
)
.
and_hms
(
8
49
37
)
"
Thursday
10
-
Nov
-
94
08
:
49
:
37
GMT
"
)
(
Utc
.
ymd
(
1994
11
11
)
.
and_hms
(
8
49
37
)
"
Friday
11
-
Nov
-
94
08
:
49
:
37
GMT
"
)
(
Utc
.
ymd
(
1994
11
12
)
.
and_hms
(
8
49
37
)
"
Saturday
12
-
Nov
-
94
08
:
49
:
37
GMT
"
)
]
;
for
val
in
&
testdates
{
assert_eq
!
(
Ok
(
val
.
0
)
Utc
.
datetime_from_str
(
val
.
1
RFC850_FMT
)
)
;
}
}
#
[
cfg
(
test
)
]
#
[
test
]
fn
test_rfc3339
(
)
{
use
super
:
:
*
;
use
offset
:
:
FixedOffset
;
use
DateTime
;
let
testdates
=
[
(
"
2015
-
01
-
20T17
:
35
:
20
-
08
:
00
"
Ok
(
"
2015
-
01
-
20T17
:
35
:
20
-
08
:
00
"
)
)
(
"
1944
-
06
-
06T04
:
04
:
00Z
"
Ok
(
"
1944
-
06
-
06T04
:
04
:
00
+
00
:
00
"
)
)
(
"
2001
-
09
-
11T09
:
45
:
00
-
08
:
00
"
Ok
(
"
2001
-
09
-
11T09
:
45
:
00
-
08
:
00
"
)
)
(
"
2015
-
01
-
20T17
:
35
:
20
.
001
-
08
:
00
"
Ok
(
"
2015
-
01
-
20T17
:
35
:
20
.
001
-
08
:
00
"
)
)
(
"
2015
-
01
-
20T17
:
35
:
20
.
000031
-
08
:
00
"
Ok
(
"
2015
-
01
-
20T17
:
35
:
20
.
000031
-
08
:
00
"
)
)
(
"
2015
-
01
-
20T17
:
35
:
20
.
000000004
-
08
:
00
"
Ok
(
"
2015
-
01
-
20T17
:
35
:
20
.
000000004
-
08
:
00
"
)
)
(
"
2015
-
01
-
20T17
:
35
:
20
.
000000000452
-
08
:
00
"
Ok
(
"
2015
-
01
-
20T17
:
35
:
20
-
08
:
00
"
)
)
(
"
2015
-
02
-
30T17
:
35
:
20
-
08
:
00
"
Err
(
OUT_OF_RANGE
)
)
(
"
2015
-
01
-
20T25
:
35
:
20
-
08
:
00
"
Err
(
OUT_OF_RANGE
)
)
(
"
2015
-
01
-
20T17
:
65
:
20
-
08
:
00
"
Err
(
OUT_OF_RANGE
)
)
(
"
2015
-
01
-
20T17
:
35
:
90
-
08
:
00
"
Err
(
OUT_OF_RANGE
)
)
(
"
2015
-
01
-
20T17
:
35
:
20
-
24
:
00
"
Err
(
OUT_OF_RANGE
)
)
]
;
fn
rfc3339_to_datetime
(
date
:
&
str
)
-
>
ParseResult
<
DateTime
<
FixedOffset
>
>
{
let
mut
parsed
=
Parsed
:
:
new
(
)
;
parse
(
&
mut
parsed
date
[
Item
:
:
Fixed
(
Fixed
:
:
RFC3339
)
]
.
iter
(
)
)
?
;
parsed
.
to_datetime
(
)
}
fn
fmt_rfc3339_datetime
(
dt
:
DateTime
<
FixedOffset
>
)
-
>
String
{
dt
.
format_with_items
(
[
Item
:
:
Fixed
(
Fixed
:
:
RFC3339
)
]
.
iter
(
)
)
.
to_string
(
)
}
for
&
(
date
checkdate
)
in
testdates
.
iter
(
)
{
let
d
=
rfc3339_to_datetime
(
date
)
;
let
dt
=
match
d
{
Ok
(
dt
)
=
>
Ok
(
fmt_rfc3339_datetime
(
dt
)
)
Err
(
e
)
=
>
Err
(
e
)
}
;
if
dt
!
=
checkdate
.
map
(
|
s
|
s
.
to_string
(
)
)
{
panic
!
(
"
Date
conversion
failed
for
{
}
\
nReceived
:
{
:
?
}
\
nExpected
:
{
:
?
}
"
date
dt
checkdate
)
;
}
}
}
| true
|
8c374d634f9d9b0e4d3b8b8da841375b0e0d19c8
|
Rust
|
energiacte/cteepbd
|
/src/types/carrier.rs
|
UTF-8
| 4,457
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
// Copyright (c) 2018-2022 Ministerio de Fomento
// Instituto de Ciencias de la Construcción Eduardo Torroja (IETcc-CSIC)
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// Author(s): Rafael Villar Burke <pachi@ietcc.csic.es>,
// Daniel Jiménez González <dani@ietcc.csic.es>,
// Marta Sorribes Gil <msorribes@ietcc.csic.es>
//! Vectores energéticos
use std::fmt;
use std::str;
use serde::{Deserialize, Serialize};
use super::ProdSource;
use crate::error::EpbdError;
/// Vector energético (energy carrier).
#[allow(non_camel_case_types)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub enum Carrier {
/// Environment thermal energy (from heat pumps and other)
EAMBIENTE,
/// Biofuel
BIOCARBURANTE,
/// Biomass
BIOMASA,
/// Densified biomass (pellets)
BIOMASADENSIFICADA,
/// Coal
CARBON,
/// Electricity
ELECTRICIDAD,
/// Natural gas
GASNATURAL,
/// Diesel oil
GASOLEO,
/// LPG - Liquefied petroleum gas
GLP,
/// Generic energy carrier 1
RED1,
/// Generic energy carrier 2
RED2,
/// Thermal energy from solar collectors
TERMOSOLAR,
}
/// TODO: La clasificación de los vectores en función del perímetro debería hacerse
/// TODO: en la propia definición de esos vectores
impl Carrier {
/// Vectores considerados dentro del perímetro NEARBY (a excepción de la ELECTRICIDAD in situ).
pub const NRBY: [Carrier; 6] = [
Carrier::BIOMASA,
Carrier::BIOMASADENSIFICADA,
Carrier::RED1,
Carrier::RED2,
Carrier::EAMBIENTE,
Carrier::TERMOSOLAR,
]; // Ver B.23. Solo biomasa sólida
/// Vectores considerados dentro del perímetro ONSITE (a excepción de la ELECTRICIDAD in situ).
pub const ONST: [Carrier; 2] = [Carrier::EAMBIENTE, Carrier::TERMOSOLAR];
/// Is this a carrier from the onsite or nearby perimeter?
pub fn is_nearby(&self) -> bool {
Carrier::NRBY.contains(self)
}
/// Is this a carrier from the onsite perimeter?
pub fn is_onsite(&self) -> bool {
Carrier::ONST.contains(self)
}
}
impl str::FromStr for Carrier {
type Err = EpbdError;
fn from_str(s: &str) -> Result<Carrier, Self::Err> {
match s {
"EAMBIENTE" => Ok(Carrier::EAMBIENTE),
"BIOCARBURANTE" => Ok(Carrier::BIOCARBURANTE),
"BIOMASA" => Ok(Carrier::BIOMASA),
"BIOMASADENSIFICADA" => Ok(Carrier::BIOMASADENSIFICADA),
"CARBON" => Ok(Carrier::CARBON),
"ELECTRICIDAD" => Ok(Carrier::ELECTRICIDAD),
"GASNATURAL" => Ok(Carrier::GASNATURAL),
"GASOLEO" => Ok(Carrier::GASOLEO),
"GLP" => Ok(Carrier::GLP),
"RED1" => Ok(Carrier::RED1),
"RED2" => Ok(Carrier::RED2),
"TERMOSOLAR" => Ok(Carrier::TERMOSOLAR),
_ => Err(EpbdError::ParseError(s.into())),
}
}
}
/// Formats the carrier as its variant name (the canonical uppercase
/// identifier), so `Display` output round-trips through `FromStr`.
impl std::fmt::Display for Carrier {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
/// Maps each production source to the carrier it produces: both on-site and
/// cogenerated electricity map to ELECTRICIDAD; the rest map to their namesakes.
impl std::convert::From<ProdSource> for Carrier {
    fn from(value: ProdSource) -> Self {
        match value {
            ProdSource::EL_INSITU => Carrier::ELECTRICIDAD,
            ProdSource::EL_COGEN => Carrier::ELECTRICIDAD,
            ProdSource::TERMOSOLAR => Carrier::TERMOSOLAR,
            ProdSource::EAMBIENTE => Carrier::EAMBIENTE,
        }
    }
}
| true
|
89e4ba9941d47966ff871836eb41303137813ac8
|
Rust
|
Techcable/toymath
|
/lib/extended-float-macros/src/lib.rs
|
UTF-8
| 3,326
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
#![feature(proc_macro, const_fn, ptr_offset_from)]
extern crate proc_macro;
extern crate extended_float_sys as sys;
#[macro_use]
extern crate syn;
#[macro_use]
extern crate quote;
use std::{mem, ptr};
use std::ffi::CString;
use syn::{LitInt, IntSuffix, LitStr, LitFloat};
use syn::synom::Synom;
use proc_macro::TokenStream;
/// A numeric literal accepted by the `extended_float!` proc macro, captured
/// before conversion to the C `ExtendedFloat` representation.
enum ExtendedFloatLiteral {
    // Integer literal with `u64` suffix
    UnsignedInteger(u64),
    // Integer literal with no suffix or `i64` suffix (possibly negated)
    Integer(i64),
    // Float literal (possibly negated)
    Float(f64),
    // String literal, handed to the C parser at macro-expansion time
    String(String)
}
impl ExtendedFloatLiteral {
    /// Convert the captured literal into the FFI `ExtendedFloat` value at
    /// macro-expansion time by delegating to the C conversion/parse routines.
    ///
    /// # Panics
    /// Panics on an empty string literal, a literal starting with whitespace,
    /// a literal containing a NUL byte, or when the C parser stops before the
    /// end of the string.
    pub fn parse(&self) -> sys::ExtendedFloat {
        unsafe {
            // NOTE(review): `mem::uninitialized` is deprecated and UB for most
            // types; `MaybeUninit` + `assume_init` is the modern replacement —
            // confirm the crate's (old nightly) toolchain supports it before
            // migrating. The C routines below fill `out` as an out-parameter.
            let mut out = mem::uninitialized();
            match *self {
                ExtendedFloatLiteral::UnsignedInteger(value) => {
                    sys::extended_convert_from_u64(&mut out, value);
                },
                ExtendedFloatLiteral::Integer(value) => {
                    sys::extended_convert_from_i64(&mut out, value);
                },
                ExtendedFloatLiteral::Float(value) => {
                    sys::extended_convert_from_f64(&mut out, value);
                },
                ExtendedFloatLiteral::String(ref value) => {
                    assert!(!value.is_empty(), "Empty literal string!");
                    let first = value.chars().next().unwrap();
                    assert!(!first.is_whitespace(), "Literal starts with whitespace: {:?}", value);
                    let native = CString::new(value.as_bytes())
                        .unwrap_or_else(|_| panic!("Literal contains null byte: {:?}", value));
                    // `end` is set by the C parser to one past the last consumed byte.
                    let mut end = ptr::null_mut();
                    let expected_end = native.as_ptr().add(value.len());
                    sys::extended_parse(&mut out,native.as_ptr(), &mut end);
                    if (end as *const _) != expected_end {
                        // Partial parse: report how many bytes were consumed.
                        let consumed = end.offset_from(native.as_ptr());
                        panic!("Only parsed {} of {:?}", consumed, value)
                    }
                }
            }
            out
        }
    }
}
// Token-stream grammar for the macro input, written with syn 0.x's nom-style
// `named!`/`alt!` combinators: an integer (with optional sign/suffix), a
// signed float, or a string literal.
impl Synom for ExtendedFloatLiteral {
    named!(parse -> Self, alt!(
        syn!(LitInt) => { |lit| match lit.suffix() {
            IntSuffix::None | IntSuffix::I64 => ExtendedFloatLiteral::Integer(lit.value() as i64),
            IntSuffix::U64 => ExtendedFloatLiteral::UnsignedInteger(lit.value() as u64),
            _ => panic!("Invalid suffix {:?}", lit.suffix())
        } } |
        negative_int => { |value| ExtendedFloatLiteral::Integer(value) } |
        signed_float => { |value| ExtendedFloatLiteral::Float(value) } |
        syn!(LitStr) => { |lit| ExtendedFloatLiteral::String(lit.value()) }
    ));
}
// A float literal with an optional leading minus sign.
named!(signed_float -> f64, alt!(
    negative_float |
    syn!(LitFloat) => { |lit| lit.value() }
));
// `-` followed by a float literal, yielding its negation.
named!(negative_float -> f64, do_parse!(
    punct!(-) >>
    lit: syn!(LitFloat) >>
    (-lit.value())
));
// `-` followed by an integer literal, yielding its negation.
named!(negative_int -> i64, do_parse!(
    punct!(-) >>
    lit: syn!(LitInt) >>
    (-(lit.value() as i64))
));
/// Emit the computed `ExtendedFloat` as a `from_bits([...])` constructor call,
/// baking the raw bytes into the expanded code.
fn emit(value: sys::ExtendedFloat) -> TokenStream {
    let bytes = &value.0;
    quote!(ExtendedFloat::from_bits([#(#bytes),*])).into()
}
/// `extended_float!(<literal>)`: parse a numeric/string literal at compile
/// time with the C extended-float library and expand to its bit pattern.
#[proc_macro]
pub fn extended_float(input: TokenStream) -> TokenStream {
    let literal = ::syn::parse::<ExtendedFloatLiteral>(input.clone())
        .unwrap_or_else(|_| panic!("Invalid literal: {:?}", input))
    ;
    emit(literal.parse())
}
| true
|
352bff93f20a5e4d30dee33055eee0ccfd656631
|
Rust
|
peterhj/libdualnum
|
/src/lib.rs
|
UTF-8
| 4,077
| 3.046875
| 3
|
[
"Apache-2.0"
] |
permissive
|
extern crate arithmetic;
use arithmetic::*;
use std::f32;
use std::ops::{Neg, Add, Sub, Mul, Div};
/// A dual number `a + b·ε` (with ε² = 0): field `.0` is the real part,
/// `.1` the dual part. Arithmetic on dual numbers propagates first
/// derivatives automatically (forward-mode AD).
#[derive(Clone, Copy, Debug)]
pub struct DualNum<T>(pub T, pub T);
impl<T> DualNum<T> where T: PseudoField {
    /// Lift a constant: its derivative (dual part) is zero.
    pub fn constant(value: T) -> DualNum<T> {
        DualNum(value, T::zero())
    }
    /// Lift the differentiation variable itself: derivative is one.
    pub fn param(value: T) -> DualNum<T> {
        DualNum(value, T::one())
    }
}
impl<T> DualNum<T> {
    /// The real (value) component.
    pub fn real(self) -> T {
        self.0
    }
    /// The dual (derivative) component.
    pub fn dual(self) -> T {
        self.1
    }
}
impl<T> DualNum<T> where T: Copy + PseudoField {
    /// Multiplicative inverse `1 / self`.
    ///
    /// Uses the dual-number division rule, yielding `(1/a, -b/a²)`
    /// for `self = a + b·ε`.
    pub fn reciprocal(self) -> DualNum<T> {
        // BUG FIX: the numerator must be the constant ONE, not zero —
        // `constant(zero) / self` always evaluated to the zero dual number,
        // so `reciprocal` returned 0 for every input.
        DualNum::constant(T::one()) / self
    }
}
// Unary negation: negate both components.
impl<T> Neg for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn neg(self) -> DualNum<T> {
        DualNum(-self.0, -self.1)
    }
}
// Adding a scalar only shifts the real part.
impl<T> Add<T> for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn add(self, rhs: T) -> DualNum<T> {
        DualNum(self.0 + rhs, self.1)
    }
}
// Component-wise addition.
impl<T> Add for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn add(self, rhs: DualNum<T>) -> DualNum<T> {
        DualNum(self.0 + rhs.0, self.1 + rhs.1)
    }
}
// Subtracting a scalar only shifts the real part.
impl<T> Sub<T> for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn sub(self, rhs: T) -> DualNum<T> {
        DualNum(self.0 - rhs, self.1)
    }
}
// Component-wise subtraction.
impl<T> Sub for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn sub(self, rhs: DualNum<T>) -> DualNum<T> {
        DualNum(self.0 - rhs.0, self.1 - rhs.1)
    }
}
// Scaling by a scalar scales both components.
impl<T> Mul<T> for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn mul(self, rhs: T) -> DualNum<T> {
        DualNum(self.0 * rhs, self.1 * rhs)
    }
}
// Product rule: (a + b·ε)(c + d·ε) = ac + (bc + ad)·ε.
impl<T> Mul for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn mul(self, rhs: DualNum<T>) -> DualNum<T> {
        DualNum(self.0 * rhs.0, self.1 * rhs.0 + self.0 * rhs.1)
    }
}
// Dividing by a scalar divides both components.
impl<T> Div<T> for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn div(self, rhs: T) -> DualNum<T> {
        DualNum(self.0 / rhs, self.1 / rhs)
    }
}
// Quotient rule: (a + b·ε)/(c + d·ε) = a/c + ((bc - ad)/c²)·ε.
impl<T> Div for DualNum<T> where T: Copy + PseudoField {
    type Output = DualNum<T>;
    fn div(self, rhs: DualNum<T>) -> DualNum<T> {
        DualNum(self.0 / rhs.0, (self.1 * rhs.0 - self.0 * rhs.1) / (rhs.0 * rhs.0))
    }
}
/// Elementary functions lifted to dual numbers (value + derivative via the
/// chain rule). Commented-out methods are not yet implemented below.
pub trait DualNumExt<T> {
    fn sqrt(self) -> DualNum<T>;
    fn exp(self) -> DualNum<T>;
    fn exp2(self) -> DualNum<T>;
    fn ln(self) -> DualNum<T>;
    fn log2(self) -> DualNum<T>;
    fn log10(self) -> DualNum<T>;
    fn sin(self) -> DualNum<T>;
    fn cos(self) -> DualNum<T>;
    fn tan(self) -> DualNum<T>;
    /*fn asin(self) -> DualNum<T>;
    fn acos(self) -> DualNum<T>;
    fn atan(self) -> DualNum<T>;
    fn atan2(self, other: DualNum<T>) -> DualNum<T>;
    fn sinh(self) -> DualNum<T>;
    fn cosh(self) -> DualNum<T>;
    fn tanh(self) -> DualNum<T>;
    fn asinh(self) -> DualNum<T>;
    fn acosh(self) -> DualNum<T>;
    fn atanh(self) -> DualNum<T>;*/
}
impl DualNumExt<f32> for DualNum<f32> {
    // Every method applies the chain rule: for input a + b·ε the result is
    // f(a) + f'(a)·b·ε. The exact floating-point operations of the original
    // are preserved (only multiplication order, which is IEEE-commutative,
    // and local naming differ).
    fn sqrt(self) -> DualNum<f32> {
        let value = self.0.sqrt();
        DualNum(value, (0.5 / value) * self.1)
    }
    fn exp(self) -> DualNum<f32> {
        // d/dx exp(x) = exp(x)
        let value = self.0.exp();
        DualNum(value, value * self.1)
    }
    fn exp2(self) -> DualNum<f32> {
        // d/dx 2^x = ln(2)·2^x
        let value = self.0.exp2();
        DualNum(value, (f32::consts::LN_2 * value) * self.1)
    }
    fn ln(self) -> DualNum<f32> {
        // d/dx ln(x) = 1/x
        DualNum(self.0.ln(), (1.0 / self.0) * self.1)
    }
    fn log2(self) -> DualNum<f32> {
        // d/dx log2(x) = 1/(ln(2)·x)
        DualNum(self.0.log2(), (1.0 / (f32::consts::LN_2 * self.0)) * self.1)
    }
    fn log10(self) -> DualNum<f32> {
        // d/dx log10(x) = 1/(ln(10)·x)
        DualNum(self.0.log10(), (1.0 / (f32::consts::LN_10 * self.0)) * self.1)
    }
    fn sin(self) -> DualNum<f32> {
        DualNum(self.0.sin(), self.0.cos() * self.1)
    }
    fn cos(self) -> DualNum<f32> {
        DualNum(self.0.cos(), (-self.0.sin()) * self.1)
    }
    fn tan(self) -> DualNum<f32> {
        // d/dx tan(x) = 1/cos²(x)
        let cosine = self.0.cos();
        DualNum(self.0.tan(), (1.0 / (cosine * cosine)) * self.1)
    }
}
| true
|
64905cd4932799fab57d32560cf9871dc73ea774
|
Rust
|
kwe/discount_calculator
|
/src/main.rs
|
UTF-8
| 2,271
| 3.234375
| 3
|
[] |
no_license
|
use shop;
use rand::seq::SliceRandom;
fn main() {
    // Promotional rules as a static JSON document. In a real system these
    // would come from a dynamic endpoint such as REST/GraphQL.
    let promotional_rules = r#"
  {
      "version" : 1,
      "total_discount_threshold": 60.00,
      "total_discount_percentage": 10.00,
      "products": [
        {
          "id":"001",
          "name":"Lavender heart",
          "price":9.25,
          "discount_threshold":2,
          "discount_price": 8.50
        },
        {
          "id":"002",
          "name":"Personalised cufflinks",
          "price":45.00,
          "discount_threshold":0.0,
          "discount_price": 0.0
        },
        {
          "id":"003",
          "name":"Kids T-shirt ",
          "price":19.95,
          "discount_threshold":0.0,
          "discount_price": 0.0
        }
      ]
  }
  "#;
    println!("Test data\n-----------");
    // Basket 1: one of each product.
    let mut items = vec!["001", "002", "003"];
    println!("Basket: {:?}", items);
    let mut checkout = shop::Checkout::new(promotional_rules);
    for code in &items {
        checkout.scan(code);
    }
    println!("£{:#?}", checkout.total());
    // Basket 2: two lavender hearts trigger the multi-buy price.
    let mut checkout = shop::Checkout::new(promotional_rules);
    items = vec!["001", "003", "001"];
    println!("Basket: {:?}", items);
    for code in &items {
        checkout.scan(code);
    }
    println!("£{:?}", checkout.total());
    // Basket 3: mixed basket crossing the total-discount threshold.
    let mut checkout = shop::Checkout::new(promotional_rules);
    let mut items = vec!["001", "002", "001", "003"];
    println!("Basket: {:?}", items);
    for code in &items {
        checkout.scan(code);
    }
    println!("£{:?}", checkout.total());
    let previous_total = checkout.total();
    // Shuffle the last basket: the total must be order-independent.
    let mut rng = rand::thread_rng();
    items.shuffle(&mut rng);
    let mut checkout = shop::Checkout::new(promotional_rules);
    println!("\nOnce more for luck, randomise the last basket order\n\nBasket: {:?}", items);
    for code in &items {
        checkout.scan(code);
    }
    println!("£{:?} which is equal to £{:?}", checkout.total(), previous_total);
}
| true
|
a8a05ca03821b1ed0d61367f84c66b7619195380
|
Rust
|
ChuckBates/AdventOfCode2020
|
/src/bin/day_3.rs
|
UTF-8
| 2,467
| 3.328125
| 3
|
[] |
no_license
|
use std::io::{BufRead, BufReader};
use std::fs::File;
/// Advent of Code 2020, day 3: count trees ('#') hit while sliding down a
/// horizontally-repeating grid with fixed slopes.
fn main() {
    let input_file_path = "src/bin/inputs/day_three_input.txt";
    // Part One: single slope, right 3 / down 1.
    let part_one_result = execute_part_one(input_file_path, vec![3,1]);
    println!("trees: {}", part_one_result);
    // Part Two: product of tree counts over five slopes ([right, down]).
    let slopes = vec![
        vec![1,1],
        vec![3,1],
        vec![5,1],
        vec![7,1],
        vec![1,2]
    ];
    let part_two_result = execute_part_two(input_file_path, slopes);
    println!("trees: {}", part_two_result);
}
/// Part two: run part one for each slope and multiply the tree counts.
///
/// The manual accumulate-into-`total_trees` loop is replaced with the
/// equivalent `Iterator::product`, which starts from 1 just like the original.
fn execute_part_two(file_path:&str, slopes:Vec<Vec<usize>>) -> usize {
    slopes
        .into_iter()
        .map(|slope| execute_part_one(file_path, slope))
        .product()
}
/// Part one: walk the grid from the top using `slope` (`[right, down]`) and
/// count the '#' squares hit. The grid repeats infinitely to the right, which
/// `increment_horizontal` models by wrapping the column index.
fn execute_part_one(file_path:&str, slope:Vec<usize>) -> usize {
    let grid = parse_input(file_path);
    let horizontal_width = grid[0].len();
    let horizontal_step = slope[0];
    let vertical_step = slope[1];
    let mut horizontal_position = 0;
    let mut vertical_position = 0;
    let mut counter = 0;
    // FIX: the original used `while !done` with a `done` flag that was never
    // set; an explicit `loop` + `break` states the intent directly.
    loop {
        let row_number = increment_vertical(vertical_position, vertical_step);
        let column_number = increment_horizontal(horizontal_position, horizontal_step, horizontal_width);
        // Stop once we step past the bottom of the grid.
        if row_number >= grid.len() {
            break;
        }
        if grid[row_number][column_number] == "#" {
            counter += 1;
        }
        horizontal_position = column_number;
        vertical_position = row_number;
    }
    counter
}
// Advance `step` columns to the right, wrapping once around the repeating
// pattern of width `width` (the puzzle's steps are always smaller than the
// pattern width, so a single subtraction suffices).
fn increment_horizontal(position:usize, step:usize, width:usize) -> usize {
    let advanced = position + step;
    if advanced >= width {
        advanced - width
    } else {
        advanced
    }
}
// Advance `step` rows downward; the grid does not repeat vertically.
fn increment_vertical(position:usize, step:usize) -> usize {
    position + step
}
/// Read the grid file into a matrix of single-character `String` cells:
/// each line becomes one row, each character one cell.
///
/// # Panics
/// Panics if the file cannot be opened or a line cannot be read.
fn parse_input(file_path:&str) -> Vec<Vec<String>> {
    let reader = BufReader::new(File::open(file_path).expect("Cannot open file"));
    reader
        .lines()
        .map(|line| {
            line.expect("Cannot read line")
                .chars()
                .map(String::from)
                .collect()
        })
        .collect()
}
#[cfg(test)]
#[path = "tests/day_3_tests.rs"]
mod day_3_tests;
| true
|
785e89ce9f55e94388781de396411aa53922c049
|
Rust
|
dennisss/dacha
|
/pkg/http/src/query.rs
|
UTF-8
| 4,309
| 3.109375
| 3
|
[
"Apache-2.0"
] |
permissive
|
// Utilities for dealing with the form parameters encoded in a URL's query
// string or form body also known as 'application/x-www-form-urlencoded'.
//
// The specificication is defined in:
// https://url.spec.whatwg.org/#application/x-www-form-urlencoded
use std::collections::HashMap;
use std::fmt::Write;
use common::errors::*;
use parsing::ascii::AsciiString;
use parsing::opaque::OpaqueString;
// /// Map based storage of query parameters.
// /// This is efficient for lookup but ignores any ordering between values with
// different names. pub struct QueryParams {
// params: HashMap<OpaqueString, Vec<OpaqueString>>,
// }
/// Incrementally builds an `application/x-www-form-urlencoded` string:
/// `key=value` pairs joined with `&`.
pub struct QueryParamsBuilder {
    // Accumulated encoded output
    out: String,
}
impl QueryParamsBuilder {
    /// Create an empty builder.
    pub fn new() -> Self {
        Self { out: String::new() }
    }
    /// Append one `key=value` pair; the `=` is omitted entirely when `value`
    /// is empty. Both key and value are percent-encoded.
    pub fn add(mut self, key: &[u8], value: &[u8]) -> Self {
        // Lower-bound reservation: 'key=value&' before percent expansion.
        self.out.reserve(key.len() + value.len() + 2);
        if !self.out.is_empty() {
            self.out.push('&');
        }
        self.add_slice(key);
        if !value.is_empty() {
            self.out.push('=');
            self.add_slice(value);
        }
        self
    }
    /// Percent-encode `data` into the output: space becomes '+', ASCII
    /// alphanumerics pass through, every other byte becomes `%XX`.
    fn add_slice(&mut self, data: &[u8]) {
        for byte in data.iter().cloned() {
            if byte == b' ' {
                self.out.push('+');
            } else if byte.is_ascii_alphanumeric() {
                // TODO: Also allow some punctionation.
                self.out.push(byte as char);
            } else {
                write!(self.out, "%{:02X}", byte).unwrap();
            }
        }
    }
    /// Finish and return the encoded string; the unwrap is safe because the
    /// encoder above only ever emits ASCII characters.
    pub fn build(self) -> AsciiString {
        AsciiString::from(self.out).unwrap()
    }
}
/// Streaming parser over an `application/x-www-form-urlencoded` byte string;
/// yields `(name, value)` pairs through its `Iterator` impl.
pub struct QueryParamsParser<'a> {
    // Remaining unparsed input
    input: &'a [u8],
}
impl<'a> QueryParamsParser<'a> {
    pub fn new(input: &'a [u8]) -> Self {
        Self { input }
    }
    /// Try to decode the two hex digits that follow a '%'. On success the two
    /// digits are consumed and the decoded byte returned; on malformed input
    /// nothing is consumed, so the caller keeps the bytes literally
    /// (lenient behaviour, exercised by the `%ZZ` case in the tests below).
    fn decode_percent_encoded(&mut self) -> Option<u8> {
        if self.input.len() < 2 {
            return None;
        }
        let s = match std::str::from_utf8(&self.input[0..2]) {
            Ok(s) => s,
            Err(_) => {
                return None;
            }
        };
        // NOTE(review): `from_str_radix` also accepts a leading '+', so an
        // input like "%+3" would decode to 3 — confirm this is acceptable.
        match u8::from_str_radix(s, 16) {
            Ok(v) => {
                self.input = &self.input[2..];
                Some(v)
            }
            Err(_) => None,
        }
    }
}
impl std::iter::Iterator for QueryParamsParser<'_> {
    type Item = (OpaqueString, OpaqueString);

    /// Produce the next `(name, value)` pair, or `None` at end of input.
    /// Empty segments caused by leading/consecutive '&' are skipped; a
    /// segment without '=' yields an empty value; only the first '=' splits
    /// name from value (later '=' are literal).
    fn next(&mut self) -> Option<Self::Item> {
        let mut name = vec![];
        let mut value = vec![];
        // False while collecting the name, true after the first '='.
        let mut parsing_value = false;
        // Skip pair separators left over from the previous call.
        while !self.input.is_empty() && self.input[0] == b'&' {
            self.input = &self.input[1..];
        }
        if self.input.is_empty() {
            return None;
        }
        while !self.input.is_empty() {
            let mut byte = self.input[0];
            self.input = &self.input[1..];
            if byte == b'=' {
                if !parsing_value {
                    parsing_value = true;
                    continue;
                }
                // '=' inside the value is kept literally.
            } else if byte == b'&' {
                break;
            }
            // Decode '+' (space) and valid %XX escapes; malformed escapes
            // fall through and keep the '%' literally.
            if byte == b'+' {
                byte = b' ';
            } else if byte == b'%' {
                if let Some(decoded) = self.decode_percent_encoded() {
                    byte = decoded;
                }
            }
            if parsing_value {
                value.push(byte);
            } else {
                name.push(byte);
            }
        }
        Some((OpaqueString::from(name), OpaqueString::from(value)))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// End-to-end check of the lenient parser: stray '&'s, a literal '=' in a
    /// value, '+' decoding, bare names, and malformed percent escapes that
    /// are kept literally.
    #[test]
    fn parser_test() {
        // TODO: Distinguish between 'name' and 'name='?
        let input = b"&hello=wor=ld&value=123 +go&&=&name&encoded=%333r%ZZ%";
        let raw_expected_outputs: &[(&[u8], &[u8])] = &[
            (b"hello", b"wor=ld"),
            (b"value", b"123 go"),
            (b"", b""),
            (b"name", b""),
            (b"encoded", b"33r%ZZ%"),
        ];
        let expected_outputs = raw_expected_outputs
            .iter()
            .map(|(k, v)| (OpaqueString::from(*k), OpaqueString::from(*v)))
            .collect::<Vec<_>>();
        let outputs = QueryParamsParser::new(input).collect::<Vec<_>>();
        assert_eq!(outputs, expected_outputs);
    }
}
| true
|
a4edb3cea0de3fe93ee2abc21c88e9a86cbfb497
|
Rust
|
NVSL/Corundum
|
/examples/store/plist.rs
|
UTF-8
| 7,414
| 2.515625
| 3
|
[
"Apache-2.0"
] |
permissive
|
use crate::Prog;
use corundum::default::*;
use std::fmt::{Debug, Display, Error, Formatter};
use std::panic::RefUnwindSafe;
use std::panic::UnwindSafe;
use std::str::FromStr;
use corundum::open_flags::*;
type P = Allocator;
type Link<T> = PRefCell<Option<Pbox<Node<T>>>>;
pub trait NVData = PSafe + TxInSafe + TxOutSafe + UnwindSafe + RefUnwindSafe + Clone;
/// A node of the persistent singly-linked list.
struct Node<T: NVData> {
    // Payload value
    data: T,
    // Next node in the chain, or None at the tail
    next: Link<T>,
}
impl<T: NVData> Node<T> {
    /// Append `data` at the end of the chain rooted at `self`, recursing
    /// until the tail is reached; the new node is allocated inside
    /// transaction `j`.
    fn push_back(&self, data: T, j: &Journal) {
        if let Some(next) = &*self.next.borrow() {
            next.push_back(data, j);
            return;
        }
        *self.next.borrow_mut(j) = Some(Pbox::new(
            Node {
                data,
                next: PRefCell::new(None),
            },
            j,
        ));
    }
    /// Remove and return the last value of the chain rooted at `self`.
    /// The returned bool is true when `self` itself was the tail, telling
    /// the caller that it must unlink this node.
    fn pop_back(&self, j: &Journal) -> (T, bool) {
        // Defer unlinking until the immutable borrow of `next` is released.
        let mut drop_next = false;
        let res = if let Some(next) = &*self.next.borrow() {
            if next.next.borrow().is_none() {
                drop_next = true;
                (next.data.clone(), false)
            } else {
                next.pop_back(j)
            }
        } else {
            (self.data.clone(), true)
        };
        if drop_next {
            self.next.replace(None, j);
        }
        res
    }
}
/// Renders the chain as `(a)->(b)->...` for printing.
impl<T: NVData + Display> Display for Node<T> {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), Error> {
        if let Some(next) = &*self.next.borrow() {
            write!(fmt, "({})->{}", self.data, next)
        } else {
            write!(fmt, "({})", self.data)
        }
    }
}
/// Persistent clone: copies the payload and deep-clones the rest of the chain.
impl<T: NVData> PClone<P> for Node<T> {
    fn pclone(&self, j: &Journal) -> Self {
        Self {
            data: self.data.clone(),
            next: self.next.pclone(j),
        }
    }
}
/// Persistent singly-linked list; `root` is the head link.
pub struct List<T: NVData> {
    root: Link<T>,
}
impl<T: NVData> List<T> {
    /// Prepend `data`; the new node's `next` is a persistent clone of the
    /// old head link.
    pub fn push_front(&self, data: T, j: &Journal) {
        *self.root.borrow_mut(j) = Some(Pbox::new(
            Node {
                data,
                next: self.root.pclone(j),
            },
            j,
        ));
    }
    /// Append `data` at the tail; an empty list degenerates to `push_front`.
    pub fn push_back(&self, data: T, j: &Journal) {
        if let Some(ref root) = &*self.root.borrow() {
            root.push_back(data, j);
            return;
        }
        self.push_front(data, j);
    }
    /// Remove and return the head value, or None when the list is empty.
    /// Two-phase: read the value under an immutable borrow, then relink
    /// the root under a mutable borrow.
    pub fn pop_front(&self, j: &Journal) -> Option<T> {
        let mut drop_root = false;
        let res = if let Some(root) = &*self.root.borrow() {
            drop_root = true;
            Some(root.data.clone())
        } else {
            None
        };
        if drop_root {
            let mut root = self.root.borrow_mut(j);
            let r = root.pclone(j).unwrap();
            let next = r.next.borrow();
            if let Some(next) = &*next {
                *root = Some(next.pclone(j));
            } else {
                *root = None;
            }
        }
        res
    }
    /// Remove and return the tail value, or None when the list is empty.
    /// `Node::pop_back` signals (via its bool) when the root itself was
    /// the only node and must be unlinked here.
    pub fn pop_back(&self, j: &Journal) -> Option<T> {
        let mut drop_root = false;
        let res = if let Some(root) = &*self.root.borrow() {
            let (d, drop) = root.pop_back(j);
            drop_root = drop;
            Some(d)
        } else {
            None
        };
        if drop_root {
            *self.root.borrow_mut(j) = None;
        }
        res
    }
    /// Drop every element by unlinking the head.
    pub fn clear(&self, j: &Journal) {
        *self.root.borrow_mut(j) = None;
    }
}
/// Prints the whole chain, or "Empty" for an empty list.
impl<T: NVData + Display> Display for List<T> {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), Error> {
        if let Some(root) = &*self.root.borrow() {
            write!(fmt, "{}", root)
        } else {
            write!(fmt, "Empty")
        }
    }
}
impl<T: NVData> Default for List<T> {
    fn default() -> Self {
        Self {
            root: PRefCell::new(None),
        }
    }
}
impl<T: 'static + NVData + Display + FromStr + Debug> Prog for List<T>
where
    <T as FromStr>::Err: Debug,
{
    /// Open (or create) the persistent pool backing the list and hand the
    /// root object to the supplied closure.
    fn perform<F: FnOnce(&Self)>(f: F) {
        let root = P::open::<Self>("list.pool", O_CFNE).unwrap();
        f(&root)
    }
    /// Interpret the command-line operations starting at `args[2]`.
    /// Returns `false` when an operation is missing its required argument.
    fn exec(&self, args: Vec<String>) -> bool {
        if args.len() < 2 {
            Self::help();
            return true;
        }
        let mut i = 2;
        while i < args.len() {
            if let Some(op) = Self::next(&args, &mut i) {
                if op == "help" {
                    Self::help()
                } else if op == "repeat" {
                    // repeat n: run the following operations n times.
                    if let Some(n) = Self::next(&args, &mut i) {
                        let n: usize = n.parse().expect("Expected an integer");
                        if !self.repeat(&args, i, n) {
                            return false;
                        }
                    } else {
                        return false;
                    }
                } else if op == "push_back" {
                    if let Some(n) = Self::next(&args, &mut i) {
                        let n: T = n
                            .parse()
                            .expect(&format!("Expected a(n) {}", std::any::type_name::<T>()));
                        P::transaction(|j| self.push_back(n, j)).unwrap();
                    } else {
                        return false;
                    }
                } else if op == "push_front" {
                    if let Some(n) = Self::next(&args, &mut i) {
                        let n: T = n
                            .parse()
                            .expect(&format!("Expected a(n) {}", std::any::type_name::<T>()));
                        P::transaction(|j| self.push_front(n, j)).unwrap();
                    } else {
                        return false;
                    }
                } else if op == "pop_back" {
                    P::transaction(|j| println!("{:?}", self.pop_back(j))).unwrap();
                } else if op == "pop_front" {
                    P::transaction(|j| println!("{:?}", self.pop_front(j))).unwrap();
                } else if op == "clear" {
                    // FIX: `clear` is advertised by `help` below but was never
                    // actually handled here.
                    P::transaction(|j| self.clear(j)).unwrap();
                } else if op == "run" {
                    if let Some(filename) = Self::next(&args, &mut i) {
                        return self.run(&filename);
                    } else {
                        return false;
                    }
                } else if op == "print" {
                    println!("{}", self)
                }
                // NOTE: the original had a second `else if op == "help"` branch
                // here; it was unreachable (shadowed by the first branch) and
                // has been removed.
            } else {
                return true;
            }
        }
        true
    }
    /// Print the usage summary for the plist subcommand.
    fn help() {
        println!("usage: store plist [OPERATIONS]");
        println!("data type: {}", std::any::type_name::<T>());
        println!();
        println!("OPERATIONS:");
        println!("  push_back data   Push data to the tail");
        println!("  push_front data  Push data to the head");
        println!("  pop_back         Pop an element from the tail");
        println!("  pop_front        Pop an element from the head");
        println!("  repeat n         Repeat the next operation n times");
        println!("  run file         Run a script file");
        println!("  clear            Delete all elements");
        println!("  print            Print the entire list");
        println!("  help             Display help");
        println!();
    }
}
| true
|
1f5f2b89900949343b7096a3974904a98e072f86
|
Rust
|
ebsnet/blockchain
|
/bin/webservice/src/error.rs
|
UTF-8
| 1,001
| 2.703125
| 3
|
[] |
no_license
|
use std::io::Cursor;
use rocket::http::{ContentType, Status};
use rocket::response::Responder;
use rocket::{Request, Response};
/// Errors surfaced by the blockchain web service; each variant maps to an
/// HTTP status in the `Responder` impl below.
#[derive(Debug, Fail)]
pub enum BlockchainError {
    // #[fail(display = "Key pair {} already exists", path)]
    // KeyPairAlreadyExists { path: String },
    /// A submitted block failed validation.
    #[fail(display = "Invalid block")]
    InvalidBlock,
    /// The chain's shared lock could not be acquired.
    #[fail(display = "Cannot get lock")]
    CannotGetLock,
    /// An operation requires a non-empty chain.
    #[fail(display = "Empty chain")]
    EmptyChain,
}
/// Turns an error into a plain-text HTTP response: the `Display` message as
/// the body, and a status mapped per variant (anything not listed —
/// currently `CannotGetLock` — becomes 500 Internal Server Error).
impl Responder<'static> for BlockchainError {
    fn respond_to(self, _: &Request) -> Result<Response<'static>, Status> {
        use BlockchainError::*;
        let msg = format!("{}", self);
        let status = match self {
            InvalidBlock => Status::NotAcceptable,
            EmptyChain => Status::Conflict,
            _ => Status::InternalServerError,
        };
        Response::build()
            .header(ContentType::Plain)
            .sized_body(Cursor::new(msg))
            .status(status)
            .ok()
    }
}
| true
|
5689a94089038dc85053a7b696b9579afc150364
|
Rust
|
Floozutter/flooidsim
|
/midi-chaos/src/main.rs
|
UTF-8
| 1,712
| 2.875
| 3
|
[
"Unlicense"
] |
permissive
|
/// Open a MIDI input port (prompting on stdin when several are available),
/// echo incoming messages to stdout, and quit when the user presses enter.
fn run() -> Result<(), Box<dyn std::error::Error>> {
    use std::io::Write;
    let imidi = midir::MidiInput::new("midi-chaos midir input")?;
    // get an input port (read from console if multiple are available)
    let iports = imidi.ports();
    let iport = match iports.len() {
        0 => return Err("no available input port found".into()),
        1 => {
            println!("only available input port: {}", imidi.port_name(&iports[0]).unwrap());
            &iports[0]
        },
        _ => {
            println!("available input ports:");
            for (i, p) in iports.iter().enumerate() {
                println!("{}: {}", i, imidi.port_name(p).unwrap());
            }
            print!("select input port: ");
            std::io::stdout().flush()?;
            let mut input = String::new();
            std::io::stdin().read_line(&mut input)?;
            iports.get(input.trim().parse::<usize>()?).ok_or("invalid input port selected")?
        }
    };
    println!("\nopening connection...");
    let iport_name = imidi.port_name(iport)?;
    // _connection needs to be named to keep it alive
    let _connection = imidi.connect(iport, "midi-chaos_iport", move |stamp, message, _| {
        // Callback runs on midir's thread for every incoming MIDI message.
        println!("{}: {:?} (len = {})", stamp, message, message.len());
    }, ())?;
    println!("connection open, reading input from `{}`...", iport_name);
    // quit on enter key press
    print!("press enter at any point to quit.");
    std::io::stdout().flush()?;
    {
        let mut input = String::new();
        std::io::stdin().read_line(&mut input)?;
    }
    println!("bye-bye! >:3c");
    Ok(())
}
fn main() {
    // Report failures from `run` on stderr; a clean run produces no output here.
    match run() {
        Ok(()) => {}
        Err(err) => eprintln!("error: {}!", err),
    }
}
| true
|
630b962aaa8ae42f929eca17616653f2c356cf5d
|
Rust
|
dmosse/frippy
|
/src/utils.rs
|
UTF-8
| 3,220
| 3.15625
| 3
|
[
"MIT"
] |
permissive
|
use std::borrow::Cow;
use std::io::{self, Read};
use std::time::Duration;
use reqwest::header::{HeaderValue, ACCEPT_LANGUAGE, CONNECTION};
use reqwest::{Client, ClientBuilder};
use self::error::{DownloadError, ErrorKind};
use failure::ResultExt;
/// A downloadable URL with optional size and timeout limits, configured
/// builder-style via `max_kib` / `timeout`.
#[derive(Clone, Debug)]
pub struct Url<'a> {
    // The target URL (owned or borrowed)
    url: Cow<'a, str>,
    // Maximum download size in KiB; None = unlimited
    max_kib: Option<usize>,
    // Request timeout; None = reqwest's default
    timeout: Option<Duration>,
}
/// Build from an owned string; no limits set.
impl<'a> From<String> for Url<'a> {
    fn from(url: String) -> Self {
        Url {
            url: Cow::from(url),
            max_kib: None,
            timeout: None,
        }
    }
}
/// Build from a borrowed string; no limits set.
impl<'a> From<&'a str> for Url<'a> {
    fn from(url: &'a str) -> Self {
        Url {
            url: Cow::from(url),
            max_kib: None,
            timeout: None,
        }
    }
}
impl<'a> Url<'a> {
    /// Limit the download size to `limit` KiB (checked while streaming).
    pub fn max_kib(mut self, limit: usize) -> Self {
        self.max_kib = Some(limit);
        self
    }
    /// Set the request timeout.
    pub fn timeout(mut self, timeout: Duration) -> Self {
        self.timeout = Some(timeout);
        self
    }
    /// Downloads the file and converts it to a String.
    /// Any invalid bytes are converted to a replacement character.
    ///
    /// The error indicated either a failed download or
    /// that the limit set by max_kib() was reached.
    pub fn request(&self) -> Result<String, DownloadError> {
        // NOTE(review): `build().unwrap()` panics if the client cannot be
        // constructed — consider mapping that into ErrorKind::Connection.
        let client = if let Some(timeout) = self.timeout {
            ClientBuilder::new().timeout(timeout).build().unwrap()
        } else {
            Client::new()
        };
        let mut response = client
            .get(self.url.as_ref())
            .header(CONNECTION, HeaderValue::from_static("close"))
            .header(ACCEPT_LANGUAGE, HeaderValue::from_static("en-US,en;q=0.5"))
            .send()
            .context(ErrorKind::Connection)?;
        // 100 kibibyte buffer
        let mut buf = [0; 100 * 1024];
        let mut written = 0;
        let mut bytes = Vec::new();
        // Read until we reach EOF or max_kib KiB
        loop {
            let len = match response.read(&mut buf) {
                Ok(0) => break,
                Ok(len) => len,
                Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue,
                Err(e) => Err(e).context(ErrorKind::Read)?,
            };
            bytes.extend_from_slice(&buf[..len]);
            written += len;
            // Check if the file is too large to download
            if let Some(max_kib) = self.max_kib {
                if written > max_kib * 1024 {
                    Err(ErrorKind::DownloadLimit)?;
                }
            }
        }
        Ok(String::from_utf8_lossy(&bytes).into_owned())
    }
    /// The URL as a string slice.
    pub fn as_str(&self) -> &str {
        &self.url
    }
}
/// Error kinds for `Url::request`, wrapped into `DownloadError` by the
/// derived `Error` machinery.
pub mod error {
    use failure::Fail;
    use frippy_derive::Error;
    #[derive(Copy, Clone, Eq, PartialEq, Debug, Fail, Error)]
    #[error = "DownloadError"]
    pub enum ErrorKind {
        /// Connection Error
        #[fail(display = "A connection error has occured")]
        Connection,
        /// Read Error
        #[fail(display = "A read error has occured")]
        Read,
        /// Reached download limit error
        #[fail(display = "Reached download limit")]
        DownloadLimit,
    }
}
| true
|
798379e71f6a8a90a907fb47e7733be7e2998bb2
|
Rust
|
arapat/sparrow
|
/src/commons/tree.rs
|
UTF-8
| 4,737
| 2.703125
| 3
|
[] |
no_license
|
use TFeature;
use commons::Example;
use commons::is_zero;
type DimScaleType = usize;
/*
Why JSON but not binary?
- Readable for human
- Compatible with Python
- BufReader-friendly by using newline as separator
*/
/// A fixed-capacity binary decision tree stored as parallel arrays indexed
/// by node id (node 0 is the root; children ids are filled in by `split`).
#[derive(Serialize, Deserialize, Debug)]
pub struct Tree {
    // Capacity: (num_splits + 1) * 2 - 1 nodes
    max_nodes: DimScaleType,
    // Number of nodes currently in use
    pub num_nodes: DimScaleType,
    // Left child id per node (0 = none)
    left_child: Vec<DimScaleType>,
    // Right child id per node (0 = none)
    right_child: Vec<DimScaleType>,
    // Feature index used to split this node; None for leaves
    split_feature: Vec<Option<DimScaleType>>,
    // Split threshold per node (examples go left when feature <= threshold)
    threshold: Vec<TFeature>,
    // Accumulated prediction value per node
    predicts: Vec<f32>,
    // Depth of each node (root = 0)
    leaf_depth: Vec<DimScaleType>
}
// Field-by-field deep clone.
// NOTE(review): all fields appear to be Clone (usize + Vec), so this manual
// impl could likely be replaced by `#[derive(Clone)]` on `Tree` — confirm
// that `TFeature` implements `Clone`.
impl Clone for Tree {
    fn clone(&self) -> Tree {
        Tree {
            max_nodes: self.max_nodes,
            num_nodes: self.num_nodes,
            left_child: self.left_child.clone(),
            right_child: self.right_child.clone(),
            split_feature: self.split_feature.clone(),
            threshold: self.threshold.clone(),
            predicts: self.predicts.clone(),
            leaf_depth: self.leaf_depth.clone()
        }
    }
}
impl Tree {
    /// Create a tree with capacity for `num_splits` splits
    /// (`(num_splits + 1) * 2 - 1` nodes) containing only a root node with
    /// prediction 0 at depth 0.
    pub fn new(num_splits: usize) -> Tree {
        let max_nodes = (num_splits + 1) * 2 - 1;
        let mut tree = Tree {
            max_nodes: max_nodes,
            num_nodes: 0,
            left_child: Vec::with_capacity(max_nodes as usize),
            right_child: Vec::with_capacity(max_nodes as usize),
            split_feature: Vec::with_capacity(max_nodes as usize),
            threshold: Vec::with_capacity(max_nodes as usize),
            predicts: Vec::with_capacity(max_nodes as usize),
            leaf_depth: Vec::with_capacity(max_nodes as usize)
            // leaf_parent: Vec::with_capacity(max_leaves),
            // leaf_count: Vec::with_capacity(max_leaves),
            // internal_value: Vec::with_capacity(max_leaves as usize),
            // internal_count: Vec::with_capacity(max_leaves),
        };
        tree.add_new_node(0.0, 0);
        tree
    }
    /// Release excess capacity in all per-node arrays.
    pub fn release(&mut self) {
        self.left_child.shrink_to_fit();
        self.right_child.shrink_to_fit();
        self.split_feature.shrink_to_fit();
        self.threshold.shrink_to_fit();
        self.predicts.shrink_to_fit();
        self.leaf_depth.shrink_to_fit();
    }
    /// Turn leaf `parent` into an internal node splitting on `feature` at
    /// `threshold`; the two new children inherit the parent's prediction
    /// plus the given left/right deltas. Returns (left id, right id).
    pub fn split(
        &mut self, parent: usize, feature: usize, threshold: TFeature,
        left_predict: f32, right_predict: f32,
    ) -> (usize, usize) {
        let predict = self.predicts[parent];
        let parent_depth = self.leaf_depth[parent];
        self.split_feature[parent] = Some(feature as DimScaleType);
        self.threshold[parent] = threshold;
        // `num_nodes` is the id of the next node to be added.
        self.left_child[parent] = self.num_nodes as DimScaleType;
        self.add_new_node(predict + left_predict, parent_depth + 1);
        self.right_child[parent] = self.num_nodes as DimScaleType;
        self.add_new_node(predict + right_predict, parent_depth + 1);
        (self.left_child[parent], self.right_child[parent])
    }
    /// Route `data` from the root to a leaf (left when feature <= threshold)
    /// and return the leaf's index and prediction.
    pub fn get_leaf_index_prediction(&self, data: &Example) -> (usize, f32) {
        let mut node: usize = 0;
        let feature = &(data.feature);
        while let Some(split_feature) = self.split_feature[node] {
            node = if feature[split_feature as usize] <= self.threshold[node] {
                self.left_child[node]
            } else {
                self.right_child[node]
            } as usize;
        }
        (node, self.predicts[node])
    }
    /// Prediction of the leaf that `data` routes to.
    pub fn get_leaf_prediction(&self, data: &Example) -> f32 {
        self.get_leaf_index_prediction(data).1
    }
    /// True when the tree has used up its node capacity.
    pub fn is_full_tree(&self) -> bool {
        trace!("is-full-tree, {}, {}", self.num_nodes, self.max_nodes);
        self.num_nodes >= self.max_nodes
    }
    /// Append a fresh leaf with the given prediction and depth.
    fn add_new_node(&mut self, predict: f32, depth: DimScaleType) {
        self.num_nodes += 1;
        self.left_child.push(0);
        self.right_child.push(0);
        self.split_feature.push(None);
        self.threshold.push(0);
        self.predicts.push(predict);
        self.leaf_depth.push(depth);
    }
}
impl PartialEq for Tree {
    /// Two trees are equal when their first `num_nodes` entries agree on
    /// structure (split features, children), thresholds, and — approximately,
    /// via `is_zero` on the difference — predictions.
    fn eq(&self, other: &Tree) -> bool {
        let k = self.num_nodes;
        if k != other.num_nodes
            || self.split_feature[0..k] != other.split_feature[0..k]
            || self.left_child[0..k] != other.left_child[0..k]
            || self.right_child[0..k] != other.right_child[0..k]
        {
            return false;
        }
        (0..k).all(|i| {
            self.threshold[i] == other.threshold[i]
                && is_zero(self.predicts[i] - other.predicts[i])
        })
    }
}
// NOTE(review): `Eq` asserts a full equivalence relation, but the comparison
// above uses an approximate float check — confirm `is_zero`'s tolerance keeps
// this transitive in practice.
impl Eq for Tree {}
| true
|
aec028d4a40e94655c3f9bb7ccef7f27ffc08004
|
Rust
|
lmt-swallow/rvvisor
|
/hypervisor/src/uart.rs
|
UTF-8
| 2,212
| 3.078125
| 3
|
[] |
no_license
|
use crate::memlayout;
use core::fmt::{Error, Write};
/// Memory-mapped UART device; `addr_base` is the base address of its
/// register block (register offsets are computed in the impl below).
pub struct Uart {
    addr_base: *mut u8,
}
impl Write for Uart {
    /// Transmit every byte of the string over the UART, enabling the
    /// `write!`/`print!` formatting machinery on top of `put`.
    fn write_str(&mut self, s: &str) -> Result<(), Error> {
        s.bytes().for_each(|byte| self.put(byte));
        Ok(())
    }
}
impl Uart {
    /// Wrap the UART register block at `uart_base`.
    /// The register names and offsets below match a 16550-style UART
    /// (THR/RBR share offset 0 for write/read respectively).
    pub fn new(uart_base: usize) -> Self {
        let ptr = uart_base as *mut u8;
        Uart { addr_base: ptr }
    }
    // Transmitter Holding Register (write side of offset 0)
    fn thr(&mut self) -> *mut u8 {
        unsafe { self.addr_base.offset(0) }
    }
    // Receive Buffer Register (read side of offset 0)
    fn rbr(&mut self) -> *mut u8 {
        unsafe { self.addr_base.offset(0) }
    }
    // Interrupt Enable Register
    fn ier(&mut self) -> *mut u8 {
        unsafe { self.addr_base.offset(1) }
    }
    // FIFO Control Register
    fn fcr(&mut self) -> *mut u8 {
        unsafe { self.addr_base.offset(2) }
    }
    // Line Control Register
    fn lcr(&mut self) -> *mut u8 {
        unsafe { self.addr_base.offset(3) }
    }
    // Line Status Register
    fn lsr(&mut self) -> *mut u8 {
        unsafe { self.addr_base.offset(5) }
    }
    /// Configure the device: receive interrupts on, FIFO on, 8-bit words.
    pub fn init(&mut self) {
        unsafe {
            // enable interrupts
            self.ier().write_volatile(1 << 0);
            // enable FIFO
            self.fcr().write_volatile(1 << 0);
            // set WLS to 8 bits
            self.lcr().write_volatile((1 << 0) | (1 << 1));
        }
    }
    /// Blocking write of one byte: busy-wait until the transmitter is ready.
    pub fn put(&mut self, c: u8) {
        unsafe {
            // spin until bit 5 of LSR holds
            while self.lsr().read_volatile() & (1 << 5) == 0 {}
            // add `c` to the FIFO
            self.thr().write_volatile(c);
        }
    }
    /// Non-blocking read of one byte; None when the receive FIFO is empty.
    pub fn get(&mut self) -> Option<u8> {
        unsafe {
            // read LSR first in order to check whether read FIFO has any data or not
            if self.lsr().read_volatile() & (1 << 0) == 0 {
                None
            } else {
                Some(self.rbr().offset(0).read_volatile())
            }
        }
    }
}
// `print!` formats into a fresh Uart handle each call.
// NOTE(review): the UART base is hardcoded to 0x1000_0000 here while other
// code uses `memlayout::UART_BASE` — confirm they are the same address.
#[macro_export]
macro_rules! print
{
    ($($args:tt)+) => ({
        use core::fmt::Write;
        let _ = write!(crate::uart::Uart::new(0x1000_0000), $($args)+);
    });
}
// `println!` delegates to `print!`, appending "\r\n" (serial line ending).
#[macro_export]
macro_rules! println
{
    () => ({
        print!("\r\n")
    });
    ($fmt:expr) => ({
        print!(concat!($fmt, "\r\n"))
    });
    ($fmt:expr, $($args:tt)+) => ({
        print!(concat!($fmt, "\r\n"), $($args)+)
    });
}
/// UART receive-interrupt handler: read one byte, if any, and echo it back.
/// Backspace (8) is echoed as backspace-space-backspace to erase on screen;
/// CR/LF (10/13) becomes a fresh line; anything else is echoed verbatim.
pub fn handle_interrupt() {
    let mut uart = Uart::new(memlayout::UART_BASE);
    if let Some(c) = uart.get() {
        // TODO (enhancement): pass it buffer or somewhere else
        // echo back
        match c {
            8 => {
                println!("{} {}", 8 as char, 8 as char);
            }
            10 | 13 => {
                println!();
            }
            _ => {
                print!("{}", c as char);
            }
        }
    }
}
| true
|
6d528e5e5a247fa9eedb04257c4fceb27ed31f60
|
Rust
|
Turbo87/rust-igc
|
/src/utils/headers/source.rs
|
UTF-8
| 1,063
| 3.0625
| 3
|
[
"Apache-2.0"
] |
permissive
|
/// Source of an IGC header record: 'F' = flight recorder, 'O' = observer,
/// any other character is kept verbatim in `Other`.
#[derive(Debug, PartialEq, Eq, Hash, Clone, EnumString)]
pub enum HeaderSource {
    FlightRecorder,
    Observer,
    Other(char),
}
impl HeaderSource {
    /// Map a raw header-source byte to its variant; unknown bytes become
    /// `Other`. Debug builds assert the byte is ASCII ("unchecked" means the
    /// release build skips that validation).
    pub fn from_byte_unchecked(byte: u8) -> HeaderSource {
        debug_assert!(byte.is_ascii());
        let c = byte as char;
        if c == 'F' {
            HeaderSource::FlightRecorder
        } else if c == 'O' {
            HeaderSource::Observer
        } else {
            HeaderSource::Other(c)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_from_byte() {
        // Known source characters map onto their dedicated variants…
        assert_eq!(HeaderSource::from_byte_unchecked(b'F'), HeaderSource::FlightRecorder);
        assert_eq!(HeaderSource::from_byte_unchecked(b'O'), HeaderSource::Observer);
        // …anything else is kept as `Other`.
        assert_eq!(HeaderSource::from_byte_unchecked(b'2'), HeaderSource::Other('2'));
        assert_eq!(HeaderSource::from_byte_unchecked(b'?'), HeaderSource::Other('?'));
    }
    proptest! {
        // Fuzz the full ASCII range: conversion must never panic.
        #[test]
        #[allow(unused_must_use)]
        fn from_byte_doesnt_crash(b in 0..127u8) {
            HeaderSource::from_byte_unchecked(b)
        }
    }
}
| true
|
4d8d6695da8db7cb4854c3cbffefb5c28dbc798b
|
Rust
|
yhnu/RustLearn
|
/ch06-02-match/src/main.rs
|
UTF-8
| 1,975
| 4.15625
| 4
|
[] |
no_license
|
// 可以把 match 表达式想象成某种硬币分类器:硬币滑入有着不同大小孔洞的轨道,每一个硬币都会掉入符合它大小的孔洞。
// 同样地,值也会通过 match 的每一个模式,并且在遇到第一个 “符合” 的模式时,值会进入相关联的代码块并在执行中被使用。
#![allow(unused)]
/// US coin denominations; `Quarter` carries the issuing state
/// (the 1999–2008 state-quarter series).
enum Coin {
    Penny,
    Nickel,
    Dime,
    Quarter(UsState),
}
/// Return the face value of `coin` in cents.
///
/// Demonstrates `match` on an enum: each arm is an expression, and a
/// pattern can bind the data carried by a variant.
fn value_in_cents(coin: Coin) -> u8 {
    match coin {
        // Looks like `switch`, but each arm may be a full block whose
        // final expression is the arm's value.
        Coin::Penny => {
            println!("Lucky penny!");
            1
        }
        Coin::Nickel => 5,
        Coin::Dime => 10,
        // Bind the variant's payload (the state) so the arm can use it.
        Coin::Quarter(state) => {
            println!("{:#?}", state);
            // A US quarter is worth 25 cents (the original returned 15).
            25
        }
    }
}
// A pattern can bind part of a matched value (used by `Coin::Quarter`).
#[derive(Debug)] // lets us print the state name with `{:?}` / `{:#?}`
enum UsState {
    Alabama,
    Alaska,
    // --snip--
}
// Matching `Option<T>`: `Option::map` applies the closure to `Some` and
// passes `None` through untouched — exactly the manual
// `match { None => None, Some(i) => Some(i + 1) }` collapsed into the
// idiomatic one-liner.
fn plus_one(x: Option<i32>) -> Option<i32> {
    x.map(|i| i + 1)
}
// 将 match 与枚举相结合在很多场景中都是有用的。你会在 Rust 代码中看到很多这样的模式:match 一个枚举,绑定其中的值到一个变量,接着根据其值执行代码。
// 这在一开始有点复杂,不过一旦习惯了,你会希望所有语言都拥有它!这一直是用户的最爱。
// The `_` wildcard matches any remaining value, keeping the match
// exhaustive while doing nothing for values we don't care about.
fn underline_match() {
    let some_u8_value = 0u8;
    let word = match some_u8_value {
        1 => "one",
        3 => "three",
        5 => "five",
        7 => "seven",
        // Not one of the interesting values: bail out silently.
        _ => return,
    };
    println!("{}", word);
}
// `match` gets wordy when only a single case matters; Rust provides
// `if let` for that situation.
fn main() {
    let five = Some(5);
    // six == Some(6): plus_one maps Some(5) -> Some(6).
    let six = plus_one(five);
    println!("Hello, world!");
}
| true
|
a7b53aa1228dcf933b8e0ba8cb26acdbf4e35d49
|
Rust
|
uorbe001/rustspec-assertions
|
/src/matchers/contain.rs
|
UTF-8
| 1,031
| 3.015625
| 3
|
[
"MIT"
] |
permissive
|
use matchers::matcher::Matcher;
/// Matcher asserting that a `Vec<T>` contains a given value.
pub struct Contain<T: PartialEq> {
    value: T,
    // (file, line) of the assertion call site, for error reporting.
    file_line: (&'static str, u32)
}
impl <T: PartialEq> Matcher<Vec<T>> for Contain<T> {
    // Passes when the expected vector contains `self.value`.
    fn assert_check(&self, expected: Vec<T>) -> bool {
        expected.contains(&self.value)
    }
    // NOTE(review): `stringify!` expands to the literal source tokens
    // ("expected" / "self.value"), not the runtime values — `T` carries no
    // `Debug`/`Display` bound, so values cannot be formatted. The message
    // is therefore always the same text; confirm this is intentional.
    #[allow(unused_variables)] fn msg(&self, expected: Vec<T>) -> String {
        format!("Expected {} to contain {} but it did not.", stringify!(expected), stringify!(self.value))
    }
    #[allow(unused_variables)] fn negated_msg(&self, expected: Vec<T>) -> String {
        format!("Expected {} NOT to contain {} but it did.", stringify!(expected), stringify!(self.value))
    }
    // Assertion site recorded at construction time.
    fn get_file_line(&self) -> (&'static str, u32) {
        self.file_line
    }
}
/// Construct a boxed `Contain` matcher; normally invoked via the
/// `contain!` macro so the call site's file/line are captured.
pub fn contain<T: PartialEq>(value: T, file_line: (&'static str, u32)) -> Box<Contain<T>> {
    Box::new(Contain { value: value, file_line: file_line })
}
// Captures the caller's file and line alongside the expected value.
// (`expand_line!` is presumably a project-local `line!` variant — verify
// in the crate root.)
#[macro_export]
macro_rules! contain(
    ($value:expr) => (
        contain($value.clone(), (file!(), expand_line!()))
    );
);
| true
|
76f95e567f850bfd1c3a58b5a8d044da240b2684
|
Rust
|
emlun/advent_of_code_2017
|
/emlun-rust/src/day09.rs
|
UTF-8
| 1,402
| 3.34375
| 3
|
[] |
no_license
|
/// Advent of Code 2017, day 9: stream processing.
pub struct Solver {}
impl ::framework::Solver for Solver {
    type A = u32;
    type B = u32;
    /// Feed every character of the input through the state machine and
    /// return (total group score, count of non-cancelled garbage chars).
    fn solve(&self, input: &Vec<&str>) -> (Self::A, Self::B) {
        let mut state = State::new();
        input.iter()
            .flat_map(|line| line.chars())
            .for_each(|next| step(&mut state, next));
        (state.score, state.garbage_count)
    }
}
/// Mutable parser state threaded through `step`.
struct State {
    // Current group nesting depth.
    depth: u32,
    // Number of non-cancelled characters seen inside garbage.
    garbage_count: u32,
    // True when the previous garbage character was `!`.
    ignore_next: bool,
    // True while inside a `<...>` garbage section.
    in_garbage: bool,
    // Sum of the depths of all closed groups.
    score: u32,
}
impl State {
    /// Initial state: depth 0, outside garbage, nothing counted yet.
    fn new() -> State {
        State {
            depth: 0,
            garbage_count: 0,
            ignore_next: false,
            in_garbage: false,
            score: 0,
        }
    }
}
fn step(state: &mut State, next: char) {
if state.in_garbage {
if state.ignore_next {
state.ignore_next = false;
} else {
match next {
'!' => { state.ignore_next = true; }
'>' => { state.in_garbage = false; }
_ => { state.garbage_count += 1; }
}
}
} else {
match next {
'{' => { state.depth += 1; }
'}' => {
state.score += state.depth;
state.depth -= 1;
}
'<' => { state.in_garbage = true; }
',' => {}
_ => panic!(format!("Unknown non-garbage: {}", next))
}
}
}
| true
|
48d8bc94c289a5faa7b731449c2a8822098f6f53
|
Rust
|
bahildebrand/usync
|
/src/task/waker.rs
|
UTF-8
| 1,627
| 2.765625
| 3
|
[] |
no_license
|
use alloc::{
sync::Arc,
task::Wake
};
use futures_util::task::AtomicWaker;
use core::{
cmp::Ordering,
task::Waker
};
use crossbeam_queue::ArrayQueue;
use super::TaskId;
/// Wakes a task by re-queueing its id on the executor's ready queue.
pub(crate) struct TaskWaker {
    task_id: TaskId,
    // Shared with the executor. The queue is bounded, so waking can fail
    // if the executor's capacity is exceeded.
    task_queue: Arc<ArrayQueue<TaskId>>,
}
impl TaskWaker {
    /// Build a `core::task::Waker` backed by a `TaskWaker`.
    pub(crate) fn new(task_id: TaskId, task_queue: Arc<ArrayQueue<TaskId>>) -> Waker {
        Waker::from(Arc::new(TaskWaker {
            task_id,
            task_queue,
        }))
    }
    /// Push the task id back onto the ready queue.
    ///
    /// Panics when the queue is full (executor over capacity).
    pub(crate) fn wake_task(&self) {
        self.task_queue.push(self.task_id).expect("task_queue full");
    }
}
impl Wake for TaskWaker {
    // Waking by value and by reference behave identically here; both
    // simply re-enqueue the task id.
    fn wake(self: Arc<Self>) {
        self.wake_task();
    }
    fn wake_by_ref(self: &Arc<Self>) {
        self.wake_task();
    }
}
/// Waker for a timer firing at time `ms`; ordered by deadline so timers
/// can be kept in a priority structure.
pub(crate) struct TimerWaker {
    // Deadline in milliseconds.
    pub(crate) ms: u64,
    // The most recently registered task waker for this timer.
    waker: AtomicWaker
}
impl TimerWaker {
    /// Create a timer waker with deadline `ms` and no registered task.
    pub(crate) fn new(ms: u64) -> TimerWaker {
        TimerWaker {
            // field-init shorthand (was the redundant `ms: ms`)
            ms,
            waker: AtomicWaker::new()
        }
    }
    /// Register the task waker to notify when the timer fires; replaces
    /// any previously registered waker.
    pub(crate) fn register_waker(&self, waker: &Waker) {
        self.waker.register(waker);
    }
    /// Wake the most recently registered task, if any.
    pub(crate) fn wake(&self) {
        self.waker.wake();
    }
    /// The timer's deadline in milliseconds.
    pub(crate) fn get_time(&self) -> u64 {
        self.ms
    }
}
// Equality and ordering consider only the deadline (`ms`) and ignore the
// registered waker. PartialEq and Ord agree on this key, as Ord requires.
impl Eq for TimerWaker {}
impl PartialEq for TimerWaker {
    fn eq(&self, other: &Self) -> bool {
        self.ms == other.ms
    }
}
impl Ord for TimerWaker {
    fn cmp(&self, other: &Self) -> Ordering {
        self.ms.cmp(&other.ms)
    }
}
impl PartialOrd for TimerWaker {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
| true
|
a6a669f9adf3142d11f48cb19020b4d99fe15e9d
|
Rust
|
Victory/bells-quantum-entanglement
|
/bells_qe/src/lib.rs
|
UTF-8
| 7,320
| 3.3125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#![feature(core)]
#![feature(rand)]
/**
* Simulate Bell's quantum entanglement experiments
*/
use std::thread::Thread;
use std::rand::random;
use std::fmt;
use Direction::{SpinUp, SpinDown, SpinSuper};
use Plan::{Trivial, OddBall};
/// Measured spin of a particle; `SpinSuper` marks an as-yet unmeasured
/// superposition. (This file targets a pre-1.0 Rust toolchain.)
#[derive(Copy, Debug, PartialEq)]
enum Direction {
    SpinUp,
    SpinDown,
    SpinSuper,
}
impl fmt::Display for Direction {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let r = match self {
            &SpinUp => "SpinUp",
            &SpinDown => "SpinDown",
            &SpinSuper => "SpinSuper"
        };
        write!(f, "{}", r)
    }
}
impl fmt::Display for Detector {
    // Detector orientations are named after clock-face positions.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use Detector::{D12, D3, D9};
        let r = match self {
            &D12 => "12 oclock",
            &D3 => "3 oclock",
            &D9 => "9 oclock"
        };
        write!(f, "{}", r)
    }
}
/// A left/right pair — either two entangled particles or their spins.
struct Pair<T> {
    lhs: T,
    rhs: T
}
/// A particle carrying a (possibly unmeasured) spin.
struct Particle {
    spin: Direction
}
/// Hidden-variable strategy used by the local-realism simulation.
enum Plan {
    Trivial, // up-up-up -> down-down-down
    OddBall // up-down-up -> down-up-down
}
/// Orientation of the spin detector.
#[derive(Debug, PartialEq)]
enum Detector {
    D12, // 12 o'clock
    D3, // 3 o'clock
    D9, // 9 o' clock
}
impl Particle {
    /// Create two entangled particles, both still in superposition.
    fn new_pair () -> Pair<Particle> {
        let d1 = SpinSuper;
        let d2 = SpinSuper;
        let p1 = Particle{spin: d1};
        let p2 = Particle{spin: d2};
        return Pair{lhs: p1, rhs: p2};
    }
    /// Collapse this particle's spin according to the detector angle.
    ///
    /// NOTE(review): D12 always yields SpinUp while D3/D9 yield SpinDown
    /// with probability 1/4 — presumably the correlation for the chosen
    /// detector angles; confirm against the experiment's write-up.
    pub fn measure (&mut self, detector: &Detector) {
        self.spin = match detector {
            &Detector::D12 => SpinUp,
            &Detector::D3 => match rand32() {
                0.0 ... 0.25 => SpinDown,
                0.25 ... 1.00 => SpinUp,
                _ => unreachable!()
            },
            &Detector::D9 => match rand32() {
                0.0 ... 0.25 => SpinDown,
                0.25 ... 1.00 => SpinUp,
                _ => unreachable!()
            },
        };
    }
    // NOTE: in either spooky or hidden only if measured in the same
    // direction must the spins be opposite
    // measure with with a message
    /// Quantum ("spooky action") model: measure `self` at 12 o'clock; if
    /// the friend's randomly chosen detector matches (1/3 of the time),
    /// the friend's spin is forced opposite, otherwise the friend is
    /// measured independently.
    pub fn spooky (&mut self, friend: &mut Particle) -> Pair<Direction> {
        let detector1 = Detector::D12;
        let detector2 = Particle::get_detector_direction();
        self.measure (&detector1);
        if detector1 == detector2 { // 1/3
            friend.spin = match self.spin {
                SpinUp => SpinDown,
                SpinDown => SpinUp,
                _ => unreachable!()
            };
        } else { // 2/3
            friend.measure(&detector2);
        }
        return Pair{lhs: self.spin, rhs: friend.spin};
    }
    /// Pick one of the three detector angles uniformly at random.
    fn get_detector_direction () -> Detector {
        let detector = match rand32() {
            0.0 ... 0.3333333 => Detector::D12,
            0.3333333 ... 0.6666666 => Detector::D3,
            0.6666666 ... 1.00 => Detector::D9,
            _ => unreachable!()
        };
        return detector;
    }
    /// Local hidden-variable model: both spins are pre-decided by `plan`,
    /// with each particle drawing its own detector direction.
    pub fn hidden_information (&mut self, friend: &mut Particle, plan: Plan) -> Pair<Direction> {
        let detector = Particle::get_detector_direction();
        let spin = match plan {
            Trivial => SpinUp,
            OddBall => match detector {
                Detector::D3 => SpinDown,
                _ => SpinUp
            }
        };
        friend.spin = spin;
        // The second particle draws an independent detector direction.
        let detector = Particle::get_detector_direction();
        let spin = match plan {
            Trivial => SpinDown,
            OddBall => match detector {
                Detector::D3 => SpinUp,
                _ => SpinDown
            }
        };
        self.spin = spin;
        return Pair{lhs: self.spin, rhs: friend.spin};
    }
}
/// Uniform random `f32` in `[0, 1)` — thin wrapper over `std::rand`.
fn rand32 () -> f32 {
    random::<f32>()
}
/**
 * run spooky for trials number of trials
 *
 * Returns how many of the `trials` entangled pairs ended up with
 * differing spins under the quantum model.
 */
pub fn get_spooky (trials: f64) -> usize {
    let mut num_different = 0;
    let mut lhs;
    let mut rhs;
    for _ in 0 .. trials as usize {
        // Fresh entangled pair each trial.
        let particles = Particle::new_pair();
        lhs = particles.lhs;
        rhs = particles.rhs;
        lhs.spooky(&mut rhs);
        if lhs.spin != rhs.spin {
            num_different += 1;
        }
    }
    return num_different;
}
/// Print the percentage of differing pairs under the quantum model
/// (expected to hover around 50%).
pub fn run_spooky (trials: f64) {
    let num_different = get_spooky(trials) as f64;
    println!("Percent different for spooky {}%", 100f64 * (num_different) / trials as f64);
    println!(" Should be about 1/2 or 50%");
}
/**
 * Run test for hidden information, set the plan_probability to choose
 * OddBall vs Trivial
 *
 * `plan_probability` is the chance (in [0, 1]) of picking `OddBall` for
 * a given pair. Returns how many pairs ended up with differing spins.
 */
pub fn get_hidden(trials: f64, plan_probability: f32) -> usize {
    let mut num_different = 0;
    let mut lhs;
    let mut rhs;
    for _ in 0 .. trials as usize {
        let particles = Particle::new_pair();
        lhs = particles.lhs;
        rhs = particles.rhs;
        // Choose the hidden-variable plan for this pair at random.
        if rand32() < plan_probability {
            lhs.hidden_information(&mut rhs, OddBall);
        } else {
            lhs.hidden_information(&mut rhs, Trivial);
        }
        if lhs.spin != rhs.spin {
            num_different += 1;
        }
    }
    return num_different;
}
/// Print the percentage of differing pairs under the hidden-variable
/// model; Bell's inequality bounds this below by 5/9.
pub fn run_hidden (trials: f64, plan_probability: f32) {
    let num_different = get_hidden(trials, plan_probability) as f64;
    println!("Percent different for hidden info {}%", 100f64 * (num_different) / trials as f64);
    println!(" Should be greater than 5/9th or {}%", 100.0 * 5.0/9.0);
    println!("With OddBall choosen {}% of the time and Trivial {}% of the time",
             100.0 * plan_probability,
             100.0 * (1.0 - plan_probability));
}
extern crate test;
// Number of simulated pairs per benchmark iteration.
static BENCH_ITTERS: f64 = 10000f64;
// Baseline: quantum model alone.
#[bench]
fn bench_one_spooky (b: &mut test::Bencher) {
    b.iter(|| {
        let trials: f64 = BENCH_ITTERS;
        get_spooky(trials);
    });
}
// Baseline: hidden-variable model alone, 50/50 plan mix.
#[bench]
fn bench_one_hidden (b: &mut test::Bencher) {
    b.iter(|| {
        let trials: f64 = BENCH_ITTERS;
        get_hidden(trials, 0.5);
    });
}
// All four workloads run back to back on one thread.
#[bench]
fn bench_many_linear (b: &mut test::Bencher) {
    b.iter(|| {
        let trials: f64 = BENCH_ITTERS;
        get_spooky(trials);
        get_hidden(trials, 0.5);
        get_hidden(trials, 1.0);
        get_hidden(trials, 0.0);
    })
}
// Same workloads, one thread each.
// NOTE(review): these spawned threads are never joined, so the iteration
// measures spawn overhead rather than the simulation work — confirm this
// is intended.
#[bench]
fn bench_many_threads (b: &mut test::Bencher) {
    b.iter(|| {
        let trials: f64 = BENCH_ITTERS;
        Thread::spawn(move || {
            get_spooky(trials);
        });
        Thread::spawn(move || {
            get_hidden(trials, 0.5);
        });
        Thread::spawn(move || {
            get_hidden(trials, 1.0);
        });
        Thread::spawn(move || {
            get_hidden(trials, 0.0);
        });
    })
}
// Statistical check: over 1000 trials roughly half the pairs should
// differ; accepts 45%–55%.
// NOTE(review): probabilistic — can flake on an unlucky run.
#[test]
fn spooky_is_nearly_fiftyfifty () {
    let trials: f64 = 1000f64;
    let num_different = get_spooky(trials);
    let in_range = num_different > 450 && num_different < 550;
    assert!(in_range);
}
// The hidden-variable model must disagree at least 5/9 (~55.5%) of the
// time for any plan mix (Bell's bound); sweep plan_probability over
// 0.0, 0.5 and 1.0.
#[test]
fn hidden_is_nearly_more_fiftyfifty () {
    let trials: f64 = 100000f64;
    let num_different = get_hidden(trials, 0.0);
    let in_range = num_different > 55000;
    assert!(in_range);
    // `plan_probability` is a probability in [0, 1]; the original passed
    // 5.0 here, which behaves as "always OddBall" and merely duplicated
    // the 1.0 case below. 0.5 restores the intended midpoint.
    let num_different = get_hidden(trials, 0.5);
    let in_range = num_different > 55000;
    assert!(in_range);
    let num_different = get_hidden(trials, 1.0);
    let in_range = num_different > 55000;
    assert!(in_range);
}
| true
|
fa00758f68af9e3eff66ee6e299dfd7a4e212334
|
Rust
|
apoorvchebolu/cs4414-ps0
|
/match.rs
|
UTF-8
| 271
| 2.796875
| 3
|
[] |
no_license
|
// NOTE: this file targets a pre-1.0 Rust (`println` as a function, not a
// macro) and will not build on modern toolchains.
fn main(){
    let x = (41, false);
    // Tuple patterns with guards; arms are tried top to bottom.
    match x{
        (y,true) if y>20 && y<26 => {println("Statement a");},
        (y,true) if y<20 || y>26 => {println("Statement b");},
        // (41, false) lands here: 40 < 41 < 49.
        (y,_) if y>40 && y<49 => {println("Statement c");},
        (_,_) => {println("Default");}
    }
}
| true
|
a812bcd085bc9069635578f0715157f7517fd89a
|
Rust
|
brigand/couchbase-rs
|
/couchbase/src/options.rs
|
UTF-8
| 1,841
| 2.734375
| 3
|
[] |
no_license
|
use std::time::Duration;
/// Options for `get` operations.
#[derive(Debug, Default)]
pub struct GetOptions {
    // Per-operation timeout; `None` falls back to the client default.
    timeout: Option<Duration>,
}

impl GetOptions {
    /// Create an empty set of options (no timeout configured).
    ///
    /// `Default` is derived as well (clippy `new_without_default`), so
    /// callers can also use `GetOptions::default()`.
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style setter for the operation timeout.
    pub fn set_timeout(mut self, timeout: Duration) -> Self {
        self.timeout = Some(timeout);
        self
    }

    /// The configured timeout, if any.
    pub fn timeout(&self) -> &Option<Duration> {
        &self.timeout
    }
}
/// Options for `insert` operations.
#[derive(Debug, Default)]
pub struct InsertOptions {
    // Per-operation timeout; `None` falls back to the client default.
    timeout: Option<Duration>,
}

impl InsertOptions {
    /// Create an empty set of options (no timeout configured).
    ///
    /// `Default` is derived as well (clippy `new_without_default`).
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style setter for the operation timeout.
    pub fn set_timeout(mut self, timeout: Duration) -> Self {
        self.timeout = Some(timeout);
        self
    }

    /// The configured timeout, if any.
    pub fn timeout(&self) -> &Option<Duration> {
        &self.timeout
    }
}
/// Options for `upsert` operations.
#[derive(Debug)]
pub struct UpsertOptions {
    // `None` means "use the client default timeout".
    timeout: Option<Duration>,
}

impl UpsertOptions {
    /// Start with every option unset.
    pub fn new() -> Self {
        UpsertOptions { timeout: None }
    }

    /// Consume `self`, returning options with the timeout set.
    pub fn set_timeout(self, timeout: Duration) -> Self {
        UpsertOptions {
            timeout: Some(timeout),
        }
    }

    /// Read back the configured timeout.
    pub fn timeout(&self) -> &Option<Duration> {
        &self.timeout
    }
}
/// Options for `replace` operations.
#[derive(Debug)]
pub struct ReplaceOptions {
    // `None` means "use the client default timeout".
    timeout: Option<Duration>,
}

impl ReplaceOptions {
    /// Start with no timeout configured.
    pub fn new() -> Self {
        Self { timeout: None }
    }

    /// Set the operation timeout, returning the updated options.
    pub fn set_timeout(self, timeout: Duration) -> Self {
        Self {
            timeout: Some(timeout),
        }
    }

    /// Currently configured timeout (`None` = client default).
    pub fn timeout(&self) -> &Option<Duration> {
        &self.timeout
    }
}
/// Options for `remove` operations.
#[derive(Debug, Default)]
pub struct RemoveOptions {
    // Per-operation timeout; `None` falls back to the client default.
    timeout: Option<Duration>,
}

impl RemoveOptions {
    /// Create an empty set of options (no timeout configured).
    ///
    /// `Default` is derived as well (clippy `new_without_default`).
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style setter for the operation timeout.
    pub fn set_timeout(mut self, timeout: Duration) -> Self {
        self.timeout = Some(timeout);
        self
    }

    /// The configured timeout, if any.
    pub fn timeout(&self) -> &Option<Duration> {
        &self.timeout
    }
}
| true
|
e476796e9da775523467c9cf02cae0dbe4b6d91b
|
Rust
|
khonoka/hasurustbot
|
/src/file_data.rs
|
UTF-8
| 2,379
| 3.3125
| 3
|
[] |
no_license
|
use std::path::Path;
use std::{collections::BTreeSet, io::BufReader};
use std::fs::{create_dir_all, File};
use std::io::prelude::*;
/// A line-oriented text file held in memory and mirrored to disk.
pub struct FileData {
    // Path of the backing file.
    _file_name: String,
    // All lines currently in the file, in file order.
    pub content: Vec<String>,
    // When true, lines were de-duplicated (and sorted) at load time.
    _if_unique: bool,
}
impl FileData {
    /// Load (or create) `file_name` and read all of its lines.
    ///
    /// Missing parent directories and a missing file are created. When
    /// `if_unique` is true, duplicate lines are removed — note this also
    /// sorts the lines (BTreeSet iteration order) — and the cleaned list
    /// is written back to disk.
    pub fn new<S>(file_name: S, if_unique: bool) -> Result<Self, std::io::Error>
    where
        S: Into<String>,
    {
        let file_name = file_name.into();
        let mut data = Self {
            _file_name: file_name.clone(),
            content: Vec::new(),
            _if_unique: if_unique,
        };
        if let Some(p) = Path::new(&file_name).parent() {
            // Propagate directory-creation failures instead of silently
            // swallowing them (was `unwrap_or_default()`), which later
            // surfaced as a confusing error from `File::create`.
            // (`create_dir_all("")` is a no-op, so a bare file name is fine.)
            create_dir_all(p)?;
        }
        let file = match File::open(&file_name) {
            Ok(f) => f,
            Err(e) => {
                // A missing file is expected on first use: create it empty.
                if let std::io::ErrorKind::NotFound = e.kind() {
                    File::create(&file_name)?
                } else {
                    return Err(e);
                }
            }
        };
        let buf = BufReader::new(file);
        data.content = buf
            .lines()
            .map(|l| l.expect("Could not parse line"))
            .collect();
        if if_unique {
            // De-duplicate via a BTreeSet, then persist the cleaned list.
            let mut set = BTreeSet::new();
            for s in data.content.iter() {
                set.insert(s);
            }
            data.content = set.iter().map(|s| String::from(*s)).collect();
            let mut file = File::create(&file_name)?;
            for s in data.content.iter() {
                file.write_all(format!("{}\n", s).as_bytes())?;
            }
            file.sync_all()?;
        }
        Ok(data)
    }
    /// Append one line in memory and rewrite the whole backing file.
    pub fn save<S>(&mut self, content: S) -> Result<(), std::io::Error>
    where
        S: Into<String>,
    {
        self.content.push(content.into());
        let mut file = File::create(&self._file_name)?;
        for s in self.content.iter() {
            file.write_all(format!("{}\n", s).as_bytes())?;
        }
        file.sync_all()?;
        Ok(())
    }
    /*
    pub fn del(&mut self, index: usize) -> Result<(), std::io::Error> {
        self.content.swap_remove(index);
        let mut file = File::create(&self._file_name)?;
        for s in self.content.iter() {
            file.write_all(format!("{}\n", s).as_bytes())?;
        }
        file.sync_all()?;
        Ok(())
    }
    */
}
| true
|
f7f29a6325582cbb9da5506e79d3be0b9bc3a337
|
Rust
|
RustWorks/Draw2D_GLFW_Vulkan_FFI
|
/src/graphics/vulkan/texture/texture_image.rs
|
UTF-8
| 9,005
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
use super::{MipmapExtent, TextureImage};
use std::sync::Arc;
use crate::graphics::vulkan::{buffer::Buffer, Device};
use anyhow::{bail, Result};
use ash::{version::DeviceV1_0, vk};
impl TextureImage {
    /// The raw image handle used by this texture.
    ///
    /// Unsafe because it is up to the caller to synchronize access to the
    /// image.
    pub unsafe fn raw_image(&self) -> vk::Image {
        self.image
    }
    /// The raw image view used by this texture.
    ///
    /// Unsafe because it is up to the caller to synchronize access to the
    /// view.
    pub unsafe fn raw_view(&self) -> vk::ImageView {
        self.view
    }
    /// Create the image, allocate memory, create a view for the texture.
    ///
    /// Bytes per pixel is used by the various `upload_*` methods when copying
    /// data from a buffer into the image. For example, if the image format
    /// is R8G8B8A8_SRGB then the bytes per pixel is 4.
    pub fn new(
        device: Arc<Device>,
        image_create_info: vk::ImageCreateInfo,
        memory_property_flags: vk::MemoryPropertyFlags,
        bytes_per_pixel: u64,
    ) -> Result<Self> {
        let image = unsafe {
            device
                .logical_device
                .create_image(&image_create_info, None)?
        };
        // Allocate device memory sized and aligned per the driver's
        // requirements, then bind it to the image before any use.
        let allocation = unsafe {
            let memory_requirements =
                device.logical_device.get_image_memory_requirements(image);
            device
                .allocate_memory(memory_requirements, memory_property_flags)?
        };
        unsafe {
            device.logical_device.bind_image_memory(
                image,
                allocation.memory,
                allocation.offset,
            )?;
        }
        // The view exposes all mip levels of a single-layer 2D color image
        // with an identity component swizzle.
        let view_create_info = vk::ImageViewCreateInfo {
            image,
            view_type: vk::ImageViewType::TYPE_2D,
            format: image_create_info.format,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: vk::ImageAspectFlags::COLOR,
                base_mip_level: 0,
                level_count: image_create_info.mip_levels,
                base_array_layer: 0,
                layer_count: 1,
            },
            components: vk::ComponentMapping {
                r: vk::ComponentSwizzle::R,
                g: vk::ComponentSwizzle::G,
                b: vk::ComponentSwizzle::B,
                a: vk::ComponentSwizzle::A,
            },
            ..Default::default()
        };
        let view = unsafe {
            device
                .logical_device
                .create_image_view(&view_create_info, None)?
        };
        Ok(Self {
            bytes_per_pixel,
            image,
            extent: image_create_info.extent,
            view,
            allocation,
            device,
        })
    }
    /// Upload a texture's data from a buffer.
    ///
    /// This method is just an alias to [Self::upload_mipmaps_from_buffer]
    /// which only updates the first mipmap. It's particularly convenient for
    /// textures which only have a single mipmap level.
    pub unsafe fn upload_from_buffer<Buf>(&mut self, src: &Buf) -> Result<()>
    where
        Buf: Buffer,
    {
        // Mip level 0 covers the full image extent.
        let mipmap_extent = MipmapExtent {
            width: self.extent.width,
            height: self.extent.height,
        };
        self.upload_mipmaps_from_buffer(src, &[mipmap_extent])
    }
    /// Upload a texture's mipmaps from a buffer.
    ///
    /// * This method assumes that each mipmap has the same `bytes_per_pixel`
    ///   as the texture image.
    /// * Order is super important. The first entry in `mipmap_sizes`
    ///   corresponds to the first region of memory in the src bufer. The
    ///   mipmap extents are used to compute the byte offset and size of each
    ///   mipmap region.
    pub unsafe fn upload_mipmaps_from_buffer(
        &mut self,
        src: &impl Buffer,
        mipmap_sizes: &[MipmapExtent],
    ) -> Result<()> {
        // Reject buffers that cannot possibly hold all requested mip levels
        // before issuing any GPU work.
        let required_size: u64 = mipmap_sizes
            .iter()
            .map(|mipmap_size| mipmap_size.size_in_bytes(self.bytes_per_pixel))
            .sum();
        if required_size > src.size_in_bytes() {
            bail!(
                "The texture expects {:?} bytes, but the provided buffer includes only {:?} bytes of data!",
                required_size,
                src.size_in_bytes()
            );
        }
        self.device.sync_graphics_commands(|command_buffer| {
            // Walk the buffer front to back: each mip level is transitioned
            // for writing, copied, then transitioned for shader reads.
            let mut mip_level = 0;
            let mut offset: u64 = 0;
            for extent in mipmap_sizes {
                self.write_barrier(command_buffer, mip_level);
                self.copy_buffer_to_image(
                    command_buffer,
                    src.raw(),
                    offset,
                    extent,
                    mip_level,
                );
                self.read_barrier(command_buffer, mip_level);
                mip_level += 1;
                offset += extent.size_in_bytes(self.bytes_per_pixel);
            }
            Ok(())
        })
    }
    /// Transition the image memory layout such that it is an optimal transfer
    /// target.
    pub unsafe fn write_barrier(
        &self,
        command_buffer: vk::CommandBuffer,
        mip_level: u32,
    ) {
        // UNDEFINED -> TRANSFER_DST_OPTIMAL: the previous contents of this
        // mip level are discarded.
        let write_barrier = vk::ImageMemoryBarrier {
            old_layout: vk::ImageLayout::UNDEFINED,
            new_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
            image: self.image,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: vk::ImageAspectFlags::COLOR,
                base_mip_level: mip_level,
                level_count: 1,
                base_array_layer: 0,
                layer_count: 1,
            },
            src_access_mask: vk::AccessFlags::empty(),
            dst_access_mask: vk::AccessFlags::TRANSFER_WRITE,
            ..Default::default()
        };
        self.device.logical_device.cmd_pipeline_barrier(
            command_buffer,
            vk::PipelineStageFlags::TOP_OF_PIPE,
            vk::PipelineStageFlags::TRANSFER,
            vk::DependencyFlags::empty(),
            &[],
            &[],
            &[write_barrier],
        );
    }
    /// Transition the image memory layout such that is is optimal for reading
    /// within the fragment shader.
    unsafe fn read_barrier(
        &self,
        command_buffer: vk::CommandBuffer,
        mip_level: u32,
    ) {
        // TRANSFER_DST_OPTIMAL -> SHADER_READ_ONLY_OPTIMAL, making the
        // transfer write visible to fragment-shader reads.
        let read_barrier = vk::ImageMemoryBarrier {
            old_layout: vk::ImageLayout::TRANSFER_DST_OPTIMAL,
            new_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,
            image: self.image,
            subresource_range: vk::ImageSubresourceRange {
                aspect_mask: vk::ImageAspectFlags::COLOR,
                base_mip_level: mip_level,
                level_count: 1,
                base_array_layer: 0,
                layer_count: 1,
            },
            src_access_mask: vk::AccessFlags::TRANSFER_WRITE,
            dst_access_mask: vk::AccessFlags::SHADER_READ,
            ..Default::default()
        };
        self.device.logical_device.cmd_pipeline_barrier(
            command_buffer,
            vk::PipelineStageFlags::TRANSFER,
            vk::PipelineStageFlags::FRAGMENT_SHADER,
            vk::DependencyFlags::empty(),
            &[],
            &[],
            &[read_barrier],
        );
    }
    /// Copy a region of the buffer's memory into the image mipmap.
    unsafe fn copy_buffer_to_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        offset: u64,
        mipmap_extent: &MipmapExtent,
        mip_level: u32,
    ) {
        // row_length/image_height of 0 mean "tightly packed" per the Vulkan
        // spec, so the buffer region is assumed densely laid out.
        let region = vk::BufferImageCopy {
            buffer_offset: offset,
            buffer_row_length: 0,
            buffer_image_height: 0,
            image_subresource: vk::ImageSubresourceLayers {
                aspect_mask: vk::ImageAspectFlags::COLOR,
                mip_level,
                base_array_layer: 0,
                layer_count: 1,
            },
            image_offset: vk::Offset3D { x: 0, y: 0, z: 0 },
            image_extent: vk::Extent3D {
                width: mipmap_extent.width,
                height: mipmap_extent.height,
                depth: 1,
            },
        };
        self.device.logical_device.cmd_copy_buffer_to_image(
            command_buffer,
            src_buffer,
            self.image,
            vk::ImageLayout::TRANSFER_DST_OPTIMAL,
            &[region],
        );
    }
}
impl Drop for TextureImage {
    /// Destroy the view, then the image, then release the backing memory.
    /// NOTE(review): assumes the GPU has finished using the texture —
    /// callers must guarantee this before dropping.
    fn drop(&mut self) {
        log::trace!("DESTROY TEXTURE");
        unsafe {
            // Destroy the view before the image it references.
            self.device
                .logical_device
                .destroy_image_view(self.view, None);
            self.device.logical_device.destroy_image(self.image, None);
            // Null the handle so a double-destroy is easier to detect.
            self.image = vk::Image::null();
            self.device.free_memory(&self.allocation).unwrap();
        }
    }
}
| true
|
203e3ec5bffcd7bbf8194e63d389edc2f0acc537
|
Rust
|
bytecodealliance/wasmtime
|
/crates/wasmtime/src/component/types.rs
|
UTF-8
| 16,633
| 3.25
| 3
|
[
"LLVM-exception",
"Apache-2.0"
] |
permissive
|
//! This module defines the `Type` type, representing the dynamic form of a component interface type.
use crate::component::matching::InstanceType;
use crate::component::values::{self, Val};
use anyhow::{anyhow, Result};
use std::fmt;
use std::mem;
use std::ops::Deref;
use std::sync::Arc;
use wasmtime_environ::component::{
CanonicalAbiInfo, ComponentTypes, InterfaceType, ResourceIndex, TypeEnumIndex, TypeFlagsIndex,
TypeListIndex, TypeOptionIndex, TypeRecordIndex, TypeResultIndex, TypeTupleIndex,
TypeVariantIndex,
};
use wasmtime_environ::PrimaryMap;
pub use crate::component::resources::ResourceType;
/// An owned and `'static` handle for type information in a component.
///
/// The components here are:
///
/// * `index` - a `TypeFooIndex` defined in the `wasmtime_environ` crate. This
/// then points into the next field of...
///
/// * `types` - this is an allocation originally created from compilation and is
/// stored in a compiled `Component`. This contains all types necessary and
/// information about recursive structures and all other type information
/// within the component. The above `index` points into this structure.
///
/// * `resources` - this is used to "close the loop" and represent a concrete
/// instance type rather than an abstract component type. Instantiating a
/// component with different resources produces different instance types but
/// the same underlying component type, so this field serves the purpose to
/// distinguish instance types from one another. This is runtime state created
/// during instantiation and threaded through here.
#[derive(Clone)]
struct Handle<T> {
    index: T,
    types: Arc<ComponentTypes>,
    resources: Arc<PrimaryMap<ResourceIndex, ResourceType>>,
}
impl<T> Handle<T> {
    /// Pair a type index with clones of the instance's type/resource tables.
    fn new(index: T, ty: &InstanceType<'_>) -> Handle<T> {
        Handle {
            index,
            types: ty.types.clone(),
            resources: ty.resources.clone(),
        }
    }
    /// Re-borrow the stored tables as an `InstanceType`.
    fn instance(&self) -> InstanceType<'_> {
        InstanceType {
            types: &self.types,
            resources: &self.resources,
        }
    }
}
impl<T: fmt::Debug> fmt::Debug for Handle<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only the index is printed; the type/resource tables are large,
        // shared structures and are deliberately omitted.
        f.debug_struct("Handle")
            .field("index", &self.index)
            .finish()
    }
}
impl<T: PartialEq> PartialEq for Handle<T> {
    fn eq(&self, other: &Self) -> bool {
        // FIXME: This is an overly-restrictive definition of equality in that it doesn't consider types to be
        // equal unless they refer to the same declaration in the same component. It's a good shortcut for the
        // common case, but we should also do a recursive structural equality test if the shortcut test fails.
        self.index == other.index
            && Arc::ptr_eq(&self.types, &other.types)
            && Arc::ptr_eq(&self.resources, &other.resources)
    }
}
impl<T: Eq> Eq for Handle<T> {}
/// A `list` interface type
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct List(Handle<TypeListIndex>);
impl List {
    /// Instantiate this type with the specified `values`.
    pub fn new_val(&self, values: Box<[Val]>) -> Result<Val> {
        Ok(Val::List(values::List::new(self, values)?))
    }
    // Internal constructor: wrap an engine-level type index.
    pub(crate) fn from(index: TypeListIndex, ty: &InstanceType<'_>) -> Self {
        List(Handle::new(index, ty))
    }
    /// Retrieve the element type of this `list`.
    pub fn ty(&self) -> Type {
        Type::from(&self.0.types[self.0.index].element, &self.0.instance())
    }
}
/// A field declaration belonging to a `record`
pub struct Field<'a> {
    /// The name of the field
    pub name: &'a str,
    /// The type of the field
    pub ty: Type,
}
/// A `record` interface type
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Record(Handle<TypeRecordIndex>);
impl Record {
    /// Instantiate this type with the specified `values`.
    pub fn new_val<'a>(&self, values: impl IntoIterator<Item = (&'a str, Val)>) -> Result<Val> {
        Ok(Val::Record(values::Record::new(self, values)?))
    }
    // Internal constructor: wrap an engine-level type index.
    pub(crate) fn from(index: TypeRecordIndex, ty: &InstanceType<'_>) -> Self {
        Record(Handle::new(index, ty))
    }
    /// Retrieve the fields of this `record` in declaration order.
    pub fn fields(&self) -> impl ExactSizeIterator<Item = Field<'_>> {
        self.0.types[self.0.index].fields.iter().map(|field| Field {
            name: &field.name,
            ty: Type::from(&field.ty, &self.0.instance()),
        })
    }
}
/// A `tuple` interface type
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Tuple(Handle<TypeTupleIndex>);
impl Tuple {
    /// Instantiate this type with the specified `values`.
    pub fn new_val(&self, values: Box<[Val]>) -> Result<Val> {
        Ok(Val::Tuple(values::Tuple::new(self, values)?))
    }
    // Internal constructor: wrap an engine-level type index.
    pub(crate) fn from(index: TypeTupleIndex, ty: &InstanceType<'_>) -> Self {
        Tuple(Handle::new(index, ty))
    }
    /// Retrieve the types of the fields of this `tuple` in declaration order.
    pub fn types(&self) -> impl ExactSizeIterator<Item = Type> + '_ {
        self.0.types[self.0.index]
            .types
            .iter()
            .map(|ty| Type::from(ty, &self.0.instance()))
    }
}
/// A case declaration belonging to a `variant`
pub struct Case<'a> {
    /// The name of the case
    pub name: &'a str,
    /// The optional payload type of the case
    pub ty: Option<Type>,
}
/// A `variant` interface type
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Variant(Handle<TypeVariantIndex>);
impl Variant {
    /// Instantiate this type with the specified case `name` and `value`.
    pub fn new_val(&self, name: &str, value: Option<Val>) -> Result<Val> {
        Ok(Val::Variant(values::Variant::new(self, name, value)?))
    }
    // Internal constructor: wrap an engine-level type index.
    pub(crate) fn from(index: TypeVariantIndex, ty: &InstanceType<'_>) -> Self {
        Variant(Handle::new(index, ty))
    }
    /// Retrieve the cases of this `variant` in declaration order.
    pub fn cases(&self) -> impl ExactSizeIterator<Item = Case> {
        self.0.types[self.0.index].cases.iter().map(|case| Case {
            name: &case.name,
            ty: case
                .ty
                .as_ref()
                .map(|ty| Type::from(ty, &self.0.instance())),
        })
    }
}
/// An `enum` interface type
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Enum(Handle<TypeEnumIndex>);
impl Enum {
    /// Instantiate this type with the specified case `name`.
    pub fn new_val(&self, name: &str) -> Result<Val> {
        Ok(Val::Enum(values::Enum::new(self, name)?))
    }
    // Internal constructor: wrap an engine-level type index.
    pub(crate) fn from(index: TypeEnumIndex, ty: &InstanceType<'_>) -> Self {
        Enum(Handle::new(index, ty))
    }
    /// Retrieve the names of the cases of this `enum` in declaration order.
    pub fn names(&self) -> impl ExactSizeIterator<Item = &str> {
        self.0.types[self.0.index]
            .names
            .iter()
            .map(|name| name.deref())
    }
}
/// An `option` interface type
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptionType(Handle<TypeOptionIndex>);
impl OptionType {
    /// Instantiate this type with the specified `value`.
    pub fn new_val(&self, value: Option<Val>) -> Result<Val> {
        Ok(Val::Option(values::OptionVal::new(self, value)?))
    }
    // Internal constructor: wrap an engine-level type index.
    pub(crate) fn from(index: TypeOptionIndex, ty: &InstanceType<'_>) -> Self {
        OptionType(Handle::new(index, ty))
    }
    /// Retrieve the type parameter for this `option`.
    pub fn ty(&self) -> Type {
        Type::from(&self.0.types[self.0.index].ty, &self.0.instance())
    }
}
/// A `result` interface type (historically called `expected`)
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ResultType(Handle<TypeResultIndex>);
impl ResultType {
    /// Instantiate this type with the specified `value`.
    pub fn new_val(&self, value: Result<Option<Val>, Option<Val>>) -> Result<Val> {
        Ok(Val::Result(values::ResultVal::new(self, value)?))
    }
    // Internal constructor: wrap an engine-level type index.
    pub(crate) fn from(index: TypeResultIndex, ty: &InstanceType<'_>) -> Self {
        ResultType(Handle::new(index, ty))
    }
    /// Retrieve the `ok` type parameter for this `result`, if present.
    pub fn ok(&self) -> Option<Type> {
        Some(Type::from(
            self.0.types[self.0.index].ok.as_ref()?,
            &self.0.instance(),
        ))
    }
    /// Retrieve the `err` type parameter for this `result`, if present.
    pub fn err(&self) -> Option<Type> {
        Some(Type::from(
            self.0.types[self.0.index].err.as_ref()?,
            &self.0.instance(),
        ))
    }
}
/// A `flags` interface type
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Flags(Handle<TypeFlagsIndex>);
impl Flags {
    /// Instantiate this type with the specified flag `names`.
    pub fn new_val(&self, names: &[&str]) -> Result<Val> {
        Ok(Val::Flags(values::Flags::new(self, names)?))
    }
    // Internal constructor: wrap an engine-level type index.
    pub(crate) fn from(index: TypeFlagsIndex, ty: &InstanceType<'_>) -> Self {
        Flags(Handle::new(index, ty))
    }
    /// Retrieve the names of the flags of this `flags` type in declaration order.
    pub fn names(&self) -> impl ExactSizeIterator<Item = &str> {
        self.0.types[self.0.index]
            .names
            .iter()
            .map(|name| name.deref())
    }
    // Canonical-ABI layout information for lifting/lowering this type.
    pub(crate) fn canonical_abi(&self) -> &CanonicalAbiInfo {
        &self.0.types[self.0.index].abi
    }
}
/// Represents a component model interface type
///
/// Primitive variants carry no payload; composite variants wrap a handle
/// into the owning component's type tables.
#[derive(Clone, PartialEq, Eq, Debug)]
#[allow(missing_docs)]
pub enum Type {
    Bool,
    S8,
    U8,
    S16,
    U16,
    S32,
    U32,
    S64,
    U64,
    Float32,
    Float64,
    Char,
    String,
    List(List),
    Record(Record),
    Tuple(Tuple),
    Variant(Variant),
    Enum(Enum),
    Option(OptionType),
    Result(ResultType),
    Flags(Flags),
    // Owned and borrowed resource handles, distinguished by transfer
    // semantics in the component model.
    Own(ResourceType),
    Borrow(ResourceType),
}
impl Type {
/// Retrieve the inner [`List`] of a [`Type::List`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::List`].
pub fn unwrap_list(&self) -> &List {
if let Type::List(handle) = self {
&handle
} else {
panic!("attempted to unwrap a {} as a list", self.desc())
}
}
/// Retrieve the inner [`Record`] of a [`Type::Record`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Record`].
pub fn unwrap_record(&self) -> &Record {
if let Type::Record(handle) = self {
&handle
} else {
panic!("attempted to unwrap a {} as a record", self.desc())
}
}
/// Retrieve the inner [`Tuple`] of a [`Type::Tuple`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Tuple`].
pub fn unwrap_tuple(&self) -> &Tuple {
if let Type::Tuple(handle) = self {
&handle
} else {
panic!("attempted to unwrap a {} as a tuple", self.desc())
}
}
/// Retrieve the inner [`Variant`] of a [`Type::Variant`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Variant`].
pub fn unwrap_variant(&self) -> &Variant {
if let Type::Variant(handle) = self {
&handle
} else {
panic!("attempted to unwrap a {} as a variant", self.desc())
}
}
/// Retrieve the inner [`Enum`] of a [`Type::Enum`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Enum`].
pub fn unwrap_enum(&self) -> &Enum {
if let Type::Enum(handle) = self {
&handle
} else {
panic!("attempted to unwrap a {} as a enum", self.desc())
}
}
/// Retrieve the inner [`OptionType`] of a [`Type::Option`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Option`].
pub fn unwrap_option(&self) -> &OptionType {
if let Type::Option(handle) = self {
&handle
} else {
panic!("attempted to unwrap a {} as a option", self.desc())
}
}
/// Retrieve the inner [`ResultType`] of a [`Type::Result`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Result`].
pub fn unwrap_result(&self) -> &ResultType {
if let Type::Result(handle) = self {
&handle
} else {
panic!("attempted to unwrap a {} as a result", self.desc())
}
}
/// Retrieve the inner [`Flags`] of a [`Type::Flags`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Flags`].
pub fn unwrap_flags(&self) -> &Flags {
if let Type::Flags(handle) = self {
&handle
} else {
panic!("attempted to unwrap a {} as a flags", self.desc())
}
}
/// Retrieve the inner [`ResourceType`] of a [`Type::Own`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Own`].
pub fn unwrap_own(&self) -> &ResourceType {
match self {
Type::Own(ty) => ty,
_ => panic!("attempted to unwrap a {} as a own", self.desc()),
}
}
/// Retrieve the inner [`ResourceType`] of a [`Type::Borrow`].
///
/// # Panics
///
/// This will panic if `self` is not a [`Type::Borrow`].
pub fn unwrap_borrow(&self) -> &ResourceType {
match self {
Type::Borrow(ty) => ty,
_ => panic!("attempted to unwrap a {} as a own", self.desc()),
}
}
pub(crate) fn check(&self, value: &Val) -> Result<()> {
let other = &value.ty();
if self == other {
Ok(())
} else if mem::discriminant(self) != mem::discriminant(other) {
Err(anyhow!(
"type mismatch: expected {}, got {}",
self.desc(),
other.desc()
))
} else {
Err(anyhow!(
"type mismatch for {}, possibly due to mixing distinct composite types",
self.desc()
))
}
}
/// Convert the specified `InterfaceType` to a `Type`.
pub(crate) fn from(ty: &InterfaceType, instance: &InstanceType<'_>) -> Self {
match ty {
InterfaceType::Bool => Type::Bool,
InterfaceType::S8 => Type::S8,
InterfaceType::U8 => Type::U8,
InterfaceType::S16 => Type::S16,
InterfaceType::U16 => Type::U16,
InterfaceType::S32 => Type::S32,
InterfaceType::U32 => Type::U32,
InterfaceType::S64 => Type::S64,
InterfaceType::U64 => Type::U64,
InterfaceType::Float32 => Type::Float32,
InterfaceType::Float64 => Type::Float64,
InterfaceType::Char => Type::Char,
InterfaceType::String => Type::String,
InterfaceType::List(index) => Type::List(List::from(*index, instance)),
InterfaceType::Record(index) => Type::Record(Record::from(*index, instance)),
InterfaceType::Tuple(index) => Type::Tuple(Tuple::from(*index, instance)),
InterfaceType::Variant(index) => Type::Variant(Variant::from(*index, instance)),
InterfaceType::Enum(index) => Type::Enum(Enum::from(*index, instance)),
InterfaceType::Option(index) => Type::Option(OptionType::from(*index, instance)),
InterfaceType::Result(index) => Type::Result(ResultType::from(*index, instance)),
InterfaceType::Flags(index) => Type::Flags(Flags::from(*index, instance)),
InterfaceType::Own(index) => Type::Own(instance.resource_type(*index)),
InterfaceType::Borrow(index) => Type::Borrow(instance.resource_type(*index)),
}
}
fn desc(&self) -> &'static str {
match self {
Type::Bool => "bool",
Type::S8 => "s8",
Type::U8 => "u8",
Type::S16 => "s16",
Type::U16 => "u16",
Type::S32 => "s32",
Type::U32 => "u32",
Type::S64 => "s64",
Type::U64 => "u64",
Type::Float32 => "float32",
Type::Float64 => "float64",
Type::Char => "char",
Type::String => "string",
Type::List(_) => "list",
Type::Record(_) => "record",
Type::Tuple(_) => "tuple",
Type::Variant(_) => "variant",
Type::Enum(_) => "enum",
Type::Option(_) => "option",
Type::Result(_) => "result",
Type::Flags(_) => "flags",
Type::Own(_) => "own",
Type::Borrow(_) => "borrow",
}
}
}
| true
|
c914badd74823706a142f184e87ae54ea7d9c4a4
|
Rust
|
CBenoit/nushell
|
/crates/nu-command/src/strings/str_/ends_with.rs
|
UTF-8
| 3,589
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
use nu_engine::CallExt;
use nu_protocol::ast::Call;
use nu_protocol::ast::CellPath;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::Category;
use nu_protocol::Spanned;
use nu_protocol::{Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Value};
/// The `str ends-with` subcommand: tests whether input strings end with a
/// given pattern.
#[derive(Clone)]
pub struct SubCommand;
impl Command for SubCommand {
    fn name(&self) -> &str {
        "str ends-with"
    }
    fn signature(&self) -> Signature {
        // One required pattern, then any number of cell paths selecting which
        // columns of structured input to test.
        Signature::build("str ends-with")
            .required("pattern", SyntaxShape::String, "the pattern to match")
            .rest(
                "rest",
                SyntaxShape::CellPath,
                "optionally matches suffix of text by column paths",
            )
            .category(Category::Strings)
    }
    fn usage(&self) -> &str {
        "Check if a string ends with a pattern"
    }
    fn search_terms(&self) -> Vec<&str> {
        vec!["pattern", "match", "find", "search"]
    }
    // Delegates to the free function `operate` defined below.
    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        operate(engine_state, stack, call, input)
    }
    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Checks if string ends with '.rb' pattern",
                example: "'my_library.rb' | str ends-with '.rb'",
                result: Some(Value::Bool {
                    val: true,
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Checks if string ends with '.txt' pattern",
                example: "'my_library.rb' | str ends-with '.txt'",
                result: Some(Value::Bool {
                    val: false,
                    span: Span::test_data(),
                }),
            },
        ]
    }
}
/// Shared driver: resolves the pattern and optional cell paths, then applies
/// `action` either to each whole input value or to each addressed cell.
fn operate(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    input: PipelineData,
) -> Result<PipelineData, ShellError> {
    let head = call.head;
    let pattern: Spanned<String> = call.req(engine_state, stack, 0)?;
    let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 1)?;
    input.map(
        move |v| {
            if column_paths.is_empty() {
                action(&v, &pattern.item, head)
            } else {
                let mut ret = v;
                for path in &column_paths {
                    // The boxed closure must own its copy of the pattern, so
                    // clone once per path.
                    let p = pattern.item.clone();
                    let r = ret.update_cell_path(
                        &path.members,
                        Box::new(move |old| action(old, &p, head)),
                    );
                    if let Err(error) = r {
                        // First failing path replaces the whole value with an
                        // error value; remaining paths are skipped.
                        return Value::Error { error };
                    }
                }
                ret
            }
        },
        engine_state.ctrlc.clone(),
    )
}
fn action(input: &Value, pattern: &str, head: Span) -> Value {
match input {
Value::String { val, .. } => Value::Bool {
val: val.ends_with(pattern),
span: head,
},
other => Value::Error {
error: ShellError::UnsupportedInput(
format!(
"Input's type is {}. This command only works with strings.",
other.get_type()
),
head,
),
},
}
}
#[cfg(test)]
mod test {
    use super::*;
    /// Runs the `examples()` declared on the command through the crate's
    /// shared example checker.
    #[test]
    fn test_examples() {
        use crate::test_examples;
        test_examples(SubCommand {})
    }
}
| true
|
1840665d0ad40019cdf9695510f7dacdb0abb885
|
Rust
|
kaj/fanrs
|
/src/models/part.rs
|
UTF-8
| 5,567
| 2.546875
| 3
|
[] |
no_license
|
use super::{Episode, Issue, IssueRef};
use crate::schema::episode_parts::dsl as ep;
use crate::schema::publications::dsl as p;
use crate::templates::ToHtml;
use diesel::dsl::count_star;
use diesel::prelude::*;
use diesel::result::Error;
use diesel_async::{AsyncPgConnection, RunQueryDsl};
use log::warn;
use std::io::{self, Write};
/// One part of a (possibly multi-part) episode, as stored in the database.
#[derive(Debug, Queryable)]
pub struct Part {
    /// Part number within the episode, if any.
    pub no: Option<i16>,
    /// Part title, if any.
    pub name: Option<String>,
}
impl Part {
    /// A placeholder `Part` denoting the whole episode (no number, no name).
    fn none() -> Part {
        Part {
            no: None,
            name: None,
        }
    }
    /// true for an actual part, false for the whole episode
    pub fn is_part(&self) -> bool {
        self.no.is_some() || self.name.is_some()
    }
    /// True for an unnumbered part or for part number 1.
    pub fn is_first(&self) -> bool {
        self.no.map_or(true, |n| n == 1)
    }
    /// The part name, if any, borrowed as `&str`.
    pub fn name(&self) -> Option<&str> {
        self.name.as_deref()
    }
    /// Record that `part` of `episode` was published in `issue`.
    ///
    /// If the same publication is already recorded this is mostly a no-op,
    /// except that a non-empty `label` overwrites the stored label; a
    /// differing `seqno` is currently unimplemented and panics.
    pub async fn publish(
        episode: &Episode,
        part: &Part,
        issue: &Issue,
        seqno: Option<i16>,
        best_plac: Option<i16>,
        label: &str,
        db: &mut AsyncPgConnection,
    ) -> Result<(), Error> {
        // Count publications of this episode in this issue; for a real part,
        // narrow the match to the same part number/name (NULL-safe via
        // IS NOT DISTINCT FROM).
        let mut existing = p::publications
            .select(count_star())
            .left_join(ep::episode_parts)
            .filter(ep::episode_id.eq(episode.id))
            .filter(p::issue_id.eq(issue.id))
            .into_boxed();
        if part.is_part() {
            existing = existing
                .filter(ep::part_no.is_not_distinct_from(part.no))
                .filter(ep::part_name.is_not_distinct_from(part.name()));
        }
        // 0: fall through and insert/update; 1: already recorded, done;
        // >1: duplicate rows — log and continue anyway.
        match existing.first::<i64>(db).await? {
            0 => (),
            1 => return Ok(()),
            n => warn!("{} of {:?} in {}", n, episode, issue),
        }
        let part_id = Self::g_o_c_part_id(episode.id, part, db).await?;
        if let Some((id, old_seqno, old_label)) = p::publications
            .filter(p::issue_id.eq(issue.id))
            .filter(p::episode_part.eq(part_id))
            .select((p::id, p::seqno, p::label))
            .first::<(i32, Option<i16>, String)>(db)
            .await
            .optional()?
        {
            if seqno.is_some() && old_seqno != seqno {
                unimplemented!(
                    "Should update seqno for publication #{} ({:?} != {:?})",
                    id,
                    seqno,
                    old_seqno
                );
            }
            if !label.is_empty() && old_label != label {
                diesel::update(p::publications)
                    .set(p::label.eq(label))
                    .filter(p::id.eq(id))
                    .execute(db)
                    .await?;
            }
        } else {
            diesel::insert_into(p::publications)
                .values((
                    p::issue_id.eq(issue.id),
                    p::episode_part.eq(part_id),
                    p::seqno.eq(seqno),
                    p::best_plac.eq(best_plac),
                    p::label.eq(label),
                ))
                .execute(db)
                .await?;
        }
        Ok(())
    }
    /// Record a previous publication of `episode` in `issue` (whole-episode
    /// part, no seqno/label), unless any publication of the episode in that
    /// issue is already known.
    pub async fn prevpub(
        episode: &Episode,
        issue: &Issue,
        db: &mut AsyncPgConnection,
    ) -> Result<(), Error> {
        let existing = p::publications
            .select(count_star())
            .left_join(ep::episode_parts)
            .filter(ep::episode_id.eq(episode.id))
            .filter(p::issue_id.eq(issue.id));
        if existing.first::<i64>(db).await? > 0 {
            return Ok(());
        }
        let part_id =
            Self::g_o_c_part_id(episode.id, &Part::none(), db).await?;
        diesel::insert_into(p::publications)
            .values((p::issue_id.eq(issue.id), p::episode_part.eq(part_id)))
            .execute(db)
            .await?;
        Ok(())
    }
    /// Get-or-create the `episode_parts` row for (episode, part) and return
    /// its id. The lookup is NULL-safe so a whole-episode part (both fields
    /// NULL) matches itself.
    async fn g_o_c_part_id(
        episode_id: i32,
        part: &Part,
        db: &mut AsyncPgConnection,
    ) -> Result<i32, Error> {
        if let Some(part_id) = ep::episode_parts
            .select(ep::id)
            .filter(ep::episode_id.eq(episode_id))
            .filter(ep::part_no.is_not_distinct_from(part.no))
            .filter(ep::part_name.is_not_distinct_from(part.name()))
            .first::<i32>(db)
            .await
            .optional()?
        {
            Ok(part_id)
        } else {
            Ok(diesel::insert_into(ep::episode_parts)
                .values((
                    ep::episode_id.eq(episode_id),
                    ep::part_no.eq(part.no),
                    ep::part_name.eq(part.name()),
                ))
                .returning(ep::id)
                .get_result(db)
                .await?)
        }
    }
}
impl ToHtml for Part {
    /// Renders a real part as `<span class='part'>del N: name</span>`;
    /// whole-episode parts render nothing.
    fn to_html(&self, out: &mut dyn Write) -> io::Result<()> {
        if !self.is_part() {
            return Ok(());
        }
        write!(out, "<span class='part'>")?;
        match (self.no, &self.name) {
            (Some(no), Some(name)) => {
                write!(out, "del {}: ", no)?;
                name.to_html(out)?;
            }
            (Some(no), None) => write!(out, "del {}", no)?,
            (None, Some(name)) => name.to_html(out)?,
            // Unreachable: is_part() guaranteed at least one field above.
            (None, None) => {}
        }
        write!(out, "</span>")
    }
}
/// An [`IssueRef`] together with the [`Part`] published in it and an optional
/// placement value (presumably `best_plac` from the publication row —
/// confirm against the query that builds this).
#[derive(Debug, Queryable)]
pub struct PartInIssue(pub IssueRef, pub Part, pub Option<i16>);
impl ToHtml for PartInIssue {
fn to_html(&self, out: &mut dyn Write) -> io::Result<()> {
self.0.to_html(out)?;
if self.1.is_part() {
write!(out, " (")?;
self.1.to_html(out)?;
write!(out, ")")?;
}
Ok(())
}
}
| true
|
1b6621db73df48b9ea5633d07ba3176be8c1120d
|
Rust
|
OpenNOX/project-euler
|
/rust/src/bin/solution_005.rs
|
UTF-8
| 2,376
| 3.125
| 3
|
[] |
no_license
|
#![feature(test)]
use clap::{value_parser, Arg, ArgMatches};
use project_euler::{run_solution, Solution};
/// Command-line argument maximum multiple type.
type MaximumMultiple = u64;
/// Command-line argument maximum multiple placeholder.
const MAXIMUM_MULTIPLE: &str = "MAXIMUM_MULTIPLE";
/// Project Euler problem 5: smallest evenly-divisible multiple.
pub struct Solution005;
impl Solution for Solution005 {
    fn title(&self) -> String {
        "Smallest multiple".to_string()
    }
    fn description(&self) -> String {
        format!(
            "Smallest number that is evenly divisible by all numbers from 1 to {}.",
            MAXIMUM_MULTIPLE
        )
    }
    fn arguments(&self) -> Vec<Arg> {
        vec![Arg::new(MAXIMUM_MULTIPLE)
            .help("Maximum multiple to be checked.")
            .required(true)
            .value_parser(value_parser!(MaximumMultiple))]
    }
    /// Returns the least common multiple of `1..=maximum_multiple`.
    ///
    /// The previous implementation scanned candidates one by one and only
    /// tested divisors `3..=maximum_multiple`; it returned a wrong answer
    /// for `maximum_multiple == 3` (3 instead of 6) and looped forever for
    /// `maximum_multiple < 3` because the empty divisor range never set its
    /// completion flag. Folding the LCM is correct for all inputs and runs
    /// in O(n log n) instead of O(answer * n).
    fn run(&self, arguments: &ArgMatches) -> u64 {
        let maximum_multiple = *arguments
            .get_one::<MaximumMultiple>(MAXIMUM_MULTIPLE)
            .expect("command-line arguments parser to get argument");

        // Greatest common divisor (Euclid's algorithm).
        fn gcd(a: u64, b: u64) -> u64 {
            if b == 0 {
                a
            } else {
                gcd(b, a % b)
            }
        }
        // Least common multiple; divide before multiplying to delay overflow.
        fn lcm(a: u64, b: u64) -> u64 {
            a / gcd(a, b) * b
        }

        (1..=maximum_multiple).fold(1, lcm)
    }
}
/// Entry point: runs solution 5 with arguments taken from the command line.
fn main() {
    run_solution(&Solution005 {});
}
#[cfg(test)]
mod tests {
    extern crate test;
    use super::Solution005;
    use clap::command;
    use project_euler::Solution;
    use test::Bencher;
    // Dummy argv[0] used when synthesising command-line matches for tests.
    const APPLICATION_NAME: &str = "test_app";
    /// Problem statement example: for a maximum of 10 the answer is 2520.
    #[test]
    fn solves_problem_005_example() {
        let solution = Solution005 {};
        let arguments = command!()
            .args(solution.arguments())
            .get_matches_from(vec![APPLICATION_NAME, "10"]);
        assert_eq!(solution.run(&arguments), 2520);
    }
    /// Benchmarks the real puzzle input (maximum of 20).
    #[bench]
    fn bench_solution_005(bencher: &mut Bencher) {
        let solution = Solution005 {};
        let arguments = command!()
            .args(solution.arguments())
            .get_matches_from(vec![APPLICATION_NAME, "20"]);
        bencher.iter(|| solution.run(&arguments));
    }
}
| true
|
eed7c7c215a4d2dda25f1c56a36f96914590c778
|
Rust
|
fosskers/aura
|
/rust/aura-core/src/cache.rs
|
UTF-8
| 7,625
| 3.078125
| 3
|
[] |
no_license
|
//! Cache manipulation internals.
use crate::Package;
use alpm::Alpm;
use std::collections::{HashMap, HashSet};
use std::ffi::OsString;
use std::fs::Metadata;
use std::path::{Path, PathBuf};
use std::time::SystemTime;
use std::{cmp::Ordering, process::Command};
/// A validated path to a package tarball.
/// A validated path to a package tarball.
#[derive(Debug, PartialEq, Eq)]
pub struct PkgPath<'a> {
    /// Tarball location; `PkgPath::new` guarantees a recognised extension.
    path: PathBuf,
    /// Package description parsed from the path by `Package::from_path`.
    pkg: Package<'a>,
}
impl<'a> PkgPath<'a> {
    /// Validate that `PathBuf` has an expected extension.
    pub fn new(path: PathBuf) -> Option<PkgPath<'static>> {
        let pkg = Package::from_path(&path)?;
        if is_package(&path) {
            Some(PkgPath { path, pkg })
        } else {
            None
        }
    }
    /// The path postfixed by its `.sig` extension.
    pub fn sig_file(&self) -> PathBuf {
        let mut sig: PathBuf = self.path.clone();
        let mut ext: OsString = sig.extension().unwrap().to_os_string();
        ext.push(".sig");
        sig.set_extension(ext);
        sig
    }
    /// The internal `Path` of this validated tarball.
    pub fn as_path(&self) -> &Path {
        &self.path
    }
    /// Consume this `PkgPath` to get its inner `PathBuf`.
    pub fn into_pathbuf(self) -> PathBuf {
        self.path
    }
    /// Pull a simple package definition from this tarball path.
    pub fn as_package(&self) -> &Package<'a> {
        &self.pkg
    }
    /// Delete this `PkgPath` and its `.sig` file, if there is one.
    pub fn remove(self) -> Result<(), std::io::Error> {
        std::fs::remove_file(&self.path)?;
        let sig = self.sig_file();
        match sig.exists() {
            true => std::fs::remove_file(sig),
            false => Ok(()),
        }
    }
    // TODO I'd like it if this could be avoided.
    /// Remove this via a shell call to `rm`.
    pub fn sudo_remove(self) -> Result<(), PathBuf> {
        let status = Command::new("sudo").arg("rm").arg(&self.path).status();
        match status {
            Ok(st) if st.success() => Ok(()),
            _ => Err(self.path),
        }
    }
}
impl<'a> PartialOrd for PkgPath<'a> {
    // Delegates to `Ord` below so the two orderings can never disagree.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl<'a> Ord for PkgPath<'a> {
    // Ordering follows the parsed package (not the raw filesystem path).
    fn cmp(&self, other: &Self) -> Ordering {
        self.pkg.cmp(&other.pkg)
    }
}
/// A description of the size of the package cache. Produced by [`size`].
pub struct CacheSize {
    /// The number of package files in the cache.
    pub files: usize,
    /// The number of bytes of all files combined.
    pub bytes: u64,
}
/// Cache statistics for a particular package. Produced by [`info`].
#[derive(Debug)]
pub struct CacheInfo {
    /// The name of the package.
    pub name: String,
    /// The most recent version of the package available.
    pub version: String,
    /// The date/time that the tarball was downloaded or built.
    pub created: SystemTime,
    /// Is a signature file present for this entry?
    pub signature: bool,
    /// Size in bytes of the tarball.
    pub size: u64,
    /// Available versions.
    pub available: Vec<String>,
}
/// Paths of all cache entries whose file name contains `term`.
///
/// Entries whose names are not valid UTF-8 never match.
pub fn search<'a, P>(caches: &'a [P], term: &'a str) -> impl Iterator<Item = PathBuf> + 'a
where
    P: AsRef<Path>,
{
    crate::read_dirs(caches)
        .flatten()
        .map(|entry| entry.path())
        .filter(move |path| {
            path.file_name()
                .and_then(|name| name.to_str())
                .map_or(false, |name| name.contains(term))
        })
}
/// Yield the [`CacheInfo`], if possible, of the given packages.
///
/// Returns `Ok(None)` when no cached tarball matches `package` exactly by
/// name; otherwise describes the newest matching tarball and lists every
/// cached version.
pub fn info(caches: &[&Path], package: &str) -> Result<Option<CacheInfo>, std::io::Error> {
    // All tarballs whose file name contains the term, narrowed to exact
    // package-name matches, each paired with its filesystem metadata.
    let mut matches: Vec<(PkgPath, Metadata)> = search(caches, package)
        .filter_map(|path| {
            path.metadata()
                .ok()
                .and_then(|meta| PkgPath::new(path).map(|pp| (pp, meta)))
        })
        .filter(|(pp, _)| pp.pkg.name == package)
        .collect();
    // Sort newest-first (descending PkgPath order) so the head is current.
    matches.sort_by(|(p0, _), (p1, _)| p1.cmp(p0));
    let available: Vec<String> = matches
        .iter()
        .map(|(pp, _)| pp.pkg.version.to_string())
        .collect();
    match matches.into_iter().next() {
        None => Ok(None),
        Some((pp, meta)) => {
            // NOTE(review): `Metadata::created()` errors on platforms/file
            // systems without creation times, which propagates as Err here.
            let created = meta.created()?;
            let signature = pp.sig_file().exists();
            let info = CacheInfo {
                name: pp.pkg.name.into_owned(),
                version: pp.pkg.version.into_owned(),
                created,
                signature,
                size: meta.len(),
                available,
            };
            Ok(Some(info))
        }
    }
}
/// Count the package files under the given directories and sum their sizes
/// in bytes. Entries that are not package tarballs, or whose metadata cannot
/// be read, are skipped.
pub fn size<P>(paths: &[P]) -> CacheSize
where
    P: AsRef<Path>,
{
    let mut files = 0;
    let mut bytes = 0;
    for entry in crate::read_dirs(paths).flatten() {
        if !is_package(&entry.path()) {
            continue;
        }
        if let Ok(meta) = entry.metadata() {
            files += 1;
            bytes += meta.len();
        }
    }
    CacheSize { files, bytes }
}
/// Every entry in the given caches that validates as a package tarball,
/// yielded as a [`PkgPath`].
pub fn package_paths<P>(caches: &[P]) -> impl Iterator<Item = PkgPath<'static>> + '_
where
    P: AsRef<Path>,
{
    crate::read_dirs(caches)
        .flatten()
        .filter_map(|entry| PkgPath::new(entry.path()))
}
/// Installed official packages that have no tarball in the cache.
pub fn officials_missing_tarballs<'a>(
    alpm: &'a Alpm,
    caches: &[&Path],
) -> impl Iterator<Item = alpm::Package<'a>> {
    // Snapshot: package name -> set of cached version strings.
    let groups = all_versions(caches);
    // Keep a package when no cached version equals its installed version.
    // NOTE(review): this filter body is duplicated in
    // `foreigns_missing_tarballs` and `missing_tarballs`; consider extracting
    // a shared helper.
    crate::native_packages(alpm).filter(move |p| {
        let pv = p.version().as_str();
        groups
            .get(p.name())
            .map(|vs| !vs.contains(pv))
            .unwrap_or(true)
    })
}
/// Installed foreign packages that have no tarball in the cache.
pub fn foreigns_missing_tarballs<'a>(
    alpm: &'a Alpm,
    caches: &[&Path],
) -> impl Iterator<Item = alpm::Package<'a>> {
    // Same name -> cached-versions lookup as `officials_missing_tarballs`,
    // but over foreign (e.g. AUR) packages.
    let groups = all_versions(caches);
    crate::foreign_packages(alpm).filter(move |p| {
        let pv = p.version().as_str();
        groups
            .get(p.name())
            .map(|vs| !vs.contains(pv))
            .unwrap_or(true)
    })
}
/// Installed packages that have no tarball in the cache.
pub fn missing_tarballs<'a>(
    alpm: &'a Alpm,
    caches: &[&Path],
) -> impl Iterator<Item = alpm::Package<'a>> {
    // Covers every locally installed package, regardless of origin.
    let groups: HashMap<String, HashSet<String>> = all_versions(caches);
    alpm.localdb().pkgs().into_iter().filter(move |p| {
        let pv = p.version().as_str();
        groups
            .get(p.name())
            .map(|vs| !vs.contains(pv))
            .unwrap_or(true)
    })
}
// TODO Provide a similar function for signature files.
/// Is a given `Path` a legal Arch Linux package tarball?
///
/// ```
/// use aura_core::cache::is_package;
/// use std::path::Path;
///
/// assert!(is_package(Path::new("libebml-1.3.10-1-x86_64.pkg.tar.xz")));
/// assert!(is_package(Path::new("libebml-1.4.0-1-x86_64.pkg.tar.zst")));
/// ```
pub fn is_package(path: &Path) -> bool {
    // Non-UTF-8 paths are never considered packages.
    const SUFFIXES: [&str; 2] = [".pkg.tar.zst", ".pkg.tar.xz"];
    match path.to_str() {
        Some(p) => SUFFIXES.iter().any(|suffix| p.ends_with(suffix)),
        None => false,
    }
}
/// Every version of every package available in the caches, grouped by
/// package name.
pub fn all_versions(caches: &[&Path]) -> HashMap<String, HashSet<String>> {
    let mut versions: HashMap<String, HashSet<String>> = HashMap::new();
    for pp in package_paths(caches) {
        let pkg = pp.as_package();
        versions
            .entry(pkg.name.to_string())
            .or_default()
            .insert(pkg.version.to_string());
    }
    versions
}
| true
|
e85ae71d8e9f4e3dfc0a6136105754b465c3a28e
|
Rust
|
mmueller/advent2018
|
/src/advent/day11.rs
|
UTF-8
| 2,227
| 3.40625
| 3
|
[] |
no_license
|
use advent::AdventSolver;
use failure::Error;
const SERIAL_NO: i64 = 4172;
/// Day 11 solver; stateless, so `Default` yields a ready instance.
#[derive(Default)]
pub struct Solver;
impl AdventSolver for Solver {
    // Part 1 fixes the square size at 3; part 2 searches sizes 1..=300.
    fn solve(&mut self) -> Result<(), Error> {
        println!("Max 3x3 square: {:?}",
                 find_largest_total_power(SERIAL_NO, 3, 3));
        println!("Max NxN square: {:?}",
                 find_largest_total_power(SERIAL_NO, 1, 300));
        Ok(())
    }
}
// Power in the single cell specified, per the puzzle's formula:
// ((rack_id * y + serial) * rack_id / 100) mod 10, minus 5, where
// rack_id = x + 10 and the "/ 100 % 10" step extracts the hundreds digit.
fn cell_power_level(serial_no: i64, x: i64, y: i64) -> i64 {
    let rack_id = x + 10;
    (((rack_id * y + serial_no) * rack_id / 100) % 10) - 5
}
// Returns (x, y, size) of the square (size in [size_min, size_max]) with the
// greatest total power on the 300x300 grid. Ties keep the FIRST maximum
// found, so the iteration order (size, then x, then y) is part of the
// observable behaviour.
fn find_largest_total_power(serial_no: i64, size_min: i64, size_max: i64)
    -> (i64, i64, i64) {
    let mut result = (0, 0, 0);
    let mut max_power: i64 = std::i64::MIN;
    for size in size_min..=size_max {
        // Progress indicator: one dot per square size examined.
        eprint!(".");
        for x in 1..=300-size+1 {
            for y in 1..=300-size+1 {
                let power = square_power_level(serial_no, x, y, size);
                if power > max_power {
                    result = (x, y, size);
                    max_power = power;
                }
            }
        }
    }
    eprintln!("");
    result
}
// Power in the (size x size) square specified. In the spirit of Advent, uses
// the `cached` crate to memoize the results across invocations. (Without the
// cache, the naive solution runs in about 3 minutes. With the cache, 8 sec.)
cached! {
    SQUARE_POWER_LEVELS;
    fn square_power_level(serial_no: i64, x: i64, y: i64, size: i64) -> i64 = {
        if size == 1 {
            // Base case: a 1x1 square is just the single cell.
            cell_power_level(serial_no, x, y)
        } else {
            // Build the NxN sum from the memoized (N-1)x(N-1) square anchored
            // at the same corner, plus the new bottom row and right column.
            square_power_level(serial_no, x, y, size-1) +
            // Count the bottom row of the square
            (x..x+size).map(|x| cell_power_level(serial_no, x, y+size-1))
                    .sum::<i64>() +
            // ...and the right row of the square, but don't count the bottom
            // right corner cell twice!
            (y..y+size-1).map(|y| cell_power_level(serial_no, x+size-1, y))
                    .sum::<i64>()
        }
    }
}
| true
|
be9e20bd33dc158faf563d1908b20655605c7027
|
Rust
|
6293/transaction-rs
|
/transaction/src/loop_fn.rs
|
UTF-8
| 1,340
| 3.046875
| 3
|
[] |
no_license
|
use std::marker::PhantomData;
use {IntoTransaction, Transaction};
/// Builds a transaction that repeatedly applies `f`, threading state of type
/// `S` through each iteration, until `f`'s transaction yields
/// `Loop::Break(T)`. The first transaction is built eagerly from
/// `initial_state`.
pub fn loop_fn<Ctx, S, T, F, A>(initial_state: S, f: F) -> LoopFn<Ctx, F, A>
where
    A: IntoTransaction<Ctx, Item = Loop<S, T>>,
    F: Fn(S) -> A,
{
    LoopFn {
        tx: f(initial_state).into_transaction(),
        f: f,
        _phantom: PhantomData,
    }
}
/// The result of `loop_fn`
#[derive(Debug)]
#[must_use]
pub struct LoopFn<Ctx, F, A: IntoTransaction<Ctx>> {
tx: A::Tx,
f: F,
_phantom: PhantomData<(Ctx)>,
}
/// The status of a `loop_fn` loop, returned by each iteration's transaction.
#[derive(Debug)]
pub enum Loop<S, T> {
    /// Indicates that the loop has completed with output `T`.
    Break(T),
    /// Indicates that the loop function should be called again with input state `S`.
    Continue(S),
}
impl<Ctx, S, T, F, A> Transaction for LoopFn<Ctx, F, A>
where
    F: Fn(S) -> A,
    A: IntoTransaction<Ctx, Item = Loop<S, T>>,
{
    type Ctx = Ctx;
    type Item = T;
    type Err = A::Err;
    /// Runs the stored first transaction, then keeps building and running
    /// follow-up transactions from `f` while `Loop::Continue` is returned.
    /// Any `Err` from an iteration aborts the loop immediately via `?`.
    fn run(&self, ctx: &mut Self::Ctx) -> Result<Self::Item, Self::Err> {
        let mut status = self.tx.run(ctx)?;
        loop {
            match status {
                Loop::Break(value) => return Ok(value),
                Loop::Continue(state) => {
                    status = (self.f)(state).into_transaction().run(ctx)?;
                }
            }
        }
    }
}
| true
|
371d0ff949efd34a6ff9279bb192de554adde985
|
Rust
|
JakeHartnell/oasis-rs
|
/tests/xcc-b/src/main.rs
|
UTF-8
| 477
| 2.71875
| 3
|
[
"Apache-2.0"
] |
permissive
|
use oasis_std::{Context, Service};
use serde::{Deserialize, Serialize};
/// Test service exercising cross-contract calls.
#[derive(Service)]
pub struct ServiceB {
    // Seed supplied at construction. NOTE(review): currently unread —
    // `random` below returns a constant; confirm whether the seed should
    // influence the output.
    seed: Number,
}
/// Newtype around a `u8`, used as both the service seed and RPC payloads.
#[derive(Serialize, Deserialize, Clone)]
pub struct Number(pub u8);
impl ServiceB {
pub fn new(_ctx: &Context, seed: Number) -> Self {
Self { seed }
}
pub fn random(&self, _ctx: &Context, count: Number) -> Vec<Number> {
vec![Number(4); count.0 as usize]
}
}
fn main() {
    // Entry point: hands control to the Oasis service runtime for ServiceB.
    oasis_std::service!(ServiceB);
}
| true
|
21b5d74372e86a154e4b141a0919b35658ff945d
|
Rust
|
microsoft/qsharp-runtime
|
/src/Simulation/qdk_sim_rs/src/error.rs
|
UTF-8
| 5,619
| 2.921875
| 3
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"Unlicense"
] |
permissive
|
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
//! Module defining common errors that can occur during quantum simulations.
use std::{backtrace::Backtrace, str::Utf8Error};
use miette::Diagnostic;
use ndarray::ShapeError;
use thiserror::Error;
/// Represents errors that can occur during quantum simulations.
// NOTE(review): the doc comment previously said "linear algebra operations",
// but per the module doc above this enum covers simulation errors broadly.
#[derive(Debug, Diagnostic, Error)]
pub enum QdkSimError {
    // NB: As a design note, please consider if a more specific error is better
    // suited for your usecase before returning `MiscError`.
    /// Raised on miscellaneous errors. Normally constructed via
    /// [`QdkSimError::misc`], which captures the backtrace for you.
    #[error("{msg}")]
    #[diagnostic(code(qdk_sim::other))]
    MiscError {
        /// The message associated with this error.
        msg: String,
        /// A backtrace from where this error was originally raised.
        #[backtrace]
        backtrace: std::backtrace::Backtrace,
    },
    /// Raised when functionality that has not yet been implemented is called.
    #[error("Not yet implemented: {0}")]
    #[diagnostic(code(qdk_sim::not_yet_implemented))]
    NotYetImplemented(String),
    /// Raised when the wrong number of qubits is provided for a quantum
    /// process.
    #[error("Channel acts on {expected} qubits, but was applied to an {actual}-qubit state.")]
    #[diagnostic(code(qdk_sim::process::wrong_n_qubits))]
    WrongNumberOfQubits {
        /// The number of qubits that was expected, as given by the size of the
        /// channel to be applied.
        expected: usize,
        /// The actual number of qubits for the given state.
        actual: usize,
    },
    /// Raised when a channel cannot be applied to a given state due to a
    /// mismatch between channel and state kinds.
    #[error("Unsupported quantum process variant {channel_variant} for applying to state variant {state_variant}.")]
    #[diagnostic(code(qdk_sim::process::unsupported_apply))]
    UnsupportedApply {
        /// The enum variant of the channel to be applied.
        channel_variant: &'static str,
        /// The enum variant of the state that the channel is to be applied to.
        state_variant: &'static str,
    },
    /// Raised when a matrix is singular, and thus does not have an inverse.
    #[error("expected invertible matrix, but got a singular or very poorly conditioned matrix (det = {det})")]
    #[diagnostic(code(qdk_sim::linalg::singular))]
    Singular {
        /// Actual determinant of the matrix which caused this error.
        det: f64,
    },
    /// Raised when a shape error occurs internally to [`qdk_sim`].
    #[error(transparent)]
    #[diagnostic(code(qdk_sim::linalg::internal_shape))]
    InternalShapeError(
        #[from]
        #[backtrace]
        ShapeError,
    ),
    /// Raised when an algorithm requires a matrix to be square, but a
    /// rectangular matrix was passed instead.
    #[error("expected square matrix, but got shape `{0}` × `{1}")]
    #[diagnostic(code(qdk_sim::linalg::not_square))]
    NotSquare(usize, usize),
    /// Raised when an algorithm needs to convert an element between two
    /// different scalar types, but no such conversion exists for those types.
    #[error("could not convert value of type `{0}` into element type `{1}`")]
    #[diagnostic(code(qdk_sim::linalg::cannot_convert_element))]
    CannotConvertElement(String, String),
    /// Raised when no noise model exists for a given name.
    #[error("{0} is not the name of any valid noise model")]
    #[diagnostic(code(qdk_sim::noise_model::invalid_repr))]
    InvalidNoiseModel(String),
    /// Raised when an initial state representation is invalid.
    #[error("C API error: {0} is not a valid initial state representation")]
    #[diagnostic(code(qdk_sim::c_api::invalid_repr))]
    InvalidRepresentation(String),
    /// Raised when a null pointer is passed through the C API.
    #[error("C API error: {0} was null")]
    #[diagnostic(code(qdk_sim::c_api::nullptr))]
    NullPointer(String),
    /// Raised when an invalid simulator ID is passed to the C API.
    #[error("C API error: No simulator with ID {invalid_id} exists. Expected: {expected:?}.")]
    #[diagnostic(code(qdk_sim::c_api::invalid_sim))]
    NoSuchSimulator {
        /// The invalid simulator id which caused this error.
        invalid_id: usize,
        /// A list of valid simulator ids at the point when this error occured.
        expected: Vec<usize>,
    },
    /// Raised when a string passed to the C API contains could not be decoded
    /// as a UTF-8 string.
    #[error("C API error: UTF-8 error decoding {arg_name} argument: {source}")]
    #[diagnostic(code(qdk_sim::c_api::utf8))]
    InvalidUtf8InArgument {
        /// The name of the argument containing invalid UTF-8 data.
        arg_name: String,
        /// The underlying UTF-8 error that caused this error.
        #[source]
        source: Utf8Error,
    },
    /// Raised when a JSON serialization error occurs during a C API call.
    #[error(transparent)]
    #[diagnostic(code(qdk_sim::c_api::json_deser))]
    JsonDeserializationError(
        #[from]
        #[backtrace]
        serde_json::Error,
    ),
    /// Raised when an unanticipated error occurs during a C API call.
    #[error(transparent)]
    #[diagnostic(code(qdk_sim::c_api::unanticipated))]
    UnanticipatedCApiError(
        #[from]
        #[backtrace]
        anyhow::Error,
    ),
}
impl QdkSimError {
    /// Builds a [`QdkSimError::MiscError`] from any string-like message,
    /// forcibly capturing a backtrace at the call site.
    pub(crate) fn misc<T: Into<String>>(msg: T) -> Self {
        QdkSimError::MiscError {
            msg: msg.into(),
            backtrace: Backtrace::force_capture(),
        }
    }
}
| true
|
d8e266d1c9a1c6c8dbcbed200076f767be54c702
|
Rust
|
prz23/zinc
|
/zinc-vm/src/error.rs
|
UTF-8
| 3,413
| 3.0625
| 3
|
[
"Apache-2.0"
] |
permissive
|
//!
//! The Zinc virtual machine error.
//!
use num::BigInt;
use thiserror::Error;
/// A mismatch between a declared input/output size and the size of the value
/// actually provided.
#[derive(Debug, Error)]
pub enum TypeSizeError {
    /// The provided input value's size differs from the expected input size.
    #[error("expected input value of size {expected}, found {found}")]
    Input { expected: usize, found: usize },
    /// The provided output value's size differs from the expected output size.
    #[error("expected output value of size {expected}, found {found}")]
    Output { expected: usize, found: usize },
}
/// Errors caused by structurally invalid or internally inconsistent bytecode.
#[derive(Debug, Error)]
pub enum MalformedBytecode {
    #[error("invalid arguments to an intrinsic function: {0}")]
    InvalidArguments(String),
    // The next four variants indicate control-flow markers appearing without
    // their matching opening instruction.
    #[error("unexpected `loop_end` instruction")]
    UnexpectedLoopEnd,
    #[error("unexpected `return` instruction")]
    UnexpectedReturn,
    #[error("unexpected `else` instruction")]
    UnexpectedElse,
    #[error("unexpected `end_if` instruction")]
    UnexpectedEndIf,
    #[error("stack underflow")]
    StackUnderflow,
    #[error("reading uninitialized memory")]
    UninitializedStorageAccess,
    #[error("conditional branches produced results of different sizes")]
    BranchStacksDoNotMatch,
}
/// Errors raised while verifying a proof or witness.
#[derive(Debug, Error)]
pub enum VerificationError {
    /// The value does not fit in the proving system's scalar field.
    #[error("value overflow: value {0} is not in the field")]
    ValueOverflow(BigInt),
    /// Circuit synthesis failed. NOTE(review): wrapped manually rather than
    /// via `#[from]`/`#[source]`, so the inner error is not exposed through
    /// `Error::source` — confirm that is intentional.
    #[error("failed to synthesize circuit: {0}")]
    SynthesisError(franklin_crypto::bellman::SynthesisError),
}
/// Top-level Zinc virtual machine error, aggregating lower-level error kinds
/// via `#[from]` conversions plus VM-specific failures.
#[derive(Debug, Error)]
pub enum Error {
    // Wrapped external error sources.
    #[error("synthesis error: {0}")]
    SynthesisError(#[from] franklin_crypto::bellman::SynthesisError),
    #[error("database error: {0}")]
    DatabaseError(#[from] sqlx::Error),
    #[error("zkSync error: {0}")]
    ZkSyncClient(#[from] zksync::error::ClientError),
    #[error("internal error in virtual machine: {0}")]
    InternalError(String),
    #[error("malformed bytecode: {0}")]
    MalformedBytecode(#[from] MalformedBytecode),
    #[error("require error: {0}")]
    RequireError(String),
    #[error(
        "index out of bounds: expected index in range {lower_bound}..{upper_bound}, found {found}"
    )]
    IndexOutOfBounds {
        lower_bound: usize,
        upper_bound: usize,
        found: usize,
    },
    #[error("type error: expected {expected}, found {found}")]
    TypeError { expected: String, found: String },
    #[error("constant value expected, found variable (witness)")]
    ExpectedConstant,
    #[error("size is too large: {0}")]
    ExpectedUsize(BigInt),
    #[error("value overflow or constraint violation")]
    UnsatisfiedConstraint,
    // Arithmetic failures.
    #[error("division by zero")]
    DivisionByZero,
    #[error("inverting zero")]
    ZeroInversion,
    #[error("type size mismatch: {0}")]
    TypeSize(#[from] TypeSizeError),
    #[error("overflow: value {value} is not in range of type {scalar_type}")]
    ValueOverflow {
        value: BigInt,
        scalar_type: zinc_types::ScalarType,
    },
    #[error("the unit test data is missing")]
    UnitTestDataMissing,
    #[error("the instruction is available only for contracts")]
    OnlyForContracts,
    #[error("invalid storage value")]
    InvalidStorageValue,
    // Contract lifecycle errors; `address` is pre-formatted by the caller.
    #[error("contract {address} does not exist")]
    ContractNotFound { address: String },
    #[error("contract {address} already exists")]
    ContractAlreadyExists { address: String },
    #[error("contract instance {address} cannot be fetched twice")]
    ContractAlreadyFetched { address: String },
    #[error("contract method `{found}` does not exist")]
    MethodNotFound { found: String },
}
| true
|
51f25fc786d0248808b79bd3822db4f9b9f4a27c
|
Rust
|
23prime/completion-judgment
|
/src/rules.rs
|
UTF-8
| 2,078
| 2.921875
| 3
|
[] |
no_license
|
use types::*;
use std::collections::HashMap;
/// Map from credit-group name to the number of credits required in it.
pub type Require = HashMap<Group, CreditNum>;
// Rules
pub fn require() -> Require {
let mut req = HashMap::new();;
req.insert("基礎科目".to_string(), 1.0);
req.insert("専攻共通 必修".to_string(), 1.0);
req.insert("専攻共通 選択".to_string(), 4.0);
req.insert("教科教育(数学教育)".to_string(), 6.0);
req.insert("教科専門(数学)".to_string(), 12.0);
req.insert("教科選択(研究)".to_string(), 6.0);
req.insert("その他".to_string(), 0.0);
return req;
}
/// Classifies a course code into its credit group.
///
/// The code is read positionally: the first three characters select the
/// faculty prefix ("01B"), the next four digits the course, with the first
/// of those digits further distinguishing sub-groups. Panics if the code is
/// shorter than five characters (same as the original indexing behaviour).
pub fn mk_group(cd: &Code) -> Group {
    let (prefix, course) = cd.split_at(3);
    let (section, tail) = course.split_at(1);
    let tail_chars: Vec<char> = tail.chars().collect();
    let first_tail = tail_chars[0];
    let group = if prefix != "01B" {
        "その他"
    } else if course == "1001" {
        "基礎科目"
    } else if course == "1011" {
        "専攻共通 必修"
    } else if section == "2" {
        "専攻共通 選択"
    } else if section != "6" {
        "その他"
    } else if first_tail == '1' {
        "教科教育(数学教育)"
    } else if first_tail == '5' {
        "教科選択(研究)"
    } else {
        "教科専門(数学)"
    };
    group.to_string()
}
// Parse Credits
/// Returns the position of header `s` within the header row `ss`.
///
/// Panics with "Index not found" when the header is absent — the CSV layout
/// is expected to always carry the column.
fn find_term_index(s: &str, ss: &Vec<String>) -> usize {
    ss.iter()
        .position(|ssi| ssi.as_str() == s)
        .expect("Index not found")
}
/// Builds a `Credit` record from one CSV data row `ds`, using the header
/// row `ss` to locate each column by name.
pub fn mk_credit(ss: &Vec<String>, ds: &Vec<String>) -> Credit {
    // Helper: value of the column whose header equals `name`.
    let field = |name: &str| &ds[find_term_index(name, ss)];
    // Lookups happen in the same order as before, so a missing header
    // panics at the same point.
    let cd = field("科目番号");
    let title = field("科目名");
    let grade = field("総合評価");
    Credit {
        code: cd.to_string(),
        title: title.to_string(),
        num: field("単位数").parse().unwrap(),
        grade: grade.to_string(),
        group: mk_group(cd),
    }
}
| true
|
95db4ae343818487e1b3702599505081b09d3972
|
Rust
|
laanwj/k210-pac
|
/src/aes/data_out_flag.rs
|
UTF-8
| 4,941
| 2.71875
| 3
|
[
"ISC"
] |
permissive
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the register taken at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value that will be committed to the register on write.
    bits: u32,
}
// svd2rust-style accessor block for the DATA_OUT_FLAG register: read,
// write, read-modify-write, and reset. Kept byte-identical — the ordering
// of the volatile `register.get()` / `register.set()` calls is significant.
impl super::DATA_OUT_FLAG {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: snapshot once, hand the closure the current
        // value (R) and a writable copy (W), then commit the result.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Starts from the reset value, NOT the current hardware contents;
        // fields not touched by `f` are written as their reset state.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `data_out_flag`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DATA_OUT_FLAGR {
    #[doc = "Data cannot output"]
    CANNOT_OUTPUT, // bit value 0
    #[doc = "Data can output"]
    CAN_OUTPUT, // bit value 1
}
// Read-side accessors for the single-bit `data_out_flag` field.
impl DATA_OUT_FLAGR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        // CAN_OUTPUT is the "set" (1) state of this field.
        *self == DATA_OUT_FLAGR::CAN_OUTPUT
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> DATA_OUT_FLAGR {
        if value {
            DATA_OUT_FLAGR::CAN_OUTPUT
        } else {
            DATA_OUT_FLAGR::CANNOT_OUTPUT
        }
    }
    #[doc = "Checks if the value of the field is `CANNOT_OUTPUT`"]
    #[inline]
    pub fn is_cannot_output(&self) -> bool {
        match *self {
            DATA_OUT_FLAGR::CANNOT_OUTPUT => true,
            DATA_OUT_FLAGR::CAN_OUTPUT => false,
        }
    }
    #[doc = "Checks if the value of the field is `CAN_OUTPUT`"]
    #[inline]
    pub fn is_can_output(&self) -> bool {
        match *self {
            DATA_OUT_FLAGR::CAN_OUTPUT => true,
            DATA_OUT_FLAGR::CANNOT_OUTPUT => false,
        }
    }
}
#[doc = "Values that can be written to the field `data_out_flag`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DATA_OUT_FLAGW {
    #[doc = "Data cannot output"]
    CANNOT_OUTPUT, // writes 0
    #[doc = "Data can output"]
    CAN_OUTPUT, // writes 1
}
// Write-side encoding of the field: mirrors the read side exactly.
impl DATA_OUT_FLAGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // CAN_OUTPUT encodes to 1, CANNOT_OUTPUT to 0.
        *self == DATA_OUT_FLAGW::CAN_OUTPUT
    }
}
#[doc = r" Proxy"]
pub struct _DATA_OUT_FLAGW<'a> {
    // Mutable borrow of the register writer this proxy edits.
    w: &'a mut W,
}
// Field-writer proxy: every method consumes the proxy and hands back the
// underlying `W` so calls can be chained.
impl<'a> _DATA_OUT_FLAGW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: DATA_OUT_FLAGW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Data cannot output"]
    #[inline]
    pub fn cannot_output(self) -> &'a mut W {
        self.variant(DATA_OUT_FLAGW::CANNOT_OUTPUT)
    }
    #[doc = "Data can output"]
    #[inline]
    pub fn can_output(self) -> &'a mut W {
        self.variant(DATA_OUT_FLAGW::CAN_OUTPUT)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        // Clear the field's bit, then OR in the (masked) new value.
        let cleared = self.w.bits & !((MASK as u32) << OFFSET);
        self.w.bits = cleared | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
// Read-side view of the register: raw bits plus one typed field accessor.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 0 - Data can be read from out_data when this flag is set"]
    #[inline]
    pub fn data_out_flag(&self) -> DATA_OUT_FLAGR {
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        // Extract bit 0 and map it onto the field's enum.
        let raw = ((self.bits >> OFFSET) & MASK as u32) != 0;
        DATA_OUT_FLAGR::_from(raw)
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // The whole register resets to all-zero bits.
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe per svd2rust convention: caller must ensure the raw value
        // is valid for every field of this register.
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - Data can be read from out_data when this flag is set"]
    #[inline]
    pub fn data_out_flag(&mut self) -> _DATA_OUT_FLAGW {
        // Returns a proxy that edits bit 0 of this writer in place.
        _DATA_OUT_FLAGW { w: self }
    }
}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.