blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
06086c5a4a73b4205d2119703a41c42e594f5284
|
Rust
|
jridgewell/faster-from_utf8
|
/src/main.rs
|
UTF-8
| 23,937
| 2.515625
| 3
|
[] |
no_license
|
#![feature(test)]
extern crate test;
mod current;
mod proposal;
use std::fs::File;
use std::io::Read;
use test::{black_box, Bencher};
#[test]
fn from_utf8_ascii() {
    // The current and proposed validators must agree on the ASCII-only corpus.
    let bytes = LONG.as_bytes();
    let expected = current::run_utf8_validation(bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_ascii_regular(b: &mut Bencher) {
    // Throughput of the current validator on the ASCII-only corpus.
    let bytes = black_box(LONG.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(bytes, false).is_ok()));
}

#[bench]
fn from_utf8_ascii_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on the ASCII-only corpus.
    let bytes = black_box(LONG.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(bytes, false).is_ok()));
}
#[test]
fn from_utf8_mixed() {
    // The current and proposed validators must agree on the multilingual corpus.
    let bytes = MIXED.as_bytes();
    let expected = current::run_utf8_validation(bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_mixed_regular(b: &mut Bencher) {
    // Throughput of the current validator on the multilingual corpus.
    let bytes = black_box(MIXED.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(bytes, false).is_ok()));
}

#[bench]
fn from_utf8_mixed_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on the multilingual corpus.
    let bytes = black_box(MIXED.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(bytes, false).is_ok()));
}
#[test]
fn from_utf8_mostlyasc() {
    // The current and proposed validators must agree on the mostly-ASCII corpus.
    let bytes = MOSTLY_ASCII.as_bytes();
    let expected = current::run_utf8_validation(bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_mostlyasc_regular(b: &mut Bencher) {
    // Throughput of the current validator on the mostly-ASCII corpus.
    let bytes = black_box(MOSTLY_ASCII.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(bytes, false).is_ok()));
}

#[bench]
fn from_utf8_mostlyasc_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on the mostly-ASCII corpus.
    let bytes = black_box(MOSTLY_ASCII.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(bytes, false).is_ok()));
}
#[test]
fn from_utf8_cyr() {
    // The current and proposed validators must agree on the Cyrillic corpus.
    let bytes = LONG_CY.as_bytes();
    let expected = current::run_utf8_validation(bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_cyr_regular(b: &mut Bencher) {
    // Throughput of the current validator on the Cyrillic corpus.
    let bytes = black_box(LONG_CY.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(bytes, false).is_ok()));
}

#[bench]
fn from_utf8_cyr_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on the Cyrillic corpus.
    let bytes = black_box(LONG_CY.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(bytes, false).is_ok()));
}
#[test]
fn from_utf8_enwik8() {
    // Compare both validators on the enwik8 corpus loaded from disk
    // (the file must be present in the working directory).
    let mut bytes = Vec::new();
    File::open("enwik8")
        .unwrap()
        .read_to_end(&mut bytes)
        .unwrap();
    let expected = current::run_utf8_validation(&bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(&bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_enwik8_regular(b: &mut Bencher) {
    // Throughput of the current validator on the enwik8 corpus.
    let mut bytes = Vec::new();
    File::open("enwik8")
        .unwrap()
        .read_to_end(&mut bytes)
        .unwrap();
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(&bytes, false).is_ok()));
}

#[bench]
fn from_utf8_enwik8_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on the enwik8 corpus.
    let mut bytes = Vec::new();
    File::open("enwik8")
        .unwrap()
        .read_to_end(&mut bytes)
        .unwrap();
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(&bytes, false).is_ok()));
}
#[test]
fn from_utf8_jawik10() {
    // Compare both validators on the jawik10 corpus loaded from disk
    // (the file must be present in the working directory).
    let mut bytes = Vec::new();
    File::open("jawik10")
        .unwrap()
        .read_to_end(&mut bytes)
        .unwrap();
    let expected = current::run_utf8_validation(&bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(&bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_jawik10_regular(b: &mut Bencher) {
    // Throughput of the current validator on the jawik10 corpus.
    let mut bytes = Vec::new();
    File::open("jawik10")
        .unwrap()
        .read_to_end(&mut bytes)
        .unwrap();
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(&bytes, false).is_ok()));
}

#[bench]
fn from_utf8_jawik10_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on the jawik10 corpus.
    let mut bytes = Vec::new();
    File::open("jawik10")
        .unwrap()
        .read_to_end(&mut bytes)
        .unwrap();
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(&bytes, false).is_ok()));
}
#[test]
fn from_utf8_2_bytes() {
    // Both validators must agree on a stream of 2-byte UTF-8 sequences.
    let s = "£".repeat(200);
    let bytes = s.as_bytes();
    let expected = current::run_utf8_validation(bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_2_bytes_regular(b: &mut Bencher) {
    // Throughput of the current validator on 2-byte sequences.
    let s = "£".repeat(200);
    let bytes = black_box(s.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(bytes, false).is_ok()));
}

#[bench]
fn from_utf8_2_bytes_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on 2-byte sequences.
    let s = "£".repeat(200);
    let bytes = black_box(s.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(bytes, false).is_ok()));
}
#[test]
fn from_utf8_3_bytes() {
    // Both validators must agree on a stream of 3-byte UTF-8 sequences.
    let s = "ࠀက".repeat(200);
    let bytes = s.as_bytes();
    let expected = current::run_utf8_validation(bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_3_bytes_regular(b: &mut Bencher) {
    // Throughput of the current validator on 3-byte sequences.
    let s = "ࠀက".repeat(200);
    let bytes = black_box(s.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(bytes, false).is_ok()));
}

#[bench]
fn from_utf8_3_bytes_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on 3-byte sequences.
    let s = "ࠀက".repeat(200);
    let bytes = black_box(s.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(bytes, false).is_ok()));
}
#[test]
fn from_utf8_4_bytes() {
    // Both validators must agree on a stream of 4-byte UTF-8 sequences.
    let s = "𐀀".repeat(200);
    let bytes = s.as_bytes();
    let expected = current::run_utf8_validation(bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_4_bytes_regular(b: &mut Bencher) {
    // Throughput of the current validator on 4-byte sequences.
    let s = "𐀀".repeat(200);
    let bytes = black_box(s.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(bytes, false).is_ok()));
}

#[bench]
fn from_utf8_4_bytes_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on 4-byte sequences.
    let s = "𐀀".repeat(200);
    let bytes = black_box(s.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(bytes, false).is_ok()));
}
#[test]
fn from_utf8_all_bytes() {
    // Both validators must agree on mixed 2-, 3- and 4-byte sequences.
    let s = "£ࠀက𐀀".repeat(200);
    let bytes = s.as_bytes();
    let expected = current::run_utf8_validation(bytes, true).unwrap();
    let actual = proposal::run_utf8_validation(bytes, true).unwrap();
    assert_eq!(expected, actual);
}

#[bench]
fn from_utf8_all_bytes_regular(b: &mut Bencher) {
    // Throughput of the current validator on mixed-width sequences.
    let s = "£ࠀက𐀀".repeat(200);
    let bytes = black_box(s.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(current::run_utf8_validation(bytes, false).is_ok()));
}

#[bench]
fn from_utf8_all_bytes_fast(b: &mut Bencher) {
    // Throughput of the proposed validator on mixed-width sequences.
    let s = "£ࠀက𐀀".repeat(200);
    let bytes = black_box(s.as_bytes());
    b.bytes = bytes.len() as u64;
    b.iter(|| assert!(proposal::run_utf8_validation(bytes, false).is_ok()));
}
static LONG: &'static str = "\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
tempus vel, gravida nec quam.
In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
interdum. Curabitur ut nisi justo.
Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
Aliquam sit amet placerat lorem.
Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
cursus accumsan.
Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
malesuada sollicitudin quam eu fermentum.";
static LONG_CY: &'static str = "\
Брутэ дольорэ компрэхэнжам йн эжт, ючю коммюны дылыктуч эа, квюо льаорыыт вёвындо мэнандря экз. Ед ыюм емпыдит аккюсам, нык дйкит ютенам ад. Хаж аппэтырэ хонэзтатёз нэ. Ад мовэт путант юрбанйтаж вяш.
Коммодо квюальизквюэ абхоррэант нэ ыюм, праэчынт еракюндйа ылаборарэт эю мыа. Нэ квуым жюмо вольуптатибюж вяш, про ыт бонорюм вёвындо, мэя юллюм новум ку. Пропрёаы такематыш атоморюм зыд ан. Эи омнэжквюы оффекйяж компрэхэнжам жят, апыирёан конкыптам ёнкорруптэ ючю ыт.
Жят алёа лэгыры ед, эи мацим оффэндйт вим. Нык хёнк льаборэж йн, зыд прима тимэам ан. Векж нужквюам инимёкюж ты, ыам эа омнеж ырант рэформйданч. Эрож оффекйяж эю вэл.
Ад нам ножтрюд долорюм, еюж ут вэрыар эюрйпйдяч. Квюач аффэрт тинкидюнт про экз, дёкант вольуптатибюж ат зыд. Ыт зыд экшырки констятюам. Квюо квюиж юрбанйтаж ометтантур экз, хёз экз мютат граэкы рыкючабо, нэ прё пюрто элитр пэрпэтюа. Но квюандо минемум ыам.
Амэт лыгимуз ометтантур кюм ан. Витюпырата котёдиэквюэ нам эю, эю вокынт алёквюам льебэравичсы жят. Экз пыртенакж янтэрэсщэт инзтруктеор нам, еюж ад дйкит каючаэ, шэа витаэ конжтетуто ут. Квюач мандамюч кюм ат, но ёнкорруптэ рэформйданч ючю, незл либриз аюдирэ зыд эи. Ты эож аугюэ иреуры льюкяльиюч, мэль алььтыра докэндё омнэжквюы ат. Анёмал жямиляквюы аккоммодары ыам нэ, экз пэрчёус дэфянятйоныс квюо. Эи дуо фюгит маиорюм.
Эвэртё партйэндо пытынтёюм ыюм ан, шэа ку промпта квюаырэндум. Агам дикунт вим ку. Мюкиуж аюдиам тамквюам про ут, ку мыа квюод квюот эррэм, вяш ад номинави зючкёпит янжольэнж. Нык эи пожжёт путант эффякиантур. Ку еюж нощтыр контынтёонэж. Кюм йужто харюм ёужто ад, ыюм оратио квюоджё экз.
Чонэт факэтэ кюм ан, вэре факэр зальютатуж мэя но. Ыюм ут зальы эффикеэнди, экз про алиё конжыквуюнтюр. Квуй ыльит хабымуч ты, алёа омнэжквюы мандамюч шэа ыт, пльакырат аккюжамюз нэ мэль. Хаж нэ партым нюмквуам прёнкипыз, ат импэрдеэт форынчйбюж кончэктэтюыр шэа. Пльакырат рэформйданч эи векж, ючю дюиж фюйзчыт эи.
Экз квюо ажжюм аугюэ, ат нык мёнём анёмал кытэрож. Кюм выльёт эрюдитя эа. Йн порро малйж кончэктэтюыр хёз, жят кашы эрюдитя ат. Эа вяш мацим пыртенакж, но порро утамюр дяшзынтиыт кюм. Ыт мютат зючкёпит эож, нэ про еракюндйа котёдиэквюэ. Квуй лаудым плььатонэм ед, ку вим ножтрюм лаборамюз.
Вёжи янвыняры хаж ед, ты нолюёжжэ индоктум квуй. Квюач тебиквюэ ут жят, тальэ адхюк убяквюэ йн эож. Ыррор бландит вяш ан, ютроквюы нолюёжжэ констятюам йн ыюм, жят эи прима нобёз тхэопхражтуз. Ты дёкант дэльэнйт нолюёжжэ пэр, молыжтйаы модыратиюз интыллыгам ку мэль.
Ад ылаборарэт конжыквуюнтюр ентырпрытаряш прё, факэтэ лыгэндоч окюррырэт вим ад, элитр рэформйданч квуй ед. Жюмо зальы либриз мэя ты. Незл зюаз видишчы ан ыюм, но пожжэ молыжтйаы мэль. Фиэрэнт адипижкй ометтантур квюо экз. Ут мольлиз пырикюлёз квуй. Ыт квюиж граэко рыпудяары жят, вим магна обльйквюэ контынтёонэж эю, ты шэа эним компльыктётюр.
";
static MIXED: &'static str = "\
Sentences that contain all letters commonly used in a language
--------------------------------------------------------------
Markus Kuhn <http://www.cl.cam.ac.uk/~mgk25/> -- 2012-04-11
This is an example of a plain-text file encoded in UTF-8.
Danish (da)
---------
Quizdeltagerne spiste jordbær med fløde, mens cirkusklovnen
Wolther spillede på xylofon.
(= Quiz contestants were eating strawbery with cream while Wolther
the circus clown played on xylophone.)
German (de)
-----------
Falsches Üben von Xylophonmusik quält jeden größeren Zwerg
(= Wrongful practicing of xylophone music tortures every larger dwarf)
Zwölf Boxkämpfer jagten Eva quer über den Sylter Deich
(= Twelve boxing fighters hunted Eva across the dike of Sylt)
Heizölrückstoßabdämpfung
(= fuel oil recoil absorber)
(jqvwxy missing, but all non-ASCII letters in one word)
Greek (el)
----------
Γαζέες καὶ μυρτιὲς δὲν θὰ βρῶ πιὰ στὸ χρυσαφὶ ξέφωτο
(= No more shall I see acacias or myrtles in the golden clearing)
Ξεσκεπάζω τὴν ψυχοφθόρα βδελυγμία
(= I uncover the soul-destroying abhorrence)
English (en)
------------
The quick brown fox jumps over the lazy dog
Spanish (es)
------------
El pingüino Wenceslao hizo kilómetros bajo exhaustiva lluvia y
frío, añoraba a su querido cachorro.
(Contains every letter and every accent, but not every combination
of vowel + acute.)
French (fr)
-----------
Portez ce vieux whisky au juge blond qui fume sur son île intérieure, à
côté de l'alcôve ovoïde, où les bûches se consument dans l'âtre, ce
qui lui permet de penser à la cænogenèse de l'être dont il est question
dans la cause ambiguë entendue à Moÿ, dans un capharnaüm qui,
pense-t-il, diminue çà et là la qualité de son œuvre.
l'île exiguë
Où l'obèse jury mûr
Fête l'haï volapük,
Âne ex aéquo au whist,
Ôtez ce vœu déçu.
Le cœur déçu mais l'âme plutôt naïve, Louÿs rêva de crapaüter en
canoë au delà des îles, près du mälström où brûlent les novæ.
Irish Gaelic (ga)
-----------------
D'fhuascail Íosa, Úrmhac na hÓighe Beannaithe, pór Éava agus Ádhaimh
Hungarian (hu)
--------------
Árvíztűrő tükörfúrógép
(= flood-proof mirror-drilling machine, only all non-ASCII letters)
Icelandic (is)
--------------
Kæmi ný öxi hér ykist þjófum nú bæði víl og ádrepa
Sævör grét áðan því úlpan var ónýt
(some ASCII letters missing)
Japanese (jp)
-------------
Hiragana: (Iroha)
いろはにほへとちりぬるを
わかよたれそつねならむ
うゐのおくやまけふこえて
あさきゆめみしゑひもせす
Katakana:
イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン
Hebrew (iw)
-----------
? דג סקרן שט בים מאוכזב ולפתע מצא לו חברה איך הקליטה
Polish (pl)
-----------
Pchnąć w tę łódź jeża lub ośm skrzyń fig
(= To push a hedgehog or eight bins of figs in this boat)
Russian (ru)
------------
В чащах юга жил бы цитрус? Да, но фальшивый экземпляр!
(= Would a citrus live in the bushes of south? Yes, but only a fake one!)
Съешь же ещё этих мягких французских булок да выпей чаю
(= Eat some more of these fresh French loafs and have some tea)
Thai (th)
---------
[--------------------------|------------------------]
๏ เป็นมนุษย์สุดประเสริฐเลิศคุณค่า กว่าบรรดาฝูงสัตว์เดรัจฉาน
จงฝ่าฟันพัฒนาวิชาการ อย่าล้างผลาญฤๅเข่นฆ่าบีฑาใคร
ไม่ถือโทษโกรธแช่งซัดฮึดฮัดด่า หัดอภัยเหมือนกีฬาอัชฌาสัย
ปฏิบัติประพฤติกฎกำหนดใจ พูดจาให้จ๊ะๆ จ๋าๆ น่าฟังเอย ฯ
[The copyright for the Thai example is owned by The Computer
Association of Thailand under the Royal Patronage of His Majesty the
King.]
Turkish (tr)
------------
Pijamalı hasta, yağız şoföre çabucak güvendi.
(=Patient with pajamas, trusted swarthy driver quickly)
Special thanks to the people from all over the world who contributed
these sentences since 1999.
A much larger collection of such pangrams is now available at
http://en.wikipedia.org/wiki/List_of_pangrams
";
static MOSTLY_ASCII: &'static str = "\
Sentences that contain all letters commonly used in a language
--------------------------------------------------------------
Markus Kuhn <http://www.cl.cam.ac.uk/~mgk25/> -- 2012-04-11
This is an example of a plain-text file encoded in UTF-8.
Danish (da)
---------
Quizdeltagerne spiste jordbær med fløde, mens cirkusklovnen
Wolther spillede på xylofon.
(= Quiz contestants were eating strawbery with cream while Wolther
the circus clown played on xylophone.)
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
tempus vel, gravida nec quam.
In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
interdum. Curabitur ut nisi justo.
Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
Aliquam sit amet placerat lorem.
German (de)
-----------
Falsches Üben von Xylophonmusik quält jeden größeren Zwerg
(= Wrongful practicing of xylophone music tortures every larger dwarf)
Zwölf Boxkämpfer jagten Eva quer über den Sylter Deich
(= Twelve boxing fighters hunted Eva across the dike of Sylt)
Heizölrückstoßabdämpfung
(= fuel oil recoil absorber)
(jqvwxy missing, but all non-ASCII letters in one word)
Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
cursus accumsan.
Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
malesuada sollicitudin quam eu fermentum.
Special thanks to the people from all over the world who contributed
these sentences since 1999.
A much larger collection of such pangrams is now available at
http://en.wikipedia.org/wiki/List_of_pangrams
";
| true
|
aa2ed14e56bd4d7fff129071b6e27d72362b355d
|
Rust
|
haraldmaida/fixed-map
|
/src/lib.rs
|
UTF-8
| 1,712
| 3.359375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! A fixed-size Map implementation.
//!
//! This crate provides a map implementation that can make use of a fixed-size backing storage.
//!
//! ## The `Key` derive
//!
//! The `Key` derive is provided to construct optimized storage for a given Key.
//!
//! For example:
//!
//! ```rust
//! use fixed_map::{Key, Map};
//!
//! #[derive(Clone, Copy, Key)]
//! enum Part {
//! One,
//! Two,
//! }
//!
//! #[derive(Clone, Copy, Key)]
//! enum Key {
//! Simple,
//! Composite(Part),
//! String(&'static str),
//! Number(u32),
//! Singleton(()),
//! }
//!
//! let mut map = Map::new();
//!
//! map.insert(Key::Simple, 1);
//! map.insert(Key::Composite(Part::One), 2);
//! map.insert(Key::String("foo"), 3);
//! map.insert(Key::Number(1), 4);
//! map.insert(Key::Singleton(()), 5);
//!
//! assert_eq!(map.get(Key::Simple), Some(&1));
//! assert_eq!(map.get(Key::Composite(Part::One)), Some(&2));
//! assert_eq!(map.get(Key::Composite(Part::Two)), None);
//! assert_eq!(map.get(Key::String("foo")), Some(&3));
//! assert_eq!(map.get(Key::String("bar")), None);
//! assert_eq!(map.get(Key::Number(1)), Some(&4));
//! assert_eq!(map.get(Key::Number(2)), None);
//! assert_eq!(map.get(Key::Singleton(())), Some(&5));
//! ```
//!
//! ## Unsafe Use
//!
//! This crate uses unsafe for its iterators.
//! This is needed because there is no proper way to associate generic lifetimes to associated types.
//!
//! Instead, we associate the lifetime with the container (`Map` or `Set`), which wraps a set of unsafe derefs over raw pointers.
#![deny(missing_docs)]
pub mod key;
pub mod map;
pub mod set;
pub mod storage;
pub use self::map::Map;
pub use self::set::Set;
pub use fixed_map_derive::Key;
| true
|
a7d7faa9533f4b4b14fcba3858ce2db128c26bbd
|
Rust
|
mattsse/chromiumoxide
|
/examples/block-navigation.rs
|
UTF-8
| 6,935
| 2.578125
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use async_std::task::sleep;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use chromiumoxide::cdp::browser_protocol::fetch::{
self, ContinueRequestParams, EventRequestPaused, FailRequestParams, FulfillRequestParams,
};
use chromiumoxide::cdp::browser_protocol::network::{
self, ErrorReason, EventRequestWillBeSent, ResourceType,
};
use chromiumoxide::Page;
use futures::{select, StreamExt};
use chromiumoxide::browser::{Browser, BrowserConfig};
// Body served for locally fulfilled requests; the meta-refresh immediately
// redirects the page to example.com.
const CONTENT: &str = "<html><head><meta http-equiv=\"refresh\" content=\"0;URL='http://www.example.com/'\" /></head><body><h1>TEST</h1></body></html>";
// Requests for exactly this URL are fulfilled with `CONTENT` instead of
// being sent to the network.
const TARGET: &str = "http://google.com/";
/// Launches a browser with request interception enabled, classifies every
/// request via paired CDP events (`Network.requestWillBeSent` +
/// `Fetch.requestPaused`), and aborts top-level navigations while fulfilling
/// `TARGET` locally with `CONTENT`.
#[async_std::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    tracing_subscriber::fmt::init();
    // Spawn browser with interception on and the cache disabled so every
    // request actually reaches the interception path.
    let (mut browser, mut handler) = Browser::launch(
        BrowserConfig::builder()
            .enable_request_intercept()
            .disable_cache()
            .request_timeout(Duration::from_secs(1))
            .build()?,
    )
    .await?;
    // Drive the browser's event handler until it yields an error.
    let browser_handle = async_std::task::spawn(async move {
        while let Some(h) = handler.next().await {
            if h.is_err() {
                break;
            }
        }
    });
    // Setup request interception: subscribe to both event streams on a page.
    let page = Arc::new(browser.new_page("about:blank").await?);
    let mut request_will_be_sent = page
        .event_listener::<EventRequestWillBeSent>()
        .await
        .unwrap()
        .fuse();
    let mut request_paused = page
        .event_listener::<EventRequestPaused>()
        .await
        .unwrap()
        .fuse();
    let intercept_page = page.clone();
    // Join the two event streams: each request gets an `InterceptResolution`
    // keyed by its network-domain id, resolved once both events have arrived.
    let intercept_handle = async_std::task::spawn(async move {
        let mut resolutions: HashMap<network::RequestId, InterceptResolution> = HashMap::new();
        loop {
            select! {
                event = request_paused.next() => {
                    if let Some(event) = event {
                        // A paused event carrying a response status code is a
                        // response, not a request: let it through untouched.
                        if event.response_status_code.is_some() {
                            forward(&intercept_page, &event.request_id).await;
                            continue;
                        }
                        if let Some(network_id) = event.network_id.as_ref().map(|id| id.as_network_id()) {
                            // Record the fetch-domain id needed to act on this request.
                            let resolution = resolutions.entry(network_id.clone()).or_insert(InterceptResolution::new());
                            resolution.request_id = Some(event.request_id.clone());
                            // Requests for TARGET are answered locally.
                            if event.request.url == TARGET {
                                resolution.action = InterceptAction::Fullfill;
                            }
                            println!("paused: {resolution:?}, network: {network_id:?}");
                            resolve(&intercept_page, &network_id, &mut resolutions).await;
                        }
                    }
                },
                event = request_will_be_sent.next() => {
                    if let Some(event) = event {
                        // Classify: top-level navigations get aborted,
                        // everything else is forwarded to the network.
                        let resolution = resolutions.entry(event.request_id.clone()).or_insert(InterceptResolution::new());
                        let action = if is_navigation(&event) {
                            InterceptAction::Abort
                        } else {
                            InterceptAction::Forward
                        };
                        resolution.action = action;
                        println!("sent: {resolution:?}");
                        resolve(&intercept_page, &event.request_id, &mut resolutions).await;
                    }
                },
                // Both streams exhausted.
                complete => break,
            }
        }
        println!("done");
    });
    // Give the spawned interception task time to start before navigating.
    sleep(Duration::from_secs(5)).await;
    // Navigate to target.
    // NOTE(review): this URL lacks the trailing slash of TARGET; the paused
    // event presumably sees the normalized form — confirm against CDP output.
    page.goto("http://google.com").await?;
    let content = page.content().await?;
    println!("Content: {content}");
    browser.close().await?;
    browser_handle.await;
    intercept_handle.await;
    Ok(())
}
/// Decision for one intercepted request.
#[derive(Debug)]
enum InterceptAction {
    // Let the request continue to the network.
    Forward,
    // Fail the request (`ErrorReason::Aborted`).
    Abort,
    // Answer the request locally with `CONTENT`.
    Fullfill,
    // No decision yet — only one of the two CDP events has been seen.
    None,
}
/// Pending state for one in-flight request, keyed by network-domain id.
#[derive(Debug)]
struct InterceptResolution {
    // Action chosen so far; starts as `None` until classified.
    action: InterceptAction,
    // Fetch-domain id from the paused event; required to continue/fail/fulfill.
    request_id: Option<fetch::RequestId>,
}
impl InterceptResolution {
pub fn new() -> Self {
Self {
action: InterceptAction::None,
request_id: None,
}
}
}
/// Conversion from a fetch-domain request id to a network-domain request id.
trait RequestIdExt {
    fn as_network_id(&self) -> network::RequestId;
}
impl RequestIdExt for fetch::RequestId {
    // Both id types wrap the same underlying string, so conversion is a
    // clone of the inner value.
    fn as_network_id(&self) -> network::RequestId {
        network::RequestId::new(self.inner().clone())
    }
}
/// Returns true when the event is a top-level navigation: its request id
/// equals its loader id and its resource type is `Document`.
fn is_navigation(event: &EventRequestWillBeSent) -> bool {
    // Return the boolean expression directly instead of
    // `if cond { return true; } false`.
    event.request_id.inner() == event.loader_id.inner()
        && event
            .r#type
            .as_ref()
            .map(|t| *t == ResourceType::Document)
            .unwrap_or(false)
}
/// Applies the recorded action for `network_id` once both the paused event
/// (which supplies `request_id`) and the will-be-sent event (which supplies
/// `action`) have been seen; resolved entries are dropped from the map.
async fn resolve(
    page: &Page,
    network_id: &network::RequestId,
    resolutions: &mut HashMap<network::RequestId, InterceptResolution>,
) {
    if let Some(resolution) = resolutions.get(network_id) {
        if let Some(request_id) = &resolution.request_id {
            match resolution.action {
                InterceptAction::Forward => forward(page, request_id).await,
                InterceptAction::Abort => abort(page, request_id).await,
                InterceptAction::Fullfill => fullfill(page, request_id).await,
                // Paused event processed but will-be-sent not seen yet: keep
                // the entry until an action is decided.
                InterceptAction::None => return,
            }
            // Cleanup hoisted out of the arms — it was duplicated three times.
            resolutions.remove(network_id);
        }
    }
}
/// Continues the paused request so it proceeds to the network.
async fn forward(page: &Page, request_id: &fetch::RequestId) {
    println!("Request {request_id:?} forwarded");
    let result = page
        .execute(ContinueRequestParams::new(request_id.clone()))
        .await;
    if let Err(e) = result {
        println!("Failed to forward request: {e}");
    }
}
/// Fails the paused request with `ErrorReason::Aborted`.
async fn abort(page: &Page, request_id: &fetch::RequestId) {
    println!("Request {request_id:?} aborted");
    let params = FailRequestParams::new(request_id.clone(), ErrorReason::Aborted);
    if let Err(e) = page.execute(params).await {
        println!("Failed to abort request: {e}");
    }
}
/// Answers the paused request locally with a 200 response whose body is the
/// base64-encoded `CONTENT` page.
async fn fullfill(page: &Page, request_id: &fetch::RequestId) {
    println!("Request {request_id:?} fullfilled");
    let params = FulfillRequestParams::builder()
        .request_id(request_id.clone())
        .response_code(200)
        .body(BASE64_STANDARD.encode(CONTENT))
        .build()
        .unwrap();
    if let Err(e) = page.execute(params).await {
        println!("Failed to fullfill request: {e}");
    }
}
| true
|
68a8a189fbc8baeb010fb6a75e4de053a9d48986
|
Rust
|
Arekkusuva/actix-web-sample
|
/src/api/errors.rs
|
UTF-8
| 362
| 2.953125
| 3
|
[] |
no_license
|
/// Maps a validated field name to a human-readable error message.
pub trait IntoValidationErrorStr {
    fn into_validation_error_str(self) -> &'static str;
}

impl<'a> IntoValidationErrorStr for &'a str {
    /// Returns the field-specific message for known fields, or a generic
    /// fallback for everything else.
    fn into_validation_error_str(self) -> &'static str {
        if self == "email" {
            "must be email address"
        } else if self == "password" {
            "must have at least 7 characters"
        } else {
            "validation failed"
        }
    }
}
| true
|
0cc872cdbc5d30b7e9d891d056fec23284c2a84e
|
Rust
|
izderadicka/asmuth_bloom_secret_sharing
|
/src/lib.rs
|
UTF-8
| 11,490
| 2.796875
| 3
|
[] |
no_license
|
extern crate num;
#[macro_use]
extern crate quick_error;
extern crate rand;
use num::bigint::{BigInt, BigUint, RandBigInt, ToBigInt, ToBigUint, ParseBigIntError};
use num::traits::{One, Zero, Num};
use rand::os::OsRng;
use std::string::ToString;
use std::str::FromStr;
mod ops;
mod zero_rle;
use ops::{miller_rabin_test, pow};
// Error cases for share creation, serialization and recovery; `quick_error`
// derives the `Display`/`Error` impls from this description.
// (Line comments are stripped by the lexer, so they are safe inside the macro.)
quick_error! {
    #[derive(Debug)]
    pub enum Error {
        // Fewer shares supplied than the recovery threshold requires.
        NotEnoughShares {}
        // Secret exceeds the configured `max_bits` limit.
        SecretTooLong {}
        // A serialized share line did not have the `n0:n:v` shape.
        StringFormatError {}
        // Parsed share lines disagree on the blinding modulus n0.
        N0NotSameInAllShares {}
        // The input string contained no share lines at all.
        NoSharesInString {}
        // A base-32 big-integer field failed to parse.
        NumberFormatError(err: ParseBigIntError ) {
            from()
            cause(err)
        }
        // The run-length-encoded modulus field failed to decode.
        ZeroRleDecodingErorr(err: zero_rle::Error) {
            from()
            cause(err)
        }
    }
}
// Shorthand for results of this crate's fallible operations.
type ShareResult<T> = Result<T, Error>;
/// Infinite iterator over probable primes above a given lower bound.
pub struct BigPrimesGenerator {
    // Last odd candidate tested; the next candidate is this + 2.
    last_odd: BigUint,
    // Number of Miller-Rabin rounds per candidate, derived from the
    // requested error level.
    tests: usize,
}
impl BigPrimesGenerator {
    /// Creates a new iterator over primes bigger than the given limit.
    ///
    /// As the primality test is probabilistic, a tolerable `error_level`
    /// (probability of a number not being prime) must be supplied; for
    /// example `error_level = 1e-9` means that for each generated number
    /// there is a one-in-a-billion chance that it is not prime.
    pub fn new(bigger_then: &BigUint, error_level: f64) -> Self {
        assert!(error_level < 1.0 && error_level > 0.0);
        assert!(*bigger_then > BigUint::zero());
        // Each Miller-Rabin round is wrong with probability at most 1/4, so
        // this many rounds drives the error below `error_level`.
        let tests = error_level.log(0.25).ceil() as usize;
        // Start on an odd number: `next()` only ever steps by 2.
        let start = if bigger_then % 2.to_biguint().unwrap() == BigUint::zero() {
            bigger_then - BigUint::one()
        } else {
            bigger_then.clone()
        };
        BigPrimesGenerator {
            last_odd: start,
            tests,
        }
    }
}
impl Iterator for BigPrimesGenerator {
    type Item = BigUint;
    // Steps through odd candidates until one passes the Miller-Rabin test;
    // this iterator never yields `None`.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            self.last_odd = &self.last_odd + 2.to_biguint().unwrap();
            if miller_rabin_test(&self.last_odd, self.tests) {
                return Some(self.last_odd.clone());
            }
        }
    }
}
/// Generator of shared secrets
pub struct AsmuthBloomShare {
    // Minimum number of shares needed to recover the secret.
    threshold: u16,
    // Upper bound on the secret size in bits.
    max_bits: u16,
    // Pairwise-coprime moduli, one per share, in ascending order.
    primes: Vec<BigUint>,
    // Extra modulus used to blind the secret before splitting.
    n0: BigUint,
    // OS-backed RNG for the random blinding factor.
    rng: OsRng,
}
/// Recovers the original secret from shared secrets
pub struct AsmuthBloomRecover {
    // Minimum number of shares required for recovery.
    threshold: u16,
}
/// Structure representing a shared secret
#[derive(Debug)]
pub struct ABSharedSecret {
    // Blinding modulus; must be identical across all shares of one secret.
    n0: BigUint,
    // One `(share value, modulus)` pair per participant.
    shares: Vec<(BigUint, BigUint)>,
}
/// Serializes shares as one `n0:n:v` line each (all values base-32; the two
/// moduli additionally run-length encoded).
///
/// Implemented as `Display` rather than a direct `ToString` impl — the
/// blanket `impl<T: Display> ToString for T` keeps `.to_string()` working
/// for existing callers while also making the type usable with `format!`.
impl std::fmt::Display for ABSharedSecret {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let n0 = self.n0.to_str_radix(32);
        for s in &self.shares {
            let v = s.0.to_str_radix(32);
            let n = s.1.to_str_radix(32);
            // Same `{}:{}:{}` + newline layout the original built by hand.
            writeln!(f, "{}:{}:{}", zero_rle::encode(&n0), zero_rle::encode(&n), v)?;
        }
        Ok(())
    }
}
impl FromStr for ABSharedSecret {
type Err = Error;
fn from_str(s:&str) -> Result<Self, Self::Err> {
let mut n0 = None;
let mut shares = vec![];
for line in s.lines() {
let parts: Vec<_> = line.split(":").take(3).map(|s| s.trim()).collect();
if parts.len() != 3 {
return Err(Error::StringFormatError)
}
let n0_tmp = BigUint::from_str_radix(&zero_rle::decode(parts[0])?, 32)?;
let v = BigUint::from_str_radix(parts[2], 32)?;
let n = BigUint::from_str_radix(&zero_rle::decode(parts[1])?, 32)?;
shares.push((v,n));
if n0.is_none() {
n0 = Some(n0_tmp);
} else {
if n0_tmp != *n0.as_ref().unwrap() {
return Err(Error::N0NotSameInAllShares)
}
}
}
if n0.is_none() {
Err(Error::NoSharesInString)
} else {
Ok(Self {
n0:n0.unwrap() ,
shares
})
}
}
}
/// Generates `n` probable primes, each larger than `2^min_n1_bits`.
fn gen_primes(min_n1_bits: u16, n: u16, error_level: f64) -> Vec<BigUint> {
    let lower_bound = pow(&2.to_biguint().unwrap(), &min_n1_bits.to_biguint().unwrap());
    let generator = BigPrimesGenerator::new(&lower_bound, error_level);
    generator.take(n as usize).collect()
}
/// Checks the Asmuth-Bloom condition for threshold `k`: `n0` times the
/// product of the `k - 1` largest moduli must stay below the product of the
/// `k` smallest moduli.
///
/// Takes `&[BigUint]` instead of `&Vec<BigUint>` (the idiomatic slice form;
/// existing `&Vec` callers still work via deref coercion).
fn test_primes(n0: &BigUint, primes: &[BigUint], k: u16) -> bool {
    let bi = primes.len() - (k as usize) + 1;
    let low = n0 * primes[bi..].iter().fold(BigUint::one(), |a, b| a * b);
    let high = primes[..k as usize]
        .iter()
        .fold(BigUint::one(), |a, b| a * b);
    low < high
}
impl AsmuthBloomShare {
    /// Creates new object for generating shared secrets
    /// max_bits - secret always has to be smaller than this limit, minimum is 8, above 800 can cause performance issues
    /// shares - total number of shares to generate
    /// threshold - minimum number of shares needed to recover original secret
    /// error_level - probability that one of generated moduli is not prime,
    /// use some small number like 1e-9 - see below
    ///
    /// Asmuth-Bloom scheme depends on the Chinese Remainder Theorem, which requires that
    /// moduli are pairwise coprime. To assure that we generate them as prime numbers, but
    /// since they are big we use the probabilistic Miller-Rabin test. A problem can arise only
    /// when there are two false primes with GCD bigger than 1.
    pub fn new(max_bits: u16, shares: u16, threshold: u16, error_level: f64) -> Self {
        assert!(max_bits>=8);
        assert!(shares >= threshold);
        // n0 is the first prime above 2^max_bits, so any secret of up to
        // max_bits bits is smaller than it.
        let min_prime_limit = pow(&2.to_biguint().unwrap(), &max_bits.to_biguint().unwrap());
        let n0 = BigPrimesGenerator::new(&min_prime_limit, error_level)
            .next()
            .unwrap();
        let mut primes;
        let mut min_n1_bits = max_bits + 1;
        let mut tries = 3;
        // Regenerate with longer moduli until the Asmuth-Bloom inequality
        // (checked by `test_primes`) holds; give up after 3 attempts.
        loop {
            primes = gen_primes(min_n1_bits, shares, error_level);
            if test_primes(&n0, &primes, threshold) {
                break;
            }
            min_n1_bits += 1;
            tries -= 1;
            if tries <= 0 {
                panic!("Cannot genereate random numbers satisfying AB condition ");
            }
        }
        assert!(n0<primes[0]);
        AsmuthBloomShare {
            threshold,
            max_bits,
            n0,
            primes,
            rng: OsRng::new().unwrap(),
        }
    }
    /// Creates shared secrets for `secret`; fails with `SecretTooLong`
    /// if the secret exceeds the configured `max_bits`.
    pub fn create_share(&mut self, secret: &[u8]) -> ShareResult<ABSharedSecret> {
        if secret.len() * 8 > self.max_bits as usize {
            return Err(Error::SecretTooLong);
        }
        let s = BigUint::from_bytes_be(secret);
        // Random blinding factor `a` is bounded so that s + a*n0 stays below
        // the product of the `threshold` smallest moduli.
        let max_limit = (self.primes[..self.threshold as usize]
            .iter()
            .fold(BigUint::one(), |a, b| a * b) - &s) / &self.n0;
        let a = self.rng.gen_biguint_range(&BigUint::one(), &max_limit);
        let mod_s = &s + &a * &self.n0;
        // Each share is the blinded secret reduced by that share's modulus.
        let shares: Vec<(BigUint, BigUint)> = self.primes
            .iter()
            .map(|n| (&mod_s % n, n.clone()))
            .collect();
        Ok(ABSharedSecret {
            n0: self.n0.clone(),
            shares,
        })
    }
}
/// Modular multiplicative inverse of `a` modulo `b`, computed with the
/// extended Euclidean algorithm (signed arithmetic internally, with the
/// result mapped back into `[0, b)`).
///
/// NOTE(review): assumes `gcd(a, b) == 1`; the moduli used here come from
/// the prime generator, which satisfies that — confirm before reusing
/// elsewhere, since `to_biguint().unwrap()` would panic otherwise.
fn mul_inv(a: &BigUint, b: &BigUint) -> BigUint {
    if *b == BigUint::one() {
        return BigUint::one();
    }
    let mut t;
    let mut q;
    // Bezout-coefficient accumulators.
    let mut x0 = BigInt::zero();
    let mut x1 = BigInt::one();
    // Work in BigInt: intermediate coefficients can go negative.
    let mut a = a.to_bigint().unwrap();
    let mut b = b.to_bigint().unwrap();
    let b0 = b.clone();
    while a > BigInt::one() {
        q = &a / &b;
        t = b.clone();
        b = a % b;
        a = t;
        t = x0.clone();
        x0 = x1 - q * x0;
        x1 = t;
    }
    // Normalize a negative coefficient into the canonical residue class.
    if x1 < BigInt::zero() {
        x1 = x1 + b0;
    }
    x1.to_biguint().unwrap()
}
/// Chinese Remainder Theorem: returns the unique `x` modulo the product of
/// all moduli such that `x ≡ r_i (mod m_i)` for every `(r_i, m_i)` in `c`.
fn chinese_remainder(c: &[(BigUint, BigUint)]) -> BigUint {
    let product = c.iter().map(|pair| &pair.1).fold(BigUint::one(), |acc, m| acc * m);
    let mut acc = BigUint::zero();
    for (residue, modulus) in c {
        let partial = &product / modulus;
        acc = &acc + residue * mul_inv(&partial, modulus) * partial;
    }
    acc % product
}
impl AsmuthBloomRecover {
    /// Create object to recover original secret
    /// threshold is minimum number of shared secrets required
    pub fn new(threshold: u16) -> Self {
        AsmuthBloomRecover { threshold }
    }
    /// recovers original secret from shared secrets
    ///
    /// Fails with `Error::NotEnoughShares` when fewer than `threshold`
    /// shares are present.
    pub fn recover_secret(&self, share: &ABSharedSecret) -> ShareResult<Vec<u8>> {
        if share.shares.len() < self.threshold as usize {
            return Err(Error::NotEnoughShares)
        }
        // CRT over the share moduli reconstructs the blinded value s + a*n0...
        let s0 = chinese_remainder(&share.shares);
        // ...and reducing mod n0 strips the blinding term, leaving the secret.
        let s = s0 % &share.n0;
        Ok(s.to_bytes_be())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_ab_creation() {
        let mut ab = AsmuthBloomShare::new(50, 5, 3, 1e-9);
        assert_eq!(ab.primes.len(), 5);
        {
            // Asmuth-Bloom inequality for 3-of-5:
            // n0 * (two largest moduli) < (three smallest moduli).
            let p = &ab.primes;
            assert!(&ab.n0 * &p[3] * &p[4] < &p[0] * &p[1] * &p[2]);
        }
        let mut share = ab.create_share(b"ABCD").unwrap();
        assert_eq!((&share).shares.len(), 5);
        let abr = AsmuthBloomRecover::new(3);
        // Drop two of the five shares; the remaining three meet the threshold.
        share.shares.remove(1);
        share.shares.remove(2);
        let s = abr.recover_secret(&share).unwrap();
        assert_eq!(&s, b"ABCD");
    }
    #[test]
    #[ignore]
    fn test_ab_creation_big() {
        // Heavy 800-bit case, hence #[ignore]; run explicitly when needed.
        let mut ab = AsmuthBloomShare::new(800, 7, 4, 1e-12);
        assert_eq!(ab.primes.len(), 7);
        let my_secret=b"This is very secret secret, top secret that no one should know ever forefer";
        let mut share = ab.create_share(my_secret).unwrap();
        assert_eq!((&share).shares.len(), 7);
        let abr = AsmuthBloomRecover::new(4);
        share.shares.remove(1);
        share.shares.remove(2);
        share.shares.remove(4);
        let s = abr.recover_secret(&share).unwrap();
        assert_eq!(&s[..], &my_secret[..]);
    }
    #[test]
    fn test_str_conversion() {
        // Round-trip: share -> string -> share -> string must be stable,
        // and the reparsed shares must still recover the secret.
        let mut ab = AsmuthBloomShare::new(64, 5, 3, 1e-9);
        let share = ab.create_share(b"password").unwrap();
        let s = share.to_string();
        assert!(s.len()>100);
        print!("{}", &s);
        let mut share2 = ABSharedSecret::from_str(&s).unwrap();
        let s2 = share2.to_string();
        print!("{}", &s2);
        assert_eq!(&s, &s2);
        let abr = AsmuthBloomRecover::new(3);
        share2.shares.remove(0);
        share2.shares.remove(3);
        let s = abr.recover_secret(&share2).unwrap();
        assert_eq!(&s, b"password");
    }
    #[test]
    fn test_primes_iterator() {
        // Helper lifting small literals into BigUint for comparison.
        fn conv<T: ToBigUint>(input: Vec<T>) -> Vec<BigUint> {
            input.into_iter().map(|x| x.to_biguint().unwrap()).collect()
        }
        let gen = BigPrimesGenerator::new(&100.to_biguint().unwrap(), 1e-9);
        let res: Vec<BigUint> = gen.take(10).collect();
        assert_eq!(
            res,
            conv(vec![101, 103, 107, 109, 113, 127, 131, 137, 139, 149])
        );
        let gen = BigPrimesGenerator::new(&961748940.to_biguint().unwrap(), 1e-9);
        let res: Vec<BigUint> = gen.take(16).collect();
        let v = vec![ 961748941, 961748947, 961748951, 961748969, 961748987, 961748993, 961749023,
            961749037, 961749043, 961749067, 961749079, 961749091, 961749097, 961749101, 961749121,
            961749157, ];
        assert_eq!(res, conv(v))
    }
}
| true
|
916d2230b2faff491b5701ceb3dc5f8b5c846c96
|
Rust
|
Naxaes/rust-wasm-test
|
/src/app.rs
|
UTF-8
| 8,917
| 2.53125
| 3
|
[] |
no_license
|
use std::sync::Arc;
use std::sync::Mutex;
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;
use wasm_bindgen::prelude::*;
use web_sys::{EventListener, HtmlCanvasElement};
use super::log;
// `KeyboardEvent.code` values recognized by the input handlers.
const KEY_FORWARD : &str = "KeyW";
const KEY_BACKWARDS : &str = "KeyS";
const KEY_LEFT : &str = "KeyA";
const KEY_RIGHT : &str = "KeyD";
const KEY_UP : &str = "Space";
const KEY_DOWN : &str = "ControlLeft";
const KEY_ROTATE_LEFT : &str = "KeyQ";
const KEY_ROTATE_RIGHT : &str = "KeyE";
const KEY_TOGGLE_MOUSE_LOCK : &str = "Escape";
// Indices into `AppState::key_pressed`; index 0 (no constant) is the
// catch-all slot that `get_key_index` returns for unmapped keys.
pub const KEY_FORWARD_INDEX : usize = 1;
pub const KEY_LEFT_INDEX : usize = 2;
pub const KEY_BACKWARDS_INDEX : usize = 3;
pub const KEY_RIGHT_INDEX : usize = 4;
pub const KEY_UP_INDEX : usize = 5;
pub const KEY_DOWN_INDEX : usize = 6;
pub const KEY_ROTATE_LEFT_INDEX : usize = 7;
pub const KEY_ROTATE_RIGHT_INDEX : usize = 8;
// Global application state. The `Mutex` serializes event handlers; the `Arc`
// lets readers snapshot the state with a cheap pointer clone.
lazy_static! {
    static ref APP_STATE: Mutex<Arc<AppState>> = Mutex::new(Arc::new(AppState::new()));
}
/// Returns a copy of the current global state.
pub fn get_current_state() -> AppState {
    let guard = APP_STATE.lock().unwrap();
    *guard.clone()
}
/// Replaces the global state wholesale.
pub fn set_current_state(state: AppState) {
    *APP_STATE.lock().unwrap() = Arc::new(state);
}
/// Advances the clock by `dt`, records the current canvas size, stores the
/// result as the new global state, and returns a snapshot of it.
pub fn get_state_of_frame_start(dt: f32, width: f32, height: f32) -> AppState {
    let mut guard = APP_STATE.lock().unwrap();
    let updated = AppState {
        canvas_height: height,
        canvas_width: width,
        time: guard.time + dt,
        ..*guard.clone()
    };
    *guard = Arc::new(updated);
    updated
}
/// Snapshot of the whole application/input state for one frame.
///
/// `Copy` keeps the functional-update pattern (`AppState { field, ..old }`)
/// used by the event handlers cheap.
#[derive(Debug, Copy, Clone)]
pub struct AppState {
    pub canvas_height: f32,
    pub canvas_width: f32,
    /// Whether the pointer should be captured (toggled via Escape).
    pub mouse_locked: bool,
    pub mouse_down: bool,
    /// Cursor position; y is flipped so the origin is at the canvas bottom.
    pub mouse_x: f32,
    pub mouse_y: f32,
    /// Cursor movement recorded by the latest mouse-move event.
    pub delta_mouse_x: f32,
    pub delta_mouse_y: f32,
    /// Accumulated time, in the same unit as the `dt` passed to
    /// `get_state_of_frame_start`.
    pub time: f32,
    /// Held state per key, indexed by the `KEY_*_INDEX` constants
    /// (slot 0 collects unmapped keys).
    pub key_pressed: [bool; 9]
}
impl AppState {
    /// Fresh state: zeroed sizes/time, `(-1, -1)` mouse position (no event
    /// observed yet), pointer lock on, no keys held.
    pub fn new() -> Self {
        Self {
            canvas_height: 0.,
            canvas_width: 0.,
            mouse_locked: true,
            mouse_down: false,
            mouse_x: -1.,
            mouse_y: -1.,
            delta_mouse_x: 0.,
            delta_mouse_y: 0.,
            time: 0.,
            // Idiomatic array-repeat instead of spelling out nine `false`s;
            // one slot per KEY_*_INDEX plus the catch-all slot 0.
            key_pressed: [false; 9],
        }
    }
}
/// Records a mouse-button press along with the (y-flipped) cursor position.
pub fn update_mouse_down(event: web_sys::MouseEvent) {
    let mut state = APP_STATE.lock().unwrap();
    let flipped_y = state.canvas_height - event.client_y() as f32;
    *state = Arc::new(AppState {
        mouse_down: true,
        mouse_x: event.client_x() as f32,
        mouse_y: flipped_y,
        ..*state.clone()
    });
}
/// Records a mouse-button release along with the (y-flipped) cursor position.
pub fn update_mouse_up(event: web_sys::MouseEvent) {
    let mut state = APP_STATE.lock().unwrap();
    let flipped_y = state.canvas_height - event.client_y() as f32;
    *state = Arc::new(AppState {
        mouse_down: false,
        mouse_x: event.client_x() as f32,
        mouse_y: flipped_y,
        ..*state.clone()
    });
}
/// Tracks cursor motion; y is flipped so the origin sits at the canvas bottom.
///
/// Bug fix: `delta_mouse_x` previously subtracted `event.client_y()` from the
/// stored x coordinate, mixing axes. It now uses `client_x`, mirroring how
/// `delta_mouse_y` is derived from the y coordinate.
pub fn update_mouse_move(event: web_sys::MouseEvent) {
    let mut data = APP_STATE.lock().unwrap();
    let inverted_y = data.canvas_height - event.client_y() as f32;
    *data = Arc::new(AppState {
        mouse_x: event.client_x() as f32,
        mouse_y: inverted_y,
        delta_mouse_x: data.mouse_x - event.client_x() as f32,
        delta_mouse_y: data.mouse_y - inverted_y,
        ..*data.clone()
    });
}
/// Maps a `KeyboardEvent.code` to its slot in `AppState::key_pressed`;
/// unmapped codes land in the catch-all slot 0.
fn get_key_index(code: &str) -> usize {
    match code {
        KEY_FORWARD => KEY_FORWARD_INDEX,
        KEY_LEFT => KEY_LEFT_INDEX,
        KEY_BACKWARDS => KEY_BACKWARDS_INDEX,
        KEY_RIGHT => KEY_RIGHT_INDEX,
        KEY_UP => KEY_UP_INDEX,
        KEY_DOWN => KEY_DOWN_INDEX,
        KEY_ROTATE_LEFT => KEY_ROTATE_LEFT_INDEX,
        KEY_ROTATE_RIGHT => KEY_ROTATE_RIGHT_INDEX,
        _ => 0,
    }
}
/// Marks the key for this event as held in the global state.
pub fn update_key_down(event: web_sys::KeyboardEvent) {
    let slot = get_key_index(&event.code());
    let mut state = APP_STATE.lock().unwrap();
    // `key_pressed` is a plain array, so this copy-out/modify/copy-in is cheap.
    let mut keys = state.key_pressed;
    keys[slot] = true;
    *state = Arc::new(AppState {
        key_pressed: keys,
        ..*state.clone()
    });
}
/// Handles key release. Escape toggles pointer lock; every other key has its
/// slot in `key_pressed` cleared.
pub fn update_key_up(event: web_sys::KeyboardEvent) {
    let code = event.code();
    if code == KEY_TOGGLE_MOUSE_LOCK {
        let mut data = APP_STATE.lock().unwrap();
        let mouse_locked = !data.mouse_locked;
        *data = Arc::new(AppState {
            mouse_locked,
            ..*data.clone()
        });
        // Mirror the new flag in the browser's pointer-lock state.
        let window = web_sys::window().expect("no global `window` exists");
        let document = window.document().expect("should have a document on window");
        let body = document.body().expect("document should have a body");
        if mouse_locked {
            body.request_pointer_lock();
        } else {
            document.exit_pointer_lock();
        }
        return;
    }
    // NOTE(review): `code` is already bound above; this second
    // `event.code()` call could simply reuse it.
    let index = get_key_index(&event.code());
    let mut data = APP_STATE.lock().unwrap();
    let mut key_pressed = data.key_pressed;
    key_pressed[index] = false;
    *data = Arc::new(AppState {
        key_pressed,
        ..*data.clone()
    });
}
// ---- ATTACHMENTS ----
// Each helper registers a plain `fn` pointer as a DOM event listener on the
// canvas. `Closure::forget` deliberately leaks the closure so the browser can
// keep invoking it for the lifetime of the page.
pub fn attach_mouse_down_callback(canvas: &HtmlCanvasElement, callback: fn(web_sys::MouseEvent)) -> Result<(), JsValue> {
    let handler = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
    canvas.add_event_listener_with_callback("mousedown", handler.as_ref().unchecked_ref())?;
    handler.forget();
    Ok(())
}
pub fn attach_mouse_up_callback(canvas: &HtmlCanvasElement, callback: fn(event: web_sys::MouseEvent)) -> Result<(), JsValue> {
    let handler = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
    canvas.add_event_listener_with_callback("mouseup", handler.as_ref().unchecked_ref())?;
    handler.forget();
    Ok(())
}
pub fn attach_mouse_move_callback(canvas: &HtmlCanvasElement, callback: fn(event: web_sys::MouseEvent)) -> Result<(), JsValue> {
    let handler = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
    canvas.add_event_listener_with_callback("mousemove", handler.as_ref().unchecked_ref())?;
    handler.forget();
    Ok(())
}
// https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.KeyboardEvent.html
pub fn attach_key_down_callback(canvas: &HtmlCanvasElement, callback: fn(event: web_sys::KeyboardEvent)) -> Result<(), JsValue> {
    let handler = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
    canvas.add_event_listener_with_callback("keydown", handler.as_ref().unchecked_ref())?;
    handler.forget();
    Ok(())
}
pub fn attach_key_up_callback(canvas: &HtmlCanvasElement, callback: fn(event: web_sys::KeyboardEvent)) -> Result<(), JsValue> {
    let handler = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
    canvas.add_event_listener_with_callback("keyup", handler.as_ref().unchecked_ref())?;
    handler.forget();
    Ok(())
}
//
// pub fn attach_mouse_up_callback(canvas: &HtmlCanvasElement) -> Result<(), JsValue> {
// let callback = move |event: web_sys::MouseEvent| {
// update_mouse_pressed(event.client_x() as f32, event.client_y() as f32, false);
// };
//
// let callback = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
// canvas.add_event_listener_with_callback("mouseup", callback.as_ref().unchecked_ref())?;
// callback.forget();
//
// Ok(())
// }
//
// pub fn attach_mouse_move_callback(canvas: &HtmlCanvasElement) -> Result<(), JsValue> {
// let callback = move |event: web_sys::MouseEvent| {
// update_mouse_position(event.client_x() as f32, event.client_y() as f32);
// };
//
// let callback = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
// canvas.add_event_listener_with_callback("mousemove", callback.as_ref().unchecked_ref())?;
// callback.forget();
//
// Ok(())
// }
//
// // https://rustwasm.github.io/wasm-bindgen/api/web_sys/struct.KeyboardEvent.html
// pub fn attach_key_down_callback(canvas: &HtmlCanvasElement) -> Result<(), JsValue> {
// let callback = move |event: web_sys::KeyboardEvent| {
// update_key_press(&event.code(), true);
// };
//
// let callback = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
// canvas.add_event_listener_with_callback("keydown", callback.as_ref().unchecked_ref())?;
// callback.forget();
//
// log("Attached key down callback.");
// Ok(())
// }
//
//
// pub fn attach_key_up_callback(canvas: &HtmlCanvasElement) -> Result<(), JsValue> {
// let callback = move |event: web_sys::KeyboardEvent| {
// update_key_press(&event.code(), false);
// };
//
// let callback = Closure::wrap(Box::new(callback) as Box<dyn FnMut(_)>);
// canvas.add_event_listener_with_callback("keyup", callback.as_ref().unchecked_ref())?;
// callback.forget();
//
// log("Attached key up callback.");
// Ok(())
// }
//
//
| true
|
0a104f631f6aa18c6aa0722fe25e987c3bbe8b84
|
Rust
|
matoushybl/sm4
|
/Software/embedded/firmware/src/blocks/leds.rs
|
UTF-8
| 912
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
use crate::board::definitions::{ErrorLED, StatusLED};
use blinq::Blinq;
// Pair of board LEDs driven through `blinq` pattern queues
// (queue depth set by `blinq::consts::U8`).
pub struct LEDs {
    status_led: Blinq<blinq::consts::U8, StatusLED>,
    error_led: Blinq<blinq::consts::U8, ErrorLED>,
}
impl LEDs {
    /// Wraps both LED pins in blink queues, starting in the "off" state.
    pub fn new(status_led: StatusLED, error_led: ErrorLED) -> Self {
        Self {
            status_led: Blinq::new(status_led, false),
            error_led: Blinq::new(error_led, false),
        }
    }
    /// Advances both blink queues by one step.
    pub fn tick(&mut self) {
        self.status_led.step();
        self.error_led.step();
    }
    /// Queues a short blink on the status LED to signal a sync event.
    // NOTE(review): identical pattern to `heartbeat` — confirm whether the
    // two events are meant to be visually indistinguishable.
    pub fn signalize_sync(&mut self) {
        self.status_led
            .enqueue(blinq::patterns::blinks::SHORT_ON_OFF);
    }
    /// Queues a short blink on the status LED as a liveness heartbeat.
    pub fn heartbeat(&mut self) {
        self.status_led
            .enqueue(blinq::patterns::blinks::SHORT_ON_OFF);
    }
    /// Queues a medium blink on the error LED to signal a CAN error.
    pub fn signalize_can_error(&mut self) {
        self.error_led
            .enqueue(blinq::patterns::blinks::MEDIUM_OFF_ON);
    }
}
| true
|
d49dd7e6b3066aed9bc038e8acd0ce3672616bb4
|
Rust
|
Bingwen-Hu/hackaway
|
/books/TRPL/cp15_smart_pointer/derefs.rs
|
UTF-8
| 1,107
| 3.90625
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
use std::ops::Deref;
// Toy MP3 container used to demonstrate the `Deref` trait.
#[derive(Debug)]
struct Mp3 {
    // Raw audio bytes; this is what `*mp3` derefs to (see `Deref` below).
    audio: Vec<u8>,
    artist: Option<String>,
    title: Option<String>,
}
// deref -> *
impl Deref for Mp3 {
    type Target = Vec<u8>; // associated type covered in chapter19
    // Note that this returns a reference.
    // deref enables the `*` operator, like __len__ enables len() in Python
    fn deref(&self) -> &Vec<u8> {
        &self.audio
    }
}
// deref coercion, good to know
// When the Deref trait is defined for the types involved, Rust will analyze
// the types and use Deref::deref as many times as necessary to get a reference
// to match the parameter’s type. The number of times that Deref::deref needs
// to be inserted is resolved at compile time, so there is no runtime penalty
// for taking advantage of deref coercion!
fn main() {
    let my_favorite_song = Mp3 {
        audio: vec![1, 2, 3],
        artist: Some(String::from("Nirvana")),
        title: Some(String::from("Smells like Teen Spirit")),
    };
    // actually, Rust runs this code:
    // *(my_favorite_song.deref())
    assert_eq!(vec![1,2,3], *my_favorite_song);
}
| true
|
bed354bc0fd26d59212c1c24eecb0c3ad8e28103
|
Rust
|
bytecodealliance/wasmtime
|
/cranelift/codegen/src/write.rs
|
UTF-8
| 20,396
| 3.03125
| 3
|
[
"LLVM-exception",
"Apache-2.0"
] |
permissive
|
//! Converting Cranelift IR to text.
//!
//! The `write` module provides the `write_function` function which converts an IR `Function` to an
//! equivalent textual form. This textual form can be read back by the `cranelift-reader` crate.
use crate::entity::SecondaryMap;
use crate::ir::entities::AnyEntity;
use crate::ir::{Block, DataFlowGraph, Function, Inst, SigRef, Type, Value, ValueDef};
use crate::packed_option::ReservedValue;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::fmt::{self, Write};
/// A `FuncWriter` used to decorate functions during printing.
pub trait FuncWriter {
    /// Write the basic block header for the current function.
    fn write_block_header(
        &mut self,
        w: &mut dyn Write,
        func: &Function,
        block: Block,
        indent: usize,
    ) -> fmt::Result;
    /// Write the given `inst` to `w`.
    fn write_instruction(
        &mut self,
        w: &mut dyn Write,
        func: &Function,
        aliases: &SecondaryMap<Value, Vec<Value>>,
        inst: Inst,
        indent: usize,
    ) -> fmt::Result;
    /// Write the preamble to `w`. By default, this uses `write_entity_definition`.
    ///
    /// Returns `true` if anything was written.
    fn write_preamble(&mut self, w: &mut dyn Write, func: &Function) -> Result<bool, fmt::Error> {
        self.super_preamble(w, func)
    }
    /// Default impl of `write_preamble`
    fn super_preamble(&mut self, w: &mut dyn Write, func: &Function) -> Result<bool, fmt::Error> {
        // Tracks whether at least one entity was printed so the caller can
        // decide whether a separating blank line is needed.
        let mut any = false;
        for (ss, slot) in func.dynamic_stack_slots.iter() {
            any = true;
            self.write_entity_definition(w, func, ss.into(), slot)?;
        }
        for (ss, slot) in func.sized_stack_slots.iter() {
            any = true;
            self.write_entity_definition(w, func, ss.into(), slot)?;
        }
        for (gv, gv_data) in &func.global_values {
            any = true;
            self.write_entity_definition(w, func, gv.into(), gv_data)?;
        }
        for (table, table_data) in &func.tables {
            // Only print tables whose index type has actually been set.
            if !table_data.index_type.is_invalid() {
                any = true;
                self.write_entity_definition(w, func, table.into(), table_data)?;
            }
        }
        // Write out all signatures before functions since function declarations can refer to
        // signatures.
        for (sig, sig_data) in &func.dfg.signatures {
            any = true;
            self.write_entity_definition(w, func, sig.into(), &sig_data)?;
        }
        for (fnref, ext_func) in &func.dfg.ext_funcs {
            // Skip declarations still carrying the reserved placeholder signature.
            if ext_func.signature != SigRef::reserved_value() {
                any = true;
                self.write_entity_definition(
                    w,
                    func,
                    fnref.into(),
                    &ext_func.display(Some(&func.params)),
                )?;
            }
        }
        for (&cref, cval) in func.dfg.constants.iter() {
            any = true;
            self.write_entity_definition(w, func, cref.into(), cval)?;
        }
        if let Some(limit) = func.stack_limit {
            any = true;
            self.write_entity_definition(w, func, AnyEntity::StackLimit, &limit)?;
        }
        Ok(any)
    }
    /// Write an entity definition defined in the preamble to `w`.
    fn write_entity_definition(
        &mut self,
        w: &mut dyn Write,
        func: &Function,
        entity: AnyEntity,
        value: &dyn fmt::Display,
    ) -> fmt::Result {
        self.super_entity_definition(w, func, entity, value)
    }
    /// Default impl of `write_entity_definition`
    #[allow(unused_variables)]
    fn super_entity_definition(
        &mut self,
        w: &mut dyn Write,
        func: &Function,
        entity: AnyEntity,
        value: &dyn fmt::Display,
    ) -> fmt::Result {
        writeln!(w, " {} = {}", entity, value)
    }
}
/// A `FuncWriter` that doesn't decorate the function.
pub struct PlainWriter;
// `PlainWriter` simply forwards to the free functions, adding no decoration.
impl FuncWriter for PlainWriter {
    fn write_instruction(
        &mut self,
        w: &mut dyn Write,
        func: &Function,
        aliases: &SecondaryMap<Value, Vec<Value>>,
        inst: Inst,
        indent: usize,
    ) -> fmt::Result {
        write_instruction(w, func, aliases, inst, indent)
    }
    fn write_block_header(
        &mut self,
        w: &mut dyn Write,
        func: &Function,
        block: Block,
        indent: usize,
    ) -> fmt::Result {
        write_block_header(w, func, block, indent)
    }
}
/// Write `func` to `w` as equivalent text.
// NOTE(review): the old second doc line ("Use `isa` to emit ISA-dependent
// annotations") referred to a parameter this function no longer takes.
pub fn write_function(w: &mut dyn Write, func: &Function) -> fmt::Result {
    decorate_function(&mut PlainWriter, w, func)
}
/// Create a reverse-alias map from a value to all aliases having that value as
/// a direct target.
fn alias_map(func: &Function) -> SecondaryMap<Value, Vec<Value>> {
    let mut aliases = SecondaryMap::<_, Vec<_>>::new();
    // `value_alias_dest_for_serialization` yields the immediate target of an
    // alias, so each alias is recorded on its direct target only.
    let alias_pairs = func
        .dfg
        .values()
        .filter_map(|v| func.dfg.value_alias_dest_for_serialization(v).map(|dest| (dest, v)));
    for (dest, alias) in alias_pairs {
        aliases[dest].push(alias);
    }
    aliases
}
/// Writes `func` to `w` as text.
/// write_function_plain is passed as 'closure' to print instructions as text.
/// pretty_function_error is passed as 'closure' to add error decoration.
pub fn decorate_function<FW: FuncWriter>(
    func_w: &mut FW,
    w: &mut dyn Write,
    func: &Function,
) -> fmt::Result {
    write!(w, "function ")?;
    write_spec(w, func)?;
    writeln!(w, " {{")?;
    let aliases = alias_map(func);
    // `any` tracks whether anything was printed yet, so a blank line is
    // inserted between the preamble and the first block, and between blocks.
    let mut any = func_w.write_preamble(w, func)?;
    for block in &func.layout {
        if any {
            writeln!(w)?;
        }
        decorate_block(func_w, w, func, &aliases, block)?;
        any = true;
    }
    writeln!(w, "}}")
}
//----------------------------------------------------------------------
//
// Function spec.
/// Writes the `name(signature)` part of the `function` line.
fn write_spec(w: &mut dyn Write, func: &Function) -> fmt::Result {
    write!(w, "{}{}", func.name, func.signature)
}
//----------------------------------------------------------------------
//
// Basic blocks
/// Writes one block parameter as `value: type`.
fn write_arg(w: &mut dyn Write, func: &Function, arg: Value) -> fmt::Result {
    write!(w, "{}: {}", arg, func.dfg.value_type(arg))
}
/// Write out the basic block header, outdented:
///
///    block1:
///    block1(v1: i32):
///    block10(v4: f64, v5: b1):
///
pub fn write_block_header(
    w: &mut dyn Write,
    func: &Function,
    block: Block,
    indent: usize,
) -> fmt::Result {
    // Cold blocks get a trailing " cold" marker before the colon.
    let cold = if func.layout.is_cold(block) {
        " cold"
    } else {
        ""
    };
    // The `indent` is the instruction indentation. block headers are 4 spaces out from that.
    write!(w, "{1:0$}{2}", indent - 4, "", block)?;
    // First parameter opens the parenthesized list; no parameters means
    // the header ends right after the block name.
    let mut args = func.dfg.block_params(block).iter().cloned();
    match args.next() {
        None => return writeln!(w, "{}:", cold),
        Some(arg) => {
            write!(w, "(")?;
            write_arg(w, func, arg)?;
        }
    }
    // Remaining arguments.
    for arg in args {
        write!(w, ", ")?;
        write_arg(w, func, arg)?;
    }
    writeln!(w, "){}:", cold)
}
/// Prints one block: its header, aliases of its parameters, then each
/// instruction via the `FuncWriter`.
fn decorate_block<FW: FuncWriter>(
    func_w: &mut FW,
    w: &mut dyn Write,
    func: &Function,
    aliases: &SecondaryMap<Value, Vec<Value>>,
    block: Block,
) -> fmt::Result {
    // Indent all instructions if any srclocs are present.
    let indent = if func.rel_srclocs().is_empty() { 4 } else { 36 };
    func_w.write_block_header(w, func, block, indent)?;
    for a in func.dfg.block_params(block).iter().cloned() {
        write_value_aliases(w, aliases, a, indent)?;
    }
    for inst in func.layout.block_insts(block) {
        func_w.write_instruction(w, func, aliases, inst, indent)?;
    }
    Ok(())
}
//----------------------------------------------------------------------
//
// Instructions
// Should `inst` be printed with a type suffix?
//
// Polymorphic instructions may need a suffix indicating the value of the controlling type variable
// if it can't be trivially inferred.
//
/// Returns the controlling type variable to print as an opcode suffix, or
/// `None` when no suffix is needed (non-polymorphic, or inferable from the
/// designated operand).
fn type_suffix(func: &Function, inst: Inst) -> Option<Type> {
    let inst_data = &func.dfg.insts[inst];
    let constraints = inst_data.opcode().constraints();
    if !constraints.is_polymorphic() {
        return None;
    }
    // If the controlling type variable can be inferred from the type of the designated value input
    // operand, we don't need the type suffix.
    if constraints.use_typevar_operand() {
        let ctrl_var = inst_data.typevar_operand(&func.dfg.value_lists).unwrap();
        let def_block = match func.dfg.value_def(ctrl_var) {
            ValueDef::Result(instr, _) => func.layout.inst_block(instr),
            ValueDef::Param(block, _) => Some(block),
            ValueDef::Union(..) => None,
        };
        if def_block.is_some() && def_block == func.layout.inst_block(inst) {
            return None;
        }
    }
    let rtype = func.dfg.ctrl_typevar(inst);
    assert!(
        !rtype.is_invalid(),
        "Polymorphic instruction must produce a result"
    );
    Some(rtype)
}
/// Write out any aliases to the given target, including indirect aliases.
fn write_value_aliases(
    w: &mut dyn Write,
    aliases: &SecondaryMap<Value, Vec<Value>>,
    target: Value,
    indent: usize,
) -> fmt::Result {
    // Depth-first walk: every alias of `target` may itself be aliased.
    let mut pending = vec![target];
    while let Some(current) = pending.pop() {
        for &alias in &aliases[current] {
            writeln!(w, "{1:0$}{2} -> {3}", indent, "", alias, current)?;
            pending.push(alias);
        }
    }
    Ok(())
}
/// Writes a single instruction line: optional srcloc prefix, result values,
/// opcode (possibly with a type suffix), operands, then alias lines for the
/// instruction's results.
fn write_instruction(
    w: &mut dyn Write,
    func: &Function,
    aliases: &SecondaryMap<Value, Vec<Value>>,
    inst: Inst,
    indent: usize,
) -> fmt::Result {
    // Prefix containing source location, encoding, and value locations.
    let mut s = String::with_capacity(16);
    // Source location goes first.
    let srcloc = func.srcloc(inst);
    if !srcloc.is_default() {
        write!(s, "{} ", srcloc)?;
    }
    // Write out prefix and indent the instruction.
    write!(w, "{1:0$}", indent, s)?;
    // Write out the result values, if any.
    let mut has_results = false;
    for r in func.dfg.inst_results(inst) {
        if !has_results {
            has_results = true;
            write!(w, "{}", r)?;
        } else {
            write!(w, ", {}", r)?;
        }
    }
    if has_results {
        write!(w, " = ")?;
    }
    // Then the opcode, possibly with a '.type' suffix.
    let opcode = func.dfg.insts[inst].opcode();
    match type_suffix(func, inst) {
        Some(suf) => write!(w, "{}.{}", opcode, suf)?,
        None => write!(w, "{}", opcode)?,
    }
    write_operands(w, &func.dfg, inst)?;
    writeln!(w)?;
    // Value aliases come out on lines after the instruction defining the referent.
    for r in func.dfg.inst_results(inst) {
        write_value_aliases(w, aliases, *r, indent)?;
    }
    Ok(())
}
/// Write the operands of `inst` to `w` with a prepended space.
pub fn write_operands(w: &mut dyn Write, dfg: &DataFlowGraph, inst: Inst) -> fmt::Result {
    let pool = &dfg.value_lists;
    let jump_tables = &dfg.jump_tables;
    use crate::ir::instructions::InstructionData::*;
    // One arm per `InstructionData` format; each arm prints that format's
    // operands in textual order.
    match dfg.insts[inst] {
        AtomicRmw { op, args, .. } => write!(w, " {} {}, {}", op, args[0], args[1]),
        AtomicCas { args, .. } => write!(w, " {}, {}, {}", args[0], args[1], args[2]),
        LoadNoOffset { flags, arg, .. } => write!(w, "{} {}", flags, arg),
        StoreNoOffset { flags, args, .. } => write!(w, "{} {}, {}", flags, args[0], args[1]),
        Unary { arg, .. } => write!(w, " {}", arg),
        UnaryImm { imm, .. } => write!(w, " {}", imm),
        UnaryIeee32 { imm, .. } => write!(w, " {}", imm),
        UnaryIeee64 { imm, .. } => write!(w, " {}", imm),
        UnaryGlobalValue { global_value, .. } => write!(w, " {}", global_value),
        UnaryConst {
            constant_handle, ..
        } => write!(w, " {}", constant_handle),
        Binary { args, .. } => write!(w, " {}, {}", args[0], args[1]),
        BinaryImm8 { arg, imm, .. } => write!(w, " {}, {}", arg, imm),
        BinaryImm64 { arg, imm, .. } => write!(w, " {}, {}", arg, imm),
        Ternary { args, .. } => write!(w, " {}, {}, {}", args[0], args[1], args[2]),
        MultiAry { ref args, .. } => {
            if args.is_empty() {
                write!(w, "")
            } else {
                write!(w, " {}", DisplayValues(args.as_slice(pool)))
            }
        }
        NullAry { .. } => write!(w, " "),
        TernaryImm8 { imm, args, .. } => write!(w, " {}, {}, {}", args[0], args[1], imm),
        Shuffle { imm, args, .. } => {
            // Shuffle masks live in the immediates table, not inline.
            let data = dfg.immediates.get(imm).expect(
                "Expected the shuffle mask to already be inserted into the immediates table",
            );
            write!(w, " {}, {}, {}", args[0], args[1], data)
        }
        IntCompare { cond, args, .. } => write!(w, " {} {}, {}", cond, args[0], args[1]),
        IntCompareImm { cond, arg, imm, .. } => write!(w, " {} {}, {}", cond, arg, imm),
        IntAddTrap { args, code, .. } => write!(w, " {}, {}, {}", args[0], args[1], code),
        FloatCompare { cond, args, .. } => write!(w, " {} {}, {}", cond, args[0], args[1]),
        Jump { destination, .. } => {
            write!(w, " {}", destination.display(pool))
        }
        Brif {
            arg,
            blocks: [block_then, block_else],
            ..
        } => {
            write!(w, " {}, {}", arg, block_then.display(pool))?;
            write!(w, ", {}", block_else.display(pool))
        }
        BranchTable { arg, table, .. } => {
            write!(w, " {}, {}", arg, jump_tables[table].display(pool))
        }
        Call {
            func_ref, ref args, ..
        } => write!(w, " {}({})", func_ref, DisplayValues(args.as_slice(pool))),
        CallIndirect {
            sig_ref, ref args, ..
        } => {
            // First arg is the callee pointer; the rest are call arguments.
            let args = args.as_slice(pool);
            write!(
                w,
                " {}, {}({})",
                sig_ref,
                args[0],
                DisplayValues(&args[1..])
            )
        }
        FuncAddr { func_ref, .. } => write!(w, " {}", func_ref),
        StackLoad {
            stack_slot, offset, ..
        } => write!(w, " {}{}", stack_slot, offset),
        StackStore {
            arg,
            stack_slot,
            offset,
            ..
        } => write!(w, " {}, {}{}", arg, stack_slot, offset),
        DynamicStackLoad {
            dynamic_stack_slot, ..
        } => write!(w, " {}", dynamic_stack_slot),
        DynamicStackStore {
            arg,
            dynamic_stack_slot,
            ..
        } => write!(w, " {}, {}", arg, dynamic_stack_slot),
        TableAddr {
            table, arg, offset, ..
        } => {
            // Zero offsets are elided from the textual form.
            if i32::from(offset) == 0 {
                write!(w, " {}, {}", table, arg)
            } else {
                write!(w, " {}, {}{}", table, arg, offset)
            }
        }
        Load {
            flags, arg, offset, ..
        } => write!(w, "{} {}{}", flags, arg, offset),
        Store {
            flags,
            args,
            offset,
            ..
        } => write!(w, "{} {}, {}{}", flags, args[0], args[1], offset),
        Trap { code, .. } => write!(w, " {}", code),
        CondTrap { arg, code, .. } => write!(w, " {}, {}", arg, code),
    }?;
    // Append a trailing `; vN = <imm>` comment for every operand that is the
    // result of a constant-producing instruction.
    let mut sep = " ; ";
    for arg in dfg.inst_values(inst) {
        if let ValueDef::Result(src, _) = dfg.value_def(arg) {
            let imm = match dfg.insts[src] {
                UnaryImm { imm, .. } => imm.to_string(),
                UnaryIeee32 { imm, .. } => imm.to_string(),
                UnaryIeee64 { imm, .. } => imm.to_string(),
                UnaryConst {
                    constant_handle, ..
                } => constant_handle.to_string(),
                _ => continue,
            };
            write!(w, "{}{} = {}", sep, arg, imm)?;
            sep = ", ";
        }
    }
    Ok(())
}
/// Displayable slice of values.
struct DisplayValues<'a>(&'a [Value]);
impl<'a> fmt::Display for DisplayValues<'a> {
    /// Renders the values as a comma-separated list with no trailing comma.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut first = true;
        for val in self.0 {
            if first {
                first = false;
            } else {
                write!(f, ", ")?;
            }
            write!(f, "{}", val)?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use crate::cursor::{Cursor, CursorPosition, FuncCursor};
    use crate::ir::types;
    use crate::ir::{Function, InstBuilder, StackSlotData, StackSlotKind, UserFuncName};
    use alloc::string::ToString;
    #[test]
    fn basic() {
        // Build a function piece by piece, re-checking the full text after
        // each addition.
        let mut f = Function::new();
        assert_eq!(f.to_string(), "function u0:0() fast {\n}\n");
        f.name = UserFuncName::testcase("foo");
        assert_eq!(f.to_string(), "function %foo() fast {\n}\n");
        f.create_sized_stack_slot(StackSlotData::new(StackSlotKind::ExplicitSlot, 4));
        assert_eq!(
            f.to_string(),
            "function %foo() fast {\n ss0 = explicit_slot 4\n}\n"
        );
        let block = f.dfg.make_block();
        f.layout.append_block(block);
        assert_eq!(
            f.to_string(),
            "function %foo() fast {\n ss0 = explicit_slot 4\n\nblock0:\n}\n"
        );
        f.dfg.append_block_param(block, types::I8);
        assert_eq!(
            f.to_string(),
            "function %foo() fast {\n ss0 = explicit_slot 4\n\nblock0(v0: i8):\n}\n"
        );
        f.dfg.append_block_param(block, types::F32.by(4).unwrap());
        assert_eq!(
            f.to_string(),
            "function %foo() fast {\n ss0 = explicit_slot 4\n\nblock0(v0: i8, v1: f32x4):\n}\n"
        );
        {
            let mut cursor = FuncCursor::new(&mut f);
            cursor.set_position(CursorPosition::After(block));
            cursor.ins().return_(&[])
        };
        assert_eq!(
            f.to_string(),
            "function %foo() fast {\n ss0 = explicit_slot 4\n\nblock0(v0: i8, v1: f32x4):\n return\n}\n"
        );
    }
    #[test]
    fn aliases() {
        use crate::ir::InstBuilder;
        let mut func = Function::new();
        {
            let block0 = func.dfg.make_block();
            let mut pos = FuncCursor::new(&mut func);
            pos.insert_block(block0);
            // make some detached values for change_to_alias
            let v0 = pos.func.dfg.append_block_param(block0, types::I32);
            let v1 = pos.func.dfg.append_block_param(block0, types::I32);
            let v2 = pos.func.dfg.append_block_param(block0, types::I32);
            pos.func.dfg.detach_block_params(block0);
            // alias to a param--will be printed at beginning of block defining param
            let v3 = pos.func.dfg.append_block_param(block0, types::I32);
            pos.func.dfg.change_to_alias(v0, v3);
            // alias to an alias--should print attached to alias, not ultimate target
            pos.func.dfg.make_value_alias_for_serialization(v0, v2); // v0 <- v2
            // alias to a result--will be printed after instruction producing result
            let _dummy0 = pos.ins().iconst(types::I32, 42);
            let v4 = pos.ins().iadd(v0, v0);
            pos.func.dfg.change_to_alias(v1, v4);
            let _dummy1 = pos.ins().iconst(types::I32, 23);
            let _v7 = pos.ins().iadd(v1, v1);
        }
        assert_eq!(
            func.to_string(),
            "function u0:0() fast {\nblock0(v3: i32):\n v0 -> v3\n v2 -> v0\n v4 = iconst.i32 42\n v5 = iadd v0, v0\n v1 -> v5\n v6 = iconst.i32 23\n v7 = iadd v1, v1\n}\n"
        );
    }
    #[test]
    fn cold_blocks() {
        // Cold blocks must print with a trailing " cold" marker.
        let mut func = Function::new();
        {
            let mut pos = FuncCursor::new(&mut func);
            let block0 = pos.func.dfg.make_block();
            pos.insert_block(block0);
            pos.func.layout.set_cold(block0);
            let block1 = pos.func.dfg.make_block();
            pos.insert_block(block1);
            pos.func.dfg.append_block_param(block1, types::I32);
            pos.func.layout.set_cold(block1);
        }
        assert_eq!(
            func.to_string(),
            "function u0:0() fast {\nblock0 cold:\n\nblock1(v0: i32) cold:\n}\n"
        );
    }
}
| true
|
e7f9681058f800636e9d0df670c5731c8ada6512
|
Rust
|
darkdarkfruit/url_encoded_data
|
/examples/simple_example.rs
|
UTF-8
| 3,579
| 3.078125
| 3
|
[
"MIT"
] |
permissive
|
#[macro_use]
extern crate maplit;
use std::borrow::Cow;
use url_encoded_data::UrlEncodedData;
/// Walkthrough of the `UrlEncodedData` API: parses the same query string in
/// bare form and embedded in URLs, then checks the pair-, multi-map- and
/// single-map views against expected values. Returns `Err` only if an inner
/// operation fails (assertions panic instead).
fn simple_example() -> anyhow::Result<()> {
    let qs = "a=1&b=2&c=3&c=4&key_without_value&=value_without_key".to_string();
    // The parser should treat all three spellings identically (extra '?'s included).
    for s in [
        qs.as_str(),
        ("https://abc.com/?".to_string() + qs.as_str()).as_str(),
        ("https://abc.com/?????".to_string() + qs.as_str()).as_str(),
    ]
    .iter()
    {
        let q = UrlEncodedData::parse_str(s);
        // let mut q = UrlEncodedData::prepare(url_1);
        // let q = q.parse();
        println!("got qs: {}", q);
        // 5 distinct keys ("c" repeats), 6 pairs total.
        assert_eq!(q.keys_length(), 5);
        assert_eq!(q.len(), 6);
        let pairs_expected_as_str = [
            ("a", "1"),
            ("b", "2"),
            ("c", "3"),
            ("c", "4"),
            ("key_without_value", ""),
            ("", "value_without_key"),
        ];
        // Pairs come back in the original query-string order.
        for (i, (k, v)) in q.as_pairs_of_original_order().iter().enumerate() {
            let (k_, v_) = pairs_expected_as_str[i];
            assert_eq!(k.as_ref(), k_);
            assert_eq!(v.as_ref(), v_);
        }
        //
        // Multi-value view: each key maps to every value it appeared with.
        let map_of_multiple_values_expected = hashmap! {
            "a"=>vec!("1"),
            "b"=>vec!("2"),
            "c"=>vec!("3", "4"),
            "key_without_value" => vec!(""),
            "" => vec!("value_without_key"),
        };
        dbg!("as_map_of_single_key_to_multiple_values");
        // println!("as_map_of_single_key_to_multiple_values");
        let map = q.as_map_of_single_key_to_multiple_values();
        assert_eq!(map.len(), 5);
        for (k1, v1) in map {
            let v2 = map_of_multiple_values_expected.get(k1.as_ref()).unwrap();
            for (i, v2i) in v2.into_iter().enumerate() {
                assert_eq!(v1[i].as_ref(), *v2i);
            }
        }
        //
        // First-occurrence view: duplicate key "c" resolves to its first value "3".
        let map_of_first_occurrence_value_expected = hashmap! {
            "a"=>"1",
            "b"=>"2",
            "c"=>"3",
            "key_without_value" => "",
            "" => "value_without_key",
        };
        dbg!("as_map_of_single_key_to_first_occurrence_value");
        let map = q.as_map_of_single_key_to_first_occurrence_value();
        assert_eq!(map.len(), 5);
        for (k1, v1) in map {
            let v2 = map_of_first_occurrence_value_expected
                .get(k1.as_ref())
                .unwrap();
            // let v3 = &v1;
            assert_eq!(&v1, v2); // ok, signifies comparing with references, it will auto-dereference to compare the value, which is more convenient
            // Demonstrates that the two sides are distinct allocations even
            // though they compare equal by value.
            let ptr1 = v1 as *const Cow<'_, str> as *const usize;
            let ptr2 = v2 as *const &str as *const usize;
            // let msg = format!("{:p}, {:p}", ptr1, ptr2);
            // dbg!(msg);
            // println!("{:p}, {:p}", ptr1, ptr2);
            assert!(!std::ptr::eq(ptr1, ptr2));
            assert_eq!(*v1, **v2); // ok, value compare
        }
        //
        // Last-occurrence view: duplicate key "c" resolves to its last value "4".
        let map_of_last_occurrence_value_expected = hashmap! {
            "a"=>"1",
            "b"=>"2",
            "c"=>"4",
            "key_without_value" => "",
            "" => "value_without_key",
        };
        dbg!("as_map_of_single_key_to_last_occurrence_value");
        let map = q.as_map_of_single_key_to_last_occurrence_value();
        assert_eq!(map.len(), 5);
        for (k1, v1) in map {
            let v2 = map_of_last_occurrence_value_expected
                .get(k1.as_ref())
                .unwrap();
            assert_eq!(&v1, v2);
        }
    }
    Ok(())
}
fn main() {
    // Run the walkthrough; any failure surfaces as a panic via unwrap.
    simple_example().unwrap();
}
| true
|
42a3eaf772f7bc7bbd735eee842019708921a9cf
|
Rust
|
thepowersgang/rust_os
|
/Kernel/Core/threads/sleep_object.rs
|
UTF-8
| 4,684
| 2.734375
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
// "Tifflin" Kernel
// - By John Hodge (thePowersGang)
//
// Core/threads/sleep_object.rs
//! Sleep object
use core::ops;
use super::thread::{ThreadPtr, RunState};
use super::s_runnable_threads;
/// An object on which a thread can sleep, woken by various event sources
///
/// This object should not be moved while references are active
pub struct SleepObject<'a>
{
	// Type that allows `fn get_ref` to borrow self and prevent moving
	_nomove: ::core::marker::PhantomData<&'a SleepObject<'a>>,
	// Diagnostic name, shown in Debug output and panic messages.
	name: &'static str,
	// Wake flag, waiter slot, and refcount, guarded by a spinlock.
	inner: crate::sync::Spinlock< SleepObjectInner >,
}
impl<'a> ::core::fmt::Debug for SleepObject<'a>
{
	/// Formats a diagnostic summary: name, live reference count, and wake flag.
	fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
		// Snapshot the fields under the spinlock before formatting.
		let inner = self.inner.lock();
		let (refs, flag) = (inner.reference_count, inner.flag);
		write!(f, "SleepObject(\"{}\" {} refs, flag={})", self.name, refs, flag)
	}
}
#[derive(Default)]
struct SleepObjectInner
{
	// Set by `signal` when no thread was waiting; consumed by the next `wait`.
	flag: bool,
	// Number of outstanding `SleepObjectRef`s; must be zero on drop.
	reference_count: usize,
	// The single thread currently parked on this object, if any.
	thread: Option<ThreadPtr>,
}
/// Reference to an active sleep object
pub struct SleepObjectRef
{
	// 'static is useful to avoid needing a lifetime param here... AND it prevents calling
	// get_ref again
	obj: *const SleepObject<'static>,
}
// SAFETY assumption: the raw pointer stays valid because the refcount keeps
// the target's Drop from completing — NOTE(review): soundness relies on the
// `SleepObject` never moving after `get_ref`, enforced by its self-borrow.
unsafe impl ::core::marker::Send for SleepObjectRef {}
impl<'a> SleepObject<'a>
{
	/// Create a new sleep object
	/// UNSAFE: The caller must ensure that this type's destructor is called (maintaining the correctness of obtained SleepObjectRef instances)
	pub const unsafe fn new(name: &'static str) -> SleepObject
	{
		SleepObject {
			_nomove: ::core::marker::PhantomData,
			name: name,
			inner: crate::sync::Spinlock::new(SleepObjectInner {
				flag: false,
				reference_count: 0,
				thread: None,
				}),
			}
	}
	/// Create a new sleep object and call a closure with it
	pub fn with_new<T>(name: &'static str, f: impl FnOnce(&mut SleepObject)->T) -> T {
		// SAFE: Destructor is called
		unsafe {
			let mut v = Self::new(name);
			// TODO: Pass a handle instead?
			f(&mut v)
		}
	}
	/// Wait the current thread on this object
	///
	/// If a signal has already been delivered (`flag` set), returns immediately
	/// and consumes the flag; otherwise parks the current thread and reschedules.
	pub fn wait(&self)
	{
		//log_trace!("SleepObject::wait {:p} '{}'", self, self.name);
		// Interrupts must be held off so `signal` from an IRQ cannot race the park.
		let irql = crate::sync::hold_interrupts();
		let mut lh = self.inner.lock();
		// Only a single waiter is supported at a time.
		assert!( lh.thread.is_none(), "A thread is already sleeping on object {:p} '{}'", self, self.name );
		if lh.flag == false
		{
			let mut cur = super::get_cur_thread();
			cur.run_state = RunState::Sleep(self as *const _ as *const () as *const _);	// Go via () to erase the lifetime
			lh.thread = Some(cur);
			// Release the lock and IRQ hold *before* rescheduling, or no-one could wake us.
			::core::mem::drop(lh);
			::core::mem::drop(irql);
			super::reschedule();
			// Back from the sleep: we must have been made runnable by `signal`.
			let cur = super::get_cur_thread();
			assert!( !is!(cur.run_state, RunState::Sleep(_)) );
			assert!( is!(cur.run_state, RunState::Runnable) );
			super::rel_cur_thread(cur);
		}
		else
		{
			// Signal arrived before we slept: consume it and return immediately.
			lh.flag = false;
		}
	}
	/// Signal this sleep object (waking threads)
	//#[is_safe(irq)]	// Holds interrupts before locking
	pub fn signal(&self)
	{
		//log_trace!("SleepObject::signal {:p} '{}'", self, self.name);
		let _irq_lock = crate::sync::hold_interrupts();
		let mut lh = self.inner.lock();
		// 1. Check for a waiter
		if let Some(mut t) = lh.thread.take()
		{
			// Waiter present: hand it straight to the run queue.
			t.set_state( RunState::Runnable );
			s_runnable_threads.lock().push(t);
		}
		else
		{
			// No waiter yet: remember the signal for the next `wait`.
			lh.flag = true;
		}
	}
	/// Obtain a reference to the sleep object
	///
	/// NOTE: After this is called, self must not move. This is enforced using a self-borrow
	pub fn get_ref(&'a self) -> SleepObjectRef {
		self.inner.lock().reference_count += 1;
		SleepObjectRef {
			obj: self as *const _ as *const () as *const _,
			}
	}
}
impl<'a> ops::Drop for SleepObject<'a>
{
	/// Asserts that no `SleepObjectRef`s are still outstanding when the
	/// object is destroyed (they would be left dangling otherwise).
	fn drop(&mut self)
	{
		let guard = self.inner.lock();
		assert!(guard.reference_count == 0, "Sleep object being dropped while references are active");
	}
}
impl SleepObjectRef
{
	/// Checks if this reference points to the passed object
	pub fn is_from(&self, obj: &SleepObject) -> bool {
		// Erase the lifetime the same way `get_ref` does, then compare addresses.
		let target = obj as *const _ as *const () as *const SleepObject<'static>;
		self.obj == target
	}
}
impl ops::Deref for SleepObjectRef
{
	type Target = SleepObject<'static>;
	/// Dereferences to the target object so `wait`/`signal` can be called directly.
	fn deref(&self) -> &SleepObject<'static> {
		// SAFE: Reference counting ensures that this pointer is valid.
		unsafe { &*self.obj }	// > ASSUMPTION: The SleepObject doesn't move after it's borrowed
	}
}
impl ops::Drop for SleepObjectRef
{
	/// Decrements the target's reference count (incremented by `get_ref`).
	fn drop(&mut self)
	{
		// SAFE: Should still be valid
		let mut lh = unsafe { (*self.obj).inner.lock() };
		assert!(lh.reference_count > 0, "Sleep object's reference count is zero when dropping a reference");
		lh.reference_count -= 1;
	}
}
| true
|
87006cfa85f90a0b1ab7829e5b669c2b5ebd9fc6
|
Rust
|
twittner/quickcheck_derive
|
/src/lib.rs
|
UTF-8
| 4,554
| 2.625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
extern crate proc_macro2;
#[cfg(test)]
extern crate quickcheck;
#[cfg(test)]
extern crate rand;
#[cfg_attr(test, macro_use)]
extern crate syn;
#[macro_use]
extern crate synstructure;
use proc_macro2::TokenStream;
decl_derive!([Arbitrary] => arbitrary_derive);
/// Generates a `quickcheck::Arbitrary` impl for the annotated type.
///
/// Structs / single-variant enums construct every field via
/// `Arbitrary::arbitrary(g)`; multi-variant enums first pick a variant index
/// uniformly with `rand::Rng::gen_range`. Panics on zero-variant enums, which
/// cannot be instantiated.
fn arbitrary_derive(s: synstructure::Structure) -> TokenStream {
    let (g, body) = match s.variants().len() {
        // zero-variant enum
        0 => panic!("Cannot derive `Arbitrary` for an enum with no variants."),
        // struct or single-variant enum
        1 => {
            let body = s.variants()[0].construct(|_, _| quote! { ::quickcheck::Arbitrary::arbitrary(g) });
            // Unit structs never touch the generator, so name the parameter
            // `_g` to avoid an unused-variable warning in the generated code.
            let g = if let syn::Fields::Unit = s.variants()[0].ast().fields {
                quote!(_g)
            } else {
                quote!(g)
            };
            (g, body)
        },
        // multiple-variant enum
        _ => {
            // Emit one `index => Constructor(...)` match arm per variant.
            let mut variant_tokens = TokenStream::new();
            for (count, variant) in s.variants().iter().enumerate() {
                let constructor = variant.construct(|_, _| quote! { ::quickcheck::Arbitrary::arbitrary(g) });
                variant_tokens.extend(quote! { #count => #constructor, });
            }
            let count = s.variants().len();
            let body = quote! {
                match ::rand::Rng::gen_range(g, 0, #count) {
                    #variant_tokens
                    _ => unreachable!()
                }
            };
            (quote!(g), body)
        },
    };
    s.gen_impl(quote! {
        gen impl ::quickcheck::Arbitrary for @Self {
            fn arbitrary<G: ::quickcheck::Gen>(#g: &mut G) -> Self {
                #body
            }
        }
    })
}
// Golden-expansion test: a unit struct must use `_g` (generator unused).
// NOTE: comments are kept outside the `test_derive!` bodies — the macro
// compares token streams, and the expected expansion must stay exact.
#[test]
fn test_arbitrary_unit_struct() {
    test_derive! {
        arbitrary_derive {
            #[derive(Clone)]
            struct ArbitraryTest;
        }
        expands to {
            #[allow(non_upper_case_globals)]
            const _DERIVE_quickcheck_Arbitrary_FOR_ArbitraryTest: () = {
                impl ::quickcheck::Arbitrary for ArbitraryTest {
                    fn arbitrary<G: ::quickcheck::Gen>(_g: &mut G) -> Self {
                        ArbitraryTest
                    }
                }
            };
        }
    }
}
// Golden-expansion test: tuple-struct fields are each drawn from `arbitrary(g)`.
#[test]
fn test_arbitrary_struct() {
    test_derive! {
        arbitrary_derive {
            #[derive(Clone)]
            struct ArbitraryTest(u8, bool);
        }
        expands to {
            #[allow(non_upper_case_globals)]
            const _DERIVE_quickcheck_Arbitrary_FOR_ArbitraryTest: () = {
                impl ::quickcheck::Arbitrary for ArbitraryTest {
                    fn arbitrary<G: ::quickcheck::Gen>(g: &mut G) -> Self {
                        ArbitraryTest(::quickcheck::Arbitrary::arbitrary(g),
                            ::quickcheck::Arbitrary::arbitrary(g), )
                    }
                }
            };
        }
    }
}
// A zero-variant enum has no constructible value, so the derive must panic.
#[test]
#[should_panic(expected = "Cannot derive `Arbitrary` for an enum with no variants.")]
fn test_arbitrary_zero_variant_enum() {
    let input = parse_quote! {
        #[derive(Clone)]
        enum ArbitraryTest {}
    };
    arbitrary_derive(synstructure::Structure::new(&input));
}
// Golden-expansion test: multi-variant enums pick a variant via gen_range.
#[test]
fn test_arbitrary_enum() {
    test_derive! {
        arbitrary_derive {
            #[derive(Clone)]
            enum ArbitraryTest {
                A,
                B(usize, u32),
                C{ b: bool, d: (u16, u16) }
            }
        }
        expands to {
            #[allow(non_upper_case_globals)]
            const _DERIVE_quickcheck_Arbitrary_FOR_ArbitraryTest: () = {
                impl ::quickcheck::Arbitrary for ArbitraryTest {
                    fn arbitrary<G: ::quickcheck::Gen>(g: &mut G) -> Self {
                        match ::rand::Rng::gen_range(g, 0, 3usize) {
                            0usize => ArbitraryTest::A,
                            1usize => ArbitraryTest::B(::quickcheck::Arbitrary::arbitrary(g),
                                ::quickcheck::Arbitrary::arbitrary(g),
                            ),
                            2usize => ArbitraryTest::C {
                                b : ::quickcheck::Arbitrary::arbitrary(g),
                                d : ::quickcheck::Arbitrary::arbitrary(g),
                            },
                            _ => unreachable!()
                        }
                    }
                }
            };
        }
    }
}
| true
|
69c3df4af90e4fa8dde6d9f32bfe3e4c101711f3
|
Rust
|
casey/qc
|
/src/main.rs
|
UTF-8
| 9,642
| 3.21875
| 3
|
[
"CC0-1.0"
] |
permissive
|
mod common;
mod error;
mod opt;
use crate::common::*;
/// Interprets a postfix-calculator program and returns the final stack.
///
/// Words starting with `--` are treated as CLI flags for `Opt`; every other
/// word is either an operator or a numeric literal pushed via `num`.
fn run(args: &[&str]) -> Result<Vec<isize>, Error> {
    // Separate option flags from program words.
    let (flags, program): (Vec<&str>, Vec<&str>) = args.iter().partition(|word| word.starts_with("--"));
    let opt = Opt::from_iter(flags);
    let mut stack: Vec<isize> = Vec::new();
    for word in program {
        // Operators mutate the stack; anything unrecognised is parsed as a number.
        match word {
            "add" => add(&mut stack)?,
            "sub" => sub(&mut stack)?,
            "mul" => mul(&mut stack)?,
            "div" => div(&mut stack)?,
            ":add" => add_all(&mut stack)?,
            ":sub" => sub_all(&mut stack)?,
            ":mul" => mul_all(&mut stack)?,
            ":div" => div_all(&mut stack)?,
            "pop" => pop(&mut stack).map(|_| ())?,
            "." => pop_print(&mut stack)?,
            ":." => pop_print_all(&mut stack)?,
            _ => num(&mut stack, word)?,
        }
        if opt.verbose {
            println!("Stack:\t\t{:?}", &stack);
        }
    }
    Ok(stack)
}
/// Entry point: feeds the command-line words (minus the binary name) to `run`.
fn main() -> Result<(), Error> {
    let owned: Vec<String> = std::env::args().collect();
    // Drop argv[0] and borrow the rest as &str for the interpreter.
    let words: Vec<&str> = owned.iter().skip(1).map(|s| s.as_str()).collect();
    run(&words)?;
    Ok(())
}
/// Pops and prints every remaining stack entry, top first.
fn pop_print_all(stack: &mut Vec<isize>) -> Result<(), Error> {
    loop {
        if stack.is_empty() {
            return Ok(());
        }
        pop_print(stack)?;
    }
}
/// Pops one value and prints it in decimal, hex, octal and binary.
fn pop_print(stack: &mut Vec<isize>) -> Result<(), Error> {
    let value = pop(stack)?;
    println!(
        "dec: {}\t\thex: 0x{:x}\t\toct: o{:o}\t\tbin: b{:b}",
        value, value, value, value
    );
    Ok(())
}
fn pop(stack: &mut Vec<isize>) -> Result<isize, Error> {
match stack.pop() {
Some(x) => Ok(x),
None => Err(Error::StackUnderflow),
}
}
/// Pops top two items off the stack, adds them, and pushes the sum on the stack.
fn add(stack: &mut Vec<isize>) -> Result<(), Error> {
    let top = pop(stack)?;
    let next = pop(stack)?;
    stack.push(next + top);
    Ok(())
}
/// Pops top two items off the stack, subtracts the top from the one below it,
/// and pushes the difference on the stack.
fn sub(stack: &mut Vec<isize>) -> Result<(), Error> {
    let top = pop(stack)?;
    let next = pop(stack)?;
    stack.push(next - top);
    Ok(())
}
/// Pops top two items off the stack, multiplies them, and pushes the product
/// on the stack.
fn mul(stack: &mut Vec<isize>) -> Result<(), Error> {
    let top = pop(stack)?;
    let next = pop(stack)?;
    stack.push(next * top);
    Ok(())
}
/// Pops top two items off the stack, divides the lower by the top, and pushes
/// the quotient on the stack.
fn div(stack: &mut Vec<isize>) -> Result<(), Error> {
    let top = pop(stack)?;
    let next = pop(stack)?;
    // NOTE(review): a zero divisor panics here — confirm that is intended.
    stack.push(next / top);
    Ok(())
}
/// Pops all items before `:add` off the stack, adds them, and pushes the sum on the stack.
fn add_all(stack: &mut Vec<isize>) -> Result<(), Error> {
    // At least two operands are required; the remainder is folded in afterwards.
    let mut sum = pop(stack)? + pop(stack)?;
    while let Some(v) = stack.pop() {
        sum += v;
    }
    stack.push(sum);
    Ok(())
}
/// Pops all items before `:sub` off the stack, subtracts them, and pushes the difference on the stack.
fn sub_all(stack: &mut Vec<isize>) -> Result<(), Error> {
    let top = pop(stack)?;
    let next = pop(stack)?;
    // Fold downward: each deeper element becomes the new left-hand side.
    let mut difference = next - top;
    while let Some(v) = stack.pop() {
        difference = v - difference;
    }
    stack.push(difference);
    Ok(())
}
/// Pops all items before `:mul` off the stack, multiplies them, and pushes the product on the stack.
fn mul_all(stack: &mut Vec<isize>) -> Result<(), Error> {
    let mut product = pop(stack)? * pop(stack)?;
    while let Some(v) = stack.pop() {
        product *= v;
    }
    stack.push(product);
    Ok(())
}
/// Pops all items before `:div` off the stack, divides them, and pushes the quotient on the stack.
fn div_all(stack: &mut Vec<isize>) -> Result<(), Error> {
    let top = pop(stack)?;
    let next = pop(stack)?;
    // Fold downward: each deeper element is divided by the running quotient.
    let mut quotient = next / top;
    while let Some(v) = stack.pop() {
        quotient = v / quotient;
    }
    stack.push(quotient);
    Ok(())
}
/// Parse arg as a number and push it onto the stack.
///
/// Supports `0x`/`x` (hex), `0o`/`o` (octal) and `0b`/`b` (binary) prefixes;
/// anything else is parsed as decimal.
fn num(stack: &mut Vec<isize>, arg: &str) -> Result<(), Error> {
    // Prefix table, checked in order so "0x" wins over the bare "x" form.
    const PREFIXES: [(&str, u32); 6] = [
        ("0x", 16),
        ("x", 16),
        ("0o", 8),
        ("o", 8),
        ("0b", 2),
        ("b", 2),
    ];
    for &(prefix, radix) in PREFIXES.iter() {
        if arg.starts_with(prefix) {
            // NOTE(review): like the decimal path below, a malformed literal panics.
            stack.push(isize::from_str_radix(&arg[prefix.len()..], radix).unwrap());
            return Ok(());
        }
    }
    stack.push(arg.parse().unwrap());
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    /* Test helper function and macros */
    // Splits a program string into whitespace-separated words.
    fn lex(text: &str) -> Vec<&str> {
        text.split_whitespace().collect()
    }
    // Lexes and runs a program, returning the final stack (or the error).
    fn test(text: &str) -> Result<Vec<isize>, Error> {
        run(&lex(text))
    }
    /// Tests intended to succeed
    macro_rules! test {
        {
            name: $name:ident,
            text: $text:expr,
            want: $want:expr $(,)?
        } => {
            #[test]
            fn $name() {
                let have = test($text).expect("Expected success");
                let want = $want.iter().cloned().map(|x| x as isize).collect::<Vec<isize>>();
                assert_eq!(have, want);
            }
        };
    }
    /// Tests intended to fail
    macro_rules! error {
        {
            name: $name:ident,
            text: $text:expr,
            want: $want:expr $(,)?
        } => {
            #[test]
            fn $name() {
                let have = test($text).expect_err("Expected error");
                let want = $want;
                assert_eq!(have, want);
            }
        };
    }
    /// Test lexer
    #[test]
    fn lex_empty() {
        let text = "";
        let have = lex(text);
        let want: Vec<String> = Vec::new();
        assert_eq!(have, want);
    }
    #[test]
    fn lex_blank() {
        let text = " ";
        let have = lex(text);
        let want: Vec<String> = Vec::new();
        assert_eq!(have, want);
    }
    #[test]
    fn lex_word() {
        let text = "foo";
        let have = lex(text);
        let want = vec!["foo".to_string()];
        assert_eq!(have, want);
    }
    #[test]
    fn lex_args() {
        let text = "1 2 add";
        let have = lex(text);
        let want = vec!["1".to_string(), "2".to_string(), "add".to_string()];
        assert_eq!(have, want);
    }
    /* Test errors */
    // Error when popping on empty stack
    // $ qc pop
    error! {
        name: pop_empty,
        text: "pop",
        want: Error::StackUnderflow,
    }
    // $ qc 1 add
    error! {
        name: add_underflow,
        text: "1 add",
        want: Error::StackUnderflow,
    }
    /* Test add */
    // $qc 1 2 add
    test! {
        name: add_two_args,
        text: "1 2 add",
        want: [3],
    }
    // $qc 1 2 3 add
    test! {
        name: add_three_args,
        text: "1 2 3 add",
        want: [1, 5],
    }
    // $qc 1 2 :add
    test! {
        name: add_all_two_args,
        text: "1 2 :add",
        want: [3],
    }
    // $qc 1 2 3 :add
    test! {
        name: add_all_three_args,
        text: "1 2 3 :add",
        want: [6],
    }
    /* Test sub */
    // $ qc 1 2 sub
    test! {
        name: sub_two_args,
        text: "1 2 sub",
        want: [-1],
    }
    // $ qc 1 2 3 sub
    test! {
        name: sub_three_args,
        text: "1 2 3 sub",
        want: [1, -1],
    }
    // $ qc 1 2 :sub
    test! {
        name: sub_all_two_args,
        text: "1 2 :sub",
        want: [-1],
    }
    // $qc 1 2 3 :sub
    test! {
        name: sub_all_three_args,
        text: "1 2 3 :sub",
        want: [2],
    }
    /* Test mul */
    // $ qc 2 3 mul
    test! {
        name: mul_two_args,
        text: "2 3 mul",
        want: [6],
    }
    // $ qc 1 2 3 mul
    test! {
        name: mul_three_args,
        text: "1 2 3 mul",
        want: [1, 6],
    }
    // $ qc 2 4 :mul
    test! {
        name: mul_all_two_args,
        text: "2 4 :mul",
        want: [8],
    }
    // $ qc 1 2 3 :mul
    test! {
        name: mul_all_three_args,
        text: "1 2 3 :mul",
        want: [6],
    }
    /* Test div */
    // $ qc 9 3 div
    test! {
        name: div_two_args,
        text: "9 3 div",
        want: [3],
    }
    // $ qc 1 6 2 div
    test! {
        name: div_three_args,
        text: "1 6 2 div",
        want: [1, 3],
    }
    // $ qc 9 3 :div
    test! {
        name: div_all_two_args,
        text: "9 3 :div",
        want: [3],
    }
    // $ qc 6 2 1 :div
    test! {
        name: div_all_three_args,
        text: "6 2 1 :div",
        want: [3],
    }
    /* Test miscellaneous binary calcs */
    // $ qc 4 7 9 add 2 8 mul
    test! {
        name: add_three_mul_two,
        text: "4 7 9 add 2 8 mul",
        want: [4, 16, 16],
    }
    // $ qc 4 7 9 add add 2 8 mul mul
    test! {
        name: add_two_mul_two,
        text: "4 7 9 add add 2 8 mul mul",
        want: [320],
    }
}
| true
|
4bf561781b119b33b2e5ca61ada30ab3b2e5172f
|
Rust
|
pjohansson/windy-city-politics
|
/base/src/texture.rs
|
UTF-8
| 4,158
| 3.203125
| 3
|
[] |
no_license
|
use amethyst::{
assets::{AssetStorage, Handle, Loader, Progress},
renderer::{
palette::{Pixel, Srgb},
rendy::{
resource::{Filter, SamplerInfo, ViewKind, WrapMode},
texture::{pixel::Rgb8Srgb, TextureBuilder},
},
types::TextureData,
Kind, Texture,
},
};
/// Builds a 2-D nearest-filtered, tiled texture from raw RGBA byte data.
///
/// `data` must contain exactly `nx * ny` pixels; returns a descriptive error
/// string when the buffer is empty or its length disagrees with the dimensions.
pub fn create_texture<P: Progress>(
    data: &[[u8; 4]],
    (nx, ny): (u32, u32),
    store: &AssetStorage<Texture>,
    loader: &Loader,
    progress: P,
) -> Result<Handle<Texture>, String> {
    // Guard clauses: empty buffer first, then dimension mismatch.
    if data.is_empty() {
        return Err(String::from(
            "input texture data size was 0 but cannot create texture from no data",
        ));
    }
    if data.len() != (nx * ny) as usize {
        return Err(format![
            "input texture data size {} does not match given dimensions {} x {} = {}",
            data.len(),
            nx,
            ny,
            nx * ny
        ]);
    }
    // Reinterpret each byte quadruple as an sRGB pixel for the builder.
    let pixels: Vec<Rgb8Srgb> = data
        .iter()
        .map(|p| Srgb::from_raw(p))
        .map(|&p| p.into())
        .collect();
    let texture_data: TextureData = TextureBuilder::new()
        .with_data(pixels)
        .with_data_width(nx)
        .with_data_height(ny)
        .with_kind(Kind::D2(nx, ny, 1, 1))
        .with_view_kind(ViewKind::D2)
        .with_sampler_info(SamplerInfo::new(Filter::Nearest, WrapMode::Tile))
        .into();
    Ok(loader.load_from_data(texture_data, progress, &store))
}
#[cfg(test)]
mod tests {
    use super::*;
    use amethyst::assets::ProgressCounter;
    // use amethyst_test::prelude::*;
    use lazy_static::lazy_static;
    use rayon::ThreadPoolBuilder;
    use std::sync::Arc;
    lazy_static! {
        // Shared single-threaded asset loader, built once for all tests.
        static ref LOADER: Loader = {
            let builder = ThreadPoolBuilder::new().num_threads(1);
            let pool = Arc::new(builder.build().expect("invalid config"));
            Loader::new("", pool)
        };
    }
    // Buffers whose length equals nx * ny must produce a texture handle.
    #[test]
    fn create_texture_with_matching_dimensions_returns_handle() {
        let data_1 = &[[0, 0, 0, 0]];
        let data_2 = &[[0, 0, 0, 0], [0, 0, 0, 0]];
        let data_6 = &[
            [0, 0, 0, 0],
            [0, 0, 0, 0],
            [0, 0, 0, 0],
            [0, 0, 0, 0],
            [0, 0, 0, 0],
            [0, 0, 0, 0],
        ];
        let store = AssetStorage::<Texture>::new();
        assert!(create_texture(data_1, (1, 1), &store, &LOADER, ()).is_ok());
        assert!(create_texture(data_2, (2, 1), &store, &LOADER, ()).is_ok());
        assert!(create_texture(data_2, (1, 2), &store, &LOADER, ()).is_ok());
        assert!(create_texture(data_6, (3, 2), &store, &LOADER, ()).is_ok());
    }
    // Any mismatch between buffer length and nx * ny must be rejected.
    #[test]
    fn create_texture_with_nonmatching_dimensions_returns_error() {
        let data_6 = &[
            [0, 0, 0, 0],
            [0, 0, 0, 0],
            [0, 0, 0, 0],
            [0, 0, 0, 0],
            [0, 0, 0, 0],
            [0, 0, 0, 0],
        ];
        let store = AssetStorage::<Texture>::new();
        assert!(create_texture(data_6, (1, 1), &store, &LOADER, ()).is_err());
        assert!(create_texture(data_6, (3, 3), &store, &LOADER, ()).is_err());
        assert!(create_texture(data_6, (0, 6), &store, &LOADER, ()).is_err());
        assert!(create_texture(data_6, (6, 0), &store, &LOADER, ()).is_err());
    }
    // An empty buffer is an error even when the dimensions are also zero.
    #[test]
    fn create_texture_with_no_data_returns_error() {
        let store = AssetStorage::<Texture>::new();
        assert!(create_texture(&[], (0, 0), &store, &LOADER, ()).is_err());
    }
    // Each successful creation should tick the supplied ProgressCounter once.
    #[test]
    fn create_texture_with_progresscounter_updates_it() {
        let data = &[[0, 0, 0, 0]];
        let store = AssetStorage::<Texture>::new();
        let mut progress = ProgressCounter::new();
        let num_created = 4;
        for _ in 0..num_created {
            create_texture(data, (1, 1), &store, &LOADER, &mut progress).unwrap();
        }
        assert_eq!(
            num_created,
            progress.num_assets(),
            "progress counter was not updated when creating a texture"
        );
    }
}
| true
|
0ba89fd76bd4d3e1d9a6834e28ad53ef3f7b7933
|
Rust
|
kore-signet/argmm
|
/src/simd/simd_i32.rs
|
UTF-8
| 6,494
| 2.5625
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::generic::{simple_argmax, simple_argmin};
use crate::task::{find_final_index_max, find_final_index_min, split_array};
use std::arch::x86_64::*;
/// Index of the smallest element of `arr`, or `None` for an empty slice.
/// Uses the SSE path for the 4-aligned tail and a scalar scan for the remainder.
pub fn argmin_i32(arr: &[i32]) -> Option<usize> {
    // `split_array` yields (scalar remainder, SIMD-sized portion).
    match split_array(arr, 4) {
        (Some(rem), Some(sim)) => {
            let idx = simple_argmin(rem);
            let scalar_best = (rem[idx], idx);
            let simd_best = unsafe { core_argmin(sim, rem.len()) };
            find_final_index_min(scalar_best, simd_best)
        }
        (Some(rem), None) => Some(simple_argmin(rem)),
        (None, Some(sim)) => Some(unsafe { core_argmin(sim, 0) }.1),
        (None, None) => None,
    }
}
// Vectorised argmin over a length-multiple-of-4 slice; `rem_offset` shifts the
// reported indices past the scalar remainder that precedes `sim_arr`.
// Returns (min value, index of its FIRST occurrence).
// NOTE(review): `_mm_min_epi32` is an SSE4.1 intrinsic but this function is not
// gated with #[target_feature(enable = "sse4.1")] — confirm the build always
// targets SSE4.1-capable CPUs.
unsafe fn core_argmin(sim_arr: &[i32], rem_offset: usize) -> (i32, usize) {
    let offset = _mm_set1_epi32(rem_offset as i32);
    // Per-lane running minimum and the index it was seen at.
    let mut index_low = _mm_add_epi32(_mm_set_epi32(3, 2, 1, 0), offset);
    let increment = _mm_set1_epi32(4);
    let mut new_index_low = index_low;
    let mut values_low = _mm_loadu_si128(sim_arr.as_ptr() as *const __m128i);
    sim_arr.chunks_exact(4).skip(1).for_each(|step| {
        new_index_low = _mm_add_epi32(new_index_low, increment);
        let new_values = _mm_loadu_si128(step.as_ptr() as *const __m128i);
        // Strict less-than keeps the EARLIEST index on ties.
        let lt_mask = _mm_cmplt_epi32(new_values, values_low);
        values_low = _mm_or_si128(
            _mm_and_si128(new_values, lt_mask),
            _mm_andnot_si128(lt_mask, values_low),
        );
        index_low = _mm_or_si128(
            _mm_and_si128(new_index_low, lt_mask),
            _mm_andnot_si128(lt_mask, index_low),
        );
    });
    // Horizontal reduction: broadcast the overall minimum into every lane.
    let highpack = _mm_unpackhi_epi32(values_low, values_low);
    let lowpack = _mm_unpacklo_epi32(values_low, values_low);
    let lowest = _mm_min_epi32(highpack, lowpack);
    let highpack = _mm_unpackhi_epi32(lowest, lowest);
    let lowpack = _mm_unpacklo_epi32(lowest, lowest);
    let lowest = _mm_min_epi32(highpack, lowpack);
    // Keep only indices of lanes that hold the minimum; others become i32::MAX
    // so the smallest (earliest) index wins below.
    let low_mask = _mm_cmpeq_epi32(lowest, values_low);
    index_low = _mm_or_si128(
        _mm_and_si128(index_low, low_mask),
        _mm_andnot_si128(low_mask, _mm_set1_epi32(std::i32::MAX)),
    );
    let value_array = std::mem::transmute::<__m128i, [i32; 4]>(values_low);
    let index_array = std::mem::transmute::<__m128i, [i32; 4]>(index_low);
    let min_index = simple_argmin(&index_array);
    let value = *value_array.get_unchecked(min_index);
    let index = *index_array.get_unchecked(min_index);
    (value, index as usize)
}
/// Index of the largest element of `arr`, or `None` for an empty slice.
/// Mirror of `argmin_i32`, combining the SSE tail with the scalar remainder.
pub fn argmax_i32(arr: &[i32]) -> Option<usize> {
    match split_array(arr, 4) {
        (Some(rem), Some(sim)) => {
            let idx = simple_argmax(rem);
            let scalar_best = (rem[idx], idx);
            let simd_best = unsafe { core_argmax(sim, rem.len()) };
            find_final_index_max(scalar_best, simd_best)
        }
        (Some(rem), None) => Some(simple_argmax(rem)),
        (None, Some(sim)) => Some(unsafe { core_argmax(sim, 0) }.1),
        (None, None) => None,
    }
}
// Vectorised argmax over a length-multiple-of-4 slice; `rem_offset` shifts the
// reported indices past the scalar remainder that precedes `sim_arr`.
// Returns (max value, index of its FIRST occurrence).
// NOTE(review): `_mm_max_epi32` is an SSE4.1 intrinsic but this function is not
// gated with #[target_feature(enable = "sse4.1")] — confirm the build always
// targets SSE4.1-capable CPUs.
unsafe fn core_argmax(sim_arr: &[i32], rem_offset: usize) -> (i32, usize) {
    let offset = _mm_set1_epi32(rem_offset as i32);
    // Per-lane running maximum and the index it was seen at.
    let mut index_high = _mm_add_epi32(_mm_set_epi32(3, 2, 1, 0), offset);
    let mut new_index_high = index_high;
    let increment = _mm_set1_epi32(4);
    let mut values_high = _mm_loadu_si128(sim_arr.as_ptr() as *const __m128i);
    sim_arr.chunks_exact(4).skip(1).for_each(|step| {
        new_index_high = _mm_add_epi32(new_index_high, increment);
        let new_values = _mm_loadu_si128(step.as_ptr() as *const __m128i);
        // Strict greater-than keeps the EARLIEST index on ties.
        let gt_mask = _mm_cmpgt_epi32(new_values, values_high);
        values_high = _mm_or_si128(
            _mm_and_si128(new_values, gt_mask),
            _mm_andnot_si128(gt_mask, values_high),
        );
        index_high = _mm_or_si128(
            _mm_and_si128(new_index_high, gt_mask),
            _mm_andnot_si128(gt_mask, index_high),
        );
    });
    // Horizontal reduction: broadcast the overall maximum into every lane.
    let highpack = _mm_unpackhi_epi32(values_high, values_high);
    let lowpack = _mm_unpacklo_epi32(values_high, values_high);
    let highest = _mm_max_epi32(highpack, lowpack);
    let highpack = _mm_unpackhi_epi32(highest, highest);
    let lowpack = _mm_unpacklo_epi32(highest, highest);
    let highest = _mm_max_epi32(highpack, lowpack);
    // Keep only indices of lanes holding the maximum; others become i32::MAX
    // so the smallest (earliest) index wins below.
    let high_mask = _mm_cmpeq_epi32(highest, values_high);
    index_high = _mm_or_si128(
        _mm_and_si128(index_high, high_mask),
        _mm_andnot_si128(high_mask, _mm_set1_epi32(std::i32::MAX)),
    );
    let value_array = std::mem::transmute::<__m128i, [i32; 4]>(values_high);
    let index_array = std::mem::transmute::<__m128i, [i32; 4]>(index_high);
    // argmin over the surviving indices picks the first occurrence.
    let min_index = simple_argmin(&index_array);
    let value = *value_array.get_unchecked(min_index);
    let index = *index_array.get_unchecked(min_index);
    (value, index as usize)
}
#[cfg(test)]
mod tests {
    use super::{argmax_i32, argmin_i32, simple_argmax, simple_argmin};
    use rand::{thread_rng, Rng};
    use rand_distr::Uniform;
    // Random i32 vector spanning the full value range.
    fn get_array_i32(n: usize) -> Vec<i32> {
        let rng = thread_rng();
        let uni = Uniform::new_inclusive(std::i32::MIN, std::i32::MAX);
        rng.sample_iter(uni).take(n).collect()
    }
    // SIMD results must agree with the scalar reference on a length that
    // forces both the remainder path (len % 4 == 1) and the SIMD path.
    #[test]
    fn test_both_versions_return_the_same_results() {
        let data = get_array_i32(1025);
        assert_eq!(data.len() % 4, 1);
        let min_index = argmin_i32(&data).unwrap();
        let max_index = argmax_i32(&data).unwrap();
        let argmin_index = simple_argmin(&data);
        let argmax_index = simple_argmax(&data);
        assert_eq!(argmin_index, min_index);
        assert_eq!(argmax_index, max_index);
    }
    // Ties must resolve to the first occurrence, matching the scalar version.
    #[test]
    fn test_first_index_is_returned_when_identical_values_found() {
        let data = [
            std::i32::MIN,
            std::i32::MIN,
            4,
            6,
            9,
            std::i32::MAX,
            22,
            std::i32::MAX,
        ];
        let argmin_index = simple_argmin(&data);
        let argmin_simd_index = argmin_i32(&data).unwrap();
        assert_eq!(argmin_index, argmin_simd_index);
        assert_eq!(argmin_index, 0);
        let argmax_index = simple_argmax(&data);
        let argmax_simd_index = argmax_i32(&data).unwrap();
        assert_eq!(argmax_index, argmax_simd_index);
        assert_eq!(argmax_index, 5);
    }
}
| true
|
7c047be3f355d4ae1842208d831d9aa5f4b8ff71
|
Rust
|
laohanlinux/util-rs
|
/kvstore/src/crypto.rs
|
UTF-8
| 2,426
| 3.1875
| 3
|
[] |
no_license
|
use sha3::{Sha3_256, Digest};
use common;
use encoding::{ToHex, FromHexError};
/// Size in bytes of a Sha3-256 digest.
pub const HASH_SIZE: usize = 32;
/// Fixed-size wrapper around a 32-byte hash digest.
pub struct Hash([u8; HASH_SIZE]);
impl Hash {
    /// Create a new instance from a bytes array.
    pub fn new(b: [u8; HASH_SIZE]) -> Self {
        Hash(b)
    }
    /// Create a new instance from a bytes slice.
    ///
    /// Returns `None` when the slice is not exactly `HASH_SIZE` bytes long.
    /// (Previously an unimplemented stub that asserted the length and then
    /// always returned `None`.)
    pub fn from_slice(bs: &[u8]) -> Option<Self> {
        if bs.len() != HASH_SIZE {
            return None;
        }
        let mut buf = [0u8; HASH_SIZE];
        buf.copy_from_slice(bs);
        Some(Hash(buf))
    }
    /// Create a new instance filled with zeros.
    pub fn zero() -> Self {
        Self::new([0; HASH_SIZE])
    }
    /// Hex-encode the digest (delegates to the crate-local `common::to_hex`).
    pub fn to_hex(&self) -> String {
        common::to_hex(self)
    }
}
/// Borrow the raw 32-byte digest.
impl AsRef<[u8]> for Hash {
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}
use std::str::FromStr;
/// Parses a hex string (optionally `0x`-prefixed) into a `Hash`.
///
/// A 32-byte digest is 64 hex characters; the previous implementation compared
/// against the *byte* length (32), cast characters straight to bytes, and tried
/// to collect a `Vec<u8>` into a `[u8; HASH_SIZE]`, so it could never parse the
/// output of `format!("{:x}", digest)`. This version decodes the hex properly.
impl FromStr for Hash {
    type Err = FromHexError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Strip an optional "0x" prefix.
        let hex = if s.starts_with("0x") { &s[2..] } else { s };
        // Reject non-ASCII early so the 2-char slicing below cannot panic
        // on a multi-byte character boundary.
        if !hex.is_ascii() || hex.len() != HASH_SIZE * 2 {
            return Err(FromHexError::new());
        }
        let mut out = [0u8; HASH_SIZE];
        for i in 0..HASH_SIZE {
            // Each output byte comes from two hex digits.
            out[i] = u8::from_str_radix(&hex[i * 2..i * 2 + 2], 16)
                .map_err(|_| FromHexError::new())?;
        }
        Ok(Hash::new(out))
    }
}
use std::fmt;
/// Debug output shows the full hex form with a `0x` prefix.
impl fmt::Debug for Hash {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "0x{}", self.to_hex())
    }
}
/// Hex-encoding support for `Hash` (both previously unimplemented TODO stubs
/// that silently wrote nothing).
impl ToHex for Hash {
    /// Writes the 32 digest bytes as 64 lower-case hex characters.
    fn write_hex<W: ::std::fmt::Write>(&self, w: &mut W) -> ::std::fmt::Result {
        for byte in self.0.iter() {
            write!(w, "{:02x}", byte)?;
        }
        Ok(())
    }
    /// Writes the 32 digest bytes as 64 upper-case hex characters.
    fn write_hex_upper<W: ::std::fmt::Write>(&self, w: &mut W) -> ::std::fmt::Result {
        for byte in self.0.iter() {
            write!(w, "{:02X}", byte)?;
        }
        Ok(())
    }
}
/// Types that can produce a canonical `Hash` of themselves.
pub trait CryptoHash {
    fn hash(&self) -> Hash;
}
/// Convenience wrapper: Sha3 digest of `data`, round-tripped through hex.
/// NOTE(review): `{:x}` on a 32-byte digest yields 64 hex characters — verify
/// that `Hash::from_str` accepts that length, otherwise this `unwrap` panics.
pub fn hash(data: &[u8]) -> Hash {
    let digest = common::to_sha3(data);
    Hash::from_str(&format!("{:x}", digest)).unwrap()
}
#[derive(Debug, Default)]
pub struct HashStream(Sha3_256);
impl HashStream {
/// Create a new instance of `HashStream`
pub fn new() -> Self {
HashStream(Sha3_256::default())
}
/// Processes a chunk of stream and returns a `HashStream` with the updated internal state.
pub fn update(mut self, chunk: &[u8]) -> Self {
self.0.input(chunk);
self
}
/// Returns the hash of data supplied to the stream so far.
pub fn hash(self) -> Hash {
let dig = self.0.result();
Hash(dig)
}
}
| true
|
737edc2a7a50d5f6a9244c2ae7186009222e7899
|
Rust
|
VenmoTools/request-rs
|
/src/proto/http1/conn.rs
|
UTF-8
| 8,384
| 2.953125
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::borrow::{Borrow, BorrowMut};
use std::io::{Read, Write};
use std::net::{IpAddr, SocketAddr, TcpStream};
use std::time::Duration;
use net2::{TcpBuilder, TcpStreamExt};
use crate::body::{Body, BodyKind};
use crate::body_kind;
use crate::error::Result;
use crate::proto::Connector;
/// the tcp configuration for http client
#[derive(Debug, Clone)]
pub struct HttpConfig {
    /// Connect timeout; if `None` the OS default is used
    pub connect_timeout: Option<Duration>,
    /// Delay before racing the next address family (happy-eyeballs style)
    pub happy_eyeballs_timeout: Option<Duration>,
    /// SO_KEEPALIVE interval; if `None` keep-alive is left at the OS default
    pub keep_alive_timeout: Option<Duration>,
    /// Local bind address; if `None` the system chooses (e.g. 127.0.0.1)
    pub local_address: Option<IpAddr>,
    /// TCP_NODELAY (disable Nagle's algorithm)
    pub nodelay: bool,
    /// SO_REUSEADDR: the connector will reuse ip address and port if true
    pub reuse_address: bool,
    /// SO_SNDBUF size; OS default if `None`
    pub send_buffer_size: Option<usize>,
    /// SO_RCVBUF size; OS default if `None`
    pub recv_buffer_size: Option<usize>,
    /// IP time-to-live for outgoing packets
    pub ttl: u32,
}
impl Default for HttpConfig {
    /// Conservative defaults: no explicit timeouts or buffer sizes, Nagle
    /// enabled, no address reuse, 300 ms happy-eyeballs delay, TTL 64.
    fn default() -> Self {
        Self {
            connect_timeout: None,
            happy_eyeballs_timeout: Some(Duration::from_millis(300)),
            keep_alive_timeout: None,
            local_address: None,
            nodelay: false,
            reuse_address: false,
            send_buffer_size: None,
            recv_buffer_size: None,
            ttl: 64,
        }
    }
}
/// Simplified `hyper::HttpConnector`
#[derive(Debug)]
pub struct HttpConnector {
    // TCP tuning applied when the connection is created.
    config: HttpConfig,
    // The open connection, if any; I/O methods panic when this is `None`.
    stream: Option<TcpStream>,
}
impl HttpConnector {
    /// Construct a new HttpConnector.
    pub fn new() -> Self {
        Self {
            config: HttpConfig::default(),
            stream: None,
        }
    }
    /// open tcp stream (with default configuration)
    pub fn open_stream(addr: &SocketAddr) -> Result<Self> {
        let mut connector = Self::new();
        connector.connect_to(addr)?;
        Ok(connector)
    }
    /// open tcp stream using the supplied `HttpConfig`
    pub fn open_stream_with_config(addr: &SocketAddr, config: HttpConfig) -> Result<Self> {
        let mut connector = Self::with_http_config(config);
        let stream = connector.create_connection(addr)?;
        connector.stream = Some(stream);
        Ok(connector)
    }
    /// Read the exact number of bytes required to fill `buf`.
    ///
    /// Panics if no connection has been opened yet.
    pub fn read_exact(&mut self, buf: &mut [u8]) -> Result<()> {
        if let Some(ref mut stream) = self.stream {
            stream.read_exact(buf)?;
            return Ok(());
        }
        panic!("no connection opened, please open connection first")
    }
    /// read all remaining bytes from the stream into `buf`; returns byte count.
    ///
    /// Panics if no connection has been opened yet.
    pub fn read_all(&mut self, buf: &mut Vec<u8>) -> Result<usize> {
        if let Some(ref mut stream) = self.stream {
            let size = stream.read_to_end(buf)?;
            return Ok(size);
        }
        panic!("no connection opened, please open connection first")
    }
    /// write all of `buf` to the stream.
    ///
    /// Panics if no connection has been opened yet.
    pub fn write_all(&mut self, buf: &[u8]) -> Result<()> {
        if let Some(ref mut stream) = self.stream {
            stream.write_all(buf)?;
            return Ok(());
        }
        panic!("no connection opened, please open connection first")
    }
    /// write a request body: text and binary payloads are sent, empty is a no-op.
    pub fn write_body(&mut self, body: Body) -> Result<()> {
        body_kind!(body.kind(),
            text => {
                self.write_all(text.as_bytes())?
            },
            binary => {
                self.write_all(binary.as_ref())?
            },
            empty => {
            }
        );
        Ok(())
    }
    /// Construct a new HttpConnector use given http config
    pub fn with_http_config(config: HttpConfig) -> Self {
        Self {
            config,
            stream: None,
        }
    }
    /// Set that all sockets have `SO_KEEPALIVE` set with the supplied duration.
    ///
    /// If `None`, the option will not be set.
    ///
    /// Default is `None`.
    #[inline]
    pub fn set_keepalive(&mut self, dur: Option<Duration>) {
        self.config_mut().keep_alive_timeout = dur;
    }
    /// Set the IP TTL applied to created sockets.
    #[inline]
    pub fn set_ttl(&mut self, ttl: u32) {
        self.config_mut().ttl = ttl;
    }
    /// Set that all sockets have `SO_NODELAY` set to the supplied value `nodelay`.
    ///
    /// Default is `false`.
    #[inline]
    pub fn set_nodelay(&mut self, nodelay: bool) {
        self.config_mut().nodelay = nodelay;
    }
    /// Set that all sockets are bound to the configured address before connection.
    ///
    /// If `None`, the sockets will not be bound.
    ///
    /// Default is `None`.
    #[inline]
    pub fn set_local_address(&mut self, addr: Option<IpAddr>) {
        self.config_mut().local_address = addr;
    }
/// Sets the value of the SO_SNDBUF option on the socket.
#[inline]
pub fn set_send_buffer_size(&mut self, size: Option<usize>) {
self.config_mut().send_buffer_size = size;
}
/// Sets the value of the SO_RCVBUF option on the socket.
#[inline]
pub fn set_recv_buffer_size(&mut self, size: Option<usize>) {
self.config_mut().recv_buffer_size = size;
}
/// Set the connect timeout.
///
/// If a domain resolves to multiple IP addresses, the timeout will be
/// evenly divided across them.
///
/// Default is `None`.
#[inline]
pub fn set_connect_timeout(&mut self, dur: Option<Duration>) {
self.config_mut().connect_timeout = dur;
}
/// Set timeout for [RFC 6555 (Happy Eyeballs)][RFC 6555] algorithm.
///
/// If hostname resolves to both IPv4 and IPv6 addresses and connection
/// cannot be established using preferred address family before timeout
/// elapses, then connector will in parallel attempt connection using other
/// address family.
///
/// If `None`, parallel connection attempts are disabled.
///
/// Default is 300 milliseconds.
///
/// [RFC 6555]: https://tools.ietf.org/html/rfc6555
#[inline]
pub fn set_happy_eyeballs_timeout(&mut self, dur: Option<Duration>) {
self.config_mut().happy_eyeballs_timeout = dur;
}
/// private
fn config_mut(&mut self) -> &mut HttpConfig {
self.config.borrow_mut()
}
/// private
fn config(&self) -> &HttpConfig {
self.config.borrow()
}
}
impl Connector for HttpConnector {
    /// Build a `TcpStream` to `socket_addr`, applying every socket option
    /// from the stored [`HttpConfig`] before and after connecting.
    fn create_connection(&mut self, socket_addr: &SocketAddr) -> Result<TcpStream> {
        let config = self.config();
        // use net2 crate to build the Tcp stream, so socket options can be
        // configured before `connect`
        let tcp_builder = match socket_addr {
            SocketAddr::V4(_) => TcpBuilder::new_v4(),
            SocketAddr::V6(_) => TcpBuilder::new_v6(),
        }?;
        // Set value for the `SO_REUSEADDR` option on this socket
        if config.reuse_address {
            tcp_builder.reuse_address(true)?;
        }
        // ttl
        tcp_builder.ttl(config.ttl)?;
        // `IpAddr` is `Copy`, so no clone is needed; port 0 lets the
        // system choose a free local port.
        if let Some(local) = config.local_address {
            tcp_builder.bind(SocketAddr::new(local, 0))?;
        }
        let stream = tcp_builder.connect(socket_addr)?;
        // `Option<Duration>` is `Copy`; the previous `.clone()` calls were
        // redundant (clippy: clone_on_copy).
        // NOTE(review): read/write timeouts reuse `connect_timeout` — there
        // is no dedicated read/write timeout field; confirm this is intended.
        stream.set_write_timeout(config.connect_timeout)?;
        stream.set_read_timeout(config.connect_timeout)?;
        stream.set_nodelay(config.nodelay)?;
        stream.set_keepalive(config.keep_alive_timeout)?;
        Ok(stream)
    }
    /// Open a connection to `addr` and store it for subsequent reads/writes.
    fn connect_to(&mut self, addr: &SocketAddr) -> Result<()> {
        let stream = self.create_connection(addr)?;
        self.stream = Some(stream);
        Ok(())
    }
}
impl Read for HttpConnector {
    /// Delegate the read to the underlying tcp stream.
    ///
    /// # Panics
    ///
    /// Panics if no connection has been opened yet.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        match self.stream {
            Some(ref mut stream) => stream.read(buf),
            None => panic!("read failed! no connection opened, please open connection first"),
        }
    }
}
impl Write for HttpConnector {
    /// Delegate the write to the underlying tcp stream.
    ///
    /// # Panics
    ///
    /// Panics if no connection has been opened yet.
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        match self.stream {
            Some(ref mut stream) => stream.write(buf),
            None => panic!("write failed! no connection opened, please open connection first"),
        }
    }
    /// Flush the underlying tcp stream.
    ///
    /// # Panics
    ///
    /// Panics if no connection has been opened yet.
    fn flush(&mut self) -> std::io::Result<()> {
        match self.stream {
            Some(ref mut stream) => stream.flush(),
            None => panic!("flush failed! no connection opened, please open connection first"),
        }
    }
}
| true
|
2b45aef05ec181a31e192d1b9de0e09a11a1994a
|
Rust
|
theikkila/redrock
|
/src/lib.rs
|
UTF-8
| 8,038
| 2.703125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate bincode;
extern crate byteorder;
extern crate rocksdb;
use std::collections::HashMap;
use bincode::{serialize, deserialize};
use byteorder::{BigEndian, ReadBytesExt}; // 1.2.7
use std::error;
use std::fmt;
use std::str;
use std::mem::transmute;
use rocksdb::{DB, Options, Direction, IteratorMode, DBCompressionType, WriteBatch};
/// Per-list metadata persisted under the `meta_l_<key>` record.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct ListMeta {
    /// number of elements; also the next free index (element i lives
    /// under `data_l_<key>_<i>`)
    length: u64
}
/// Unified error type for this crate: every underlying failure
/// (rocksdb, bincode) is flattened to a human-readable message.
#[derive(Debug, Clone, PartialEq)]
pub struct RedrockError {
    /// human-readable description of the failure
    message: String,
}
impl From<RedrockError> for String {
    /// Unwrap the error into its message.
    fn from(e: RedrockError) -> String {
        let RedrockError { message } = e;
        message
    }
}
impl From<rocksdb::Error> for RedrockError {
    /// Wrap a storage-layer error, keeping only its message.
    fn from(e: rocksdb::Error) -> RedrockError {
        let message = e.into_string();
        RedrockError { message }
    }
}
impl From<bincode::Error> for RedrockError {
    /// Wrap a (de)serialization error via its `Debug` rendering.
    fn from(e: bincode::Error) -> RedrockError {
        RedrockError { message: format!("{:?}", e) }
    }
}
impl error::Error for RedrockError {
    // NOTE(review): `description` has been deprecated in favour of the
    // `Display` impl since Rust 1.42; kept here for older callers.
    fn description(&self) -> &str {
        &self.message
    }
}
impl fmt::Display for RedrockError {
    /// Render the wrapped message verbatim.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(formatter, "{}", self.message)
    }
}
// impl fmt::Debug for Point {
// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// write!(f, "RedRockError: {}", self)
// }
// }
/// Key under which metadata for a value of type `tp` is stored.
fn meta_key(tp: &str, key: &str) -> String {
    ["meta_", tp, "_", key].concat()
}
/// Key under which the payload for a value of type `tp` is stored.
fn data_key(tp: &str, key: &str) -> String {
    ["data_", tp, "_", key].concat()
}
/// Data key of element `idx` of list `key`.
fn l_idx_key(key: &str, idx: u64) -> String {
    let element = format!("{}_{}", key, idx);
    data_key("l", &element)
}
/// Data key of `member` inside set `key`.
fn z_member_key(key: &str, member: &str) -> String {
    let qualified = format!("{}:{}", key, member);
    data_key("z", &qualified)
}
/// Common prefix of every member key of set `key`.
fn z_key(key: &str) -> String {
    data_key("z", &format!("{}:", key))
}
/// Load the metadata record for list `key`; absent or unreadable
/// metadata is treated as an empty list.
fn lmetaget(db: &rocksdb::DB, key: &str) -> Result<ListMeta, RedrockError> {
    let stored = match db.get(&meta_key("l", key)) {
        Ok(Some(bytes)) => bytes,
        _ => return Ok(ListMeta { length: 0 }),
    };
    deserialize(&stored).map_err(|e| e.into())
}
/// Persist the metadata record for list `key`.
fn lmetaset(db: &rocksdb::DB, key: &str, m: &ListMeta) -> std::result::Result<(), RedrockError> {
    let encoded = serialize(&m)?;
    db.put(&meta_key("l", key), &encoded).map_err(|e| e.into())
}
/// Append `value` to the tail of list `key`.
pub fn lpush(db: &rocksdb::DB, key: &str, value: &str) -> std::result::Result<(), RedrockError> {
    let mut meta = lmetaget(db, key)?;
    // Store the element under the current length, then bump the length.
    set_str(db, &l_idx_key(key, meta.length), value)?;
    meta.length += 1;
    lmetaset(db, key, &meta)
}
/// Number of elements in list `key` (0 if the list does not exist).
pub fn llen(db: &rocksdb::DB, key: &str) -> std::result::Result<u64, RedrockError> {
    lmetaget(db, key).map(|meta| meta.length)
}
/// Fetch and decode the string stored under `key`, if any.
pub fn get_str(db: &rocksdb::DB, key: &str) -> Option<String> {
    if let Ok(Some(bytes)) = db.get(key) {
        deserialize(&bytes).ok()
    } else {
        None
    }
}
/// Encode and store `data` under `key`.
pub fn set_str(db: &rocksdb::DB, key: &str, data: &str) -> std::result::Result<(), RedrockError> {
    let encoded = serialize(&data)?;
    db.put(key.as_bytes(), &encoded).map_err(|e| e.into())
}
/// Remove `key` from the database.
pub fn del(db: &rocksdb::DB, key: &str) -> std::result::Result<(), RedrockError> {
    db.delete(key.as_bytes()).map_err(|e| e.into())
}
/// Read the big-endian `u64` stored under `key`; `None` if the key is
/// absent, unreadable, or the value is shorter than 8 bytes.
pub fn get_u64(db: &rocksdb::DB, key: &str) -> Option<u64> {
    if let Ok(Some(bytes)) = db.get(key.as_bytes()) {
        let mut cursor: &[u8] = &bytes;
        cursor.read_u64::<BigEndian>().ok()
    } else {
        None
    }
}
/// Read the big-endian `i64` stored under `key`; `None` if the key is
/// absent, unreadable, or the value is shorter than 8 bytes.
pub fn get_i64(db: &rocksdb::DB, key: &str) -> Option<i64> {
    if let Ok(Some(bytes)) = db.get(key.as_bytes()) {
        let mut cursor: &[u8] = &bytes;
        cursor.read_i64::<BigEndian>().ok()
    } else {
        None
    }
}
/// Store `data` under `key` as 8 big-endian bytes.
pub fn set_u64(db: &rocksdb::DB, key: &str, data: u64) -> std::result::Result<(), RedrockError> {
    // `to_be_bytes` yields the same big-endian byte layout the previous
    // `unsafe { transmute(data.to_be()) }` did, without any `unsafe`.
    let bytes = data.to_be_bytes();
    db.put(key.as_bytes(), &bytes).map_err(|e| e.into())
}
/// Store `data` under `key` as 8 big-endian bytes.
pub fn set_i64(db: &rocksdb::DB, key: &str, data: i64) -> std::result::Result<(), RedrockError> {
    // Safe replacement for the former transmute-based conversion.
    let bytes = data.to_be_bytes();
    db.put(key.as_bytes(), &bytes).map_err(|e| e.into())
}
/// Increment the big-endian counter at `key`, treating a missing or
/// unreadable value as 0.
pub fn inc_u64(db: &rocksdb::DB, key: &str) -> std::result::Result<(), RedrockError> {
    let next = get_u64(db, key).unwrap_or(0) + 1;
    set_u64(db, key, next)
}
/// Signed counterpart of `inc_u64`.
pub fn inc_i64(db: &rocksdb::DB, key: &str) -> std::result::Result<(), RedrockError> {
    let next = get_i64(db, key).unwrap_or(0) + 1;
    set_i64(db, key, next)
}
/// All elements of list `key` in insertion order; elements that fail to
/// load or decode are silently skipped.
pub fn lget(db: &rocksdb::DB, key: &str) -> Vec<String> {
    let mut items: Vec<String> = vec![];
    if let Ok(meta) = lmetaget(db, key) {
        for idx in 0..meta.length {
            if let Some(item) = get_str(db, &l_idx_key(key, idx)) {
                items.push(item);
            }
        }
    }
    items
}
/// Delete list `key`: every element plus its metadata record, applied as
/// a single write batch.
pub fn ldel(db: &rocksdb::DB, key: &str) -> std::result::Result<(), RedrockError> {
    let meta = lmetaget(db, key)?;
    let mut batch = WriteBatch::default();
    for idx in 0..meta.length {
        batch.delete(&l_idx_key(key, idx));
    }
    batch.delete(&meta_key("l", key));
    db.write(batch).map_err(|e| e.into())
}
/// Whether list `key` exists, i.e. has a metadata record.
pub fn lexists(db: &rocksdb::DB, key: &str) -> std::result::Result<bool, RedrockError> {
    let found = db.get(&meta_key("l", key))?;
    Ok(found.is_some())
}
/// Add `member` to set `key` (idempotent: the member is its own value).
pub fn sadd(db: &rocksdb::DB, key: &str, member: &str) -> std::result::Result<(), RedrockError> {
    set_str(db, &z_member_key(key, member), member)
}
/// Remove `member` from set `key`.
pub fn srem(db: &rocksdb::DB, key: &str, member: &str) -> std::result::Result<(), RedrockError> {
    db.delete(&z_member_key(key, member)).map_err(|e| e.into())
}
/// Collect every (key, big-endian `i64` value) pair whose key starts with
/// `prefix`. Iteration stops at the first key that is not valid UTF-8 or
/// no longer matches the prefix.
///
/// # Panics
///
/// Panics ("inserting to hashmap") if a matching value is shorter than 8 bytes.
pub fn prefix_search(db: &rocksdb::DB, prefix: &str) -> HashMap<String, i64> {
    let mut found: HashMap<String, i64> = HashMap::new();
    // Seek to the first key >= prefix and walk forward.
    let iter = db.iterator(IteratorMode::From(prefix.as_bytes(), Direction::Forward));
    for (raw_key, raw_value) in iter {
        let key = match str::from_utf8(&raw_key) {
            Ok(k) => k,
            _ => break,
        };
        if !key.starts_with(prefix) {
            break;
        }
        let mut cursor: &[u8] = &raw_value;
        cursor
            .read_i64::<BigEndian>()
            .map(|v| found.insert(key.to_string(), v))
            .expect("inserting to hashmap");
    }
    found
}
/// Collect every (key, decoded `String` value) pair whose key starts with
/// `prefix`. Iteration stops at the first key that is not valid UTF-8 or
/// no longer matches the prefix.
///
/// # Panics
///
/// Panics ("inserting to hashmap") if a matching value fails to decode.
pub fn prefix_search_str(db: &rocksdb::DB, prefix: &str) -> HashMap<String, String> {
    let mut found: HashMap<String, String> = HashMap::new();
    let iter = db.iterator(IteratorMode::From(prefix.as_bytes(), Direction::Forward));
    for (raw_key, raw_value) in iter {
        let key = match str::from_utf8(&raw_key) {
            Ok(k) => k,
            _ => break,
        };
        if !key.starts_with(prefix) {
            break;
        }
        deserialize(&raw_value)
            .map(|s| found.insert(key.to_string(), s))
            .expect("inserting to hashmap");
    }
    found
}
/// All members of set `key`, in key order.
///
/// # Panics
///
/// Panics ("inserting to vector") if a member value fails to decode.
pub fn smembers(db: &rocksdb::DB, key: &str) -> Vec<String> {
    let prefix = z_key(key);
    let mut members: Vec<String> = vec![];
    // Scan forward from the set's key prefix until keys stop matching it.
    let iter = db.iterator(IteratorMode::From(prefix.as_bytes(), Direction::Forward));
    for (raw_key, raw_value) in iter {
        let k = match str::from_utf8(&raw_key) {
            Ok(k) => k,
            _ => break,
        };
        if !k.starts_with(&prefix) {
            break;
        }
        deserialize(&raw_value)
            .map(|s| members.push(s))
            .expect("inserting to vector");
    }
    members
}
/// Open (creating if missing) a RocksDB database at `path` with LZ4
/// compression enabled.
///
/// # Panics
///
/// Panics ("open database") if the database cannot be opened.
pub fn open_db(path: &str) -> rocksdb::DB {
    let mut options = Options::default();
    options.create_if_missing(true);
    options.set_compression_type(DBCompressionType::Lz4);
    // Tuning knobs deliberately left disabled, kept here for reference:
    //   SliceTransform::create_fixed_prefix(5)
    //   increase_parallelism(4) / enable_statistics / stats dump period
    //   prefix extractor + memtable prefix bloom ratio 0.1
    DB::open(&options, path).expect("open database")
}
| true
|
e1ac962f7d34086494cca0b3f0cf2f4fcfa53d14
|
Rust
|
dirvine/leaf
|
/src/layers/activation/mod.rs
|
UTF-8
| 730
| 2.78125
| 3
|
[
"MIT"
] |
permissive
|
//! Provides nonlinear activation methods.
//!
//! Activation Layers take a bottom Blob, provide the activation operation and
//! produce a top Blob.
//! Thanks to the nonlinearity of the activation methods, we can 'learn' and
//! detect nonlinearities
//! in our (complex) datasets.
//!
//! The activation operation used should depend on the task at hand. For binary
//! classification a
//! step function might be very useful. For more complex tasks continuous
//! activation functions such
//! as Sigmoid, TanH, Softmax or ReLU should be used. In most cases ReLU might
//! prove the best
//! results.
//!
//! The activation function is also sometimes called transfer function.
pub use self::sigmoid::Sigmoid;
pub mod sigmoid;
| true
|
930dc995fa77a81d4a7a3726f5031d5cf8d76a91
|
Rust
|
SymbioticLab/Kayak
|
/db/src/rpc.rs
|
UTF-8
| 15,368
| 2.515625
| 3
|
[] |
no_license
|
/* Copyright (c) 2018 University of Utah
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
use std::mem::{size_of, transmute};
use super::wireformat::*;
use e2d2::common::EmptyMetadata;
use e2d2::headers::{IpHeader, MacHeader, UdpHeader};
use e2d2::interface::*;
/// This function looks into a packet corresponding to an RPC request, and
/// reads its service (assumed to be the first byte after the end of the
/// UDP header).
///
/// # Arguments
///
/// * `request`: A reference to a packet corresponding to an RPC request.
///              The packet should have been parsed upto its UDP header.
///
/// # Return
///
/// If valid, the service the request should be dispatched to. If invalid, a
/// code corresponding to an invalid service (InvalidService).
pub fn parse_rpc_service(request: &Packet<UdpHeader, EmptyMetadata>) -> Service {
    // Read the service off the first byte on the payload.
    // NOTE(review): indexes [0] directly, so this panics on an empty UDP
    // payload — confirm callers only pass packets with a payload.
    let service: u8 = request.get_payload()[0];
    match service.lt(&(Service::InvalidService as u8)) {
        true => unsafe {
            // SAFETY: `service` was just checked to be strictly below
            // `Service::InvalidService`; this transmute relies on `Service`
            // being a dense u8-repr enum covering 0..InvalidService —
            // TODO confirm against the enum definition in wireformat.
            let service: Service = transmute(service);
            return service;
        },
        false => {
            return Service::InvalidService;
        }
    }
}
/// This function looks into a packet corresponding to an RPC request, and
/// reads its opcode (assumed to be the second byte after the end of the
/// UDP header).
///
/// # Arguments
///
/// * `request`: A reference to a packet corresponding to an RPC request.
///              The packet should have been parsed upto its UDP header.
///
/// # Return
///
/// If valid, the opcode on the RPC request. If invalid, an opcode corresponding
/// to an invalid operation (InvalidOperation) will be returned.
pub fn parse_rpc_opcode(request: &Packet<UdpHeader, EmptyMetadata>) -> OpCode {
    // Read the opcode off the second byte on the payload.
    // NOTE(review): indexes [1] directly — panics if the payload is
    // shorter than two bytes; confirm callers guarantee this.
    let opcode: u8 = request.get_payload()[1];
    match opcode.lt(&(OpCode::InvalidOperation as u8)) {
        true => unsafe {
            // SAFETY: `opcode` was just checked to be strictly below
            // `OpCode::InvalidOperation`; relies on `OpCode` being a dense
            // u8-repr enum — TODO confirm against wireformat.
            let opcode: OpCode = transmute(opcode);
            return opcode;
        },
        false => {
            return OpCode::InvalidOperation;
        }
    }
}
/// This function looks into the records encapsulated into the payload corresponding to an RPC
/// request, and reads its optype (assumed to be the first byte in each record in optype).
///
/// # Arguments
///
/// * `record`: A reference to a record encapsulated in response payload for a RPC request.
///             Must be non-empty; the first byte is read.
///
/// # Return
///
/// If valid, the optype for the given record. If invalid, an optype corresponding
/// to an invalid type (InvalidRecord) will be returned.
pub fn parse_record_optype(record: &[u8]) -> OpType {
    // First byte of the record encodes its type; panics on an empty slice.
    let optype = record[0];
    match optype.lt(&(OpType::InvalidRecord as u8)) {
        true => unsafe {
            // SAFETY: `optype` was just checked to be strictly below
            // `OpType::InvalidRecord`; relies on `OpType` being a dense
            // u8-repr enum — TODO confirm against wireformat.
            let optype: OpType = transmute(optype);
            return optype;
        },
        false => {
            return OpType::InvalidRecord;
        }
    }
}
/// Allocate a packet with MAC, IP, and UDP headers for an RPC request.
///
/// # Panic
///
/// Panics if allocation or header manipulation fails at any point.
///
/// # Arguments
///
/// * `mac`: Reference to the MAC header to be added to the request.
/// * `ip` : Reference to the IP header to be added to the request.
/// * `udp`: Reference to the UDP header to be added to the request.
/// * `dst`: The destination port to be written into the UDP header.
///
/// # Return
///
/// A packet with the supplied network headers written into it.
#[inline]
fn create_request(
    mac: &MacHeader,
    ip: &IpHeader,
    udp: &UdpHeader,
    dst: u16,
) -> Packet<UdpHeader, EmptyMetadata> {
    // Headers are pushed outermost-first; each push re-parses the packet
    // one layer deeper.
    let mut packet = new_packet()
        .expect("Failed to allocate packet for request!")
        .push_header(mac)
        .expect("Failed to push MAC header into request!")
        .push_header(ip)
        .expect("Failed to push IP header into request!")
        .push_header(udp)
        .expect("Failed to push UDP header into request!");
    // Write the destination port into the UDP header.
    packet.get_mut_header().set_dst_port(dst);
    return packet;
}
/// Sets the length fields on the UDP and IP headers of a packet.
///
/// # Arguments
///
/// * `request`: A packet parsed upto its UDP header whose UDP and IP length fields need to be
///              set.
///
/// # Return
///
/// A packet parsed upto its IP headers with said fields set.
pub fn fixup_header_length_fields(
    mut request: Packet<UdpHeader, EmptyMetadata>,
) -> Packet<IpHeader, EmptyMetadata> {
    // Set fields on the UDP header: header size plus current payload size.
    let udp_len = (size_of::<UdpHeader>() + request.get_payload().len()) as u16;
    request.get_mut_header().set_length(udp_len);
    // Set fields on the IP header: step back down to the IP layer first.
    let mut request = request.deparse_header(size_of::<IpHeader>());
    request
        .get_mut_header()
        .set_length(size_of::<IpHeader>() as u16 + udp_len);
    return request;
}
/// Allocate and populate a packet that requests a server "get" operation.
///
/// # Panic
///
/// May panic if there is a problem allocating the packet or constructing
/// headers, or if the key is longer than 64 KB.
///
/// # Arguments
///
/// * `mac`: Reference to the MAC header to be added to the request.
/// * `ip` : Reference to the IP header to be added to the request.
/// * `udp`: Reference to the UDP header to be added to the request.
/// * `tenant`: Id of the tenant requesting the item.
/// * `table_id`: Id of the table from which the key is looked up.
/// * `key`: Byte string of key whose value is to be fetched. Limit 64 KB.
/// * `id`: RPC identifier.
/// * `dst`: The UDP port on the server the RPC is destined for.
/// * `generator`: The issuer of the get() request (Client or Extension).
///
/// # Return
///
/// Packet populated with the request parameters.
#[inline]
pub fn create_get_rpc(
    mac: &MacHeader,
    ip: &IpHeader,
    udp: &UdpHeader,
    tenant: u32,
    table_id: u64,
    key: &[u8],
    id: u64,
    dst: u16,
    generator: GetGenerator,
) -> Packet<IpHeader, EmptyMetadata> {
    // Key length cannot be more than 16 bits. Required to construct the RPC header.
    if key.len() > u16::max_value() as usize {
        panic!("Key too long ({} bytes).", key.len());
    }
    // Allocate a packet, write the header and payload into it, and set fields on its UDP and IP
    // header.
    let mut request = create_request(mac, ip, udp, dst)
        .push_header(&GetRequest::new(
            tenant,
            table_id,
            key.len() as u16,
            id,
            generator,
        ))
        .expect("Failed to push RPC header into request!");
    request
        .add_to_payload_tail(key.len(), &key)
        .expect("Failed to write key into get() request!");
    fixup_header_length_fields(request.deparse_header(size_of::<UdpHeader>()))
}
/// Allocate and populate a packet that requests a server "put" operation.
///
/// # Panic
///
/// May panic if there is a problem allocating the packet or constructing
/// headers, or if the key is longer than 64 KB.
///
/// # Arguments
///
/// * `mac`: Reference to the MAC header to be added to the request.
/// * `ip` : Reference to the IP header to be added to the request.
/// * `udp`: Reference to the UDP header to be added to the request.
/// * `tenant`: Id of the tenant requesting the insertion.
/// * `table_id`: Id of the table into which the key-value pair is to be inserted.
/// * `key`: Byte string of key whose value is to be inserted. Limit 64 KB.
/// * `val`: Byte string of the value to be inserted.
/// * `id`: RPC identifier.
/// * `dst`: The UDP port on the server the RPC is destined for.
///
/// # Return
///
/// Packet populated with the request parameters.
#[inline]
pub fn create_put_rpc(
    mac: &MacHeader,
    ip: &IpHeader,
    udp: &UdpHeader,
    tenant: u32,
    table_id: u64,
    key: &[u8],
    val: &[u8],
    id: u64,
    dst: u16,
) -> Packet<IpHeader, EmptyMetadata> {
    // Key length cannot be more than 16 bits. Required to construct the RPC header.
    if key.len() > u16::max_value() as usize {
        panic!("Key too long ({} bytes).", key.len());
    }
    // Allocate a packet, write the header and payload into it, and set fields on its UDP and IP
    // header. The payload is the key immediately followed by the value;
    // the header's key length tells the server where the split is.
    let mut request = create_request(mac, ip, udp, dst)
        .push_header(&PutRequest::new(tenant, table_id, key.len() as u16, id))
        .expect("Failed to push RPC header into request!");
    let mut payload = Vec::with_capacity(key.len() + val.len());
    payload.extend_from_slice(key);
    payload.extend_from_slice(val);
    request
        .add_to_payload_tail(payload.len(), &payload)
        .expect("Failed to write key into put() request!");
    fixup_header_length_fields(request.deparse_header(size_of::<UdpHeader>()))
}
/// Allocate and populate a packet that requests a server "multiget" operation.
///
/// # Arguments
///
/// * `mac`: Reference to the MAC header to be added to the request.
/// * `ip` : Reference to the IP header to be added to the request.
/// * `udp`: Reference to the UDP header to be added to the request.
/// * `tenant`: Id of the tenant requesting the item.
/// * `table_id`: Id of the table from which the key is looked up.
/// * `key_len`: The length of each key to be looked up at the server. All keys are
///              assumed to be of equal length.
/// * `num_keys`: The number of keys to be looked up at the server.
/// * `keys`: Byte string of key whose values are to be fetched.
/// * `id`: RPC identifier.
/// * `dst`: The UDP port on the server the RPC is destined for.
///
/// # Return
///
/// Packet populated with the request parameters.
#[inline]
pub fn create_multiget_rpc(
    mac: &MacHeader,
    ip: &IpHeader,
    udp: &UdpHeader,
    tenant: u32,
    table_id: u64,
    key_len: u16,
    num_keys: u32,
    keys: &[u8],
    id: u64,
    dst: u16,
) -> Packet<IpHeader, EmptyMetadata> {
    // Allocate a packet, write the header and payload into it, and set fields on its UDP and IP
    // header.
    // NOTE(review): unlike get()/put(), nothing validates that
    // `keys.len() == key_len * num_keys` — confirm callers keep these
    // consistent.
    let mut request = create_request(mac, ip, udp, dst)
        .push_header(&MultiGetRequest::new(
            tenant, table_id, key_len, num_keys, id,
        ))
        .expect("Failed to push RPC header into request!");
    request
        .add_to_payload_tail(keys.len(), &keys)
        .expect("Failed to write key into multiget() request!");
    fixup_header_length_fields(request.deparse_header(size_of::<UdpHeader>()))
}
/// Allocate and populate a packet that requests a server "invoke" operation.
///
/// # Panic
///
/// May panic if there is a problem allocating the packet or constructing
/// headers, or if the arguments exceed 4 GB.
///
/// # Arguments
///
/// * `mac`: Reference to the MAC header to be added to the request.
/// * `ip` : Reference to the IP header to be added to the request.
/// * `udp`: Reference to the UDP header to be added to the request.
/// * `tenant`: Id of the tenant requesting the invocation.
/// * `name_len`: Number of bytes at the head of the payload identifying the extension.
/// * `payload`: The RPC payload to be written into the packet. Should contain the name of the
///              extension, followed by its arguments.
/// * `id`: RPC identifier.
/// * `dst`: The destination port on the server the RPC is destined for.
///
/// # Return
///
/// Packet populated with the request parameters.
#[inline]
pub fn create_invoke_rpc(
    mac: &MacHeader,
    ip: &IpHeader,
    udp: &UdpHeader,
    tenant: u32,
    name_len: u32,
    payload: &[u8],
    id: u64,
    dst: u16,
) -> Packet<IpHeader, EmptyMetadata> {
    // The Arguments to the procedure cannot be more that 4 GB long.
    // NOTE(review): if `name_len > payload.len()` this subtraction
    // underflows (panic in debug, wrap in release) — confirm callers
    // guarantee name_len <= payload.len().
    if payload.len() - name_len as usize > u32::max_value() as usize {
        panic!(
            "Args too long ({} bytes).",
            payload.len() - name_len as usize
        );
    }
    // Allocate a packet, write the header and payload into it, and set fields on its UDP and IP
    // header. Since the payload contains both, the name and arguments in it, args_len can be
    // calculated as payload length - name_len.
    let mut request = create_request(mac, ip, udp, dst)
        .push_header(&InvokeRequest::new(
            tenant,
            name_len,
            (payload.len() - name_len as usize) as u32,
            id,
        ))
        .expect("Failed to push RPC header into request!");
    request
        .add_to_payload_tail(payload.len(), &payload)
        .expect("Failed to write args into invoke() request!");
    fixup_header_length_fields(request.deparse_header(size_of::<UdpHeader>()))
}
/// Allocate and populate a packet that requests a server "commit" operation.
///
/// # Panic
///
/// May panic if there is a problem allocating the packet or constructing
/// headers, or if the payload exceeds what a single 1500B frame can carry.
///
/// # Arguments
///
/// * `mac`: Reference to the MAC header to be added to the request.
/// * `ip` : Reference to the IP header to be added to the request.
/// * `udp`: Reference to the UDP header to be added to the request.
/// * `tenant`: Id of the tenant requesting the item.
/// * `table_id`: Id of the table from which the key is looked up.
/// * `payload`: Byte string of read-write set whose value is used for transaction validation.
/// * `id`: RPC identifier.
/// * `dst`: The UDP port on the server the RPC is destined for.
/// * `key_len`: The length of the key for each record.
/// * `val_len`: The length of the value for each record.
///
/// # Return
///
/// Packet populated with the request parameters.
#[inline]
pub fn create_commit_rpc(
    mac: &MacHeader,
    ip: &IpHeader,
    udp: &UdpHeader,
    tenant: u32,
    table_id: u64,
    payload: &[u8],
    id: u64,
    dst: u16,
    key_len: u16,
    val_len: u16,
) -> Packet<IpHeader, EmptyMetadata> {
    // `key_len` and `val_len` are `u16`, so they can never exceed 16 bits;
    // the previous `key_len > u16::max_value()` guard was always false
    // (clippy: absurd_extreme_comparisons) and has been removed — the type
    // system already enforces the bound.
    if payload.len() > 1436 {
        panic!("Network support doesn't support > 1500B packets");
    }
    // Allocate a packet, write the header and payload into it, and set fields
    // on its UDP and IP header.
    let mut request = create_request(mac, ip, udp, dst)
        .push_header(&CommitRequest::new(tenant, id, table_id, key_len, val_len))
        .expect("Failed to push RPC header into request!");
    request
        .add_to_payload_tail(payload.len(), &payload)
        .expect("Failed to write payload into commit() request!");
    fixup_header_length_fields(request.deparse_header(size_of::<UdpHeader>()))
}
| true
|
c58b80967849fec0f15480df476f1c881c3e1d04
|
Rust
|
likr/atcoder
|
/agc029/src/bin/b.rs
|
UTF-8
| 1,508
| 2.828125
| 3
|
[] |
no_license
|
use proconio::input;
#[allow(unused_imports)]
use proconio::marker::*;
#[allow(unused_imports)]
use std::cmp::*;
#[allow(unused_imports)]
use std::collections::*;
#[allow(unused_imports)]
use std::f64::consts::*;
#[allow(unused)]
const INF: usize = std::usize::MAX / 4;
#[allow(unused)]
const M: usize = 1000000007;
/// AGC029 B: greedily count pairs of numbers whose sum is a power of two.
fn main() {
    input! {
        n: usize,
        a: [usize; n],
    }
    // Multiset of the input values: value -> remaining count.
    let mut count = HashMap::new();
    for i in 0..n {
        *count.entry(a[i]).or_insert(0) += 1;
    }
    // Distinct values, largest first (presumably so each value is matched
    // against the smallest power of two exceeding it before smaller values
    // consume its partner — TODO confirm the greedy argument).
    let mut keys = count.keys().map(|&k| k).collect::<Vec<usize>>();
    keys.sort();
    keys.reverse();
    // Candidate sums 2^1 .. 2^39, processed largest first.
    let mut b = vec![];
    for i in 1..40 {
        b.push(2usize.pow(i));
    }
    b.reverse();
    let mut result = 0;
    for &bi in &b {
        for &k1 in &keys {
            if k1 >= bi {
                continue;
            }
            let c1 = count[&k1];
            // Partner value needed so that k1 + k2 == bi.
            let k2 = bi - k1;
            if let Some(&c2) = count.get(&k2) {
                if k1 == k2 {
                    // Pair equal values among themselves: floor(count / 2) pairs.
                    let c = c1 / 2;
                    result += c;
                    *count.get_mut(&k1).unwrap() -= c;
                } else {
                    // Pair as many (k1, k2) couples as both counts allow.
                    let c = min(c1, c2);
                    result += c;
                    *count.get_mut(&k1).unwrap() -= c;
                    *count.get_mut(&k2).unwrap() -= c;
                }
            }
        }
    }
    println!("{}", result);
}
| true
|
21b1a36340d2c5c0f4344eabcb1ac4b984c85024
|
Rust
|
lamedh-dev/aws-lambda-rust-runtime
|
/lambda-http/src/ext.rs
|
UTF-8
| 12,685
| 3.1875
| 3
|
[
"Apache-2.0"
] |
permissive
|
//! Extension methods for `http::Request` types
use crate::{request::RequestContext, strmap::StrMap, Body};
use serde::{de::value::Error as SerdeError, Deserialize};
use std::{error::Error, fmt};
/// ALB/API gateway pre-parsed http query string parameters
// Newtype wrapper so each parameter map gets its own distinct type.
pub(crate) struct QueryStringParameters(pub(crate) StrMap);
/// API gateway pre-extracted url path parameters
///
/// These will always be empty for ALB requests
pub(crate) struct PathParameters(pub(crate) StrMap);
/// API gateway configured
/// [stage variables](https://docs.aws.amazon.com/apigateway/latest/developerguide/stage-variables.html)
///
/// These will always be empty for ALB requests
pub(crate) struct StageVariables(pub(crate) StrMap);
/// Request payload deserialization errors
///
/// Returned by [`RequestExt#payload()`](trait.RequestExt.html#tymethod.payload)
#[derive(Debug)]
pub enum PayloadError {
    /// Returned when `application/json` bodies fail to deserialize a payload
    Json(serde_json::Error),
    /// Returned when `application/x-www-form-urlencoded` bodies fail to deserialize a payload
    WwwFormUrlEncoded(SerdeError),
}
impl fmt::Display for PayloadError {
    /// Describe which content type failed to parse, followed by the
    /// underlying deserializer error.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            PayloadError::Json(ref err) => {
                writeln!(f, "failed to parse payload from application/json {}", err)
            }
            PayloadError::WwwFormUrlEncoded(ref err) => writeln!(
                f,
                "failed to parse payload from application/x-www-form-urlencoded {}",
                err
            ),
        }
    }
}
impl Error for PayloadError {
    /// Expose the wrapped deserialization error as this error's source.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        match *self {
            PayloadError::Json(ref err) => Some(err),
            PayloadError::WwwFormUrlEncoded(ref err) => Some(err),
        }
    }
}
/// Extensions for `lambda_http::Request` structs that
/// provide access to [API gateway](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-input-format)
/// and [ALB](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html)
/// features.
///
/// # Examples
///
/// A request's body can be deserialized if its correctly encoded as per
/// the request's `Content-Type` header. The two supported content types are
/// `application/x-www-form-urlencoded` and `application/json`.
///
/// The following handler will work with an http request body of `x=1&y=2`
/// as well as `{"x":1, "y":2}` respectively.
///
/// ```rust,no_run
/// use lamedh_http::{handler, lambda::{self, Context, Error}, IntoResponse, Request, Response, RequestExt};
/// use aws_lambda_events::encodings::Body;
/// use serde::Deserialize;
///
/// #[derive(Debug,Deserialize,Default)]
/// struct Args {
/// #[serde(default)]
/// x: usize,
/// #[serde(default)]
/// y: usize
/// }
///
/// #[tokio::main]
/// async fn main() -> Result<(), Error> {
/// lamedh_runtime::run(handler(add)).await?;
/// Ok(())
/// }
///
/// async fn add(
/// request: Request,
/// _: Context
/// ) -> Result<Response<Body>, Error> {
/// let args: Args = request.payload()
/// .unwrap_or_else(|_parse_err| None)
/// .unwrap_or_default();
/// Ok(
/// Response::new(
/// format!(
/// "{} + {} = {}",
/// args.x,
/// args.y,
/// args.x + args.y
/// ).into()
/// )
/// )
/// }
/// ```
pub trait RequestExt {
/// Return pre-parsed http query string parameters, parameters
/// provided after the `?` portion of a url,
/// associated with the API gateway request.
///
/// The yielded value represents both single and multi-valued
/// parameters alike. When multiple query string parameters with the same
/// name are expected, `query_string_parameters().get_all("many")` to retrieve them all.
///
/// No query parameters
/// will yield an empty `StrMap`.
fn query_string_parameters(&self) -> StrMap;
/// Configures instance with query string parameters under #[cfg(test)] configurations
///
/// This is intended for use in mock testing contexts.
fn with_query_string_parameters<Q>(self, parameters: Q) -> Self
where
Q: Into<StrMap>;
/// Return pre-extracted path parameters, parameter provided in url placeholders
/// `/foo/{bar}/baz/{boom}`,
/// associated with the API gateway request. No path parameters
/// will yield an empty `StrMap`
///
/// These will always be empty for ALB triggered requests
fn path_parameters(&self) -> StrMap;
/// Configures instance with path parameters under #[cfg(test)] configurations
///
/// This is intended for use in mock testing contexts.
fn with_path_parameters<P>(self, parameters: P) -> Self
where
P: Into<StrMap>;
/// Return [stage variables](https://docs.aws.amazon.com/apigateway/latest/developerguide/stage-variables.html)
/// associated with the API gateway request. No stage parameters
/// will yield an empty `StrMap`
///
/// These will always be empty for ALB triggered requests
fn stage_variables(&self) -> StrMap;
/// Configures instance with stage variables under #[cfg(test)] configurations
///
/// This is intended for use in mock testing contexts.
#[cfg(test)]
fn with_stage_variables<V>(self, variables: V) -> Self
where
V: Into<StrMap>;
/// Return request context data assocaited with the ALB or API gateway request
fn request_context(&self) -> RequestContext;
/// Return the Result of a payload parsed into a serde Deserializeable
/// type
///
/// Currently only `application/x-www-form-urlencoded`
/// and `application/json` flavors of content type
/// are supported
///
/// A [PayloadError](enum.PayloadError.html) will be returned for undeserializable
/// payloads. If no body is provided, `Ok(None)` will be returned.
fn payload<D>(&self) -> Result<Option<D>, PayloadError>
where
for<'de> D: Deserialize<'de>;
}
impl RequestExt for http::Request<Body> {
fn query_string_parameters(&self) -> StrMap {
self.extensions()
.get::<QueryStringParameters>()
.map(|ext| ext.0.clone())
.unwrap_or_default()
}
fn with_query_string_parameters<Q>(self, parameters: Q) -> Self
where
Q: Into<StrMap>,
{
let mut s = self;
s.extensions_mut().insert(QueryStringParameters(parameters.into()));
s
}
fn path_parameters(&self) -> StrMap {
self.extensions()
.get::<PathParameters>()
.map(|ext| ext.0.clone())
.unwrap_or_default()
}
fn with_path_parameters<P>(self, parameters: P) -> Self
where
P: Into<StrMap>,
{
let mut s = self;
s.extensions_mut().insert(PathParameters(parameters.into()));
s
}
fn stage_variables(&self) -> StrMap {
self.extensions()
.get::<StageVariables>()
.map(|ext| ext.0.clone())
.unwrap_or_default()
}
#[cfg(test)]
fn with_stage_variables<V>(self, variables: V) -> Self
where
V: Into<StrMap>,
{
let mut s = self;
s.extensions_mut().insert(StageVariables(variables.into()));
s
}
fn request_context(&self) -> RequestContext {
self.extensions()
.get::<RequestContext>()
.cloned()
.expect("Request did not contain a request context")
}
fn payload<D>(&self) -> Result<Option<D>, PayloadError>
where
for<'de> D: Deserialize<'de>,
{
self.headers()
.get(http::header::CONTENT_TYPE)
.map(|ct| match ct.to_str() {
Ok(content_type) => {
if content_type.starts_with("application/x-www-form-urlencoded") {
return serde_urlencoded::from_bytes::<D>(self.body().as_ref())
.map_err(PayloadError::WwwFormUrlEncoded)
.map(Some);
} else if content_type.starts_with("application/json") {
return serde_json::from_slice::<D>(self.body().as_ref())
.map_err(PayloadError::Json)
.map(Some);
}
Ok(None)
}
_ => Ok(None),
})
.unwrap_or_else(|| Ok(None))
}
}
#[cfg(test)]
mod tests {
use crate::{Body, Request, RequestExt};
use serde::Deserialize;
#[test]
fn requests_can_mock_query_string_parameters_ext() {
let mocked = hashmap! {
"foo".into() => vec!["bar".into()]
};
let request = Request::default().with_query_string_parameters(mocked.clone());
assert_eq!(request.query_string_parameters(), mocked.into());
}
#[test]
fn requests_can_mock_path_parameters_ext() {
let mocked = hashmap! {
"foo".into() => vec!["bar".into()]
};
let request = Request::default().with_path_parameters(mocked.clone());
assert_eq!(request.path_parameters(), mocked.into());
}
#[test]
fn requests_can_mock_stage_variables_ext() {
let mocked = hashmap! {
"foo".into() => vec!["bar".into()]
};
let request = Request::default().with_stage_variables(mocked.clone());
assert_eq!(request.stage_variables(), mocked.into());
}
#[test]
fn requests_have_form_post_parsable_payloads() {
#[derive(Deserialize, PartialEq, Debug)]
struct Payload {
foo: String,
baz: usize,
}
let request = http::Request::builder()
.header("Content-Type", "application/x-www-form-urlencoded")
.body(Body::from("foo=bar&baz=2"))
.expect("failed to build request");
let payload: Option<Payload> = request.payload().unwrap_or_default();
assert_eq!(
payload,
Some(Payload {
foo: "bar".into(),
baz: 2
})
);
}
#[test]
fn requests_have_json_parseable_payloads() {
#[derive(Deserialize, PartialEq, Debug)]
struct Payload {
foo: String,
baz: usize,
}
let request = http::Request::builder()
.header("Content-Type", "application/json")
.body(Body::from(r#"{"foo":"bar", "baz": 2}"#))
.expect("failed to build request");
let payload: Option<Payload> = request.payload().unwrap_or_default();
assert_eq!(
payload,
Some(Payload {
foo: "bar".into(),
baz: 2
})
);
}
#[test]
fn requests_match_form_post_content_type_with_charset() {
#[derive(Deserialize, PartialEq, Debug)]
struct Payload {
foo: String,
baz: usize,
}
let request = http::Request::builder()
.header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")
.body(Body::from("foo=bar&baz=2"))
.expect("failed to build request");
let payload: Option<Payload> = request.payload().unwrap_or_default();
assert_eq!(
payload,
Some(Payload {
foo: "bar".into(),
baz: 2
})
);
}
#[test]
fn requests_match_json_content_type_with_charset() {
#[derive(Deserialize, PartialEq, Debug)]
struct Payload {
foo: String,
baz: usize,
}
let request = http::Request::builder()
.header("Content-Type", "application/json; charset=UTF-8")
.body(Body::from(r#"{"foo":"bar", "baz": 2}"#))
.expect("failed to build request");
let payload: Option<Payload> = request.payload().unwrap_or_default();
assert_eq!(
payload,
Some(Payload {
foo: "bar".into(),
baz: 2
})
);
}
#[test]
fn requests_omiting_content_types_do_not_support_parseable_payloads() {
#[derive(Deserialize, PartialEq, Debug)]
struct Payload {
foo: String,
baz: usize,
}
let request = http::Request::builder()
.body(Body::from(r#"{"foo":"bar", "baz": 2}"#))
.expect("failed to bulid request");
let payload: Option<Payload> = request.payload().unwrap_or_default();
assert_eq!(payload, None);
}
}
| true
|
8abdbeedb7b305c544654ee52778c5a5dd7598cb
|
Rust
|
aochagavia/IbanCalculator
|
/src/lock/advanced_spin_lock.rs
|
UTF-8
| 1,891
| 3.40625
| 3
|
[] |
no_license
|
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicBool, Ordering};
use std::ops::{Deref, DerefMut};
pub struct AdvancedSpinLock<T> {
    // `true` while some thread holds the lock.
    lock: AtomicBool,
    // The protected value; only accessed while `lock` is held.
    data: UnsafeCell<T>,
}
// SAFETY: the spin lock serializes all access to `data`, so the lock may be
// sent to and shared between threads whenever `T` itself is `Send`.
unsafe impl<T: Send> Send for AdvancedSpinLock<T> { }
unsafe impl<T: Send> Sync for AdvancedSpinLock<T> { }
impl<T> AdvancedSpinLock<T> {
    /// Creates a new AdvancedSpinLock in an unlocked state ready for use.
    pub fn new(t: T) -> AdvancedSpinLock<T> {
        AdvancedSpinLock {
            lock: AtomicBool::new(false),
            data: UnsafeCell::new(t),
        }
    }

    /// Acquires a SpinLockGuard, spinning the current thread until it is able to do so.
    pub fn lock(&self) -> SpinLockGuard<T> {
        // `compare_and_swap` is deprecated since Rust 1.50; the drop-in
        // replacement inside a retry loop is `compare_exchange_weak`.
        // Both orderings stay `SeqCst` to preserve the original semantics.
        while self
            .lock
            .compare_exchange_weak(false, true, Ordering::SeqCst, Ordering::SeqCst)
            .is_err()
        {
            // Test-and-test-and-set: spin on a plain load while the lock is
            // held to reduce cache-line contention, then retry the exchange.
            while self.lock.load(Ordering::SeqCst) { }
        }
        // Exit the spinning wait, holding the lock
        // SAFETY: we hold the lock at this point, so creating the guard
        // (which grants access to `data`) is sound.
        unsafe { SpinLockGuard::new(self) }
        // Note: the lock will be released by `SpinLockGuard` when it falls out of scope
    }
}
pub struct SpinLockGuard<'a, T: 'a> {
    // Back-reference to the owning lock; used to release it on drop.
    __spin_lock: &'a AdvancedSpinLock<T>,
}
impl<'a, T> SpinLockGuard<'a, T> {
    /// Wrap the lock in a guard.
    ///
    /// # Safety
    /// The caller must already hold `lock`; dropping the returned guard
    /// releases it.
    unsafe fn new(lock: &'a AdvancedSpinLock<T>) -> SpinLockGuard<'a, T> {
        SpinLockGuard { __spin_lock: lock }
    }
}
impl<'a, T> Drop for SpinLockGuard<'a, T> {
    /// Release the lock so other spinning threads can acquire it.
    #[inline]
    fn drop(&mut self) {
        self.__spin_lock.lock.store(false, Ordering::SeqCst);
    }
}
// The `Deref` and `DerefMut` traits are not necessary to enforce safety, but
// are very convenient from an API perspective
impl<'a, T> Deref for SpinLockGuard<'a, T> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: the guard exists only while the lock is held, so no other
        // thread can access `data` concurrently.
        unsafe { &*self.__spin_lock.data.get() }
    }
}
impl<'a, T> DerefMut for SpinLockGuard<'a, T> {
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: holding the guard implies exclusive access to `data`.
        unsafe { &mut *self.__spin_lock.data.get() }
    }
}
| true
|
d3ee1bb22e3e7eadb0e7bf31f0c9f067519af1b9
|
Rust
|
Nouzan/gray-tree
|
/src/binary_tree/iter.rs
|
UTF-8
| 1,858
| 3.1875
| 3
|
[] |
no_license
|
use super::Node;
use std::collections::VecDeque;
use std::marker::PhantomData;
/// Level order traverse iterator.
#[derive(Debug)]
pub struct LevelOrderIter<'a, T> {
    // Rightmost (last enqueued) node of the current level; passing it
    // increments `level`.
    last: *const Node<T>,
    // Nodes pending a visit, in FIFO (breadth-first) order.
    queue: VecDeque<*const Node<T>>,
    // Ties the raw pointers' validity to the borrow of the tree.
    marker: PhantomData<&'a Node<T>>,
    // Level of the next item yielded by `next`.
    level: usize,
}
impl<'a, T> LevelOrderIter<'a, T> {
    /// Create a level order traverse iter rooted at `node`.
    pub fn new(node: &'a Node<T>) -> Self {
        let root = node as *const Node<T>;
        let mut queue = VecDeque::with_capacity(1);
        queue.push_back(root);
        Self {
            last: root,
            queue,
            level: 0,
            marker: PhantomData,
        }
    }
    /// Return the level in the tree of the next item
    /// returned by `next`.
    pub fn level(&self) -> usize {
        self.level
    }
}
impl<'a, T> Iterator for LevelOrderIter<'a, T> {
    // Yields `(level, data)` pairs in breadth-first order.
    type Item = (usize, &'a T);
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(ptr) = self.queue.pop_front() {
            unsafe {
                // SAFETY: every pointer in the queue was derived from a
                // reference of lifetime 'a, so it stays valid for as long as
                // the iterator may be used.
                if let Some(node) = ptr.as_ref() {
                    // Enqueue children; they belong to the next level.
                    if let Some(left) = node.left() {
                        self.queue.push_back(left as *const _);
                    }
                    if let Some(right) = node.right() {
                        self.queue.push_back(right as *const _);
                    }
                    let level = self.level;
                    // update the last pointer.
                    if self.last == ptr {
                        // We just dequeued the rightmost node of the current
                        // level, so the queue's tail is now the rightmost
                        // node of the next level.
                        if let Some(last) = self.queue.back() {
                            self.last = *last;
                        }
                        self.level += 1;
                    }
                    Some((level, node.data()))
                } else {
                    None
                }
            }
        } else {
            None
        }
    }
}
| true
|
1e48106785a2169d2cc2ac07c9ea6e82c240f5c1
|
Rust
|
Byron/gitoxide
|
/gix-odb/tests/odb/store/compound.rs
|
UTF-8
| 984
| 2.796875
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! These are old tests of the now removed linked odb, but executed on the general store
//! to be sure we don't lose coverage. This might, however, be overlapping with much more thorough
//! tests of the general store itself, so they can possibly be removed at some point.
mod locate {
    use gix_odb::Find;
    use crate::{hex_to_id, odb::db};
    /// Assert that the object identified by `hex_id` is present in `db`.
    fn can_locate(db: &gix_odb::Handle, hex_id: &str) {
        let mut buf = Vec::new();
        let found = db
            .try_find(hex_to_id(hex_id), &mut buf)
            .expect("no read error");
        assert!(found.is_some());
    }
    #[test]
    fn loose_object() {
        can_locate(&db(), "37d4e6c5c48ba0d245164c4e10d5f41140cab980");
    }
    #[test]
    fn pack_object() {
        // One object from each pack in the fixture repository.
        can_locate(&db(), "501b297447a8255d3533c6858bb692575cdefaa0"); // pack 11fd
        can_locate(&db(), "4dac9989f96bc5b5b1263b582c08f0c5f0b58542"); // pack a2bf
        can_locate(&db(), "dd25c539efbb0ab018caa4cda2d133285634e9b5"); // pack c043
    }
}
| true
|
27053c54d3060687e4be4064380013a7a1083854
|
Rust
|
nickelc/dbl-rs
|
/src/widget.rs
|
UTF-8
| 2,644
| 2.921875
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! URL Builders for badge, large and small widgets.
use std::collections::HashMap;
use url::{ParseError, Url};
use crate::types::BotId;
/// URL Builder for [badge widgets](https://top.gg/api/docs#widgets).
pub enum Badge {
    /// `owner` widget kind.
    Owner,
    /// `upvotes` widget kind.
    Upvotes,
    /// `servers` widget kind.
    Servers,
    /// `status` widget kind.
    Status,
    /// `lib` widget kind.
    Library,
}
impl Badge {
    /// Build the badge URL for `bot`; when `show_avatar` is false the
    /// `noavatar=true` query parameter is appended.
    pub fn build<T>(&self, bot: T, show_avatar: bool) -> Result<Url, ParseError>
    where
        T: Into<BotId>,
    {
        let kind = match self {
            Badge::Owner => "owner",
            Badge::Upvotes => "upvotes",
            Badge::Servers => "servers",
            Badge::Status => "status",
            Badge::Library => "lib",
        };
        let query = if show_avatar { "" } else { "?noavatar=true" };
        let url = format!("{}{}", api!("/widget/{}/{}.svg", kind, bot.into()), query);
        Url::parse(&url)
    }
}
/// URL Builder for [large widgets](https://top.gg/api/docs#widgets).
// The tuple field accumulates `query key -> color value` pairs.
pub struct LargeWidget(HashMap<&'static str, String>);
/// URL Builder for [small widgets](https://top.gg/api/docs#widgets).
pub struct SmallWidget(HashMap<&'static str, String>);
// Generates a widget builder: a `new` constructor, a `build` method that
// renders the accumulated parameters into a widget URL, a `Default` impl,
// and one setter per `$fn: $name;` pair that stores a color under the query
// key `$name`.
macro_rules! impl_widget {
    (
        $widget:ident($cnt:expr) {
            $(
                $(#[$fn_meta:ident $($meta_args:tt)*])*
                $fn:ident: $name:expr;
            )+
        }
    ) => {
        impl $widget {
            pub fn new() -> Self {
                // `$cnt` is sized to the number of setters so the map never
                // needs to grow.
                $widget(HashMap::with_capacity($cnt))
            }
            /// Build the widget url.
            pub fn build<T>(self, bot: T) -> Result<Url, ParseError>
            where
                T: Into<BotId>,
            {
                Url::parse_with_params(&api!("/widget/{}.svg", bot.into()), self.0)
            }
            $(
                $(#[$fn_meta $($meta_args)*])*
                pub fn $fn<T>(mut self, color: T) -> Self
                where
                    T: ToString,
                {
                    self.0.insert($name, color.to_string());
                    self
                }
            )+
        }
        impl Default for $widget {
            fn default() -> Self {
                $widget::new()
            }
        }
    };
}
impl_widget!(LargeWidget(7) {
    top_color: "topcolor";
    middle_color: "middlecolor";
    username_color: "usernamecolor";
    certified_color: "certifiedcolor";
    data_color: "datacolor";
    label_color: "labelcolor";
    // NOTE(review): "hightlight" looks misspelled ("highlight"), but both the
    // method name and the query key are public API, so fixing it would be a
    // breaking, coordinated rename — verify against the top.gg widget API.
    hightlight_color: "hightlightcolor";
});
impl_widget!(SmallWidget(5) {
    avatarbg_color: "avatarbgcolor";
    left_color: "leftcolor";
    right_color: "rightcolor";
    lefttext_color: "lefttextcolor";
    righttext_color: "righttextcolor";
});
| true
|
8b7436ae0db5ed80328e0cc0e2c049edbb0c886f
|
Rust
|
jagedn/represaliados
|
/main.rs
|
UTF-8
| 4,921
| 2.75
| 3
|
[] |
no_license
|
extern crate reqwest;
extern crate select;
extern crate encoding;
// importation syntax
use scraper::{Html, Selector};
use serde::{Deserialize, Serialize};
use serde_json::{Result};
#[derive(Serialize, Deserialize)]
// One scraped victim record; fields mirror the labels shown on the
// pares.mcu.es detail page for a person.
struct Record{
    index: i32,
    nombre : String,
    poblacion : String,
    residencia :String,
    profesion : String,
    expediente : String,
    archivo: String,
    fondo: String,
    serie: String,
    signatura: String,
    fecha: String,
    paginas: String,
    tipologia: String,
    observaciones: String,
}
/// Serialize `record` as one JSON object and print it on its own line.
fn print_record(record: Record) -> Result<()> {
    println!("{}", serde_json::to_string(&record)?);
    Ok(())
}
fn main() {
    // Start (inclusive) and end (exclusive) of the person-id range to scrape,
    // taken from the command line.
    let index : i32 = std::env::args()
        .nth(1).expect("Neceisto el inicio").parse().unwrap();
    let end : i32 = std::env::args()
        .nth(2).expect("Necesito el final").parse().unwrap();
    for n in index..end{
        let url = format!("http://pares.mcu.es/victimasGCFPortal/detalle.form?idpersona={}",n);
        let resp = reqwest::blocking::get(&url).unwrap();
        if resp.status().is_success() {
            // The site serves Latin-1, so decode with that charset before parsing.
            let body = resp.text_with_charset("ISO-8859-1").unwrap();
            let document = Html::parse_document(&body);
            let detalle_selector = Selector::parse("table[summary='Detalle']").unwrap();
            let expediente_selector = Selector::parse("table[summary='Expediente']").unwrap();
            let tr_selector = Selector::parse("tr").unwrap();
            let td_selector = Selector::parse("td").unwrap();
            let strong_selector = Selector::parse("strong").unwrap();
            // Start from an all-empty record and fill in whatever the page provides.
            let mut record = Record{
                index: n,
                nombre: "".to_owned(),
                poblacion: "".to_owned(),
                residencia: "".to_owned(),
                profesion: "".to_owned(),
                expediente : "".to_owned(),
                archivo: "".to_owned(),
                fondo: "".to_owned(),
                serie: "".to_owned(),
                signatura: "".to_owned(),
                fecha: "".to_owned(),
                paginas: "".to_owned(),
                tipologia: "".to_owned(),
                observaciones: "".to_owned(),
            };
            if document.select(&detalle_selector).count() > 0{
                let total_tr = document.select(&detalle_selector)
                    .next().unwrap().select(&tr_selector).count();
                let detalle = document.select(&detalle_selector).next().unwrap();
                let mut trs = detalle.select(&tr_selector);
                // NOTE: `nth(0)` consumes the yielded element, so each call
                // below advances `trs` to the next <tr> row.
                let nombre = trs.nth(0).unwrap()
                    .select(&td_selector).nth(0).unwrap()
                    .select(&strong_selector).nth(0)
                    .unwrap();
                record.nombre = String::from(nombre.inner_html().trim());
                // This row is intentionally skipped.
                let _otros = trs.nth(0).unwrap();
                let poblacion = trs.nth(0).unwrap()
                    .select(&td_selector).nth(0).unwrap().text().next().unwrap_or("");
                record.poblacion = String::from(poblacion.trim());
                // A fifth row, when present, carries the residence field.
                if total_tr == 5 {
                    let residencia = trs.nth(0).unwrap()
                        .select(&td_selector).nth(0).unwrap().text().next().unwrap_or("");
                    record.residencia = String::from(residencia.trim());
                }
                let profesion = trs.nth(0).unwrap()
                    .select(&td_selector).nth(0).unwrap().text().next().unwrap_or("");
                record.profesion = String::from(profesion.trim());
            }
            if document.select(&expediente_selector).count() > 0 {
                let th_selector = Selector::parse("th").unwrap();
                let expediente = document.select(&expediente_selector).next().unwrap();
                let trs = expediente.select(&tr_selector);
                // Each row is a `<th>label</th><td>value</td>` pair; dispatch
                // on the label text.
                for tr in trs{
                    let title = tr.select(&th_selector).next().unwrap().inner_html();
                    let value = tr.select(&td_selector).next().unwrap().inner_html();
                    match title.trim(){
                        "Archivo" => record.archivo = String::from(value.trim()),
                        "Fondo" => record.fondo = String::from(value.trim()),
                        "Serie" => record.serie = String::from(value.trim()),
                        "Signatura" => record.signatura = String::from(value.trim()),
                        "Fecha de expediente" => record.fecha = String::from(value.trim()),
                        "Tipología" => record.tipologia = String::from(value.trim()),
                        "Observaciones" => record.observaciones = String::from(value.trim()),
                        // Any other label is treated as the page count.
                        _ => record.paginas = String::from(value.trim()),
                    }
                }
            }
            // NOTE(review): the serialization Result is silently discarded here.
            print_record(record);
        }
    }
}
| true
|
c901ba2952acfed5c82173230730abcd3e779a54
|
Rust
|
klxqlehua/docker-api-rs
|
/src/api/secret.rs
|
UTF-8
| 4,210
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
#![cfg(feature = "swarm")]
//! Secrets are sensitive data that can be used by services. Swarm mode must be enabled for these endpoints to work.
use crate::{conn::Payload, Result};
impl_api_ty!(Secret => name: N);
impl<'docker> Secret<'docker> {
    // Generates the `inspect` and `delete` endpoints for one secret,
    // addressed by its name.
    impl_api_ep! { secret: Secret, resp
        Inspect -> &format!("/secrets/{}", secret.name)
        Delete -> &format!("/secrets/{}", secret.name)
    }

    // TODO: add Secret::update
}
impl<'docker> Secrets<'docker> {
    // Generates the `list` and `create` endpoints; `create` returns the new
    // secret's id from the response.
    impl_api_ep! { __: Secret, resp
        List -> "/secrets"
        Create -> "/secrets/create", resp.id
    }
}
pub mod data {
    use crate::{
        api::{Driver, Labels, ObjectVersion},
        Error, Result,
    };
    use serde::{Deserialize, Serialize};
    #[cfg(feature = "chrono")]
    use chrono::{DateTime, Utc};
    /// A secret as returned by the inspect/list endpoints.
    #[derive(Clone, Debug, Serialize, Deserialize)]
    #[serde(rename_all = "PascalCase")]
    pub struct SecretInfo {
        #[serde(rename = "ID")]
        pub id: String,
        pub version: ObjectVersion,
        // Timestamps are strongly typed only with the `chrono` feature;
        // otherwise the daemon's raw strings are kept.
        #[cfg(feature = "chrono")]
        pub created_at: DateTime<Utc>,
        #[cfg(not(feature = "chrono"))]
        pub created_at: String,
        #[cfg(feature = "chrono")]
        pub updated_at: DateTime<Utc>,
        #[cfg(not(feature = "chrono"))]
        pub updated_at: String,
        pub spec: SecretSpec,
    }
    /// The user-controlled portion of a secret.
    #[derive(Clone, Debug, Serialize, Deserialize)]
    #[serde(rename_all = "PascalCase")]
    pub struct SecretSpec {
        pub name: String,
        pub labels: Labels,
        pub data: String,
        pub driver: Driver,
        pub templating: Driver,
    }
    #[derive(Clone, Debug, Serialize, Deserialize)]
    #[serde(rename_all = "PascalCase")]
    /// Structure used to create a new secret with [`Secrets::create`](crate::Secrets::create).
    pub struct SecretCreateOpts {
        name: String,
        labels: Labels,
        // Base64-encoded payload, as required by the wire format.
        data: String,
        driver: Driver,
        templating: Driver,
    }
    impl SecretCreateOpts {
        /// Create a new secret with name and data. This function will take care of
        /// encoding the secret's data as base64.
        pub fn new<N, D>(name: N, data: D) -> Self
        where
            N: Into<String>,
            D: AsRef<str>,
        {
            Self {
                name: name.into(),
                labels: Labels::new(),
                data: base64::encode(data.as_ref()),
                driver: Driver::default(),
                templating: Driver::default(),
            }
        }
        /// Set the driver of this secret.
        pub fn set_driver(&mut self, driver: Driver) {
            self.driver = driver;
        }
        /// Set the templating driver of this secret.
        pub fn set_templating(&mut self, driver: Driver) {
            self.templating = driver;
        }
        /// Add a label to this secret
        pub fn add_label<K, V>(&mut self, key: K, val: V) -> Option<String>
        where
            K: Into<String>,
            V: Into<String>,
        {
            self.labels.insert(key.into(), val.into())
        }
        /// Serialize these options to the JSON body expected by the create endpoint.
        pub fn serialize(&self) -> Result<String> {
            serde_json::to_string(&self).map_err(Error::from)
        }
    }
    /// Minimal response body of the create endpoint: just the new id.
    #[derive(Deserialize)]
    pub(crate) struct SecretCreateInfo {
        #[serde(rename = "Id")]
        pub id: String,
    }
}
pub use data::*;
pub mod opts {
    use crate::api::Filter;
    // Generates `SecretListOpts` and its builder type.
    impl_url_opts_builder!(SecretList);
    /// Filter criteria accepted by the secret list endpoint.
    pub enum SecretFilter {
        Id(String),
        LabelKey(String),
        LabelKeyVal(String, String),
        Name(String),
        Names(String),
    }
    impl Filter for SecretFilter {
        // Map each variant to its `key=value` query-parameter pair.
        fn query_key_val(&self) -> (&'static str, String) {
            use SecretFilter::*;
            match &self {
                Id(id) => ("id", id.to_owned()),
                LabelKey(label) => ("label", label.to_owned()),
                LabelKeyVal(key, val) => ("label", format!("{}={}", key, val)),
                Name(name) => ("name", name.to_owned()),
                Names(names) => ("names", names.to_owned()),
            }
        }
    }
    impl SecretListOptsBuilder {
        impl_filter_func!(SecretFilter);
    }
}
pub use opts::*;
| true
|
70b68bce86988e240f647659d5b570021406c22c
|
Rust
|
beamsies/plctag-rs
|
/crates/core/src/raw.rs
|
UTF-8
| 20,610
| 2.6875
| 3
|
[
"MIT"
] |
permissive
|
// plctag-rs
//
// a rust wrapper of libplctag, with rust style APIs and useful extensions.
// Copyright: 2020-2021, Joylei <leingliu@gmail.com>
// License: MIT
use crate::*;
use std::time::{Duration, Instant};
use std::{ffi::CString, thread};
#[cfg(feature = "event")]
use crate::event::{listen, Event, Handler};
/// Tag Identifier
// Thin wrapper around the raw handle value returned by `plc_tag_create`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TagId(pub(crate) i32);
/// wrapper of tag model based on `libplctag`
#[derive(Debug)]
pub struct RawTag {
    // Handle returned by `plc_tag_create`; passed to every FFI call.
    tag_id: i32,
}
impl RawTag {
    /// create new RawTag
    /// # Note
    /// if you passed wrong path parameters, your program might crash.
    /// you might want to use `PathBuilder` to build a path.
    ///
    /// # Examples
    /// ```rust,ignore
    /// use plctag::{RawTag};
    ///
    /// let path="protocol=ab-eip&plc=controllogix&path=1,0&gateway=192.168.1.120&name=MyTag1&elem_count=1&elem_size=16";
    /// let tag = RawTag::new(path, timeout).unwrap();
    /// ```
    pub fn new(path: impl AsRef<str>, timeout: u32) -> Result<Self> {
        // Panics on an interior NUL byte in `path` (CString::new).
        let path = CString::new(path.as_ref()).unwrap();
        let tag_id = unsafe { ffi::plc_tag_create(path.as_ptr(), timeout as i32) };
        // A negative id signals creation failure; report it as a create error.
        if tag_id < 0 {
            return Err(Status::new(ffi::PLCTAG_ERR_CREATE).into());
        }
        Ok(Self { tag_id })
    }
    /// tag id
    #[inline(always)]
    pub fn id(&self) -> TagId {
        TagId(self.tag_id)
    }
    /// perform read operation.
    /// - blocking read if timeout > 0
    /// - non-blocking read if timeout = 0
    #[inline(always)]
    pub fn read(&self, timeout: u32) -> Status {
        let rc = unsafe { ffi::plc_tag_read(self.tag_id, timeout as i32) };
        rc.into()
    }
    /// perform write operation
    /// - blocking write if timeout > 0
    /// - non-blocking write if timeout = 0
    #[inline(always)]
    pub fn write(&self, timeout: u32) -> Status {
        let rc = unsafe { ffi::plc_tag_write(self.tag_id, timeout as i32) };
        rc.into()
    }
    /// wait until not pending, blocking
    /// # Note
    /// only for simple use cases
    #[inline]
    pub fn wait(&self, timeout: Option<Duration>) -> Status {
        let start = Instant::now();
        loop {
            // `None` means wait forever.
            if let Some(v) = timeout {
                if start.elapsed() > v {
                    return Status::Err(ffi::PLCTAG_ERR_TIMEOUT);
                }
            }
            let status = self.status();
            if !status.is_pending() {
                return status;
            }
            // Busy-waits, yielding the time slice each round instead of sleeping.
            //sleep(Duration::from_millis(1));
            thread::yield_now();
        }
    }
    /// element size
    #[inline(always)]
    pub fn elem_size(&self) -> Result<i32> {
        self.get_attr("elem_size", 0)
    }
    /// element count
    #[inline(always)]
    pub fn elem_count(&self) -> Result<i32> {
        self.get_attr("elem_count", 0)
    }
    /// get tag attribute
    #[inline(always)]
    pub fn get_attr(&self, attr: impl AsRef<str>, default_value: i32) -> Result<i32> {
        let attr = CString::new(attr.as_ref()).unwrap();
        let val =
            unsafe { ffi::plc_tag_get_int_attribute(self.tag_id, attr.as_ptr(), default_value) };
        // The C API uses i32::MIN as its error sentinel; consult the tag
        // status to surface the concrete error.
        if val == i32::MIN {
            // error
            return Err(self.status().into());
        }
        Ok(val)
    }
    /// set tag attribute
    #[inline(always)]
    pub fn set_attr(&self, attr: impl AsRef<str>, value: i32) -> Result<()> {
        let attr = CString::new(attr.as_ref()).unwrap();
        let rc = unsafe { ffi::plc_tag_set_int_attribute(self.tag_id, attr.as_ptr(), value) };
        Status::new(rc).into_result()
    }
    /// poll tag status
    #[inline(always)]
    pub fn status(&self) -> Status {
        let rc = unsafe { ffi::plc_tag_status(self.tag_id) };
        Status::new(rc)
    }
    /// tag size in bytes
    #[inline(always)]
    pub fn size(&self) -> Result<u32> {
        let value = unsafe { ffi::plc_tag_get_size(self.tag_id) };
        // Negative return values are error codes.
        if value < 0 {
            return Err(Status::from(value).into());
        }
        Ok(value as u32)
    }
    /// set tag size in bytes, returns old size
    #[inline(always)]
    pub fn set_size(&self, size: u32) -> Result<u32> {
        let value = unsafe { ffi::plc_tag_set_size(self.tag_id, size as i32) };
        if value < 0 {
            return Err(Status::from(value).into());
        }
        Ok(value as u32)
    }
    /// get bit value
    #[inline(always)]
    pub fn get_bit(&self, bit_offset: u32) -> Result<bool> {
        let val = unsafe { ffi::plc_tag_get_bit(self.tag_id, bit_offset as i32) };
        // i32::MIN is the error sentinel for this call.
        if val == i32::MIN {
            // error
            return Err(self.status().into());
        }
        Ok(val == 1)
    }
    /// set bit value
    #[inline(always)]
    pub fn set_bit(&self, bit_offset: u32, value: bool) -> Result<()> {
        let rc = unsafe {
            ffi::plc_tag_set_bit(self.tag_id, bit_offset as i32, if value { 1 } else { 0 })
        };
        Status::new(rc).into_result()
    }
    /// get bool value
    #[inline(always)]
    pub fn get_bool(&self, byte_offset: u32) -> Result<bool> {
        // Any non-zero byte counts as true.
        let value = self.get_u8(byte_offset)?;
        Ok(value > 0)
    }
    /// set bool value
    #[inline(always)]
    pub fn set_bool(&self, byte_offset: u32, value: bool) -> Result<()> {
        self.set_u8(byte_offset, if value { 1 } else { 0 })
    }
    // The integer getters below share a pattern dictated by the C API: each
    // returns the type's extreme value (MIN for signed, MAX for unsigned) as
    // an error sentinel. Seeing the sentinel is ambiguous — it can also be a
    // legitimately stored value — so the tag status is consulted to decide
    // whether to propagate an error.
    /// get i8 value
    #[inline(always)]
    pub fn get_i8(&self, byte_offset: u32) -> Result<i8> {
        let val = unsafe { ffi::plc_tag_get_int8(self.tag_id, byte_offset as i32) };
        if val == i8::MIN {
            self.status().into_result()?;
        }
        Ok(val)
    }
    /// set i8 value
    #[inline(always)]
    pub fn set_i8(&self, byte_offset: u32, value: i8) -> Result<()> {
        let rc = unsafe { ffi::plc_tag_set_int8(self.tag_id, byte_offset as i32, value) };
        Status::new(rc).into_result()
    }
    /// get u8 value
    #[inline(always)]
    pub fn get_u8(&self, byte_offset: u32) -> Result<u8> {
        let val = unsafe { ffi::plc_tag_get_uint8(self.tag_id, byte_offset as i32) };
        if val == u8::MAX {
            self.status().into_result()?;
        }
        Ok(val)
    }
    /// set u8 value
    #[inline(always)]
    pub fn set_u8(&self, byte_offset: u32, value: u8) -> Result<()> {
        let rc = unsafe { ffi::plc_tag_set_uint8(self.tag_id, byte_offset as i32, value) };
        Status::new(rc).into_result()
    }
    /// get i16 value
    #[inline(always)]
    pub fn get_i16(&self, byte_offset: u32) -> Result<i16> {
        let val = unsafe { ffi::plc_tag_get_int16(self.tag_id, byte_offset as i32) };
        if val == i16::MIN {
            self.status().into_result()?;
        }
        Ok(val)
    }
    /// set i16 value
    #[inline(always)]
    pub fn set_i16(&self, byte_offset: u32, value: i16) -> Result<()> {
        let rc = unsafe { ffi::plc_tag_set_int16(self.tag_id, byte_offset as i32, value) };
        Status::new(rc).into_result()
    }
    /// get u16 value
    #[inline(always)]
    pub fn get_u16(&self, byte_offset: u32) -> Result<u16> {
        let val = unsafe { ffi::plc_tag_get_uint16(self.tag_id, byte_offset as i32) };
        if val == u16::MAX {
            self.status().into_result()?;
        }
        Ok(val)
    }
    /// set u16 value
    #[inline(always)]
    pub fn set_u16(&self, byte_offset: u32, value: u16) -> Result<()> {
        let rc = unsafe { ffi::plc_tag_set_uint16(self.tag_id, byte_offset as i32, value) };
        Status::new(rc).into_result()
    }
    /// get i32 value
    #[inline(always)]
    pub fn get_i32(&self, byte_offset: u32) -> Result<i32> {
        let val = unsafe { ffi::plc_tag_get_int32(self.tag_id, byte_offset as i32) };
        if val == i32::MIN {
            self.status().into_result()?;
        }
        Ok(val)
    }
    /// set i32 value
    #[inline(always)]
    pub fn set_i32(&self, byte_offset: u32, value: i32) -> Result<()> {
        let rc = unsafe { ffi::plc_tag_set_int32(self.tag_id, byte_offset as i32, value) };
        Status::new(rc).into_result()
    }
    /// get u32 value
    #[inline(always)]
    pub fn get_u32(&self, byte_offset: u32) -> Result<u32> {
        let val = unsafe { ffi::plc_tag_get_uint32(self.tag_id, byte_offset as i32) };
        if val == u32::MAX {
            self.status().into_result()?;
        }
        Ok(val)
    }
    /// set u32 value
    #[inline(always)]
    pub fn set_u32(&self, byte_offset: u32, value: u32) -> Result<()> {
        let rc = unsafe { ffi::plc_tag_set_uint32(self.tag_id, byte_offset as i32, value) };
        Status::new(rc).into_result()
    }
    /// get i64 value
    #[inline(always)]
    pub fn get_i64(&self, byte_offset: u32) -> Result<i64> {
        let val = unsafe { ffi::plc_tag_get_int64(self.tag_id, byte_offset as i32) };
        if val == i64::MIN {
            self.status().into_result()?;
        }
        Ok(val)
    }
    /// set i64 value
    #[inline(always)]
    pub fn set_i64(&self, byte_offset: u32, value: i64) -> Result<()> {
        let rc = unsafe { ffi::plc_tag_set_int64(self.tag_id, byte_offset as i32, value) };
        Status::new(rc).into_result()
    }
    /// get u64 value
    #[inline(always)]
    pub fn get_u64(&self, byte_offset: u32) -> Result<u64> {
        let val = unsafe { ffi::plc_tag_get_uint64(self.tag_id, byte_offset as i32) };
        if val == u64::MAX {
            self.status().into_result()?;
        }
        Ok(val)
    }
    /// set u64 value
    #[inline(always)]
    pub fn set_u64(&self, byte_offset: u32, value: u64) -> Result<()> {
        let rc = unsafe { ffi::plc_tag_set_uint64(self.tag_id, byte_offset as i32, value) };
        Status::new(rc).into_result()
    }
/// get f32 value
#[inline(always)]
pub fn get_f32(&self, byte_offset: u32) -> Result<f32> {
let val = unsafe { ffi::plc_tag_get_float32(self.tag_id, byte_offset as i32) };
if (val - f32::MIN).abs() <= f32::EPSILON {
self.status().into_result()?;
}
Ok(val)
}
/// set f32 value
#[inline(always)]
pub fn set_f32(&self, byte_offset: u32, value: f32) -> Result<()> {
let rc = unsafe { ffi::plc_tag_set_float32(self.tag_id, byte_offset as i32, value) };
Status::new(rc).into_result()
}
/// get f64 value
#[inline(always)]
pub fn get_f64(&self, byte_offset: u32) -> Result<f64> {
let val = unsafe { ffi::plc_tag_get_float64(self.tag_id, byte_offset as i32) };
if (val - f64::MIN).abs() <= f64::EPSILON {
self.status().into_result()?;
}
Ok(val)
}
/// set f64 value
#[inline(always)]
pub fn set_f64(&self, byte_offset: u32, value: f64) -> Result<()> {
let rc = unsafe { ffi::plc_tag_set_float64(self.tag_id, byte_offset as i32, value) };
Status::new(rc).into_result()
}
/// Getting A String Length
#[cfg(feature = "api_string")]
#[inline(always)]
pub fn get_string_length(&self, byte_offset: u32) -> Result<u32> {
let rc = unsafe { ffi::plc_tag_get_string_length(self.tag_id, byte_offset as i32) };
if rc >= 0 {
Ok(rc as u32)
} else {
Err(Status::new(rc))
}
}
/// Getting A String Capacity
#[cfg(feature = "api_string")]
#[inline(always)]
pub fn get_string_capacity(&self, byte_offset: u32) -> Result<u32> {
let rc = unsafe { ffi::plc_tag_get_string_capacity(self.tag_id, byte_offset as i32) };
if rc >= 0 {
Ok(rc as u32)
} else {
Err(Status::new(rc))
}
}
/// Getting the Space Occupied by a String
#[cfg(feature = "api_string")]
#[inline(always)]
pub fn get_string_total_length(&self, byte_offset: u32) -> Result<u32> {
let rc = unsafe { ffi::plc_tag_get_string_total_length(self.tag_id, byte_offset as i32) };
if rc >= 0 {
Ok(rc as u32)
} else {
Err(Status::new(rc))
}
}
/// Reading A String
#[cfg(feature = "api_string")]
#[inline(always)]
pub fn get_string(&self, byte_offset: u32, buf: &mut [u8]) -> Result<()> {
let rc = unsafe {
ffi::plc_tag_get_string(
self.tag_id,
byte_offset as i32,
buf.as_mut_ptr() as *mut i8,
buf.len() as i32,
)
};
Status::new(rc).into_result()
}
/// Write A String
/// NOTE: panic if buf terminates with 0 byte
#[cfg(feature = "api_string")]
#[inline(always)]
pub fn set_string(&self, byte_offset: u32, buf: impl Into<Vec<u8>>) -> Result<()> {
let buf = CString::new(buf).unwrap();
let rc = unsafe { ffi::plc_tag_set_string(self.tag_id, byte_offset as i32, buf.as_ptr()) };
Status::new(rc).into_result()
}
/// get raw bytes.
/// If buffer length would exceed the end of the data in the tag data buffer, an out of bounds error is returned
#[cfg(feature = "api_raw_bytes")]
#[inline(always)]
pub fn get_bytes_unchecked(&self, byte_offset: u32, buf: &mut [u8]) -> Result<usize> {
let rc = unsafe {
ffi::plc_tag_get_raw_bytes(
self.tag_id,
byte_offset as i32,
buf.as_mut_ptr(),
buf.len() as i32,
)
};
Status::new(rc).into_result()?;
Ok(buf.len())
}
/// Reads raw bytes into `buf`, clamping the read so it never extends past
/// the end of the tag data buffer. Returns the number of bytes read
/// (possibly fewer than `buf.len()`), or `Ok(0)` for an empty buffer or an
/// offset at/past the end of the data.
#[cfg(feature = "api_raw_bytes")]
#[inline]
pub fn get_bytes(&self, byte_offset: u32, buf: &mut [u8]) -> Result<usize> {
    if buf.is_empty() {
        return Ok(0);
    }
    let offset = byte_offset as usize;
    let size = self.size()? as usize;
    if offset >= size {
        return Ok(0);
    }
    // Never ask the C side for more bytes than remain in the tag buffer.
    let available = size - offset;
    let take = buf.len().min(available);
    self.get_bytes_unchecked(byte_offset, &mut buf[..take])
}
/// set raw bytes.
/// If buffer length would exceed the end of the data in the tag data buffer, an out of bounds error is returned
///
/// On success this wrapper always reports `buf.len()`; it never reports a
/// partial write.
#[cfg(feature = "api_raw_bytes")]
#[inline(always)]
pub fn set_bytes_unchecked(&self, byte_offset: u32, buf: &[u8]) -> Result<usize> {
    let rc = unsafe {
        ffi::plc_tag_set_raw_bytes(
            self.tag_id,
            byte_offset as i32,
            // Cast away const-ness to satisfy the FFI signature.
            // NOTE(review): assumes the C side does not mutate the buffer —
            // confirm against the libplctag docs.
            buf.as_ptr() as *mut u8,
            buf.len() as i32,
        )
    };
    Status::new(rc).into_result()?;
    Ok(buf.len())
}
/// Writes raw bytes from `buf`, clamping the write so it never extends past
/// the end of the tag data buffer. Returns the number of bytes written
/// (possibly fewer than `buf.len()`), or `Ok(0)` for an empty buffer or an
/// offset at/past the end of the data.
#[cfg(feature = "api_raw_bytes")]
#[inline]
pub fn set_bytes(&self, byte_offset: u32, buf: &[u8]) -> Result<usize> {
    if buf.is_empty() {
        return Ok(0);
    }
    let offset = byte_offset as usize;
    let size = self.size()? as usize;
    if offset >= size {
        return Ok(0);
    }
    // Never hand the C side more bytes than remain in the tag buffer.
    let take = buf.len().min(size - offset);
    self.set_bytes_unchecked(byte_offset, &buf[..take])
}
/// Reads bytes one at a time via `get_u8` (slow fallback used when the
/// `api_raw_bytes` feature is disabled).
///
/// Reads at most `min(buf.len(), size - byte_offset)` bytes starting at
/// `byte_offset` and returns the number of bytes read. Returns `Ok(0)` when
/// `buf` is empty or `byte_offset` is at/past the end of the tag data.
/// Note: it's not efficient.
#[cfg(not(feature = "api_raw_bytes"))]
pub fn get_bytes(&self, byte_offset: u32, buf: &mut [u8]) -> Result<usize> {
    if buf.is_empty() {
        return Ok(0);
    }
    let size = self.size()?;
    if byte_offset >= size {
        return Ok(0);
    }
    // Clamp to the available bytes so we never read past the end of the
    // tag data; this matches both the `api_raw_bytes` implementation and
    // the non-raw `set_bytes` counterpart (the old code read `buf.len()`
    // bytes unconditionally and could fail on out-of-bounds offsets).
    let avail = (size - byte_offset) as usize;
    let count = avail.min(buf.len());
    for (i, item) in buf[..count].iter_mut().enumerate() {
        *item = self.get_u8(byte_offset + i as u32)?;
    }
    Ok(count)
}
/// Writes bytes one at a time via `set_u8` (slow fallback used when the
/// `api_raw_bytes` feature is disabled). The write is clamped so it never
/// extends past the end of the tag data; returns the number of bytes
/// written. Note: it's not efficient.
#[cfg(not(feature = "api_raw_bytes"))]
pub fn set_bytes(&self, byte_offset: u32, buf: &[u8]) -> Result<usize> {
    if buf.is_empty() {
        return Ok(0);
    }
    let size = self.size()?;
    if byte_offset >= size {
        return Ok(0);
    }
    let avail = (size - byte_offset) as usize;
    let count = avail.min(buf.len());
    let mut idx = byte_offset;
    for &byte in &buf[..count] {
        self.set_u8(idx, byte)?;
        idx += 1;
    }
    Ok(count)
}
/// note: registering a new callback will override existing one
///
/// # Safety
/// `cb` must be a valid `extern "C"` function with the shown signature; it
/// is invoked by the C library with the tag id, event code and status.
/// NOTE(review): callback threading semantics are defined by libplctag —
/// confirm whether the callback may fire from another thread.
#[cfg(not(feature = "event"))]
#[inline(always)]
pub unsafe fn register_callback(
    &self,
    cb: Option<unsafe extern "C" fn(tag_id: i32, event: i32, status: i32)>,
) -> Status {
    //unregister first
    let _ = ffi::plc_tag_unregister_callback(self.tag_id);
    let rc = ffi::plc_tag_register_callback(self.tag_id, cb);
    rc.into()
}
/// Removes the callback previously installed via [`register_callback`],
/// returning the status reported by the C library.
#[cfg(not(feature = "event"))]
#[inline(always)]
pub fn unregister_callback(&self) -> Status {
    let rc = unsafe { ffi::plc_tag_unregister_callback(self.tag_id) };
    rc.into()
}
/// listen for events
///
/// Dropping the returned `Handler` removes the listener (see example).
///
/// # Examples
/// ```rust,ignore
/// use plctag::event::Event;
/// let tag: RawTag = ...;
/// let listener = tag.listen(|id, evt, status|
/// {
///     println!("tag event: {}, status: {}", evt, status);
/// });
///
/// //remove listener later
/// drop(listener);
/// ```
#[cfg(feature = "event")]
#[inline(always)]
pub fn listen<F>(&self, f: F) -> Handler
where
    F: FnMut(TagId, Event, Status) + Send + Sync + Clone + 'static,
{
    listen(&self.tag_id, f)
}
/// Aborts any pending (in-flight) operation on this tag.
///
/// Needed primarily for async usage: with non-blocking read/write
/// (`timeout == 0`) it's the caller's responsibility to invoke `abort` to
/// cancel the pending operation on timeout or other necessary situations.
#[inline(always)]
pub fn abort(&self) -> Result<()> {
    let rc = unsafe { ffi::plc_tag_abort(self.tag_id) };
    Status::new(rc).into_result()
}
/// Gets the tag value as a `T` that implements [`Decode`].
#[cfg(feature = "value")]
#[inline]
pub fn get_value<T: Decode>(&self, byte_offset: u32) -> Result<T> {
    // `decode` already returns `Result<T>`; no need to bind and re-wrap.
    T::decode(self, byte_offset)
}
/// Sets the tag value from a `T` that implements [`Encode`].
#[cfg(feature = "value")]
#[inline]
pub fn set_value<T: Encode>(&self, byte_offset: u32, value: T) -> Result<()> {
    value.encode(self, byte_offset)
}
}
impl Drop for RawTag {
    #[inline(always)]
    fn drop(&mut self) {
        // Release the underlying libplctag handle. An explicit abort before
        // destroy was considered and left commented out; the C library is
        // responsible for cleaning up any pending operation.
        unsafe {
            //let _ = self.abort();
            ffi::plc_tag_destroy(self.tag_id);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises the library's built-in "system/debug" tag: reads and writes
    // the debug level, then round-trips raw bytes through the tag buffer.
    #[test]
    fn test_debug() {
        let tag = RawTag::new("make=system&family=library&name=debug&debug=4", 100).unwrap();
        let size = tag.size().unwrap();
        assert!(size > 0);
        //read
        let res = tag.read(100);
        assert!(res.is_ok());
        let level = tag.get_u32(0).unwrap_or_default();
        assert_eq!(level, 4);
        //write
        let res = tag.set_u32(0, 1);
        assert!(res.is_ok());
        let res = tag.write(100);
        assert!(res.is_ok());
        //read
        let res = tag.read(100);
        assert!(res.is_ok());
        let level = tag.get_u32(0).unwrap_or_default();
        assert_eq!(level, 1);
        // Raw-bytes round trip: the debug tag exposes a 30-byte buffer.
        let mut buf: Vec<u8> = vec![0; size as usize];
        let size = tag.get_bytes(0, &mut buf).unwrap();
        assert_eq!(size, 30);
        let result = &[
            1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0,
        ];
        assert_eq!(&buf, result);
        buf[0] = 3;
        let count = tag.set_bytes(0, &buf[0..2]).unwrap();
        assert_eq!(count, 2);
        let count = tag.get_bytes(0, &mut buf[0..3]).unwrap();
        assert_eq!(count, 3);
        let result = &[3, 0, 0];
        assert_eq!(&buf[0..3], result);
    }
}
| true
|
534535e9d1a827f987d500bc4539cc288f75336d
|
Rust
|
vipulkit/sri-delhi-rust-libcss
|
/test/libcss/testutils.rs
|
UTF-8
| 2,311
| 2.546875
| 3
|
[] |
no_license
|
#[link(name = "testutils",
vers = "0.2",
url = "https://github.com/webconvforge/sri-delhi-rust-libcss/tree/master/libparserutils")];
#[crate_type = "lib"];
extern mod css;
use std::io::*;
// NOTE(review): this file is pre-1.0 Rust dialect (~str, ~[u8], @-managed
// boxes, `extern mod`) and will not compile with any modern toolchain;
// documented as-is since a behavior-preserving restyle is not possible.
// Callback invoked per chunk of test-file data; returns false to stop.
pub type line_func =
~extern fn(data:~str , pw:LINE_CTX_DATA_TYPE) -> bool;
// Line-parsing context for the charset-detection tests: accumulated raw
// bytes, the expected encoding, and section flags for #data / #encoding.
pub struct line_ctx_csdetect {
    buf:~[u8],
    enc:~str,
    indata:bool,
    inenc:bool
}
/*pub type line_func =
~extern fn(data:~str , pw:&mut line_ctx) -> bool;*/
// Line-parsing context for the lexer tests: accumulated raw bytes, the
// expected token strings, and section flags for #data / #expected.
pub struct line_ctx_lex {
    buf:~[u8],
    exp:~[~str],
    indata:bool,
    inexp:bool
}
// Tagged union over the two context kinds, passed opaquely to `line_func`.
pub enum LINE_CTX_DATA_TYPE {
    CSDETECT(@mut line_ctx_csdetect),
    LEX(@mut line_ctx_lex)
}
// Returns the size of `fileName` in bytes by opening it and seeking to the
// end (pre-1.0 io API: `file_reader` + `SeekEnd`).
pub fn css__parse_filesize( fileName:~str)->uint {
    // debug!(~"css__parse_filesize : "+ fileName);
    let r:@Reader = file_reader(&Path(fileName)).unwrap();
    r.seek(0,SeekEnd);
    r.tell()
}
// Finds the first occurrence of `chr` in `string`; returns the tail slice
// starting at that occurrence together with its byte index, or ("", len)
// when the character is not found.
// NOTE(review): indexes bytes and compares `byte as char` — only correct for
// ASCII `chr`; confirm inputs are ASCII.
pub fn css__parse_strnchr(string:&~str, chr:char)-> (~str,uint) {
    let length = string.len();
    let mut i : uint = 0;
    while i < length {
        if (*string)[i] as char == chr {
            return (string.slice(i,length).to_owned(),i);
        }
        i = i + 1;
    }
    return (~"",string.len());
}
// Reads `filename` line by line and feeds each line to `callback` in chunks
// of at most 300 bytes, followed by a final "#" sentinel. Returns false as
// soon as the callback rejects a chunk.
// NOTE(review): the 300-byte slicing uses byte offsets — multi-byte UTF-8
// input could be split mid-character; confirm test inputs are ASCII.
pub fn css__parse_testfile(filename:~str, callback:line_func, pw:LINE_CTX_DATA_TYPE)->bool {
    // debug!(~"css__parse_testfile : "+ filename);
    let r:@Reader = file_reader(&Path(filename)).unwrap();
    let mut data:~str;
    let mut string: ~str;
    while(!r.eof()) {
        data = r.read_line();
        // io::print(fmt!("data is %? " , str::to_bytes(data)));
        let mut iter = 0;
        let numOfbuffers= data.len()/300 + 1 ;
        // Feed all full 300-byte chunks except the last partial one.
        while iter < (numOfbuffers-1) {
            string = data.slice(iter * 300 ,(iter +1) * 300).to_owned();
            if string.len() == 0 {
                // `loop` was the pre-1.0 keyword for `continue`.
                loop;
            }
            if !(*callback)(string.clone(), pw) {
                return false;
            }
            iter += 1;
        }
        // Feed the trailing partial chunk, if any.
        string = data.slice(iter * 300, data.len()).to_owned();
        if string.len() > 0 {
            if !(*callback)( string.clone(), pw) {
                return false;
            }
        }
    }
    // End-of-input sentinel expected by the test harness callbacks.
    if !(*callback)( ~"#", pw) {
        return false;
    }
    true
}
| true
|
07e41cb0f452e3e899cbcab246b4bdc9ffd73cf6
|
Rust
|
gohanman/AdventOfCode2017
|
/day04/src/main.rs
|
UTF-8
| 2,192
| 3.4375
| 3
|
[
"Apache-2.0"
] |
permissive
|
extern crate proj_self;
/// Splits a space-separated passphrase line into its (trimmed) words.
/// Consecutive spaces yield empty-string tokens, matching `str::split(' ')`.
fn line_to_words(input: &str) -> Vec<String> {
    // Split directly on the borrowed &str — the old code allocated an
    // intermediate owned String for no benefit.
    input.split(' ').map(|x| x.trim().to_string()).collect()
}
/// True when the two words are byte-for-byte identical.
fn is_equal(a: &str, b: &str) -> bool {
    a.eq(b)
}
/// True when `a` and `b` contain exactly the same characters (i.e. one is an
/// anagram of the other). Equal strings count as anagrams of themselves.
fn is_anagram(a: &str, b: &str) -> bool {
    // Cheap length check first; anagrams must have identical byte lengths.
    if a.len() != b.len() {
        return false;
    }
    let sorted_chars = |s: &str| -> Vec<char> {
        let mut cs: Vec<char> = s.chars().collect();
        cs.sort();
        cs
    };
    sorted_chars(a) == sorted_chars(b)
}
/// Returns true when no pair of words in `input` matches under `func`
/// (i.e. the passphrase is valid).
///
/// Fixes two issues in the previous version: `input.len() - 1` underflowed
/// (panicked) on an empty list, and the bare `&Fn(...)` trait-object syntax
/// is deprecated — it is now `&dyn Fn(...)`.
fn validate(input: &Vec<String>, func: &dyn Fn(&str, &str) -> bool) -> bool {
    // saturating_sub keeps the empty case at zero iterations instead of
    // wrapping around.
    for i in 0..input.len().saturating_sub(1) {
        for j in (i + 1)..input.len() {
            if func(&input[i], &input[j]) {
                return false;
            }
        }
    }
    true
}
/// Reads the puzzle input (via the `proj_self` helper crate) and prints the
/// count of valid passphrases for part 1 (no duplicate words) and part 2
/// (no anagram words).
fn main() {
    let proj = proj_self::proj_dir(3);
    let file = proj.join("input.txt");
    let input = proj_self::file_to_str(&file);
    let vals = proj_self::str_to_lines(&input);
    // Part 1: a passphrase is valid when no two words are identical.
    let valid: Vec<&&str> = vals.iter().filter(|x| validate(&line_to_words(x), &is_equal)).collect();
    println!("Valid: {}", valid.len());
    // Part 2: also invalid when any two words are anagrams of each other.
    let also_valid: Vec<&&str> = vals.iter().filter(|x| validate(&line_to_words(x), &is_anagram)).collect();
    println!("Anagram Valid: {}", also_valid.len());
}
// Unit tests covering the part-1 (exact duplicate) and part-2 (anagram)
// validation rules from the puzzle statement examples.
#[test]
fn test() {
    let a = line_to_words("aa bb cc dd ee");
    assert_eq!(true, validate(&a, &is_equal));
    let b = line_to_words("aa bb cc dd aa");
    assert_eq!(false, validate(&b, &is_equal));
    let c = line_to_words("aa bb cc dd aaa");
    assert_eq!(true, validate(&c, &is_equal));
    let d = line_to_words("abcde fghij");
    assert_eq!(true, validate(&d, &is_anagram));
    let e = line_to_words("abcde xyz ecdab");
    assert_eq!(false, validate(&e, &is_anagram));
    let f = line_to_words("a ab abc abd abf abj");
    assert_eq!(true, validate(&f, &is_anagram));
    let g = line_to_words("iiii oiii ooii oooi oooo");
    assert_eq!(true, validate(&g, &is_anagram));
    let h = line_to_words("oiii ioii iioi iiio");
    assert_eq!(false, validate(&h, &is_anagram));
}
| true
|
2204da7929a2622e80fafbef0df71420a4d7a2f5
|
Rust
|
AustinHaugerud/oxidsys
|
/src/language/operations/agents_and_teams/agent_set_no_death_knock_down_only.rs
|
UTF-8
| 895
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
use language::operations::{make_param_doc, Operation, ParamInfo};
// Marker type implementing the `agent_set_no_death_knock_down_only` script
// operation (Mount&Blade module-system opcode 1733).
pub struct AgentSetNoDeathKnockDownOnlyOp;
const DOC : &str = "Sets the agent as unkillable (value = 1) or normal (value = 0). Unkillable agents will drop on the ground instead of dying and will stand up afterwards.";
pub const OP_CODE: u32 = 1733;
pub const IDENT: &str = "agent_set_no_death_knock_down_only";
impl Operation for AgentSetNoDeathKnockDownOnlyOp {
    fn op_code(&self) -> u32 {
        OP_CODE
    }
    fn documentation(&self) -> &'static str {
        DOC
    }
    fn identifier(&self) -> &'static str {
        IDENT
    }
    // Two required positional parameters, no optionals.
    fn param_info(&self) -> ParamInfo {
        ParamInfo {
            num_required: 2,
            num_optional: 0,
            param_docs: vec![
                make_param_doc("<agent_id>", ""),
                make_param_doc("<value>", ""),
            ],
        }
    }
}
| true
|
414af91106d753c4c0e9fc8c5c6b59a31ee3b5c1
|
Rust
|
storiqaamericanteam/stores
|
/src/services/moderator_comments.rs
|
UTF-8
| 6,573
| 2.53125
| 3
|
[] |
no_license
|
//! ModeratorProductComments Services, presents CRUD operations with wizard_stores
use diesel::connection::AnsiTransactionManager;
use diesel::pg::Pg;
use diesel::Connection;
use failure::Error as FailureError;
use r2d2::ManageConnection;
use stq_types::{BaseProductId, StoreId};
use super::types::ServiceFuture;
use models::*;
use repos::ReposFactory;
use services::Service;
pub trait ModeratorCommentsService {
    /// Returns the latest moderator product comment by base product ID.
    fn get_latest_for_product(&self, base_product_id: BaseProductId) -> ServiceFuture<Option<ModeratorProductComments>>;
    /// Creates a new moderator product comment.
    fn create_product_comment(&self, payload: NewModeratorProductComments) -> ServiceFuture<ModeratorProductComments>;
    /// Returns the latest moderator comment by store ID.
    fn get_latest_for_store(&self, store_id: StoreId) -> ServiceFuture<Option<ModeratorStoreComments>>;
    /// Creates a new moderator store comment.
    fn create_store_comment(&self, payload: NewModeratorStoreComments) -> ServiceFuture<ModeratorStoreComments>;
}
impl<
    T: Connection<Backend = Pg, TransactionManager = AnsiTransactionManager> + 'static,
    M: ManageConnection<Connection = T>,
    F: ReposFactory<T>,
> ModeratorCommentsService for Service<T, M, F>
{
    /// Returns the latest moderator product comment by base product ID.
    fn get_latest_for_product(&self, base_product_id: BaseProductId) -> ServiceFuture<Option<ModeratorProductComments>> {
        let user_id = self.dynamic_context.user_id;
        let repo_factory = self.static_context.repo_factory.clone();
        // NOTE(review): `spawn_on_pool` presumably runs the blocking diesel
        // work on a dedicated pool — confirm in `Service`.
        self.spawn_on_pool(move |conn| {
            let moderator_product_repo = repo_factory.create_moderator_product_comments_repo(&*conn, user_id);
            moderator_product_repo.find_by_base_product_id(base_product_id).map_err(|e| {
                e.context("Service ModeratorComments, get_latest_for_product endpoint error occurred.")
                    .into()
            })
        })
    }
    /// Creates a new moderator product comment inside a DB transaction.
    fn create_product_comment(&self, payload: NewModeratorProductComments) -> ServiceFuture<ModeratorProductComments> {
        let user_id = self.dynamic_context.user_id;
        let repo_factory = self.static_context.repo_factory.clone()
;
        self.spawn_on_pool(move |conn| {
            let moderator_product_repo = repo_factory.create_moderator_product_comments_repo(&*conn, user_id);
            conn.transaction::<ModeratorProductComments, FailureError, _>(move || moderator_product_repo.create(payload))
                .map_err(|e| {
                    e.context("Service ModeratorComments, create_product_comment endpoint error occurred.")
                        .into()
                })
        })
    }
    /// Returns the latest moderator comment by store ID.
    fn get_latest_for_store(&self, store_id: StoreId) -> ServiceFuture<Option<ModeratorStoreComments>> {
        let user_id = self.dynamic_context.user_id;
        let repo_factory = self.static_context.repo_factory.clone();
        self.spawn_on_pool(move |conn| {
            let moderator_store_repo = repo_factory.create_moderator_store_comments_repo(&*conn, user_id);
            moderator_store_repo.find_by_store_id(store_id).map_err(|e| {
                e.context("Service ModeratorComments, get_latest_for_store endpoint error occurred.")
                    .into()
            })
        })
    }
    /// Creates a new moderator store comment inside a DB transaction.
    fn create_store_comment(&self, payload: NewModeratorStoreComments) -> ServiceFuture<ModeratorStoreComments> {
        let user_id = self.dynamic_context.user_id;
        let repo_factory = self.static_context.repo_factory.clone();
        self.spawn_on_pool(move |conn| {
            let moderator_store_repo = repo_factory.create_moderator_store_comments_repo(&*conn, user_id);
            conn.transaction::<ModeratorStoreComments, FailureError, _>(move || moderator_store_repo.create(payload))
                .map_err(|e| {
                    e.context("Service ModeratorComments, create_store_comment endpoint error occurred.")
                        .into()
                })
        })
    }
}
#[cfg(test)]
pub mod tests {
    use std::sync::Arc;
    use tokio_core::reactor::Core;
    use stq_types::*;
    use models::*;
    use repos::repo_factory::tests::*;
    use services::*;
    // These tests run against the mock repo factory (`create_service`), so no
    // real database is touched.
    fn create_product_comments_payload() -> NewModeratorProductComments {
        NewModeratorProductComments {
            moderator_id: MOCK_USER_ID,
            base_product_id: BaseProductId(1),
            comments: "new comment".to_string(),
        }
    }
    fn create_store_comments_payload() -> NewModeratorStoreComments {
        NewModeratorStoreComments {
            moderator_id: MOCK_USER_ID,
            store_id: StoreId(1),
            comments: "new comment".to_string(),
        }
    }
    #[test]
    fn test_get_product_comment() {
        let mut core = Core::new().unwrap();
        let handle = Arc::new(core.handle());
        let service = create_service(Some(MOCK_USER_ID), handle);
        let work = service.get_latest_for_product(BaseProductId(1));
        let result = core.run(work).unwrap();
        assert_eq!(result.unwrap().base_product_id, BaseProductId(1));
    }
    #[test]
    fn test_create_product_comment() {
        let mut core = Core::new().unwrap();
        let handle = Arc::new(core.handle());
        let service = create_service(Some(MOCK_USER_ID), handle);
        let payload = create_product_comments_payload();
        let work = service.create_product_comment(payload.clone());
        let result = core.run(work).unwrap();
        assert_eq!(result.comments, payload.comments);
    }
    #[test]
    fn test_get_store_comment() {
        let mut core = Core::new().unwrap();
        let handle = Arc::new(core.handle());
        let service = create_service(Some(MOCK_USER_ID), handle);
        let work = service.get_latest_for_store(StoreId(1));
        let result = core.run(work).unwrap();
        assert_eq!(result.unwrap().store_id, StoreId(1));
    }
    #[test]
    fn test_create_store_comment() {
        let mut core = Core::new().unwrap();
        let handle = Arc::new(core.handle());
        let service = create_service(Some(MOCK_USER_ID), handle);
        let payload = create_store_comments_payload();
        let work = service.create_store_comment(payload.clone());
        let result = core.run(work).unwrap();
        assert_eq!(result.comments, payload.comments);
    }
}
| true
|
f0fe0ff76cda426327c2e2fff745f82f8ec28798
|
Rust
|
Jamwesayer/BitcoinPaymentCore
|
/src/presentation/controller.rs
|
UTF-8
| 2,506
| 2.65625
| 3
|
[] |
no_license
|
use crate::presentation::controller_service::*;
use crate::presentation::item::*;
// Thin presentation-layer facade over `PaymentControllerService`; methods
// print their results to stdout rather than returning them.
pub struct PaymentController {
    payment_controller_service: PaymentControllerService
}
impl Default for PaymentController {
    fn default() -> Self {
        Self {
            payment_controller_service: PaymentControllerService::default()
        }
    }
}
impl PaymentController {
    /// Creates a payment window and, on success, prints the receiving
    /// address, renders a QR-code image and awaits transaction-following for
    /// the generated label. Errors are printed, not propagated.
    pub async fn create_payment_window(&self, payment_request_item: PaymentRequestItem) {
        match self.payment_controller_service.create_payment_window(&payment_request_item) {
            Ok(generated_payment_request_item) => {
                println!("Address: {:?}", generated_payment_request_item.get_address());
                generated_payment_request_item.generate_qr_code_image();
                self.payment_controller_service.follow_transaction_for_label(generated_payment_request_item, *payment_request_item.get_store_id()).await
            },
            Err(e) => println!("{:?}", e)
        }
    }
    /// Looks up and prints the status/details of a payment window.
    pub fn check_payment_status(&self, payment_search_item: PaymentWindowSearchItem) {
        match self.payment_controller_service.check_payment_status(payment_search_item) {
            Ok(payment_details_item) => println!("{:?}", payment_details_item),
            Err(e) => println!("{:?}", e)
        }
    }
    // NOTE(review): private and unused within this file's visible scope, and
    // the service call's result is discarded — confirm this is intentional.
    fn refund(&self, payment_search_item: PaymentWindowSearchItem) {
        self.payment_controller_service.refund(payment_search_item);
    }
    /// Suspends an open payment window and prints the outcome.
    pub fn suspend_payment_window(&self, payment_search_item: PaymentWindowSearchItem) {
        match self.payment_controller_service.suspend_payment_window(payment_search_item) {
            Ok(success) => println!("{:?}", success),
            Err(e) => println!("{:?}", e)
        }
    }
}
// -----------------------------------------------Transactions
// Presentation-layer facade over `TransactionControllerService`; prints
// query results to stdout.
pub struct TransactionController {
    transaction_controller_service: TransactionControllerService
}
impl Default for TransactionController {
    fn default() -> Self {
        Self {
            transaction_controller_service: TransactionControllerService::default()
        }
    }
}
impl TransactionController {
    /// Fetches and prints all transactions for `store_id`; errors are
    /// printed, not propagated.
    pub fn get_all_transactions(&self, store_id: &i32) {
        match self.transaction_controller_service.get_all_transactions(store_id) {
            Ok(transactions) => {
                println!("{:?}", transactions);
            },
            Err(e) => {println!("{:?}", e);}
        }
    }
}
// pub struct StoreController {
// store_controller_service:
// }
| true
|
dcf7310d4b2b5b4379112142952de1d423a0da3d
|
Rust
|
Lulzx/dynamaze
|
/src/anim.rs
|
UTF-8
| 4,916
| 2.96875
| 3
|
[] |
no_license
|
use std::collections::VecDeque;
use std::f64::consts::FRAC_PI_2;
use std::sync::{Arc, Mutex, RwLock};
use serde::{Deserialize, Serialize};
use crate::Direction;
use crate::net::{Message, MetaMessage};
/// Tracks state of the target stripe animation
pub struct TargetStripeState {
pub offset: f64,
}
impl TargetStripeState {
const LENGTH: f64 = 2.0;
fn new() -> TargetStripeState {
TargetStripeState { offset: 0.0 }
}
fn advance_by(&mut self, ticks: f64) {
self.offset = (self.offset + ticks) % Self::LENGTH;
}
pub fn pct_offset(&self) -> f64 {
self.offset / Self::LENGTH
}
}
/// Checks the direction in which the tile rotate animation spins
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum RotateDir {
/// Clockwise
CW,
/// Counterclockwise
CCW,
}
/// Tracks state of the loose tile rotate animation
pub struct LooseRotateState {
pub angle: f64,
}
impl LooseRotateState {
const LENGTH: f64 = 0.25;
fn new() -> LooseRotateState {
LooseRotateState { angle: 0.0 }
}
fn reset(&mut self, dir: RotateDir) {
self.angle += match dir {
RotateDir::CW => -FRAC_PI_2,
RotateDir::CCW => FRAC_PI_2,
};
}
fn advance_by(&mut self, ticks: f64) {
if self.angle == 0.0 {
return;
}
let delta = FRAC_PI_2 / Self::LENGTH;
let (delta, clamp): (f64, fn(f64, f64) -> f64) = if self.angle.is_sign_positive() {
(-delta, f64::max)
} else {
(delta, f64::min)
};
self.angle = clamp(self.angle + delta * ticks, 0.0);
}
}
/// Tracks state of loose tile insert animation
pub struct LooseInsertState {
/// Direction in which the tiles are currently offset
/// (same as the edge on which the loose tile started)
pub offset_dir: Direction,
/// Fraction of a tile remaining in the animation
pub distance_left: f64,
/// Row/column of the offset tiles
coordinate: usize,
}
impl LooseInsertState {
const LENGTH: f64 = 0.25;
fn new() -> LooseInsertState {
LooseInsertState {
offset_dir: Direction::North,
distance_left: 0.0,
coordinate: 0,
}
}
fn reset(&mut self, dir: Direction, coord: usize) {
self.offset_dir = dir;
self.distance_left = 1.0;
self.coordinate = coord;
}
fn advance_by(&mut self, ticks: f64) {
if self.distance_left == 0.0 {
return;
}
self.distance_left = (self.distance_left - ticks / Self::LENGTH).max(0.0);
}
pub fn applies_to_pos(&self, (row, col): (usize, usize)) -> bool {
if self.distance_left == 0.0 {
return false;
}
let should_be_coord = match self.offset_dir {
Direction::North | Direction::South => col,
Direction::East | Direction::West => row,
};
should_be_coord == self.coordinate
}
pub fn applies_to_loose(&self, (dir, guide_idx): (Direction, usize)) -> bool {
if self.distance_left == 0.0 {
return false;
}
if dir == self.offset_dir || dir == self.offset_dir * Direction::South {
self.coordinate == 2 * guide_idx + 1
} else {
false
}
}
}
/// Tracks state of all currently running animations
pub struct AnimGlobalState {
pub target_stripe: TargetStripeState,
pub loose_rotate: LooseRotateState,
pub loose_insert: LooseInsertState,
net_queue: Option<Arc<Mutex<VecDeque<MetaMessage>>>>,
}
impl AnimGlobalState {
fn new() -> AnimGlobalState {
AnimGlobalState {
target_stripe: TargetStripeState::new(),
loose_rotate: LooseRotateState::new(),
loose_insert: LooseInsertState::new(),
net_queue: None,
}
}
pub fn advance_by(&mut self, ticks: f64) {
self.target_stripe.advance_by(ticks);
self.loose_rotate.advance_by(ticks);
self.loose_insert.advance_by(ticks);
}
pub fn set_send(&mut self, send: Arc<Mutex<VecDeque<MetaMessage>>>) {
self.net_queue = Some(send)
}
pub fn apply(&mut self, msg: AnimSync) {
match msg {
AnimSync::Rotate(dir) => self.loose_rotate.reset(dir),
AnimSync::Insert(dir, x) => self.loose_insert.reset(dir, x),
}
}
pub fn apply_send(&mut self, sync: AnimSync) {
self.apply(sync.clone());
if let Some(ref mut send) = self.net_queue {
let message = Message::Anim(sync);
send.lock().unwrap().push_back(message.into());
}
}
}
// Animation events replicated over the network so all peers play the same
// animation: a loose-tile rotation or a row/column insert.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum AnimSync {
    Rotate(RotateDir),
    Insert(Direction, usize),
}
lazy_static! {
    // Process-wide animation state shared between the render loop and the
    // network layer.
    pub static ref STATE: RwLock<AnimGlobalState> = { RwLock::new(AnimGlobalState::new()) };
}
| true
|
a3b9e31369235e71bd04558aa0ab6bd88fd62042
|
Rust
|
buhe/rust_study
|
/study2/src/main.rs
|
UTF-8
| 455
| 3.34375
| 3
|
[
"MIT"
] |
permissive
|
use std::fmt::Display;
/// A 2x2 matrix stored row-major as (m00, m01, m10, m11).
#[derive(Debug)]
struct Matrix(f32, f32, f32, f32);

impl Display for Matrix {
    /// Renders the matrix as two parenthesized rows, one per line.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "({},{})\n({},{})", self.0, self.1, self.2, self.3)
    }
}

/// Returns the transpose of `m` (swaps the two off-diagonal entries).
fn t(m: Matrix) -> Matrix {
    Matrix(m.0, m.2, m.1, m.3)
}
/// Demo entry point: prints a matrix and its transpose.
fn main() {
    println!("Hello, world!");
    let m = Matrix(1.1, 1.2, 2.1, 2.2);
    println!("{}", m);
    let transposed = t(m);
    println!("{}", transposed);
}
| true
|
83da17dfee8fe14f2d5a48818203761e0d399c6c
|
Rust
|
fors00ked/raytracer
|
/src/main.rs
|
UTF-8
| 5,256
| 2.609375
| 3
|
[
"MIT"
] |
permissive
|
extern crate rand;
use rand::Rng;
extern crate rayon;
use rayon::prelude::*;
use std::path::Path;
use std::fs::File;
use std::io::Write;
use std::f32;
use std::sync::Arc;
mod math;
use math::vec3::Vec3;
use math::ray::Ray;
mod world;
use world::bvh::*;
use world::hitable::*;
use world::camera::*;
use world::materials::*;
// Traces `r` into the scene and returns its color contribution.
// Scattered rays recurse up to 50 bounces; misses fall through to a
// blue-to-white background gradient.
fn color(r: Ray, world: &dyn Hitable, depth: i32) -> Vec3 {
    let mut rec = HitRecord::new();
    // t_min of 0.001 avoids "shadow acne" from re-hitting the same surface.
    if world.hit(&r, 0.001, f32::MAX, &mut rec) {
        if depth < 50 {
            match rec.material {
                None => {
                    // Hit record without a material contributes black.
                    Vec3::zero()
                },
                Some (ref material) => {
                    let (scatter_result, attenuation, scattered) = material.scatter(&r, &rec);
                    if scatter_result {
                        // Attenuate and follow the scattered ray.
                        attenuation * color(scattered, world, depth + 1)
                    }
                    else {
                        Vec3::zero()
                    }
                }
            }
        }
        else {
            // Recursion cap reached: absorb.
            Vec3::zero()
        }
    }
    else {
        // Miss: vertical blue-to-white gradient background.
        let unit_direction = math::vec3::unit_vector(r.direction());
        let t = 0.5 * (unit_direction.y() + 1.0);
        return (1.0 - t) * Vec3::new(1.0, 1.0, 1.0) + t * Vec3::new(0.5, 0.7, 1.0)
    }
}
// Builds the randomized "final scene": a huge ground sphere, a grid of small
// spheres with randomized materials, and three featured spheres.
fn random_scene() -> Vec<Arc<dyn Hitable+Send+Sync>> {
    let mut rng = rand::thread_rng();
    let mut hitable: Vec<Arc<dyn Hitable+Send+Sync>> = vec![];
    // Ground: giant gray lambertian sphere.
    hitable.push(Arc::new(Sphere::new(Vec3::new(0.0, -1000.0, 0.0), 1000.0, Arc::new(Lambertian::new(Vec3::new(0.5, 0.5, 0.5))))));
    for a in -11..11 {
        for b in -11..11 {
            // Jitter each small sphere within its grid cell.
            let center = Vec3::new(a as f32 + 0.9 * rng.gen::<f32>(), 0.2, b as f32 + 0.9 * rng.gen::<f32>());
            let rand = rng.gen::<f32>();
            // Skip spheres too close to the featured sphere.
            // NOTE(review): the reference "Ray Tracing in One Weekend" scene
            // uses (4.0, 0.2, 0.0) here — 4.9 may be a typo; confirm.
            if (center - Vec3::new(4.9, 0.2, 0.0)).length() > 0.9 {
                if rand < 0.8 {
                    // ~80%: diffuse, squared-random albedo.
                    hitable.push(Arc::new(Sphere::new(center, 0.2, Arc::new(Lambertian::new(Vec3::new(rng.gen::<f32>() * rng.gen::<f32>(), rng.gen::<f32>() * rng.gen::<f32>(), rng.gen::<f32>()* rng.gen::<f32>()))))));
                }
                else if rand < 0.95 {
                    // ~15%: metal with random light tint.
                    hitable.push(Arc::new(Sphere::new(center, 0.2, Arc::new(Metal::new(Vec3::new(0.5 * (1.0 + rng.gen::<f32>()), 0.5 * (1.0 + rng.gen::<f32>()), 0.5 * (1.0 + rng.gen::<f32>())))))));
                }
                else {
                    // ~5%: glass (dielectric, refractive index 1.5).
                    hitable.push(Arc::new(Sphere::new(center, 0.2, Arc::new(Dielectric::new(1.5)))));
                }
            }
        }
    }
    // Three featured large spheres: metal, lambertian, metal.
    hitable.push(Arc::new(Sphere::new(Vec3::new(0.0, 1.0, 0.0), 1.0, Arc::new(Metal::new(Vec3::new(0.3, 0.9, 0.4))))));
    hitable.push(Arc::new(Sphere::new(Vec3::new(-4.0, 1.0, 0.0), 1.0, Arc::new(Lambertian::new(Vec3::new(0.4, 0.2, 0.1))))));
    hitable.push(Arc::new(Sphere::new(Vec3::new(4.0, 1.0, 0.0), 1.0, Arc::new(Metal::new(Vec3::new(0.7, 0.6, 0.5))))));
    hitable
}
// Renders the random scene to a plain-text PPM file, tracing rows in
// parallel with rayon and antialiasing with 100 samples per pixel.
fn main() {
    let file_name = "output.ppm";
    let file_path = Path::new(file_name);
    let mut file = match File::create(file_path) {
        Ok(file) => file,
        Err(e) => panic!("Could not create file: {} error: {:?}", file_name, e.kind()),
    };
    let width = 800;
    let height = 400;
    // PPM header: P3 = ASCII RGB, 255 = max channel value.
    write!(file, "P3\n{} {}\n255\n", width,height).expect("Could not write to file");
    // NOTE(review): bare `Hitable+Send+Sync` trait object (pre-2018 syntax,
    // missing `dyn`) — `random_scene` itself returns the `dyn` form.
    let mut hitable: Vec<Arc<Hitable+Send+Sync>> = random_scene();
    /*vec![
    Box::new(Sphere::new(Vec3::new(0.0, 0.0, -1.0), 0.5, Rc::new(Lambertian::new(Vec3::new(0.8, 0.3, 0.3))))),
    Box::new(Sphere::new(Vec3::new(0.0, -100.5, -1.0), 100.0, Rc::new(Lambertian::new(Vec3::new(0.8, 0.8, 0.0))))),
    Box::new(Sphere::new(Vec3::new(1.0, 0.0, -1.0), 0.5, Rc::new(Metal::new(Vec3::new(0.8, 0.6, 0.2))))),
    Box::new(Sphere::new(Vec3::new(-1.0, 0.0, -1.0), 0.5, Rc::new(Dielectric::new(1.5)))),
    Box::new(Sphere::new(Vec3::new(-1.0, 0.0, -1.0), -0.45, Rc::new(Dielectric::new(1.5)))),
    ];*/
    // Pack the scene into a bounding-volume hierarchy for faster hit tests.
    let world = BvhNode::new(&mut hitable);
    let look_from = Vec3::new(13.0, 2.0, 3.0);
    let look_at = Vec3::new(0.0, 0.0, 0.0);
    let camera = Camera::new(look_from, look_at, Vec3::new(0.0, 1.0, 0.0), 20.0, (width as f32) / (height as f32), 0.1, 10.0);
    let num_samples = 100;
    let mut color_buf = vec![vec![Vec3::zero(); width]; height];
    // Each row is rendered independently, so parallelize over rows.
    color_buf.par_iter_mut().enumerate().for_each(|(i, pixel)| {
        let mut rng = rand::thread_rng();
        for j in 0 .. width {
            let mut col = Vec3::zero();
            // Jittered samples within the pixel for antialiasing.
            for _s in 0..num_samples {
                let u = (j as f32 + rng.gen::<f32>()) / (width as f32);
                let v = (i as f32 + rng.gen::<f32>()) / (height as f32);
                let ray = camera.get_ray(u,v);
                col += color(ray, &world, 0);
            }
            col /= num_samples as f32;
            (*pixel)[j] = col;
        }
    });
    // Emit rows bottom-up; sqrt() applies gamma-2 correction.
    for i in (0 .. height).rev() {
        for j in 0 .. width {
            let col = color_buf[i][j];
            let r = (255.99 * col[0].sqrt()) as i32;
            let g = (255.99 * col[1].sqrt()) as i32;
            let b = (255.99 * col[2].sqrt()) as i32;
            write!(file, "{} {} {}\n", r, g, b).expect("Could not write to file");
        }
    }
}
| true
|
ec5cf2a428fcdcc6b30617de216babe4ae739cf5
|
Rust
|
katsyoshi/zatsu
|
/rust/nlp100/nlp100/src/lib.rs
|
UTF-8
| 5,480
| 2.71875
| 3
|
[] |
no_license
|
extern crate futures;
extern crate hyper;
extern crate hyper_tls;
extern crate libflate;
extern crate regex;
extern crate serde_json;
extern crate tokio_core;
use futures::{Future, Stream};
use hyper::Client;
use hyper_tls::HttpsConnector;
use libflate::gzip::Decoder;
use regex::Regex;
use serde_json::Value;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
use tokio_core::reactor::Core;
// Holds a text plus precomputed word- and character-level tokenizations.
pub struct NLP100 {
    // The unmodified input text.
    pub origin: String,
    // Tokens obtained by splitting on runs of non-word characters (\W+).
    pub words: Vec<String>,
    // Each Unicode scalar value of the input as its own one-char String.
    pub chars: Vec<String>,
}
impl NLP100 {
/// Builds an NLP100 from `script`, precomputing its character and word lists.
/// NOTE(review): the `\W+` regex is recompiled on every call — consider
/// caching it if `new` becomes hot.
pub fn new(script: &str) -> NLP100 {
    let chars: Vec<String> = script.chars().map(|m| m.to_string()).collect();
    let words: Vec<String> = Regex::new(r"\W+").unwrap().split(script).map(|m| m.to_string()).collect();
    let origin: String = script.to_string();
    NLP100 {
        words,
        chars,
        origin,
    }
}
/// Returns the n-grams of `size` over words (`t == true`, space-joined) or
/// characters (`t == false`, joined without a separator). Returns an empty
/// vector when `size` exceeds the sequence length.
pub fn ngram(self, size: u8, t: bool) -> Vec<String> {
    let (v, sep) = if t { (self.words, " ") } else { (self.chars, "") };
    let width = size as usize;
    let len = v.len();
    if width > len {
        return Vec::new();
    }
    (0..=len - width).map(|i| v[i..i + width].join(sep)).collect()
}
/// Returns the length of each word.
/// NOTE(review): `str::len` is a byte length, not a Unicode char count —
/// confirm that ASCII-only input is assumed.
pub fn char_count_list(self) -> Vec<u32> {
    let mut counts = Vec::with_capacity(self.words.len());
    for word in &self.words {
        counts.push(word.len() as u32);
    }
    counts
}
/// Returns the first `stop` characters of `word` as a `String`.
///
/// The previous implementation collected a `Vec<String>` (one allocation per
/// character) and indexed `[0..stop]`, panicking whenever `stop` exceeded
/// the character count; `chars().take(stop)` truncates gracefully instead
/// and allocates only the result.
pub fn chars_first_to(word: String, stop: usize) -> String {
    word.chars().take(stop).collect()
}
/// Returns each word truncated to its first `stop` characters.
/// NOTE(review): behavior for words shorter than `stop` is delegated to
/// `chars_first_to` — verify it handles short words as callers expect.
pub fn words_first_to(self, stop: usize) -> Vec<String> {
    self.words.iter().map(|word| NLP100::chars_first_to(word.to_string(), stop)).collect()
}
/// Opens `path` for reading, panicking with a readable message on failure.
fn open(path: String) -> File {
    // `panic!(err)` with a non-string payload is deprecated (a hard error in
    // the 2021 edition); format the error and include the offending path.
    File::open(&path).unwrap_or_else(|e| panic!("cannot open {}: {}", path, e))
}
/// Reads a gzip-compressed UTF-8 text file and returns its lines
/// (split on '\n'; a trailing newline yields a final empty element).
pub fn read_gzip(path: String) -> Vec<String> {
    let mut file = NLP100::open(path);
    let mut string = String::new();
    // Decompress the whole file into memory before splitting.
    Decoder::new(&mut file).expect("cannot open gz file!").read_to_string(&mut string).expect("cannot read string in this gz file!");
    string.split("\n").map(|m| m.to_string()).collect()
}
/// Reads the file at `path` and returns its lines as owned strings.
pub fn read(path: String) -> Vec<String> {
    let reader = BufReader::new(NLP100::open(path));
    reader
        .lines()
        .map(|m| m.expect("None!").to_string())
        .collect()
}
/// Counts the number of lines in the file at `path`.
pub fn count(path: String) -> usize {
    let reader = BufReader::new(NLP100::open(path));
    reader.lines().count()
}
/// Fetches `url` over HTTPS and returns the response body, lossily decoded
/// as UTF-8. Blocks the calling thread on a dedicated tokio `Core`.
pub fn get(url: String) -> String {
    let mut core = Core::new().unwrap();
    let url = url.parse().unwrap();
    let handle = core.handle();
    // 4 = number of DNS worker threads for the TLS connector.
    let client = Client::configure().connector(HttpsConnector::new(4, &handle).unwrap()).build(&handle);
    let work = client.get(url).and_then(|res| {
        // Buffer the whole body before decoding.
        res.body().concat2().map(|chunk| {
            let v = chunk.to_vec();
            String::from_utf8_lossy(&v).to_string()
        })
    });
    match core.run(work) {
        Ok(v) => v,
        // NOTE(review): `panic!(e)` with a non-string payload is deprecated
        // (hard error in the 2021 edition) — format the error instead.
        Err(e) => { panic!(e); },
    }
}
/// Parses `json` into a `serde_json::Value`, panicking with a readable
/// message when the input is not valid JSON.
pub fn parse_json(json: String) -> Value {
    // `panic!(err)` with a non-string payload is deprecated (a hard error in
    // the 2021 edition); format the error instead.
    serde_json::from_str(&json).unwrap_or_else(|e| panic!("invalid JSON: {}", e))
}
}
#[cfg(test)]
mod tests {
    use NLP100;
    // NOTE(review): several tests depend on local fixture files
    // ("hightemp.txt", "jawiki-country.json.gz") and one performs a live
    // network request — they are environment-dependent.
    #[test]
    fn origin() {
        let nlp100 = setup();
        assert_eq!(nlp100.origin, "hello");
    }
    #[test]
    fn chars() {
        let nlp100 = setup();
        assert_eq!(nlp100.chars, vec!["h", "e", "l", "l", "o"]);
    }
    #[test]
    fn words() {
        let nlp100 = setup();
        assert_eq!(nlp100.words, vec!["hello"]);
    }
    #[test]
    fn count_words(){
        let nlp100 = NLP100::new("h, l, l,o").words;
        assert_eq!(nlp100.len(), 4 as usize);
        assert_eq!(nlp100, vec!["h", "l", "l", "o"]);
    }
    #[test]
    fn bigram() {
        let nlp100 = NLP100::new("hello");
        assert_eq!(nlp100.ngram(2, false), vec!["he", "el", "ll", "lo"]);
    }
    #[test]
    fn trigram() {
        let nlp100 = NLP100::new("hello");
        assert_eq!(nlp100.ngram(3, false), vec!["hel", "ell", "llo"]);
    }
    #[test]
    fn word_cound() {
        let nlp100 = NLP100::new("hello, world!");
        assert_eq!(nlp100.char_count_list(), vec![5, 5]);
    }
    #[test]
    fn words_first_to_one() {
        let nlp100 = NLP100::new("hello, world!!!");
        assert_eq!(nlp100.words_first_to(1), vec!["h", "w"]);
    }
    #[test]
    fn words_first_to_two() {
        let nlp100 = NLP100::new("hello, world!!!");
        assert_eq!(nlp100.words_first_to(2), vec!["he", "wo"]);
    }
    #[test]
    fn read_file() {
        let line = NLP100::read(String::from("hightemp.txt"));
        assert_eq!(line[0], "高知県\t江川崎\t41\t2013-08-12");
    }
    #[test]
    fn count_line() {
        let line = NLP100::count(String::from("hightemp.txt"));
        assert_eq!(line, 24);
    }
    #[test]
    fn deflate() {
        let path = String::from("jawiki-country.json.gz");
        let json = NLP100::read_gzip(path);
        assert_eq!(55586, json[0].len());
    }
    #[test]
    fn parse_json() {
        let json = NLP100::parse_json(String::from("{ \"hello\": 10 }"));
        assert_eq!(json["hello"], 10);
    }
    #[test]
    fn get() {
        let html = NLP100::get(String::from("https://katsyoshi.org/nlp100.json"));
        assert_eq!(html, "");
    }
    fn setup() -> NLP100 {
        NLP100::new("hello")
    }
}
| true
|
4cca1a7633a29ec3c8a1b8a8ea9adfa2e5faafd9
|
Rust
|
sapanburman/webapp_boilerplate
|
/src/frontend/mod.rs
|
UTF-8
| 5,215
| 2.734375
| 3
|
[] |
no_license
|
use rocket_contrib::templates::Template;
use std::collections::{HashMap, BTreeMap};
use crate::user::model::User;
use hmac::{Hmac, NewMac};
use jwt::SignWithKey;
use sha2::Sha256;
use crate::{DbConn, ApplicationConfig};
use rocket::http::{Cookie, Cookies};
use rocket_contrib::templates::tera::Context;
use rocket::request::Form;
use bcrypt::{hash, DEFAULT_COST};
/// Attach all server-rendered UI routes under the `/ui` prefix.
pub fn mount(app: rocket::Rocket) -> rocket::Rocket {
    app.mount("/ui", routes![activate, request_reset, reset_password])
        .mount("/ui", routes![activate_error])
}
#[get("/activate/<registration_code>")]
// Activates a freshly registered account: looks the user up by the emailed
// registration code, issues a signed JWT session cookie and clears the code.
fn activate(registration_code: String, connection: DbConn, config: ApplicationConfig, mut cookies: Cookies) -> Template {
    let mut user = match User::by_registration_code(registration_code, &connection.0) {
        Some(u) => u,
        None => {
            let mut context = Context::new();
            context.insert("error_message", "Could not find a user with this registration code");
            return Template::render("error/specific_error", &context);
        }
    };
    // HMAC key for signing the token comes from the app config.
    let secretkey = match config.0.get_str("secretkey") {
        Ok(x) => { x }
        Err(_) => {
            error!("Could not find secret key for user token enryption");
            return Template::render("error/generic_error", &Context::new());
        }
    };
    let key: Hmac<Sha256> = Hmac::new_varkey(secretkey.as_bytes()).unwrap();
    let mut claims = BTreeMap::new();
    // "sub" (subject) claim carries the user's database id.
    claims.insert("sub", user.id.unwrap().to_string());
    match claims.sign_with_key(&key) {
        Ok(message) => {
            // NOTE(review): cookie is non-secure and not HttpOnly — confirm
            // this is intended outside of local development.
            let cookie = Cookie::build("token", message.clone()).path("/").secure(false).finish();
            cookies.add(cookie);
            //Set reset code to null because we have a successful login
            user.registration_code = None;
            User::update(&user, &connection.0);
            let mut context = Context::new();
            context.insert("token", &message);
            Template::render("activate", &context)
        }
        Err(_) => {
            error!("Token could not be created");
            return Template::render("error/generic_error", &Context::new());
        }
    }
}
#[get("/activate/<_registration_code>", rank = 2)]
pub fn activate_error(_registration_code: String) -> Template {
let context: HashMap<String, String> = HashMap::new();
Template::render("error", &context)
}
#[get("/request_reset/<reset_code>")]
// Shows the password-reset form for a valid, previously emailed reset code.
fn request_reset(reset_code: String, connection: DbConn) -> Template {
    let user = match User::by_reset_code(reset_code, &connection.0) {
        Some(u) => u,
        None => {
            let mut context = Context::new();
            context.insert("error_message", "Could not find a user with this reset code");
            return Template::render("error/specific_error", &context);
        }
    };
    // Echo the code back into the form so the POST can identify the user.
    let mut context = Context::new();
    context.insert("reset_code", &user.reset_code.unwrap());
    Template::render("requestResetPassword", &context)
}
// Form payload for the password-reset POST: the emailed reset code plus the
// user's new plaintext password (hashed before storage).
#[derive(FromForm)]
struct ResetForm {
    pub reset_code: String,
    pub password: String,
}
#[post("/reset_password", data = "<resetform>")]
// Completes a password reset: validates the new password, re-hashes it,
// issues a signed session cookie and invalidates both one-time codes.
fn reset_password(resetform: Form<ResetForm>, mut cookies: Cookies, config: ApplicationConfig, connection: DbConn) -> Template {
    // Minimum password length policy: 8 characters.
    if resetform.password.chars().count() < 8 {
        let mut context = Context::new();
        context.insert("error_message", "Password is too short. Minimum 8 characters!");
        return Template::render("error/specific_error", &context);
    }
    let mut user = match User::by_reset_code(resetform.reset_code.clone(), &connection.0) {
        Some(u) => u,
        None => {
            let mut context = Context::new();
            context.insert("error_message", "Could not find a user with this reset code");
            return Template::render("error/specific_error", &context);
        }
    };
    let secretkey = match config.0.get_str("secretkey") {
        Ok(x) => { x }
        Err(_) => {
            error!("Could not find secret key for user token enryption");
            return Template::render("error/generic_error", &Context::new());
        }
    };
    let key: Hmac<Sha256> = Hmac::new_varkey(secretkey.as_ref()).unwrap();
    let mut claims = BTreeMap::new();
    claims.insert("sub", user.id.unwrap().to_string());
    match claims.sign_with_key(&key) {
        Ok(message) => {
            // NOTE(review): cookie is non-secure — confirm intended for dev only.
            let cookie = Cookie::build("token", message.clone()).path("/").secure(false).finish();
            cookies.add(cookie);
            // Store only the bcrypt hash of the new password.
            user.password = hash(&resetform.password, DEFAULT_COST).unwrap();
            //Set reset code to null because we have a successful reset
            user.reset_code = None;
            // since a user reset was successful, it's also fine to set regestration code to null
            user.registration_code = None;
            User::update(&user, &connection.0);
            let mut context = Context::new();
            context.insert("token", &message);
            Template::render("resetPassword", &context)
        }
        Err(_) => {
            error!("Token could not be created");
            return Template::render("error/generic_error", &Context::new());
        }
    }
}
| true
|
1afb8664d323b821087802a09fb18fa26ebb8ef5
|
Rust
|
duzhanyuan/rs_netdisk
|
/src/controllers/mod.rs
|
UTF-8
| 1,110
| 2.828125
| 3
|
[] |
no_license
|
mod file;
mod folder;
mod user;
pub use self::file::FileController;
pub use self::folder::FolderController;
pub use self::user::UserController;
use std::error::Error;
use std::fmt::Display;
// HTTP-flavoured error categories shared by all controllers; each maps to a
// response text (Display) and a log severity (level()).
#[derive(Debug)]
pub enum ControllerError {
    Unauthorized,
    Forbidden,
    NotFound,
    InternalServerError,
}
impl ControllerError {
    /// Log severity at which this error should be reported.
    pub fn level(&self) -> &str {
        match self {
            ControllerError::Unauthorized | ControllerError::Forbidden => "warn",
            ControllerError::NotFound => "debug",
            ControllerError::InternalServerError => "error",
        }
    }
}
impl Display for ControllerError {
    /// Renders the conventional HTTP reason phrase for each variant.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        f.write_str(match self {
            ControllerError::Unauthorized => "Unauthorized",
            ControllerError::Forbidden => "Forbidden",
            ControllerError::NotFound => "Not Found",
            ControllerError::InternalServerError => "Internal Server Error",
        })
    }
}
impl Error for ControllerError {}
| true
|
5f984e1b7c0484be64e1a72a42e8ada923d541bf
|
Rust
|
jarmar/advent-of-code-2018
|
/12/src/main.rs
|
UTF-8
| 2,823
| 3.171875
| 3
|
[] |
no_license
|
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
type Rule = [bool; 5];
/// Extracts the pot states from the header line: '#' is a plant, '.' is
/// empty, every other character (label text) is skipped.
fn parse_init_state(line: &str) -> Vec<bool> {
    line.chars()
        .filter_map(|c| match c {
            '#' => Some(true),
            '.' => Some(false),
            _ => None,
        })
        .collect()
}
fn parse_rule(line: &str) -> Option<Rule> {
let parts: Vec<_> = line.split(" => ").collect();
let result = (parts[1].chars().next().unwrap() == '#');
if result {
let mut rule = [false; 5];
for (i, rule_val) in rule[..].iter_mut().zip(parts[0].chars().map(|c| c == '#')) {
*i = rule_val;
}
Some(rule)
} else {
None
}
}
// Set of all productive rules, kept sorted so membership can be checked.
struct RuleLookup {
    rules: Vec<Rule>
}
impl RuleLookup {
    /// Builds a sorted table of all productive rules found in `lines`.
    fn new<T: AsRef<str>>(lines: &[T]) -> Self {
        let mut rules: Vec<_> = lines
            .iter()
            .filter_map(|line| parse_rule(line.as_ref()))
            .collect();
        rules.sort();
        RuleLookup { rules }
    }
    /// A neighbourhood produces a plant iff it appears in the rule table.
    fn rule(&self, state: &[bool; 5]) -> bool {
        self.rules.contains(state)
    }
}
/// Debug helper: prints one generation, padding with '.' so successive
/// generations stay aligned as the window's start index shifts (display
/// origin is fixed at -15).
fn print_state(state: &[bool], start_ix: isize) {
    // `_` instead of an unused named loop variable (was a warning).
    for _ in -15..start_ix {
        print!(".");
    }
    for &pot in state {
        print!("{}", if pot { '#' } else { '.' });
    }
    println!();
}
fn main() {
    // TODO: do some clever cycle/offset analysis and use that to
    // analytically find a solution.
    // Currently, part 2 solved by looking at 5000, 50000, 500000 etc.
    // and observing a very clear pattern...
    let args: Vec<String> = env::args().collect();
    let f = &args.get(1).expect("No input file given");
    let f = File::open(f).expect("File not found");
    let lines: Result<Vec<_>, _> = BufReader::new(f).lines().collect();
    let lines = lines.expect("Could not read lines from file");
    // Line 0 is the initial state header; line 1 is blank; rules follow.
    let init_state = parse_init_state(&lines[0]);
    let rule_lookup = RuleLookup::new(&lines[2..]);
    // Pad 3 empty pots on each side so every pot has a full 5-wide window.
    let mut state = vec![false; 3];
    let mut start_ix = -3;
    state.extend(init_state);
    state.extend(&[false; 3]);
    for i in 0..5000000 {
        //print_state(&state, start_ix);
        let len_before = state.len() as isize;
        // Apply the rules to every 5-pot window, dropping the empty prefix.
        let next_vals: Vec<_> = state
            .windows(5)
            .map(|w| rule_lookup.rule(&[w[0], w[1], w[2], w[3], w[4]]))
            .skip_while(|&b| !b)
            .collect();
        state.clear();
        state.extend(&[false; 3]);
        state.extend(next_vals);
        state.extend(&[false; 3]);
        let len_after = state.len() as isize;
        // Track how far the window's origin moved after trimming/padding.
        let n_dropped = len_before - len_after + 1;
        start_ix += n_dropped;
    }
    //print_state(&state, start_ix);
    println!("{}", state.len());
    // Answer: the sum of the (signed) indices of all pots with a plant.
    let n_plants: isize = state
        .iter()
        .zip(start_ix..)
        .filter(|&(b, _)| *b)
        .map(|(_, i)| i)
        //.collect();
        .sum();
    println!("Hello, world! {}", n_plants);
}
| true
|
ee057286b05d7328d0b7f28fee6852b2401ce965
|
Rust
|
potatosalad/exercism
|
/rust/anagram/src/lib.rs
|
UTF-8
| 726
| 3.296875
| 3
|
[] |
no_license
|
use std::collections::HashSet;
/// Returns every candidate that is an anagram of `word`, excluding the word
/// itself (compared case-insensitively).
pub fn anagrams_for<'a>(word: &str, possible_anagrams: &[&'a str]) -> HashSet<&'a str> {
    // Canonical key computed once for the target word.
    let word_key = word_to_sorted_vec(word);
    possible_anagrams
        .iter()
        .filter(|candidate| {
            !is_same_word(word, candidate) && word_to_sorted_vec(candidate) == word_key
        })
        .copied()
        .collect()
}
// Case-insensitive equality (full Unicode lowercasing).
fn is_same_word(a: &str, b: &str) -> bool {
    a.to_lowercase().eq(&b.to_lowercase())
}
// Canonical anagram key: the word's lowercased characters in sorted order.
fn word_to_sorted_vec(word: &str) -> Vec<char> {
    let lowered = word.to_lowercase();
    let mut key: Vec<char> = lowered.chars().collect();
    key.sort_unstable();
    key
}
| true
|
962a23b7aa81ccc5308abbc37fd4c2f552c61430
|
Rust
|
vorot/roots
|
/src/analytical/linear.rs
|
UTF-8
| 2,619
| 3
| 3
|
[
"BSD-2-Clause-Views",
"BSD-2-Clause"
] |
permissive
|
// Copyright (c) 2015, Mikhail Vorotilov
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use super::super::FloatType;
use super::super::Roots;
/// Solves a linear equation a1*x + a0 = 0.
///
/// # Examples
///
/// ```
/// use roots::Roots;
/// use roots::find_roots_linear;
///
/// // Returns Roots::No([]) as '0*x + 1 = 0' has no roots;
/// let no_root = find_roots_linear(0f32, 1f32);
/// assert_eq!(no_root, Roots::No([]));
///
/// // Returns Roots::Two([0f64]) as '1*x + 0 = 0' has the root 0
/// let root = find_roots_linear(1f64, 0f64);
/// assert_eq!(root, Roots::One([0f64]));
///
/// // Returns Roots::One([0f32]) as 0 is one of roots of '0*x + 0 = 0'
/// let zero_root = find_roots_linear(0f32, 0f32);
/// assert_eq!(zero_root, Roots::One([0f32]));
/// ```
/// Solves a linear equation a1*x + a0 = 0.
///
/// Degenerate cases follow the crate's convention: `0*x + 0 = 0` reports
/// the single root 0, while `0*x + c = 0` (c != 0) has no roots.
///
/// # Examples
///
/// ```
/// use roots::Roots;
/// use roots::find_roots_linear;
///
/// // '1*x + 0 = 0' has the single root 0
/// let root = find_roots_linear(1f64, 0f64);
/// assert_eq!(root, Roots::One([0f64]));
///
/// // '0*x + 1 = 0' has no roots
/// let no_root = find_roots_linear(0f32, 1f32);
/// assert_eq!(no_root, Roots::No([]));
/// ```
pub fn find_roots_linear<F: FloatType>(a1: F, a0: F) -> Roots<F> {
    match (a1 == F::zero(), a0 == F::zero()) {
        (false, _) => Roots::One([-a0 / a1]),
        (true, true) => Roots::One([F::zero()]),
        (true, false) => Roots::No([]),
    }
}
#[cfg(test)]
mod test {
    use super::super::super::*;
    #[test]
    fn test_find_roots_linear() {
        // Degenerate, regular, and no-root cases respectively.
        assert_eq!(find_roots_linear(0f32, 0f32), Roots::One([0f32]));
        assert_eq!(find_roots_linear(2f64, 1f64), Roots::One([-0.5f64]));
        assert_eq!(find_roots_linear(0f32, 1f32), Roots::No([]));
    }
}
| true
|
2fd06fcab30ed123f3ca7a0cd53a8dc4788a0f43
|
Rust
|
RustWorks/Draw2D_GLFW_Vulkan_FFI
|
/src/graphics/vulkan/instance/layers.rs
|
UTF-8
| 1,244
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
//! Functions to check if a set of vulkan layers are available for the
//! instance.
use anyhow::{bail, Result};
use ash::{version::EntryV1_0, Entry};
/// Bail if any of the required layers is not supported by the instance.
pub fn check_layers(entry: &Entry, required_layers: &[String]) -> Result<()> {
    let missing = missing_layers(entry, required_layers)?;
    if missing.is_empty() {
        Ok(())
    } else {
        bail!("some required layers were not found!\n{:?}", missing)
    }
}
/// Get a list of all layers which are required but not available for this
/// vulkan instance.
fn missing_layers(
    entry: &Entry,
    required_layers: &[String],
) -> Result<Vec<String>> {
    let available_layer_properties =
        entry.enumerate_instance_layer_properties()?;
    let available_names: Vec<String> = available_layer_properties
        .iter()
        .map(|layer| {
            // `layer_name` is a fixed-size, NUL-padded C char array. Stop at
            // the first NUL so the String compares equal to a plain layer
            // name; previously the trailing NULs were kept, so `contains`
            // below could never match.
            let bytes: Vec<u8> = layer
                .layer_name
                .iter()
                .take_while(|&&c| c != 0)
                .map(|&c| c as u8)
                .collect();
            String::from_utf8(bytes).unwrap()
        })
        .collect();
    log::info!("Available layers {}", available_names.join("\n"));
    // Keep only required layers that are NOT available — the previous
    // version had this filter inverted and returned the supported ones.
    Ok(required_layers
        .iter()
        .filter(|name| !available_names.contains(name))
        .cloned()
        .collect())
}
| true
|
df022a96dba3a27a24e114cef6d61894ecdf66cd
|
Rust
|
proycon/vocajeux
|
/src/lib.rs
|
UTF-8
| 10,332
| 2.921875
| 3
|
[] |
no_license
|
extern crate rand;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate regex;
extern crate md5;
extern crate dirs;
extern crate csv;
use std::fs;
use std::error::Error;
use std::fmt;
use std::io;
use std::iter::Iterator;
use std::collections::HashMap;
use std::time::{SystemTime, UNIX_EPOCH};
use md5::{compute,Digest};
use std::path::{Path,PathBuf};
use std::iter::FromIterator;
/// Vocabulary Item data structure
#[derive(Serialize, Deserialize)]
pub struct VocaItem {
    #[serde(default)] //deserialise missing fields to default empty values
    pub word: String,
    // Phonetic transcription of the word.
    #[serde(default)]
    pub transcription: String,
    #[serde(default)]
    pub translation: String,
    // Example sentence using the word.
    #[serde(default)]
    pub example: String,
    #[serde(default)]
    pub comment: String,
    // Free-form tags used for filtering (see VocaItem::filter).
    #[serde(default)]
    pub tags: Vec<String>
}
/// Vocabulary List data structure: the full contents of one data file.
#[derive(Serialize, Deserialize)]
pub struct VocaList {
    pub items: Vec<VocaItem>
}
// Per-item answer history, keyed by the item's md5 id (see VocaItem::id).
#[derive(Serialize, Deserialize)]
pub struct VocaScore {
    pub correct: HashMap<String,u32>,
    pub incorrect: HashMap<String,u32>,
    // Unix timestamp (seconds) of the last time each item was shown.
    pub lastseen: HashMap<String,u64>,
    // pub due: HashMap<String,u64>
}
/// Display renders a VocaItem as its bare word, so items can be printed
/// directly.
impl fmt::Display for VocaItem {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str(&self.word)
    }
}
impl VocaItem {
    /// Stable identifier for the item: the MD5 digest of its word.
    pub fn id(&self) -> md5::Digest {
        md5::compute(self.word.as_bytes())
    }
    /// Hex-encoded form of [`VocaItem::id`], used as a score-map key.
    pub fn id_as_string(&self) -> String {
        format!("{:x}", self.id())
    }
    /// True when the item passes the optional tag filter: no filter or an
    /// empty filter matches everything; otherwise at least one of the
    /// item's tags must appear in the filter list.
    pub fn filter(&self, filtertags: Option<&Vec<&str>>) -> bool {
        filtertags.map_or(true, |tags| {
            tags.is_empty() || self.tags.iter().any(|tag| tags.contains(&tag.as_str()))
        })
    }
    /// Prints the item to stdout, optionally followed by its transcription,
    /// example sentence and translation (in that order).
    pub fn print(self: &VocaItem, phon: bool, translation: bool, example: bool) {
        println!("{}", self.word);
        if phon {
            println!("{}", self.transcription);
        }
        if example {
            println!("{}", self.example);
        }
        if translation {
            println!("{}", self.translation);
        }
    }
}
impl VocaList {
    /// Parse the vocabulary data file (JSON) into the VocaList structure
    pub fn parse(filename: &str) -> Result<VocaList, Box<dyn Error>> {
        let data = fs::read_to_string(filename)?;
        let data: VocaList = serde_json::from_str(data.as_str())?; //(shadowing)
        Ok(data)
    }
    /// Add a new item to the vocabulary list
    pub fn append(&mut self, word: String, translation: Option<&str>, transcription: Option<&str>, example: Option<&str>, comment: Option<&str>, tags: Option<&Vec<&str>>) {
        // Missing optional fields become empty strings / empty tag list.
        let tags: Vec<String> = if let Some(ref tags) = tags {
            tags.iter()
                .map(|s| { s.to_string() })
                .collect()
        } else {
            Vec::new()
        };
        let item = VocaItem {
            word: word,
            translation: translation.map(|s:&str| s.to_string()).unwrap_or(String::new()),
            transcription: transcription.map(|s:&str| s.to_string()).unwrap_or(String::new()),
            example: example.map(|s:&str| s.to_string()).unwrap_or(String::new()),
            comment: comment.map(|s:&str| s.to_string()).unwrap_or(String::new()),
            tags: tags,
        };
        self.items.push(item);
    }
    // Serialize the whole list back to a JSON file.
    pub fn save(&self, filename: &str) -> std::io::Result<()> {
        let data: String = serde_json::to_string(self)?;
        fs::write(filename, data)
    }
    /// Show the contents of the Vocabulary List; prints to to standard output
    pub fn show(&self, withtranslation: bool, withtranscription: bool, filtertags: Option<&Vec<&str>>, withtags: bool, withexample: bool, withcomment: bool) {
        for item in self.items.iter() {
            if item.filter(filtertags) {
                print!("{}", item);
                if withtranscription { print!("\t{}", item.transcription) }
                if withtranslation { print!("\t{}", item.translation) }
                if withexample { print!("\t{}", item.example) }
                if withcomment { print!("\t{}", item.comment) }
                if withtags {
                    print!("\t");
                    for (i, tag) in item.tags.iter().enumerate() {
                        print!("{}", tag);
                        if i < item.tags.len() - 1 {
                            print!(",")
                        }
                    }
                }
                println!()
            }
        }
    }
    ///Output all data as CSV
    pub fn csv(&self, filtertags: Option<&Vec<&str>>) -> Result<(), Box<dyn Error>> {
        let mut wtr = csv::WriterBuilder::new()
            .flexible(true)
            .has_headers(false)
            .from_writer(io::stdout());
        for item in self.items.iter() {
            if item.filter(filtertags) {
                wtr.serialize(item)?;
            }
        };
        wtr.flush()?;
        Ok(())
    }
    ///Select a word
    // Weighted random choice: each item's weight is its VocaScore score
    // (higher = less well known), or 1.0 when no score data is available.
    pub fn pick(&self, mut optscoredata: Option<&mut VocaScore>, filtertags: Option<&Vec<&str>>, seen: bool) -> &VocaItem {
        // Total weight of all items passing the tag filter.
        let sum: f64 = self.items.iter().map(|item| {
            if item.filter(filtertags) {
                if let Some(ref scoredata) = optscoredata {
                    scoredata.score(item.id_as_string().as_str())
                } else {
                    1.0
                }
            } else {
                0.0
            }
        }).sum();
        // Walk the cumulative weights until the random threshold is crossed.
        let choice: f64 = rand::random::<f64>() * sum;
        let mut score: f64 = 0.0; //cummulative score
        let mut choiceindex: usize = 0;
        for (i, item) in self.items.iter().enumerate() {
            if item.filter(filtertags) {
                if let Some(ref scoredata) = optscoredata {
                    score += scoredata.score(item.id_as_string().as_str());
                } else {
                    score += 1.0;
                }
                if score >= choice {
                    choiceindex = i;
                    break;
                }
            }
        }
        let vocaitem = &self.items[choiceindex];
        if seen {
            if let Some(ref mut scoredata) = optscoredata {
                scoredata.seen(vocaitem);
            }
        }
        vocaitem
    }
    ///Lookup a word
    pub fn find(&self, word: &str, optscoredata: Option<&mut VocaScore>, seen: bool) -> Option<&VocaItem> {
        let optvocaitem = self.items.iter().find(|x| { x.word == word });
        // Optionally record the lookup as an exposure in the score data.
        if seen {
            if let (Some(ref mut scoredata), Some(vocaitem)) = (optscoredata, optvocaitem) {
                scoredata.seen(vocaitem);
            }
        };
        optvocaitem
    }
}
impl VocaScore {
    /// Load a score file (JSON) from disk.
    pub fn load(filename: &str) -> Result<VocaScore, Box<dyn Error>> {
        let data = fs::read_to_string(filename)?;
        let data: VocaScore = serde_json::from_str(data.as_str())?; //(shadowing)
        Ok(data)
    }
    /// Save a score file (JSON) to disk.
    pub fn save(&self, filename: &str) -> std::io::Result<()> {
        let data: String = serde_json::to_string(self)?;
        fs::write(filename, data)
    }
    /// Return the 'score' for an item; this corresponds to the probability
    /// it is presented, so the lower the score, the better a word is known.
    /// Add-one smoothing keeps unseen items at a score of 1.0.
    pub fn score(&self, id: &str) -> f64 {
        // `.copied().unwrap_or(0)` replaces the old `.or(Some(&0)).unwrap()`.
        let correct = self.correct.get(id).copied().unwrap_or(0) + 1;
        let incorrect = self.incorrect.get(id).copied().unwrap_or(0) + 1;
        incorrect as f64 / correct as f64
    }
    /// Record that the item was shown now (unix timestamp in seconds).
    pub fn seen(&mut self, item: &VocaItem) {
        let id: String = item.id_as_string();
        let now = SystemTime::now().duration_since(UNIX_EPOCH).expect("Unable to get time").as_secs();
        // `id` is moved in directly; the previous clone was redundant.
        self.lastseen.insert(id, now);
    }
    /// Record an answer, updating the correct/incorrect tallies and the
    /// last-seen timestamp.
    pub fn addscore(&mut self, item: &VocaItem, correct: bool) {
        let id: String = item.id_as_string();
        self.seen(item);
        if correct {
            *self.correct.entry(id).or_insert(0) += 1;
        } else {
            *self.incorrect.entry(id).or_insert(0) += 1;
        }
    }
}
impl Default for VocaScore {
    /// A fresh score sheet: all tallies and timestamps start empty.
    fn default() -> VocaScore {
        VocaScore {
            correct: HashMap::default(),
            incorrect: HashMap::default(),
            lastseen: HashMap::default(),
        }
    }
}
/// Return the default data directory (`<config>/vocajeux/data`).
pub fn defaultdatadir() -> PathBuf {
    dirs::config_dir()
        .expect("Unable to find configuration dir")
        .join("vocajeux")
        .join("data")
}
///
/// Return the default score directory (`<config>/vocajeux/scores`).
pub fn defaultscoredir() -> PathBuf {
    dirs::config_dir()
        .expect("Unable to find configuration dir")
        .join("vocajeux")
        .join("scores")
}
/// Resolve a vocabulary-set name to its JSON file under `datapath`,
/// returning `None` when the file does not exist.
pub fn getdatafile(name: &str, datapath: PathBuf) -> Option<PathBuf> {
    let datafile = datapath.join(format!("{}.json", name));
    // `if` instead of the old `match` on a bool.
    if datafile.exists() {
        Some(datafile)
    } else {
        None
    }
}
/// Build the score-file path for a vocabulary set: strips a trailing
/// `.json` from `name`, optionally inserts the access key, and appends
/// `.score.json`.
pub fn getscorefile(name: &str, scorepath: PathBuf, accesskey: Option<&str>) -> PathBuf {
    // `strip_suffix` replaces the old manual length arithmetic.
    let stem = name.strip_suffix(".json").unwrap_or(name);
    let mut filename = stem.to_string();
    if let Some(accesskey) = accesskey {
        filename.push('.');
        filename.push_str(accesskey);
    }
    filename.push_str(".score.json");
    scorepath.join(filename)
}
/// Returns an index of available vocabulary sets: every file found in
/// `<config>/vocajeux/data` (an empty list when the directory is absent).
pub fn getdataindex(configpath_opt: Option<PathBuf>) -> Vec<PathBuf> {
    // `unwrap_or_else` replaces the old mutable-variable dance.
    let configpath = configpath_opt
        .unwrap_or_else(|| dirs::config_dir().expect("Unable to find configuration dir"));
    let datapath = configpath.join("vocajeux").join("data");
    let mut index: Vec<PathBuf> = Vec::new();
    if datapath.exists() {
        // `flatten()` skips unreadable directory entries, matching the old
        // `if let Ok(file)` behaviour.
        for file in datapath.read_dir().expect("Unable to read dir").flatten() {
            index.push(file.path());
        }
    }
    index
}
| true
|
e4c0f68b5634b70bca22616c673fb5c82412ce66
|
Rust
|
stepht/devrc
|
/src/template.rs
|
UTF-8
| 2,029
| 3.0625
| 3
|
[
"MIT"
] |
permissive
|
use tera::{Context, Tera};
use crate::{errors::DevrcResult, scope::Scope};
/// Renders `template` (registered under `name`) with the variables from
/// `scope`, using a throwaway one-off Tera instance.
///
/// Returns the rendered string, or the (converted) Tera error on parse or
/// render failure.
pub fn render_string(name: &str, template: &str, scope: &Scope) -> DevrcResult<String> {
    let context: Context = scope.into();
    // TODO: pass tera as input parameter so templates can be cached/reused.
    let mut tera = Tera::default();
    tera.add_raw_template(name, template)?;
    let result = tera.render(name, &context);
    // Remove the one-off template so the instance stays clean if reused.
    tera.templates.remove(name);
    // `map_err(Into::into)` replaces the old explicit match; the unused
    // `_autoescape` placeholder and dead commented code were dropped.
    result.map_err(Into::into)
}
#[cfg(test)]
mod tests {
    use super::*;
    use tera::{Error as TeraError, ErrorKind as TerraErrorKind};
    use crate::{errors::DevrcError, scope::Scope};
    #[test]
    fn test_render_string() {
        // A scope variable should be substituted into the template.
        let mut scope = Scope::default();
        scope.insert_var("name", "username");
        let rendered_template =
            render_string("var_name", "some template string: {{ name }}", &scope);
        assert_eq!(
            rendered_template.unwrap(),
            "some template string: username".to_owned()
        );
    }
    #[test]
    fn test_render_invalid_template() {
        // An unbalanced tag must surface as a wrapped Tera parse error.
        let rendered_template =
            render_string("var_name", "some template {{ } string", &Scope::default());
        assert!(rendered_template.is_err());
        match rendered_template.err().unwrap() {
            DevrcError::RenderError(TeraError {
                kind: TerraErrorKind::Msg(kind),
                ..
            }) => {
                assert_eq!(kind, "Failed to parse \'var_name\'");
            }
            _ => unreachable!(),
        }
    }
}
| true
|
e6461038c0737015f80eac8057f75a108469e357
|
Rust
|
tanolino/my_prime_cruncher
|
/src/multi_threaded.rs
|
UTF-8
| 2,968
| 3.28125
| 3
|
[] |
no_license
|
use std::thread;
use std::sync::mpsc;
static NTHREADS: u64 = 16;
/// Trial division against the known prime table: `number` is prime iff no
/// listed prime up to its square root divides it. The table is assumed to
/// be sorted ascending, so the scan stops once p*p exceeds the candidate.
fn prim_check_single_number(smaller_prims: &Vec<u64>, number: &u64) -> bool {
    for divisor in smaller_prims.iter() {
        if number % divisor == 0 {
            return false;
        }
        if divisor * divisor > *number {
            break;
        }
    }
    true
}
/// Collects every candidate in `[start, end)` that has no divisor in the
/// known prime table.
fn prim_get_numbers(smaller_prims: &Vec<u64>, start: u64, end: u64) -> Vec<u64> {
    (start..end)
        .filter(|candidate| prim_check_single_number(smaller_prims, candidate))
        .collect()
}
// Splits [start, end) into NTHREADS contiguous slices, checks each slice for
// primes on its own thread, and appends the results (in slice order) to
// `smaller_prims`. Requires that all primes up to sqrt(end) are already in
// the table, otherwise composites would slip through.
fn divide_among_threads(smaller_prims: &mut Vec<u64>, start: u64, end: u64) {
    let slice_size : u64 = end-start;
    let slice_size_per_thread : u64 = slice_size / NTHREADS;
    // Each worker sends back (thread id, primes found in its slice).
    let (tx,rx) : (mpsc::Sender<(u64,Vec<u64>)>, mpsc::Receiver<(u64,Vec<u64>)>) = mpsc::channel();
    let mut children = Vec::new();
    for id in 0..NTHREADS {
        let thread_tx = tx.clone();
        let thread_start = start + slice_size_per_thread * id;
        // Each thread gets its own copy of the prime table (read-only use).
        let thread_smaller_prims = smaller_prims.clone();
        let child = thread::spawn(move || {
            let prims;
            if id < NTHREADS-1 {
                prims = prim_get_numbers(&thread_smaller_prims, thread_start, thread_start + slice_size_per_thread);
            }
            else {
                // Last thread absorbs the remainder of the division.
                prims = prim_get_numbers(&thread_smaller_prims, thread_start, end);
            }
            thread_tx.send((id, prims)).unwrap();
        });
        children.push(child);
    }
    // Results are placed by thread id so slice order is already restored.
    let mut new_prims : Vec<Vec<u64>> = vec![Vec::new(); NTHREADS as usize];
    for _ in 0..NTHREADS {
        let nums = rx.recv().unwrap();
        new_prims[nums.0 as usize] = nums.1;
    }
    // NOTE(review): placement by id above already orders the slices; this
    // lexicographic sort looks redundant — confirm before removing.
    new_prims.sort();
    for child in children {
        child.join().expect("Failed to join a Child Thread.");
    }
    for mut add in new_prims {
        smaller_prims.append(&mut add);
    }
}
/// Single-threaded incremental trial division: every prime found below
/// `end` is appended to the table and used to test later candidates.
pub fn prim_count_trivial(end: u64) -> Vec<u64> {
    let mut primes: Vec<u64> = Vec::new();
    for candidate in 2..end {
        if prim_check_single_number(&primes, &candidate) {
            // println!("New prime: {}", &candidate);
            primes.push(candidate);
        }
    }
    primes
}
/// Returns all primes below `end`, using multiple threads for large ranges.
pub fn prim_count(end: u64) -> Vec<u64> {
    // Degenerate ranges contain no primes.
    if end < 2 {
        return Vec::new();
    }
    // Small ranges are cheaper to handle single-threaded.
    if end <= 11 {
        return prim_count_trivial(end);
    }
    // Grow the table in squared steps: once every prime up to p is known,
    // the whole slice (p, p*p) can be checked in parallel, because any
    // composite in it has a factor already in the table.
    let mut smaller_prims: Vec<u64> = prim_count_trivial(12);
    let mut biggest_prim: u64 = *smaller_prims.last().unwrap();
    let mut biggest_prim_squared: u64 = biggest_prim * biggest_prim;
    while biggest_prim_squared < end {
        divide_among_threads(&mut smaller_prims, biggest_prim + 1, biggest_prim_squared);
        biggest_prim = *smaller_prims.last().unwrap();
        biggest_prim_squared = biggest_prim * biggest_prim;
    }
    divide_among_threads(&mut smaller_prims, biggest_prim + 1, end);
    smaller_prims
}
| true
|
b95bceb2f220a1b76b9b97b0d75734da43465fa2
|
Rust
|
gnoliyil/fuchsia
|
/third_party/rust_crates/vendor/trust-dns-resolver-0.22.0/src/hosts.rs
|
UTF-8
| 8,749
| 2.796875
| 3
|
[
"BSD-2-Clause",
"MIT",
"Apache-2.0"
] |
permissive
|
//! Hosts result from a configuration of the system hosts file
use std::collections::HashMap;
use std::io;
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;
use proto::op::Query;
use proto::rr::{Name, RecordType};
use proto::rr::{RData, Record};
use tracing::warn;
use crate::dns_lru;
use crate::lookup::Lookup;
// Per-name cache entry: the precomputed Lookup for each address family.
#[derive(Debug, Default)]
struct LookupType {
    /// represents the A record type
    a: Option<Lookup>,
    /// represents the AAAA record type
    aaaa: Option<Lookup>,
}
/// Configuration for the local hosts file
#[derive(Debug, Default)]
pub struct Hosts {
    /// Name -> RDatas map
    // Each entry caches both the A and AAAA lookups for the name.
    by_name: HashMap<Name, LookupType>,
}
impl Hosts {
    /// Creates a new configuration from the system hosts file,
    /// only works for Windows and Unix-like OSes,
    /// will return empty configuration on others
    #[cfg(any(unix, windows))]
    pub fn new() -> Self {
        read_hosts_conf(hosts_path()).unwrap_or_default()
    }
    /// Creates a default configuration for non Windows or Unix-like OSes
    #[cfg(not(any(unix, windows)))]
    pub fn new() -> Self {
        Hosts::default()
    }
    /// Look up the addresses for the given host from the system hosts file.
    pub fn lookup_static_host(&self, query: &Query) -> Option<Lookup> {
        if !self.by_name.is_empty() {
            if let Some(val) = self.by_name.get(query.name()) {
                // Only A/AAAA are cached; any other record type misses.
                let result = match query.query_type() {
                    RecordType::A => val.a.clone(),
                    RecordType::AAAA => val.aaaa.clone(),
                    _ => None,
                };
                return result;
            }
        }
        None
    }
    /// Insert a new Lookup for the associated `Name` and `RecordType`
    pub fn insert(&mut self, name: Name, record_type: RecordType, lookup: Lookup) {
        assert!(record_type == RecordType::A || record_type == RecordType::AAAA);
        let lookup_type = self
            .by_name
            .entry(name.clone())
            .or_insert_with(LookupType::default);
        // Append the new records onto whatever was already cached for this
        // name/type (starting from an empty max-TTL lookup if absent).
        let new_lookup = {
            let old_lookup = match record_type {
                RecordType::A => lookup_type.a.get_or_insert_with(|| {
                    let query = Query::query(name.clone(), record_type);
                    Lookup::new_with_max_ttl(query, Arc::from([]))
                }),
                RecordType::AAAA => lookup_type.aaaa.get_or_insert_with(|| {
                    let query = Query::query(name.clone(), record_type);
                    Lookup::new_with_max_ttl(query, Arc::from([]))
                }),
                _ => {
                    tracing::warn!("unsupported IP type from Hosts file: {:#?}", record_type);
                    return;
                }
            };
            old_lookup.append(lookup)
        };
        // replace the appended version
        match record_type {
            RecordType::A => lookup_type.a = Some(new_lookup),
            RecordType::AAAA => lookup_type.aaaa = Some(new_lookup),
            _ => tracing::warn!("unsupported IP type from Hosts file"),
        }
    }
    /// parse configuration from `src`
    pub fn read_hosts_conf(mut self, src: impl io::Read) -> io::Result<Self> {
        use std::io::{BufRead, BufReader};
        use proto::rr::domain::TryParseIp;
        // lines in the src should have the form `addr host1 host2 host3 ...`
        // line starts with `#` will be regarded with comments and ignored,
        // also empty line also will be ignored,
        // if line only include `addr` without `host` will be ignored,
        // the src will be parsed to map in the form `Name -> LookUp`.
        for line in BufReader::new(src).lines() {
            // Remove comments from the line
            let line = line?;
            let line = line.split('#').next().unwrap().trim();
            if line.is_empty() {
                continue;
            }
            let fields: Vec<_> = line.split_whitespace().collect();
            if fields.len() < 2 {
                continue;
            }
            let addr = if let Some(a) = fields[0].try_parse_ip() {
                a
            } else {
                warn!("could not parse an IP from hosts file");
                continue;
            };
            // Every hostname on the line maps to the same address.
            for domain in fields.iter().skip(1).map(|domain| domain.to_lowercase()) {
                if let Ok(name) = Name::from_str(&domain) {
                    let record = Record::from_rdata(name.clone(), dns_lru::MAX_TTL, addr.clone());
                    match addr {
                        RData::A(..) => {
                            let query = Query::query(name.clone(), RecordType::A);
                            let lookup = Lookup::new_with_max_ttl(query, Arc::from([record]));
                            self.insert(name.clone(), RecordType::A, lookup);
                        }
                        RData::AAAA(..) => {
                            let query = Query::query(name.clone(), RecordType::AAAA);
                            let lookup = Lookup::new_with_max_ttl(query, Arc::from([record]));
                            self.insert(name.clone(), RecordType::AAAA, lookup);
                        }
                        _ => {
                            warn!("unsupported IP type from Hosts file: {:#?}", addr);
                            continue;
                        }
                    };
                    // TODO: insert reverse lookup as well.
                };
            }
        }
        Ok(self)
    }
}
// Location of the system hosts file on Unix-like systems.
#[cfg(unix)]
fn hosts_path() -> &'static str {
    "/etc/hosts"
}
// Location of the system hosts file on Windows, resolved relative to the
// %SystemRoot% environment variable.
#[cfg(windows)]
fn hosts_path() -> std::path::PathBuf {
    // Fixed typo in the panic message ("Environtment" -> "Environment").
    let system_root =
        std::env::var_os("SystemRoot").expect("Environment variable SystemRoot not found");
    let system_root = Path::new(&system_root);
    system_root.join("System32\\drivers\\etc\\hosts")
}
/// parse configuration from `path`
// Opens the file and delegates line parsing to `Hosts::read_hosts_conf`.
#[cfg(any(unix, windows))]
#[cfg_attr(docsrs, doc(cfg(any(unix, windows))))]
pub(crate) fn read_hosts_conf<P: AsRef<Path>>(path: P) -> io::Result<Hosts> {
    use std::fs::File;
    let file = File::open(path)?;
    Hosts::default().read_hosts_conf(file)
}
#[cfg(any(unix, windows))]
#[cfg(test)]
mod tests {
    use super::*;
    use std::env;
    use std::net::{Ipv4Addr, Ipv6Addr};
    // Fixture directory is resolved relative to the workspace root.
    fn tests_dir() -> String {
        let server_path = env::var("TDNS_WORKSPACE_ROOT").unwrap_or_else(|_| "../..".to_owned());
        format! {"{}/crates/resolver/tests", server_path}
    }
    #[test]
    fn test_read_hosts_conf() {
        let path = format!("{}/hosts", tests_dir());
        let hosts = read_hosts_conf(&path).unwrap();
        // localhost must resolve over both address families.
        let name = Name::from_str("localhost").unwrap();
        let rdatas = hosts
            .lookup_static_host(&Query::query(name.clone(), RecordType::A))
            .unwrap()
            .iter()
            .map(ToOwned::to_owned)
            .collect::<Vec<RData>>();
        assert_eq!(rdatas, vec![RData::A(Ipv4Addr::new(127, 0, 0, 1))]);
        let rdatas = hosts
            .lookup_static_host(&Query::query(name, RecordType::AAAA))
            .unwrap()
            .iter()
            .map(ToOwned::to_owned)
            .collect::<Vec<RData>>();
        assert_eq!(
            rdatas,
            vec![RData::AAAA(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1))]
        );
        let name = Name::from_str("broadcasthost").unwrap();
        let rdatas = hosts
            .lookup_static_host(&Query::query(name, RecordType::A))
            .unwrap()
            .iter()
            .map(ToOwned::to_owned)
            .collect::<Vec<RData>>();
        assert_eq!(rdatas, vec![RData::A(Ipv4Addr::new(255, 255, 255, 255))]);
        let name = Name::from_str("example.com").unwrap();
        let rdatas = hosts
            .lookup_static_host(&Query::query(name, RecordType::A))
            .unwrap()
            .iter()
            .map(ToOwned::to_owned)
            .collect::<Vec<RData>>();
        assert_eq!(rdatas, vec![RData::A(Ipv4Addr::new(10, 0, 1, 102))]);
        // a. and b.example.com share an address in the fixture file.
        let name = Name::from_str("a.example.com").unwrap();
        let rdatas = hosts
            .lookup_static_host(&Query::query(name, RecordType::A))
            .unwrap()
            .iter()
            .map(ToOwned::to_owned)
            .collect::<Vec<RData>>();
        assert_eq!(rdatas, vec![RData::A(Ipv4Addr::new(10, 0, 1, 111))]);
        let name = Name::from_str("b.example.com").unwrap();
        let rdatas = hosts
            .lookup_static_host(&Query::query(name, RecordType::A))
            .unwrap()
            .iter()
            .map(ToOwned::to_owned)
            .collect::<Vec<RData>>();
        assert_eq!(rdatas, vec![RData::A(Ipv4Addr::new(10, 0, 1, 111))]);
    }
}
| true
|
acd6f035668d22883e557048a6f868710f8845ad
|
Rust
|
Trivaxy/leibniz-lang
|
/runtime/src/codegen.rs
|
UTF-8
| 25,205
| 2.703125
| 3
|
[] |
no_license
|
use std::collections::{HashMap, HashSet};
use linked_hash_map::LinkedHashMap;
use parser::{Operator, ParserNode, TypeConstraint};
use crate::{
function::{Function, NativeFunction},
instruction::Instruction,
optimizations::Optimization,
runtime::LeibnizRuntime,
value::Value,
};
/// Translates a parsed `ParserNode` tree into bytecode `Function`s plus the
/// index tables a `LeibnizRuntime` needs to execute them.
pub struct CodeGen {
    // user-defined functions, keyed by name
    function_table: HashMap<String, Function>,
    // name -> numeric index used by `Instruction::Call`
    function_index_table: HashMap<String, usize>,
    // declared-but-not-yet-generated functions: (name, parameters, body)
    deferred_functions: Vec<(String, Vec<String>, ParserNode)>,
    // names currently queued in `deferred_functions`, for O(1) membership checks
    deferred_functions_set: HashSet<String>,
    // built-in (native) functions, keyed by name
    native_function_table: HashMap<String, NativeFunction>,
    // name -> numeric index used by `Instruction::CallNative`
    native_function_index_table: HashMap<String, usize>,
    // user-defined types: name -> insertion-ordered field map
    type_table: HashMap<String, LinkedHashMap<String, TypeConstraint>>,
    // name -> numeric index used by `Instruction::MakeType`
    type_index_table: HashMap<String, usize>,
    // locals of the function currently being generated (cleared per function)
    local_table: HashMap<String, usize>,
    // globals declared while generating the entry-point function
    global_table: HashMap<String, usize>,
    // interned string literals -> index
    string_table: HashMap<String, usize>,
    // locals temporarily shadowed by a loop variable (see `accept_loop`)
    preserved_locals: HashSet<String>,
    // the function currently under construction, if any
    current_function: Option<Function>,
    // name of the program's entry-point function (always function index 0)
    entry_point: String,
}
impl CodeGen {
pub fn new(entry_point: String) -> Self {
let mut function_table = HashMap::new();
let mut function_index_table = HashMap::new();
// this is to reserve the index 0 for the entrypoint function
function_table.insert(
entry_point.clone(),
Function::new(entry_point.clone(), Vec::new()),
);
function_index_table.insert(entry_point.clone(), 0);
let (native_function_table, native_function_index_table) = Self::get_native_function_maps();
CodeGen {
function_table: function_table,
function_index_table: function_index_table,
deferred_functions: Vec::new(),
deferred_functions_set: HashSet::new(),
native_function_table: native_function_table,
native_function_index_table: native_function_index_table,
type_table: HashMap::new(),
type_index_table: HashMap::new(),
local_table: HashMap::new(),
global_table: HashMap::new(),
string_table: HashMap::new(),
preserved_locals: HashSet::new(),
current_function: Some(Function::new(entry_point.clone(), Vec::new())),
entry_point: entry_point,
}
}
    /// Generates code for `node` into the function currently being built,
    /// then finalizes that function (which also drains any deferred function
    /// declarations encountered along the way).
    pub fn generate_from_node(&mut self, node: ParserNode) {
        self.accept_node(node);
        self.finalize_current();
    }
pub fn dissolve(self) -> LeibnizRuntime {
let function_table = self.function_table;
let function_index_table = self.function_index_table;
let mut finished_func_table = HashMap::new();
for kvp in function_table {
finished_func_table.insert(*function_index_table.get(&kvp.0).unwrap(), kvp.1);
}
let native_function_table = self.native_function_table;
let native_function_index_table = self.native_function_index_table;
let mut finished_native_func_table = HashMap::new();
for kvp in native_function_table {
finished_native_func_table
.insert(*native_function_index_table.get(&kvp.0).unwrap(), kvp.1);
}
let type_table = self.type_table;
let type_index_table = self.type_index_table;
let mut finished_type_table = HashMap::new();
for r#type in type_table {
let fields = r#type.1.into_iter().map(|kvp| kvp.0).collect();
finished_type_table.insert(*type_index_table.get(&r#type.0).unwrap(), fields);
}
let finished_string_table = self
.string_table
.iter()
.map(|kvp| (*kvp.1, kvp.0.to_owned()))
.collect();
LeibnizRuntime::new(
finished_func_table,
finished_native_func_table,
finished_type_table,
finished_string_table,
)
}
fn finalize_current(&mut self) {
if self.current_function.is_none() {
panic!("tried to finalize a function, but there wasn't one being built");
}
// self.apply_basic_optimization();
let func = self.current_function.take().unwrap();
self.function_table.insert(func.name.clone(), func);
self.local_table.clear();
while self.deferred_functions.len() > 0 {
let (name, parameters, node) = self.deferred_functions.pop().unwrap();
self.current_function = Some(Function::new(name, parameters.clone()));
for param in parameters {
self.register_local(param);
}
self.generate_from_node(node);
}
}
    /// Runs the basic optimization pass over the function being built.
    /// Currently unused: the only call site (in `finalize_current`) is
    /// commented out.
    fn apply_basic_optimization(&mut self) {
        Optimization::apply_basic_optimization(self.current_function.as_mut().unwrap());
    }
fn get_native_function_maps() -> (HashMap<String, NativeFunction>, HashMap<String, usize>) {
let mut funcs = HashMap::new();
funcs.insert(
"print",
NativeFunction::new(
|parameters, runtime| {
println!("{}", parameters[0]);
runtime.push_value(parameters[0].clone());
},
1,
),
);
funcs.insert(
"Re",
NativeFunction::new(
|parameters, runtime| {
runtime.push_value(match parameters[0] {
Value::Real(_) => parameters[0].clone(),
Value::Complex(c) => Value::Real(c.re),
_ => Value::error(),
});
},
1,
),
);
funcs.insert(
"Im",
NativeFunction::new(
|parameters, runtime| {
runtime.push_value(match parameters[0] {
Value::Real(_) => Value::real(0.0),
Value::Complex(c) => Value::Real(c.im),
_ => Value::error(),
});
},
1,
),
);
let funcs: HashMap<String, NativeFunction> = funcs
.into_iter()
.map(|kvp| (kvp.0.to_string(), kvp.1))
.collect();
let mut func_indexed = HashMap::new();
let mut i = 0;
for kvp in &funcs {
func_indexed.insert(kvp.0.to_string(), i);
i += 1;
}
(funcs, func_indexed)
}
    /// Dispatches a single AST node to its specialized `accept_*` emitter.
    ///
    /// # Panics
    /// Panics if no function is currently being built.
    fn accept_node(&mut self, node: ParserNode) {
        if self.current_function.is_none() {
            panic!("tried to generate code when no function has been created");
        }

        match node {
            ParserNode::Number(num, im) => self.accept_number(num, im),
            ParserNode::Identifier(identifier) => self.accept_identifier(&identifier),
            ParserNode::Operation(a, operator, b) => self.accept_operation(*a, *b, operator),
            ParserNode::Assignment(identifiers, value_node) => {
                self.accept_assignment(&identifiers, *value_node)
            }
            ParserNode::FunctionCall(identifier, parameter_nodes) => {
                self.accept_function_call(&identifier, parameter_nodes)
            }
            ParserNode::Conditional(predicate_node, true_node, false_node) => {
                self.accept_conditional(*predicate_node, *true_node, *false_node)
            }
            ParserNode::FunctionDeclaration(identifier, parameter_names, body) => {
                self.accept_function_declaration(identifier, parameter_names, *body)
            }
            ParserNode::TypeDeclaration(identifier, fields) => {
                self.accept_type_declaration(identifier, fields)
            }
            ParserNode::VariableDeclaration(identifier, expression_node) => {
                self.accept_variable_declaration(identifier, *expression_node)
            }
            ParserNode::Range(_, _, _, _) => {} // Ranges aren't actually emitted by the parser directly
            ParserNode::Array(expression_nodes) => self.accept_array(expression_nodes),
            ParserNode::Index(value_node, index_node) => {
                self.accept_array_index(*value_node, *index_node)
            }
            ParserNode::Loop(variable_name, range_node, body_node) => {
                self.accept_loop(variable_name, *range_node, *body_node)
            }
            ParserNode::Factorial(expression_node) => self.accept_factorial(*expression_node),
            ParserNode::Access(expression_node, field_name) => {
                self.accept_access(*expression_node, field_name)
            }
            ParserNode::String(string) => self.accept_string(string),
            ParserNode::Tree(tree_nodes) => self.accept_tree(tree_nodes),
        }
    }
fn accept_number(&mut self, num: f64, im: bool) {
self.emit_instr(Instruction::PushNumber(num));
if im {
self.emit_instr(Instruction::MakeImaginary);
}
}
fn accept_identifier(&mut self, identifier: &str) {
let has_local = self.has_local(identifier);
let has_global = self.has_global(identifier);
if !has_local && !has_global {
panic!("tried to generate code to load variable '{}' when it hasn't been declared locally or globally", identifier);
}
if has_local {
self.emit_instr(Instruction::GetLocal(self.local_index(identifier)));
} else {
self.emit_instr(Instruction::GetGlobal(self.global_index(identifier)));
}
}
fn accept_operation(&mut self, a: ParserNode, b: ParserNode, operator: Operator) {
self.accept_node(a);
self.accept_node(b);
let instr = match operator {
Operator::Add => Instruction::Add,
Operator::Subtract => Instruction::Subtract,
Operator::Multiply => Instruction::Multiply,
Operator::Divide => Instruction::Divide,
Operator::Power => Instruction::Raise,
Operator::Modulo => Instruction::Remainder,
Operator::Equals => Instruction::Equals,
Operator::GreaterThan => Instruction::GreaterThan,
Operator::LessThan => Instruction::LessThan,
Operator::GreaterThanOrEquals => Instruction::GreaterThanOrEquals,
Operator::LessThanOrEquals => Instruction::LessThanOrEquals,
};
self.emit_instr(instr);
}
fn accept_assignment(&mut self, identifiers: &[String], value_node: ParserNode) {
for identifier in identifiers {
if !self.has_local(identifier) && !self.has_global(identifier) {
panic!(
"tried to generate code to re-assign variable '{}' but it was not defined",
identifier
);
}
}
self.accept_node(value_node);
for _ in 1..identifiers.len() {
self.emit_instr(Instruction::Dupe);
}
for identifier in identifiers {
if self.has_local(identifier) {
self.emit_instr(Instruction::SetLocal(self.local_index(identifier)));
} else {
self.emit_instr(Instruction::SetGlobal(self.global_index(identifier)));
}
}
}
    /// Emits code for a call to `identifier` with the given argument
    /// expressions. Resolution order: native function, then user-defined
    /// type (constructor), then user-defined function.
    fn accept_function_call(&mut self, identifier: &str, parameter_nodes: Vec<ParserNode>) {
        let supplied_param_count = parameter_nodes.len();

        // arguments are evaluated left to right before the call instruction
        for node in parameter_nodes {
            self.accept_node(node);
        }

        if self.has_native_function(identifier) {
            // native calls are arity-checked at codegen time
            let param_count = self
                .native_function_table
                .get(identifier)
                .unwrap()
                .parameters();

            if param_count != supplied_param_count {
                panic!(
                    "function '{}' expects {} parameters",
                    identifier, param_count
                );
            }

            self.emit_instr(Instruction::CallNative(
                self.native_function_index(identifier),
            ));

            return;
        } else if self.has_type(identifier) {
            // a type name used like a function constructs an instance
            self.emit_instr(Instruction::MakeType(self.type_index(identifier)));
            return;
        }

        // a deferred function is declared but not yet generated; calling it
        // is still valid since only its index is needed here
        if !self.has_function(identifier) && !self.deferred_functions_set.contains(identifier) {
            panic!(
                "tried to call function '{}' which hasn't been declared before",
                identifier
            );
        }

        self.emit_instr(Instruction::Call(self.function_index(identifier)));
    }
    /// Emits code for a conditional expression.
    ///
    /// Both branch bodies are generated first, then the cursor is moved back
    /// to splice in two jumps: a `JumpFalse` that skips the true branch and an
    /// unconditional `Jump` that skips the false branch. The `+2`/`+1`
    /// adjustments to the recorded distances appear to account for the jump
    /// instructions themselves being inserted into the stream.
    fn accept_conditional(
        &mut self,
        predicate_node: ParserNode,
        true_node: ParserNode,
        false_node: ParserNode,
    ) {
        self.accept_node(predicate_node);
        // remember where each section ends so the jumps can be patched in
        let end_of_initial = self.cursor_pos();
        self.accept_node(true_node);
        let end_of_true = self.cursor_pos();
        self.accept_node(false_node);
        let end_of_false = self.cursor_pos();

        self.move_cursor_to(end_of_initial);
        self.emit_instr(Instruction::JumpFalse(
            (end_of_true - end_of_initial + 2) as isize,
        ));

        self.move_cursor_to(end_of_true + 1);
        self.emit_instr(Instruction::Jump((end_of_false - end_of_true + 1) as isize));
        self.move_cursor_to_end();
    }
fn accept_function_declaration(
&mut self,
identifier: String,
parameter_names: Vec<String>,
body: ParserNode,
) {
if self.has_function(&identifier) || self.has_native_function(&identifier) {
panic!(
"a function with the name '{}' has already been defined",
identifier
);
}
self.register_function(identifier.clone());
self.deferred_functions
.push((identifier.clone(), parameter_names, body));
self.deferred_functions_set.insert(identifier);
}
    /// Registers a user-defined type declaration.
    ///
    /// # Panics
    /// Panics if a type with the same name was already declared.
    fn accept_type_declaration(
        &mut self,
        identifier: String,
        fields: Vec<(String, TypeConstraint)>,
    ) {
        if self.has_type(&identifier) {
            panic!(
                "a type with the name '{}' has already been defined",
                identifier
            );
        }

        self.register_type(identifier, fields);
    }
fn accept_variable_declaration(&mut self, identifier: String, expression_node: ParserNode) {
let global_context = self.in_entry_point();
if self.has_local(&identifier) {
panic!(
"cannot declare a variable '{}' as it has already been declared before locally",
identifier
);
} else if self.has_global(&identifier) && global_context {
panic!(
"cannot declare a variable '{}' as it has already been declared before globally",
identifier
);
}
self.accept_node(expression_node);
if global_context {
let index = self.register_global(identifier);
self.emit_instr(Instruction::SetGlobal(index));
} else {
let index = self.register_local(identifier);
self.emit_instr(Instruction::SetLocal(index));
}
}
fn accept_array(&mut self, expression_nodes: Vec<ParserNode>) {
let len = expression_nodes.len();
for expression in expression_nodes {
self.accept_node(expression);
}
self.emit_instr(Instruction::MakeArray(len));
}
fn accept_array_index(&mut self, value_node: ParserNode, index_node: ParserNode) {
self.accept_node(value_node);
self.accept_node(index_node);
self.emit_instr(Instruction::Index);
}
    /// Emits code for a ranged loop: the loop variable walks from the lower
    /// bound towards the upper bound by the step expression, and each body
    /// result is folded into a running total with `Add` (seeded by the
    /// initial `PushNumber(0.0)`).
    ///
    /// If the loop variable shadows an existing local, the old value is
    /// preserved around the loop and restored afterwards; otherwise the
    /// variable is scoped to the loop and removed at the end.
    ///
    /// # Panics
    /// Panics if the same name is already preserved by an enclosing loop.
    fn accept_loop(
        &mut self,
        variable_name: String,
        range_node: ParserNode,
        body_node: ParserNode,
    ) {
        if self.has_preserved_local(&variable_name) {
            panic!(
                "the local '{}' was already defined in an enclosing loop",
                &variable_name
            );
        }

        if self.has_local(&variable_name) {
            // shadowing an outer local: stash its current value
            let index = self.local_index(&variable_name);
            self.emit_instr(Instruction::PreserveLocal(index));
            self.track_preserved_local(variable_name.clone());
        } else {
            self.register_local(variable_name.clone());
        }

        let index = self.local_index(&variable_name);
        let (lower_bound_node, upper_bound_node, step_node, going_down) =
            range_node.destructure_range();

        // seed the accumulator, then initialize the loop variable
        self.emit_instr(Instruction::PushNumber(0.0));
        self.accept_node(lower_bound_node);
        self.emit_instr(Instruction::SetLocal(index));
        let end_of_init = self.cursor_pos() as isize;

        // loop body: fold result into the accumulator, then advance the
        // loop variable by the step (direction depends on the range)
        self.accept_node(body_node);
        self.emit_instr(Instruction::Add);
        self.emit_instr(Instruction::GetLocal(index));
        self.accept_node(step_node);

        if going_down {
            self.emit_instr(Instruction::Subtract);
        } else {
            self.emit_instr(Instruction::Add);
        }

        self.emit_instr(Instruction::SetLocal(index));
        let end_of_body = self.cursor_pos() as isize;

        // loop condition: compare the variable against the upper bound and
        // jump back to the body while the loop should continue
        self.emit_instr(Instruction::GetLocal(index));
        self.accept_node(upper_bound_node);

        if going_down {
            self.emit_instr(Instruction::GreaterThanOrEquals);
        } else {
            self.emit_instr(Instruction::LessThanOrEquals);
        }

        let end_of_check = self.cursor_pos() as isize;
        self.emit_instr(Instruction::JumpTrue(-(end_of_check - end_of_init)));

        // splice in the jump that enters the condition check first
        self.move_cursor_to(end_of_init as usize);
        self.emit_instr(Instruction::Jump(end_of_body - end_of_init + 1));
        self.move_cursor_to_end();

        if self.has_preserved_local(&variable_name) {
            // restore the shadowed outer local
            self.emit_instr(Instruction::FetchLocal(index));
            self.emit_instr(Instruction::SetLocal(index));
            self.untrack_preserved_local(&variable_name);
        } else {
            self.remove_local(&variable_name);
        }
    }
fn accept_factorial(&mut self, expression_node: ParserNode) {
self.accept_node(expression_node);
self.emit_instr(Instruction::PushNumber(1.0));
self.emit_instr(Instruction::Add);
self.emit_instr(Instruction::Gamma)
}
fn accept_access(&mut self, expression_node: ParserNode, field_name: String) {
self.accept_node(expression_node);
self.emit_instr(Instruction::LoadField(field_name));
}
fn accept_string(&mut self, string: String) {
let index = if !self.has_string(&string) {
self.register_string(string)
} else {
self.string_index(&string)
};
self.emit_instr(Instruction::LoadString(index));
}
fn accept_tree(&mut self, tree_nodes: Vec<ParserNode>) {
for node in tree_nodes {
self.accept_node(node);
}
}
fn has_local(&self, identifier: &str) -> bool {
self.local_table.contains_key(identifier)
}
fn local_index(&self, identifier: &str) -> usize {
if !self.has_local(identifier) {
panic!("no local by the name '{}' exists", identifier);
}
*self.local_table.get(identifier).unwrap()
}
fn register_local(&mut self, identifier: String) -> usize {
if self.has_local(&identifier) {
panic!(
"tried to register local '{}' when it already exists",
identifier
);
}
let local_index = self.local_table.len();
self.local_table.insert(identifier, local_index);
local_index
}
fn remove_local(&mut self, identifier: &str) {
if !self.has_local(identifier) {
panic!(
"tried to remove local '{}' when it didn't exist",
identifier
);
}
self.local_table.remove(identifier);
}
fn track_preserved_local(&mut self, identifier: String) {
if self.has_preserved_local(&identifier) {
panic!(
"tried to track preserved local '{}' when it was preserved before",
identifier
);
}
self.preserved_locals.insert(identifier);
}
fn untrack_preserved_local(&mut self, identifier: &str) {
if !self.has_preserved_local(identifier) {
panic!(
"tried to untrack preserved local '{}' when it wasn't there",
identifier
);
}
self.preserved_locals.remove(identifier);
}
fn has_preserved_local(&self, identifier: &str) -> bool {
self.preserved_locals.contains(identifier)
}
fn has_global(&self, identifier: &str) -> bool {
self.global_table.contains_key(identifier)
}
fn global_index(&self, identifier: &str) -> usize {
if !self.has_global(identifier) {
panic!("no global by the name '{}' exists", identifier);
}
*self.global_table.get(identifier).unwrap()
}
fn register_global(&mut self, identifier: String) -> usize {
if self.has_global(&identifier) {
panic!(
"tried to register global '{}' when it already exists",
identifier
);
}
let global_index = self.global_table.len();
self.global_table.insert(identifier, global_index);
global_index
}
fn has_string(&self, string: &str) -> bool {
self.string_table.contains_key(string)
}
fn string_index(&self, string: &str) -> usize {
if !self.has_string(string) {
panic!("the string \"{}\" has not been registered", string)
}
*self.string_table.get(string).unwrap()
}
fn register_string(&mut self, string: String) -> usize {
if self.has_string(&string) {
panic!(
"tried to register string \"{}\" when it already exists",
string
);
}
let string_index = self.string_table.len();
self.string_table.insert(string, string_index);
string_index
}
fn has_function(&self, identifier: &str) -> bool {
self.function_index_table.contains_key(identifier)
}
fn function_index(&self, identifier: &str) -> usize {
if !self.has_function(identifier) {
panic!("no function by the name '{}' exists", identifier);
}
*self.function_index_table.get(identifier).unwrap()
}
fn register_function(&mut self, identifier: String) -> usize {
if self.has_function(&identifier) {
panic!(
"tried to register function '{}' when it already exists",
identifier
);
}
let function_index = self.function_index_table.len();
self.function_index_table.insert(identifier, function_index);
function_index
}
fn has_native_function(&self, name: &str) -> bool {
self.native_function_index_table.contains_key(name)
}
fn native_function_index(&self, name: &str) -> usize {
if !self.has_native_function(name) {
panic!("no native function by the name '{}' exists", name);
}
*self.native_function_index_table.get(name).unwrap()
}
fn has_type(&self, identifier: &str) -> bool {
self.type_table.contains_key(identifier)
}
fn type_index(&self, identifier: &str) -> usize {
if !self.has_type(identifier) {
panic!("no type by the name '{}' exists", identifier);
}
*self.type_index_table.get(identifier).unwrap()
}
fn register_type(
&mut self,
identifier: String,
fields: Vec<(String, TypeConstraint)>,
) -> usize {
if self.has_type(&identifier) {
panic!(
"tried to register type '{}' when it already exists",
identifier
);
}
let mut indexed_fields = LinkedHashMap::new();
let mut i = 0;
for field in &fields {
indexed_fields.insert(field.0.clone(), i);
i += 1;
}
let mut fields_as_map = LinkedHashMap::new();
for field in fields {
fields_as_map.insert(field.0, field.1);
}
let type_index = self.type_table.len();
self.type_table.insert(identifier.clone(), fields_as_map);
self.type_index_table.insert(identifier, type_index);
type_index
}
    /// Moves the emission cursor past the last instruction of the function
    /// being built.
    fn move_cursor_to_end(&mut self) {
        self.current_function.as_mut().unwrap().move_cursor_to_end();
    }

    /// Moves the emission cursor to `index` in the function being built.
    fn move_cursor_to(&mut self, index: usize) {
        self.current_function
            .as_mut()
            .unwrap()
            .move_cursor_to(index);
    }

    /// Current position of the emission cursor.
    fn cursor_pos(&self) -> usize {
        self.current_function.as_ref().unwrap().cursor_pos()
    }

    /// Emits `instruction` at the cursor of the function being built.
    fn emit_instr(&mut self, instruction: Instruction) {
        self.current_function
            .as_mut()
            .unwrap()
            .emit_instr(instruction);
    }

    /// True while generating code for the entry-point (top-level) function.
    fn in_entry_point(&self) -> bool {
        self.current_function.as_ref().unwrap().name == self.entry_point
    }
}
| true
|
77998af67155ff681eaf01b767af2a2590e6db80
|
Rust
|
marco-c/gecko-dev-wordified-and-comments-removed
|
/third_party/rust/getrandom/tests/common/mod.rs
|
UTF-8
| 2,027
| 2.59375
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use
super
:
:
getrandom_impl
;
#
[
cfg
(
all
(
target_arch
=
"
wasm32
"
target_os
=
"
unknown
"
)
)
]
use
wasm_bindgen_test
:
:
wasm_bindgen_test
as
test
;
#
[
cfg
(
feature
=
"
test
-
in
-
browser
"
)
]
wasm_bindgen_test
:
:
wasm_bindgen_test_configure
!
(
run_in_browser
)
;
// Zero-length requests must succeed without touching the buffer.
#[test]
fn test_zero() {
    getrandom_impl(&mut [0u8; 0]).unwrap();
}
/// Counts the number of bit positions at which `s1` and `s2` differ.
///
/// # Panics
/// Panics if the slices have different lengths.
#[cfg(not(feature = "custom"))]
fn num_diff_bits(s1: &[u8], s2: &[u8]) -> usize {
    assert_eq!(s1.len(), s2.len());
    s1.iter()
        .zip(s2.iter())
        .map(|(a, b)| (a ^ b).count_ones() as usize)
        .sum()
}
// Two independent 1000-byte draws should differ in roughly half of their
// 8000 bits; 3500..4500 is a generous statistical window.
#[test]
#[cfg(not(feature = "custom"))]
fn test_diff() {
    let mut v1 = [0u8; 1000];
    getrandom_impl(&mut v1).unwrap();

    let mut v2 = [0u8; 1000];
    getrandom_impl(&mut v2).unwrap();

    let d = num_diff_bits(&v1, &v2);
    assert!(d > 3500);
    assert!(d < 4500);
}
// Exercises small buffer sizes (1..=64 bytes), drawing at least 256 bytes
// per size and checking that the differing-bit rate stays near 4 bits/byte.
#[test]
#[cfg(not(feature = "custom"))]
fn test_small() {
    for size in 1..=64 {
        let mut num_bytes = 0;
        let mut diff_bits = 0;
        while num_bytes < 256 {
            let mut s1 = vec![0u8; size];
            getrandom_impl(&mut s1).unwrap();
            let mut s2 = vec![0u8; size];
            getrandom_impl(&mut s2).unwrap();

            num_bytes += size;
            diff_bits += num_diff_bits(&s1, &s2);
        }
        assert!(diff_bits > 3 * num_bytes);
        assert!(diff_bits < 5 * num_bytes);
    }
}
// A single very large request must also succeed.
#[test]
fn test_huge() {
    let mut huge = [0u8; 100_000];
    getrandom_impl(&mut huge).unwrap();
}
// Spawns 20 threads that each perform 100 draws concurrently (released
// simultaneously via channels) to shake out thread-safety issues.
#[cfg(not(target_arch = "wasm32"))]
#[test]
fn test_multithreading() {
    extern crate std;
    use std::{sync::mpsc::channel, thread, vec};

    let mut txs = vec![];
    for _ in 0..20 {
        let (tx, rx) = channel();
        txs.push(tx);

        thread::spawn(move || {
            // block until the main thread fires the starting gun
            rx.recv().unwrap();
            let mut v = [0u8; 1000];

            for _ in 0..100 {
                getrandom_impl(&mut v).unwrap();
                thread::yield_now();
            }
        });
    }

    // release all worker threads
    for tx in txs.iter() {
        tx.send(()).unwrap();
    }
}
| true
|
8d85d0f219b42688e7f2e4ccad14114ad4b1491e
|
Rust
|
snakeztc/tantivy-ztc
|
/src/searcher.rs
|
UTF-8
| 7,762
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
#![allow(clippy::new_ret_no_self)]
use crate::{document::Document, get_field, query::Query, to_pyerr};
use pyo3::{exceptions::PyValueError, prelude::*, PyObjectProtocol};
use tantivy as tv;
use tantivy::collector::{Count, MultiCollector, TopDocs};
/// Tantivy's Searcher class
///
/// A Searcher is used to search the index given a prepared Query.
#[pyclass]
pub(crate) struct Searcher {
    // leased tantivy searcher this wrapper delegates to
    pub(crate) inner: tv::LeasedItem<tv::Searcher>,
}
/// Per-hit ranking value: a score for relevance-ordered searches, or the
/// u64 fast-field value when ordering by a field.
#[derive(Clone)]
enum Fruit {
    Score(f32),
    Order(u64),
}
impl std::fmt::Debug for Fruit {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // write! formats straight into the formatter; the previous
        // `f.write_str(&format!(...))` allocated an intermediate String.
        match self {
            Fruit::Score(s) => write!(f, "{}", s),
            Fruit::Order(o) => write!(f, "{}", o),
        }
    }
}
impl ToPyObject for Fruit {
    // expose the inner number (float or int) directly to Python
    fn to_object(&self, py: Python) -> PyObject {
        match self {
            Fruit::Score(s) => s.to_object(py),
            Fruit::Order(o) => o.to_object(py),
        }
    }
}
#[pyclass]
/// Object holding a results successful search.
pub(crate) struct SearchResult {
    // (ranking value, address) per matching document, best first
    hits: Vec<(Fruit, DocAddress)>,
    #[pyo3(get)]
    /// How many documents matched the query. Only available if `count` was set
    /// to true during the search.
    count: Option<usize>,
}
#[pyproto]
impl PyObjectProtocol for SearchResult {
    /// Python `repr()`: includes the count only when it was collected.
    fn __repr__(&self) -> PyResult<String> {
        match self.count {
            Some(count) => Ok(format!(
                "SearchResult(hits: {:?}, count: {})",
                self.hits, count
            )),
            None => Ok(format!("SearchResult(hits: {:?})", self.hits)),
        }
    }
}
#[pymethods]
impl SearchResult {
    #[getter]
    /// The list of tuples that contains the scores and DocAddress of the
    /// search results.
    fn hits(&self, py: Python) -> PyResult<Vec<(PyObject, DocAddress)>> {
        let mut hits = Vec::with_capacity(self.hits.len());
        for (result, address) in &self.hits {
            hits.push((result.to_object(py), address.clone()));
        }
        Ok(hits)
    }
}
#[pymethods]
impl Searcher {
    /// Search the index with the given query and collect results.
    ///
    /// Args:
    ///     query (Query): The query that will be used for the search.
    ///     limit (int, optional): The maximum number of search results to
    ///         return. Defaults to 10.
    ///     count (bool, optional): Should the number of documents that match
    ///         the query be returned as well. Defaults to true.
    ///     order_by_field (Field, optional): A schema field that the results
    ///         should be ordered by. The field must be declared as a fast field
    ///         when building the schema. Note, this only works for unsigned
    ///         fields.
    ///     offset (Field, optional): The offset from which the results have
    ///         to be returned.
    ///
    /// Returns `SearchResult` object.
    ///
    /// Raises a ValueError if there was an error with the search.
    #[args(limit = 10, offset = 0, count = true)]
    fn search(
        &self,
        _py: Python,
        query: &Query,
        limit: usize,
        count: bool,
        order_by_field: Option<&str>,
        offset: usize,
    ) -> PyResult<SearchResult> {
        let mut multicollector = MultiCollector::new();

        // only register a Count collector when the caller asked for a count
        let count_handle = if count {
            Some(multicollector.add_collector(Count))
        } else {
            None
        };

        let (mut multifruit, hits) = {
            if let Some(order_by) = order_by_field {
                // field-ordered search: fruits are the u64 fast-field values
                let field = get_field(&self.inner.index().schema(), order_by)?;
                let collector = TopDocs::with_limit(limit)
                    .and_offset(offset)
                    .order_by_u64_field(field);
                let top_docs_handle = multicollector.add_collector(collector);
                let ret = self.inner.search(query.get(), &multicollector);

                match ret {
                    Ok(mut r) => {
                        let top_docs = top_docs_handle.extract(&mut r);
                        let result: Vec<(Fruit, DocAddress)> = top_docs
                            .iter()
                            .map(|(f, d)| (Fruit::Order(*f), DocAddress::from(d)))
                            .collect();
                        (r, result)
                    }
                    Err(e) => return Err(PyValueError::new_err(e.to_string())),
                }
            } else {
                // score-ordered search: fruits are the f32 scores
                let collector = TopDocs::with_limit(limit).and_offset(offset);
                let top_docs_handle = multicollector.add_collector(collector);
                let ret = self.inner.search(query.get(), &multicollector);

                match ret {
                    Ok(mut r) => {
                        let top_docs = top_docs_handle.extract(&mut r);
                        let result: Vec<(Fruit, DocAddress)> = top_docs
                            .iter()
                            .map(|(f, d)| (Fruit::Score(*f), DocAddress::from(d)))
                            .collect();
                        (r, result)
                    }
                    Err(e) => return Err(PyValueError::new_err(e.to_string())),
                }
            }
        };

        // was a manual `match { Some(..) => Some(..), None => None }`
        // (clippy: manual_map)
        let count = count_handle.map(|h| h.extract(&mut multifruit));

        Ok(SearchResult { hits, count })
    }

    /// Returns the overall number of documents in the index.
    #[getter]
    fn num_docs(&self) -> u64 {
        self.inner.num_docs()
    }

    /// Fetches a document from Tantivy's store given a DocAddress.
    ///
    /// Args:
    ///     doc_address (DocAddress): The DocAddress that is associated with
    ///         the document that we wish to fetch.
    ///
    /// Returns the Document, raises ValueError if the document can't be found.
    fn doc(&self, doc_address: &DocAddress) -> PyResult<Document> {
        let doc = self.inner.doc(doc_address.into()).map_err(to_pyerr)?;
        let named_doc = self.inner.schema().to_named_doc(&doc);
        Ok(Document {
            field_values: named_doc.0,
        })
    }
}
/// DocAddress contains all the necessary information to identify a document
/// given a Searcher object.
///
/// It consists in an id identifying its segment, and its segment-local DocId.
/// The id used for the segment is actually an ordinal in the list of segment
/// hold by a Searcher.
#[pyclass]
#[derive(Clone, Debug)]
pub(crate) struct DocAddress {
    // ordinal of the segment within the searcher's segment list
    pub(crate) segment_ord: tv::SegmentLocalId,
    // document id local to that segment
    pub(crate) doc: tv::DocId,
}
#[pymethods]
impl DocAddress {
    /// The segment ordinal is an id identifying the segment hosting the
    /// document. It is only meaningful, in the context of a searcher.
    #[getter]
    fn segment_ord(&self) -> u32 {
        self.segment_ord
    }

    /// The segment local DocId
    #[getter]
    fn doc(&self) -> u32 {
        self.doc
    }
}
// Conversion from tantivy's native address into the Python-facing wrapper.
impl From<&tv::DocAddress> for DocAddress {
    fn from(doc_address: &tv::DocAddress) -> Self {
        Self {
            segment_ord: doc_address.segment_ord(),
            doc: doc_address.doc(),
        }
    }
}
// Implement `From` rather than `Into` (the standard-library blanket impl
// then provides `Into<tv::DocAddress> for &DocAddress` for free, so existing
// `.into()` call sites keep working).
impl From<&DocAddress> for tv::DocAddress {
    fn from(addr: &DocAddress) -> Self {
        tv::DocAddress(addr.segment_ord(), addr.doc())
    }
}
#[pyproto]
impl PyObjectProtocol for Searcher {
    // Python `repr()`: summarizes document and segment counts
    fn __repr__(&self) -> PyResult<String> {
        Ok(format!(
            "Searcher(num_docs={}, num_segments={})",
            self.inner.num_docs(),
            self.inner.segment_readers().len()
        ))
    }
}
| true
|
219ba319c997a9e61cddde1c2ab3ee6fc1d99e8f
|
Rust
|
storyfeet/rpg_tracker
|
/src/token.rs
|
UTF-8
| 6,526
| 3.21875
| 3
|
[] |
no_license
|
use crate::error::LineError;
use crate::prev_iter::{Backer, LineCounter, Prev};
/// A single lexical token produced by the `Tokenizer`.
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum Token {
    // identifiers and literals
    Ident(String),
    Num(i32),
    // keywords
    Expr,
    Fn,
    Var,
    // punctuation and operators (see `Token::special_char` for the mapping)
    Hash,
    Dot,
    Colon,
    Comma,
    Dollar,
    Add,
    Sub,
    Mul,
    Div,
    Equals,
    // statement separator ('\n' or ';')
    Break,
    BracketO,
    BracketC,
    SquareO,
    SquareC,
    SquigleO,
    SquigleC,
    Or,
    Amp,
    Less,
    Greater,
    True,
    False,
    // a quoted string literal, with escapes already resolved
    Qoth(String),
}
impl Token {
    /// Returns the token's string payload, or an error for non-string tokens.
    pub fn as_str_val(&self) -> Result<&str, LineError> {
        match self {
            Token::Ident(s) | Token::Qoth(s) => Ok(s),
            _ => Err(LineError::new(&format!("{:?} not a string type", self), 0)),
        }
    }

    /// Maps a single significant character to its token, or `None` when the
    /// character is ordinary (part of an identifier or number).
    pub fn special_char(c: char) -> Option<Token> {
        let tok = match c {
            '#' => Token::Hash,
            '$' => Token::Dollar,
            ':' => Token::Colon,
            '.' => Token::Dot,
            '+' => Token::Add,
            '-' => Token::Sub,
            '=' => Token::Equals,
            '*' => Token::Mul,
            '/' => Token::Div,
            '(' => Token::BracketO,
            ')' => Token::BracketC,
            '{' => Token::SquigleO,
            '}' => Token::SquigleC,
            '[' => Token::SquareO,
            ']' => Token::SquareC,
            ',' => Token::Comma,
            '<' => Token::Less,
            '>' => Token::Greater,
            '\n' | ';' => Token::Break,
            _ => return None,
        };
        Some(tok)
    }
}
/// Lexer over a source string, producing `Token`s with line tracking.
pub struct Tokenizer<'a> {
    // character stream with one-step backtracking
    it: Prev<char, std::str::Chars<'a>>,
    // last token produced, if any
    prev: Option<Token>,
    // current line number (incremented on '\n')
    pub line_no: usize,
}
impl<'a> Tokenizer<'a> {
    /// Creates a tokenizer over `s`, starting at line 0.
    pub fn new(s: &'a str) -> Self {
        Tokenizer {
            it: Prev::new(s.chars()),
            line_no: 0,
            prev: None,
        }
    }

    /// Reads a run of ASCII digits as a decimal integer, leaving the first
    /// non-digit character for the next read. Returns 0 for an empty run.
    fn read_num(&mut self) -> i32 {
        let mut res = 0;
        // was a manual `c >= '0' && c <= '9'` + `(c as i32) - 48` dance;
        // `to_digit` expresses the same conversion directly
        while let Some(c) = self.it.next() {
            match c.to_digit(10) {
                Some(d) => res = res * 10 + d as i32,
                None => {
                    self.it.back();
                    break;
                }
            }
        }
        res
    }

    /// starts after the '"'
    ///
    /// Reads until the closing quote, resolving backslash escapes and
    /// counting embedded newlines. An unterminated string yields whatever
    /// was read so far.
    fn read_qoth(&mut self) -> Token {
        let mut res = String::new();
        while let Some(c) = self.it.next() {
            match c {
                '\\' => res.push(self.it.next().unwrap_or(' ')),
                '"' => return Token::Qoth(res),
                '\n' => {
                    self.line_no += 1;
                    res.push('\n');
                }
                c => res.push(c),
            }
        }
        Token::Qoth(res)
    }

    /// Reads an identifier, stopping before any special character (which is
    /// pushed back) or at whitespace (which is consumed).
    fn read_ident(&mut self) -> String {
        let mut res = String::new();
        while let Some(c) = self.it.next() {
            // was `if let Some(_) = ...` (clippy: redundant_pattern_matching)
            if Token::special_char(c).is_some() {
                self.it.back();
                break;
            }
            match c {
                ' ' | '\t' => break,
                '\\' => res.push(self.it.next().unwrap_or('\\')),
                _ => res.push(c),
            }
        }
        res
    }
}
// reports the tokenizer's current line for error messages
impl<'a> LineCounter for Tokenizer<'a> {
    fn line(&self) -> usize {
        self.line_no
    }
}
impl<'a> Iterator for Tokenizer<'a> {
    type Item = Token;

    fn next(&mut self) -> Option<Self::Item> {
        // skip insignificant whitespace; newlines are NOT skipped — they
        // become Token::Break via special_char below
        let mut c = self.it.next()?;
        while c == ' ' || c == '\t' {
            c = self.it.next()?;
        }

        if let Some(r) = Token::special_char(c) {
            if c == '\n' {
                self.line_no += 1;
            }
            return Some(r);
        }

        let res = match c {
            '"' => self.read_qoth(),
            // was a manual `v >= '0' && v <= '9'` range check
            v if v.is_ascii_digit() => {
                self.it.back();
                Token::Num(self.read_num())
            }
            _ => {
                self.it.back();
                let id = self.read_ident();
                // keywords take precedence over plain identifiers
                match id.as_ref() {
                    "true" => Token::True,
                    "false" => Token::False,
                    "expr" => Token::Expr,
                    "fn" | "func" => Token::Fn,
                    "var" => Token::Var,
                    _ => Token::Ident(id),
                }
            }
        };
        self.prev = Some(res.clone());
        Some(res)
    }
}
/// A token stream with one-token backtracking, built on `Prev`.
pub struct TokPrev<'a> {
    it: Prev<Token, Tokenizer<'a>>,
}

impl<'a> TokPrev<'a> {
    /// Creates a backtrackable token stream over `s`.
    pub fn new(s: &'a str) -> Self {
        TokPrev {
            it: Prev::new(Tokenizer::new(s)),
        }
    }
}
impl<'a> Iterator for TokPrev<'a> {
    type Item = Token;

    fn next(&mut self) -> Option<Token> {
        // tail expression instead of `return ...;` (clippy: needless_return)
        self.it.next()
    }
}
// steps the stream back one token so it is yielded again by `next`
impl<'a> Backer for TokPrev<'a> {
    fn back(&mut self) {
        self.it.back();
    }
}
impl<'a> TokPrev<'a> {
    /// Consumes tokens up to and including the next `Token::Break`, or to
    /// the end of input if no break remains.
    pub fn read_to_break(&mut self) {
        let _ = self.find(|t| *t == Token::Break);
    }
}
// delegates line reporting to the underlying tokenizer
impl<'a> LineCounter for TokPrev<'a> {
    fn line(&self) -> usize {
        self.it.line()
    }
}
#[cfg(test)]
mod test_tokens {
    use super::*;

    // basic token stream: identifier, punctuation, number, operator
    #[test]
    pub fn test_token_reads() {
        let mut tk = Tokenizer::new("hello:52 + d6");
        assert_eq!(tk.next(), Some(Token::Ident("hello".to_string())));
        assert_eq!(tk.next(), Some(Token::Colon), "c1-2");
        assert_eq!(tk.next(), Some(Token::Num(52)));
        assert_eq!(tk.next(), Some(Token::Add));
        assert_eq!(tk.next(), Some(Token::Ident("d6".to_string())));
        assert!(tk.next().is_none());
    }

    // quoted strings, including escaped inner quotes
    #[test]
    pub fn test_qoth() {
        let mut tk = Tokenizer::new(r#"hello:"Goodbye","Nice","to \"meet\" you""#);
        assert_eq!(tk.next().unwrap(), Token::Ident("hello".to_string()));
        assert_eq!(tk.next().unwrap(), Token::Colon);
        assert_eq!(tk.next().unwrap(), Token::Qoth("Goodbye".to_string()));
        assert_eq!(tk.next().unwrap(), Token::Comma);
        assert_eq!(tk.next().unwrap(), Token::Qoth("Nice".to_string()));
        assert_eq!(tk.next().unwrap(), Token::Comma);
        assert_eq!(
            tk.next().unwrap(),
            Token::Qoth("to \"meet\" you".to_string())
        );
        assert!(tk.next().is_none());
    }
}
| true
|
3e5e13106d98a70e24e7a19326f95f11525b85fc
|
Rust
|
MPogoda/advent-of-code-2020
|
/rust-advent-of-code-2020/src/day21.rs
|
UTF-8
| 2,523
| 2.875
| 3
|
[
"Unlicense"
] |
permissive
|
use itertools::Itertools;
use regex::Regex;
use std::collections::HashSet;
type Entry = (HashSet<String>, HashSet<String>);
lazy_static! {
static ref RE: Regex =
Regex::new(r"^(?P<products>.+) \(contains (?P<allergens>.+)\)$").unwrap();
}
/// Parses one puzzle line of the form
/// `ingr1 ingr2 ... (contains allergen1, allergen2)` into its
/// (products, allergens) sets.
///
/// Panics if the line does not match `RE` — puzzle input is assumed
/// well-formed.
fn parse_line(line: &str) -> Entry {
    let captures = RE.captures(line).unwrap();
    // Ingredients are space-separated.
    let products: HashSet<_> = captures
        .name("products")
        .unwrap()
        .as_str()
        .split(' ')
        .map(|word| word.to_owned())
        .collect();
    // Allergens are comma-space separated inside the parentheses.
    let allergens: HashSet<_> = captures
        .name("allergens")
        .unwrap()
        .as_str()
        .split(", ")
        .map(|word| word.to_owned())
        .collect();
    (products, allergens)
}
/// AoC day-21 input generator: one `Entry` per input line.
#[aoc_generator(day21)]
fn parse_input(input: &str) -> Vec<Entry> {
    input.lines().map(parse_line).collect()
}
type Match = (String, String);
/// Finds one allergen whose candidate set has narrowed to a single product:
/// intersects the ingredient lists of every entry mentioning the allergen
/// and returns `(product, allergen)` when exactly one candidate survives.
///
/// HashSet iteration order is nondeterministic, but any allergen with a
/// unique candidate is a valid answer for the elimination loop in `solve`.
/// Panics if no allergen is currently uniquely determined.
fn find_match(data: &[Entry], unmatched: &HashSet<&String>) -> Match {
    for &allergen in unmatched {
        // Entries that list this allergen; its carrier product must appear
        // in all of them.
        let mut affected = data
            .iter()
            .filter(|(_, allergens)| allergens.contains(allergen))
            .map(|(products, _)| products);
        let mut common = affected.next().unwrap().clone();
        for next in affected {
            common.retain(|v| next.contains(v));
        }
        if common.len() == 1 {
            let product = common.drain().next().unwrap();
            return (product, allergen.to_owned());
        }
    }
    panic!("Cannot find the solution!");
}
/// Repeatedly resolves (product, allergen) pairs by elimination.
///
/// Returns the entries with every matched product and allergen removed,
/// plus the list of matches found. Each removal can make another allergen
/// uniquely determined, so the loop runs until all allergens are matched.
fn solve(input: &[Entry]) -> (Vec<Entry>, Vec<(String, String)>) {
    // All allergens that still need a product assigned.
    let mut unmatched: HashSet<_> = input
        .iter()
        .flat_map(|(_, allergens)| allergens.iter())
        .collect();
    let mut data = input.to_vec();
    let mut matches = Vec::new();
    while !unmatched.is_empty() {
        let (product, allergen) = find_match(&data, &unmatched);
        // Strike the matched pair from every entry before the next round.
        for (products, allergens) in &mut data {
            products.retain(|v| *v != product);
            allergens.retain(|v| *v != allergen);
        }
        unmatched.remove(&allergen);
        matches.push((product, allergen));
    }
    (data, matches)
}
/// Part 1: after eliminating every matched product, count occurrences of
/// the remaining (allergen-free) ingredients across all entries.
#[aoc(day21, part1)]
fn part1(input: &[Entry]) -> usize {
    solve(input).0.iter().map(|(v, _)| v.len()).sum()
}
/// Part 2: the "canonical dangerous ingredient list" — matched products
/// joined with commas, ordered alphabetically by their allergen.
#[aoc(day21, part2)]
fn part2(input: &[Entry]) -> String {
    let (_, mut matches) = solve(input);
    // Key function clones a String, so cache it rather than recomputing
    // O(n log n) times.
    matches.sort_by_cached_key(|(_, allergen)| allergen.clone());
    // We own `matches`, so consume it directly instead of `drain(0..)`;
    // plain std `collect` replaces itertools' `collect_vec`.
    matches
        .into_iter()
        .map(|(product, _)| product)
        .collect::<Vec<_>>()
        .join(",")
}
| true
|
acae444bd6c225a74dfaa81cfd85a90809a5e364
|
Rust
|
tikhono/CryptoHackPals
|
/pals/set2/_15_pkcs7_padding_validation/src/lib.rs
|
UTF-8
| 796
| 2.921875
| 3
|
[] |
no_license
|
/// Validates PKCS#7 padding on `data`.
///
/// Returns `true` only when the final `k` bytes all equal `k`, where `k` is
/// the value of the last byte and `1 <= k <= data.len()`.
///
/// Fixes three defects of the previous version: empty input panicked on
/// `unwrap`, a pad byte larger than the data length panicked on `usize`
/// underflow, and a pad byte of `0` (never valid PKCS#7) wrongly returned
/// `true` because zero bytes were inspected.
pub fn validate_padding(data: &[u8]) -> bool {
    let pad_size = match data.last() {
        Some(&b) => b as usize,
        // Empty input carries no padding at all.
        None => return false,
    };
    // Valid pad values are 1..=block_size, and the pad cannot be longer
    // than the data itself.
    if pad_size == 0 || pad_size > data.len() {
        return false;
    }
    data[data.len() - pad_size..]
        .iter()
        .all(|&byte| byte as usize == pad_size)
}
#[cfg(test)]
mod tests {
    use crate::validate_padding;
    use _09_implement_pkcs7_padding::pad;
    /// `pad` appends correct PKCS#7 padding to a 20-byte block, so
    /// validation must succeed.
    #[test]
    fn test_valid_pad_yellow_submarine_20() {
        assert_eq!(
            validate_padding(
                std::str::from_utf8(&pad(&"YELLOW SUBMARINE".as_bytes().to_vec(), 20))
                    .unwrap()
                    .as_ref()
            ),
            true
        );
    }
    /// Last byte claims 4 pad bytes, but only 3 trailing \x04s are present,
    /// so validation must fail.
    #[test]
    fn test_invalid_pad_yellow_submarine_20() {
        assert_eq!(validate_padding(b"YELLOW SUBMARINE\x04\x04\x04"), false);
    }
}
| true
|
a3aec71ba135897a0cfa583743cc9432660512fe
|
Rust
|
thaumant/levenshtein-perf-examples
|
/src/version3.rs
|
UTF-8
| 1,595
| 3.453125
| 3
|
[] |
no_license
|
use std::cell::RefCell;
static DEFAULT_CAPACITY: usize = 20;
/// Levenshtein-distance calculator that reuses two scratch buffers across
/// calls to avoid per-call allocation. `RefCell` provides interior
/// mutability so `distance` can take `&self`.
pub struct Levenshtein {
    // Current row of the DP matrix (single-row formulation).
    row: RefCell<Vec<usize>>,
    // Characters of the second string, collected for indexed iteration.
    chars2: RefCell<Vec<char>>,
}
impl Levenshtein {
    /// Creates a calculator with pre-sized scratch buffers.
    pub fn new() -> Levenshtein {
        Levenshtein {
            row: RefCell::new(Vec::with_capacity(DEFAULT_CAPACITY)),
            chars2: RefCell::new(Vec::with_capacity(DEFAULT_CAPACITY)),
        }
    }
    /// Computes the Levenshtein (edit) distance between `str1` and `str2`
    /// using the classic single-row dynamic program: O(len1 * len2) time,
    /// O(len2) scratch space.
    ///
    /// Not reentrant: a nested call on the same instance would panic on the
    /// `RefCell` borrows.
    pub fn distance(&self, str1: &str, str2: &str) -> usize {
        // Distance to/from the empty string is the other string's length.
        if str1.is_empty() { return str2.chars().count(); }
        if str2.is_empty() { return str1.chars().count(); }
        let row = &mut *self.row.borrow_mut();
        let chars2 = &mut *self.chars2.borrow_mut();
        store(chars2, str2.chars());
        // Row 0 of the DP matrix: distance from "" to each prefix of str2.
        store(row, 1 .. chars2.len() + 1);
        for (i, ch1) in str1.chars().enumerate() {
            // Boundary column: distance from the i+1-char prefix of str1
            // to the empty string.
            let mut dist_del = i + 1;
            let mut dist_sub = i;
            for (j, &ch2) in chars2.iter().enumerate() {
                // `min!` is a crate-local macro defined elsewhere in this
                // crate; here it selects among insertion, deletion, and
                // substitution costs.
                dist_del = min!(
                    row[j] + 1,
                    dist_del + 1,
                    dist_sub + (ch1 != ch2) as usize
                );
                dist_sub = row[j];
                row[j] = dist_del;
            }
        }
        row[row.len() - 1]
    }
}
/// Clears `buffer` and refills it with the items of `iter`, reusing the
/// buffer's existing allocation where possible.
///
/// (The `T: Clone` bound is kept for interface compatibility even though
/// the implementation no longer needs it.)
fn store<T: Clone, Iter: Iterator<Item=T>>(buffer: &mut Vec<T>, iter: Iter) {
    buffer.clear();
    // `extend` reserves from the iterator's size hint in one step instead
    // of growing push-by-push.
    buffer.extend(iter);
}
thread_local! {
static LEVEN: Levenshtein = Levenshtein::new();
}
/// Convenience wrapper: computes the edit distance using the thread-local,
/// buffer-reusing `Levenshtein` instance.
pub fn levenshtein3(str1: &str, str2: &str) -> usize {
    // `str1`/`str2` are already `&str`; the previous `&str1` borrows were
    // redundant `&&str`s (clippy::needless_borrow).
    LEVEN.with(|leven| leven.distance(str1, str2))
}
| true
|
757dda6ff4247f35030b62203be8c55c0829f542
|
Rust
|
rust-lang/rustc-perf
|
/collector/compile-benchmarks/style-servo/components/style/values/computed/length.rs
|
UTF-8
| 31,468
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! `<length>` computed values, and related ones.
use app_units::Au;
use ordered_float::NotNaN;
use std::fmt;
use std::ops::{Add, Neg};
use style_traits::ToCss;
use style_traits::values::specified::AllowedNumericType;
use super::{Number, ToComputedValue, Context, Percentage};
use values::{Auto, CSSFloat, Either, ExtremumLength, None_, Normal, specified};
use values::animated::{Animate, Procedure, ToAnimatedZero};
use values::computed::NonNegativeNumber;
use values::distance::{ComputeSquaredDistance, SquaredDistance};
use values::generics::NonNegative;
use values::specified::length::{AbsoluteLength, FontBaseSize, FontRelativeLength};
use values::specified::length::ViewportPercentageLength;
pub use super::image::Image;
pub use values::specified::{Angle, BorderStyle, Time, UrlOrNone};
impl ToComputedValue for specified::NoCalcLength {
    type ComputedValue = CSSPixelLength;
    /// Resolves a specified non-calc `<length>` to CSS pixels, using the
    /// computation context for font-relative and viewport-relative units.
    #[inline]
    fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
        match *self {
            specified::NoCalcLength::Absolute(length) =>
                length.to_computed_value(context),
            // Font-relative units resolve against the current element's
            // font (FontBaseSize::CurrentStyle).
            specified::NoCalcLength::FontRelative(length) =>
                length.to_computed_value(context, FontBaseSize::CurrentStyle),
            specified::NoCalcLength::ViewportPercentage(length) =>
                length.to_computed_value(context.viewport_size_for_viewport_unit_resolution()),
            // Servo-only unit resolved against the current font-size.
            specified::NoCalcLength::ServoCharacterWidth(length) =>
                length.to_computed_value(context.style().get_font().clone_font_size().size()),
            #[cfg(feature = "gecko")]
            specified::NoCalcLength::Physical(length) =>
                length.to_computed_value(context),
        }
    }
    /// Round-trips a computed pixel length back to a specified absolute px
    /// value (the original unit is lost by computation).
    #[inline]
    fn from_computed_value(computed: &Self::ComputedValue) -> Self {
        specified::NoCalcLength::Absolute(AbsoluteLength::Px(computed.px()))
    }
}
impl ToComputedValue for specified::Length {
    type ComputedValue = CSSPixelLength;
    /// Resolves a specified `<length>` to pixels. A calc() expression here
    /// must contain no percentage component (`length()` debug-asserts this).
    #[inline]
    fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
        match *self {
            specified::Length::NoCalc(l) => l.to_computed_value(context),
            specified::Length::Calc(ref calc) => calc.to_computed_value(context).length(),
        }
    }
    #[inline]
    fn from_computed_value(computed: &Self::ComputedValue) -> Self {
        specified::Length::NoCalc(specified::NoCalcLength::from_computed_value(computed))
    }
}
#[allow(missing_docs)]
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, ToAnimatedZero)]
pub struct CalcLengthOrPercentage {
#[animation(constant)]
pub clamping_mode: AllowedNumericType,
length: Length,
pub percentage: Option<Percentage>,
}
impl ComputeSquaredDistance for CalcLengthOrPercentage {
    /// Animation distance between two calc() values: the sum of the squared
    /// distances of their (unclamped) length parts and percentage parts.
    #[inline]
    fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
        // FIXME(nox): This looks incorrect to me, to add a distance between lengths
        // with a distance between percentages.
        Ok(
            self.unclamped_length().compute_squared_distance(&other.unclamped_length())? +
            self.percentage().compute_squared_distance(&other.percentage())?,
        )
    }
}
impl CalcLengthOrPercentage {
/// Returns a new `CalcLengthOrPercentage`.
#[inline]
pub fn new(length: Length, percentage: Option<Percentage>) -> Self {
Self::with_clamping_mode(length, percentage, AllowedNumericType::All)
}
/// Returns a new `CalcLengthOrPercentage` with a specific clamping mode.
#[inline]
pub fn with_clamping_mode(length: Length,
percentage: Option<Percentage>,
clamping_mode: AllowedNumericType)
-> Self {
Self {
clamping_mode: clamping_mode,
length: length,
percentage: percentage,
}
}
/// Returns this `calc()` as a `<length>`.
///
/// Panics in debug mode if a percentage is present in the expression.
#[inline]
pub fn length(&self) -> CSSPixelLength {
debug_assert!(self.percentage.is_none());
self.length_component()
}
/// Returns the length component of this `calc()`
#[inline]
pub fn length_component(&self) -> CSSPixelLength {
CSSPixelLength::new(self.clamping_mode.clamp(self.length.px()))
}
/// Returns the `<length>` component of this `calc()`, unclamped.
#[inline]
pub fn unclamped_length(&self) -> CSSPixelLength {
self.length
}
/// Return the percentage value as CSSFloat.
#[inline]
pub fn percentage(&self) -> CSSFloat {
self.percentage.map_or(0., |p| p.0)
}
/// Convert the computed value into used value.
#[inline]
pub fn to_used_value(&self, container_len: Option<Au>) -> Option<Au> {
self.to_pixel_length(container_len).map(Au::from)
}
/// If there are special rules for computing percentages in a value (e.g. the height property),
/// they apply whenever a calc() expression contains percentages.
pub fn to_pixel_length(&self, container_len: Option<Au>) -> Option<Length> {
match (container_len, self.percentage) {
(Some(len), Some(percent)) => {
let pixel = self.length.px() + len.scale_by(percent.0).to_f32_px();
Some(Length::new(self.clamping_mode.clamp(pixel)))
},
(_, None) => Some(self.length()),
_ => None,
}
}
}
impl From<LengthOrPercentage> for CalcLengthOrPercentage {
    /// Lifts any `LengthOrPercentage` into its `calc()` representation:
    /// plain lengths carry no percentage, plain percentages carry a zero
    /// length, and calc values pass through unchanged.
    fn from(len: LengthOrPercentage) -> CalcLengthOrPercentage {
        match len {
            LengthOrPercentage::Length(length) => {
                CalcLengthOrPercentage::new(length, None)
            },
            LengthOrPercentage::Percentage(percentage) => {
                CalcLengthOrPercentage::new(Length::new(0.), Some(percentage))
            },
            LengthOrPercentage::Calc(calc) => calc,
        }
    }
}
impl From<LengthOrPercentageOrAuto> for Option<CalcLengthOrPercentage> {
    /// Converts to the `calc()` representation; `auto` has no calc()
    /// equivalent and maps to `None`.
    fn from(len: LengthOrPercentageOrAuto) -> Option<CalcLengthOrPercentage> {
        match len {
            LengthOrPercentageOrAuto::Auto => None,
            LengthOrPercentageOrAuto::Length(length) => {
                Some(CalcLengthOrPercentage::new(length, None))
            },
            LengthOrPercentageOrAuto::Percentage(percentage) => {
                Some(CalcLengthOrPercentage::new(Length::new(0.), Some(percentage)))
            },
            LengthOrPercentageOrAuto::Calc(calc) => Some(calc),
        }
    }
}
impl From<LengthOrPercentageOrNone> for Option<CalcLengthOrPercentage> {
    /// Converts to the `calc()` representation; the `none` keyword has no
    /// calc() equivalent and maps to `None`.
    fn from(len: LengthOrPercentageOrNone) -> Option<CalcLengthOrPercentage> {
        match len {
            LengthOrPercentageOrNone::None => None,
            LengthOrPercentageOrNone::Length(length) => {
                Some(CalcLengthOrPercentage::new(length, None))
            },
            LengthOrPercentageOrNone::Percentage(percentage) => {
                Some(CalcLengthOrPercentage::new(Length::new(0.), Some(percentage)))
            },
            LengthOrPercentageOrNone::Calc(calc) => Some(calc),
        }
    }
}
impl ToCss for CalcLengthOrPercentage {
    /// Serializes the value: a bare length when there is no percentage, a
    /// bare percentage when the length is exactly zero, and otherwise
    /// `calc(<pct> + <len>)` — flipping `+` to `-` for a negative length so
    /// the absolute value can be printed.
    fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
        use num_traits::Zero;
        let (length, percentage) = match (self.length, self.percentage) {
            (l, None) => return l.to_css(dest),
            (l, Some(p)) if l.px() == 0. => return p.to_css(dest),
            (l, Some(p)) => (l, p),
        };
        dest.write_str("calc(")?;
        percentage.to_css(dest)?;
        dest.write_str(if length.px() < Zero::zero() { " - " } else { " + " })?;
        length.abs().to_css(dest)?;
        dest.write_str(")")
    }
}
impl specified::CalcLengthOrPercentage {
    /// Compute the value, zooming any absolute units by the zoom function.
    ///
    /// Sums every unit component (absolute, viewport-relative, and
    /// font-relative) into a single pixel length; the percentage component
    /// is carried through untouched.
    fn to_computed_value_with_zoom<F>(&self, context: &Context, zoom_fn: F,
                                      base_size: FontBaseSize) -> CalcLengthOrPercentage
        where F: Fn(Length) -> Length {
        use std::f32;
        let mut length = 0.;
        // Only the absolute component is subject to (text-)zoom.
        if let Some(absolute) = self.absolute {
            length += zoom_fn(absolute.to_computed_value(context)).px();
        }
        // Viewport-relative units resolve against the viewport size.
        for val in &[self.vw.map(ViewportPercentageLength::Vw),
                     self.vh.map(ViewportPercentageLength::Vh),
                     self.vmin.map(ViewportPercentageLength::Vmin),
                     self.vmax.map(ViewportPercentageLength::Vmax)] {
            if let Some(val) = *val {
                let viewport_size = context.viewport_size_for_viewport_unit_resolution();
                length += val.to_computed_value(viewport_size).px();
            }
        }
        // Font-relative units resolve against `base_size`.
        for val in &[self.ch.map(FontRelativeLength::Ch),
                     self.em.map(FontRelativeLength::Em),
                     self.ex.map(FontRelativeLength::Ex),
                     self.rem.map(FontRelativeLength::Rem)] {
            if let Some(val) = *val {
                length += val.to_computed_value(context, base_size).px();
            }
        }
        CalcLengthOrPercentage {
            clamping_mode: self.clamping_mode,
            // Clamp the sum to the finite f32 range.
            length: Length::new(length.min(f32::MAX).max(f32::MIN)),
            percentage: self.percentage,
        }
    }
    /// Compute font-size or line-height taking into account text-zoom if necessary.
    pub fn to_computed_value_zoomed(&self, context: &Context, base_size: FontBaseSize) -> CalcLengthOrPercentage {
        self.to_computed_value_with_zoom(context, |abs| context.maybe_zoom_text(abs.into()).0, base_size)
    }
}
impl ToComputedValue for specified::CalcLengthOrPercentage {
type ComputedValue = CalcLengthOrPercentage;
fn to_computed_value(&self, context: &Context) -> CalcLengthOrPercentage {
// normal properties don't zoom, and compute em units against the current style's font-size
self.to_computed_value_with_zoom(context, |abs| abs, FontBaseSize::CurrentStyle)
}
#[inline]
fn from_computed_value(computed: &CalcLengthOrPercentage) -> Self {
specified::CalcLengthOrPercentage {
clamping_mode: computed.clamping_mode,
absolute: Some(AbsoluteLength::from_computed_value(&computed.length)),
percentage: computed.percentage,
..Default::default()
}
}
}
#[allow(missing_docs)]
#[animate(fallback = "Self::animate_fallback")]
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[css(derive_debug)]
#[derive(Animate, Clone, ComputeSquaredDistance, Copy, PartialEq)]
#[derive(ToAnimatedZero, ToCss)]
#[distance(fallback = "Self::compute_squared_distance_fallback")]
pub enum LengthOrPercentage {
Length(Length),
Percentage(Percentage),
Calc(CalcLengthOrPercentage),
}
impl LengthOrPercentage {
/// https://drafts.csswg.org/css-transitions/#animtype-lpcalc
fn animate_fallback(
&self,
other: &Self,
procedure: Procedure,
) -> Result<Self, ()> {
// Special handling for zero values since these should not require calc().
if self.is_definitely_zero() {
return other.to_animated_zero()?.animate(other, procedure);
}
if other.is_definitely_zero() {
return self.animate(&self.to_animated_zero()?, procedure);
}
let this = CalcLengthOrPercentage::from(*self);
let other = CalcLengthOrPercentage::from(*other);
Ok(LengthOrPercentage::Calc(this.animate(&other, procedure)?))
}
#[inline]
fn compute_squared_distance_fallback(
&self,
other: &Self,
) -> Result<SquaredDistance, ()> {
CalcLengthOrPercentage::compute_squared_distance(
&(*self).into(),
&(*other).into(),
)
}
}
impl From<Au> for LengthOrPercentage {
#[inline]
fn from(length: Au) -> Self {
LengthOrPercentage::Length(length.into())
}
}
impl LengthOrPercentage {
    #[inline]
    #[allow(missing_docs)]
    /// Absolute zero length (`0px`).
    pub fn zero() -> LengthOrPercentage {
        LengthOrPercentage::Length(Length::new(0.))
    }
    #[inline]
    /// 1px length value for SVG defaults
    pub fn one() -> LengthOrPercentage {
        LengthOrPercentage::Length(Length::new(1.))
    }
    /// Returns true if the computed value is absolute 0 or 0%.
    ///
    /// (Returns false for calc() values, even if ones that may resolve to zero.)
    #[inline]
    pub fn is_definitely_zero(&self) -> bool {
        use self::LengthOrPercentage::*;
        match *self {
            Length(l) => l.px() == 0.0,
            Percentage(p) => p.0 == 0.0,
            Calc(_) => false
        }
    }
    // CSSFloat doesn't implement Hash, so does CSSPixelLength. Therefore, we still use Au as the
    // hash key.
    #[allow(missing_docs)]
    pub fn to_hash_key(&self) -> (Au, NotNaN<f32>) {
        use self::LengthOrPercentage::*;
        match *self {
            Length(l) => (Au::from(l), NotNaN::new(0.0).unwrap()),
            Percentage(p) => (Au(0), NotNaN::new(p.0).unwrap()),
            Calc(c) => (Au::from(c.unclamped_length()), NotNaN::new(c.percentage()).unwrap()),
        }
    }
    /// Returns the used value.
    pub fn to_used_value(&self, containing_length: Au) -> Au {
        Au::from(self.to_pixel_length(containing_length))
    }
    /// Returns the used value as CSSPixelLength.
    ///
    /// Percentages and calc() resolve against `containing_length`.
    pub fn to_pixel_length(&self, containing_length: Au) -> Length {
        match *self {
            LengthOrPercentage::Length(length) => length,
            LengthOrPercentage::Percentage(p) => containing_length.scale_by(p.0).into(),
            LengthOrPercentage::Calc(ref calc) => {
                // A containing length is supplied, so this cannot be None.
                calc.to_pixel_length(Some(containing_length)).unwrap()
            },
        }
    }
    /// Returns the clamped non-negative values.
    ///
    /// NOTE(review): calc() values are returned unchanged here — presumably
    /// because they carry their own `clamping_mode`; confirm before relying
    /// on this for calc().
    #[inline]
    pub fn clamp_to_non_negative(self) -> Self {
        match self {
            LengthOrPercentage::Length(length) => {
                LengthOrPercentage::Length(Length::new(length.px().max(0.)))
            },
            LengthOrPercentage::Percentage(percentage) => {
                LengthOrPercentage::Percentage(Percentage(percentage.0.max(0.)))
            },
            _ => self
        }
    }
}
impl ToComputedValue for specified::LengthOrPercentage {
type ComputedValue = LengthOrPercentage;
fn to_computed_value(&self, context: &Context) -> LengthOrPercentage {
match *self {
specified::LengthOrPercentage::Length(ref value) => {
LengthOrPercentage::Length(value.to_computed_value(context))
}
specified::LengthOrPercentage::Percentage(value) => {
LengthOrPercentage::Percentage(value)
}
specified::LengthOrPercentage::Calc(ref calc) => {
LengthOrPercentage::Calc((**calc).to_computed_value(context))
}
}
}
fn from_computed_value(computed: &LengthOrPercentage) -> Self {
match *computed {
LengthOrPercentage::Length(value) => {
specified::LengthOrPercentage::Length(
ToComputedValue::from_computed_value(&value)
)
}
LengthOrPercentage::Percentage(value) => {
specified::LengthOrPercentage::Percentage(value)
}
LengthOrPercentage::Calc(ref calc) => {
specified::LengthOrPercentage::Calc(
Box::new(ToComputedValue::from_computed_value(calc))
)
}
}
}
}
#[allow(missing_docs)]
#[animate(fallback = "Self::animate_fallback")]
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[css(derive_debug)]
#[derive(Animate, Clone, ComputeSquaredDistance, Copy, PartialEq, ToCss)]
#[distance(fallback = "Self::compute_squared_distance_fallback")]
pub enum LengthOrPercentageOrAuto {
Length(Length),
Percentage(Percentage),
Auto,
Calc(CalcLengthOrPercentage),
}
impl LengthOrPercentageOrAuto {
/// https://drafts.csswg.org/css-transitions/#animtype-lpcalc
fn animate_fallback(
&self,
other: &Self,
procedure: Procedure,
) -> Result<Self, ()> {
let this = <Option<CalcLengthOrPercentage>>::from(*self);
let other = <Option<CalcLengthOrPercentage>>::from(*other);
Ok(LengthOrPercentageOrAuto::Calc(
this.animate(&other, procedure)?.ok_or(())?,
))
}
#[inline]
fn compute_squared_distance_fallback(
&self,
other: &Self,
) -> Result<SquaredDistance, ()> {
<Option<CalcLengthOrPercentage>>::compute_squared_distance(
&(*self).into(),
&(*other).into(),
)
}
}
impl LengthOrPercentageOrAuto {
/// Returns true if the computed value is absolute 0 or 0%.
///
/// (Returns false for calc() values, even if ones that may resolve to zero.)
#[inline]
pub fn is_definitely_zero(&self) -> bool {
use self::LengthOrPercentageOrAuto::*;
match *self {
Length(l) => l.px() == 0.0,
Percentage(p) => p.0 == 0.0,
Calc(_) | Auto => false
}
}
}
impl ToComputedValue for specified::LengthOrPercentageOrAuto {
type ComputedValue = LengthOrPercentageOrAuto;
#[inline]
fn to_computed_value(&self, context: &Context) -> LengthOrPercentageOrAuto {
match *self {
specified::LengthOrPercentageOrAuto::Length(ref value) => {
LengthOrPercentageOrAuto::Length(value.to_computed_value(context))
}
specified::LengthOrPercentageOrAuto::Percentage(value) => {
LengthOrPercentageOrAuto::Percentage(value)
}
specified::LengthOrPercentageOrAuto::Auto => {
LengthOrPercentageOrAuto::Auto
}
specified::LengthOrPercentageOrAuto::Calc(ref calc) => {
LengthOrPercentageOrAuto::Calc((**calc).to_computed_value(context))
}
}
}
#[inline]
fn from_computed_value(computed: &LengthOrPercentageOrAuto) -> Self {
match *computed {
LengthOrPercentageOrAuto::Auto => specified::LengthOrPercentageOrAuto::Auto,
LengthOrPercentageOrAuto::Length(value) => {
specified::LengthOrPercentageOrAuto::Length(
ToComputedValue::from_computed_value(&value)
)
}
LengthOrPercentageOrAuto::Percentage(value) => {
specified::LengthOrPercentageOrAuto::Percentage(value)
}
LengthOrPercentageOrAuto::Calc(calc) => {
specified::LengthOrPercentageOrAuto::Calc(
Box::new(ToComputedValue::from_computed_value(&calc))
)
}
}
}
}
#[allow(missing_docs)]
#[animate(fallback = "Self::animate_fallback")]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[css(derive_debug)]
#[derive(Animate, Clone, ComputeSquaredDistance, Copy, PartialEq, ToCss)]
#[distance(fallback = "Self::compute_squared_distance_fallback")]
pub enum LengthOrPercentageOrNone {
Length(Length),
Percentage(Percentage),
Calc(CalcLengthOrPercentage),
None,
}
impl LengthOrPercentageOrNone {
/// https://drafts.csswg.org/css-transitions/#animtype-lpcalc
fn animate_fallback(
&self,
other: &Self,
procedure: Procedure,
) -> Result<Self, ()> {
let this = <Option<CalcLengthOrPercentage>>::from(*self);
let other = <Option<CalcLengthOrPercentage>>::from(*other);
Ok(LengthOrPercentageOrNone::Calc(
this.animate(&other, procedure)?.ok_or(())?,
))
}
fn compute_squared_distance_fallback(
&self,
other: &Self,
) -> Result<SquaredDistance, ()> {
<Option<CalcLengthOrPercentage>>::compute_squared_distance(
&(*self).into(),
&(*other).into(),
)
}
}
impl LengthOrPercentageOrNone {
/// Returns the used value.
pub fn to_used_value(&self, containing_length: Au) -> Option<Au> {
match *self {
LengthOrPercentageOrNone::None => None,
LengthOrPercentageOrNone::Length(length) => Some(Au::from(length)),
LengthOrPercentageOrNone::Percentage(percent) => Some(containing_length.scale_by(percent.0)),
LengthOrPercentageOrNone::Calc(ref calc) => calc.to_used_value(Some(containing_length)),
}
}
}
impl ToComputedValue for specified::LengthOrPercentageOrNone {
type ComputedValue = LengthOrPercentageOrNone;
#[inline]
fn to_computed_value(&self, context: &Context) -> LengthOrPercentageOrNone {
match *self {
specified::LengthOrPercentageOrNone::Length(ref value) => {
LengthOrPercentageOrNone::Length(value.to_computed_value(context))
}
specified::LengthOrPercentageOrNone::Percentage(value) => {
LengthOrPercentageOrNone::Percentage(value)
}
specified::LengthOrPercentageOrNone::Calc(ref calc) => {
LengthOrPercentageOrNone::Calc((**calc).to_computed_value(context))
}
specified::LengthOrPercentageOrNone::None => {
LengthOrPercentageOrNone::None
}
}
}
#[inline]
fn from_computed_value(computed: &LengthOrPercentageOrNone) -> Self {
match *computed {
LengthOrPercentageOrNone::None => specified::LengthOrPercentageOrNone::None,
LengthOrPercentageOrNone::Length(value) => {
specified::LengthOrPercentageOrNone::Length(
ToComputedValue::from_computed_value(&value)
)
}
LengthOrPercentageOrNone::Percentage(value) => {
specified::LengthOrPercentageOrNone::Percentage(value)
}
LengthOrPercentageOrNone::Calc(calc) => {
specified::LengthOrPercentageOrNone::Calc(
Box::new(ToComputedValue::from_computed_value(&calc))
)
}
}
}
}
/// A wrapper of LengthOrPercentage, whose value must be >= 0.
pub type NonNegativeLengthOrPercentage = NonNegative<LengthOrPercentage>;
impl From<NonNegativeLength> for NonNegativeLengthOrPercentage {
#[inline]
fn from(length: NonNegativeLength) -> Self {
LengthOrPercentage::Length(length.0).into()
}
}
impl From<LengthOrPercentage> for NonNegativeLengthOrPercentage {
#[inline]
fn from(lop: LengthOrPercentage) -> Self {
NonNegative::<LengthOrPercentage>(lop)
}
}
impl From<NonNegativeLengthOrPercentage> for LengthOrPercentage {
#[inline]
fn from(lop: NonNegativeLengthOrPercentage) -> LengthOrPercentage {
lop.0
}
}
impl NonNegativeLengthOrPercentage {
/// Get zero value.
#[inline]
pub fn zero() -> Self {
NonNegative::<LengthOrPercentage>(LengthOrPercentage::zero())
}
/// Returns true if the computed value is absolute 0 or 0%.
#[inline]
pub fn is_definitely_zero(&self) -> bool {
self.0.is_definitely_zero()
}
/// Returns the used value.
#[inline]
pub fn to_used_value(&self, containing_length: Au) -> Au {
self.0.to_used_value(containing_length)
}
}
/// The computed `<length>` value.
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[cfg_attr(feature = "servo", derive(Deserialize, HeapSizeOf, Serialize))]
#[derive(Animate, Clone, ComputeSquaredDistance, Copy, Debug, PartialEq, PartialOrd)]
#[derive(ToAnimatedValue, ToAnimatedZero)]
pub struct CSSPixelLength(CSSFloat);
impl CSSPixelLength {
/// Return a new CSSPixelLength.
#[inline]
pub fn new(px: CSSFloat) -> Self {
CSSPixelLength(px)
}
/// Return the containing pixel value.
#[inline]
pub fn px(&self) -> CSSFloat {
self.0
}
/// Return the length with app_unit i32 type.
#[inline]
pub fn to_i32_au(&self) -> i32 {
Au::from(*self).0
}
/// Return the absolute value of this length.
pub fn abs(self) -> Self {
CSSPixelLength::new(self.0.abs())
}
}
impl ToCss for CSSPixelLength {
#[inline]
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
self.0.to_css(dest)?;
dest.write_str("px")
}
}
impl Neg for CSSPixelLength {
type Output = Self;
#[inline]
fn neg(self) -> Self {
CSSPixelLength::new(-self.0)
}
}
impl From<CSSPixelLength> for Au {
#[inline]
fn from(len: CSSPixelLength) -> Self {
Au::from_f32_px(len.0)
}
}
impl From<Au> for CSSPixelLength {
#[inline]
fn from(len: Au) -> Self {
CSSPixelLength::new(len.to_f32_px())
}
}
/// An alias of computed `<length>` value.
pub type Length = CSSPixelLength;
/// Either a computed `<length>` or the `none` keyword.
pub type LengthOrNone = Either<Length, None_>;
/// Either a computed `<length>` or the `auto` keyword.
pub type LengthOrAuto = Either<Length, Auto>;
/// Either a computed `<length>` or a `<number>` value.
pub type LengthOrNumber = Either<Length, Number>;
impl LengthOrNumber {
/// Returns `0`.
#[inline]
pub fn zero() -> Self {
Either::Second(0.)
}
}
/// Either a computed `<length>` or the `normal` keyword.
pub type LengthOrNormal = Either<Length, Normal>;
/// A wrapper of Length, whose value must be >= 0.
pub type NonNegativeLength = NonNegative<Length>;
impl NonNegativeLength {
/// Create a NonNegativeLength.
#[inline]
pub fn new(px: CSSFloat) -> Self {
NonNegative(Length::new(px.max(0.)))
}
/// Return a zero value.
#[inline]
pub fn zero() -> Self {
Self::new(0.)
}
/// Return the pixel value of |NonNegativeLength|.
#[inline]
pub fn px(&self) -> CSSFloat {
self.0.px()
}
#[inline]
/// Ensures it is non negative
pub fn clamp(self) -> Self {
if (self.0).0 < 0. {
Self::zero()
} else {
self
}
}
/// Scale this NonNegativeLength.
/// We scale NonNegativeLength by zero if the factor is negative because it doesn't
/// make sense to scale a negative factor on a non-negative length.
#[inline]
pub fn scale_by(&self, factor: f32) -> Self {
Self::new(self.0.px() * factor.max(0.))
}
}
impl Add<NonNegativeLength> for NonNegativeLength {
type Output = Self;
fn add(self, other: Self) -> Self {
NonNegativeLength::new(self.px() + other.px())
}
}
impl From<Length> for NonNegativeLength {
#[inline]
fn from(len: Length) -> Self {
NonNegative(len)
}
}
impl From<Au> for NonNegativeLength {
#[inline]
fn from(au: Au) -> Self {
NonNegative(au.into())
}
}
impl From<NonNegativeLength> for Au {
#[inline]
fn from(non_negative_len: NonNegativeLength) -> Self {
Au::from(non_negative_len.0)
}
}
/// Either a computed NonNegativeLength or the `auto` keyword.
pub type NonNegativeLengthOrAuto = Either<NonNegativeLength, Auto>;
/// Either a computed NonNegativeLength or the `normal` keyword.
pub type NonNegativeLengthOrNormal = Either<NonNegativeLength, Normal>;
/// Either a computed NonNegativeLength or a NonNegativeNumber value.
pub type NonNegativeLengthOrNumber = Either<NonNegativeLength, NonNegativeNumber>;
/// A value suitable for a `min-width`, `min-height`, `width` or `height` property.
/// See values/specified/length.rs for more details.
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Animate, Clone, ComputeSquaredDistance, Copy, Debug, PartialEq)]
#[derive(ToAnimatedZero, ToCss)]
pub enum MozLength {
LengthOrPercentageOrAuto(LengthOrPercentageOrAuto),
#[animation(error)]
ExtremumLength(ExtremumLength),
}
impl MozLength {
/// Returns the `auto` value.
pub fn auto() -> Self {
MozLength::LengthOrPercentageOrAuto(LengthOrPercentageOrAuto::Auto)
}
}
impl ToComputedValue for specified::MozLength {
type ComputedValue = MozLength;
#[inline]
fn to_computed_value(&self, context: &Context) -> MozLength {
match *self {
specified::MozLength::LengthOrPercentageOrAuto(ref lopoa) => {
MozLength::LengthOrPercentageOrAuto(lopoa.to_computed_value(context))
}
specified::MozLength::ExtremumLength(ref ext) => {
debug_assert!(context.for_non_inherited_property.is_some(),
"should check whether we're a non-inherited property");
context.rule_cache_conditions.borrow_mut()
.set_writing_mode_dependency(context.builder.writing_mode);
MozLength::ExtremumLength(ext.clone())
}
}
}
#[inline]
fn from_computed_value(computed: &MozLength) -> Self {
match *computed {
MozLength::LengthOrPercentageOrAuto(ref lopoa) =>
specified::MozLength::LengthOrPercentageOrAuto(
specified::LengthOrPercentageOrAuto::from_computed_value(&lopoa)),
MozLength::ExtremumLength(ref ext) =>
specified::MozLength::ExtremumLength(ext.clone()),
}
}
}
/// A value suitable for a `max-width` or `max-height` property.
/// See values/specified/length.rs for more details.
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Animate, Clone, ComputeSquaredDistance, Copy, Debug, PartialEq, ToCss)]
pub enum MaxLength {
LengthOrPercentageOrNone(LengthOrPercentageOrNone),
#[animation(error)]
ExtremumLength(ExtremumLength),
}
impl MaxLength {
/// Returns the `none` value.
pub fn none() -> Self {
MaxLength::LengthOrPercentageOrNone(LengthOrPercentageOrNone::None)
}
}
impl ToComputedValue for specified::MaxLength {
type ComputedValue = MaxLength;
#[inline]
fn to_computed_value(&self, context: &Context) -> MaxLength {
match *self {
specified::MaxLength::LengthOrPercentageOrNone(ref lopon) => {
MaxLength::LengthOrPercentageOrNone(lopon.to_computed_value(context))
}
specified::MaxLength::ExtremumLength(ref ext) => {
MaxLength::ExtremumLength(ext.clone())
}
}
}
#[inline]
fn from_computed_value(computed: &MaxLength) -> Self {
match *computed {
MaxLength::LengthOrPercentageOrNone(ref lopon) =>
specified::MaxLength::LengthOrPercentageOrNone(
specified::LengthOrPercentageOrNone::from_computed_value(&lopon)),
MaxLength::ExtremumLength(ref ext) =>
specified::MaxLength::ExtremumLength(ext.clone()),
}
}
}
| true
|
899791266ffb03447ebfc72541f44e019ad68d1f
|
Rust
|
sybila/biodivine-lib-bdd
|
/src/_impl_bdd/_impl_cnf.rs
|
UTF-8
| 5,148
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
use crate::{Bdd, BddNode, BddPartialValuation, BddPointer, BddVariable};
use fxhash::FxBuildHasher;
use std::collections::HashMap;
impl Bdd {
    /// **(internal)** A specialized algorithm for constructing BDDs from CNFs. It builds the BDD
    /// directly by recursively "splitting" the clauses. The advantage is that we avoid a lot of
    /// memory copying. The disadvantage is that when the number of variables is high and the
    /// number of clauses low, this could be slightly slower due to all the recursion. However,
    /// it definitely needs to be tested at some point.
    pub(crate) fn mk_cnf(num_vars: u16, cnf: &[BddPartialValuation]) -> Bdd {
        // This is essentially a "dual" algorithm to the DNF implementation. Relevant explanation
        // can be found there.
        if cnf.is_empty() {
            // An empty conjunction is trivially satisfied.
            // Fix: the original computed this BDD but dropped the value instead of
            // returning it; the fall-through path below still produced `true` for an
            // empty CNF, so behaviour is unchanged — the early return now actually
            // short-circuits as clearly intended.
            return Bdd::mk_true(num_vars);
        }

        // Builds the BDD node deciding `variable`, restricted to the clauses that are
        // not yet satisfied on the current path. New nodes are pushed into `result`
        // and de-duplicated through `node_cache`.
        fn build_recursive(
            num_vars: u16,
            mut variable: u16,
            cnf: &[&BddPartialValuation],
            result: &mut Bdd,
            node_cache: &mut HashMap<BddNode, BddPointer, FxBuildHasher>,
        ) -> BddPointer {
            loop {
                // Out of variables: the path satisfies the CNF iff no clause remains.
                if variable == num_vars {
                    return BddPointer::from_bool(cnf.is_empty());
                }
                // Every clause is already satisfied on this path.
                if cnf.is_empty() {
                    return BddPointer::one();
                }

                let var = BddVariable(variable);
                let should_branch = cnf.iter().any(|val| val.get_value(var).is_some());
                // No clause mentions `var`; skip it without creating a node.
                if !should_branch {
                    variable += 1;
                    continue;
                }

                // Compared to DNF, here we want to *remove* any clause that has the specific
                // fixed value, because then the clause is satisfied. I.e. we want to retain
                // all clauses that are not satisfied by the recursive path so far.
                let mut var_true = Vec::new();
                let mut var_false = Vec::new();
                for clause in cnf {
                    match clause.get_value(var) {
                        Some(true) => var_false.push(*clause),
                        Some(false) => var_true.push(*clause),
                        _ => {
                            var_true.push(*clause);
                            var_false.push(*clause);
                        }
                    }
                }

                let high = build_recursive(num_vars, variable + 1, &var_true, result, node_cache);
                let low = build_recursive(num_vars, variable + 1, &var_false, result, node_cache);

                // Redundant decision: both children are the same node.
                if high == low {
                    return high;
                }

                let node = BddNode::mk_node(var, low, high);
                return if let Some(id) = node_cache.get(&node) {
                    *id
                } else {
                    result.push_node(node);
                    node_cache.insert(node, result.root_pointer());
                    result.root_pointer()
                };
            }
        }

        let mut result = Bdd::mk_true(num_vars);
        let mut node_cache = HashMap::with_capacity_and_hasher(cnf.len(), FxBuildHasher::default());
        // Seed the cache with the two terminal nodes so they are never duplicated.
        node_cache.insert(BddNode::mk_zero(num_vars), BddPointer::zero());
        node_cache.insert(BddNode::mk_one(num_vars), BddPointer::one());

        let cnf = Vec::from_iter(cnf.iter());
        build_recursive(num_vars, 0, &cnf, &mut result, &mut node_cache);
        result
    }

    /// Construct a CNF representation of this BDD.
    pub fn to_cnf(&self) -> Vec<BddPartialValuation> {
        // This is a "dual" of the DNF algorithm.
        // However, it also appears in this answer:
        // https://stackoverflow.com/questions/19488478/convert-bdd-to-cnf

        // Walks every root-to-terminal path; each path ending in the `zero` terminal
        // contributes one clause (with inverted literals) to the CNF.
        fn build_recursive(
            bdd: &Bdd,
            path: &mut BddPartialValuation,
            node: BddPointer,
            results: &mut Vec<BddPartialValuation>,
        ) {
            if node.is_terminal() {
                // Compared to DNF, we want to include paths that terminate in the zero nodes.
                if node.is_zero() {
                    results.push(path.clone());
                }
                return;
            }

            let var = bdd.var_of(node);
            let low = bdd.low_link_of(node);
            let high = bdd.high_link_of(node);

            // Compared to DNF, we invert the values on the constructed path (i.e. low node
            // has a value fixed to true and vice versa).
            if !low.is_one() {
                path.set_value(var, true);
                build_recursive(bdd, path, low, results);
                path.unset_value(var);
            }
            if !high.is_one() {
                path.set_value(var, false);
                build_recursive(bdd, path, high, results);
                path.unset_value(var);
            }
        }

        let mut result = Vec::new();
        build_recursive(
            self,
            &mut BddPartialValuation::empty(),
            self.root_pointer(),
            &mut result,
        );
        result
    }
}
| true
|
1777752513cd438b096191e4464e04523f875a02
|
Rust
|
vain0x/competo
|
/src/config.rs
|
UTF-8
| 1,210
| 3.046875
| 3
|
[
"MIT"
] |
permissive
|
//! Defines data structures of command line arguments.
use clap;
/// Parsed command line configuration (built by `Config::from_matches`).
#[derive(Debug)]
pub struct Config {
    /// Value of the `src-path` argument, if given.
    pub src_path: Option<String>,
    /// Value of the `main-path` argument, if given.
    pub main_path: Option<String>,
    /// Module names collected from the `install` subcommand.
    pub install_mod_names: Vec<String>,
}
impl Config {
    /// Builds a `Config` from the parsed top-level clap matches.
    pub fn from_matches(gm: &clap::ArgMatches) -> Self {
        // Module names are only collected when the `install` subcommand was used.
        let install_mod_names = match gm.subcommand() {
            ("install", Some(sm)) => {
                let mod_names = sm
                    .values_of("mod-name")
                    .into_iter()
                    .flatten()
                    .map(|name| name.to_owned())
                    .collect::<Vec<_>>();
                trace!("install {:?}", mod_names);
                mod_names
            }
            ("install", None) => {
                warn!("Nothing to install");
                Vec::new()
            }
            _ => {
                error!("unknown subcommand");
                vec![]
            }
        };

        Config {
            src_path: gm.value_of("src-path").map(str::to_owned),
            main_path: gm.value_of("main-path").map(str::to_owned),
            install_mod_names,
        }
    }
}
| true
|
75bbae25944dc9b1b2681683ecb063558582d66b
|
Rust
|
ilya-zlobintsev/blogger
|
/src/main.rs
|
UTF-8
| 1,955
| 2.796875
| 3
|
[] |
no_license
|
#![feature(proc_macro_hygiene, decl_macro)]
use chrono::{DateTime, Utc};
use comrak::ComrakOptions;
use rocket_contrib::{serve::StaticFiles, templates::Template};
use serde::Serialize;
use std::{
fs::{self, DirEntry},
time::SystemTime,
};
#[macro_use]
extern crate rocket;
/// One blog post as rendered on the index page.
#[derive(Serialize)]
struct BlogEntry {
    /// First line of the Markdown file with `#` heading markers stripped.
    title: String,
    /// NOTE(review): `index` currently fills this with the entry's file
    /// path, not a human-readable summary — confirm the template's intent.
    description: String,
    /// Relative URL of the entry (`entries/<file name>`).
    path: String,
}
/// Template context for the index ("main-page") template.
#[derive(Serialize)]
struct MainPageContext {
    entries: Vec<BlogEntry>,
}
/// Renders a single blog entry: reads `entries/<entry>` as Markdown and
/// renders it through the `entry` template.
///
/// NOTE(review): `entry` comes straight from the URL and is interpolated
/// into a filesystem path; the `unwrap` also panics for unknown entries —
/// consider returning a 404 instead.
#[get("/entries/<entry>")]
fn get_entry(entry: String) -> Template {
    let markdown = fs::read_to_string(format!("entries/{}", entry)).unwrap();
    let doc = comrak::markdown_to_html(&markdown, &ComrakOptions::default());
    Template::render("entry", doc)
}
/// Renders the index page: lists all entries in `entries/`, newest first.
#[get("/")]
fn index() -> Template {
    // Panics if the `entries` folder is missing or any entry is unreadable.
    let mut files: Vec<DirEntry> = fs::read_dir("entries")
        .expect("entries folder not found")
        .map(|entry| entry.unwrap())
        .collect();
    // Sort by file creation time, newest first.
    // NOTE(review): `metadata().created()` is not supported on every
    // platform/filesystem, in which case these `unwrap`s panic — confirm
    // the deployment targets.
    files.sort_by(|a, b| {
        b.metadata()
            .unwrap()
            .created()
            .unwrap()
            .cmp(&a.metadata().unwrap().created().unwrap())
    });
    let mut entries = Vec::new();
    for entry_file in files {
        let contents = fs::read_to_string(entry_file.path()).unwrap();
        // Title = first line of the file, with Markdown '#' markers removed.
        let title = contents
            .split("\n")
            .next()
            .unwrap()
            .replace("#", "")
            .trim()
            .to_string();
        let path = format!("entries/{}", entry_file.file_name().to_string_lossy());
        entries.push(BlogEntry {
            title,
            description: entry_file.path().to_string_lossy().to_string(),
            path,
        });
    }
    let context = MainPageContext { entries };
    Template::render("main-page", context)
}
/// Configures and launches the Rocket server: routes, static file serving
/// from `./static`, and the template fairing.
fn main() {
    rocket::ignite()
        .mount("/", routes![index, get_entry])
        .mount("/", StaticFiles::from("./static"))
        .attach(Template::fairing())
        .launch();
}
| true
|
fdd0d9a0e8cf2b4deeaf52dbad3da9ad30898122
|
Rust
|
liufuyang/adventofcode-2020
|
/examples/day12_2/main.rs
|
UTF-8
| 2,945
| 3.359375
| 3
|
[
"MIT"
] |
permissive
|
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::str::FromStr;
use adventofcode_2020::error::MyError;
/// Ship state for part 2: absolute position plus a waypoint expressed as an
/// offset relative to the ship.
#[derive(Debug)]
struct Position {
    x: isize,
    y: isize,
    /// Waypoint offset: `.0` is east/west, `.1` is north/south.
    waypoint: (isize, isize),
}
impl Position {
    /// Ship at the origin; the waypoint starts 10 east, 1 north.
    fn new() -> Self {
        Position { x: 0, y: 0, waypoint: (10, 1) }
    }
    /// Applies one instruction: N/S/E/W move the waypoint, L/R rotate it
    /// around the ship, and F moves the ship toward the waypoint `n` times.
    fn apply(&mut self, cmd: &Cmd) {
        match *cmd {
            Cmd::N(n) => self.waypoint.1 += n as isize,
            Cmd::S(n) => self.waypoint.1 -= n as isize,
            Cmd::E(n) => self.waypoint.0 += n as isize,
            Cmd::W(n) => self.waypoint.0 -= n as isize,
            Cmd::L(_) | Cmd::R(_) => self.rotate(cmd),
            Cmd::F(n) => {
                self.x += self.waypoint.0 * n as isize;
                self.y += self.waypoint.1 * n as isize;
            }
            Cmd::None => ()
        }
    }
    // https://www.wikiwand.com/en/Rotation_matrix
    /// Rotates the waypoint around the ship: L rotates counter-clockwise,
    /// R clockwise (negated angle).
    fn rotate(&mut self, dir: &Cmd) {
        match *dir {
            Cmd::L(n) => {
                self.waypoint = rotate(self.waypoint, n as isize);
            }
            Cmd::R(n) => {
                self.waypoint = rotate(self.waypoint, -(n as isize));
            }
            _ => ()
        }
    }
    /// Manhattan distance of the ship from the origin.
    fn get_distance(&self) -> usize {
        self.x.abs() as usize + self.y.abs() as usize
    }
}
/// Rotates the integer vector `input` by `deg` degrees counter-clockwise
/// (negative `deg` rotates clockwise) using the 2D rotation matrix, rounding
/// the result back to integer coordinates.
fn rotate(input: (isize, isize), deg: isize) -> (isize, isize) {
    let (x, y) = (input.0 as f64, input.1 as f64);
    let rad = deg as f64 / 180.0 * std::f64::consts::PI;
    let (sin, cos) = (rad.sin(), rad.cos());
    // Important: -0.999999 as isize gives 0, so round before casting.
    let rotated_x = (x * cos - y * sin).round() as isize;
    let rotated_y = (x * sin + y * cos).round() as isize;
    (rotated_x, rotated_y)
}
/// One navigation instruction; the payload is a distance (N/S/E/W/F) or an
/// angle in degrees (L/R).
#[derive(Debug)]
enum Cmd {
    N(usize),
    S(usize),
    E(usize),
    W(usize),
    L(usize),
    R(usize),
    F(usize),
    /// Fallback produced for unparseable input lines; `Position::apply`
    /// treats it as a no-op.
    None,
}
/// Reads the instruction list, folds it through `Position::apply`, and
/// prints the resulting Manhattan distance (the part 2 answer).
fn main() -> Result<(), Box<dyn Error>> {
    let file = File::open("./examples/day12_2/input.txt")?;
    let position = BufReader::new(file)
        .lines()
        // Unparseable lines become `Cmd::None`, which `apply` ignores.
        .map(|line| line.unwrap().parse::<Cmd>().unwrap_or(Cmd::None))
        .fold(Position::new(), |mut position, next| {
            println!("{:?}, next->{:?}", position, next);
            position.apply(&next);
            println!("{:?}", position);
            position
        });
    println!("{:?}, distance: {}", position, position.get_distance());
    Ok(())
}
impl FromStr for Cmd {
    type Err = Box<dyn Error>;

    /// Parses an instruction such as `"F10"`: one command letter followed by
    /// an unsigned decimal value.
    ///
    /// # Errors
    /// Returns an error for an empty string, an unknown command letter, or a
    /// value that does not parse as `usize`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Fix: the original used `s.split_at(1)`, which panics on an empty
        // string (or a multi-byte first character); fail gracefully instead.
        let (cmd, value) = match s.get(..1) {
            Some(head) => (head, &s[1..]),
            None => return Err(Box::new(MyError::new("Not a Cmd"))),
        };
        let value = value.parse::<usize>()?;
        match cmd {
            "N" => Ok(Cmd::N(value)),
            "S" => Ok(Cmd::S(value)),
            "E" => Ok(Cmd::E(value)),
            "W" => Ok(Cmd::W(value)),
            "L" => Ok(Cmd::L(value)),
            "R" => Ok(Cmd::R(value)),
            "F" => Ok(Cmd::F(value)),
            _ => Err(Box::new(MyError::new("Not a Cmd")))
        }
    }
}
| true
|
b17c859e57d9ae0b6d69e6b453f53e3aa969b61d
|
Rust
|
sirkibsirkib/rust-overlaps
|
/src/prepare.rs
|
UTF-8
| 2,991
| 2.984375
| 3
|
[
"MIT"
] |
permissive
|
use bio::io::fasta;
use bidir_map::BidirMap;
use std::io;
use std::fs::File;
/////////////////////////////
use structs::run_config::{Config, Maps};
/*
builds the maps data structure from a fasta file + config
the "maps" contains most of the constant information for the run
> mappings from internal to external representations of strings ie: id-->"name"
> mappings between internal represenations ie: id<-->index(in text)
> important data ie: text
> some convenient functions ie: get &str (in the text)
*/
/// Reads a FASTA file and builds the `Maps` structure used by the run:
/// all sequences are concatenated (reversed, '$'-separated) into one text,
/// with id<->name and id<->index mappings recorded along the way.
pub fn read_and_prepare(filename : &str, config : &Config) -> Result<(Maps), io::Error> {
    let mut text : Vec<u8> = Vec::new();
    let mut id2name_vec : Vec<String> = Vec::new();
    let mut id2index_bdmap : BidirMap<usize, usize> = BidirMap::new();
    let f = File::open(filename)
        .expect(&format!("Failed to open input file at {:?}\n", filename));
    let reader = fasta::Reader::new(f);
    let mut n_symbols_removed = 0;
    for record in reader.records() {
        let record = record?;
        if let Some(name) = record.id(){
            let id = id2name_vec.len();
            let name = name.to_owned();
            let mut str_vec = record.seq().to_vec();
            // Unless 'N' is part of the alphabet, strip all 'N' symbols and
            // remember how many were dropped for the warning below.
            if !config.n_alphabet{
                let before_len = str_vec.len();
                str_vec.retain(|c|*c != ('N' as u8));
                if str_vec.len() < before_len{
                    n_symbols_removed += before_len - str_vec.len();
                }
            }
            // Sequences are stored reversed, each preceded by a '$' sentinel;
            // `index` points at the first symbol after the sentinel.
            str_vec.reverse();
            text.push('$' as u8);
            let index = text.len();
            id2index_bdmap.insert(id, index);
            text.extend(str_vec.clone());
            id2name_vec.push(name.clone());
            // Optionally also store the reverse complement under a fresh id
            // (same external name).
            if config.reversals{
                let id = id2name_vec.len();
                str_vec.reverse();
                for i in 0..str_vec.len(){
                    str_vec[i] = complement_u8(str_vec[i]);
                }
                text.push('$' as u8);
                let index = text.len();
                id2index_bdmap.insert(id, index);
                text.extend(str_vec);
                id2name_vec.push(name);
            }
        }
    }
    if n_symbols_removed > 0 {
        println!(" WARNING\n\tOmitted {} N symbols found in input data.\n\t\
            Run without flag --no_n to use these N strings intact.", n_symbols_removed);
    }
    // Terminate the whole text with a '#' sentinel.
    text.push('#' as u8);
    text.shrink_to_fit();
    id2name_vec.shrink_to_fit();
    let mut indexes : Vec<usize> = id2index_bdmap.second_col().map(|x| *x).collect();
    indexes.sort();
    indexes.shrink_to_fit();
    let maps = Maps{
        text : text,
        id2name_vec : id2name_vec,
        id2index_bdmap : id2index_bdmap,
        indexes : indexes,
    };
    Ok(maps)
}
/// Returns the Watson-Crick complement of a nucleotide byte; `N` maps to
/// itself.
///
/// Panics on any byte outside `{A, C, G, T, N}`.
fn complement_u8(x : u8) -> u8 {
    match x {
        b'A' => b'T',
        b'T' => b'A',
        b'C' => b'G',
        b'G' => b'C',
        b'N' => b'N',
        other => panic!("Bad string char '{}'", other as char),
    }
}
| true
|
f72b9fc8f1fe283b4b1e1b1e2fcfa0cd58a0926b
|
Rust
|
tuggan/coreutils-rust
|
/src/lib/time/mod.rs
|
UTF-8
| 1,988
| 3.078125
| 3
|
[
"Apache-2.0"
] |
permissive
|
/*
Copyright 2020 Dennis Vesterlund <dennisvesterlund@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use regex::Regex;
pub mod error;
/// Apply suffix for conversion to seconds.
///
/// Valid suffixes:
/// - s: seconds
/// - m: minutes
/// - h: hours
/// - d: days
///
/// # Return
/// Calculated new value without suffix or 0.0 if unrecognised suffix
fn apply_suffix(v: f64, c: char) -> f64 {
    // An unrecognised suffix yields a zero multiplier, i.e. the result is 0.0.
    let seconds_per_unit: f64 = match c {
        's' => 1.0,
        'm' => 60.0,
        'h' => 60.0 * 60.0,
        'd' => 60.0 * 60.0 * 24.0,
        _ => 0.0,
    };
    v * seconds_per_unit
}
/// Parse human readable time intervals and return the interval in seconds.
///
/// Example: `2m` becomes `120`.
///
/// Input must match regex `^(\d+\.)?\d+[smhd]?$`
///
/// # Error
/// If input does not match `^(\d+\.)?\d+[smhd]?$`
pub fn parse_to_seconds(s: &str) -> error::Result<f64> {
    // Overall shape: digits (optionally "<int>." prefixed) plus an optional unit.
    let valid = Regex::new(r"^(\d+\.)?\d+[smhd]?$").unwrap();
    if !valid.is_match(s) {
        return Err(error::TimeError::new(
            1,
            format!("not a valid input string: {}", s).as_str(),
        ));
    }
    if Regex::new(r"[smhd]$").unwrap().is_match(s) {
        // The validation regex guarantees the suffix is a single ASCII
        // character, so byte-based splitting is safe.
        let (number, unit) = s.split_at(s.len() - 1);
        Ok(apply_suffix(
            number.parse().unwrap(),
            unit.chars().next().unwrap(),
        ))
    } else {
        // No suffix: the whole string is the number of seconds.
        Ok(s.parse().unwrap())
    }
}
| true
|
4cf0e630aa007ab44a966677745aedd03b473699
|
Rust
|
vn-ki/autovec-proc-macro
|
/example/tests/test-slice.rs
|
UTF-8
| 246
| 3.03125
| 3
|
[] |
no_license
|
use autovec::autovec;
// NOTE(review): `Point` is not referenced anywhere in this test file —
// confirm it is intentional before removing.
struct Point(i64, i64);
// `#[autovec]` generates a vectorized wrapper: the test below calls `fn_4`
// with a `Vec` of inputs and receives a `Vec` of element-wise results.
#[autovec]
fn fn_4([a, b, c]: [i64; 3]) -> i64 {
    // Product of the three destructured array elements.
    a*b*c
}
/// Verifies that `#[autovec]` lifts `fn_4` element-wise over a `Vec` of
/// fixed-size array inputs (1*2*3 = 6, 3*4*5 = 60).
#[test]
fn test_slice() {
    let a: Vec<_> = vec![
        [1, 2, 3],
        [3, 4, 5]
    ];
    assert_eq!(fn_4(a), vec![6, 60]);
}
| true
|
c4a572e434d4aa4e3593ed3031d9efff9ba62afe
|
Rust
|
truchi/lay
|
/src/style/gen/attributes/slant.rs
|
UTF-8
| 852
| 3.078125
| 3
|
[] |
no_license
|
////////////////////////////////////////////////////////////////////////////////
// 🚨🚨🚨🚨🚨🚨🚨🚨 This file is @generated by build script. 🚨🚨🚨🚨🚨🚨🚨🚨 //
// 🚧🚧🚧🚧🚧🚧🚧🚧 ⛔ DO NOT MODIFY! ⛔ 🚧🚧🚧🚧🚧🚧🚧🚧 //
////////////////////////////////////////////////////////////////////////////////
pub use Slant::*;
/// [`Slant`](crate::Slant) (`Italic`, `ResetSlant`).
///
/// Prints the corresponding CSI to the terminal when `Display`ed.
///
/// `Default`s to `Slant::ResetSlant`, the unsetting CSI.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum Slant {
Italic,
ResetSlant,
}
/// Returns `Slant::ResetSlant`.
impl Default for Slant {
/// Returns `Slant::ResetSlant`.
fn default() -> Self {
Slant::ResetSlant
}
}
| true
|
50e8be231dd2c6e477ee827b6201adf51e2680a3
|
Rust
|
tamaspetz/relox
|
/src/uleb128.rs
|
UTF-8
| 17,756
| 3.5625
| 4
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Unsigned LEB128 encoding
//!
//! https://en.wikipedia.org/wiki/LEB128
use crate::error::{Error, ErrorKind};
const CONTINUE_BIT: u8 = 0x80;
/// Writes an unsigned value as ULEB128 into a buffer
/// and returns the number of bytes written.
///
/// # Errors
///
/// If the provided buffer is smaller than required.
fn write_unsigned(mut value: u32, bytes: &mut [u8]) -> Result<usize, Error> {
    // Low 7 bits of the value: the payload candidate for the next output byte.
    let mut split = (value & 0x7F) as u8;
    for (index, byte) in bytes.iter_mut().enumerate() {
        value = value.wrapping_shr(7);
        if value > 0 {
            // Write byte with continuation bit set.
            *byte = split | CONTINUE_BIT;
            split = (value & 0x7F) as u8;
        } else {
            // Store last byte.
            *byte = split;
            return Ok(index + 1);
        }
    }
    // The buffer was exhausted before the value was fully emitted.
    Err(Error::new(ErrorKind::NotEnoughData))
}
/// Writes an unsigned 8-bit value as ULEB128 into a buffer
/// and returns the number of bytes written.
///
/// A `u8` occupies at most 2 ULEB128 bytes.
///
/// # Errors
///
/// If the provided buffer is smaller than required.
#[allow(unused)]
pub fn write_u8(value: u8, bytes: &mut [u8]) -> Result<usize, Error> {
    write_unsigned(value as u32, bytes)
}
/// Writes an unsigned 16-bit value as ULEB128 into a buffer
/// and returns the number of bytes written.
///
/// A `u16` occupies at most 3 ULEB128 bytes.
///
/// # Errors
///
/// If the provided buffer is smaller than required.
#[allow(unused)]
pub fn write_u16(value: u16, bytes: &mut [u8]) -> Result<usize, Error> {
    write_unsigned(value as u32, bytes)
}
/// Writes an unsigned 32-bit value as ULEB128 into a buffer
/// and returns the number of bytes written.
///
/// A `u32` occupies at most 5 ULEB128 bytes.
///
/// # Errors
///
/// If the provided buffer is smaller than required.
#[allow(unused)]
pub fn write_u32(value: u32, bytes: &mut [u8]) -> Result<usize, Error> {
    write_unsigned(value, bytes)
}
/// Returns an unsigned value decoded from ULEB128 from a buffer and
/// the number of bytes read.
///
/// The decoded bits are OR-ed into `*value`, so callers must clear it first.
/// `shift_max` is the shift position of the last byte permitted for the
/// target type (7 for u8, 14 for u16, 28 for u32 — see the callers) and
/// `last_split_max` bounds that byte's payload so the result still fits.
///
/// # Errors
///
/// If the provided buffer is smaller than required or if the decoded value is
/// greater than the max value of the expected type.
fn read_unsigned(
    bytes: &[u8],
    last_split_max: u32,
    shift_max: u32,
    value: &mut u32,
) -> Result<usize, Error> {
    let mut shift: u32 = 0;
    for (index, byte) in bytes.iter().enumerate() {
        // Payload is the low 7 bits; the top bit signals continuation.
        let split: u32 = (byte & !CONTINUE_BIT) as u32;
        if !cfg!(feature = "no_sanity_check") && (shift == shift_max) && (split > last_split_max) {
            // The final byte carries more bits than the target type can hold.
            return Err(Error::new(ErrorKind::InvalidData));
        } else {
            *value |= split.wrapping_shl(shift);
            if (byte & CONTINUE_BIT) == CONTINUE_BIT {
                shift += 7;
                if !cfg!(feature = "no_sanity_check") && (shift > shift_max) {
                    // Too many continuation bytes for the target type.
                    return Err(Error::new(ErrorKind::InvalidData));
                }
            } else {
                return Ok(index + 1);
            }
        }
    }
    // Continuation bit still set when the buffer ran out.
    Err(Error::new(ErrorKind::NotEnoughData))
}
/// Returns an unsigned 8-bit value decoded from ULEB128 from a buffer
/// and the number of bytes read.
///
/// # Errors
///
/// If the provided buffer is smaller than required or if the decoded value is
/// greater than the max value of the expected type.
#[allow(unused)]
pub fn read_u8(bytes: &[u8], value: &mut u8) -> Result<usize, Error> {
    let mut tmp: u32 = 0;
    // Final byte (shift 7) may contribute at most 1 bit: 2 bytes encode a u8.
    let result = read_unsigned(bytes, 0x01, 7, &mut tmp);
    if result.is_ok() {
        *value = tmp as u8;
    }
    result
}
/// Returns an unsigned 16-bit value decoded from ULEB128 from a buffer
/// and the number of bytes read.
///
/// # Errors
///
/// If the provided buffer is smaller than required or if the decoded value is
/// greater than the max value of the expected type.
#[allow(unused)]
pub fn read_u16(bytes: &[u8], value: &mut u16) -> Result<usize, Error> {
    let mut tmp: u32 = 0;
    // Final byte (shift 14) may contribute at most 2 bits: 3 bytes encode a u16.
    let result = read_unsigned(bytes, 0x03, 14, &mut tmp);
    if result.is_ok() {
        *value = tmp as u16;
    }
    result
}
/// Returns an unsigned 32-bit value decoded from ULEB128 from a buffer
/// and the number of bytes read.
///
/// # Errors
///
/// If the provided buffer is smaller than required or if the decoded value is
/// greater than the max value of the expected type.
#[allow(unused)]
pub fn read_u32(bytes: &[u8], value: &mut u32) -> Result<usize, Error> {
    // `read_unsigned` ORs into `*value`, so it must be cleared first.
    *value = 0;
    read_unsigned(bytes, 0x0F, 28, value)
}
#[cfg(test)]
mod tests {
use super::*;
use rand::prelude::*;
#[test]
fn test_write_u8() {
let mut buffer: [u8; 2] = [0; 2];
// 1 byte
assert_eq!(write_u8(0, &mut buffer[0..0]).is_err(), true);
assert_eq!(write_u8(0, &mut buffer).unwrap(), 1);
assert_eq!(buffer[0], 0);
assert_eq!(write_u8(0x7F, &mut buffer).unwrap(), 1);
assert_eq!(buffer[0], 0x7F);
// 2 bytes
assert_eq!(write_u8(0x80, &mut buffer[0..1]).is_err(), true);
assert_eq!(write_u8(CONTINUE_BIT, &mut buffer).unwrap(), 2);
assert_eq!(buffer[0], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[1], 0x01);
assert_eq!(write_u8(0xFF, &mut buffer).unwrap(), 2);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x01);
}
#[test]
fn test_write_u16() {
let mut buffer: [u8; 3] = [0; 3];
// 1 byte
assert_eq!(write_u16(0, &mut buffer[0..0]).is_err(), true);
assert_eq!(write_u16(0, &mut buffer).unwrap(), 1);
assert_eq!(buffer[0], 0);
assert_eq!(write_u16(0x7F, &mut buffer).unwrap(), 1);
assert_eq!(buffer[0], 0x7F);
// 2 bytes
assert_eq!(write_u16(0x80, &mut buffer[0..1]).is_err(), true);
assert_eq!(write_u16(0x80, &mut buffer).unwrap(), 2);
assert_eq!(buffer[0], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[1], 0x01);
assert_eq!(write_u16(0xFF, &mut buffer).unwrap(), 2);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x01);
assert_eq!(write_u16(0x3F_FF, &mut buffer).unwrap(), 2);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x7F);
// 3 bytes
assert_eq!(write_u16(0x40_00, &mut buffer[0..2]).is_err(), true);
assert_eq!(write_u16(0x40_00, &mut buffer).unwrap(), 3);
assert_eq!(buffer[0], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[1], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[2], 0x01);
assert_eq!(write_u16(0xFF_FF, &mut buffer).unwrap(), 3);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[2], 0x03);
}
#[test]
fn test_write_u32() {
let mut buffer: [u8; 5] = [0; 5];
// 1 byte
assert_eq!(write_u32(0, &mut buffer[0..0]).is_err(), true);
assert_eq!(write_u32(0, &mut buffer).unwrap(), 1);
assert_eq!(buffer[0], 0);
assert_eq!(write_u32(0x7F, &mut buffer).unwrap(), 1);
assert_eq!(buffer[0], 0x7F);
// 2 bytes
assert_eq!(write_u32(0x80, &mut buffer[0..1]).is_err(), true);
assert_eq!(write_u32(0x80, &mut buffer).unwrap(), 2);
assert_eq!(buffer[0], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[1], 0x01);
assert_eq!(write_u32(0xFF, &mut buffer).unwrap(), 2);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x01);
assert_eq!(write_u32(0x3F_FF, &mut buffer).unwrap(), 2);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x7F);
// 3 bytes
assert_eq!(write_u32(0x40_00, &mut buffer[0..2]).is_err(), true);
assert_eq!(write_u32(0x40_00, &mut buffer).unwrap(), 3);
assert_eq!(buffer[0], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[1], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[2], 0x01);
assert_eq!(write_u32(0xFF_FF, &mut buffer).unwrap(), 3);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[2], 0x03);
assert_eq!(write_u32(0x1F_FF_FF, &mut buffer).unwrap(), 3);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[2], 0x7F);
// 4 bytes
assert_eq!(write_u32(0x20_00_00, &mut buffer[0..3]).is_err(), true);
assert_eq!(write_u32(0x20_00_00, &mut buffer).unwrap(), 4);
assert_eq!(buffer[0], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[1], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[2], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[3], 0x01);
assert_eq!(write_u32(0xF_FF_FF_FF, &mut buffer).unwrap(), 4);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[2], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[3], 0x7F);
// 5 bytes
assert_eq!(write_u32(0x10_00_00_00, &mut buffer[0..4]).is_err(), true);
assert_eq!(write_u32(0x10_00_00_00, &mut buffer).unwrap(), 5);
assert_eq!(buffer[0], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[1], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[2], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[3], 0x00 | CONTINUE_BIT);
assert_eq!(buffer[4], 0x01);
assert_eq!(write_u32(0xFF_FF_FF_FF, &mut buffer).unwrap(), 5);
assert_eq!(buffer[0], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[1], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[2], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[3], 0x7F | CONTINUE_BIT);
assert_eq!(buffer[4], 0x0F);
// Specific data
assert_eq!(write_u32(624485, &mut buffer).unwrap(), 3);
assert_eq!(buffer[0], 0xE5);
assert_eq!(buffer[1], 0x8E);
assert_eq!(buffer[2], 0x26);
}
#[test]
fn test_read_u8() {
let mut value: u8 = 0;
assert_eq!(read_u8(&[0x00; 0], &mut value).is_err(), true);
assert_eq!(read_u8(&[CONTINUE_BIT], &mut value).is_err(), true);
assert_eq!(
read_u8(&[CONTINUE_BIT, CONTINUE_BIT], &mut value).is_err(),
true
);
#[cfg(not(feature = "no_sanity_check"))]
assert_eq!(
read_u8(&[CONTINUE_BIT, CONTINUE_BIT, 0], &mut value).is_err(),
true
);
// 1 byte
assert_eq!(read_u8(&[0x00], &mut value).unwrap(), 1);
assert_eq!(value, 0x00);
assert_eq!(read_u8(&[0x7F], &mut value).unwrap(), 1);
assert_eq!(value, 0x7F);
// 2 bytes
assert_eq!(
read_u8(&[0x7F | CONTINUE_BIT, 0x01], &mut value).unwrap(),
2
);
assert_eq!(value, 0xFF);
// Out-of-range
#[cfg(not(feature = "no_sanity_check"))]
assert_eq!(
read_u8(&[0x7F | CONTINUE_BIT, 0x02], &mut value).is_err(),
true
);
}
#[test]
fn test_read_u16() {
let mut value: u16 = 0;
assert_eq!(read_u16(&[0x00; 0], &mut value).is_err(), true);
assert_eq!(read_u16(&[CONTINUE_BIT], &mut value).is_err(), true);
assert_eq!(
read_u16(&[CONTINUE_BIT, CONTINUE_BIT], &mut value).is_err(),
true
);
assert_eq!(
read_u16(&[CONTINUE_BIT, CONTINUE_BIT, CONTINUE_BIT], &mut value).is_err(),
true
);
#[cfg(not(feature = "no_sanity_check"))]
assert_eq!(
read_u16(&[CONTINUE_BIT, CONTINUE_BIT, CONTINUE_BIT, 0], &mut value).is_err(),
true
);
// 1 byte
assert_eq!(read_u16(&[0x00], &mut value).unwrap(), 1);
assert_eq!(value, 0x00);
assert_eq!(read_u16(&[0x7F], &mut value).unwrap(), 1);
assert_eq!(value, 0x7F);
// 2 bytes
assert_eq!(
read_u16(&[0x7F | CONTINUE_BIT, 0x01], &mut value).unwrap(),
2
);
assert_eq!(value, 0xFF);
assert_eq!(
read_u16(&[0x7F | CONTINUE_BIT, 0x7F], &mut value).unwrap(),
2
);
assert_eq!(value, 0x3F_FF);
// 3 bytes
assert_eq!(
read_u16(
&[0x00 | CONTINUE_BIT, 0x00 | CONTINUE_BIT, 0x01],
&mut value
)
.unwrap(),
3
);
assert_eq!(value, 0x40_00);
assert_eq!(
read_u16(
&[0x7F | CONTINUE_BIT, 0x7F | CONTINUE_BIT, 0x03],
&mut value
)
.unwrap(),
3
);
assert_eq!(value, 0xFF_FF);
// Out-of-range
#[cfg(not(feature = "no_sanity_check"))]
assert_eq!(
read_u16(
&[0x7F | CONTINUE_BIT, 0x7F | CONTINUE_BIT, 0x04],
&mut value
)
.is_err(),
true
);
}
#[test]
fn test_read_u32() {
let mut value: u32 = 0;
assert_eq!(read_u32(&[0x00; 0], &mut value).is_err(), true);
assert_eq!(read_u32(&[CONTINUE_BIT], &mut value).is_err(), true);
assert_eq!(
read_u32(&[CONTINUE_BIT, CONTINUE_BIT], &mut value).is_err(),
true
);
assert_eq!(
read_u32(&[CONTINUE_BIT, CONTINUE_BIT, CONTINUE_BIT], &mut value).is_err(),
true
);
assert_eq!(
read_u32(
&[CONTINUE_BIT, CONTINUE_BIT, CONTINUE_BIT, CONTINUE_BIT],
&mut value
)
.is_err(),
true
);
assert_eq!(
read_u32(
&[
CONTINUE_BIT,
CONTINUE_BIT,
CONTINUE_BIT,
CONTINUE_BIT,
CONTINUE_BIT
],
&mut value
)
.is_err(),
true
);
#[cfg(not(feature = "no_sanity_check"))]
assert_eq!(
read_u32(
&[
CONTINUE_BIT,
CONTINUE_BIT,
CONTINUE_BIT,
CONTINUE_BIT,
CONTINUE_BIT,
0
],
&mut value
)
.is_err(),
true
);
// 1 byte
assert_eq!(read_u32(&[0x00], &mut value).unwrap(), 1);
assert_eq!(value, 0x00);
assert_eq!(read_u32(&[0x7F], &mut value).unwrap(), 1);
assert_eq!(value, 0x7F);
// 2 bytes
assert_eq!(
read_u32(&[0x7F | CONTINUE_BIT, 0x01], &mut value).unwrap(),
2
);
assert_eq!(value, 0xFF);
assert_eq!(
read_u32(&[0x7F | CONTINUE_BIT, 0x7F], &mut value).unwrap(),
2
);
assert_eq!(value, 0x3F_FF);
// 3 bytes
assert_eq!(
read_u32(
&[0x00 | CONTINUE_BIT, 0x00 | CONTINUE_BIT, 0x01],
&mut value
)
.unwrap(),
3
);
assert_eq!(value, 0x40_00);
assert_eq!(
read_u32(
&[0x7F | CONTINUE_BIT, 0x7F | CONTINUE_BIT, 0x7F],
&mut value
)
.unwrap(),
3
);
assert_eq!(value, 0x1F_FF_FF);
// 4 bytes
assert_eq!(
read_u32(
&[
0x00 | CONTINUE_BIT,
0x00 | CONTINUE_BIT,
0x00 | CONTINUE_BIT,
0x01
],
&mut value
)
.unwrap(),
4
);
assert_eq!(value, 0x20_00_00);
assert_eq!(
read_u32(
&[
0x7F | CONTINUE_BIT,
0x7F | CONTINUE_BIT,
0x7F | CONTINUE_BIT,
0x7F
],
&mut value
)
.unwrap(),
4
);
assert_eq!(value, 0xF_FF_FF_FF);
// 5 bytes
assert_eq!(
read_u32(
&[
0x00 | CONTINUE_BIT,
0x00 | CONTINUE_BIT,
0x00 | CONTINUE_BIT,
0x00 | CONTINUE_BIT,
0x01
],
&mut value
)
.unwrap(),
5
);
assert_eq!(value, 0x10_00_00_00);
assert_eq!(
read_u32(
&[
0x7F | CONTINUE_BIT,
0x7F | CONTINUE_BIT,
0x7F | CONTINUE_BIT,
0x7F | CONTINUE_BIT,
0x0F
],
&mut value
)
.unwrap(),
5
);
assert_eq!(value, 0xFF_FF_FF_FF);
// Out-of-range
#[cfg(not(feature = "no_sanity_check"))]
assert_eq!(
read_u32(
&[
0x7F | CONTINUE_BIT,
0x7F | CONTINUE_BIT,
0x7F | CONTINUE_BIT,
0x7F | CONTINUE_BIT,
0x1F
],
&mut value
)
.is_err(),
true
);
}
#[test]
fn test_random_u32() {
let mut rng = rand::thread_rng();
let mut buffer: [u8; 5] = [0; 5];
#[allow(unused)]
'assert: for _ in 0..4096 {
let value: u32 = rng.gen();
let mut decoded_value: u32 = 0;
write_u32(value, &mut buffer).unwrap();
read_u32(&buffer, &mut decoded_value).unwrap();
assert_eq!(value, decoded_value);
}
}
}
| true
|
7f28a872e3452ce986e096e6fe7e468fb9aa61d4
|
Rust
|
caryhaynie/lox-rs
|
/src/main.rs
|
UTF-8
| 825
| 3.09375
| 3
|
[] |
no_license
|
use std::env::args;
use std::fs::File;
use std::io;
// imported trait impls.
use std::io::BufRead;
use std::io::Write;
/// Loads a Lox script from `file`.
///
/// # Panics
/// Panics when the file cannot be opened.
fn run_file(file: &str) {
    // TODO: read the contents and feed them to `run`; for now the open only
    // validates that the path exists. (Fix: `_f` silences the unused-variable
    // and unused-`mut` warnings of the original binding.)
    let _f = File::open(file).expect(&format!("failed to open {}!", file));
}
/// Runs the interactive REPL: prompts, reads lines from stdin, and evaluates
/// each non-empty one until EOF.
fn run_prompt() {
    let stdin = io::stdin();
    let mut lines = stdin.lock().lines();
    loop {
        print!("> ");
        // make sure we're not the victim of overly aggressive output buffering.
        io::stdout().flush().expect("failed to flush stdout!");
        match lines.next() {
            // Fix: stop on EOF instead of busy-looping forever re-printing
            // the prompt (the original `if let` fell through on `None`).
            None => break,
            Some(Ok(line)) => {
                if !line.is_empty() { run(&line); }
            }
            // Skip unreadable lines (e.g. invalid UTF-8), as the original did.
            Some(Err(_)) => {}
        }
    }
}
/// Executes a chunk of Lox source code. Not implemented yet.
fn run(code: &str) {
    unimplemented!();
}
/// Entry point: with no arguments starts the REPL, with one argument runs
/// that script, otherwise prints usage.
fn main() {
    match args().len() {
        1 => run_prompt(),
        2 => run_file(&args().nth(1).unwrap()),
        _ => println!("usage: lox [script]")
    }
}
| true
|
022f3a828e3f2a478cb4233b059146c9cc7fad15
|
Rust
|
techno-tanoC/azusa
|
/src/app.rs
|
UTF-8
| 2,020
| 2.5625
| 3
|
[] |
no_license
|
use std::time::Duration;
use std::path::Path;
use std::sync::Arc;
use tokio::fs::File;
use tokio::io::AsyncSeek;
use tokio::prelude::*;
use uuid::Uuid;
use crate::download::Download;
use crate::lock_copy::LockCopy;
use crate::progress::{Progress, ProgressDecorator};
use crate::table::Table;
use crate::error::Result;
/// Shared application state, cloned into request handlers.
#[derive(Clone)]
pub struct App {
    /// HTTP client used for downloads.
    pub client: reqwest::Client,
    /// Copies finished downloads into the destination directory.
    pub lock_copy: LockCopy,
    /// In-flight downloads, keyed by UUID string (see `App::download`).
    pub table: Table<String, Arc<Progress>>,
}
impl App {
    /// Builds the shared state; `path` is handed to `LockCopy`, which copies
    /// finished downloads into place.
    ///
    /// NOTE(review): `danger_accept_invalid_certs(true)` disables TLS
    /// certificate verification for all downloads — confirm this is intended.
    pub fn new(path: impl AsRef<Path>) -> Self {
        let client = reqwest::ClientBuilder::new()
            .connect_timeout(Duration::from_secs(30))
            .danger_accept_invalid_certs(true)
            .build()
            .expect("failed ClientBuilder::build()");
        let lock_copy = LockCopy::new(&path);
        let table = Table::new();
        App { client, lock_copy, table }
    }
    /// Downloads `url` into a temporary file, tracking progress in
    /// `self.table` under a fresh UUID. The progress entry is removed again
    /// whether or not the download succeeded.
    pub async fn download(&self, url: impl AsRef<str>, name: impl AsRef<str>, ext: impl AsRef<str>) -> Result<()> {
        let id = Uuid::new_v4();
        debug!("app::download id: {:?} url: {:?} name: {:?} ext: {:?}", id, url.as_ref(), name.as_ref(), ext.as_ref());
        let file = tempfile::tempfile()?;
        let file = File::from_std(file);
        let pg = Progress::new(name.as_ref());
        self.table.add(id.to_string(), pg.clone()).await;
        let mut deco = ProgressDecorator::new(pg, file);
        // Capture the result first so the table entry is removed on failure too.
        let ret = self.do_download(&mut deco, url, name, ext).await;
        self.table.delete(id.to_string()).await;
        ret
    }
    /// Streams the HTTP response body into `deco`; on success, copies the
    /// result into the destination directory under `name`/`ext`.
    async fn do_download<T>(&self, deco: &mut ProgressDecorator<T>, url: impl AsRef<str>, name: impl AsRef<str>, ext: impl AsRef<str>) -> Result<()>
    where
        T: AsyncRead + AsyncWrite + AsyncSeek + Unpin + Send,
    {
        let mut res = self.client.get(url.as_ref()).send().await?;
        let ret = Download::new(&mut res, deco).run().await;
        if ret.is_ok() {
            self.lock_copy.copy(deco, &name, &ext).await
        } else {
            ret
        }
    }
}
| true
|
a7b5bf730b1e9575422d31f9cc623447beba78bc
|
Rust
|
AsamK/AdventOfCode
|
/src/advent2018/day16.rs
|
UTF-8
| 5,536
| 2.71875
| 3
|
[] |
no_license
|
use self::interpreter_utils::{Instruction, Opcode, Registers, ALL_OPCODES};
use crate::errors::{ACResult, Error};
use nom::{complete, do_parse, many1, many_m_n, map, named, opt, tag, take_while1, terminated};
use std::io::BufRead;
use std::io::Read;
pub mod interpreter_utils;
/// Dispatches to the implementation for the requested puzzle `level` and
/// renders the answer as a string.
pub fn get_result<T: Read + BufRead>(data: T, level: u8) -> ACResult<String> {
    match level {
        1 => level_1(&parse_line(data)?).map(|r| r.to_string()),
        2 => level_2(&parse_line(data)?).map(|r| r.to_string()),
        _ => Err(Error::new(format!("Level {} not implemented", level))),
    }
}
/// Reads the entire input stream into memory and parses it into the samples
/// plus trailing instruction list.
///
/// NOTE(review): despite the name, this consumes the whole stream, not a
/// single line.
fn parse_line<T: Read>(mut data: T) -> ACResult<Input> {
    let mut contents = String::new();
    data.read_to_string(&mut contents)
        .map_err(|_| Error::new_str("Failed to read data"))?;
    parse_input(&contents)
        .map(|x| x.1)
        .map_err(|_e| Error::new("Failed to parse input".to_owned()))
}
const REGISTER_COUNT: usize = 4;
/// A raw instruction whose numeric opcode has not yet been mapped to a
/// concrete operation.
#[derive(Debug)]
struct AnyInstruction {
    opcode: u8,
    input_a: u64,
    input_b: u64,
    output_register: u8,
}
impl AnyInstruction {
    /// Reinterprets this raw instruction under a concrete `opcode`, keeping
    /// operands and the output register unchanged.
    fn to_instruction(&self, opcode: &Opcode) -> Instruction {
        Instruction::new(
            opcode.clone(),
            self.input_a,
            self.input_b,
            self.output_register,
        )
    }
}
/// One observed execution: the register state before and after running
/// `instruction`.
#[derive(Debug)]
struct Sample {
    register_before: Registers,
    register_after: Registers,
    instruction: AnyInstruction,
}
/// Parsed puzzle input: the observed samples followed by the test program.
#[derive(Debug)]
struct Input {
    samples: Vec<Sample>,
    instructions: Vec<AnyInstruction>,
}
// Decimal number as u8 (opcode ids and register indices).
named!(parse_number<&str, u8>,
    complete!(map!(take_while1!(|c: char| c.is_numeric()), |c| c.to_string().parse().unwrap()))
);
// Decimal number as u64 (instruction operands).
named!(parse_number_u64<&str, u64>,
    complete!(map!(take_while1!(|c: char| c.is_numeric()), |c| c.to_string().parse().unwrap()))
);
// One value inside a register list, consuming an optional ", " separator.
named!(parse_register_or_value<&str, u64>,
    do_parse!(
        n: parse_number_u64 >>
        opt!(tag!(", ")) >>
        (n)
    )
);
// A full register dump such as "[3, 2, 1, 1]".
named!(
    parse_register<&str, Registers>,
    do_parse!(
        tag!("[") >>
        registers: many_m_n!(REGISTER_COUNT, REGISTER_COUNT, parse_register_or_value) >>
        tag!("]") >>
        (Registers::new(&registers))
    )
);
// A raw "opcode a b c" instruction line.
named!(
    parse_instruction<&str, AnyInstruction>,
    do_parse!(
        opcode: parse_number >>
        tag!(" ") >>
        input_a: parse_number_u64 >>
        tag!(" ") >>
        input_b: parse_number_u64 >>
        tag!(" ") >>
        output_register: parse_number >>
        (AnyInstruction {
            opcode,
            input_a,
            input_b,
            output_register
        })
    )
);
// One Before/instruction/After observation block, including the trailing
// blank line that separates samples.
named!(
    parse_sample<&str, Sample>,
    do_parse!(
        tag!("Before: ") >>
        register_before: parse_register >>
        tag!("\n") >>
        instruction: parse_instruction >>
        tag!("\n") >>
        tag!("After: ") >>
        register_after: parse_register >>
        tag!("\n") >>
        tag!("\n") >>
        (Sample {
            register_before,
            register_after,
            instruction
        })
    )
);
// The whole input: samples, a blank separator, then the test program.
named!(parse_input<&str, Input>,
    do_parse!(
        samples: many1!(parse_sample) >>
        tag!("\n\n") >>
        instructions: many1!(complete!(terminated!(parse_instruction, tag!("\n")))) >>
        (Input { samples, instructions })
    )
);
/// Returns every opcode that reproduces the sample's observed
/// before → after register transition.
fn get_matching_opcodes(sample: &Sample) -> Vec<Opcode> {
    let mut matching = Vec::new();
    for opcode in ALL_OPCODES.iter() {
        // Execute the candidate against a copy of the "before" registers.
        let mut registers = sample.register_before.clone();
        sample
            .instruction
            .to_instruction(opcode)
            .execute_instruction(&mut registers);
        if registers == sample.register_after {
            matching.push(opcode.clone());
        }
    }
    matching
}
fn get_mapping_from_samples(samples: &[Sample]) -> Vec<Opcode> {
let possible_matches: Vec<(u8, Vec<Opcode>)> = samples
.iter()
.map(|s| (s.instruction.opcode, get_matching_opcodes(s)))
.collect();
let mut mapping = vec![Vec::new(); 16];
for (opcode_id, opcode_matches) in possible_matches {
let old_matches = &mapping[opcode_id as usize];
if old_matches.is_empty() {
mapping[opcode_id as usize] = opcode_matches;
} else {
mapping[opcode_id as usize] = old_matches
.iter()
.filter(|m| opcode_matches.contains(m))
.cloned()
.collect();
}
}
loop {
let singles: Vec<_> = mapping
.iter()
.filter(|l| l.len() == 1)
.map(|l| l[0].clone())
.collect();
if singles.len() == mapping.len() {
break;
}
for m in mapping.iter_mut() {
if m.len() == 1 {
continue;
}
*m = m.drain(..).filter(|o| !singles.contains(o)).collect();
}
}
mapping.iter().map(|codes| codes[0].clone()).collect()
}
/// Part 1: counts samples that behave like three or more opcodes.
fn level_1(input: &Input) -> ACResult<usize> {
    let mut count = 0;
    for sample in input.samples.iter() {
        if get_matching_opcodes(sample).len() >= 3 {
            count += 1;
        }
    }
    Ok(count)
}
/// Part 2: resolves the opcode table from the samples, runs the test
/// program on zeroed registers, and returns register 0.
fn level_2(input: &Input) -> ACResult<u64> {
    let opcode_table = get_mapping_from_samples(&input.samples);
    let mut registers = Registers::empty(REGISTER_COUNT);
    for raw in &input.instructions {
        raw.to_instruction(&opcode_table[raw.opcode as usize])
            .execute_instruction(&mut registers);
    }
    Ok(*registers.get(0))
}
| true
|
929320d88898f3a3787e59f6292aa75a1344fea1
|
Rust
|
herwigstuetz/downloader
|
/src/capi.rs
|
UTF-8
| 1,281
| 2.71875
| 3
|
[] |
no_license
|
use super::downloader;
use std::ffi::{CStr, CString};
use std::os::raw::c_char;
use std::path::Path;
/// Downloads `url` to the directory `tmp` and returns the path to the
/// downloaded file as a heap-allocated C string, or null on any failure
/// (null/invalid arguments, download error, non-UTF-8 path, interior NUL).
///
/// The returned pointer must be released with `dl_free`.
#[no_mangle]
#[allow(clippy::not_unsafe_ptr_arg_deref)]
pub extern "C" fn dl_download(url: *const c_char, tmp: *const c_char) -> *mut c_char {
    if url.is_null() || tmp.is_null() {
        return std::ptr::null_mut();
    }
    // SAFETY: both pointers were just null-checked; the caller guarantees
    // they reference valid NUL-terminated strings.
    let url = match unsafe { CStr::from_ptr(url) }.to_str() {
        Ok(u) => u,
        Err(_) => return std::ptr::null_mut(),
    };
    let tmp = match unsafe { CStr::from_ptr(tmp) }.to_str() {
        Ok(t) => t,
        Err(_) => return std::ptr::null_mut(),
    };
    match downloader::download(url, Path::new(tmp)) {
        Ok(file) => file
            .to_str()
            .and_then(|s| CString::new(s).ok())
            .map(|s| s.into_raw())
            .unwrap_or(std::ptr::null_mut()),
        Err(_) => std::ptr::null_mut(),
    }
}
/// Frees `char` pointers returned by `dl_download`. Passing null is a no-op.
#[no_mangle]
#[allow(clippy::not_unsafe_ptr_arg_deref)]
pub extern "C" fn dl_free(s: *mut c_char) {
    if s.is_null() {
        return;
    }
    // SAFETY: a non-null `s` must have been produced by `CString::into_raw`
    // in `dl_download`; reconstructing the CString returns ownership, and
    // dropping it releases the allocation exactly once.
    drop(unsafe { CString::from_raw(s) });
}
| true
|
4c5f18464fddc19b6da80c2d0ed6f4a60c77afda
|
Rust
|
lvsoso/learn_rust
|
/step_in/rcrash/src/generic/generic_in_struct.rs
|
UTF-8
| 331
| 3.75
| 4
|
[] |
no_license
|
/// A point with two generic coordinate types: `x` and `y` share `T`,
/// while `z` may be a different type `U`.
#[derive(Debug)]
struct Point<T, U> {
    x : T,
    y: T,
    z: U,
}
#[cfg(test)]
mod tests {
    use super::Point;

    /// Instantiates `Point` with different concrete types for `T` and `U`.
    #[test]
    fn test_point() {
        let int_point = Point { x: 5, y: 10, z: 15.0 };
        let float_point = Point { x: 1.0, y: 4.0, z: 8 };
        println!("{:?}", int_point);
        println!("{:?}", float_point);
    }
}
| true
|
f7dce4066b124486fd4c2421f60534dbcb385346
|
Rust
|
getreu/tp-note
|
/tpnote-lib/src/error.rs
|
UTF-8
| 8,718
| 2.9375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Custom error types.
use std::io;
use std::path::PathBuf;
use thiserror::Error;
/// The error `InvalidFrontMatterYaml` prints the front matter section of the
/// note file. This constant limits the number of text lines that are printed.
pub const FRONT_MATTER_ERROR_MAX_LINES: usize = 20;
/// Configuration file related filesystem and syntax errors.
#[derive(Debug, Error)]
pub enum FileError {
    /// Remedy: delete all files in configuration file directory.
    #[error(
        "Can not find unused filename in directory:\n\
        \t{directory:?}\n\
        (only `COPY_COUNTER_MAX` copies are allowed)."
    )]
    NoFreeFileName { directory: PathBuf },
    // Transparent wrappers: the wrapped error supplies the whole message.
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Serialize(#[from] toml::ser::Error),
    #[error(transparent)]
    Deserialize(#[from] toml::de::Error),
}
/// Configuration file related semantic errors.
#[derive(Debug, Error, Clone)]
pub enum LibCfgError {
    // Each variant's `#[error]` string is the user-facing message
    // rendered by `thiserror`.
    /// Remedy: Choose another `sort_tag_extra_separator` character.
    #[error(
        "Configuration file error in section `[filename]`:\n\
        `sort_tag_extra_separator=\"{extra_separator}\"\n\
        must not be one of `sort_tag_chars=\"{chars}\"`\n\
        or `{dot_file_marker}`."
    )]
    SortTagExtraSeparator {
        dot_file_marker: char,
        chars: String,
        extra_separator: String,
    },
    /// Remedy: Insert `sort_tag_separator` in `sort_tag_chars`.
    #[error(
        "Configuration file error in section `[filename]`:\n\
        All characters in `sort_tag_separator=\"{separator}\"\n\
        must be in the set `sort_tag_chars=\"{chars}\"`\n\
        and `sort_tag_separator` must NOT start with `{dot_file_marker}`."
    )]
    SortTagSeparator {
        dot_file_marker: char,
        chars: String,
        separator: String,
    },
    /// Remedy: Choose a `copy_counter_extra_separator` in the set.
    #[error(
        "Configuration file error in section `[filename]`:\n\
        `copy_counter_extra_separator=\"{extra_separator}\"`\n\
        must be one of: \"{chars}\""
    )]
    CopyCounterExtraSeparator {
        chars: String,
        extra_separator: String,
    },
    /// Remedy: check the configuration file variable `arg_default.export_link_rewriting`.
    #[error("choose one of: `off`, `short` or `long`")]
    ParseLocalLinkKind,
    /// Remedy: check the ISO 639-1 codes in the configuration variable
    /// `tmpl.filter_get_lang` and make sure that they are supported, by
    /// checking `tpnote -V`.
    #[error(
        "The ISO 639-1 language subtag `{language_code}`\n\
        in the configuration file variable\n\
        `tmpl.filter_get_lang` or in the environment\n\
        variable `TPNOTE_LANG_DETECTION` is not supported.\n\
        All listed codes must be part of the set:\n\
        {all_langs}."
    )]
    ParseLanguageCode {
        language_code: String,
        all_langs: String,
    },
    /// Remedy: add one more ISO 639-1 code in the configuration variable
    /// `tmpl.filter_get_lang` (or in `TPNOTE_LANG_DETECTION`) and make
    /// sure that the code is supported, by checking `tpnote -V`.
    #[error(
        "Not enough languages to choose from.\n\
        The list of ISO 639-1 language subtags\n\
        currently contains only one item: `{language_code}`.\n\
        Add one more language to the configuration \n\
        file variable `tmpl.filter_get_lang` or to the\n\
        environment variable `TPNOTE_LANG_DETECTION`\n\
        to prevent this error from occurring."
    )]
    NotEnoughLanguageCodes { language_code: String },
}
#[derive(Debug, Error)]
/// Error type returned from methods in or related to the `note` module.
pub enum NoteError {
    /// Remedy: check the file permission of the note file.
    #[error("Can not read file:\n\t {path:?}\n{source}")]
    Read { path: PathBuf, source: io::Error },
    /// Remedy: report this error. It should not happen.
    #[error("Can not prepend header. File has one already: \n{existing_header}")]
    CannotPrependHeader { existing_header: String },
    /// Remedy: check the syntax of the Tera template in the configuration file.
    #[error(
        "Tera template error in configuration file\n\
        variable \"{template_str}\":\n {source_str}"
    )]
    TeraTemplate {
        source_str: String,
        template_str: String,
    },
    /// Remedy: restart with `--debug trace`.
    #[error(
        "Tera error:\n\
        {source}"
    )]
    Tera {
        #[from]
        source: tera::Error,
    },
    /// Remedy: add the missing field in the note's front matter.
    #[error(
        "The document is missing a `{field_name}:`\n\
        field in its front matter:\n\
        \n\
        \t~~~~~~~~~~~~~~\n\
        \t---\n\
        \t{field_name}: \"My note\"\n\
        \t---\n\
        \tsome text\n\
        \t~~~~~~~~~~~~~~\n\
        \n\
        Please correct the front matter if this is\n\
        supposed to be a Tp-Note file. Ignore otherwise."
    )]
    MissingFrontMatterField { field_name: String },
    /// Remedy: enter a string.
    #[error(
        "The value of the front matter field `{field_name}:`\n\
        must be a non empty string."
    )]
    CompulsoryFrontMatterFieldIsEmpty { field_name: String },
    /// Remedy: check YAML syntax in the note's front matter.
    #[error(
        "Can not parse front matter:\n\
        \n\
        {front_matter}\
        \n\
        {source_error}"
    )]
    InvalidFrontMatterYaml {
        front_matter: String,
        source_error: serde_yaml::Error,
    },
    /// Remedy: check YAML syntax in the input stream's front matter.
    #[error(
        "Invalid YAML field(s) in the {tmpl_var} input\n\
        stream data found:\n\
        {source_str}"
    )]
    InvalidInputYaml {
        tmpl_var: String,
        source_str: String,
    },
    /// Remedy: check front matter delimiters `----`.
    #[error(
        "The document (or template) has no front matter\n\
        section. Is one `---` missing?\n\n\
        \t~~~~~~~~~~~~~~\n\
        \t---\n\
        \t{compulsory_field}: \"My note\"\n\
        \t---\n\
        \tsome text\n\
        \t~~~~~~~~~~~~~~\n\
        \n\
        Please correct the front matter if this is\n\
        supposed to be a Tp-Note file. Ignore otherwise."
    )]
    MissingFrontMatter { compulsory_field: String },
    /// Remedy: remove invalid characters.
    #[error(
        "The `sort_tag` header variable contains invalid\n\
        character(s):\n\n\
        \t---\n\
        \tsort_tag = \"{sort_tag}\"\n\
        \t---\n\n\
        Only the characters: \"{sort_tag_chars}\"\n\
        are allowed here."
    )]
    SortTagVarInvalidChar {
        sort_tag: String,
        sort_tag_chars: String,
    },
    /// Remedy: correct the front matter variable `file_ext`.
    #[error(
        "The file extension:\n\
        \t---\n\
        \tfile_ext=\"{extension}\"\n\
        \t---\n\
        is not registered as a valid Tp-Note-file in\n\
        the `filename.extensions_*` variables in your\n\
        configuration file:\n\
        \t{md_ext:?}\n\
        \t{rst_ext:?}\n\
        \t{html_ext:?}\n\
        \t{txt_ext:?}\n\
        \t{no_viewer_ext:?}\n\
        \n\
        Choose one of the listed above or add more\n\
        extensions to the `filename.extensions_*`\n\
        variables in your configuration file."
    )]
    FileExtNotRegistered {
        // Fields are boxed, which keeps this variant (and thus the whole
        // enum and every `Result` carrying it) small.
        extension: Box<String>,
        md_ext: Box<Vec<String>>,
        rst_ext: Box<Vec<String>>,
        html_ext: Box<Vec<String>>,
        txt_ext: Box<Vec<String>>,
        no_viewer_ext: Box<Vec<String>>,
    },
    /// Remedy: check reStructuredText syntax.
    #[error("Can not parse reStructuredText input:\n{msg}")]
    #[cfg(feature = "renderer")]
    RstParse { msg: String },
    // Transparent wrappers: the source error supplies the message.
    #[error(transparent)]
    Utf8Conversion {
        #[from]
        source: core::str::Utf8Error,
    },
    #[error(transparent)]
    File(#[from] FileError),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    ParseLanguageCode(#[from] LibCfgError),
}
/// Macro to construct a `NoteError::TeraTemplate` from a `tera::Error`.
///
/// `$e` is an identifier bound to the `tera::Error`; `$t` is the template
/// string to report.
#[macro_export]
macro_rules! note_error_tera_template {
    ($e:ident, $t:expr) => {
        NoteError::TeraTemplate {
            source_str: std::error::Error::source(&$e)
                .unwrap_or(&tera::Error::msg(""))
                .to_string()
                // Remove useless information.
                .trim_end_matches("in context while rendering '__tera_one_off'")
                .to_string(),
            template_str: $t,
        }
    };
}
| true
|
994a8e4eb70441d248baf9fadd0bda32e5fd3b46
|
Rust
|
mitsuhiko/webgame
|
/webgame_client/src/views/menu.rs
|
UTF-8
| 3,664
| 3.09375
| 3
|
[] |
no_license
|
use yew::agent::Bridged;
use yew::{
html, Bridge, Callback, Component, ComponentLink, Html, InputData, KeyboardEvent, Properties,
ShouldRender,
};
use crate::api::Api;
use crate::protocol::{Command, GameInfo, JoinGameCommand, Message, PlayerInfo};
use crate::utils::format_join_code;
#[derive(Clone, Properties)]
/// Props supplied by the parent: the signed-in player and a callback fired
/// once a game has been joined.
pub struct Props {
    pub player_info: PlayerInfo,
    pub on_game_joined: Callback<GameInfo>,
}
/// Menu screen: lets the player start a new game or join one by code.
pub struct MenuPage {
    link: ComponentLink<MenuPage>,
    // Bridge to the API agent; kept alive for the lifetime of the page.
    api: Box<dyn Bridge<Api>>,
    // Join code as typed, kept in display form (see `format_join_code`).
    join_code: String,
    player_info: PlayerInfo,
    on_game_joined: Callback<GameInfo>,
    // Last server-reported error, rendered below the toolbar.
    error: Option<String>,
}
/// Internal UI messages handled by `MenuPage::update`.
pub enum Msg {
    Ignore,
    NewGame,
    JoinGame,
    // A message pushed from the server via the API bridge.
    ServerMessage(Message),
    SetJoinCode(String),
}
impl Component for MenuPage {
    type Message = Msg;
    type Properties = Props;
    // Wires the API bridge so server pushes arrive as Msg::ServerMessage.
    fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
        let on_server_message = link.callback(Msg::ServerMessage);
        let api = Api::bridge(on_server_message);
        MenuPage {
            link,
            api,
            join_code: "".into(),
            player_info: props.player_info,
            on_game_joined: props.on_game_joined,
            error: None,
        }
    }
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::NewGame => {
                log::info!("New Game");
                self.api.send(Command::NewGame);
            }
            Msg::JoinGame => {
                log::info!("Join Game");
                // The join code is displayed with dashes; strip them
                // before sending to the server.
                self.api.send(Command::JoinGame(JoinGameCommand {
                    join_code: self.join_code.replace("-", ""),
                }));
            }
            Msg::ServerMessage(message) => match message {
                Message::GameJoined(data) => {
                    self.on_game_joined.emit(data);
                }
                Message::Error(err) => {
                    self.error = Some(err.message().to_string());
                }
                // Other server messages are not relevant to this page.
                _ => {}
            },
            Msg::SetJoinCode(join_code) => {
                // Normalize what the user typed into display form.
                self.join_code = format_join_code(&join_code);
            }
            Msg::Ignore => {}
        }
        // Always re-render after handling a message.
        true
    }
    fn view(&self) -> Html {
        html! {
            <div>
                <h1>{"Let's get started"}</h1>
                <p class="intro">{format!("Hello {}!", &self.player_info.nickname)}</p>
                <p class="explanation">{"Start a new game or enter the code of a game to join."}</p>
                <div class="toolbar">
                    <button onclick=self.link.callback(|_| Msg::NewGame)>{"New Game"}</button>
                    <input value=&self.join_code
                        size="7"
                        placeholder="JOINCOD"
                        onkeypress=self.link.callback(|event: KeyboardEvent| {
                            if event.key() == "Enter" {
                                Msg::JoinGame
                            } else {
                                Msg::Ignore
                            }
                        })
                        oninput=self.link.callback(|e: InputData| Msg::SetJoinCode(e.value)) />
                    <button onclick=self.link.callback(|_| Msg::JoinGame)>{"Join Game"}</button>
                </div>
                {
                    if let Some(ref error) = self.error {
                        html! {
                            <p class="error">{format!("uh oh: {}", error)}</p>
                        }
                    } else {
                        html!{}
                    }
                }
            </div>
        }
    }
}
| true
|
f7f9b10c04970a73234e1b06805843b313aa7b94
|
Rust
|
samwhale/advent-of-code
|
/src/exercises/days/day8.rs
|
UTF-8
| 4,471
| 3.453125
| 3
|
[] |
no_license
|
use super::super::super::utils;
use std::iter::Iterator;
/// One layer of the Space Image Format image, with a cached count of its
/// zero digits (used to select the checksum layer).
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Layer {
    num_zeroes: u32,
    layer: Vec<u32>,
}
/// Parses `input[start..end]` (a run of ASCII decimal digits) into one
/// image `Layer`, counting the zero digits along the way.
///
/// Panics if the range is out of bounds / not on char boundaries, or if
/// any character in the range is not a decimal digit.
pub fn create_layer_from_range(input: &str, start: usize, end: usize) -> Layer {
    let mut num_zeroes = 0;
    // Pre-size the buffer: one entry per byte of the ASCII-digit slice.
    let mut layer: Vec<u32> = Vec::with_capacity(end - start);
    for ch in input[start..end].chars() {
        // `to_digit` avoids the per-character String allocation that
        // `ch.to_string().parse::<u32>()` incurred.
        let digit = ch.to_digit(10).expect("image data must be decimal digits");
        if digit == 0 {
            num_zeroes += 1;
        }
        layer.push(digit);
    }
    Layer { layer, num_zeroes }
}
/// Splits the raw digit string into consecutive width×height layers.
pub fn read_image(image: &str, [width, height]: [u32; 2]) -> Vec<Layer> {
    let area = (width * height) as usize;
    // Number of complete layers contained in the input.
    let layer_count = image.len() / area;
    (0..layer_count)
        .map(|i| {
            let start = i * area;
            create_layer_from_range(image, start, start + area)
        })
        .collect()
}
/// Returns a copy of the layer containing the fewest zero digits.
/// Panics if `image` is empty.
pub fn get_layer_with_fewest_zeroes(image: &Vec<Layer>) -> Layer {
    image
        .iter()
        .min_by_key(|layer| layer.num_zeroes)
        .unwrap()
        .clone()
}
/// Image checksum: the number of 1-digits multiplied by the number of
/// 2-digits in the given layer.
pub fn validate_transmission(image: Vec<u32>) -> u32 {
    let mut num_ones: u32 = 0;
    let mut num_twos: u32 = 0;
    image.iter().for_each(|digit| match digit {
        1 => num_ones += 1,
        2 => num_twos += 1,
        _ => {}
    });
    num_ones * num_twos
}
/**
 * Find the color of the pixel that shows in the image
 *
 * 0 is black
 * 1 is white
 * 2 is transparent
 */
pub fn get_pixel_value(image: &Vec<Layer>, index: usize) -> &str {
    for layer in image.iter() {
        let digit = layer.layer[index];
        if digit == 0 {
            return ".";
        }
        if digit == 1 {
            return "#";
        }
        if digit != 2 {
            panic!("Non valid integer");
        }
        // 2 is transparent: fall through to the next layer down.
    }
    // Every layer was transparent at this index.
    " "
}
/// Reshapes a flat list of pixel strings into `num_rows` rows of
/// `num_columns` pixels each. Panics if `image` is too short.
pub fn format_image(image: Vec<String>, [num_columns, num_rows]: [u32; 2]) -> Vec<Vec<String>> {
    (0..num_rows)
        .map(|row| {
            (0..num_columns)
                .map(|col| image[(row * num_columns + col) as usize].clone())
                .collect()
        })
        .collect()
}
/// Flattens the layer stack into visible pixels (topmost opaque digit per
/// position) and arranges them into rows.
pub fn create_image(image: Vec<Layer>, dimensions: [u32; 2]) -> Vec<Vec<String>> {
    let layer_size = image[0].layer.len();
    let pixels: Vec<String> = (0..layer_size)
        .map(|index| get_pixel_value(&image, index).to_string())
        .collect();
    format_image(pixels, dimensions)
}
/// Day 8 driver: prints the part-1 checksum and renders the decoded image.
pub fn main() {
    println!("--- Day 8 ---");
    let input = utils::read_file_into_string("./src/exercises/data/data-day8.txt");
    // Puzzle image is 25 pixels wide and 6 tall.
    let dimensions = [25, 6];
    let layers = read_image(input.trim(), dimensions);
    let best_layer = get_layer_with_fewest_zeroes(&layers);
    println!(
        "num ones * num twos: {:?}",
        validate_transmission(best_layer.layer)
    );
    let rendered = create_image(layers, dimensions);
    println!("Message:");
    for row in rendered {
        println!("{:?}", row.join(""));
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // "123456789012" with 3x2 layers splits into two layers of six digits.
    #[test]
    pub fn read_image_test() {
        let result = vec![
            Layer {
                layer: vec![1, 2, 3, 4, 5, 6],
                num_zeroes: 0,
            },
            Layer {
                layer: vec![7, 8, 9, 0, 1, 2],
                num_zeroes: 1,
            },
        ];
        assert_eq!(read_image("123456789012", [3, 2]), result);
    }
    #[test]
    pub fn get_layer_with_fewest_zeroes_test() {
        let result = Layer {
            layer: vec![0, 0, 1, 1, 1, 1],
            num_zeroes: 2,
        };
        let input = vec![
            Layer {
                layer: vec![0, 0, 0, 1, 1, 1],
                num_zeroes: 3,
            },
            Layer {
                layer: vec![0, 0, 1, 1, 1, 1],
                num_zeroes: 2,
            },
            Layer {
                layer: vec![0, 0, 0, 0, 1, 1],
                num_zeroes: 4,
            },
        ];
        assert_eq!(get_layer_with_fewest_zeroes(&input), result);
    }
    #[test]
    pub fn validate_transmission_test() {
        assert_eq!(validate_transmission(vec![0, 0, 1, 1, 0, 2, 2, 0, 1, 2]), 9);
        assert_eq!(
            validate_transmission(vec![0, 1, 1, 1, 0, 2, 2, 0, 1, 2]),
            12
        );
        assert_eq!(validate_transmission(vec![1, 1, 1, 1, 1]), 0);
        assert_eq!(validate_transmission(vec![2, 2, 2, 2]), 0);
    }
    // Worked example from the puzzle: transparency resolves top-down.
    #[test]
    pub fn create_image_test() {
        let image = read_image("0222112222120000", [2, 2]);
        let result = vec![vec![".", "#"], vec!["#", "."]];
        assert_eq!(create_image(image, [2, 2]), result);
    }
}
| true
|
a18d1f6314c1988930e9a89e36fe8549fe216df2
|
Rust
|
greenmughal/multiinput-rust
|
/src/lib.rs
|
UTF-8
| 966
| 2.546875
| 3
|
[
"MIT"
] |
permissive
|
/*!
rawinput library for rust development on windows
# Usage Example
```no_run
extern crate multiinput;
use multiinput::*;
fn main() {
let mut manager = RawInputManager::new().unwrap();
manager.register_devices(DeviceType::Joysticks);
manager.register_devices(DeviceType::Keyboards);
manager.register_devices(DeviceType::Mice);
'outer: loop{
if let Some(event) = manager.get_event(){
match event{
RawEvent::KeyboardEvent(_, KeyId::Escape, State::Pressed)
=> break 'outer,
_ => (),
}
println!("{:?}", event);
}
}
println!("Finishing");
}
```
*/
extern crate libc;
extern crate winapi;
extern crate kernel32;
extern crate user32;
extern crate hid;
mod mouse;
pub mod event;
mod joystick;
mod rawinput;
mod keyboard;
mod devices;
pub mod manager;
pub use event::*;
pub use manager::*;
| true
|
061c0745caeae1546f6567020f392c6d61a1a901
|
Rust
|
svmk/fund-watch-bot
|
/src/system/app_config/repository_config.rs
|
UTF-8
| 258
| 2.703125
| 3
|
[] |
no_license
|
use std::path::PathBuf;
/// Filesystem settings for the repository store.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RepositoryConfig {
    // Base directory where repository data is kept.
    #[serde(rename="path")]
    path: PathBuf,
}
impl RepositoryConfig {
    /// Returns an owned copy of the configured repository path.
    pub fn get_path(&self) -> PathBuf {
        self.path.clone()
    }
}
| true
|
e112b00073f9fe271185118389990494ad85cbfa
|
Rust
|
1024chen/rust_code_training
|
/rust_book_code/10/1/generics_in_enum/src/main.rs
|
UTF-8
| 161
| 3.03125
| 3
|
[] |
no_license
|
// Demonstrates declaring generic enums, locally shadowing the prelude's
// `Option` and `Result` for illustration (The Rust Book, ch. 10.1).
fn main() {
    // `T` must be declared as a type parameter; the original `enum Option`
    // without `<T>` failed to compile with E0412 (cannot find type `T`).
    #[allow(dead_code)]
    enum Option<T> {
        Some(T),
        None,
    }
    #[allow(dead_code)]
    enum Result<T, E> {
        Ok(T),
        Err(E),
    }
    println!("Hello, world!");
}
| true
|
04539521dbc5576c287cab6f0f88d11f0868edc5
|
Rust
|
dillonhicks/rsnek
|
/rsnek/src/objects/number.rs
|
UTF-8
| 1,425
| 3.125
| 3
|
[] |
no_license
|
//! Native number coercions and comparisons. I think rust already does this with the `Wrapped`
//! traits...
use std;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use num::{ToPrimitive};
use ::system::primitives::{HashId};
use ::system::primitives as rs;
/// Formats a float using its `Debug` representation (Debug on a reference
/// delegates to the value, so output is unchanged).
pub fn format_float(float: &rs::Float) -> rs::String {
    format!("{:?}", float)
}
/// Formats an integer using its `Display` representation.
pub fn format_int(int: &rs::Integer) -> rs::String {
    format!("{}", int)
}
/// Hashes an integer with the standard library's default hasher.
pub fn hash_int(int: &rs::Integer) -> HashId {
    let mut hasher = DefaultHasher::new();
    int.hash(&mut hasher);
    hasher.finish()
}
// To make int == float not such a pain in the ass
/// Comparison adapter borrowing an integer.
pub struct IntAdapter<'a>(pub &'a rs::Integer);
/// Comparison adapter borrowing a float.
pub struct FloatAdapter<'a>(pub &'a rs::Float);
impl<'a, 'b> std::cmp::PartialEq<IntAdapter<'b>> for FloatAdapter<'a> {
    /// A float equals an integer only when the integer is representable
    /// as `f64` and the converted values compare equal.
    fn eq(&self, other: &IntAdapter) -> bool {
        other.0.to_f64().map_or(false, |num| *self.0 == num)
    }
}
impl<'a, 'b> std::cmp::PartialEq<FloatAdapter<'b>> for IntAdapter<'a> {
    /// Mirror of the FloatAdapter comparison, keeping `==` symmetric.
    fn eq(&self, other: &FloatAdapter) -> bool {
        self.0.to_f64().map_or(false, |num| num == *other.0)
    }
}
//
//impl<'a, 'b> std::ops::Add<FloatAdapter<'b>> for IntAdapter<'a> {
// type Output = rs::Float;
//
// fn add(self, rhs: FloatAdapter) -> Self::Output {
// match self.0
// }
//}
| true
|
090a9fec946ce174672f03e634b3fafbd824e64d
|
Rust
|
Ophois47/Daves-NES-Emulator
|
/src/render/frame.rs
|
UTF-8
| 547
| 3.390625
| 3
|
[
"MIT"
] |
permissive
|
/// An RGB frame buffer for rendered output.
pub struct Frame {
    /// Raw RGB24 pixel data, row-major, three bytes per pixel.
    pub data: Vec<u8>,
}
impl Frame {
    const WIDTH: usize = 256 * 2;
    const HIGHT: usize = 240;
    /// Allocates a zeroed (all-black) frame buffer.
    pub fn new() -> Self {
        let byte_count = Frame::WIDTH * Frame::HIGHT * 3;
        Frame { data: vec![0; byte_count] }
    }
    /// Writes one RGB pixel at (x, y); coordinates whose byte range falls
    /// outside the buffer are silently ignored.
    pub fn set_pixel(&mut self, x: usize, y: usize, rgb: (u8, u8, u8)) {
        let (r, g, b) = rgb;
        let base = (y * Frame::WIDTH + x) * 3;
        if let Some(pixel) = self.data.get_mut(base..base + 3) {
            pixel.copy_from_slice(&[r, g, b]);
        }
    }
}
| true
|
9ab2faa31c60776d6c377af1403284fd0ad164c8
|
Rust
|
wasm-network/quicksilver
|
/src/graphics/vertex.rs
|
UTF-8
| 2,642
| 3.375
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use crate::{
geom::{Scalar, Vector},
graphics::{Background, Color, Image}
};
use std::cmp::Ordering;
#[derive(Clone, Copy, Debug)]
/// A vertex for drawing items to the GPU
pub struct Vertex {
    /// The position of the vertex in space
    pub pos: Vector,
    /// If there is a texture attached to this vertex, where to get the texture data from
    ///
    /// It is normalized from 0 to 1
    pub tex_pos: Option<Vector>,
    /// The color to blend this vertex with
    ///
    /// Untextured vertices are rendered with this color directly.
    pub col: Color,
}
impl Vertex {
    /// Create a new GPU vertex
    ///
    /// `pos` is converted into a `Vector`; `tex_pos` is stored as given and
    /// the blend color is taken from the background `bkg`.
    pub fn new(pos: impl Into<Vector>, tex_pos: Option<Vector>, bkg: Background) -> Vertex {
        Vertex {
            pos: pos.into(),
            // `tex_pos` is already `Option<Vector>`; the previous
            // `.map(|pos| pos.into())` was an identity conversion
            // (blanket `From<T> for T`) and has been removed.
            tex_pos,
            col: bkg.color()
        }
    }
}
#[derive(Clone)]
/// A triangle to draw to the GPU
pub struct GpuTriangle {
    /// The plane the triangle falls on
    ///
    /// Drives the draw ordering implemented by `Ord` below.
    pub z: f32,
    /// The indexes in the vertex list that the GpuTriangle uses
    pub indices: [u32; 3],
    /// The (optional) image used by the GpuTriangle
    ///
    /// All of the vertices used by the triangle should agree on whether it uses an image,
    /// it is up to you to maintain this
    pub image: Option<Image>
}
impl GpuTriangle {
    /// Create a new untextured GPU Triangle
    ///
    /// Each index is shifted by `offset` into the shared vertex list.
    pub fn new(offset: u32, indices: [u32; 3], z: impl Scalar, bkg: Background) -> GpuTriangle {
        let [a, b, c] = indices;
        GpuTriangle {
            z: z.float(),
            indices: [a + offset, b + offset, c + offset],
            image: bkg.image().cloned()
        }
    }
}
#[doc(hidden)]
impl PartialEq for GpuTriangle {
fn eq(&self, other: &GpuTriangle) -> bool {
match (&self.image, &other.image) {
(&Some(ref a), &Some(ref b)) => a.get_id() == b.get_id(),
(&None, &None) => true,
_ => false
}
}
}
#[doc(hidden)]
// Sound: `eq` compares only image ids (or both-None), never floats.
impl Eq for GpuTriangle {}
#[doc(hidden)]
impl PartialOrd for GpuTriangle {
    // Delegates to the total order in `Ord`, so this never returns None.
    fn partial_cmp(&self, other: &GpuTriangle) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
#[doc(hidden)]
impl Ord for GpuTriangle {
    // Orders primarily by z; ties — and NaN z, where partial_cmp yields
    // None — fall back to image id, with untextured triangles first.
    fn cmp(&self, other: &GpuTriangle) -> Ordering {
        match self.z.partial_cmp(&other.z) {
            None | Some(Ordering::Equal) =>
                match (&self.image, &other.image) {
                    (&Some(ref a), &Some(ref b)) => a.get_id().cmp(&b.get_id()),
                    (&Some(_), &None) => Ordering::Greater,
                    (&None, &Some(_)) => Ordering::Less,
                    (&None, &None) => Ordering::Equal,
                },
            Some(result) => result
        }
    }
}
| true
|
1734d1f7921afa11af8f89e4b7c9b6ca078bdea1
|
Rust
|
dkull/cryptopals
|
/src/bin/s5c38_weakened_srp_offline_attack.rs
|
UTF-8
| 2,853
| 2.515625
| 3
|
[
"MIT"
] |
permissive
|
extern crate cryptopals;
use std::collections::HashMap;
use std::{thread, time};
use num_bigint::{BigUint, ToBigUint};
// Demonstrates an offline dictionary attack against weakened SRP (set 5,
// challenge 38): a normal auth, a failed auth, then auth against a
// malicious server instance.
pub fn main() {
    // Large public modulus N, parsed from hex — presumably a standard
    // MODP safe prime; TODO confirm against the challenge text.
    let N = BigUint::parse_bytes(
        "ffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024\
         e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd\
         3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec\
         6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f\
         24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361\
         c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552\
         bb9ed529077096966d670c354e4abc9804f1746c08ca237327fff\
         fffffffffffff"
            .to_string()
            .as_bytes(),
        16,
    )
    .unwrap();
    let username = "admin".to_string();
    let password = "p@55w0rd".to_string();
    let connstring = "localhost:7878".to_string();
    let connstring2 = "localhost:7979".to_string();
    let mut users: HashMap<String, String> = HashMap::new();
    users.insert(username.clone(), password.clone());
    // Honest server (last argument `false` = not evil).
    cryptopals::weakened_srp::WeakenedSRPServer::start(
        connstring.clone(),
        N.clone(),
        2_usize.to_biguint().unwrap(),
        3_usize.to_biguint().unwrap(),
        users.clone(),
        false,
    );
    // Give the server thread a moment to bind its socket.
    let sleep_time = time::Duration::from_millis(100);
    thread::sleep(sleep_time);
    println!("=== Normal auth");
    let authenticated = cryptopals::weakened_srp::WeakenedSRPClient::auth(
        connstring.clone(),
        N.clone(),
        2_usize.to_biguint().unwrap(),
        3_usize.to_biguint().unwrap(),
        username.clone(),
        password.clone(),
    );
    println!("authed as \"{}\": {}", &username, authenticated);
    println!("=== Bad auth (wrong password)");
    /*
    let's override A and hardcode S to be 0
    */
    let authenticated = cryptopals::weakened_srp::WeakenedSRPClient::auth(
        connstring.clone(),
        N.clone(),
        2_usize.to_biguint().unwrap(),
        3_usize.to_biguint().unwrap(),
        username.clone(),
        "wrong_password".to_string(),
    );
    println!("authed as \"{}\" : {}", username, authenticated);
    /*
    let's start another server instance - evil this time
    */
    println!("=== Starting attack server");
    cryptopals::weakened_srp::WeakenedSRPServer::start(
        connstring2.clone(),
        N.clone(),
        2_usize.to_biguint().unwrap(),
        3_usize.to_biguint().unwrap(),
        users,
        true,
    );
    thread::sleep(sleep_time);
    /*
    let's override A and hardcode S to be 0
    */
    let authenticated = cryptopals::weakened_srp::WeakenedSRPClient::auth(
        connstring2.clone(),
        N.clone(),
        2_usize.to_biguint().unwrap(),
        3_usize.to_biguint().unwrap(),
        username.clone(),
        password,
    );
    println!("authed as \"{}\" : {}", username, authenticated);
}
| true
|
f978e7b54fb034637f62ae7451945a2377488098
|
Rust
|
Voxelot/transaction-processor
|
/src/domain/engine.rs
|
UTF-8
| 5,746
| 2.921875
| 3
|
[] |
no_license
|
use crate::domain::model::{
Chargeback, Client, Deposit, Dispute, Resolve, Transaction, TransactionStatus, Withdrawal,
};
use crate::domain::ports::{
ClientRepository, ClientUpdate, Engine, EngineConfig, EngineErrors, EngineResult,
TransactionRepositoryErrors, TransactionsRepository,
};
use async_trait::async_trait;
use futures::prelude::stream::BoxStream;
use futures::{StreamExt, TryStreamExt};
/// Transaction engine parameterized over the client and transaction
/// repository implementations supplied by an `EngineConfig`.
#[derive(Default, Debug)]
pub struct TransactionEngine<T: EngineConfig> {
    clients: T::ClientRepository,
    transactions: T::TransactionRepository,
}
#[async_trait]
impl<T> Engine for TransactionEngine<T>
where
    T: EngineConfig,
{
    /// Routes one incoming transaction to its type-specific handler.
    async fn process_transaction(&mut self, transaction: Transaction) -> EngineResult {
        match transaction {
            Transaction::Deposit(deposit) => self.process_deposit(deposit).await,
            Transaction::Withdrawal(withdrawal) => self.process_withdrawal(withdrawal).await,
            Transaction::Dispute(dispute) => self.process_dispute(dispute).await,
            Transaction::Resolve(resolve) => self.process_resolve(resolve).await,
            Transaction::Chargeback(chargeback) => self.process_chargeback(chargeback).await,
        }
    }
    /// Streams every known client, converting repository errors into
    /// `EngineErrors::ClientError`.
    async fn get_clients(
        &self,
    ) -> Result<BoxStream<'static, Result<Client, EngineErrors>>, EngineErrors> {
        Ok(self
            .clients
            .get_all()
            .await?
            .map_err(EngineErrors::ClientError)
            .boxed())
    }
}
impl<T> TransactionEngine<T>
where
    T: EngineConfig,
{
    /// Records a deposit and credits the client's available/total balances.
    async fn process_deposit(&mut self, deposit: Deposit) -> EngineResult {
        // Idempotency guard: only act when the tx id is unseen; a repeated
        // deposit id is silently ignored.
        if let Err(TransactionRepositoryErrors::TransactionNotFound(_)) =
            self.transactions.get_transaction_status(&deposit.tx).await
        {
            self.transactions
                .store_transaction_value(deposit.tx, deposit.amount.clone())
                .await?;
            self.transactions
                .store_transaction_status(deposit.tx, TransactionStatus::Processed)
                .await?;
            self.clients
                .update(
                    &deposit.client,
                    ClientUpdate::Deposit {
                        available_increase: deposit.amount.clone(),
                        total_increase: deposit.amount,
                    },
                )
                .await?;
        }
        Ok(())
    }
    /// Debits the client when funds suffice; otherwise silently ignores
    /// the withdrawal.
    async fn process_withdrawal(&mut self, withdrawal: Withdrawal) -> EngineResult {
        let client = self.clients.get(&withdrawal.client).await?;
        // NOTE(review): `>` rejects withdrawing the exact available
        // balance — confirm whether `>=` was intended. Also, unlike
        // deposits, withdrawals are not recorded in the transactions
        // repository, so they cannot be disputed later — verify intent.
        if client.available > withdrawal.amount {
            self.clients
                .update(
                    &withdrawal.client,
                    ClientUpdate::Withdrawal {
                        available_decrease: withdrawal.amount.clone(),
                        total_decrease: withdrawal.amount.clone(),
                    },
                )
                .await?;
        }
        Ok(())
    }
    /// Moves the disputed amount from available to held and marks the
    /// transaction as disputed.
    async fn process_dispute(&mut self, dispute: Dispute) -> EngineResult {
        let status = self
            .transactions
            .get_transaction_status(&dispute.tx)
            .await?;
        // Only handle dispute if transaction is in the base processed state
        if status == TransactionStatus::Processed {
            let amount = self.transactions.get_transaction_value(&dispute.tx).await?;
            self.transactions
                .store_transaction_status(dispute.tx, TransactionStatus::Disputed)
                .await?;
            self.clients
                .update(
                    &dispute.client,
                    ClientUpdate::Dispute {
                        available_decrease: amount.clone(),
                        held_increase: amount,
                    },
                )
                .await?;
        }
        Ok(())
    }
    /// Releases held funds back to available for a disputed transaction.
    /// A resolved transaction stays in `Resolved`, so it cannot be
    /// disputed a second time (dispute requires `Processed`).
    async fn process_resolve(&mut self, resolve: Resolve) -> EngineResult {
        let state = self
            .transactions
            .get_transaction_status(&resolve.tx)
            .await?;
        // only process resolution if transaction is in a disputed state
        if state == TransactionStatus::Disputed {
            let amount = self.transactions.get_transaction_value(&resolve.tx).await?;
            self.transactions
                .store_transaction_status(resolve.tx, TransactionStatus::Resolved)
                .await?;
            self.clients
                .update(
                    &resolve.client,
                    ClientUpdate::Resolve {
                        available_increase: amount.clone(),
                        held_decrease: amount.clone(),
                    },
                )
                .await?;
        }
        Ok(())
    }
    /// Removes held funds permanently (held and total both decrease) and
    /// marks the transaction as charged back.
    async fn process_chargeback(&mut self, chargeback: Chargeback) -> EngineResult {
        let state = self
            .transactions
            .get_transaction_status(&chargeback.tx)
            .await?;
        // only process chargeback if transaction is currently disputed
        if state == TransactionStatus::Disputed {
            let amount = self
                .transactions
                .get_transaction_value(&chargeback.tx)
                .await?;
            self.transactions
                .store_transaction_status(chargeback.tx, TransactionStatus::ChargedBack)
                .await?;
            self.clients
                .update(
                    &chargeback.client,
                    ClientUpdate::Chargeback {
                        held_decrease: amount.clone(),
                        total_decrease: amount,
                    },
                )
                .await?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests;
| true
|
7ff6fdb0150f4a7fe51a07954b927ac1dc89a32c
|
Rust
|
brotheryeska/domain_try
|
/src/pages/authentication/passwordless/email_setting.rs
|
UTF-8
| 4,551
| 2.578125
| 3
|
[] |
no_license
|
use yew::prelude::*;
/// Render-only settings form for the passwordless e-mail connection.
pub struct EmailSettings {}
/// No messages are handled yet; placeholder for future interactivity.
pub enum Msg {}
impl Component for EmailSettings {
    type Message = Msg;
    type Properties = ();
    /// Builds the (stateless) settings panel.
    fn create(_: Self::Properties, _: ComponentLink<Self>) -> Self {
        EmailSettings {}
    }
    /// No messages exist for this component; always allow a re-render.
    fn update(&mut self, _msg: Self::Message) -> ShouldRender {
        true
    }
    /// Properties are `()`, so a property change can never require a re-render.
    fn change(&mut self, _: Self::Properties) -> ShouldRender {
        false
    }
    /// Renders the e-mail connection configuration form.
    fn view(&self) -> Html {
        html! {
            <div class="p-2" style="font-size: 14px;">
                <div class="mb-3">
                    <label class="form-label">{"Connection"}</label>
                    <input class="form-control" type="text" value="email" aria-label="Disabled input example" disabled=true readonly=true/>
                    <p>{"If you are triggering a login manually, this is the identifier you would use on the connection parameter"}</p>
                </div>
                <div class="mb-3">
                    <label class="form-label">{"From"}</label>
                    <input class="form-control" type="text" placeholder="myname@mycompany.com" aria-label="from input example"/>
                </div>
                <div class="mb-3">
                    <label class="form-label">{"Subject"}</label>
                    <input class="form-control" type="text" placeholder="Welcome to {{application name}}" aria-label="app subject input example"/>
                </div>
                <div class="mb-3 w-75">
                    <label class="form-label">{"Body"}</label>
                    <p>{"The content of the email your users are going to receive."}</p>
                    <div class="mb-2 row 2-50">
                        <div class="col">
                            <button type="button" class="text-color-primary btn btn-secondary btn-default:hover" style="background-color:#96989d !important; font-size:12px; ">{"Reset to last saved"}</button>
                        </div>
                        <div class="col">
                            <button type="button" class="text-color-primary btn btn-secondary btn-default:hover" style="background-color:#96989d !important; font-size:12px;">{"Reset to default"}</button>
                        </div>
                    </div>
                </div>
                <div class="mb-3">
                    <label class="form-label">{"Authentication Parameter"}</label>
                    <div class="form-floating">
                        <textarea class="form-control text-light pt-1" placeholder="Your verification code is: @@password@@" style="background-color:rgb(47,56,61); height:100px;" id="floatingTextarea2"></textarea>
                    </div>
                    <p>{"Query string parameters to be included as part of the generated link.
                    "}</p>
                </div>
                <div class="mb-3">
                    <label class="form-label">{"OTP Expiry"}</label>
                    <div class="input-group mb-3">
                        <input type="number" class="form-control" min="1" aria-label="Recipient's username" aria-describedby="otpExpiry"/>
                        <span class="input-group-text" id="otpExpiry">{"seconds"}</span>
                    </div>
                    <p>{"The time step, in seconds, between new passwords."}</p>
                </div>
                <div class="mb-3">
                    <label class="form-label">{"OTP Length"}</label>
                    <div class="input-group mb-3">
                        <input type="number" class="form-control" min="1" aria-label="Recipient's username" aria-describedby="otpLength"/>
                        // Fix copy-paste from the OTP Expiry section: a length is
                        // measured in digits, not seconds.
                        <span class="input-group-text" id="otpLength">{"digits"}</span>
                    </div>
                    <p>{"The length of the resulting one-time password."}</p>
                </div>
                <div class="mb-3">
                    <label class="form-label">{"Disable Sign Ups"}</label>
                    <div class="form-check form-switch">
                        <input class="form-check-input" type="checkbox" id="flexSwitchCheckDefault"/>
                    </div>
                    <p class="text-muted">{"Check this if you want to prevent sign ups to your application. You will still be able to create users with your API credentials."}</p>
                </div>
                <div class="modal-footer">
                    <button type="button" class="btn btn-primary">{"Save"}</button>
                </div>
            </div>
        }
    }
}
| true
|
3113eea42e10a25c69c18e999ebaa14a71f53f03
|
Rust
|
Dzejkop/catchr
|
/core/tests/parsing.rs
|
UTF-8
| 1,184
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
use catchr_core::{Section, SectionBody, SectionItem, SectionKeyword};
/// Parses `s` into a `T`, panicking with a message that includes the
/// offending input and the parse error (a bare `unwrap()` would hide both).
fn syn_parse<T: syn::parse::Parse>(s: impl AsRef<str>) -> T {
    let s = s.as_ref();
    syn::parse_str(s).unwrap_or_else(|e| panic!("failed to parse {:?}: {}", s, e))
}
#[test]
fn empty_when_section() {
    // A `when` section with an empty body should parse to a Section with
    // the `When` keyword, the quoted description, and no items.
    let raw = r#"
        when "Hello!" {
        }
    "#;
    let section = syn::parse_str::<Section>(raw).unwrap();
    assert_eq!(
        section,
        Section::new(
            SectionKeyword::When,
            "Hello!".to_string(),
            SectionBody::empty(),
        )
    );
}
#[test]
fn nested_one() {
    // A `then` section nested inside a `when` section should appear as a
    // `SectionItem::Sep` child, with its statement parsed as a `Stmt` item.
    let raw = r#"
        when "Hello!" {
            then "Whatever" {
                assert!(false);
            }
        }
    "#;
    let section = syn::parse_str::<Section>(raw).unwrap();
    assert_eq!(
        section,
        Section::new(
            SectionKeyword::When,
            "Hello!".to_string(),
            SectionBody::new(vec![SectionItem::Sep(Section::new(
                SectionKeyword::Then,
                "Whatever".to_string(),
                SectionBody::new(vec![SectionItem::Stmt(syn_parse(
                    "assert!(false);"
                ))]),
            ))]),
        )
    );
}
// TODO: More tests!
| true
|
62d2d862460b014089ef5cab32d2826d81387415
|
Rust
|
rust-rosetta/rust-rosetta
|
/tasks/guess-the-number/src/main.rs
|
UTF-8
| 627
| 3.671875
| 4
|
[
"Unlicense"
] |
permissive
|
extern crate rand;
use std::io::stdin;
use rand::{thread_rng, Rng};
/// Number-guessing game: picks a secret 1..=10 and loops until the player
/// types it, re-prompting on wrong guesses or non-integer input.
fn main() {
    let mystery_number = thread_rng().gen_range(1u8..=10);
    println!("Guess my number between 1 and 10");
    let input = stdin();
    // Reuse one buffer across iterations instead of allocating a fresh
    // String per guess.
    let mut line = String::new();
    loop {
        line.clear();
        input.read_line(&mut line).expect("failed to read from stdin");
        match line.trim().parse::<u8>() {
            Ok(guess) if guess == mystery_number => break,
            Ok(_) => println!("Wrong! Try again!"),
            Err(_) => println!("Please enter an integer"),
        }
    }
    // The loop ends only if the player wins
    println!("Well guessed!");
}
| true
|
a1bcd1022fbcf00d00f4c976b0afe9b8306a6923
|
Rust
|
milliams/plotlib
|
/examples/letter_counter.rs
|
UTF-8
| 841
| 3.109375
| 3
|
[
"MIT"
] |
permissive
|
use std::collections::btree_map::BTreeMap;
/// Counts the alphabetic characters of a fixed message and renders the
/// frequencies as a categorical bar chart saved to `barchart.svg`.
fn main() {
    let mut data = Vec::new();
    let message: &str = "This is a long message";
    // Character -> occurrence count, ordered alphabetically by BTreeMap.
    let mut count = BTreeMap::new();
    for c in message.trim().to_lowercase().chars() {
        if c.is_alphabetic() {
            *count.entry(c).or_insert(0) += 1
        }
    }
    // Fix typo in the printed heading ("occurences" -> "occurrences").
    println!("Number of occurrences per character");
    for (ch, count) in &count {
        println!("{:?}: {}", ch, count);
        let count = *count as f64;
        data.push(plotlib::repr::BarChart::new(count).label(ch.to_string()));
    }
    // Add data to the view
    let v = data
        .into_iter()
        .fold(plotlib::view::CategoricalView::new(), |view, datum| {
            view.add(datum)
        });
    plotlib::page::Page::single(&v)
        .save("barchart.svg")
        .expect("saving svg");
}
| true
|
b76799fb01db17cc0480c45fbb742a8a1f481f89
|
Rust
|
Origen-SDK/o2
|
/rust/origen/src/prog_gen/model/flow_id.rs
|
UTF-8
| 1,686
| 3.125
| 3
|
[
"MIT"
] |
permissive
|
/// Used to uniquely identify a test in a flow
#[derive(Clone, Serialize, PartialEq, Hash, Eq)]
pub struct FlowID {
    // Always lowercase for string-sourced IDs; see `from_str`.
    id: String,
    // Zero-sized private field: prevents construction outside this module,
    // forcing use of the `from_str`/`from_int`/`new` constructors.
    _private: (),
}
impl FlowID {
    /// Generate a new ID from a string. No checking is done at the point of creation
    /// to guarantee uniqueness, but it will be checked later in the generation process.
    /// String-based IDs are forced to lowercase to enable case-insensitive comparisions.
    pub fn from_str(id: &str) -> FlowID {
        FlowID {
            id: id.to_lowercase(),
            _private: (),
        }
    }
    /// Generate a new ID from an integer. No checking is done at the point of creation
    /// to guarantee uniqueness, but it will be checked later in the generation process.
    /// The resulting ID is "t<id>", e.g. `from_int(3)` -> "t3".
    pub fn from_int(id: usize) -> FlowID {
        FlowID {
            id: format!("t{}", id),
            _private: (),
        }
    }
    /// Generate a new unique ID
    pub fn new() -> FlowID {
        FlowID::from_int(crate::STATUS.generate_unique_id())
    }
    /// Returns true if the ID refers to a test external to this flow, defined by the
    /// ID starting with "extern" (so IDs like "extern_..." qualify).
    pub fn is_external(&self) -> bool {
        self.id.starts_with("extern")
    }
    /// Returns the bare ID string. NOTE(review): this inherent method shadows
    /// `ToString::to_string` from the `Display` impl, which renders
    /// `FlowID("...")` instead — callers get different output depending on
    /// which one resolves.
    pub fn to_string(&self) -> String {
        self.id.clone()
    }
    /// Borrow the bare ID string.
    pub fn to_str(&self) -> &str {
        &self.id
    }
}
// Display and Debug intentionally render identically as `FlowID("<id>")`.
impl std::fmt::Display for FlowID {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "FlowID(\"{}\")", self.id)
    }
}
impl std::fmt::Debug for FlowID {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "FlowID(\"{}\")", self.id)
    }
}
| true
|
82eba03ed9bd68551ca0865777c654da302b37de
|
Rust
|
mohaque0/mib
|
/build/src/executor.rs
|
UTF-8
| 4,876
| 3.3125
| 3
|
[
"MIT"
] |
permissive
|
use ::Context;
use ::Task;
use failure::Error;
use std::collections::HashMap;
pub struct Executor<'ctx> {
context: &'ctx Context,
state: HashMap<String, ExecutionItem<'ctx>>
}
struct ExecutionItem<'ctx> {
task: &'ctx Task,
state: ExecutionState
}
#[derive(Clone, Copy, PartialEq, Eq)]
enum ExecutionState {
NotExecuted,
Done
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ExecutionBehavior {
Skipped,
Executed
}
#[derive(Debug, Fail)]
enum ExecutionError {
#[fail(display = "dependency not found: {}", name)]
UnknownDependency{ name: String }
}
impl<'ctx> Executor<'ctx> {
    /// Creates an executor bound to `ctx`; nothing runs until `execute`.
    pub fn new(ctx: &Context) -> Executor {
        Executor {
            context: ctx,
            state: HashMap::new()
        }
    }
    /// Returns the (lazily created) execution record for `task`.
    /// Fails if the context does not know the task.
    fn get_execution(&mut self, task: &str) -> Result<&mut ExecutionItem<'ctx>, Error> {
        let task = task.to_string();
        if !self.state.contains_key(&task) {
            self.state.insert(
                task.to_string(),
                ExecutionItem {
                    task: self.context.get_task(&task)?,
                    state: ExecutionState::NotExecuted
                }
            );
        }
        // The key was inserted above if absent, so the None arm is
        // effectively unreachable; kept for safety.
        match self.state.get_mut(&task) {
            Some(t) => Ok(t),
            None => Err(ExecutionError::UnknownDependency{name: task})?
        }
    }
    /// Recursively executes `task`'s dependencies, then `task` itself
    /// (at most once per run). Returns whether anything actually ran.
    fn execute_helper(&mut self, task: &str) -> Result<ExecutionBehavior, Error> {
        let mut dep_behavior = ExecutionBehavior::Skipped;
        let mut ret_behavior = ExecutionBehavior::Skipped;
        let deps = self.context.get_task_deps(task)?.clone();
        let task = task.to_string();
        debug!("Considering: {} with dependencies: {:?}", task, deps);
        for dep in deps {
            let behavior = self.execute_helper(&dep)?;
            // If any of the dependencies execute, then we consider the behavior to be "executed."
            if dep_behavior == ExecutionBehavior::Skipped {
                dep_behavior = behavior
            }
        }
        let exec_item = self.get_execution(&task)?;
        if exec_item.state() == ExecutionState::NotExecuted {
            // If any dependency executed or if we need execution.
            if dep_behavior == ExecutionBehavior::Executed || exec_item.task().needs_execution() {
                info!("Executing: {}", task);
                exec_item.task().execute()?;
                ret_behavior = ExecutionBehavior::Executed;
            } else {
                info!("Skipping: {}", task);
            }
            // Mark done even when skipped, so the task is not revisited.
            exec_item.set_done();
        }
        debug!("Considering: {}. Behavior: {:?}", task, ret_behavior);
        Ok(ret_behavior)
    }
    /// Public entry point: runs `task` and its transitive dependencies.
    pub fn execute(&mut self, task: &str) -> Result<(), Error> {
        self.execute_helper(task)?;
        Ok(())
    }
}
impl<'a> ExecutionItem<'a> {
    /// The underlying task.
    fn task(&self) -> &Task {
        self.task
    }
    /// Current execution state for this run.
    fn state(&self) -> ExecutionState {
        self.state
    }
    /// Marks the task as processed (executed or deliberately skipped).
    fn set_done(&mut self) {
        self.state = ExecutionState::Done
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;
    use std::cell::RefCell;
    use std::path::PathBuf;
    use std::ops::DerefMut;
    use std::rc::Rc;
    /// Task stub that records execution by flipping a shared flag.
    struct TestTask {
        flag: Rc<RefCell<bool>>
    }
    impl TestTask {
        fn new(flag: Rc<RefCell<bool>>) -> TestTask {
            TestTask {
                flag: flag
            }
        }
    }
    impl Task for TestTask {
        fn needs_execution(&self) -> bool {
            true
        }
        fn execute(&self) -> Result<(), Error> {
            *self.flag.borrow_mut().deref_mut() = true;
            Ok(())
        }
    }
    /// Fresh shared flag, initially false.
    fn flag() -> Rc<RefCell<bool>> {
        Rc::new(RefCell::new(false))
    }
    #[test]
    fn execution() {
        let flag0 = flag();
        let flag1 = flag();
        let flag2 = flag();
        let flag3 = flag();
        let flag4 = flag();
        let mut ctx = Context::new();
        ctx.add_task("task0", Box::new(TestTask::new(flag0.clone())));
        ctx.add_task("task1", Box::new(TestTask::new(flag1.clone())));
        ctx.add_task("task2", Box::new(TestTask::new(flag2.clone())));
        ctx.add_task("task3", Box::new(TestTask::new(flag3.clone())));
        ctx.add_task("task4", Box::new(TestTask::new(flag4.clone())));
        // Graph: task0 -> task1 -> {task2, task3}; task4 is disconnected.
        ctx.task_mut("task0").unwrap().depends_on("task1").unwrap();
        ctx.task_mut("task1").unwrap().depends_on("task2").unwrap();
        ctx.task_mut("task1").unwrap().depends_on("task3").unwrap();
        let mut executor = Executor::new(&ctx);
        executor.execute("task0").unwrap();
        // Everything reachable from task0 ran; the disconnected task did not.
        assert_eq!(*(*flag0).borrow(), true);
        assert_eq!(*(*flag1).borrow(), true);
        assert_eq!(*(*flag2).borrow(), true);
        assert_eq!(*(*flag3).borrow(), true);
        assert_eq!(*(*flag4).borrow(), false);
    }
}
| true
|
aef62825744754349a8b082dbbd36a1680e065ca
|
Rust
|
immunant/c2rust
|
/c2rust-refactor/src/select/visitor.rs
|
UTF-8
| 12,854
| 2.90625
| 3
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
//! Visitors for implementing `ChildMatch`, `DescMatch`, and `Filter`, which need to walk the AST
//! and inspect the currently selected nodes.
use std::collections::HashSet;
use syntax::ast::*;
use syntax::source_map::Span;
use syntax::visit::{self, FnKind, Visitor};
use crate::command::CommandState;
use crate::select::filter::{self, AnyNode};
use crate::select::Filter;
use crate::RefactorCtxt;
/// Visitor backing `matching_children`: selects nodes whose *direct parent*
/// is in the `old` set and which match the filter.
struct ChildMatchVisitor<'a, 'tcx: 'a> {
    st: &'a CommandState,
    cx: &'a RefactorCtxt<'a, 'tcx>,
    // Previously selected nodes.
    old: HashSet<NodeId>,
    // Newly selected nodes (the result).
    new: HashSet<NodeId>,
    /// Are we at a child of a node that was selected in the `old` set?
    in_old: bool,
    filt: &'a Filter,
}
impl<'ast, 'a, 'tcx> ChildMatchVisitor<'a, 'tcx> {
    /// Does `node` satisfy the user-supplied filter?
    fn matches(&self, node: AnyNode) -> bool {
        filter::matches_filter(self.st, self.cx, node, self.filt)
    }
    /// Runs `func` with `in_old` reflecting whether `id` is selected,
    /// restoring the previous value afterwards — so `in_old` is true only
    /// for direct children of selected nodes.
    fn maybe_enter_old<F: FnOnce(&mut Self)>(&mut self, id: NodeId, func: F) {
        let was_in_old = self.in_old;
        self.in_old = self.old.contains(&id);
        func(self);
        self.in_old = was_in_old;
    }
    /// Handles fn params explicitly, since `Visitor` has no `visit_param`.
    fn walk_args(&mut self, x: &'ast [Param]) {
        for arg in x {
            if self.in_old && self.matches(AnyNode::Param(arg)) {
                self.new.insert(arg.id);
            }
            // No point in visiting if the arg is not in `old` - just let `walk` handle it.
            if self.old.contains(&arg.id) {
                self.maybe_enter_old(arg.id, |v| {
                    v.visit_pat(&arg.pat);
                    v.visit_ty(&arg.ty);
                });
            }
        }
    }
}
// Every visit method follows the same pattern: record the node if its parent
// was selected and it matches the filter, then recurse with `in_old` updated
// for this node's own selection status. Fn-like items additionally walk
// their params by hand (no dedicated visit method exists for `Param`).
impl<'ast, 'a, 'tcx> Visitor<'ast> for ChildMatchVisitor<'a, 'tcx> {
    fn visit_item(&mut self, x: &'ast Item) {
        if self.in_old && self.matches(AnyNode::Item(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| {
            if let ItemKind::Fn(ref sig, ..) = x.kind {
                v.walk_args(&sig.decl.inputs);
            }
            visit::walk_item(v, x)
        });
    }
    fn visit_trait_item(&mut self, x: &'ast TraitItem) {
        if self.in_old && self.matches(AnyNode::TraitItem(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| {
            if let TraitItemKind::Method(ref sig, ..) = x.kind {
                v.walk_args(&sig.decl.inputs);
            }
            visit::walk_trait_item(v, x)
        });
    }
    fn visit_impl_item(&mut self, x: &'ast ImplItem) {
        if self.in_old && self.matches(AnyNode::ImplItem(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| {
            if let ImplItemKind::Method(ref sig, ..) = x.kind {
                v.walk_args(&sig.decl.inputs);
            }
            visit::walk_impl_item(v, x)
        });
    }
    fn visit_foreign_item(&mut self, x: &'ast ForeignItem) {
        if self.in_old && self.matches(AnyNode::ForeignItem(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| {
            // walk_foreign_item doesn't call visit_fn
            if let ForeignItemKind::Fn(ref decl, _) = x.kind {
                v.walk_args(&decl.inputs);
            }
            visit::walk_foreign_item(v, x)
        });
    }
    fn visit_stmt(&mut self, x: &'ast Stmt) {
        if self.in_old && self.matches(AnyNode::Stmt(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_stmt(v, x));
    }
    fn visit_expr(&mut self, x: &'ast Expr) {
        if self.in_old && self.matches(AnyNode::Expr(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_expr(v, x));
    }
    fn visit_pat(&mut self, x: &'ast Pat) {
        if self.in_old && self.matches(AnyNode::Pat(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_pat(v, x));
    }
    fn visit_ty(&mut self, x: &'ast Ty) {
        if self.in_old && self.matches(AnyNode::Ty(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_ty(v, x));
    }
    fn visit_struct_field(&mut self, x: &'ast StructField) {
        if self.in_old && self.matches(AnyNode::Field(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_struct_field(v, x));
    }
}
/// Returns the nodes in `krate` that are *direct children* of a node in
/// `sel` and match `filt`.
pub fn matching_children(
    st: &CommandState,
    cx: &RefactorCtxt,
    krate: &Crate,
    sel: HashSet<NodeId>,
    filt: &Filter,
) -> HashSet<NodeId> {
    let mut visitor = ChildMatchVisitor {
        st,
        cx,
        // The crate root has no parent node, so seed `in_old` from whether
        // the crate itself is selected. (Computed before `sel` moves into
        // `old` below.)
        in_old: sel.contains(&CRATE_NODE_ID),
        old: sel,
        new: HashSet::new(),
        filt,
    };
    visit::walk_crate(&mut visitor, krate);
    visitor.new
}
/// Visitor backing `matching_descendants`: selects nodes that are
/// *anywhere below* a node in the `old` set and match the filter.
struct DescMatchVisitor<'a, 'tcx: 'a> {
    st: &'a CommandState,
    cx: &'a RefactorCtxt<'a, 'tcx>,
    // Previously selected nodes.
    old: HashSet<NodeId>,
    // Newly selected nodes (the result).
    new: HashSet<NodeId>,
    /// Are we at a descendant of a node that was selected in the `old` set?
    in_old: bool,
    filt: &'a Filter,
}
impl<'ast, 'a, 'tcx> DescMatchVisitor<'a, 'tcx> {
    /// Does `node` satisfy the user-supplied filter?
    fn matches(&self, node: AnyNode) -> bool {
        filter::matches_filter(self.st, self.cx, node, self.filt)
    }
    /// Unlike the child variant, `in_old` is only ever *raised* when a
    /// selected node is entered, never lowered — so it stays true for the
    /// whole subtree under a selected node.
    fn maybe_enter_old<F: FnOnce(&mut Self)>(&mut self, id: NodeId, func: F) {
        let enter = self.old.contains(&id);
        if enter {
            let was_in_old = self.in_old;
            self.in_old = true;
            func(self);
            self.in_old = was_in_old;
        } else {
            func(self);
        }
    }
    /// Handles fn params explicitly, since `Visitor` has no `visit_param`.
    fn walk_args(&mut self, x: &'ast [Param]) {
        for arg in x {
            if self.in_old && self.matches(AnyNode::Param(arg)) {
                self.new.insert(arg.id);
            }
            // No point in visiting if the arg is not in `old` - just let `walk` handle it.
            if self.old.contains(&arg.id) {
                self.maybe_enter_old(arg.id, |v| {
                    v.visit_pat(&arg.pat);
                    v.visit_ty(&arg.ty);
                });
            }
        }
    }
}
// Same per-node pattern as the child-match visitor; only the `in_old`
// propagation (sticky for whole subtrees) differs.
impl<'ast, 'a, 'tcx> Visitor<'ast> for DescMatchVisitor<'a, 'tcx> {
    fn visit_item(&mut self, x: &'ast Item) {
        if self.in_old && self.matches(AnyNode::Item(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| {
            if let ItemKind::Fn(ref sig, ..) = x.kind {
                v.walk_args(&sig.decl.inputs);
            }
            visit::walk_item(v, x)
        });
    }
    fn visit_trait_item(&mut self, x: &'ast TraitItem) {
        if self.in_old && self.matches(AnyNode::TraitItem(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| {
            if let TraitItemKind::Method(ref sig, ..) = x.kind {
                v.walk_args(&sig.decl.inputs);
            }
            visit::walk_trait_item(v, x)
        });
    }
    fn visit_impl_item(&mut self, x: &'ast ImplItem) {
        if self.in_old && self.matches(AnyNode::ImplItem(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| {
            if let ImplItemKind::Method(ref sig, ..) = x.kind {
                v.walk_args(&sig.decl.inputs);
            }
            visit::walk_impl_item(v, x)
        });
    }
    fn visit_foreign_item(&mut self, x: &'ast ForeignItem) {
        if self.in_old && self.matches(AnyNode::ForeignItem(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| {
            if let ForeignItemKind::Fn(ref decl, ..) = x.kind {
                v.walk_args(&decl.inputs);
            }
            visit::walk_foreign_item(v, x)
        });
    }
    fn visit_stmt(&mut self, x: &'ast Stmt) {
        if self.in_old && self.matches(AnyNode::Stmt(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_stmt(v, x));
    }
    fn visit_expr(&mut self, x: &'ast Expr) {
        if self.in_old && self.matches(AnyNode::Expr(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_expr(v, x));
    }
    fn visit_pat(&mut self, x: &'ast Pat) {
        if self.in_old && self.matches(AnyNode::Pat(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_pat(v, x));
    }
    fn visit_ty(&mut self, x: &'ast Ty) {
        if self.in_old && self.matches(AnyNode::Ty(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_ty(v, x));
    }
    fn visit_struct_field(&mut self, x: &'ast StructField) {
        if self.in_old && self.matches(AnyNode::Field(x)) {
            self.new.insert(x.id);
        }
        self.maybe_enter_old(x.id, |v| visit::walk_struct_field(v, x));
    }
}
/// Returns the nodes in `krate` that are *descendants* of a node in `sel`
/// and match `filt`.
pub fn matching_descendants(
    st: &CommandState,
    cx: &RefactorCtxt,
    krate: &Crate,
    sel: HashSet<NodeId>,
    filt: &Filter,
) -> HashSet<NodeId> {
    let mut visitor = DescMatchVisitor {
        st,
        cx,
        // Everything is a descendant of the crate root, so seed `in_old`
        // from whether the crate itself is selected. (Computed before `sel`
        // moves into `old` below.)
        in_old: sel.contains(&CRATE_NODE_ID),
        old: sel,
        new: HashSet::new(),
        filt,
    };
    visit::walk_crate(&mut visitor, krate);
    visitor.new
}
/// Visitor backing `filter`: keeps exactly the already-selected nodes that
/// match the filter (no parent/descendant logic, hence no `in_old` flag).
struct FilterVisitor<'a, 'tcx: 'a> {
    st: &'a CommandState,
    cx: &'a RefactorCtxt<'a, 'tcx>,
    // Previously selected nodes.
    old: HashSet<NodeId>,
    // Surviving nodes (the result).
    new: HashSet<NodeId>,
    filt: &'a Filter,
}
impl<'ast, 'a, 'tcx> FilterVisitor<'a, 'tcx> {
    /// Does `node` satisfy the user-supplied filter?
    fn matches(&self, node: AnyNode) -> bool {
        filter::matches_filter(self.st, self.cx, node, self.filt)
    }
    /// Handles fn params explicitly, since `Visitor` has no `visit_param`.
    fn walk_args(&mut self, x: &'ast [Param]) {
        for arg in x {
            if self.old.contains(&arg.id) && self.matches(AnyNode::Param(arg)) {
                self.new.insert(arg.id);
            }
        }
    }
}
// Per node: keep it when it was already selected and matches the filter,
// then keep walking. Fn-like items walk their params by hand; `visit_fn`
// additionally covers params for fn bodies reached through that hook.
impl<'ast, 'a, 'tcx> Visitor<'ast> for FilterVisitor<'a, 'tcx> {
    fn visit_item(&mut self, x: &'ast Item) {
        if self.old.contains(&x.id) && self.matches(AnyNode::Item(x)) {
            self.new.insert(x.id);
        }
        if let ItemKind::Fn(ref sig, ..) = x.kind {
            self.walk_args(&sig.decl.inputs);
        }
        visit::walk_item(self, x);
    }
    fn visit_trait_item(&mut self, x: &'ast TraitItem) {
        if self.old.contains(&x.id) && self.matches(AnyNode::TraitItem(x)) {
            self.new.insert(x.id);
        }
        if let TraitItemKind::Method(ref sig, ..) = x.kind {
            self.walk_args(&sig.decl.inputs);
        }
        visit::walk_trait_item(self, x);
    }
    fn visit_impl_item(&mut self, x: &'ast ImplItem) {
        if self.old.contains(&x.id) && self.matches(AnyNode::ImplItem(x)) {
            self.new.insert(x.id);
        }
        if let ImplItemKind::Method(ref sig, ..) = x.kind {
            self.walk_args(&sig.decl.inputs);
        }
        visit::walk_impl_item(self, x);
    }
    fn visit_foreign_item(&mut self, x: &'ast ForeignItem) {
        if self.old.contains(&x.id) && self.matches(AnyNode::ForeignItem(x)) {
            self.new.insert(x.id);
        }
        if let ForeignItemKind::Fn(ref decl, ..) = x.kind {
            self.walk_args(&decl.inputs);
        }
        visit::walk_foreign_item(self, x);
    }
    fn visit_stmt(&mut self, x: &'ast Stmt) {
        if self.old.contains(&x.id) && self.matches(AnyNode::Stmt(x)) {
            self.new.insert(x.id);
        }
        visit::walk_stmt(self, x);
    }
    fn visit_expr(&mut self, x: &'ast Expr) {
        if self.old.contains(&x.id) && self.matches(AnyNode::Expr(x)) {
            self.new.insert(x.id);
        }
        visit::walk_expr(self, x);
    }
    fn visit_pat(&mut self, x: &'ast Pat) {
        if self.old.contains(&x.id) && self.matches(AnyNode::Pat(x)) {
            self.new.insert(x.id);
        }
        visit::walk_pat(self, x);
    }
    fn visit_ty(&mut self, x: &'ast Ty) {
        if self.old.contains(&x.id) && self.matches(AnyNode::Ty(x)) {
            self.new.insert(x.id);
        }
        visit::walk_ty(self, x);
    }
    fn visit_fn(&mut self, kind: FnKind<'ast>, fd: &'ast FnDecl, span: Span, _id: NodeId) {
        for arg in &fd.inputs {
            if self.old.contains(&arg.id) && self.matches(AnyNode::Param(arg)) {
                self.new.insert(arg.id);
            }
        }
        visit::walk_fn(self, kind, fd, span);
    }
    fn visit_struct_field(&mut self, x: &'ast StructField) {
        if self.old.contains(&x.id) && self.matches(AnyNode::Field(x)) {
            self.new.insert(x.id);
        }
        visit::walk_struct_field(self, x);
    }
}
/// Narrows `sel` down to the nodes that match `filt`.
pub fn filter(
    st: &CommandState,
    cx: &RefactorCtxt,
    krate: &Crate,
    sel: HashSet<NodeId>,
    filt: &Filter,
) -> HashSet<NodeId> {
    let mut visitor = FilterVisitor {
        st,
        cx,
        old: sel,
        new: HashSet::new(),
        filt,
    };
    visit::walk_crate(&mut visitor, krate);
    visitor.new
}
| true
|
8c42b661c35994c8800dd4f6dd7842b03511db1a
|
Rust
|
krhoda/mockbox
|
/server/src/main.rs
|
UTF-8
| 3,485
| 2.703125
| 3
|
[] |
no_license
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
#[macro_use]
extern crate serde_derive;
use rocket::http::{Method, Status};
use rocket::response::NamedFile;
use rocket_contrib::json::{Json, JsonValue};
use rocket_cors::{AllowedHeaders, AllowedOrigins};
use std::fs::OpenOptions;
use std::io::Write;
use std::path::{Path, PathBuf};
const FILE_DIR: &str = "./assets";
// HELPERS:
/// Builds the CORS fairing: any origin, any header, credentials allowed,
/// and only the HTTP methods the routes below actually use.
fn make_cors() -> Result<rocket_cors::Cors, rocket_cors::Error> {
    let allowed_origins = AllowedOrigins::all();
    rocket_cors::CorsOptions {
        allowed_origins,
        allowed_methods: vec![Method::Delete, Method::Get, Method::Post, Method::Options]
            .into_iter()
            .map(From::from)
            .collect(),
        allowed_headers: AllowedHeaders::all(),
        allow_credentials: true,
        ..Default::default()
    }
    .to_cors()
}
fn file_to_file_rec(f: std::fs::DirEntry) -> Option<FileRec> {
match f.file_name().into_string() {
Ok(name) => {
let name2 = name.clone();
let maybe_ext = std::path::Path::new(&name2)
.extension()
.and_then(std::ffi::OsStr::to_str);
match maybe_ext {
Some(ext) => Some(FileRec {
name: name,
ext: String::from(ext),
}),
_ => None,
}
}
_ => None,
}
}
// STRUCTS & TYPES:
/// A stored file as reported by `list_files`: its name and extension.
#[derive(Serialize, Deserialize)]
struct FileRec {
    name: String,
    ext: String,
}
/// An upload request: target file name plus base64-encoded contents.
#[derive(Serialize, Deserialize)]
struct FileUp {
    name: String,
    body: String,
}
// ROUTES:
/// Liveness check.
#[get("/hello")]
fn hello() -> &'static str {
    "Hello, world"
}
/// Serves a stored file; responds 404 (None) when it cannot be opened.
#[get("/files/<file..>")]
fn download_file(file: PathBuf) -> Option<NamedFile> {
    NamedFile::open(Path::new(FILE_DIR).join(file)).ok()
}
#[get("/files")]
fn list_files() -> Result<JsonValue, std::io::Error> {
let entries = std::fs::read_dir(FILE_DIR)?
.map(|res| res.map(|entry| file_to_file_rec(entry)))
.filter(|maybe_rec| match maybe_rec {
Ok(x) => match x {
Some(_) => true,
_ => false,
},
_ => false,
})
.collect::<Result<Vec<_>, std::io::Error>>()?;
Ok(json!(entries))
}
/// Deletes a stored file; propagates the I/O error if removal fails.
#[delete("/files/<file_name..>")]
fn delete_file(file_name: PathBuf) -> Result<Status, std::io::Error> {
    std::fs::remove_file(Path::new(FILE_DIR).join(file_name))?;
    Ok(Status::Ok)
}
/// Stores an uploaded file. The body is base64-decoded; a decode failure
/// yields 500, and `create_new` makes overwriting an existing file an error.
#[post("/files", format = "application/json", data = "<file_up>")]
fn upload_file(file_up: Json<FileUp>) -> Result<Status, std::io::Error> {
    let maybe_b = base64::decode(&file_up.0.body);
    match maybe_b {
        Ok(b) => {
            let mut next_file = OpenOptions::new()
                .read(true)
                .write(true)
                .create_new(true)
                .open(Path::new(FILE_DIR).join(file_up.0.name))?;
            next_file.write_all(&b)?;
            Ok(Status::Ok)
        }
        _ => {
            // Invalid base64 payload; the decode error itself is discarded.
            Ok(Status::InternalServerError)
        }
    }
}
/// Configures CORS, mounts all routes at `/`, and launches the server.
/// Panics at startup if the CORS configuration is invalid.
fn main() {
    match make_cors() {
        Ok(cors) => {
            rocket::ignite()
                .mount("/", routes![hello, download_file, list_files, upload_file, delete_file])
                .attach(cors)
                .launch();
        }
        Err(err) => {
            panic!("Failed in CORS creation, err: {}", err);
        }
    }
}
| true
|
22cd23d861c50ec2e574689fe8da63a71a14d218
|
Rust
|
rauchg/AzureSDKForRust
|
/examples/collection.rs
|
UTF-8
| 3,325
| 3.109375
| 3
|
[
"Apache-2.0"
] |
permissive
|
extern crate azure_sdk_for_rust;
extern crate futures;
extern crate hyper;
extern crate hyper_tls;
extern crate tokio_core;
use std::error::Error;
use azure_sdk_for_rust::cosmos::{AuthorizationToken, Client, TokenType};
use futures::future::*;
use tokio_core::reactor::Core;
/// Entry point; panics (via unwrap) on any error bubbled up from `code()`.
fn main() {
    code().unwrap();
}
// We run a separate method so we can use the elegant question mark (`?`) operator.
// A series of unwrap() calls would have achieved the same result.
/// Connects to Cosmos DB using env-provided credentials and prints, for
/// every database in the account, the collections it contains.
fn code() -> Result<(), Box<Error>> {
    // First we retrieve the account name and master key from environment variables.
    // We expect master keys (ie, not resource constrained)
    let master_key = std::env::var("COSMOS_MASTER_KEY").expect("Set env variable COSMOS_MASTER_KEY first!");
    let account = std::env::var("COSMOS_ACCOUNT").expect("Set env variable COSMOS_ACCOUNT first!");
    // let's create a tokio-core reactor.
    // It will drive our request. Remember, until run, futures do absolutely
    // nothing. So, run them. Also note that, in order to avoid cloning the authorization_token at
    // each request this library constructs the request **before** the future. This means the date
    // sent to the server will be the one at Future creation time, not the execution time.
    // Azure calls will block requests with time too much in the past (in order to prevent reply
    // attacks) so make sure to execute the Future as soon as possible after having it created.
    // * This is something worth discussing *
    let mut core = Core::new()?;
    // This is how you construct an authorization token.
    // Remember to pick the correct token type.
    // Here we assume master.
    // Most methods return a ```Result<_, AzureError>```.
    // ```AzureError``` is an enum union of all the possible underlying
    // errors, plus Azure specific ones. For example if a REST call returns the
    // unexpected result (ie NotFound instead of Ok) we return an Err telling
    // you that.
    let authorization_token = AuthorizationToken::new(account.clone(), TokenType::Master, &master_key)?;
    // Once we have an authorization token you can create a client instance. You can change the
    // authorization token at later time if you need, for example, to escalate the privileges for a
    // single operation.
    let client = Client::new(authorization_token)?;
    // The Cosmos' client exposes a lot of methods. This one lists the databases in the specified
    // account. Database do not implement Display but deref to &str so you can pass it to methods
    // both as struct or id.
    let future = client.list_databases().and_then(move |databases| {
        println!("Account {} has {} database(s)", account, databases.len());
        // Collect one sub-future per database and run them all concurrently.
        let mut v = Vec::new();
        // Each Cosmos' database contains so or more collections. We can enumerate them using the
        // list_collection method.
        for db in databases {
            v.push(client.list_collections(&db.id).map(move |collections| {
                println!("database {} has {} collection(s)", db.id, collections.len());
                for collection in collections {
                    println!("\tcollection {}", collection.id);
                }
            }));
        }
        futures::future::join_all(v)
    });
    core.run(future)?;
    Ok(())
}
| true
|
75dec76c1239cca7162fb80e75c6d8049053a274
|
Rust
|
mmun/aeroscore-rs
|
/tests/integration_test.rs
|
UTF-8
| 1,368
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
#[macro_use]
extern crate assert_approx_eq;
extern crate aeroscore;
extern crate igc;
use aeroscore::olc;
/// A GPS fix: WGS84 degrees plus GPS altitude in meters (per the IGC
/// `gps_alt` field this is populated from).
struct Point {
    latitude: f64,
    longitude: f64,
    altitude: i16,
}
// Adapter so aeroscore's OLC optimizer can consume our fixes.
impl olc::Point for Point {
    fn latitude(&self) -> f64 {
        self.latitude
    }
    fn longitude(&self) -> f64 {
        self.longitude
    }
    fn altitude(&self) -> i16 {
        self.altitude
    }
}
/// End-to-end check: parse a fixture IGC log, keep only post-release
/// B records, and confirm the optimized OLC distance.
#[test]
fn it_works() {
    // Everything logged before this time-of-day (the tow release) is ignored.
    let release_seconds = 10 * 3600 + 28 * 60 + 5;
    let fixes = include_str!("fixtures/2017-08-14-fla-6ng-01.igc")
        .lines()
        .filter(|l| l.starts_with('B'))
        .filter_map(|line| {
            // `.ok().and_then(..)` replaces the old `.ok().map_or(None, ..)`
            // — identical behavior, clearer intent (clippy: option_map_or_none).
            igc::records::BRecord::parse(&line).ok().and_then(|record| {
                if seconds_since_midnight(&record.timestamp) >= release_seconds {
                    Some(Point {
                        latitude: record.pos.lat.into(),
                        longitude: record.pos.lon.into(),
                        altitude: record.gps_alt,
                    })
                } else {
                    None
                }
            })
        })
        .collect::<Vec<_>>();
    let result = olc::optimize(&fixes).unwrap();
    assert_approx_eq!(result.distance, 501.3, 0.1);
}
/// Converts an IGC time-of-day into seconds elapsed since midnight.
fn seconds_since_midnight(time: &igc::util::Time) -> i32 {
    // Horner form of h*3600 + m*60 + s.
    (time.hours as i32 * 60 + time.minutes as i32) * 60 + time.seconds as i32
}
| true
|
9002d2e7d333c8699ec88e7ec925b30eb26e32cc
|
Rust
|
Patryk27/avr-tester
|
/avr-simulator/src/port.rs
|
UTF-8
| 1,203
| 2.75
| 3
|
[
"MIT"
] |
permissive
|
use super::*;
/// Provides access to simavr's digital pins.
pub struct Port;
impl Port {
    /// Drives pin `P<port><pin>` high or low by raising the corresponding
    /// IRQ. Panics if the simulated AVR has no such pin.
    pub fn set_pin(avr: &mut Avr, port: char, pin: u8, high: bool) {
        let irq = avr
            .try_io_getirq(IoCtl::IoPortGetIrq { port }, pin as u32)
            .unwrap_or_else(|| panic!("Current AVR doesn't have pin P{}{}", port, pin));
        // Safety: `IoPortGetIrq` can be raised with a zero or one
        unsafe {
            ffi::avr_raise_irq(irq.as_ptr(), if high { 1 } else { 0 });
        }
    }
    /// Reads the current level of pin `P<port><pin>` from the I/O port
    /// state. Panics if the ioctl reports the port does not exist.
    pub fn get_pin(avr: &mut Avr, port: char, pin: u8) -> bool {
        let mut state = ffi::avr_ioport_state_t {
            _bitfield_align_1: Default::default(),
            _bitfield_1: Default::default(),
            __bindgen_padding_0: Default::default(),
        };
        // Safety: `IoCtl::IoPortGetState` requires parameter of type
        // `avr_ioport_state_t`, which is the case here
        let status = unsafe { avr.ioctl(IoCtl::IoPortGetState { port }, &mut state) };
        if status == -1 {
            panic!("Current AVR doesn't have pin P{}{}", port, pin);
        }
        // Extracts 8 bits at offset 7 of the packed state — presumably the
        // PORT register value; confirm against simavr's avr_ioport_state_t
        // bitfield layout. (This `port` shadows the `char` parameter above.)
        let port = state._bitfield_1.get(7, 8) as u8;
        port & (1 << pin) > 0
    }
}
| true
|
f795021535debd03627f33911f476bf824c89035
|
Rust
|
theduke/rust-serialization-benchmarks
|
/src/spec.rs
|
UTF-8
| 6,468
| 2.65625
| 3
|
[] |
no_license
|
#![macro_use]
pub use std::collections::BTreeMap;
use bencher;
/// Aggregated timings for one benchmark direction (serialize or deserialize).
/// NOTE(review): this type shadows the prelude's `std::result::Result`
/// within this module and in files that glob-import it.
#[derive(Clone, Debug, Serialize)]
pub struct Result {
    pub ns_avg: f64,
    pub ns_median: f64,
    pub ns_variance: f64,
    pub ns_std_dev: f64,
    // Throughput derived from the mean time and payload size.
    pub mb_per_sec: f64,
}
impl Result {
    /// Summarizes bencher samples for a payload of `byte_size` bytes.
    pub fn from_samples(s: bencher::BenchSamples, byte_size: usize) -> Self {
        // iterations-per-second (1e9 / mean-ns) times megabytes-per-iteration.
        let mb_per_sec = (1000000000.0 / s.ns_iter_summ.mean) * (byte_size as f64 / 1000000.0);
        Result {
            ns_avg: s.ns_iter_summ.mean,
            ns_median: s.ns_iter_summ.median,
            ns_variance: s.ns_iter_summ.var,
            ns_std_dev: s.ns_iter_summ.std_dev,
            mb_per_sec: mb_per_sec,
        }
    }
}
/// Result of one benchmark variant: both directions plus the serialized bytes.
#[derive(Clone, Debug, Serialize)]
pub struct CaseResult {
    pub name: String,
    pub serialize: Result,
    pub deserialize: Result,
    // The serialized payload itself, kept for size/format inspection.
    pub data: Vec<u8>,
}
/// All case results for one serialization library, keyed by case name.
#[derive(Clone, Debug, Serialize)]
pub struct LibraryResult {
    pub library: String,
    pub cases: BTreeMap<String, CaseResult>,
}
// Benchmarks one (de)serialization variant and evaluates to a `CaseResult`.
// First arm: serialize/deserialize only. Second arm: additionally runs a
// one-time `convert_data` step and deserializes into the converted type.
macro_rules! test_variant {
    {
        variant($variant_name:expr, $data_type:ty, $creater:expr) {
            serialize |$ser_data:ident| { $($ser_code:tt)* }
            deserialize |$deser_data:ident| { $($deser_code:tt)* }
        }
    } => {
        {
            println!(" Variant {}:", $variant_name);
            let data = $creater;
            // Serialize data to get a byte count.
            let serialized_result = {
                let $ser_data = &data;
                $($ser_code)*
            };
            let byte_len = serialized_result.len();
            // Serialize.
            let ser_samples = bencher::bench::benchmark(|bench| {
                let $ser_data = &data;
                bench.iter(|| {
                    let res = { $($ser_code)* };
                    res
                });
            });
            // Deserialize.
            let deser_samples = bencher::bench::benchmark(|bench| {
                let $deser_data = &serialized_result;
                //let mut target = serialized_result.clone();
                bench.iter(|| {
                    let res: $data_type = { $($deser_code)* };
                    res
                });
            });
            let res = CaseResult {
                name: $variant_name.to_string(),
                serialize: Result::from_samples(ser_samples, byte_len),
                deserialize: Result::from_samples(deser_samples, byte_len),
                data: (&serialized_result).clone().into(),
            };
            println!(" Serialize: median: {}ns / mb/sec: {}",
                res.serialize.ns_median,
                res.serialize.mb_per_sec);
            println!(" Deserialize: median: {}ns / mb/sec: {}",
                res.deserialize.ns_median,
                res.deserialize.mb_per_sec);
            res
        }
    };
    {
        variant($variant_name:expr, $data_type:ty, $creater:expr) {
            convert_data |$convert_data:ident| -> $converted_type:ty { $($convert_code:tt)* }
            serialize |$ser_data:ident| { $($ser_code:tt)* }
            deserialize |$deser_data:ident| { $($deser_code:tt)* }
        }
    } => {
        {
            println!(" Variant {}:", $variant_name);
            let data = $creater;
            // Convert data.
            let data: $converted_type = {
                let $convert_data = data;
                $( $convert_code )*
            };
            // Serialize data to get a byte count.
            let serialized_result = {
                let $ser_data = &data;
                $($ser_code)*
            };
            let byte_len = serialized_result.len();
            // Serialize.
            let ser_samples = bencher::bench::benchmark(|bench| {
                let $ser_data = &data;
                bench.iter(|| {
                    let res = { $($ser_code)* };
                    res
                });
            });
            // Deserialize.
            let deser_samples = bencher::bench::benchmark(|bench| {
                let $deser_data = &serialized_result;
                //let mut target = serialized_result.clone();
                bench.iter(|| {
                    let res: $converted_type = { $($deser_code)* };
                    res
                });
            });
            let res = CaseResult {
                name: $variant_name.to_string(),
                serialize: Result::from_samples(ser_samples, byte_len),
                deserialize: Result::from_samples(deser_samples, byte_len),
                data: (&serialized_result).clone().into(),
            };
            println!(" Serialize: median: {}ns / mb/sec: {}",
                res.serialize.ns_median,
                res.serialize.mb_per_sec);
            println!(" Deserialize: median: {}ns / mb/sec: {}",
                res.deserialize.ns_median,
                res.deserialize.mb_per_sec);
            res
        }
    }
}
// Generate `run_tests()`: for each `library(name) { ... }` entry, run the
// shared benchmark body (via `test_variant!`) against the static, dynamic
// and nested data sets, collecting one `LibraryResult` per library.
// NOTE(review): StaticData/DynamicData/NestedData are resolved at the
// expansion site — confirm they are in scope wherever this is invoked.
#[macro_export]
macro_rules! make_tests {
    {
        $(
            $library:ident($name:expr) {
                $( $code:tt )*
            }
        )*
    } => {
        fn run_tests() -> Vec<::spec::LibraryResult> {
            let mut results = Vec::new();
            {
                $(
                    println!("##########\nTesting {}:", $name);
                    let mut lib_result = LibraryResult {
                        library: $name.to_string(),
                        cases: BTreeMap::new(),
                    };
                    let res = test_variant! {
                        variant("static", StaticData, StaticData::new()) { $( $code )* }
                    };
                    lib_result.cases.insert("static".into(), res);
                    let res = test_variant! {
                        variant("dynamic", DynamicData, DynamicData::new()) { $( $code )* }
                    };
                    lib_result.cases.insert("dynamic".into(), res);
                    let res = test_variant! {
                        variant("nested", NestedData, NestedData::new()) { $( $code )* }
                    };
                    lib_result.cases.insert("nested".into(), res);
                    results.push(lib_result);
                    println!("##########\n");
                )*
            }
            results
        }
    }
}
| true
|
92e4a8b0c253dfffc3abe541153be0e0af9bb377
|
Rust
|
LuoZijun/es
|
/src/version.rs
|
UTF-8
| 3,617
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
use std::fmt;
// u16::MAX, used below as the sentinel version number for ESNEXT.
const MAX_U16: u16 = u16::max_value();
/// A calendar date; `day` may be 0 when only year/month are known
/// (see `published_at`).
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Date {
    pub year: u16,
    pub month: u8,
    pub day: u8,
}
// https://en.wikipedia.org/wiki/ECMAScript#Versions
// Ordering is derived lexicographically (major, minor, micro), so versions
// compare chronologically and ESNEXT (all-MAX) sorts last.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct ECMAScriptVersion {
    pub major: u16,
    pub minor: u16,
    pub micro: u16,
}
impl ECMAScriptVersion {
    pub const ES5: ECMAScriptVersion = ECMAScriptVersion { major: 5, minor: 0, micro: 0 };
    // Alias: ES5 was published in 2009 (see `published_at`).
    pub const ES2009: ECMAScriptVersion = Self::ES5;
    // ECMAScript 2011 (ES5): https://www.ecma-international.org/ecma-262/5.1/index.html
    pub const ES2011: ECMAScriptVersion = ECMAScriptVersion { major: 5, minor: 1, micro: 0 };
    // ECMAScript 2015 (ES6): https://www.ecma-international.org/ecma-262/6.0/index.html
    pub const ES2015: ECMAScriptVersion = ECMAScriptVersion { major: 6, minor: 0, micro: 0 };
    // ECMAScript 2016 (ES2016): https://www.ecma-international.org/ecma-262/7.0/index.html
    pub const ES2016: ECMAScriptVersion = ECMAScriptVersion { major: 7, minor: 0, micro: 0 };
    // ECMAScript 2017 (ES2017): https://www.ecma-international.org/ecma-262/8.0/index.html
    pub const ES2017: ECMAScriptVersion = ECMAScriptVersion { major: 8, minor: 0, micro: 0 };
    // ECMAScript 2018 (ES2018): https://www.ecma-international.org/ecma-262/9.0/index.html
    pub const ES2018: ECMAScriptVersion = ECMAScriptVersion { major: 9, minor: 0, micro: 0 };
    // Sentinel "future" version: compares greater than every real version.
    pub const ESNEXT: ECMAScriptVersion = ECMAScriptVersion { major: MAX_U16, minor: MAX_U16, micro: MAX_U16 };
    pub const LATEST: ECMAScriptVersion = Self::ES2018;
}
impl ECMAScriptVersion {
    /// Publication date of this version (`day` 0 = day unspecified).
    ///
    /// # Panics
    /// Panics (`unreachable!`) for any value other than the named
    /// constants above.
    pub fn published_at(&self) -> Date {
        match *self {
            Self::ES5 => Date { year: 2009, month: 12, day: 0 },
            Self::ES2011 => Date { year: 2011, month: 6, day: 0 },
            Self::ES2015 => Date { year: 2015, month: 6, day: 0 },
            Self::ES2016 => Date { year: 2016, month: 6, day: 0 },
            Self::ES2017 => Date { year: 2017, month: 6, day: 0 },
            Self::ES2018 => Date { year: 2018, month: 6, day: 0 },
            Self::ESNEXT => Date { year: MAX_U16, month: 12, day: 30 },
            _ => unreachable!(),
        }
    }
}
impl Default for ECMAScriptVersion {
    /// Defaults to the newest published version (`LATEST`).
    fn default() -> Self {
        Self::LATEST
    }
}
impl fmt::Display for Date {
    // Only year/month are rendered; `day` is intentionally omitted.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}/{}", self.year, self.month)
    }
}
impl fmt::Display for ECMAScriptVersion {
    // Renders as e.g. "ES2015(ES6.0)". Panics (`unreachable!`) for values
    // other than the named constants, mirroring `published_at`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Self::ES5 => write!(f, "ES{}(ES5.0)", self.published_at().year),
            Self::ES2011 => write!(f, "ES{}(ES5.1)", self.published_at().year),
            Self::ES2015 => write!(f, "ES{}(ES6.0)", self.published_at().year),
            Self::ES2016 => write!(f, "ES{}(ES7.0)", self.published_at().year),
            Self::ES2017 => write!(f, "ES{}(ES8.0)", self.published_at().year),
            Self::ES2018 => write!(f, "ES{}(ES9.0)", self.published_at().year),
            Self::ESNEXT => write!(f, "ESNEXT"),
            _ => unreachable!(),
        }
    }
}
/// Version metadata for a language feature.
pub trait Version {
    /// The version this feature belongs to (consumes `self`).
    fn age(self) -> ECMAScriptVersion;
    /// First version in which the feature is standard.
    fn standard_since(&self) -> ECMAScriptVersion;
    /// Version that deprecated the feature, if any.
    fn deprecated_since(&self) -> Option<ECMAScriptVersion>;
    /// True when targeting a version strictly newer than the one that
    /// deprecated this feature.
    fn is_deprecated_at(&self, target: ECMAScriptVersion) -> bool {
        match self.deprecated_since() {
            Some(ver) => target > ver,
            None => false,
        }
    }
}
| true
|
5b6300a556d4ac04fb385ed6bfc24e962489f5db
|
Rust
|
pindell-matt/rust_labyrinth
|
/src/print.rs
|
UTF-8
| 1,190
| 3.1875
| 3
|
[] |
no_license
|
/// Render the whole labyrinth grid to stdout: the top border once, then
/// each row's vertical walls followed by its horizontal walls.
pub fn print_to_console(grid: &Vec<Vec<Vec<i32>>>) {
    println!("");
    for (index, row) in grid.iter().enumerate() {
        // The top border only exists above the first row.
        if index == 0 {
            print_top_row(row);
        }
        print_vertical_walls(row);
        print_horizontal_walls(row);
    }
    println!("");
}
/// Print the maze's top border, leaving a gap in the first column (the
/// entrance).
fn print_top_row(row: &Vec<Vec<i32>>) {
    let mut top = vec!["+"];
    for (i, _cell) in row.iter().enumerate() {
        // Decide by position, not value: the previous `cell == &row[0]`
        // comparison also opened a gap at any later cell whose wall vector
        // happened to equal the first cell's.
        if i == 0 {
            top.push(" +");
        } else {
            top.push("---+");
        }
    }
    let joined = top.join("");
    // `{}` rather than `{:?}`: Debug-formatting a String wraps the border
    // line in literal quotes.
    println!("{}", joined);
}
/// Print one row of cells: a marker inside each visited cell and the
/// right-hand wall of each cell.
fn print_vertical_walls(row: &Vec<Vec<i32>>) {
    let mut vertical = vec!["|"];
    for cell in row {
        // cell[4] set: draw a "*" marker inside the cell.
        if cell[4] == 1 { vertical.push(" * "); } else { vertical.push(" "); }
        // cell[2] set: the right-hand wall is open (blank instead of "|").
        if cell[2] == 1 { vertical.push(" "); } else { vertical.push("|"); }
    }
    let joined = vertical.join("");
    // `{}` rather than `{:?}`: Debug formatting added literal quotes
    // around every printed row.
    println!("{}", joined);
}
/// Print the wall segments below a row; a set cell[3] leaves the passage
/// downward open.
fn print_horizontal_walls(row: &Vec<Vec<i32>>) {
    let mut horizontal = vec!["+"];
    for cell in row {
        if cell[3] == 1 {
            horizontal.push(" +");
        } else {
            horizontal.push("---+");
        }
    }
    let joined = horizontal.join("");
    // `{}` rather than `{:?}`: Debug formatting added literal quotes
    // around every printed row.
    println!("{}", joined);
}
| true
|
1450f91f191cb54cfb0b927837afffa44cd49ed1
|
Rust
|
krscott/rust_elm_types
|
/src/spec.rs
|
UTF-8
| 15,174
| 3.046875
| 3
|
[
"MIT"
] |
permissive
|
use serde::{Deserialize, Serialize};
// Payload carried by one variant of a generated enum.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum EnumVariantData {
    None,
    // The pair is (rust_type, elm_type) — see to_rust()/to_elm() usage.
    Single((String, String)),
    // Tuple(Vec<(String, String)>),
    Struct(Vec<StructField>),
}
// One named field of a generated struct; `data` is (rust_type, elm_type).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct StructField {
    pub name: String,
    pub data: (String, String),
}
// One variant of a generated enum.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct EnumVariant {
    pub name: String,
    pub data: EnumVariantData,
}
// A type to emit in both Rust and Elm.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum TypeSpec {
    Struct {
        name: String,
        fields: Vec<StructField>,
    },
    Enum {
        name: String,
        variants: Vec<EnumVariant>,
    },
}
// A whole module's worth of generated types.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ApiSpec {
    pub module: String,
    pub types: Vec<TypeSpec>,
}
// One indentation step in generated code.
const INDENT: &str = " ";
// Derive list applied to every generated Rust type.
const TYPE_DERIVE_HEADER: &str = "#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]";
// Serde tagging scheme for generated enums: {"var": ..., "vardata": ...}.
const SERDE_ENUM_HEADER: &str = "#[serde(tag = \"var\", content = \"vardata\")]";
impl ApiSpec {
    /// Render every type in the spec as Rust source, blank-line separated.
    pub fn to_rust(&self) -> String {
        self.types
            .iter()
            .map(|t| t.to_rust())
            .collect::<Vec<_>>()
            .join("\n\n")
    }
    /// Render the whole spec as a complete Elm module: a header with an
    /// explicit exposing list, fixed JSON imports, then all declarations
    /// with their decoders and encoders.
    pub fn to_elm(&self) -> String {
        let exports_str = self
            .types
            .iter()
            .flat_map(|t| {
                // Enums are exposed with their constructors: `Name(..)`.
                let (name, expose) = match t {
                    TypeSpec::Struct { name, .. } => (name, name.clone()),
                    TypeSpec::Enum { name, .. } => (name, format!("{}(..)", name)),
                };
                vec![
                    expose.clone(),
                    format!("decode{}", name),
                    format!("encode{}", name),
                ]
            })
            .collect::<Vec<_>>()
            .join(", ");
        let types_str = self
            .types
            .iter()
            .flat_map(|t| vec![t.to_elm(), t.to_elm_decoder(), t.to_elm_encoder()])
            .collect::<Vec<_>>()
            .join("\n\n");
        format!(
            "\
module {name} exposing ({exports})
import Json.Decode
import Json.Decode.Extra
import Json.Decode.Pipeline
import Json.Encode
import Json.Encode.Extra
{types}",
            name = self.module,
            exports = exports_str,
            types = types_str
        )
    }
}
impl TypeSpec {
    /// Render this type as Rust source: a derived struct, or an enum with
    /// the serde tag/content attribute.
    pub fn to_rust(&self) -> String {
        match self {
            Self::Struct { name, fields } => {
                let fields_fmt = fields
                    .iter()
                    .map(|field| field.to_rust(1, true))
                    .collect::<Vec<_>>()
                    .join("");
                format!(
                    "\
{header}
pub struct {name} {{
{fields}}}",
                    header = TYPE_DERIVE_HEADER,
                    name = name,
                    fields = fields_fmt
                )
            }
            Self::Enum { name, variants } => {
                let variants_fmt = variants
                    .iter()
                    .map(|var| var.to_rust(1))
                    .collect::<Vec<_>>()
                    .join("");
                format!(
                    "\
{header}
{enum_header}
pub enum {name} {{
{variants}}}",
                    header = TYPE_DERIVE_HEADER,
                    enum_header = SERDE_ENUM_HEADER,
                    name = name,
                    variants = variants_fmt
                )
            }
        }
    }
    /// Render this type as an Elm declaration. Enum variants carrying a
    /// struct payload also emit a helper record type named
    /// `<Enum><Variant>` (plus its decoder) ahead of the enum itself.
    pub fn to_elm(&self) -> String {
        match self {
            Self::Struct { name, fields } => {
                let sep = format!("\n{}, ", INDENT);
                let fields_fmt = fields
                    .iter()
                    .map(|field| field.to_elm(1))
                    .collect::<Vec<_>>()
                    .join(&sep);
                format!(
                    "\
type alias {name} =
{indent}{{ {fields}
{indent}}}",
                    name = name,
                    fields = fields_fmt,
                    indent = INDENT,
                )
            }
            Self::Enum { name, variants } => {
                let subtypes = variants
                    .iter()
                    .filter_map(|var| {
                        if let EnumVariantData::Struct(fields) = &var.data {
                            let subtype = TypeSpec::Struct {
                                name: format!("{}{}", name, var.name),
                                fields: fields.clone(),
                            };
                            Some(format!(
                                "{}\n\n{}\n\n",
                                subtype.to_elm(),
                                subtype.to_elm_decoder()
                            ))
                        } else {
                            None
                        }
                    })
                    .collect::<Vec<_>>()
                    .join("");
                let sep = format!("\n{}| ", INDENT);
                let variants_fmt = variants
                    .iter()
                    .map(|var| var.to_elm(name))
                    .collect::<Vec<_>>()
                    .join(&sep);
                format!(
                    "\
{subtypes}type {name}
{indent}= {variants}",
                    subtypes = subtypes,
                    name = name,
                    variants = variants_fmt,
                    indent = INDENT,
                )
            }
        }
    }
    /// Render the Elm JSON decoder for this type: a pipeline decoder for
    /// structs, a `oneOf` over variant decoders for enums.
    pub fn to_elm_decoder(&self) -> String {
        match self {
            Self::Struct { name, fields } => {
                let sep = format!("\n{}", INDENT.repeat(2));
                let field_decoders = fields
                    .iter()
                    .map(|field| format!("|> {}", field.to_elm_decoder()))
                    .collect::<Vec<_>>()
                    .join(&sep);
                format!(
                    "\
decode{name} : Json.Decode.Decoder {name}
decode{name} =
Json.Decode.succeed {name}
{fields}",
                    name = name,
                    fields = field_decoders
                )
            }
            Self::Enum { name, variants } => {
                let sep = format!("\n{}, ", INDENT.repeat(2));
                let variant_decoders = variants
                    .iter()
                    .map(|var| var.to_elm_decoder(name))
                    .collect::<Vec<_>>()
                    .join(&sep);
                format!(
                    "\
decode{name} : Json.Decode.Decoder {name}
decode{name} =
Json.Decode.oneOf
[ {variants}
]",
                    name = name,
                    variants = variant_decoders
                )
            }
        }
    }
    /// Render the Elm JSON encoder for this type: an object encoder for
    /// structs, a `case` expression over the variants for enums.
    pub fn to_elm_encoder(&self) -> String {
        match self {
            Self::Struct { name, fields } => {
                let sep = format!("\n{}, ", INDENT.repeat(2));
                let field_encoders = fields
                    .iter()
                    .map(|field| field.to_elm_encoder())
                    .collect::<Vec<_>>()
                    .join(&sep);
                format!(
                    "\
encode{name} : {name} -> Json.Encode.Value
encode{name} record =
Json.Encode.object
[ {fields}
]",
                    name = name,
                    fields = field_encoders
                )
            }
            Self::Enum { name, variants } => {
                let variant_cases = variants
                    .iter()
                    .map(|var| var.to_elm_encoder())
                    .collect::<Vec<_>>()
                    .join("");
                format!(
                    "\
encode{name} : {name} -> Json.Encode.Value
encode{name} var =
case var of{variants}",
                    name = name,
                    variants = variant_cases
                )
            }
        }
    }
}
/// Map a space-separated Elm type expression (e.g. "Maybe Int") to the
/// corresponding Json.Decode decoder expression, parenthesizing applied
/// decoders so the result stays a single expression.
fn elm_json_decoder(elm_type: &str) -> String {
    let known = ["String", "Int", "Float", "Bool", "List"];
    let parts: Vec<String> = elm_type
        .split(' ')
        .map(|token| match token {
            t if known.contains(&t) => format!("Json.Decode.{}", t.to_lowercase()),
            "Maybe" => String::from("Json.Decode.nullable"),
            t => format!("decode{}", t),
        })
        .collect();
    match parts.len() {
        0 | 1 => parts.join(" "),
        _ => format!("({})", parts.join(" ")),
    }
}
/// Map a space-separated Elm type expression to the corresponding
/// Json.Encode encoder expression (never parenthesized, unlike the
/// decoder form).
fn elm_json_encoder(elm_type: &str) -> String {
    let known = ["String", "Int", "Float", "Bool", "List"];
    let encoders: Vec<String> = elm_type
        .split(' ')
        .map(|token| match token {
            t if known.contains(&t) => format!("Json.Encode.{}", t.to_lowercase()),
            "Maybe" => String::from("Json.Encode.Extra.maybe"),
            t => format!("encode{}", t),
        })
        .collect();
    encoders.join(" ")
}
impl StructField {
    /// Render as one Rust struct-field line ("name: type,\n"), indented,
    /// with an optional `pub` prefix.
    pub fn to_rust(&self, indent: usize, add_pub: bool) -> String {
        let visibility = if add_pub { "pub " } else { "" };
        format!(
            "{}{}{}: {},\n",
            INDENT.repeat(indent),
            visibility,
            self.name,
            self.data.0
        )
    }
    /// Render as an Elm record-field annotation; applied types (those
    /// containing a space) are parenthesized.
    pub fn to_elm(&self, _indent: usize) -> String {
        let elm_type = &self.data.1;
        if elm_type.contains(' ') {
            format!("{} : ({})", self.name, elm_type)
        } else {
            format!("{} : {}", self.name, elm_type)
        }
    }
    /// Render the Json.Decode.Pipeline step that decodes this field.
    pub fn to_elm_decoder(&self) -> String {
        format!(
            "Json.Decode.Pipeline.required \"{name}\" {decoder}",
            name = self.name,
            decoder = elm_json_decoder(&self.data.1)
        )
    }
    /// Render the (key, encoded value) pair used in a Json.Encode.object.
    pub fn to_elm_encoder(&self) -> String {
        format!(
            "(\"{name}\", {encoder} <| record.{name})",
            name = self.name,
            encoder = elm_json_encoder(&self.data.1)
        )
    }
}
impl EnumVariant {
    /// Render as one Rust enum-variant line (name plus payload), indented.
    pub fn to_rust(&self, indent: usize) -> String {
        format!(
            "{}{}{},\n",
            INDENT.repeat(indent),
            self.name,
            self.data.to_rust(indent)
        )
    }
    /// Render as an Elm variant constructor. Struct payloads reference the
    /// generated `<Parent><Variant>` record type emitted by
    /// `TypeSpec::to_elm`.
    pub fn to_elm(&self, parent_type_name: &str) -> String {
        match &self.data {
            EnumVariantData::None => format!("{}", self.name),
            EnumVariantData::Single((_, elm_type)) => {
                if elm_type.contains(' ') {
                    format!("{} ({})", self.name, elm_type)
                } else {
                    format!("{} {}", self.name, elm_type)
                }
            }
            EnumVariantData::Struct(_fields) => {
                format!(
                    "{name} {parent}{name}",
                    name = self.name,
                    parent = parent_type_name
                )
                // let fields_fmt = fields
                //     .iter()
                //     .map(|field| field.to_elm(indent + 1))
                //     .collect::<Vec<_>>()
                //     .join(", ");
                // format!(" {{ {fields} }}", fields = fields_fmt)
            }
        }
    }
    /// Render this variant's decoder branch: gate on the "var" tag, then
    /// decode "vardata" according to the payload kind.
    pub fn to_elm_decoder(&self, parent_type_name: &str) -> String {
        match &self.data {
            EnumVariantData::None => format!(
                "Json.Decode.Extra.when (Json.Decode.field \"var\" Json.Decode.string) ((==) \"{name}\") <|\n\
                {indent}Json.Decode.succeed {name}",
                name = self.name,
                indent = INDENT.repeat(3)
            ),
            EnumVariantData::Single((_, elm_type)) => format!(
                "Json.Decode.Extra.when (Json.Decode.field \"var\" Json.Decode.string) ((==) \"{name}\") <|\n\
                {indent}Json.Decode.map {name} (Json.Decode.field \"vardata\" <| {decoder})",
                name = self.name,
                decoder = elm_json_decoder(elm_type),
                indent = INDENT.repeat(3)
            ),
            EnumVariantData::Struct(_) => format!(
                "Json.Decode.Extra.when (Json.Decode.field \"var\" Json.Decode.string) ((==) \"{name}\") <|\n\
                {indent}Json.Decode.map {name} (Json.Decode.field \"vardata\" <| decode{parent}{name})",
                name = self.name,
                indent = INDENT.repeat(3),
                parent = parent_type_name,
            )
        }
    }
    /// Render this variant's arm of the enum encoder's `case` expression,
    /// producing the tagged {"var", "vardata"} object shape.
    pub fn to_elm_encoder(&self) -> String {
        match &self.data {
            EnumVariantData::None => format!(
                "\n\
                {tab}{tab}{name} ->\n\
                {tab}{tab}{tab}Json.Encode.object\n\
                {tab}{tab}{tab}{tab}[ ( \"var\", Json.Encode.string \"{name}\" )\n\
                {tab}{tab}{tab}{tab}]",
                tab = INDENT,
                name = self.name
            ),
            EnumVariantData::Single((_, elm_type)) => format!(
                "\n\
                {tab}{tab}{name} value ->\n\
                {tab}{tab}{tab}Json.Encode.object\n\
                {tab}{tab}{tab}{tab}[ ( \"var\", Json.Encode.string \"{name}\" )\n\
                {tab}{tab}{tab}{tab}, ( \"vardata\", {encoder} <| value )\n\
                {tab}{tab}{tab}{tab}]",
                tab = INDENT,
                name = self.name,
                encoder = elm_json_encoder(elm_type)
            ),
            EnumVariantData::Struct(fields) => format!(
                "\n\
                {tab}{tab}{name} record ->\n\
                {tab}{tab}{tab}Json.Encode.object\n\
                {tab}{tab}{tab}{tab}[ ( \"var\", Json.Encode.string \"{name}\" )\n\
                {tab}{tab}{tab}{tab}, ( \"vardata\", Json.Encode.object\n\
                {tab}{tab}{tab}{tab}{tab}[{encoder}\n\
                {tab}{tab}{tab}{tab}{tab}] )\n\
                {tab}{tab}{tab}{tab}]",
                tab = INDENT,
                name = self.name,
                encoder = fields
                    .iter()
                    .map(|field| format!(
                        " ( \"{name}\", {encoder} <| record.{name} )",
                        name = field.name,
                        encoder = elm_json_encoder(&field.data.1)
                    ))
                    .collect::<Vec<_>>()
                    .join(&format!("\n{tab}{tab}{tab}{tab}{tab},", tab = INDENT))
            ),
        }
    }
}
impl EnumVariantData {
    /// Render the variant payload in Rust syntax: nothing, a one-element
    /// tuple, or an inline struct body.
    pub fn to_rust(&self, indent: usize) -> String {
        match self {
            Self::None => String::new(),
            Self::Single((rust_type, _)) => format!("({})", rust_type),
            Self::Struct(fields) => {
                let mut body = String::new();
                for field in fields {
                    body.push_str(&field.to_rust(indent + 1, false));
                }
                format!(" {{\n{}{}}}", body, INDENT.repeat(indent))
            }
        }
    }
}
| true
|
b76d4aef95c2f8eb23d10043fb6ffa2776ad7a37
|
Rust
|
zargony/advent-of-code-2016
|
/src/day14.rs
|
UTF-8
| 4,973
| 2.921875
| 3
|
[] |
no_license
|
extern crate md5;
extern crate onig;
extern crate time;
use std::io::Write;
use onig::Regex;
/// Convert a 4-bit value (0..=15) to its lowercase hex character.
///
/// # Panics
/// Panics if `n > 15`.
#[inline]
fn nibble2char(n: u8) -> char {
    match n {
        // `..=` replaces the deprecated `...` range-pattern syntax;
        // byte literals replace the `'0' as u8` casts.
        0..=9 => (b'0' + n) as char,
        10..=15 => (b'a' + n - 10) as char,
        _ => panic!("nibble must be in range 0..15"),
    }
}
/// A hash finder uses a brute force approach to find a MD5 digest
/// for a given prefix that has 3 or more repeating characters in its
/// hex representation
pub struct HashFinder<'a> {
    // Static prefix (the puzzle salt), hashed before the counter.
    prefix: &'a str,
    // Next counter value to append to the prefix.
    pad: u32,
    // Matches a run of 3+ identical lowercase-hex characters.
    re: onig::Regex,
    // Number of extra MD5 rounds ("key stretching") applied per digest.
    stretch: usize,
}
impl<'a> HashFinder<'a> {
    /// Create new hash finder for the given prefix
    fn new(prefix: &str, stretch: usize) -> HashFinder {
        HashFinder {
            prefix: prefix,
            pad: 0,
            re: Regex::new("([a-z0-9])\\1\\1+").unwrap(),
            stretch: stretch,
        }
    }
}
impl<'a> Iterator for HashFinder<'a> {
    // (pad value used, length of the repeated run, the repeated character)
    type Item = (u32, usize, char);
    /// Finds the next MD5 hexdigest with 3 or more repeating characters
    /// by appending an ever increasing number to the prefix. Yields the
    /// appended number together with the length and character of the
    /// first repeated run found by the regex.
    /// (Never returns `None`: it loops until a match is found.)
    fn next(&mut self) -> Option<(u32, usize, char)> {
        loop {
            let mut md5 = md5::Context::new();
            md5.consume(self.prefix.as_bytes());
            md5.write_fmt(format_args!("{}", self.pad)).unwrap();
            let mut digest = md5.compute();
            // Key stretching: re-hash the lowercase hexdigest `stretch` times.
            for _ in 0..self.stretch {
                let mut md5 = md5::Context::new();
                for &b in digest.iter() {
                    let buf = [nibble2char(b >> 4) as u8, nibble2char(b &0xf) as u8];
                    md5.consume(&buf);
                }
                digest = md5.compute();
            }
            let hexdigest = format!("{:x}", digest);
            self.pad += 1;
            if let Some(cap) = self.re.captures(&hexdigest) {
                let snippet = cap.at(0).unwrap();
                return Some((self.pad - 1, snippet.len(), snippet.chars().nth(0).unwrap()));
            }
        }
    }
}
/// The OTP finder yields valid one-time passwords
pub struct OTPFinder<'a> {
    // Underlying producer of (pad, run-length, char) candidates.
    finder: HashFinder<'a>,
    // All candidates seen so far (grows as look-ahead is needed).
    snippets: Vec<(u32, usize, char)>,
    // Index of the next candidate in `snippets` to examine.
    pos: usize,
}
impl<'a> OTPFinder<'a> {
    /// Create new OTP finder for the given seed
    fn new(seed: &str, stretch: usize) -> OTPFinder {
        OTPFinder { finder: HashFinder::new(seed, stretch), snippets: Vec::new(), pos: 0 }
    }
}
impl<'a> Iterator for OTPFinder<'a> {
    type Item = u32;
    /// Yields the pad number of the next one-time password: a candidate
    /// whose triple-run character reappears as a 5+ run within the
    /// following 1000 pads.
    fn next(&mut self) -> Option<u32> {
        loop {
            // Pull candidates until we have 1000 pads of look-ahead past
            // the candidate at `self.pos`.
            // NOTE(review): relies on `finder.next()` never returning
            // `None` (HashFinder loops until a match); a `None` here
            // would make this `while` spin forever.
            while self.pos >= self.snippets.len() || self.snippets.last().unwrap().0 < self.snippets[self.pos].0 + 1000 {
                if let Some(snippet) = self.finder.next() {
                    self.snippets.push(snippet);
                }
            }
            let (pad, _, ch) = self.snippets[self.pos];
            self.pos += 1;
            // A key is valid when some later snippet within 1000 pads
            // repeats the same character at least 5 times.
            if self.snippets.iter().any(|&(p, l, c)|
                p > pad && p <= pad+1000 && l >= 5 && c == ch
            ) {
                return Some(pad);
            }
        }
    }
}
/// Run `f` once, returning its result together with the elapsed
/// wall-clock time in seconds.
fn measure_time<T, F: FnMut() -> T>(mut f: F) -> (T, f64) {
    // std::time::Instant replaces the external `time` crate's
    // deprecated precise_time_s(); same (T, f64-seconds) interface.
    use std::time::Instant;
    let start = Instant::now();
    let result = f();
    let elapsed = start.elapsed();
    // Seconds as f64 without `as_secs_f64`, for older-toolchain compatibility.
    let duration = elapsed.as_secs() as f64 + f64::from(elapsed.subsec_nanos()) * 1e-9;
    (result, duration)
}
/// Solve both puzzle parts for the salt "ahsbgdzn", timing each search.
fn main() {
    // Part 1: plain MD5 keys.
    let mut finder = OTPFinder::new("ahsbgdzn", 0);
    let (pad, duration1) = measure_time(|| finder.nth(63).unwrap());
    println!("Index that produces the 64th key (found in {:5.3}s): {}", duration1, pad);
    // Part 2: 2016 extra MD5 rounds per candidate ("stretched" keys).
    let mut finder = OTPFinder::new("ahsbgdzn", 2016);
    let (pad, duration2) = measure_time(|| finder.nth(63).unwrap());
    println!("Index that produces the 64th streched key (found in {:5.3}s): {}", duration2, pad);
}
#[cfg(test)]
mod tests {
    use super::*;
    // Seed "abc" and the expected pads appear to come from the puzzle's
    // worked example — confirm against the problem statement.
    #[test]
    fn finding_digests() {
        let mut finder = HashFinder::new("abc", 0);
        assert_eq!(finder.next(), Some((18, 3, '8')));
        assert_eq!(finder.next(), Some((39, 3, 'e')));
        assert_eq!(finder.skip(6).next(), Some((92, 3, '9')));
    }
    #[test]
    fn finding_stretched_digests() {
        let mut finder = HashFinder::new("abc", 2016);
        assert_eq!(finder.next(), Some((5, 3, '2')));
        assert_eq!(finder.next(), Some((10, 3, 'e')));
    }
    #[test]
    fn finding_otps() {
        let mut finder = OTPFinder::new("abc", 0);
        assert_eq!(finder.next(), Some(39));
        assert_eq!(finder.next(), Some(92));
        assert_eq!(finder.skip(61).next(), Some(22728));
    }
    #[test]
    fn finding_stretched_otps() {
        let mut finder = OTPFinder::new("abc", 2016);
        assert_eq!(finder.next(), Some(10));
        // Presumably disabled for speed (2016 stretch rounds) — TODO confirm.
        // assert_eq!(finder.skip(62).next(), Some(22551));
    }
}
| true
|
08ec5c36343fa2b4b738cc81e7154aea0c6f493e
|
Rust
|
zinla/RustPython
|
/vm/src/protocol/iter.rs
|
UTF-8
| 4,523
| 3.078125
| 3
|
[
"CC-BY-4.0",
"MIT"
] |
permissive
|
use crate::IntoPyObject;
use crate::{
builtins::iter::PySequenceIterator, IntoPyResult, PyObjectRef, PyResult, PyValue,
TryFromObject, TypeProtocol, VirtualMachine,
};
use std::borrow::Borrow;
use std::ops::Deref;
/// Iterator Protocol
// https://docs.python.org/3/c-api/iter.html
#[derive(Debug, Clone)]
#[repr(transparent)]
pub struct PyIter<T = PyObjectRef>(T)
where
    T: Borrow<PyObjectRef>;
impl PyIter<PyObjectRef> {
    /// Unwrap the underlying object reference.
    pub fn into_object(self) -> PyObjectRef {
        self.0
    }
    /// True when `obj`'s type (or an ancestor in its MRO) fills the
    /// `iternext` slot, i.e. the object can act as an iterator.
    pub fn check(obj: &PyObjectRef) -> bool {
        obj.class()
            .mro_find_map(|x| x.slots.iternext.load())
            .is_some()
    }
}
impl<T> PyIter<T>
where
    T: Borrow<PyObjectRef>,
{
    /// Wrap an object already known/assumed to be an iterator.
    pub fn new(obj: T) -> Self {
        Self(obj)
    }
    pub fn as_object(&self) -> &PyObjectRef {
        self.0.borrow()
    }
    /// Advance the iterator via the type's `iternext` slot.
    ///
    /// Errors with a TypeError when the wrapped object has no `iternext`
    /// anywhere in its MRO (i.e. is not actually an iterator).
    pub fn next(&self, vm: &VirtualMachine) -> PyResult<PyIterReturn> {
        let iternext = {
            self.0
                .borrow()
                .class()
                .mro_find_map(|x| x.slots.iternext.load())
                .ok_or_else(|| {
                    vm.new_type_error(format!(
                        "'{}' object is not an iterator",
                        self.0.borrow().class().name()
                    ))
                })?
        };
        iternext(self.0.borrow(), vm)
    }
}
impl<T> Borrow<PyObjectRef> for PyIter<T>
where
    T: Borrow<PyObjectRef>,
{
    fn borrow(&self) -> &PyObjectRef {
        self.0.borrow()
    }
}
impl<T> Deref for PyIter<T>
where
    T: Borrow<PyObjectRef>,
{
    type Target = PyObjectRef;
    fn deref(&self) -> &Self::Target {
        self.0.borrow()
    }
}
impl IntoPyObject for PyIter<PyObjectRef> {
    fn into_pyobject(self, _vm: &VirtualMachine) -> PyObjectRef {
        self.into_object()
    }
}
impl TryFromObject for PyIter<PyObjectRef> {
    // This helper function is called at multiple places. First, it is called
    // in the vm when a for loop is entered. Next, it is used when the builtin
    // function 'iter' is called.
    fn try_from_object(vm: &VirtualMachine, iter_target: PyObjectRef) -> PyResult<Self> {
        // Prefer the type's `__iter__` slot when present.
        let getiter = {
            let cls = iter_target.class();
            cls.mro_find_map(|x| x.slots.iter.load())
        };
        if let Some(getiter) = getiter {
            let iter = getiter(iter_target, vm)?;
            // `__iter__` must return an actual iterator.
            if PyIter::check(&iter) {
                Ok(Self(iter))
            } else {
                Err(vm.new_type_error(format!(
                    "iter() returned non-iterator of type '{}'",
                    iter.class().name()
                )))
            }
        } else {
            // Fall back to index-based iteration over `__getitem__`.
            // TODO: __getitem__ method lookup must be replaced by sequence protocol checking
            vm.get_method_or_type_error(iter_target.clone(), "__getitem__", || {
                format!("'{}' object is not iterable", iter_target.class().name())
            })?;
            Ok(Self(
                PySequenceIterator::new(iter_target)
                    .into_ref(vm)
                    .into_object(),
            ))
        }
    }
}
impl PyObjectRef {
    /// Takes an object and returns an iterator for it.
    /// This is typically a new iterator but if the argument is an iterator, this
    /// returns itself.
    pub fn get_iter(self, vm: &VirtualMachine) -> PyResult<PyIter> {
        // PyObject_GetIter
        PyIter::try_from_object(vm, self)
    }
}
/// Outcome of one `iternext` call: either a yielded value, or
/// StopIteration carrying its optional payload.
pub enum PyIterReturn<T = PyObjectRef> {
    Return(T),
    StopIteration(Option<PyObjectRef>),
}
impl PyIterReturn {
    /// Convert a raw call result, translating a raised StopIteration
    /// exception into `Self::StopIteration` (with its first argument, if
    /// any) and passing every other error through unchanged.
    pub fn from_result(result: PyResult, vm: &VirtualMachine) -> PyResult<Self> {
        match result {
            Ok(obj) => Ok(Self::Return(obj)),
            Err(err) if err.isinstance(&vm.ctx.exceptions.stop_iteration) => {
                let args = err.get_arg(0);
                Ok(Self::StopIteration(args))
            }
            Err(err) => Err(err),
        }
    }
}
impl IntoPyResult for PyIterReturn {
    // Inverse of `from_result`: re-raise StopIteration as an exception.
    fn into_pyresult(self, vm: &VirtualMachine) -> PyResult {
        match self {
            Self::Return(obj) => Ok(obj),
            Self::StopIteration(v) => Err({
                let args = if let Some(v) = v { vec![v] } else { Vec::new() };
                vm.new_exception(vm.ctx.exceptions.stop_iteration.clone(), args)
            }),
        }
    }
}
impl IntoPyResult for PyResult<PyIterReturn> {
    fn into_pyresult(self, vm: &VirtualMachine) -> PyResult {
        self.and_then(|obj| obj.into_pyresult(vm))
    }
}
| true
|
df168f095d43e8a9c2c09b207480ca068dfdabbb
|
Rust
|
xieren58/stabilizer
|
/dsp/src/unwrap.rs
|
UTF-8
| 4,498
| 3.34375
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
use serde::{Deserialize, Serialize};
/// Wrapping subtraction `y - x` that also reports signed overflow.
///
/// Very similar to `i32::overflowing_sub(y, x)`, except the overflow
/// indicator is the signum of the overflow (-1, 0, +1) rather than a
/// boolean; this form is typically faster.
///
/// Returns:
/// The (wrapped) difference `y - x` together with the overflow signum.
#[inline(always)]
pub fn overflowing_sub(y: i32, x: i32) -> (i32, i8) {
    let difference = y.wrapping_sub(x);
    // Overflow occurred iff the sign of the wrapped difference disagrees
    // with the true ordering of y and x.
    let overflow = (difference >= 0) as i8 - (y >= x) as i8;
    (difference, overflow)
}
/// Combine high and low i32 words into a single downscaled i32,
/// saturating monotonically.
///
/// Args:
/// `lo`: LSB i32, scaled down by `shift` and range-extended with `hi`
/// `hi`: MSB i32 scaled up to extend `lo`; the output is clipped when
///    `hi` alone would exceed the i32 output range.
/// `shift`: Downscale `lo` by that many bits; 1..=32 inclusive.
///    NOTE(review): `shift == 32` makes `lo >> shift` overflow-check in
///    debug builds — confirm the intended upper bound.
pub fn saturating_scale(lo: i32, hi: i32, shift: u32) -> i32 {
    debug_assert!(shift > 0);
    debug_assert!(shift <= 32);
    let limit = -1 << (shift - 1);
    if hi <= limit {
        // Saturate at the low end.
        i32::MIN - limit
    } else if -hi <= limit {
        // Saturate at the high end.
        limit - i32::MIN
    } else {
        (lo >> shift) + (hi << (32 - shift))
    }
}
/// Overflow unwrapper.
///
/// This is unwrapping as in the phase and overflow unwrapping context, not
/// unwrapping as in the `Result`/`Option` context.
#[derive(Copy, Clone, Default, Deserialize, Serialize)]
pub struct Unwrapper {
    // last input
    x: i32,
    // last wraps (running signed wrap count; itself wraps on i32 overflow)
    w: i32,
}
impl Unwrapper {
    /// Feed one sample into the unwrapper and update its state.
    ///
    /// Args:
    /// * `x`: New sample
    ///
    /// Returns:
    /// The (wrapped) difference `x - x_old` together with the total
    /// signed wrap count accumulated so far.
    pub fn update(&mut self, x: i32) -> (i32, i32) {
        let (delta, wraps) = overflowing_sub(x, self.x);
        self.x = x;
        self.w = self.w.wrapping_add(i32::from(wraps));
        (delta, self.w)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Table rows are (x0, x1, expected wrap) checked against
    // overflowing_sub(x1, x0), cross-validated with i32::overflowing_sub.
    #[test]
    fn overflowing_sub_correctness() {
        for (x0, x1, v) in [
            (0i32, 0i32, 0i8),
            (0, 1, 0),
            (0, -1, 0),
            (1, 0, 0),
            (-1, 0, 0),
            (0, 0x7fff_ffff, 0),
            (-1, 0x7fff_ffff, -1),
            (-2, 0x7fff_ffff, -1),
            (-1, -0x8000_0000, 0),
            (0, -0x8000_0000, 0),
            (1, -0x8000_0000, 1),
            (-0x6000_0000, 0x6000_0000, -1),
            (0x6000_0000, -0x6000_0000, 1),
            (-0x4000_0000, 0x3fff_ffff, 0),
            (-0x4000_0000, 0x4000_0000, -1),
            (-0x4000_0000, 0x4000_0001, -1),
            (0x4000_0000, -0x3fff_ffff, 0),
            (0x4000_0000, -0x4000_0000, 0),
            (0x4000_0000, -0x4000_0001, 1),
        ]
        .iter()
        {
            let (dx, w) = overflowing_sub(*x1, *x0);
            assert_eq!(*v, w, " = overflowing_sub({:#x}, {:#x})", *x0, *x1);
            // The wrapped difference must agree with the std implementation.
            let (dx0, w0) = x1.overflowing_sub(*x0);
            assert_eq!(w0, w != 0);
            assert_eq!(dx, dx0);
        }
    }
    // Table rows are (lo, hi, expected result) at a fixed shift of 8,
    // covering both saturation branches and the pass-through arm.
    #[test]
    fn saturating_scale_correctness() {
        let shift = 8;
        for (lo, hi, res) in [
            (0i32, 0i32, 0i32),
            (0, 1, 0x0100_0000),
            (0, -1, -0x0100_0000),
            (0x100, 0, 1),
            (-1 << 31, 0, -1 << 23),
            (0x7fffffff, 0, 0x007f_ffff),
            (0x7fffffff, 1, 0x0017f_ffff),
            (-0x7fffffff, -1, -0x0180_0000),
            (0x1234_5600, 0x7f, 0x7f12_3456),
            (0x1234_5600, -0x7f, -0x7f00_0000 + 0x12_3456),
            (0, 0x7f, 0x7f00_0000),
            (0, 0x80, 0x7fff_ff80),
            (0, -0x7f, -0x7f00_0000),
            (0, -0x80, -0x7fff_ff80),
            (0x7fff_ffff, 0x7f, 0x7f7f_ffff),
            (-0x8000_0000, 0x7f, 0x7e80_0000),
            (-0x8000_0000, -0x7f, -0x7f80_0000),
            (0x7fff_ffff, -0x7f, -0x7e80_0001),
            (0x100, 0x7f, 0x7f00_0001),
            (0, -0x80, -0x7fff_ff80),
            (-1 << 31, 0x80, 0x7fff_ff80),
            (-1 << 31, -0x80, -0x7fff_ff80),
        ]
        .iter()
        {
            let s = saturating_scale(*lo, *hi, shift);
            assert_eq!(
                *res, s,
                "{:#x} != {:#x} = saturating_scale({:#x}, {:#x}, {:#x})",
                *res, s, *lo, *hi, shift
            );
        }
    }
}
| true
|
90354aedef1161ce989f22e7059886f9313c7de2
|
Rust
|
ltruchot/haiku-generator
|
/src/adj.rs
|
UTF-8
| 3,607
| 3.046875
| 3
|
[] |
no_license
|
// IMPORTS
// commons
use crate::common_enums;
use common_enums::{Gender, Number};
// wordgroups
use crate::wordgroup;
use wordgroup::{WordGroup, get_plural};
// strings
use crate::string;
use string::{drop_last_graphemes, take_last_grapheme, take_last_graphemes};
// adjectives
use crate::adj_enums;
use adj_enums::AdjId;
// EXPORTS
#[derive(Clone)]
pub struct Adj {
    pub id: AdjId,
    // Irregular agreed forms; `None` means "derive mechanically"
    // (see Adj::agreed).
    pub fem: Option<String>,
    pub masc_plur: Option<String>,
    pub fem_plur: Option<String>,
    // When true, the adjective never changes form.
    pub invariable: bool,
    // Base (masculine singular) form.
    pub word: WordGroup,
}
impl Adj {
    /// Return the form of this adjective agreed for `gender` and `number`,
    /// preferring an explicitly stored irregular form and otherwise
    /// deriving one mechanically.
    pub fn agreed(&self, gender: Gender, number: Number) -> WordGroup {
        // Invariable adjectives keep their base form everywhere.
        if self.invariable {
            return self.word.clone();
        }
        match (gender, number) {
            (Gender::Male, Number::Singular) => self.word.clone(),
            (Gender::Male, Number::Plural) => match &self.masc_plur {
                Some(masc) => WordGroup {
                    text: String::from(masc),
                    foots: self.word.foots,
                },
                None => get_plural(&self.word),
            },
            (Gender::Female, Number::Singular) => match &self.fem {
                Some(fem) => WordGroup {
                    text: String::from(fem),
                    foots: self.word.foots,
                },
                None => get_feminine(&self.word),
            },
            (Gender::Female, Number::Plural) => match &self.fem_plur {
                Some(fem_plur) => WordGroup {
                    text: String::from(fem_plur),
                    foots: self.word.foots,
                },
                None => get_plural(&get_feminine(&self.word)),
            },
        }
    }
    /// Build a regular adjective from its masculine singular form.
    pub fn new(id: AdjId, masc: &str, foots: (u8, u8)) -> Adj {
        Adj {
            id,
            fem: None,
            masc_plur: None,
            fem_plur: None,
            invariable: false,
            word: WordGroup {
                text: String::from(masc),
                foots,
            },
        }
    }
    /// Build an adjective with irregular and/or invariable forms.
    pub fn new_special(
        id: AdjId,
        masc: &str,
        fem: Option<String>,
        masc_plur: Option<String>,
        fem_plur: Option<String>,
        invariable: bool,
        foots: (u8, u8),
    ) -> Adj {
        Adj {
            id,
            fem,
            masc_plur,
            fem_plur,
            invariable,
            word: WordGroup {
                text: String::from(masc),
                foots,
            },
        }
    }
}
/// Derive the feminine singular of a masculine adjective by the usual
/// French spelling rules: -e unchanged, -eux/-eur -> -euse,
/// -er -> -ère, -et -> -ette, otherwise append -e.
fn get_feminine(wg: &WordGroup) -> WordGroup {
    // Rebuild the word with `dropped` trailing graphemes replaced by `suffix`.
    let rebuilt = |dropped: usize, suffix: &str| WordGroup {
        text: [&drop_last_graphemes(&wg.text, dropped), suffix].join(""),
        foots: wg.foots,
    };
    if take_last_grapheme(&wg.text) == "e" {
        // Already ends in -e: the feminine is identical.
        wg.clone()
    } else {
        let last_two = take_last_graphemes(&wg.text, 2);
        let last_three = take_last_graphemes(&wg.text, 3);
        if last_three == "eux" || last_three == "eur" {
            rebuilt(3, "euse")
        } else if last_two == "er" {
            rebuilt(2, "ère")
        } else if last_two == "et" {
            rebuilt(2, "ette")
        } else {
            WordGroup {
                text: [&wg.text, "e"].join(""),
                foots: wg.foots,
            }
        }
    }
}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.