- use http::status::StatusCode;
- use http_body_util::{BodyExt, Empty};
- use hyper::{body::Bytes, Body, Client, Method, Request};
- use hyper_util::rt::TokioExecutor;
- use lazy_static::lazy_static;
- //use select::{document::Document, predicate::Name};
- use serde::{Deserialize, Serialize};
- use sled::Db;
- use std::{
- collections::{btree_map, hash_map, BTreeMap, HashMap, HashSet},
- fmt,
- };
- use x25519_dalek::{PublicKey, StaticSecret};
- #[cfg(any(feature = "simulation", test))]
- use {
- chrono::{DateTime, Utc},
- julianday::JulianDay,
- std::{path::Path, time::UNIX_EPOCH},
- };
- pub mod analysis;
- pub mod bridge_verification_info;
- pub mod crypto;
- pub mod extra_info;
- pub mod negative_report;
- pub mod positive_report;
- pub mod request_handler;
- #[cfg(feature = "simulation")]
- pub mod simulation {
- pub mod bridge;
- pub mod censor;
- pub mod config;
- pub mod extra_infos_server;
- pub mod user;
- }
- #[cfg(all(test, not(feature = "simulation")))]
- pub mod simulation {
- pub mod extra_infos_server;
- }
- use analysis::Analyzer;
- use extra_info::*;
- use negative_report::*;
- use positive_report::*;
- lazy_static! {
- // known country codes based on Tor geoIP database
- // Produced with `cat /usr/share/tor/geoip{,6} | grep -v ^# | grep -o ..$ | sort | uniq | tr '[:upper:]' '[:lower:]' | tr '\n' ',' | sed 's/,/","/g'`
- pub static ref COUNTRY_CODES: HashSet<&'static str> = HashSet::from(["??","ac","ad","ae","af","ag","ai","al","am","an","ao","ap","aq","ar","as","at","au","aw","ax","az","ba","bb","bd","be","bf","bg","bh","bi","bj","bl","bm","bn","bo","bq","br","bs","bt","bv","bw","by","bz","ca","cc","cd","cf","cg","ch","ci","ck","cl","cm","cn","co","cr","cs","cu","cv","cw","cx","cy","cz","de","dg","dj","dk","dm","do","dz","ea","ec","ee","eg","eh","er","es","et","eu","fi","fj","fk","fm","fo","fr","ga","gb","gd","ge","gf","gg","gh","gi","gl","gm","gn","gp","gq","gr","gs","gt","gu","gw","gy","hk","hm","hn","hr","ht","hu","ic","id","ie","il","im","in","io","iq","ir","is","it","je","jm","jo","jp","ke","kg","kh","ki","km","kn","kp","kr","kw","ky","kz","la","lb","lc","li","lk","lr","ls","lt","lu","lv","ly","ma","mc","md","me","mf","mg","mh","mk","ml","mm","mn","mo","mp","mq","mr","ms","mt","mu","mv","mw","mx","my","mz","na","nc","ne","nf","ng","ni","nl","no","np","nr","nu","nz","om","pa","pe","pf","pg","ph","pk","pl","pm","pn","pr","ps","pt","pw","py","qa","re","ro","rs","ru","rw","sa","sb","sc","sd","se","sg","sh","si","sj","sk","sl","sm","sn","so","sr","ss","st","sv","sx","sy","sz","ta","tc","td","tf","tg","th","tj","tk","tl","tm","tn","to","tr","tt","tv","tw","tz","ua","ug","uk","um","un","us","uy","uz","va","vc","ve","vg","vi","vn","vu","wf","ws","ye","yt","za","zm","zw"]);
- }
- /// We will accept reports up to this many days old.
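- ///
- /// For example, a freshness check against this constant might look like the
- /// sketch below (`report_date` is a hypothetical Julian day number; the
- /// actual validation lives in the report-handling modules):
- /// ```ignore
- /// let acceptable = report_date + MAX_BACKDATE >= get_date();
- /// ```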
- pub const MAX_BACKDATE: u32 = 3;
- #[cfg(any(feature = "simulation", test))]
- const FAKETIME_FILE: &str = "/tmp/troll-patrol-faketime";
- /// Get real or simulated Julian date
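- ///
- /// Dates are Julian day numbers (e.g., 2024-01-01 is day 2460311). A minimal
- /// sketch, assuming the crate is built as `troll_patrol`:
- /// ```no_run
- /// let today: u32 = troll_patrol::get_date();
- /// ```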
- pub fn get_date() -> u32 {
- // If this is a simulation, get the simulated date
- #[cfg(any(feature = "simulation", test))]
- return get_simulated_date();
- // If we're not running a simulation, return today's date
- #[allow(unreachable_code)]
- get_real_date()
- }
- fn get_real_date() -> u32 {
- time::OffsetDateTime::now_utc()
- .date()
- .to_julian_day()
- .try_into()
- .unwrap()
- }
- #[cfg(any(feature = "simulation", test))]
- fn get_simulated_date() -> u32 {
- faketime::enable(Path::new(FAKETIME_FILE));
- JulianDay::from(DateTime::<Utc>::from(UNIX_EPOCH + faketime::unix_time()).date_naive())
- .inner()
- .try_into()
- .unwrap()
- }
- #[cfg(any(feature = "simulation", test))]
- pub fn set_simulated_date(date: u32) {
- faketime::enable(Path::new(FAKETIME_FILE));
- let unix_date_ms = DateTime::<Utc>::from_naive_utc_and_offset(
- JulianDay::new(date.try_into().unwrap()).to_date().into(),
- Utc,
- )
- .timestamp_millis();
- //str.push_str(format!("\nbridge-stats-end {} 23:59:59 (86400 s)", date).as_str());
- faketime::write_millis(Path::new(FAKETIME_FILE), unix_date_ms.try_into().unwrap()).unwrap();
- }
- #[cfg(any(feature = "simulation", test))]
- pub fn increment_simulated_date() {
- let date = get_date();
- set_simulated_date(date + 1);
- }
- #[cfg(any(feature = "simulation", test))]
- pub fn reset_simulated_date() {
- set_simulated_date(get_real_date());
- }
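- /// Bridge distributor that can receive reports (currently only Lox).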
- #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
- pub enum BridgeDistributor {
- Lox,
- }
- /// All the info for a bridge, to be stored in the database
- #[derive(Serialize, Deserialize)]
- pub struct BridgeInfo {
- /// hashed fingerprint (SHA-1 hash of 20-byte bridge ID)
- pub fingerprint: [u8; 20],
- /// nickname of bridge (probably not necessary)
- pub nickname: String,
- /// map of countries to data for this bridge in that country
- pub info_by_country: HashMap<String, BridgeCountryInfo>,
- }
- impl BridgeInfo {
- pub fn new(fingerprint: [u8; 20], nickname: &str) -> Self {
- Self {
- fingerprint,
- nickname: nickname.to_string(),
- info_by_country: HashMap::<String, BridgeCountryInfo>::new(),
- }
- }
- }
- impl fmt::Display for BridgeInfo {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let mut str = format!(
- "fingerprint:{}\n",
- array_bytes::bytes2hex("", self.fingerprint).as_str()
- );
- str.push_str(format!("nickname: {}\n", self.nickname).as_str());
- //str.push_str(format!("first_seen: {}\n", self.first_seen).as_str());
- str.push_str("info_by_country:");
- for country in self.info_by_country.keys() {
- str.push_str(format!("\n country: {}", country).as_str());
- let country_info = self.info_by_country.get(country).unwrap();
- for line in country_info.to_string().lines() {
- str.push_str(format!("\n {}", line).as_str());
- }
- }
- write!(f, "{}", str)
- }
- }
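- /// Category of per-day data tracked for a bridge in a given country.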
- #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
- pub enum BridgeInfoType {
- BridgeIps,
- NegativeReports,
- PositiveReports,
- }
- /// Information about bridge reachability from a given country
- #[derive(Serialize, Deserialize)]
- pub struct BridgeCountryInfo {
- pub info_by_day: BTreeMap<u32, BTreeMap<BridgeInfoType, u32>>,
- pub blocked: bool,
- /// first Julian date we saw data from this country for this bridge
- pub first_seen: u32,
- /// first Julian date we saw a positive report from this country for this bridge
- pub first_pr: Option<u32>,
- }
- impl BridgeCountryInfo {
- pub fn new(first_seen: u32) -> Self {
- Self {
- info_by_day: BTreeMap::<u32, BTreeMap<BridgeInfoType, u32>>::new(),
- blocked: false,
- first_seen,
- first_pr: None,
- }
- }
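- /// Record one day's observation. A doctest-style sketch of the merge rules
- /// (dates are Julian day numbers and the crate is assumed to be named
- /// `troll_patrol`): the highest `BridgeIps` value seen for a date wins,
- /// while report counts accumulate.
- /// ```
- /// use troll_patrol::{BridgeCountryInfo, BridgeInfoType};
- /// let mut info = BridgeCountryInfo::new(2460311);
- /// info.add_info(BridgeInfoType::BridgeIps, 2460311, 8);
- /// info.add_info(BridgeInfoType::BridgeIps, 2460311, 16);
- /// info.add_info(BridgeInfoType::NegativeReports, 2460311, 1);
- /// info.add_info(BridgeInfoType::NegativeReports, 2460311, 2);
- /// assert_eq!(info.info_by_day[&2460311][&BridgeInfoType::BridgeIps], 16);
- /// assert_eq!(info.info_by_day[&2460311][&BridgeInfoType::NegativeReports], 3);
- /// ```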
- pub fn add_info(&mut self, info_type: BridgeInfoType, date: u32, count: u32) {
- if let btree_map::Entry::Vacant(e) = self.info_by_day.entry(date) {
- let mut info = BTreeMap::<BridgeInfoType, u32>::new();
- info.insert(info_type, count);
- e.insert(info);
- } else {
- let info = self.info_by_day.get_mut(&date).unwrap();
- if !info.contains_key(&info_type) {
- info.insert(info_type, count);
- } else if info_type == BridgeInfoType::BridgeIps {
- if *info.get(&info_type).unwrap() < count {
- // Use highest value we've seen today
- info.insert(info_type, count);
- }
- } else {
- // Add count to previous count for reports
- let new_count = info.get(&info_type).unwrap() + count;
- info.insert(info_type, new_count);
- }
- }
- // If this is the first instance of positive reports, save the date
- if self.first_pr.is_none() && info_type == BridgeInfoType::PositiveReports && count > 0 {
- self.first_pr = Some(date);
- }
- }
- }
- impl fmt::Display for BridgeCountryInfo {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let mut str = format!("blocked: {}\n", self.blocked);
- str.push_str(format!("first seen: {}\n", self.first_seen).as_str());
- let first_pr = match self.first_pr {
- Some(date) => date.to_string(),
- None => "never".to_string(),
- };
- str.push_str(format!("first positive report observed: {}\n", first_pr).as_str());
- str.push_str("info:");
- for date in self.info_by_day.keys() {
- let info = self.info_by_day.get(date).unwrap();
- let ip_count = match info.get(&BridgeInfoType::BridgeIps) {
- Some(&v) => v,
- None => 0,
- };
- let nr_count = match info.get(&BridgeInfoType::NegativeReports) {
- Some(&v) => v,
- None => 0,
- };
- let pr_count = match info.get(&BridgeInfoType::PositiveReports) {
- Some(&v) => v,
- None => 0,
- };
- if ip_count > 0 || nr_count > 0 || pr_count > 0 {
- str.push_str(
- format!(
- "\n date: {}\n connections: {}\n negative reports: {}\n positive reports: {}",
- date,
- ip_count,
- nr_count,
- pr_count,
- )
- .as_str(),
- );
- }
- }
- write!(f, "{}", str)
- }
- }
- /// We store a set of all known bridges so that we can later iterate over them.
- /// This function just adds a bridge fingerprint to that set.
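- ///
- /// A minimal usage sketch (the database path is illustrative and the crate
- /// is assumed to be named `troll_patrol`):
- /// ```no_run
- /// let db = sled::open("/tmp/troll-patrol-example").unwrap();
- /// troll_patrol::add_bridge_to_db(&db, [0u8; 20]);
- /// ```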
- pub fn add_bridge_to_db(db: &Db, fingerprint: [u8; 20]) {
- let mut bridges = match db.get("bridges").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => HashSet::<[u8; 20]>::new(),
- };
- bridges.insert(fingerprint);
- db.insert("bridges", bincode::serialize(&bridges).unwrap())
- .unwrap();
- }
- /// Download a web page and return its body as a string.
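- ///
- /// A calling sketch from an async context (the URL is illustrative and the
- /// crate is assumed to be named `troll_patrol`):
- /// ```no_run
- /// # async fn demo() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
- /// let page = troll_patrol::download("https://example.com/").await?;
- /// println!("fetched {} bytes", page.len());
- /// # Ok(())
- /// # }
- /// ```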
- pub async fn download(url: &str) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
- if url.starts_with("https://") {
- let https = hyper_rustls::HttpsConnectorBuilder::new()
- .with_native_roots()
- .expect("no native root CA certificates found")
- .https_only()
- .enable_http1()
- .build();
- let client: hyper_util::client::legacy::Client<_, Empty<Bytes>> =
- hyper_util::client::legacy::Client::builder(TokioExecutor::new()).build(https);
- println!("Downloading {}", url);
- let mut res = client.get(url.parse()?).await?;
- assert_eq!(res.status(), StatusCode::OK);
- let mut body_str = String::default();
- while let Some(next) = res.frame().await {
- let frame = next?;
- if let Some(chunk) = frame.data_ref() {
- body_str.push_str(&String::from_utf8(chunk.to_vec())?);
- }
- }
- Ok(body_str)
- } else {
- let client: hyper_util::client::legacy::Client<_, Empty<Bytes>> =
- hyper_util::client::legacy::Client::builder(TokioExecutor::new()).build_http();
- println!("Downloading {}", url);
- let mut res = client.get(url.parse()?).await?;
- assert_eq!(res.status(), StatusCode::OK);
- let mut body_str = String::default();
- while let Some(next) = res.frame().await {
- let frame = next?;
- if let Some(chunk) = frame.data_ref() {
- body_str.push_str(&String::from_utf8(chunk.to_vec())?);
- }
- }
- Ok(body_str)
- }
- }
- // Process extra-infos
- /// Adds the extra-info data for a single bridge to the database. If the
- /// database already contains an extra-info for this bridge on this date,
- /// but the new extra-info contains different data for some reason, use the
- /// greater count of connections from each country.
- pub fn add_extra_info_to_db(db: &Db, extra_info: ExtraInfo) {
- let fingerprint = extra_info.fingerprint;
- let mut bridge_info = match db.get(fingerprint).unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => {
- add_bridge_to_db(db, fingerprint);
- BridgeInfo::new(fingerprint, &extra_info.nickname)
- }
- };
- for country in extra_info.bridge_ips.keys() {
- if bridge_info.info_by_country.contains_key::<String>(country) {
- bridge_info
- .info_by_country
- .get_mut(country)
- .unwrap()
- .add_info(
- BridgeInfoType::BridgeIps,
- extra_info.date,
- *extra_info.bridge_ips.get(country).unwrap(),
- );
- } else {
- // No existing entry; make a new one.
- let mut bridge_country_info = BridgeCountryInfo::new(extra_info.date);
- bridge_country_info.add_info(
- BridgeInfoType::BridgeIps,
- extra_info.date,
- *extra_info.bridge_ips.get(country).unwrap(),
- );
- bridge_info
- .info_by_country
- .insert(country.to_string(), bridge_country_info);
- }
- }
- // Commit changes to database
- db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
- .unwrap();
- }
- /// Download new extra-infos files and add their data to the database
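- ///
- /// A usage sketch (the CollecTor directory URL and database path are
- /// illustrative assumptions; the crate is assumed to be named `troll_patrol`):
- /// ```no_run
- /// # async fn demo() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
- /// let db = sled::open("/tmp/troll-patrol-example")?;
- /// let url = "https://collector.torproject.org/recent/bridge-descriptors/extra-infos/";
- /// troll_patrol::update_extra_infos(&db, url).await?;
- /// # Ok(())
- /// # }
- /// ```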
- pub async fn update_extra_infos(
- db: &Db,
- base_url: &str,
- ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
- // Track which files have been processed. This is slightly redundant
- // because we're only downloading files we don't already have, but it
- // might be a good idea to check in case we downloaded a file but didn't
- // process it for some reason.
- let mut processed_extra_infos_files = match db.get(b"extra_infos_files").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => HashSet::<String>::new(),
- };
- let dir_page = download(base_url).await?;
- // Parsing the page with the `select` crate causes Send issues in this
- // async fn, so we use the simpler (but less robust) manual link
- // extraction below instead.
- //let doc = Document::from(dir_page.as_str());
- //let links = doc.find(Name("a")).filter_map(|n| n.attr("href"));
- let mut links = HashSet::<String>::new();
- for line in dir_page.lines() {
- let begin_match = "<a href=\"";
- let end_match = "\">";
- if line.contains(begin_match) {
- let link = &line[line.find(begin_match).unwrap() + begin_match.len()..];
- if link.contains(end_match) {
- let link = &link[0..link.find(end_match).unwrap()];
- links.insert(link.to_string());
- }
- }
- }
- let mut new_extra_infos = HashSet::<ExtraInfo>::new();
- // We should now have an iterable collection of links to consider downloading.
- for link in links {
- if link.ends_with("-extra-infos") && !processed_extra_infos_files.contains(&link) {
- let extra_infos_url = format!("{}{}", base_url, link);
- let extra_info_str = download(&extra_infos_url).await?;
- //ExtraInfo::parse_file(&extra_info_str, &mut new_extra_infos);
- let extra_infos = ExtraInfo::parse_file(&extra_info_str);
- new_extra_infos.extend(extra_infos);
- processed_extra_infos_files.insert(link);
- }
- }
- // Add new extra-infos data to database
- for extra_info in new_extra_infos {
- add_extra_info_to_db(db, extra_info);
- }
- // Store which files we've already downloaded and processed
- db.insert(
- b"extra_infos_files",
- bincode::serialize(&processed_extra_infos_files).unwrap(),
- )
- .unwrap();
- Ok(())
- }
- // Process negative reports
- /// If there is already a negative report ECDH key for this date, return None.
- /// Otherwise, generate a new keypair, save the secret part in the db, and
- /// return the public part.
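- ///
- /// A usage sketch (the database path is illustrative; the crate is assumed
- /// to be named `troll_patrol`):
- /// ```no_run
- /// use troll_patrol::*;
- /// let db = sled::open("/tmp/troll-patrol-example").unwrap();
- /// let date = get_date();
- /// // The first call for a given date creates and returns a public key...
- /// assert!(new_negative_report_key(&db, date).is_some());
- /// // ...repeat calls for that date return None,
- /// assert!(new_negative_report_key(&db, date).is_none());
- /// // but the stored key material remains available.
- /// assert!(get_negative_report_public_key(&db, date).is_some());
- /// assert!(get_negative_report_secret_key(&db, date).is_some());
- /// ```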
- pub fn new_negative_report_key(db: &Db, date: u32) -> Option<PublicKey> {
- let mut nr_keys = if !db.contains_key("nr-keys").unwrap() {
- BTreeMap::<u32, StaticSecret>::new()
- } else {
- match bincode::deserialize(&db.get("nr-keys").unwrap().unwrap()) {
- Ok(v) => v,
- Err(_) => BTreeMap::<u32, StaticSecret>::new(),
- }
- };
- if let btree_map::Entry::Vacant(_e) = nr_keys.entry(date) {
- let rng = rand::thread_rng();
- let secret = StaticSecret::random_from_rng(rng);
- let public = PublicKey::from(&secret);
- nr_keys.insert(date, secret);
- db.insert("nr-keys", bincode::serialize(&nr_keys).unwrap())
- .unwrap();
- Some(public)
- } else {
- None
- }
- }
- /// If we have a key for the requested day, return the secret part.
- pub fn get_negative_report_secret_key(db: &Db, date: u32) -> Option<StaticSecret> {
- if db.contains_key("nr-keys").unwrap() {
- let nr_keys: BTreeMap<u32, StaticSecret> =
- match bincode::deserialize(&db.get("nr-keys").unwrap().unwrap()) {
- Ok(map) => map,
- Err(_) => {
- return None;
- }
- };
- if nr_keys.contains_key(&date) {
- let secret = nr_keys.get(&date).unwrap();
- Some(secret.clone())
- } else {
- None
- }
- } else {
- None
- }
- }
- /// If we have a key for the requested day, return the public part.
- pub fn get_negative_report_public_key(db: &Db, date: u32) -> Option<PublicKey> {
- get_negative_report_secret_key(db, date).map(|secret| PublicKey::from(&secret))
- }
- /// Receive an encrypted negative report. Attempt to decrypt it and if
- /// successful, add it to the database to be processed later.
- pub fn handle_encrypted_negative_report(db: &Db, enc_report: EncryptedNegativeReport) {
- if let Some(secret) = get_negative_report_secret_key(db, enc_report.date) {
- if let Ok(nr) = enc_report.decrypt(&secret) {
- save_negative_report_to_process(db, nr);
- }
- }
- }
- /// We store to-be-processed negative reports as a vector. Add this NR
- /// to that vector (or create a new vector if necessary).
- pub fn save_negative_report_to_process(db: &Db, nr: NegativeReport) {
- // TODO: Periodically purge old per-date nonce sets from the database
- let mut nonces = match db.get(format!("nonces_{}", &nr.date)).unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => HashSet::<[u8; 32]>::new(),
- };
- // Just ignore the report if we've seen the nonce before
- if nonces.insert(nr.nonce) {
- db.insert(
- format!("nonces_{}", &nr.date),
- bincode::serialize(&nonces).unwrap(),
- )
- .unwrap();
- let mut reports = match db.get("nrs-to-process").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => BTreeMap::<String, Vec<SerializableNegativeReport>>::new(),
- };
- // Store to-be-processed reports with key [fingerprint]_[country]_[date]
- let map_key = format!(
- "{}_{}_{}",
- array_bytes::bytes2hex("", nr.fingerprint),
- &nr.country,
- &nr.date,
- );
- if let btree_map::Entry::Vacant(e) = reports.entry(map_key.clone()) {
- let nrs = vec![nr.to_serializable_report()];
- e.insert(nrs);
- } else {
- reports
- .get_mut(&map_key)
- .unwrap()
- .push(nr.to_serializable_report());
- }
- // Commit changes to database
- db.insert("nrs-to-process", bincode::serialize(&reports).unwrap())
- .unwrap();
- }
- }
- /// Sends a collection of negative reports to the Lox Authority and returns the
- /// number of valid reports returned by the server. The negative reports in the
- /// collection should all have the same bridge fingerprint, date, country, and
- /// distributor.
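- ///
- /// A calling sketch (`reports` stands for a previously collected batch of
- /// serializable negative reports; the distributor address is illustrative):
- /// ```ignore
- /// let mut distributors = BTreeMap::new();
- /// distributors.insert(BridgeDistributor::Lox, "http://localhost:8001".to_string());
- /// let valid_count = verify_negative_reports(&distributors, &reports).await;
- /// ```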
- pub async fn verify_negative_reports(
- distributors: &BTreeMap<BridgeDistributor, String>,
- reports: &Vec<SerializableNegativeReport>,
- ) -> u32 {
- // Don't make a network call if we don't have any reports anyway
- if reports.is_empty() {
- return 0;
- }
- // Get one report, assume the rest have the same distributor
- let first_report = &reports[0];
- let distributor = first_report.distributor;
- let client = Client::new();
- let uri: String = (distributors.get(&distributor).unwrap().to_owned() + "/verifynegative")
- .parse()
- .unwrap();
- let req = Request::builder()
- .method(Method::POST)
- .uri(uri)
- .body(Body::from(serde_json::to_string(&reports).unwrap()))
- .unwrap();
- let resp = client.request(req).await.unwrap();
- let buf = hyper::body::to_bytes(resp).await.unwrap();
- serde_json::from_slice(&buf).unwrap()
- }
- /// Process today's negative reports and store the count of verified reports in
- /// the database.
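- ///
- /// A sketch of where this fits in a daily processing pass (the ordering shown
- /// is illustrative, not prescribed by this crate; `distributors`, `analyzer`,
- /// and the thresholds are assumed to be configured elsewhere):
- /// ```ignore
- /// update_extra_infos(&db, &extra_infos_base_url).await?;
- /// update_negative_reports(&db, &distributors).await;
- /// update_positive_reports(&db, &distributors).await;
- /// let blockages = guess_blockages(&db, &analyzer, confidence, min_days, max_days);
- /// report_blockages(&distributors, blockages).await;
- /// ```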
- pub async fn update_negative_reports(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
- let all_negative_reports = match db.get("nrs-to-process").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => BTreeMap::<String, Vec<SerializableNegativeReport>>::new(),
- };
- let mut bridges_to_re_evaluate = match db.get("bridges-to-re-evaluate").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => HashMap::<String, u32>::new(),
- // We map fingerprint:date where date is the earliest date for
- // which we have new reports
- };
- let today = get_date();
- // Key is [fingerprint]_[country]_[date]
- for bridge_country_date in all_negative_reports.keys() {
- let reports = all_negative_reports.get(bridge_country_date).unwrap();
- if !reports.is_empty() {
- let first_report = &reports[0];
- let fingerprint = first_report.fingerprint;
- let date = first_report.date;
- let country = first_report.country.clone();
- let count_valid = verify_negative_reports(distributors, reports).await;
- // If we have new historical data, re-evaluate this bridge
- if count_valid > 0 && date < today {
- let fpr_str = array_bytes::bytes2hex("", fingerprint);
- if bridges_to_re_evaluate.contains_key(&fpr_str) {
- if *bridges_to_re_evaluate.get(&fpr_str).unwrap() > date {
- bridges_to_re_evaluate.insert(fpr_str, date);
- }
- } else {
- bridges_to_re_evaluate.insert(fpr_str, date);
- }
- }
- // Get bridge info or make new one
- let mut bridge_info = match db.get(fingerprint).unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => {
- // This case shouldn't happen unless the bridge hasn't
- // published any bridge stats.
- add_bridge_to_db(db, fingerprint);
- BridgeInfo::new(fingerprint, &String::default())
- }
- };
- // Add the new report count to it
- if let hash_map::Entry::Vacant(e) = bridge_info.info_by_country.entry(country.clone())
- {
- // No existing entry; make a new one.
- let mut bridge_country_info = BridgeCountryInfo::new(date);
- bridge_country_info.add_info(BridgeInfoType::NegativeReports, date, count_valid);
- e.insert(bridge_country_info);
- } else {
- let bridge_country_info = bridge_info.info_by_country.get_mut(&country).unwrap();
- bridge_country_info.add_info(BridgeInfoType::NegativeReports, date, count_valid);
- }
- // Commit changes to database
- db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
- .unwrap();
- }
- }
- // Remove the now-processed reports from the database
- db.insert(
- "nrs-to-process",
- bincode::serialize(&BTreeMap::<String, Vec<SerializableNegativeReport>>::new()).unwrap(),
- )
- .unwrap();
- // Commit new set of bridges to re-evaluate
- db.insert(
- "bridges-to-re-evaluate",
- bincode::serialize(&bridges_to_re_evaluate).unwrap(),
- )
- .unwrap();
- }
- // Process positive reports
- /// We store to-be-processed positive reports as a vector. Add this PR
- /// to that vector (or create a new vector if necessary).
- pub fn save_positive_report_to_process(db: &Db, pr: PositiveReport) {
- let mut reports = match db.get("prs-to-process").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => BTreeMap::<String, Vec<SerializablePositiveReport>>::new(),
- };
- // Store to-be-processed reports with key [fingerprint]_[country]_[date]
- let map_key = format!(
- "{}_{}_{}",
- array_bytes::bytes2hex("", pr.fingerprint),
- &pr.country,
- &pr.date,
- );
- if let btree_map::Entry::Vacant(e) = reports.entry(map_key.clone()) {
- let prs = vec![pr.to_serializable_report()];
- e.insert(prs);
- } else {
- reports
- .get_mut(&map_key)
- .unwrap()
- .push(pr.to_serializable_report());
- }
- // Commit changes to database
- db.insert("prs-to-process", bincode::serialize(&reports).unwrap())
- .unwrap();
- }
- /// Sends a collection of positive reports to the Lox Authority and returns the
- /// number of valid reports returned by the server. The positive reports in the
- /// collection should all have the same bridge fingerprint, date, and country.
- pub async fn verify_positive_reports(
- distributors: &BTreeMap<BridgeDistributor, String>,
- reports: &Vec<SerializablePositiveReport>,
- ) -> u32 {
- // Don't make a network call if we don't have any reports anyway
- if reports.is_empty() {
- return 0;
- }
- let client = Client::new();
- let uri: String = (distributors
- .get(&BridgeDistributor::Lox)
- .unwrap()
- .to_owned()
- + "/verifypositive")
- .parse()
- .unwrap();
- let req = Request::builder()
- .method(Method::POST)
- .uri(uri)
- .body(Body::from(serde_json::to_string(&reports).unwrap()))
- .unwrap();
- let resp = client.request(req).await.unwrap();
- let buf = hyper::body::to_bytes(resp).await.unwrap();
- serde_json::from_slice(&buf).unwrap()
- }
- /// Process today's positive reports and store the count of verified reports in
- /// the database.
- pub async fn update_positive_reports(db: &Db, distributors: &BTreeMap<BridgeDistributor, String>) {
- let all_positive_reports = match db.get("prs-to-process").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => BTreeMap::<String, Vec<SerializablePositiveReport>>::new(),
- };
- let mut bridges_to_re_evaluate = match db.get("bridges-to-re-evaluate").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => HashMap::<String, u32>::new(),
- // We map fingerprint:date where date is the earliest date for
- // which we have new reports
- };
- let today = get_date();
- // Key is [fingerprint]_[country]_[date]
- for bridge_country_date in all_positive_reports.keys() {
- let reports = all_positive_reports.get(bridge_country_date).unwrap();
- if !reports.is_empty() {
- let first_report = &reports[0];
- let fingerprint = first_report.fingerprint;
- let date = first_report.date;
- let country = first_report.country.clone();
- let count_valid = verify_positive_reports(distributors, reports).await;
- // If we have new historical data, re-evaluate this bridge
- if count_valid > 0 && date < today {
- let fpr_str = array_bytes::bytes2hex("", fingerprint);
- if bridges_to_re_evaluate.contains_key(&fpr_str) {
- if *bridges_to_re_evaluate.get(&fpr_str).unwrap() > date {
- bridges_to_re_evaluate.insert(fpr_str, date);
- }
- } else {
- bridges_to_re_evaluate.insert(fpr_str, date);
- }
- }
- // Get bridge info or make new one
- let mut bridge_info = match db.get(fingerprint).unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => {
- // This case shouldn't happen unless the bridge hasn't
- // published any bridge stats.
- add_bridge_to_db(db, fingerprint);
- BridgeInfo::new(fingerprint, &String::default())
- }
- };
- // Add the new report count to it
- if let hash_map::Entry::Vacant(e) = bridge_info.info_by_country.entry(country.clone()) {
- // No existing entry; make a new one.
- let mut bridge_country_info = BridgeCountryInfo::new(date);
- bridge_country_info.add_info(BridgeInfoType::PositiveReports, date, count_valid);
- e.insert(bridge_country_info);
- } else {
- let bridge_country_info = bridge_info.info_by_country.get_mut(&country).unwrap();
- bridge_country_info.add_info(BridgeInfoType::PositiveReports, date, count_valid);
- }
- // Commit changes to database
- db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
- .unwrap();
- }
- }
- // Remove the now-processed reports from the database
- db.insert(
- "prs-to-process",
- bincode::serialize(&BTreeMap::<String, Vec<SerializablePositiveReport>>::new()).unwrap(),
- )
- .unwrap();
- // Commit new set of bridges to re-evaluate
- db.insert(
- "bridges-to-re-evaluate",
- bincode::serialize(&bridges_to_re_evaluate).unwrap(),
- )
- .unwrap();
- }
- // Verdict on bridge reachability
- /// Guess which countries block a bridge. This function returns a map of new
- /// blockages (fingerprint : set of countries which block the bridge)
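- ///
- /// A calling sketch (`analyzer` stands for any `analysis::Analyzer`
- /// implementation; the confidence level and day windows are illustrative):
- /// ```ignore
- /// let new_blockages = guess_blockages(&db, &analyzer, 0.95, 3, 30);
- /// for (fingerprint, countries) in &new_blockages {
- ///     println!("{} newly blocked in {:?}", array_bytes::bytes2hex("", fingerprint), countries);
- /// }
- /// ```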
- pub fn guess_blockages(
- db: &Db,
- analyzer: &dyn Analyzer,
- confidence: f64,
- min_historical_days: u32,
- max_historical_days: u32,
- ) -> HashMap<[u8; 20], HashSet<String>> {
- // Map of bridge fingerprint to set of countries which newly block it
- let mut blockages = HashMap::<[u8; 20], HashSet<String>>::new();
- // Get list of bridges from database
- let bridges = match db.get("bridges").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => HashSet::<[u8; 20]>::new(),
- };
- // Get list of bridges with historical data to re-evaluate
- let bridges_to_re_evaluate = match db.get("bridges-to-re-evaluate").unwrap() {
- Some(v) => bincode::deserialize(&v).unwrap(),
- None => HashMap::<String, u32>::new(),
- };
- // Guess for each bridge
- for fingerprint in bridges {
- let today = get_date();
- let mut bridge_info: BridgeInfo =
- bincode::deserialize(&db.get(fingerprint).unwrap().unwrap()).unwrap();
- let mut new_blockages = HashSet::<String>::new();
- let fpr_str = array_bytes::bytes2hex("", fingerprint);
- let first_date = if bridges_to_re_evaluate.contains_key(&fpr_str) {
- *bridges_to_re_evaluate.get(&fpr_str).unwrap()
- } else {
- today
- };
- // Re-evaluate each day from first_date through today.
- // (This approach is still suboptimal because we re-evaluate for
- // countries that don't have new reports.)
- for i in first_date..=today {
- let blocked_in = analysis::blocked_in(
- analyzer,
- &bridge_info,
- confidence,
- i,
- min_historical_days,
- max_historical_days,
- );
- for country in blocked_in {
- let bridge_country_info = bridge_info.info_by_country.get_mut(&country).unwrap();
- if !bridge_country_info.blocked {
- new_blockages.insert(country.to_string());
- // Mark bridge as blocked when db gets updated
- bridge_country_info.blocked = true;
- }
- }
- }
- blockages.insert(fingerprint, new_blockages);
- // Commit changes to database
- db.insert(fingerprint, bincode::serialize(&bridge_info).unwrap())
- .unwrap();
- }
- // Remove all bridges to re-evaluate from DB
- db.insert(
- "bridges-to-re-evaluate",
- bincode::serialize(&HashMap::<String, u32>::new()).unwrap(),
- )
- .unwrap();
- // Return map of new blockages
- blockages
- }
- /// Report blocked bridges to the bridge distributor.
- pub async fn report_blockages(
- distributors: &BTreeMap<BridgeDistributor, String>,
- blockages: HashMap<[u8; 20], HashSet<String>>,
- ) {
- // For now, only report to Lox
- // TODO: Support more distributors
- let uri: String = (distributors
- .get(&BridgeDistributor::Lox)
- .unwrap()
- .to_owned()
- + "/reportblocked")
- .parse()
- .unwrap();
- // Convert map keys from [u8; 20] to 40-character hex strings
- let mut blockages_str = HashMap::<String, HashSet<String>>::new();
- for (fingerprint, countries) in blockages {
- let fpr_string = array_bytes::bytes2hex("", fingerprint);
- if !countries.is_empty() {
- blockages_str.insert(fpr_string, countries);
- }
- }
- if !blockages_str.is_empty() {
- // Report blocked bridges to bridge distributor
- let client = Client::new();
- let req = Request::builder()
- .method(Method::POST)
- .uri(uri)
- .body(Body::from(serde_json::to_string(&blockages_str).unwrap()))
- .unwrap();
- let resp = client.request(req).await.unwrap();
- let buf = hyper::body::to_bytes(resp).await.unwrap();
- let resp_str = String::from_utf8(buf.to_vec()).unwrap();
- assert_eq!("OK", resp_str);
- }
- }
- // Unit tests
- #[cfg(test)]
- mod tests;