//
// Web Headers and caching
//
use rocket::fairing::{Fairing, Info, Kind};
use rocket::http::{ContentType, Header, HeaderMap, Method, Status};
use rocket::response::{self, Responder};
use rocket::{Data, Request, Response, Rocket};
use std::io::Cursor;

use crate::CONFIG;
pub struct AppHeaders();

impl Fairing for AppHeaders {
    fn info(&self) -> Info {
        Info {
            name: "Application Headers",
            kind: Kind::Response,
        }
    }

    fn on_response(&self, _req: &Request, res: &mut Response) {
        res.set_raw_header("Feature-Policy", "accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; camera 'none'; encrypted-media 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; payment 'none'; picture-in-picture 'none'; sync-xhr 'self' https://haveibeenpwned.com https://twofactorauth.org; usb 'none'; vr 'none'");
        res.set_raw_header("Referrer-Policy", "same-origin");
        res.set_raw_header("X-Frame-Options", "SAMEORIGIN");
        res.set_raw_header("X-Content-Type-Options", "nosniff");
        res.set_raw_header("X-XSS-Protection", "1; mode=block");
        let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors());
        res.set_raw_header("Content-Security-Policy", csp);

        // Disable cache unless otherwise specified
        if !res.headers().contains("cache-control") {
            res.set_raw_header("Cache-Control", "no-cache, no-store, max-age=0");
        }
    }
}
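// Usage sketch (illustrative, not taken from this project's launch code): a fairing
// like this is attached when building the Rocket instance, e.g.
//
//     rocket::ignite()
//         .attach(AppHeaders())
//         .launch();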
pub struct CORS();

impl CORS {
    fn get_header(headers: &HeaderMap, name: &str) -> String {
        match headers.get_one(name) {
            Some(h) => h.to_string(),
            _ => "".to_string(),
        }
    }

    fn valid_url(url: String) -> String {
        match url.as_ref() {
            "file://" => "*".to_string(),
            _ => url,
        }
    }
}

impl Fairing for CORS {
    fn info(&self) -> Info {
        Info {
            name: "CORS",
            kind: Kind::Response,
        }
    }

    fn on_response(&self, request: &Request, response: &mut Response) {
        let req_headers = request.headers();

        // We need to explicitly get the Origin header for Access-Control-Allow-Origin
        let req_allow_origin = CORS::valid_url(CORS::get_header(&req_headers, "Origin"));

        response.set_header(Header::new("Access-Control-Allow-Origin", req_allow_origin));

        if request.method() == Method::Options {
            let req_allow_headers = CORS::get_header(&req_headers, "Access-Control-Request-Headers");
            let req_allow_method = CORS::get_header(&req_headers, "Access-Control-Request-Method");

            response.set_header(Header::new("Access-Control-Allow-Methods", req_allow_method));
            response.set_header(Header::new("Access-Control-Allow-Headers", req_allow_headers));
            response.set_header(Header::new("Access-Control-Allow-Credentials", "true"));
            response.set_status(Status::Ok);
            response.set_header(ContentType::Plain);
            response.set_sized_body(Cursor::new(""));
        }
    }
}
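// Illustration (the path and origin below are made-up examples): for a browser
// preflight, this fairing rewrites whatever response Rocket produced for the OPTIONS
// request into an empty 200 that echoes the requested method and headers back:
//
//     OPTIONS /api/ciphers
//         Origin: moz-extension://abc
//         Access-Control-Request-Method: POST
//     => 200 OK
//         Access-Control-Allow-Origin: moz-extension://abc
//         Access-Control-Allow-Methods: POST
//         Access-Control-Allow-Credentials: true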
pub struct Cached<R>(R, &'static str);

impl<R> Cached<R> {
    pub fn long(r: R) -> Cached<R> {
        // 7 days
        Cached(r, "public, max-age=604800")
    }

    pub fn short(r: R) -> Cached<R> {
        // 10 minutes
        Cached(r, "public, max-age=600")
    }
}

impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> {
    fn respond_to(self, req: &Request) -> response::Result<'r> {
        match self.0.respond_to(req) {
            Ok(mut res) => {
                res.set_raw_header("Cache-Control", self.1);
                Ok(res)
            }
            e @ Err(_) => e,
        }
    }
}
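// Usage sketch (illustrative; the route and NamedFile usage below are hypothetical,
// not code from this file): wrapping a responder in Cached sets its Cache-Control
// header, so a static-file route can mark its output as cacheable for 7 days:
//
//     #[get("/static/<p..>")]
//     fn static_files(p: PathBuf) -> Option<Cached<NamedFile>> {
//         NamedFile::open(Path::new("static").join(p)).ok().map(Cached::long)
//     }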
// Log all the routes from the main paths list, and the attachments endpoint
// Effectively ignores any static file route and the alive endpoint
const LOGGED_ROUTES: [&str; 6] = [
    "/api",
    "/admin",
    "/identity",
    "/icons",
    "/notifications/hub/negotiate",
    "/attachments",
];

// The boolean enables extra debug: when true, we ignore the whitelist above and also print the mounted routes
pub struct BetterLogging(pub bool);

impl Fairing for BetterLogging {
    fn info(&self) -> Info {
        Info {
            name: "Better Logging",
            kind: Kind::Launch | Kind::Request | Kind::Response,
        }
    }

    fn on_launch(&self, rocket: &Rocket) {
        if self.0 {
            info!(target: "routes", "Routes loaded:");
            let mut routes: Vec<_> = rocket.routes().collect();
            routes.sort_by_key(|r| r.uri.path());
            for route in routes {
                if route.rank < 0 {
                    info!(target: "routes", "{:<6} {}", route.method, route.uri);
                } else {
                    info!(target: "routes", "{:<6} {} [{}]", route.method, route.uri, route.rank);
                }
            }
        }

        let config = rocket.config();
        let scheme = if config.tls_enabled() { "https" } else { "http" };
        let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port);
        info!(target: "start", "Rocket has launched from {}", addr);
    }

    fn on_request(&self, request: &mut Request<'_>, _data: &Data) {
        let method = request.method();
        if !self.0 && method == Method::Options {
            return;
        }
        let uri = request.uri();
        let uri_path = uri.path();
        // FIXME: trim_start_matches() could result in over-trimming in pathological cases;
        // strip_prefix() would be a better option once it's stable.
        let uri_subpath = uri_path.trim_start_matches(&CONFIG.domain_path());
        if self.0 || LOGGED_ROUTES.iter().any(|r| uri_subpath.starts_with(r)) {
            match uri.query() {
                Some(q) => info!(target: "request", "{} {}?{}", method, uri_path, &q[..q.len().min(30)]),
                None => info!(target: "request", "{} {}", method, uri_path),
            };
        }
    }

    fn on_response(&self, request: &Request, response: &mut Response) {
        if !self.0 && request.method() == Method::Options {
            return;
        }
        // FIXME: trim_start_matches() could result in over-trimming in pathological cases;
        // strip_prefix() would be a better option once it's stable.
        let uri_subpath = request.uri().path().trim_start_matches(&CONFIG.domain_path());
        if self.0 || LOGGED_ROUTES.iter().any(|r| uri_subpath.starts_with(r)) {
            let status = response.status();
            if let Some(ref route) = request.route() {
                info!(target: "response", "{} => {} {}", route, status.code, status.reason)
            } else {
                info!(target: "response", "{} {}", status.code, status.reason)
            }
        }
    }
}
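// Rough illustration (the exact output depends on the logger configuration and on how
// Rocket displays the matched route): with BetterLogging attached, a whitelisted
// request and its response are logged under the "request" and "response" targets,
// roughly as
//
//     GET /api/sync
//     <matched route> => 200 OK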
//
// File handling
//
use std::fs::{self, File};
use std::io::{Read, Result as IOResult};
use std::path::Path;

pub fn file_exists(path: &str) -> bool {
    Path::new(path).exists()
}

pub fn read_file(path: &str) -> IOResult<Vec<u8>> {
    let mut contents: Vec<u8> = Vec::new();

    let mut file = File::open(Path::new(path))?;
    file.read_to_end(&mut contents)?;

    Ok(contents)
}

pub fn read_file_string(path: &str) -> IOResult<String> {
    let mut contents = String::new();

    let mut file = File::open(Path::new(path))?;
    file.read_to_string(&mut contents)?;

    Ok(contents)
}

pub fn delete_file(path: &str) -> IOResult<()> {
    let res = fs::remove_file(path);

    if let Some(parent) = Path::new(path).parent() {
        // If the directory isn't empty, this returns an error, which we ignore
        // We only want to delete the folder if it's empty
        fs::remove_dir(parent).ok();
    }

    res
}
pub struct LimitedReader<'a> {
    reader: &'a mut dyn std::io::Read,
    limit: usize, // In bytes
    count: usize,
}

impl<'a> LimitedReader<'a> {
    pub fn new(reader: &'a mut dyn std::io::Read, limit: usize) -> LimitedReader<'a> {
        LimitedReader {
            reader,
            limit,
            count: 0,
        }
    }
}

impl<'a> std::io::Read for LimitedReader<'a> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        self.count += buf.len();

        if self.count > self.limit {
            Ok(0) // End of the read
        } else {
            self.reader.read(buf)
        }
    }
}
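// Usage sketch (illustrative): LimitedReader caps how much can be pulled from an
// underlying reader; note that the count tracks the requested buffer sizes rather
// than the bytes actually returned, so the cut-off is approximate.
//
//     let mut source = std::io::Cursor::new(vec![0u8; 10 * 1024 * 1024]);
//     let mut limited = LimitedReader::new(&mut source, 1024 * 1024);
//     let mut data = Vec::new();
//     std::io::Read::read_to_end(&mut limited, &mut data)?; // stops around the 1 MB limit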
const UNITS: [&str; 6] = ["bytes", "KB", "MB", "GB", "TB", "PB"];

pub fn get_display_size(size: i32) -> String {
    let mut size: f64 = size.into();
    let mut unit_counter = 0;

    loop {
        if size > 1024. {
            size /= 1024.;
            unit_counter += 1;
        } else {
            break;
        }
    }

    // Round to two decimals
    size = (size * 100.).round() / 100.;
    format!("{} {}", size, UNITS[unit_counter])
}
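// For example, get_display_size(1500) returns "1.46 KB" and
// get_display_size(5 * 1024 * 1024) returns "5 MB".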
pub fn get_uuid() -> String {
    uuid::Uuid::new_v4().to_string()
}

//
// String util methods
//

use std::ops::Try;
use std::str::FromStr;

pub fn upcase_first(s: &str) -> String {
    let mut c = s.chars();
    match c.next() {
        None => String::new(),
        Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
    }
}

pub fn try_parse_string<S, T, U>(string: impl Try<Ok = S, Error = U>) -> Option<T>
where
    S: AsRef<str>,
    T: FromStr,
{
    if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) {
        Some(value)
    } else {
        None
    }
}
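// For example, try_parse_string::<_, i32, _>(Ok::<&str, ()>("42")) returns Some(42),
// while an Err input or a string that doesn't parse returns None. get_env() below
// builds on this to read typed values from environment variables.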
//
// Env methods
//

use std::env;

pub fn get_env<V>(key: &str) -> Option<V>
where
    V: FromStr,
{
    try_parse_string(env::var(key))
}

const TRUE_VALUES: &[&str] = &["true", "t", "yes", "y", "1"];
const FALSE_VALUES: &[&str] = &["false", "f", "no", "n", "0"];

pub fn get_env_bool(key: &str) -> Option<bool> {
    match env::var(key) {
        Ok(val) if TRUE_VALUES.contains(&val.to_lowercase().as_ref()) => Some(true),
        Ok(val) if FALSE_VALUES.contains(&val.to_lowercase().as_ref()) => Some(false),
        _ => None,
    }
}
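// For example, with SOME_FLAG=Yes in the environment (a hypothetical variable name),
// get_env_bool("SOME_FLAG") returns Some(true); an unset variable or an unrecognized
// value such as "maybe" returns None.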
//
// Date util methods
//

use chrono::NaiveDateTime;

const DATETIME_FORMAT: &str = "%Y-%m-%dT%H:%M:%S%.6fZ";

pub fn format_date(date: &NaiveDateTime) -> String {
    date.format(DATETIME_FORMAT).to_string()
}
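// For example, a NaiveDateTime of 2020-02-19 08:27:00 is formatted as
// "2020-02-19T08:27:00.000000Z".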
//
// Deserialization methods
//

use std::fmt;

use serde::de::{self, DeserializeOwned, Deserializer, MapAccess, SeqAccess, Visitor};
use serde_json::{self, Value};

pub type JsonMap = serde_json::Map<String, Value>;

#[derive(PartialEq, Serialize, Deserialize)]
pub struct UpCase<T: DeserializeOwned> {
    #[serde(deserialize_with = "upcase_deserialize")]
    #[serde(flatten)]
    pub data: T,
}

// https://github.com/serde-rs/serde/issues/586
pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
    T: DeserializeOwned,
    D: Deserializer<'de>,
{
    let d = deserializer.deserialize_any(UpCaseVisitor)?;
    T::deserialize(d).map_err(de::Error::custom)
}

struct UpCaseVisitor;

impl<'de> Visitor<'de> for UpCaseVisitor {
    type Value = Value;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("an object or an array")
    }

    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
    where
        A: MapAccess<'de>,
    {
        let mut result_map = JsonMap::new();

        while let Some((key, value)) = map.next_entry()? {
            result_map.insert(upcase_first(key), upcase_value(value));
        }

        Ok(Value::Object(result_map))
    }

    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: SeqAccess<'de>,
    {
        let mut result_seq = Vec::<Value>::new();

        while let Some(value) = seq.next_element()? {
            result_seq.push(upcase_value(value));
        }

        Ok(Value::Array(result_seq))
    }
}

fn upcase_value(value: Value) -> Value {
    if let Value::Object(map) = value {
        let mut new_value = json!({});

        for (key, val) in map.into_iter() {
            let processed_key = _process_key(&key);
            new_value[processed_key] = upcase_value(val);
        }
        new_value
    } else if let Value::Array(array) = value {
        // Initialize array with null values
        let mut new_value = json!(vec![Value::Null; array.len()]);

        for (index, val) in array.into_iter().enumerate() {
            new_value[index] = upcase_value(val);
        }
        new_value
    } else {
        value
    }
}

fn _process_key(key: &str) -> String {
    match key.to_lowercase().as_ref() {
        "ssn" => "SSN".into(),
        _ => self::upcase_first(key),
    }
}
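// Usage sketch (illustrative; the struct and JSON below are made-up examples, not
// code from this project): UpCase lets a struct with PascalCase fields deserialize
// from JSON whose keys arrive in camelCase, because every key gets its first letter
// upcased before deserialization.
//
//     #[derive(Deserialize)]
//     struct LoginData {
//         Email: String,
//         MasterPasswordHash: String,
//     }
//
//     let parsed: UpCase<LoginData> =
//         serde_json::from_str(r#"{"email": "a@b.c", "masterPasswordHash": "x"}"#)?;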
//
// Retry methods
//

pub fn retry<F, T, E>(func: F, max_tries: i32) -> Result<T, E>
where
    F: Fn() -> Result<T, E>,
{
    use std::{thread::sleep, time::Duration};
    let mut tries = 0;

    loop {
        match func() {
            ok @ Ok(_) => return ok,
            err @ Err(_) => {
                tries += 1;

                if tries >= max_tries {
                    return err;
                }

                sleep(Duration::from_millis(500));
            }
        }
    }
}
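// Usage sketch (illustrative; `connect` is a hypothetical fallible function): retry
// the call up to 10 times, sleeping 500 ms between attempts, and return the last
// error if every attempt fails.
//
//     let conn = retry(|| connect(), 10)?;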
|