Split code into cargo workspaces (#67)
More fixes
- fixed docker builds
- fixed mentions regex test
- fixed DATABASE_URL stuff
- change schema path in diesel.toml

Address review comments
- add jsonb column back into activity table
- remove authors field from cargo.toml
- adjust LEMMY_DATABASE_URL env var usage
- rename all occurrences of LEMMY_DATABASE_URL to DATABASE_URL

Decouple utils and db

Split code into cargo workspaces

Co-authored-by: Felix Ableitner <[email protected]>
Reviewed-on: https://yerbamate.dev/LemmyNet/lemmy/pulls/67

Branch: feature/settings-cleanup
committed by dessalines
62 changed files with 1070 additions and 920 deletions
  lines  file
      9  docker/dev/Dockerfile
     10  docker/prod/Dockerfile
      2  docs/src/contributing_tests.md
      2  install.sh
     49  server/Cargo.lock
     17  server/Cargo.toml
      3  server/db-init.sh
      2  server/diesel.toml
     15  server/lemmy_db/Cargo.toml
     50  server/lemmy_db/src/activity.rs
      5  server/lemmy_db/src/category.rs
     21  server/lemmy_db/src/comment.rs
     14  server/lemmy_db/src/comment_view.rs
      8  server/lemmy_db/src/community.rs
      2  server/lemmy_db/src/community_view.rs
     38  server/lemmy_db/src/lib.rs
     15  server/lemmy_db/src/moderator.rs
      2  server/lemmy_db/src/moderator_views.rs
      4  server/lemmy_db/src/password_reset_request.rs
     22  server/lemmy_db/src/post.rs
     13  server/lemmy_db/src/post_view.rs
     28  server/lemmy_db/src/private_message.rs
      2  server/lemmy_db/src/private_message_view.rs
      0  server/lemmy_db/src/schema.rs
      2  server/lemmy_db/src/site.rs
      0  server/lemmy_db/src/site_view.rs
     83  server/lemmy_db/src/user.rs
     15  server/lemmy_db/src/user_mention.rs
      2  server/lemmy_db/src/user_mention_view.rs
      2  server/lemmy_db/src/user_view.rs
     22  server/lemmy_utils/Cargo.toml
    324  server/lemmy_utils/src/lib.rs
     33  server/lemmy_utils/src/settings.rs
     73  server/src/api/claims.rs
     52  server/src/api/comment.rs
     26  server/src/api/community.rs
      9  server/src/api/mod.rs
     54  server/src/api/post.rs
     37  server/src/api/site.rs
     95  server/src/api/user.rs
     12  server/src/apub/activities.rs
     18  server/src/apub/comment.rs
     84  server/src/apub/community.rs
     12  server/src/apub/community_inbox.rs
      6  server/src/apub/extensions/group_extensions.rs
     18  server/src/apub/extensions/signatures.rs
     39  server/src/apub/fetcher.rs
     60  server/src/apub/mod.rs
     16  server/src/apub/post.rs
     14  server/src/apub/private_message.rs
     29  server/src/apub/shared_inbox.rs
     24  server/src/apub/user.rs
     20  server/src/apub/user_inbox.rs
     33  server/src/code_migrations.rs
    331  server/src/lib.rs
     16  server/src/main.rs
      5  server/src/rate_limit/mod.rs
     22  server/src/routes/federation.rs
     31  server/src/routes/feeds.rs
      2  server/src/routes/index.rs
     12  server/src/routes/nodeinfo.rs
     24  server/src/routes/webfinger.rs
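For orientation, the resulting layout as sketched from the file list above (only the directories this commit touches are shown; the per-file contents follow in the diffs below):

```
server/
├── Cargo.toml          # gains [workspace] members = ["lemmy_utils", "lemmy_db"]
├── src/                # lemmy_server itself: api/, apub/, routes/, code_migrations.rs, ...
├── lemmy_db/
│   ├── Cargo.toml
│   └── src/            # diesel models and views: activity, comment, community, post, user, schema.rs
└── lemmy_utils/
    ├── Cargo.toml
    └── src/            # lib.rs (regexes, email, keypair helpers) and settings.rs
```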
docs/src/contributing_tests.md

````diff
@@ -9,7 +9,7 @@ following commands in the `server` subfolder:
 psql -U lemmy -c "DROP SCHEMA public CASCADE; CREATE SCHEMA public;"
 export DATABASE_URL=postgres://lemmy:[email protected]:5432/lemmy
 diesel migration run
-RUST_TEST_THREADS=1 cargo test
+RUST_TEST_THREADS=1 cargo test --workspace
 ```
 
 ### Federation
````
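The `DATABASE_URL` exported above is the variable the commit renames from `LEMMY_DATABASE_URL`. As a rough sketch of how such a variable is typically consumed with the `dotenv` and `diesel` crates that the root manifest already depends on (an assumption for illustration, not the project's actual connection code):

```rust
use diesel::{pg::PgConnection, Connection};

// Hypothetical sketch: read DATABASE_URL (optionally from a .env file) and
// open a postgres connection with diesel.
fn establish_connection() -> PgConnection {
    dotenv::dotenv().ok(); // load .env if present
    let url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    PgConnection::establish(&url)
        .unwrap_or_else(|_| panic!("Error connecting to {}", url))
}
```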
server/Cargo.toml

```diff
@@ -1,14 +1,21 @@
 [package]
 name = "lemmy_server"
 version = "0.0.1"
-authors = ["Dessalines <[email protected]>"]
 edition = "2018"
 
 [profile.release]
 lto = true
 
+[workspace]
+members = [
+  "lemmy_utils",
+  "lemmy_db"
+]
+
 [dependencies]
+diesel = { version = "1.4.4", features = ["postgres","chrono","r2d2","64-column-tables","serde_json"] }
+lemmy_utils = { path = "./lemmy_utils" }
+lemmy_db = { path = "./lemmy_db" }
-diesel = "1.4.4"
 diesel_migrations = "1.4.0"
 dotenv = "0.15.0"
 activitystreams = "0.6.2"
@@ -31,16 +38,10 @@ rand = "0.7.3"
strum = "0.18.0"
strum_macros = "0.18.0"
jsonwebtoken = "7.0.1"
regex = "1.3.5"
lazy_static = "1.3.0"
lettre = "0.9.3"
lettre_email = "0.9.4"
rss = "1.9.0"
htmlescape = "0.3.1"
url = { version = "2.1.1", features = ["serde"] }
config = {version = "0.10.1", default-features = false, features = ["hjson"] }
percent-encoding = "2.1.0"
comrak = "0.7"
openssl = "0.10"
http = "0.2.1"
http-signature-normalization-actix = { version = "0.4.0-alpha.0", default-features = false, features = ["sha-2"] }
```
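With the workspace in place, the member crates are consumed like ordinary path dependencies. A hypothetical snippet for illustration (the imports mirror those in the new claims.rs shown further down; the function itself is not part of the commit):

```rust
// Illustrative consumer inside the lemmy_server crate: shared items now come
// from the lemmy_db and lemmy_utils workspace members instead of local modules.
use diesel::PgConnection;
use lemmy_db::{user::User_, Crud};
use lemmy_utils::{generate_random_string, settings::Settings};

fn example(conn: &PgConnection) -> Result<(), diesel::result::Error> {
    let user = User_::read(conn, 1)?;      // Crud trait from lemmy_db
    let token = generate_random_string();  // helper moved into lemmy_utils
    println!("{} on {} got token {}", user.name, Settings::get().hostname, token);
    Ok(())
}
```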
server/lemmy_db/Cargo.toml (new file)

```toml
[package]
name = "lemmy_db"
version = "0.1.0"
edition = "2018"

[dependencies]
diesel = { version = "1.4.4", features = ["postgres","chrono","r2d2","64-column-tables","serde_json"] }
chrono = { version = "0.4.7", features = ["serde"] }
serde = { version = "1.0.105", features = ["derive"] }
serde_json = { version = "1.0.52", features = ["preserve_order"]}
strum = "0.18.0"
strum_macros = "0.18.0"
log = "0.4.0"
sha2 = "0.9"
bcrypt = "0.8.0"
```
server/lemmy_utils/Cargo.toml (new file)

```toml
[package]
name = "lemmy_utils"
version = "0.1.0"
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
regex = "1.3.5"
config = { version = "0.10.1", default-features = false, features = ["hjson"] }
chrono = { version = "0.4.7", features = ["serde"] }
lettre = "0.9.3"
lettre_email = "0.9.4"
log = "0.4.0"
itertools = "0.9.0"
rand = "0.7.3"
serde = { version = "1.0.105", features = ["derive"] }
serde_json = { version = "1.0.52", features = ["preserve_order"]}
comrak = "0.7"
lazy_static = "1.3.0"
openssl = "0.10"
url = { version = "2.1.1", features = ["serde"] }
```
server/lemmy_utils/src/lib.rs (new file)

```rust
#[macro_use]
pub extern crate lazy_static;
pub extern crate comrak;
pub extern crate lettre;
pub extern crate lettre_email;
pub extern crate openssl;
pub extern crate rand;
pub extern crate regex;
pub extern crate serde_json;
pub extern crate url;

pub mod settings;

use crate::settings::Settings;
use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, Utc};
use itertools::Itertools;
use lettre::{
  smtp::{
    authentication::{Credentials, Mechanism},
    extension::ClientId,
    ConnectionReuseParameters,
  },
  ClientSecurity,
  SmtpClient,
  Transport,
};
use lettre_email::Email;
use openssl::{pkey::PKey, rsa::Rsa};
use rand::{distributions::Alphanumeric, thread_rng, Rng};
use regex::{Regex, RegexBuilder};
use std::io::{Error, ErrorKind};
use url::Url;

pub fn to_datetime_utc(ndt: NaiveDateTime) -> DateTime<Utc> {
  DateTime::<Utc>::from_utc(ndt, Utc)
}

pub fn naive_from_unix(time: i64) -> NaiveDateTime {
  NaiveDateTime::from_timestamp(time, 0)
}

pub fn convert_datetime(datetime: NaiveDateTime) -> DateTime<FixedOffset> {
  let now = Local::now();
  DateTime::<FixedOffset>::from_utc(datetime, *now.offset())
}

pub fn is_email_regex(test: &str) -> bool {
  EMAIL_REGEX.is_match(test)
}

pub fn remove_slurs(test: &str) -> String {
  SLUR_REGEX.replace_all(test, "*removed*").to_string()
}

pub fn slur_check(test: &str) -> Result<(), Vec<&str>> {
  let mut matches: Vec<&str> = SLUR_REGEX.find_iter(test).map(|mat| mat.as_str()).collect();

  // Unique
  matches.sort_unstable();
  matches.dedup();

  if matches.is_empty() {
    Ok(())
  } else {
    Err(matches)
  }
}

pub fn slurs_vec_to_str(slurs: Vec<&str>) -> String {
  let start = "No slurs - ";
  let combined = &slurs.join(", ");
  [start, combined].concat()
}

pub fn generate_random_string() -> String {
  thread_rng().sample_iter(&Alphanumeric).take(30).collect()
}

pub fn send_email(
  subject: &str,
  to_email: &str,
  to_username: &str,
  html: &str,
) -> Result<(), String> {
  let email_config = Settings::get().email.ok_or("no_email_setup")?;

  let email = Email::builder()
    .to((to_email, to_username))
    .from(email_config.smtp_from_address.to_owned())
    .subject(subject)
    .html(html)
    .build()
    .unwrap();

  let mailer = if email_config.use_tls {
    SmtpClient::new_simple(&email_config.smtp_server).unwrap()
  } else {
    SmtpClient::new(&email_config.smtp_server, ClientSecurity::None).unwrap()
  }
  .hello_name(ClientId::Domain(Settings::get().hostname))
  .smtp_utf8(true)
  .authentication_mechanism(Mechanism::Plain)
  .connection_reuse(ConnectionReuseParameters::ReuseUnlimited);
  let mailer = if let (Some(login), Some(password)) =
    (&email_config.smtp_login, &email_config.smtp_password)
  {
    mailer.credentials(Credentials::new(login.to_owned(), password.to_owned()))
  } else {
    mailer
  };

  let mut transport = mailer.transport();
  let result = transport.send(email.into());
  transport.close();

  match result {
    Ok(_) => Ok(()),
    Err(e) => Err(e.to_string()),
  }
}

pub fn markdown_to_html(text: &str) -> String {
  comrak::markdown_to_html(text, &comrak::ComrakOptions::default())
}

// TODO nothing is done with community / group webfingers yet, so just ignore those for now
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct MentionData {
  pub name: String,
  pub domain: String,
}

impl MentionData {
  pub fn is_local(&self) -> bool {
    Settings::get().hostname.eq(&self.domain)
  }
  pub fn full_name(&self) -> String {
    format!("@{}@{}", &self.name, &self.domain)
  }
}

pub fn scrape_text_for_mentions(text: &str) -> Vec<MentionData> {
  let mut out: Vec<MentionData> = Vec::new();
  for caps in MENTIONS_REGEX.captures_iter(text) {
    out.push(MentionData {
      name: caps["name"].to_string(),
      domain: caps["domain"].to_string(),
    });
  }
  out.into_iter().unique().collect()
}

pub fn is_valid_username(name: &str) -> bool {
  VALID_USERNAME_REGEX.is_match(name)
}

pub fn is_valid_community_name(name: &str) -> bool {
  VALID_COMMUNITY_NAME_REGEX.is_match(name)
}

#[cfg(test)]
mod tests {
  use crate::{
    is_email_regex,
    is_valid_community_name,
    is_valid_username,
    remove_slurs,
    scrape_text_for_mentions,
    slur_check,
    slurs_vec_to_str,
  };

  #[test]
  fn test_mentions_regex() {
    let text = "Just read a great blog post by [@[email protected]](/u/test). And another by [email protected] . Another [@[email protected]:8540](/u/fish)";
    let mentions = scrape_text_for_mentions(text);

    assert_eq!(mentions[0].name, "tedu".to_string());
    assert_eq!(mentions[0].domain, "honk.teduangst.com".to_string());
    assert_eq!(mentions[1].domain, "lemmy-alpha:8540".to_string());
  }

  #[test]
  fn test_email() {
    assert!(is_email_regex("[email protected]"));
    assert!(!is_email_regex("nada_neutho"));
  }

  #[test]
  fn test_valid_register_username() {
    assert!(is_valid_username("Hello_98"));
    assert!(is_valid_username("ten"));
    assert!(!is_valid_username("Hello-98"));
    assert!(!is_valid_username("a"));
    assert!(!is_valid_username(""));
  }

  #[test]
  fn test_valid_community_name() {
    assert!(is_valid_community_name("example"));
    assert!(is_valid_community_name("example_community"));
    assert!(!is_valid_community_name("Example"));
    assert!(!is_valid_community_name("Ex"));
    assert!(!is_valid_community_name(""));
  }

  #[test]
  fn test_slur_filter() {
    let test =
      "coons test dindu ladyboy tranny retardeds. Capitalized Niggerz. This is a bunch of other safe text.";
    let slur_free = "No slurs here";
    assert_eq!(
      remove_slurs(&test),
      "*removed* test *removed* *removed* *removed* *removed*. Capitalized *removed*. This is a bunch of other safe text."
        .to_string()
    );

    let has_slurs_vec = vec![
      "Niggerz",
      "coons",
      "dindu",
      "ladyboy",
      "retardeds",
      "tranny",
    ];
    let has_slurs_err_str = "No slurs - Niggerz, coons, dindu, ladyboy, retardeds, tranny";

    assert_eq!(slur_check(test), Err(has_slurs_vec));
    assert_eq!(slur_check(slur_free), Ok(()));
    if let Err(slur_vec) = slur_check(test) {
      assert_eq!(&slurs_vec_to_str(slur_vec), has_slurs_err_str);
    }
  }

  // These helped with testing
  // #[test]
  // fn test_send_email() {
  //   let result = send_email("not a subject", "[email protected]", "ur user", "<h1>HI there</h1>");
  //   assert!(result.is_ok());
  // }
}

lazy_static! {
  static ref EMAIL_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$").unwrap();
  static ref SLUR_REGEX: Regex = RegexBuilder::new(r"(fag(g|got|tard)?|maricos?|cock\s?sucker(s|ing)?|\bn(i|1)g(\b|g?(a|er)?(s|z)?)\b|dindu(s?)|mudslime?s?|kikes?|mongoloids?|towel\s*heads?|\bspi(c|k)s?\b|\bchinks?|niglets?|beaners?|\bnips?\b|\bcoons?\b|jungle\s*bunn(y|ies?)|jigg?aboo?s?|\bpakis?\b|rag\s*heads?|gooks?|cunts?|bitch(es|ing|y)?|puss(y|ies?)|twats?|feminazis?|whor(es?|ing)|\bslut(s|t?y)?|\btr(a|@)nn?(y|ies?)|ladyboy(s?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build().unwrap();
  static ref USERNAME_MATCHES_REGEX: Regex = Regex::new(r"/u/[a-zA-Z][0-9a-zA-Z_]*").unwrap();
  // TODO keep this old one, it didn't work with port well tho
  // static ref MENTIONS_REGEX: Regex = Regex::new(r"@(?P<name>[\w.]+)@(?P<domain>[a-zA-Z0-9._-]+\.[a-zA-Z0-9_-]+)").unwrap();
  static ref MENTIONS_REGEX: Regex = Regex::new(r"@(?P<name>[\w.]+)@(?P<domain>[a-zA-Z0-9._:-]+)").unwrap();
  static ref VALID_USERNAME_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9_]{3,20}$").unwrap();
  static ref VALID_COMMUNITY_NAME_REGEX: Regex = Regex::new(r"^[a-z0-9_]{3,20}$").unwrap();
  pub static ref WEBFINGER_COMMUNITY_REGEX: Regex = Regex::new(&format!(
    "^group:([a-z0-9_]{{3, 20}})@{}$",
    Settings::get().hostname
  ))
  .unwrap();
  pub static ref WEBFINGER_USER_REGEX: Regex = Regex::new(&format!(
    "^acct:([a-z0-9_]{{3, 20}})@{}$",
    Settings::get().hostname
  ))
  .unwrap();
  pub static ref CACHE_CONTROL_REGEX: Regex =
    Regex::new("^((text|image)/.+|application/javascript)$").unwrap();
}

pub struct Keypair {
  pub private_key: String,
  pub public_key: String,
}

/// Generate the asymmetric keypair for ActivityPub HTTP signatures.
pub fn generate_actor_keypair() -> Result<Keypair, Error> {
  let rsa = Rsa::generate(2048)?;
  let pkey = PKey::from_rsa(rsa)?;
  let public_key = pkey.public_key_to_pem()?;
  let private_key = pkey.private_key_to_pem_pkcs8()?;
  let key_to_string = |key| match String::from_utf8(key) {
    Ok(s) => Ok(s),
    Err(e) => Err(Error::new(
      ErrorKind::Other,
      format!("Failed converting key to string: {}", e),
    )),
  };
  Ok(Keypair {
    private_key: key_to_string(private_key)?,
    public_key: key_to_string(public_key)?,
  })
}

pub enum EndpointType {
  Community,
  User,
  Post,
  Comment,
  PrivateMessage,
}

pub fn get_apub_protocol_string() -> &'static str {
  if Settings::get().federation.tls_enabled {
    "https"
  } else {
    "http"
  }
}

/// Generates the ActivityPub ID for a given object type and ID.
pub fn make_apub_endpoint(endpoint_type: EndpointType, name: &str) -> Url {
  let point = match endpoint_type {
    EndpointType::Community => "c",
    EndpointType::User => "u",
    EndpointType::Post => "post",
    EndpointType::Comment => "comment",
    EndpointType::PrivateMessage => "private_message",
  };

  Url::parse(&format!(
    "{}://{}/{}/{}",
    get_apub_protocol_string(),
    Settings::get().hostname,
    point,
    name
  ))
  .unwrap()
}
```
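A hypothetical caller exercising a few of the helpers above (illustrative only; this function is not part of the commit):

```rust
use lemmy_utils::{markdown_to_html, scrape_text_for_mentions, slur_check};

// Validate a comment body, collect its mentions, then render it to HTML.
fn process_comment(body: &str) -> Result<String, String> {
    // slur_check returns the unique filtered words on failure
    if let Err(matches) = slur_check(body) {
        return Err(format!("rejected: {}", matches.join(", ")));
    }
    // @name@domain mentions, deduplicated via MENTIONS_REGEX
    for mention in scrape_text_for_mentions(body) {
        println!("would notify {}", mention.full_name());
    }
    Ok(markdown_to_html(body))
}
```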
server/src/api/claims.rs (new file)

```rust
use diesel::{result::Error, PgConnection};
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, TokenData, Validation};
use lemmy_db::{user::User_, Crud};
use lemmy_utils::{is_email_regex, settings::Settings};
use serde::{Deserialize, Serialize};

type Jwt = String;

#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
  pub id: i32,
  pub username: String,
  pub iss: String,
  pub show_nsfw: bool,
  pub theme: String,
  pub default_sort_type: i16,
  pub default_listing_type: i16,
  pub lang: String,
  pub avatar: Option<String>,
  pub show_avatars: bool,
}

impl Claims {
  pub fn decode(jwt: &str) -> Result<TokenData<Claims>, jsonwebtoken::errors::Error> {
    let v = Validation {
      validate_exp: false,
      ..Validation::default()
    };
    decode::<Claims>(
      &jwt,
      &DecodingKey::from_secret(Settings::get().jwt_secret.as_ref()),
      &v,
    )
  }

  pub fn jwt(user: User_, hostname: String) -> Jwt {
    let my_claims = Claims {
      id: user.id,
      username: user.name.to_owned(),
      iss: hostname,
      show_nsfw: user.show_nsfw,
      theme: user.theme.to_owned(),
      default_sort_type: user.default_sort_type,
      default_listing_type: user.default_listing_type,
      lang: user.lang.to_owned(),
      avatar: user.avatar.to_owned(),
      show_avatars: user.show_avatars.to_owned(),
    };
    encode(
      &Header::default(),
      &my_claims,
      &EncodingKey::from_secret(Settings::get().jwt_secret.as_ref()),
    )
    .unwrap()
  }

  // TODO: move these into user?
  pub fn find_by_email_or_username(
    conn: &PgConnection,
    username_or_email: &str,
  ) -> Result<User_, Error> {
    if is_email_regex(username_or_email) {
      User_::find_by_email(conn, username_or_email)
    } else {
      User_::find_by_username(conn, username_or_email)
    }
  }

  pub fn find_by_jwt(conn: &PgConnection, jwt: &str) -> Result<User_, Error> {
    let claims: Claims = Claims::decode(&jwt).expect("Invalid token").claims;
    User_::read(&conn, claims.id)
  }
}
```
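A sketch of how a handler might resolve the user behind a request token with these helpers (hypothetical; the API handlers in this commit pass the jwt through their own request structs, and the module path below is assumed):

```rust
use crate::api::claims::Claims; // module path assumed for illustration
use diesel::PgConnection;
use lemmy_db::{user::User_, Crud};

// Hypothetical helper: decode the token, then load the matching user row.
fn current_user(conn: &PgConnection, jwt: &str) -> Option<User_> {
    // Claims::decode verifies the signature against Settings::get().jwt_secret
    let claims = Claims::decode(jwt).ok()?.claims;
    User_::read(conn, claims.id).ok()
}
```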