Djkáťo 2024-03-14 14:51:55 +01:00
parent 4f972771bc
commit 4131590b48
9 changed files with 64 additions and 91 deletions

View file

@@ -11,14 +11,14 @@ use saleor_app_sdk::{
AsyncWebhookEventType,
},
};
use tracing::{debug, error, info};
use tracing::{debug, info};
use crate::{
app::{AppError, AppState},
queries::{
event_products_updated::ProductUpdated,
product_metadata_update::{
MetadataInput, MetadataItem, UpdateProductMetadata, UpdateProductMetadataVariables,
MetadataInput, UpdateProductMetadata, UpdateProductMetadataVariables,
},
},
};

View file

@@ -14,10 +14,9 @@ pub async fn webhook_signature_verifier(request: Request, next: Next) -> Respons
let jwks_url = request
.headers()
.get(SALEOR_API_URL_HEADER)
.map_or(None, |h| {
.get(SALEOR_API_URL_HEADER).and_then(|h| {
h.to_str()
.map_or(None, |h| url::Url::parse(h).map_or(None, |h| Some(h)))
.map_or(None, |h| url::Url::parse(h).ok())
});
//get jwk from saleor api
@@ -36,7 +35,7 @@ pub async fn webhook_signature_verifier(request: Request, next: Next) -> Respons
let nstr = jwks["keys"][0]["n"].as_str().unwrap();
let estr = jwks["keys"][0]["e"].as_str().unwrap();
let pubkey = DecodingKey::from_rsa_components(&nstr, &estr).unwrap();
let pubkey = DecodingKey::from_rsa_components(nstr, estr).unwrap();
let (parts, body) = request.into_parts();
let payload = body::to_bytes(body, usize::MAX).await.unwrap();
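The rewrite above is clippy's `option_map_or_none` pattern: nested `map_or(None, …)` closures collapse into `Option::and_then`, and `Result::ok` converts the fallible parse into an `Option`. A minimal stand-alone sketch of the same shape, using `str::parse` as a stand-in for the real header lookup and `url::Url::parse`:

```rust
// Stand-in: read an optional string and parse it, mirroring the
// header -> str -> Url chain in webhook_signature_verifier.
fn parse_port(raw: Option<&str>) -> Option<u16> {
    // Before (clippy: option_map_or_none):
    //   raw.map_or(None, |s| s.parse().map_or(None, |n| Some(n)))
    // After: `and_then` flattens the Options; `ok` drops the parse error.
    raw.and_then(|s| s.parse().ok())
}

fn main() {
    assert_eq!(parse_port(Some("8000")), Some(8000));
    assert_eq!(parse_port(Some("nope")), None);
    assert_eq!(parse_port(None), None);
}
```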

View file

@@ -217,11 +217,11 @@ pub struct WebhookManifestBuilder {
impl WebhookManifestBuilder {
pub fn set_name(mut self, name: &str) -> Self {
self.webhook_manifest.name = name.to_owned();
name.clone_into(&mut self.webhook_manifest.name);
self
}
pub fn set_query(mut self, query: &str) -> Self {
self.webhook_manifest.query = query.to_owned();
query.clone_into(&mut self.webhook_manifest.query);
self
}
pub fn add_async_event(mut self, async_event: AsyncWebhookEventType) -> Self {
@@ -257,7 +257,7 @@ impl WebhookManifestBuilder {
self
}
pub fn set_target_url(mut self, url: &str) -> Self {
self.webhook_manifest.target_url = url.to_owned();
url.clone_into(&mut self.webhook_manifest.target_url);
self
}
pub fn set_is_active(mut self, active: bool) -> Self {
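These builder setters switch from `self.field = value.to_owned()` to `value.clone_into(&mut self.field)`, clippy's `assigning_clones` suggestion: `clone_into` can reuse the destination `String`'s existing allocation instead of always allocating a fresh one. A minimal sketch with a stand-in builder type:

```rust
#[derive(Default, Debug)]
struct Manifest {
    name: String,
}

impl Manifest {
    fn set_name(mut self, name: &str) -> Self {
        // Before: self.name = name.to_owned();  -- always allocates.
        // clone_into may reuse self.name's buffer if it has capacity.
        name.clone_into(&mut self.name);
        self
    }
}

fn main() {
    let manifest = Manifest::default().set_name("product-updated-webhook");
    println!("{manifest:?}");
}
```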

View file

@@ -3,12 +3,11 @@ use axum::{
response::{IntoResponse, Response},
};
use enum_iterator::{all, Sequence};
use std::{fmt::Display, str::FromStr, sync::Arc};
use std::{sync::Arc};
use saleor_app_sdk::{config::Config, locales::LocaleCode, manifest::AppManifest, SaleorApp};
use serde::{
de::{self, Visitor},
Deserialize, Deserializer, Serialize,
Serialize,
};
// Make our own error that wraps `anyhow::Error`.
pub struct AppError(anyhow::Error);
@@ -75,7 +74,7 @@ pub fn get_active_gateways_from_env() -> anyhow::Result<Vec<ActiveGateway>> {
l => unimplemented!("Locale {l} not implemented"),
};
let str_types: Vec<_> = env_types.split(",").collect();
let str_types: Vec<_> = env_types.split(',').collect();
let gateway_types = str_types
.iter()
.zip(all::<GatewayType>())
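`split(',')` is clippy's `single_char_pattern` fix: a `char` pattern is matched directly, skipping the substring-search machinery a `&str` pattern goes through. For example:

```rust
fn main() {
    let env_types = "cod,transfer"; // stand-in for the env var contents
    // Before: env_types.split(",")  (clippy: single_char_pattern)
    let str_types: Vec<&str> = env_types.split(',').collect();
    assert_eq!(str_types, ["cod", "transfer"]);
}
```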

View file

@@ -34,20 +34,7 @@ pub async fn register(
saleor_api_url: saleor_api_url.clone(),
};
app.apl.set(auth_data.clone()).await?;
//unlock the mutex guard so state isn't borrowed anymore and it can move
std::mem::drop(app);
info!("registered app for{:?}", &saleor_api_url);
tokio::spawn(async move {
match register_active_gateways(&state, auth_data).await {
Ok(_) => info!("Payment gateways registered"),
Err(e) => error!("Failed registering gateways, {e}"),
};
});
info!("registered app for: {:?}", &saleor_api_url);
Ok(StatusCode::OK)
}
pub async fn register_active_gateways(state: &AppState, auth_data: AuthData) -> anyhow::Result<()> {
// Maybe AppFetch manifest? Tho I might not need to since I already have it
// AppsInstallations to see if it's still installing
todo!()
}
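The deleted block is also a reminder of why the explicit `std::mem::drop(app)` was there: the lock guard borrowed `state`, and the borrow had to end before `state` could move into `tokio::spawn`. A minimal sketch of that pattern with a stand-in `AppState` (assuming a tokio runtime; these are not the app's real types):

```rust
use tokio::sync::Mutex;

struct AppState {
    apl: Mutex<Vec<String>>,
}

async fn register(state: AppState) {
    let mut app = state.apl.lock().await;
    app.push("auth-data".into());
    // Without this drop, `app` still borrows `state`, and `state`
    // cannot move into the spawned task below (error E0505).
    drop(app);

    tokio::spawn(async move {
        let apl = state.apl.lock().await;
        println!("spawned task sees {} entries", apl.len());
    })
    .await
    .unwrap();
}

#[tokio::main]
async fn main() {
    register(AppState { apl: Mutex::new(vec![]) }).await;
}
```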

View file

@@ -1,11 +1,5 @@
use anyhow::Context;
use axum::{
body::Body,
extract::State,
http::{HeaderMap, StatusCode},
response::Response,
Json,
};
use axum::{extract::State, http::HeaderMap, Json};
use cynic::{http::SurfExt, MutationBuilder};
use rust_decimal::Decimal;
use saleor_app_sdk::{
@@ -21,7 +15,7 @@ use saleor_app_sdk::{
SyncWebhookEventType,
},
};
use serde::{Deserialize, Serialize};
use serde::Serialize;
use serde_json::Value;
use std::str::FromStr;
use tracing::{debug, error, info};
@@ -30,9 +24,8 @@ use crate::{
app::{ActiveGateway, AppError, AppState, GatewayType},
queries::{
event_transactions::{
PaymentGatewayInitializeSession, TransactionActionEnum, TransactionChargeRequested2,
TransactionFlowStrategyEnum, TransactionInitializeSession2, TransactionProcessSession,
TransactionProcessSession2, TransactionRefundRequested2,
TransactionChargeRequested2, TransactionFlowStrategyEnum,
TransactionInitializeSession2, TransactionProcessSession2, TransactionRefundRequested2,
},
mutation_transaction_update::{
TransactionUpdate, TransactionUpdateInput, TransactionUpdateVariables,
@@ -120,7 +113,10 @@ pub async fn webhooks(
..Default::default()
}),
});
let mut res = surf::post(&saleor_api_url).run_graphql(operation).await;
let mut res = surf::post(&saleor_api_url)
.header("authorization-bearer", auth_data.token)
.run_graphql(operation)
.await;
let mut webhook_result = WebhookResult::Failiure;
if let Ok(r) = &mut res
@@ -132,13 +128,11 @@
.errors
.iter()
.for_each(|e| error!("failed update transaction, {:?}", e));
} else {
if let Some(tr) = &mut q_res.transaction {
tr.message = serde_json::to_string(&PaymentMethod {
payment_method: GatewayType::COD,
})?;
webhook_result = WebhookResult::Success;
}
} else if let Some(tr) = &mut q_res.transaction {
tr.message = serde_json::to_string(&PaymentMethod {
payment_method: GatewayType::COD,
})?;
webhook_result = WebhookResult::Success;
}
}
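Two things happen in this hunk: the GraphQL call now carries the `authorization-bearer` token, and the `else { if let … }` nesting collapses into `else if let`, clippy's `collapsible_else_if`. A minimal sketch of the control-flow change, with stand-in types:

```rust
fn outcome(errors: &[String], transaction: Option<&str>) -> &'static str {
    if !errors.is_empty() {
        "failure"
    // Before: else { if let Some(tr) = transaction { … } }
    } else if let Some(_tr) = transaction {
        "success"
    } else {
        "no transaction in response"
    }
}

fn main() {
    assert_eq!(outcome(&[], Some("tr_1")), "success");
    assert_eq!(outcome(&["boom".to_string()], None), "failure");
    assert_eq!(outcome(&[], None), "no transaction in response");
}
```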

View file

@@ -4,11 +4,10 @@ use axum::{
response::{IntoResponse, Response},
};
use chrono::{DateTime, FixedOffset};
use fd_lock::RwLock;
use std::{fs::File, sync::Arc, time::Duration};
use std::{sync::Arc, time::Duration};
use tracing_subscriber::EnvFilter;
use redis::{AsyncCommands, Client, RedisError};
use redis::{AsyncCommands, Client};
use saleor_app_sdk::{config::Config, manifest::AppManifest, SaleorApp};
use serde::{Deserialize, Serialize};
use tracing::{debug, info, level_filters::LevelFilter};
@@ -87,8 +86,8 @@ pub struct SitemapConfig {
impl SitemapConfig {
pub fn load() -> Result<Self, envy::Error> {
dotenvy::dotenv().unwrap();
let env = envy::from_env::<SitemapConfig>();
env
envy::from_env::<SitemapConfig>()
}
}
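The loader shrinks per clippy's `let_and_return`: binding the result to `env` only to return it on the next line adds nothing, so the expression becomes the function's tail. A stand-in sketch (using `std::env` directly rather than the real `envy`/`dotenvy` crates):

```rust
#[derive(Debug)]
struct SitemapConfig {
    target_folder: String,
}

fn load() -> Result<SitemapConfig, std::env::VarError> {
    // Before (clippy: let_and_return):
    //   let env = std::env::var("SITEMAP_TARGET_FOLDER").map(...);
    //   env
    std::env::var("SITEMAP_TARGET_FOLDER")
        .map(|target_folder| SitemapConfig { target_folder })
}

fn main() {
    println!("{:?}", load());
}
```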

View file

@@ -1,4 +1,4 @@
use std::{rc::Rc, str::FromStr, sync::Arc};
use std::{str::FromStr, sync::Arc};
use anyhow::Context;
use axum::{
@@ -6,7 +6,6 @@ use axum::{
extract::State,
http::{HeaderMap, StatusCode},
};
use chrono::TimeZone;
use cynic::{http::SurfExt, QueryBuilder};
use saleor_app_sdk::{AuthData, AuthToken};
use sitemap_rs::url::Url;
@@ -21,7 +20,7 @@ use crate::{
self, CategoryUpdated, CollectionUpdated, PageUpdated, ProductUpdated,
},
get_all_categories_n_products::{
CategorisedProduct, Category, Category3, GetCategoriesInitial, GetCategoriesNext,
CategorisedProduct, Category3, GetCategoriesInitial, GetCategoriesNext,
GetCategoriesNextVariables, GetCategoryProductsInitial,
GetCategoryProductsInitialVariables, GetCategoryProductsNext,
GetCategoryProductsNextVariables,
@@ -263,12 +262,12 @@ pub async fn regenerate(state: AppState, saleor_api_url: String) -> anyhow::Resu
async fn get_all_pages(saleor_api_url: &str) -> anyhow::Result<Vec<get_all_pages::Page>> {
let operation = GetPagesInitial::build(());
let mut all_pages = vec![];
let res = surf::post(&saleor_api_url).run_graphql(operation).await;
let res = surf::post(saleor_api_url).run_graphql(operation).await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(pages) = &data.pages
{
debug!("fetched first pages, eg.:{:?}", &pages.edges.get(0));
debug!("fetched first pages, eg.:{:?}", &pages.edges.first());
all_pages.append(
&mut pages
.edges
@@ -280,9 +279,9 @@ async fn get_all_pages(saleor_api_url: &str) -> anyhow::Result<Vec<get_all_pages
let mut next_cursor = pages.page_info.end_cursor.clone();
loop {
if let Some(cursor) = &mut next_cursor {
let res = surf::post(&saleor_api_url)
let res = surf::post(saleor_api_url)
.run_graphql(GetPagesNext::build(GetPagesNextVariables {
after: &cursor,
after: cursor,
}))
.await;
if let Ok(query) = &res
@@ -296,11 +295,11 @@ async fn get_all_pages(saleor_api_url: &str) -> anyhow::Result<Vec<get_all_pages
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!("fetched next pages, eg.:{:?}", &pages.edges.get(0));
debug!("fetched next pages, eg.:{:?}", &pages.edges.first());
if !pages.page_info.has_next_page {
break;
}
next_cursor = pages.page_info.end_cursor.clone();
next_cursor.clone_from(&pages.page_info.end_cursor);
} else {
error!("Failed fetching initial pages! {:?}", &res);
anyhow::bail!("Failed fetching initial pages! {:?}", res);
@@ -321,7 +320,7 @@ async fn get_all_categories(saleor_api_url: &str) -> anyhow::Result<Vec<Category
debug!("Collecting all categories...");
let operation = GetCategoriesInitial::build(());
let mut all_categories = vec![];
let res = surf::post(&saleor_api_url).run_graphql(operation).await;
let res = surf::post(saleor_api_url).run_graphql(operation).await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(categories) = &data.categories
@@ -335,15 +334,15 @@ async fn get_all_categories(saleor_api_url: &str) -> anyhow::Result<Vec<Category
);
debug!(
"fetched first categories, eg.:{:?}",
&categories.edges.get(0)
&categories.edges.first()
);
//Keep fetching next page
let mut next_cursor = categories.page_info.end_cursor.clone();
loop {
if let Some(cursor) = &mut next_cursor {
let res = surf::post(&saleor_api_url)
let res = surf::post(saleor_api_url)
.run_graphql(GetCategoriesNext::build(GetCategoriesNextVariables {
after: Some(&cursor),
after: Some(cursor),
}))
.await;
if let Ok(query) = &res
@@ -359,12 +358,12 @@ async fn get_all_categories(saleor_api_url: &str) -> anyhow::Result<Vec<Category
);
debug!(
"fetched first categories, eg.:{:?}",
&categories.edges.get(0)
&categories.edges.first()
);
if !categories.page_info.has_next_page {
break;
}
next_cursor = categories.page_info.end_cursor.clone();
next_cursor.clone_from(&categories.page_info.end_cursor);
} else {
error!("Failed fetching initial pages! {:?}", &res);
anyhow::bail!("Failed fetching initial pages! {:?}", res);
@@ -385,7 +384,7 @@ async fn get_all_collections(saleor_api_url: &str) -> anyhow::Result<Vec<Collect
debug!("Collecting all Collections...");
let operation = GetCollectionsInitial::build(());
let mut all_collections = vec![];
let res = surf::post(&saleor_api_url).run_graphql(operation).await;
let res = surf::post(saleor_api_url).run_graphql(operation).await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(collections) = &data.collections
@@ -399,16 +398,16 @@ async fn get_all_collections(saleor_api_url: &str) -> anyhow::Result<Vec<Collect
);
debug!(
"fetched first collections, eg.:{:?}",
&collections.edges.get(0)
&collections.edges.first()
);
//Keep fetching next page
let mut next_cursor = collections.page_info.end_cursor.clone();
loop {
if let Some(cursor) = &mut next_cursor {
let res = surf::post(&saleor_api_url)
let res = surf::post(saleor_api_url)
.run_graphql(GetCollectionsNext::build(GetCollectionsNextVariables {
after: Some(&cursor),
after: Some(cursor),
}))
.await;
if let Ok(query) = &res
@@ -424,12 +423,12 @@ async fn get_all_collections(saleor_api_url: &str) -> anyhow::Result<Vec<Collect
);
debug!(
"fetched next collections, eg.:{:?}",
&collections.edges.get(0)
&collections.edges.first()
);
if !collections.page_info.has_next_page {
break;
}
next_cursor = collections.page_info.end_cursor.clone();
next_cursor.clone_from(&collections.page_info.end_cursor);
} else {
error!("Failed fetching initial collecnios! {:?}", &res);
anyhow::bail!("Failed fetching initial collections! {:?}", res);
@@ -457,7 +456,7 @@ async fn get_all_products(
id: &main_category.0.id,
});
let mut all_categorised_products: Vec<Arc<CategorisedProduct>> = vec![];
let res = surf::post(&saleor_api_url).run_graphql(operation).await;
let res = surf::post(saleor_api_url).run_graphql(operation).await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(category) = &data.category
@@ -476,15 +475,15 @@
.collect::<Vec<_>>(),
);
//Keep fetching next page
debug!("fetched first products, eg: {:?}", products.edges.get(0));
debug!("fetched first products, eg: {:?}", products.edges.first());
let mut next_cursor = products.page_info.end_cursor.clone();
loop {
if let Some(cursor) = &mut next_cursor {
let res = surf::post(&saleor_api_url)
let res = surf::post(saleor_api_url)
.run_graphql(GetCategoryProductsNext::build(
GetCategoryProductsNextVariables {
id: &main_category.0.id,
after: &cursor,
after: cursor,
},
))
.await;
@@ -505,11 +504,11 @@
})
.collect::<Vec<_>>(),
);
debug!("fetched next products, eg: {:?}", products.edges.get(0));
debug!("fetched next products, eg: {:?}", products.edges.first());
if !products.page_info.has_next_page {
break;
}
next_cursor = products.page_info.end_cursor.clone();
next_cursor.clone_from(&products.page_info.end_cursor);
} else {
error!("Failed fetching initial products! {:?}", &res);
anyhow::bail!("Failed fetching initial products! {:?}", res);

View file

@@ -6,7 +6,6 @@ use axum::{
http::{HeaderMap, StatusCode},
};
use chrono::{DateTime, Utc};
use flate2::{write::GzEncoder, Compression};
use saleor_app_sdk::{
headers::SALEOR_API_URL_HEADER,
webhooks::{
@@ -17,7 +16,7 @@ use saleor_app_sdk::{
use sitemap_rs::{
sitemap::Sitemap,
sitemap_index::SitemapIndex,
url::{ChangeFrequency, Url},
url::{Url},
url_set::UrlSet,
};
use tinytemplate::TinyTemplate;
@@ -27,7 +26,7 @@ use tracing::{debug, error, info};
use crate::{
app::{AppError, AppState, XmlData, XmlDataType},
queries::event_subjects_updated::{
Category, Category2, CategoryUpdated, Collection, CollectionUpdated, Page, PageInfo,
Category, Category2, CategoryUpdated, Collection, CollectionUpdated, Page,
PageUpdated, Product, ProductUpdated,
},
};
@@ -150,7 +149,7 @@ async fn update_sitemap_product(
"changed product {} found in xml_data, updating...",
product.slug
);
x.slug = product.slug.clone();
x.slug.clone_from(&product.slug);
x.relations = match &product.category {
Some(c) => vec![c.id.clone()],
None => vec![],
@@ -182,15 +181,13 @@ async fn update_sitemap_product(
.iter_mut()
.find(|x| x.id == c.id && x.data_type == XmlDataType::Category)
{
xml_cat.slug = c.slug.clone();
xml_cat.slug.clone_from(&c.slug);
xml_cat.last_modified = chrono::offset::Utc::now().fixed_offset();
// If the category exists but product isn't in relation to it yet,
// add it
if xml_cat
if !xml_cat
.relations
.iter()
.find(|c| **c == product.id)
.is_none()
.iter().any(|c| *c == product.id)
{
xml_cat.relations.push(product.id.clone());
}
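The membership test above is clippy's `search_is_some` rewrite: `.iter().find(|c| **c == product.id).is_none()` asks "is there no match", which `!relations.iter().any(…)` states directly, without materializing the found element. For example:

```rust
fn main() {
    let mut relations: Vec<String> = vec!["prod-1".into()];
    let product_id = "prod-2".to_string();

    // Before: relations.iter().find(|c| **c == product_id).is_none()
    if !relations.iter().any(|c| *c == product_id) {
        relations.push(product_id.clone());
    }
    assert_eq!(relations, ["prod-1", "prod-2"]);
}
```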
@@ -277,7 +274,7 @@ async fn update_sitemap_category(
return Ok(());
}
debug!("Category url changed, updating...");
xml_c.slug = category.slug.clone();
xml_c.slug.clone_from(&category.slug);
xml_c.last_modified = chrono::offset::Utc::now().fixed_offset();
if is_category_in_product_url {
debug!("{} products affected by change", affected_product_ids.len());
@@ -561,7 +558,7 @@ pub async fn write_xml(
//now check if buffer's over limit, else slice em up into multiple sitemaps
let len = buf.len() * std::mem::size_of::<u8>();
if len > 200000 {
let file_amount = (len as f32 / 150000 as f32).ceil() as usize;
let file_amount = (len as f32 / 150000_f32).ceil() as usize;
let sliced_urls: Vec<&[Url]> = urls.chunks(file_amount).collect();
let mut sitemaps: Vec<UrlSet> = vec![];
@@ -627,7 +624,6 @@ async fn update_sitemap_index(state: &AppState) -> anyhow::Result<()> {
p.file_name()
.expect("file dissapeared or broke during sitemap-index construction")
.to_string_lossy()
.to_string()
),
p.metadata().map_or(None, |meta| {
meta.modified().map_or(None, |modified| {