rework related changes

djkato 2024-07-16 17:46:00 +02:00
parent d199d679c4
commit 8744fb6acf
6 changed files with 484 additions and 72 deletions

View file

@@ -1,6 +1,6 @@
 [package]
 name = "sitemap-generator"
-version = "1.0.1"
+version = "1.0.0"
 edition = "2021"
 authors = ["Djkáťo <djkatovfx@gmail.com>"]
 description = "Creates and keeps Sitemap.xml uptodate with Saleor."
@@ -30,10 +30,8 @@ surf.workspace = true
 cynic = { workspace = true, features = ["http-surf"] }
 cynic-codegen.workspace = true
 thiserror.workspace = true
-rstest.workspace = true
-async-std = { workspace = true, features = ["attributes"] }
-toml = "0.8.14"
+# toml = "0.8.14"
 # tera = { version = "1.19.1", default-features = false }
 # fd-lock = "4.0.2"
 # quick-xml = { version = "0.34.0", features = ["serialize"] }
@@ -46,6 +44,8 @@ serde_cbor = "0.11.2"
 # itertools = "0.13.0"

 [dev-dependencies]
+rstest.workspace = true
+async-std = { workspace = true, features = ["attributes"] }
 random_word = { version = "0.4.3", features = ["en"] }
 rand = "0.8.5"
 serial_test = "3.1.1"

View file

@@ -39,7 +39,7 @@ where
 pub fn trace_to_std(config: &Config) -> anyhow::Result<()> {
     let filter = EnvFilter::builder()
-        .with_default_directive(LevelFilter::INFO.into())
+        .with_default_directive(LevelFilter::DEBUG.into())
         .from_env()?
         .add_directive(format!("{}={}", env!("CARGO_PKG_NAME"), config.log_level).parse()?);
     tracing_subscriber::fmt()
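
The DEBUG default directive is only a fallback used when RUST_LOG supplies no filter, and the crate-specific directive built from config.log_level still takes precedence for this crate's targets. A minimal, self-contained sketch of the same builder chain (the crate name and extra directive below are placeholders, not taken from this commit):

use tracing_subscriber::{filter::LevelFilter, EnvFilter};

fn main() -> anyhow::Result<()> {
    // DEBUG only applies when RUST_LOG is unset or empty; an explicit RUST_LOG
    // or the per-crate directive added below still wins for matching targets.
    let filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::DEBUG.into())
        .from_env()?
        .add_directive("my_crate=trace".parse()?); // placeholder crate and level
    tracing_subscriber::fmt().with_env_filter(filter).init();
    tracing::debug!("visible with the DEBUG fallback");
    Ok(())
}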

View file

@@ -13,7 +13,7 @@ use crate::{
         CollectionCreated, CollectionDeleted, CollectionUpdated, Page, PageCreated, PageDeleted,
         PageUpdated, Product, ProductCreated, ProductDeleted, ProductUpdated,
     },
-    sitemap::Url,
+    sitemap::{AffectedResult, AffectedType, Url},
 };
 use tokio::{sync::mpsc::Receiver, task::JoinHandle};
 use tracing::{debug, error, info, warn};
@@ -23,7 +23,7 @@ use super::{ItemData, ItemType, UrlSet};
 // 10k links google says, but there's also a size limit and my custom params might be messing with
 // that? Rather split prematurely to be sure.
 const MAX_URL_IN_SET: usize = 50_000;
-const DB_FILE_NAME: &str = "db.json";
+const DB_FILE_NAME: &str = "db.cbor";
 const SITEMAP_FILE_NAME: &str = "sitemap.txt";

 pub struct EventHandler {
@@ -182,14 +182,14 @@ impl EventHandler {
                 },
                 Event::Unknown => (),
             }
-            debug!("Event succesfully handled");
+            info!("Event succesfully handled");
         }
     }
 }

 /* =============== Event handlers =============== */

-async fn product_updated_or_created<T: Serialize>(
+async fn product_updated_or_created<T: Serialize + Clone>(
     request: T,
     product: Product,
     sitemap_config: &SitemapConfig,
@@ -211,7 +211,7 @@ async fn product_updated_or_created<T: Serialize>(
         .await;
 }

-async fn category_updated_or_created<T: Serialize>(
+async fn category_updated_or_created<T: Serialize + Clone>(
     request: T,
     category: Category2,
     sitemap_config: &SitemapConfig,
@@ -229,7 +229,7 @@ async fn category_updated_or_created<T: Serialize>(
         .await;
 }

-async fn page_updated_or_created<T: Serialize>(
+async fn page_updated_or_created<T: Serialize + Clone>(
     request: T,
     page: Page,
     sitemap_config: &SitemapConfig,
@@ -247,7 +247,7 @@ async fn page_updated_or_created<T: Serialize>(
         .await;
 }

-async fn collection_updated_or_created<T: Serialize>(
+async fn collection_updated_or_created<T: Serialize + Clone>(
     request: T,
     collection: Collection,
     sitemap_config: &SitemapConfig,
@@ -267,7 +267,7 @@ async fn collection_updated_or_created<T: Serialize>(

 /* ============= URL Manipulations ================ */

-async fn update_or_create<T: Serialize>(
+async fn update_or_create<T: Serialize + Clone>(
     data: T,
     sitemap_config: &SitemapConfig,
     item: ItemData,
@@ -293,32 +293,124 @@ async fn update_or_create<T: Serialize>(
         },
     };

-    let mut affected_urls = url_set.find_affected(&item.id, &item.slug);
-    debug!("affected urls: {:?}", &affected_urls);
-    if affected_urls.is_empty() {
-        debug!("{:?} doesn't exist in url_set yet", &item.slug);
-        url_set.push(Url::new(data, sitemap_config, item, rel_item).unwrap());
-    } else {
-        // Update affected urls
-        affected_urls.iter_mut().for_each(|url| {
-            let mut templater = TinyTemplate::new();
-            templater
-                .add_template("product", &sitemap_config.product_template)
-                .expect("Check your url templates!");
-            let new_loc = templater
-                .render("product", &data)
-                .expect("Check your url templates!");
-            debug!("updated `{}` to `{}`", &url.url, new_loc);
-            url.url = new_loc;
-        });
-    }
-
-    write_db_to_file(&url_set, &sitemap_config.target_folder)
-        .await
-        .unwrap();
-    write_url_set_to_file(&url_set, &sitemap_config.target_folder)
-        .await
-        .unwrap();
+    let affected_urls = url_set.find_affected(&item.id, &item.slug);
+    match affected_urls {
+        AffectedResult::NoneRelated => {
+            debug!("{:?} doesn't exist in url_set yet", &item.slug);
+            std::mem::drop(affected_urls);
+            let new_url = match Url::new(data, sitemap_config, item, rel_item) {
+                Ok(v) => v,
+                Err(e) => {
+                    error!("Failed creating new url, {:?}", e);
+                    return;
+                }
+            };
+            url_set.push(new_url);
+        }
+        AffectedResult::NoneAffected => {
+            debug!("Changes haven't affected any urls, ignoring...");
+            return;
+        }
+        AffectedResult::Some(mut affected_urls) => {
+            debug!("affected urls: {:?}", &affected_urls);
+            for affected in affected_urls.iter_mut() {
+                match affected {
+                    AffectedType::Data(url) => {
+                        match Url::new(
+                            data.clone(),
+                            &sitemap_config,
+                            item.clone(),
+                            rel_item.clone(),
+                        ) {
+                            Ok(new_url) => {
+                                url.url = new_url.url;
+                                url.data = new_url.data;
+                                url.related = new_url.related;
+                            }
+                            Err(e) => error!("Failed updating url, {:?}", e),
+                        }
+                    }
+                    AffectedType::RelatedData(url) => {
+                        url.related = Some(item.clone());
+                        match url.data.typ {
+                            ItemType::Product => {
+                                let new_data: ProductCreated = url.clone().into();
+                                match Url::new(
+                                    new_data,
+                                    &sitemap_config,
+                                    url.clone().data,
+                                    Some(item.clone()),
+                                ) {
+                                    Ok(new_url) => {
+                                        url.url = new_url.url;
+                                        url.data = new_url.data;
+                                        url.related = new_url.related;
+                                    }
+                                    Err(e) => error!("Failed updating url, {:?}", e),
+                                }
+                            }
+                            ItemType::Collection => {
+                                let new_data: CollectionCreated = url.clone().into();
+                                match Url::new(
+                                    new_data,
+                                    &sitemap_config,
+                                    url.clone().data,
+                                    Some(item.clone()),
+                                ) {
+                                    Ok(new_url) => {
+                                        url.url = new_url.url;
+                                        url.data = new_url.data;
+                                        url.related = new_url.related;
+                                    }
+                                    Err(e) => error!("Failed updating url, {:?}", e),
+                                }
+                            }
+                            ItemType::Page => {
+                                let new_data: PageCreated = url.clone().into();
+                                match Url::new(
+                                    new_data,
+                                    &sitemap_config,
+                                    url.clone().data,
+                                    Some(item.clone()),
+                                ) {
+                                    Ok(new_url) => {
+                                        url.url = new_url.url;
+                                        url.data = new_url.data;
+                                        url.related = new_url.related;
+                                    }
+                                    Err(e) => error!("Failed updating url, {:?}", e),
+                                }
+                            }
+                            ItemType::Category => {
+                                let new_data: CollectionCreated = url.clone().into();
+                                match Url::new(
+                                    new_data,
+                                    &sitemap_config,
+                                    url.clone().data,
+                                    Some(item.clone()),
+                                ) {
+                                    Ok(new_url) => {
+                                        url.url = new_url.url;
+                                        url.data = new_url.data;
+                                        url.related = new_url.related;
+                                    }
+                                    Err(e) => error!("Failed updating url, {:?}", e),
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    if let Err(e) = write_db_to_file(&url_set, &sitemap_config.target_folder).await {
+        error!("failed writing DB to file, {:?}", e);
+    }
+    if let Err(e) = write_url_set_to_file(&url_set, &sitemap_config.target_folder).await {
+        error!("failed writing url to file, {:?}", e);
+    }
 }

 async fn delete(id: &str, sitemap_config: &SitemapConfig) {
@@ -343,20 +435,19 @@ async fn delete(id: &str, sitemap_config: &SitemapConfig) {
     };

     url_set.flush_related(id);

-    write_db_to_file(&url_set, &sitemap_config.target_folder)
-        .await
-        .unwrap();
-    write_url_set_to_file(&url_set, &sitemap_config.target_folder)
-        .await
-        .unwrap();
+    if let Err(e) = write_db_to_file(&url_set, &sitemap_config.target_folder).await {
+        error!("failed writing DB to file, {:?}", e);
+    }
+    if let Err(e) = write_url_set_to_file(&url_set, &sitemap_config.target_folder).await {
+        error!("failed writing url to file, {:?}", e);
+    }
 }

 /* =================== File and SerDe operations ========================= */

 pub async fn get_db_from_file(target_folder: &str) -> Result<UrlSet, UrlSetFileOperationsErr> {
     let urls: UrlSet =
-        serde_json::de::from_slice(&std::fs::read(format!("{target_folder}/{DB_FILE_NAME}"))?)
-            .unwrap();
+        serde_cbor::de::from_slice(&std::fs::read(format!("{target_folder}/{DB_FILE_NAME}"))?)?;
     Ok(urls)
 }
@@ -370,7 +461,7 @@ pub async fn write_db_to_file(
     }
     fs::write(
         format!("{target_folder}/{DB_FILE_NAME}"),
-        serde_json::to_vec(url_set).unwrap(),
+        serde_cbor::to_vec(url_set)?,
     )?;
     Ok(())
 }
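
Since DB_FILE_NAME now points at db.cbor, the persisted UrlSet goes through serde_cbor instead of serde_json, and both helpers propagate errors with ? rather than unwrapping. A minimal round trip over the same two calls (MiniUrlSet is a made-up stand-in for the real UrlSet, used only to keep the sketch self-contained):

use serde::{Deserialize, Serialize};

// Made-up stand-in for the real UrlSet; only the serde_cbor calls mirror the diff.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct MiniUrlSet {
    urls: Vec<String>,
}

fn main() -> Result<(), serde_cbor::Error> {
    let set = MiniUrlSet {
        urls: vec!["https://example.com/summer/shoes".into()],
    };
    // write_db_to_file now does the equivalent of this instead of serde_json::to_vec(...)
    let bytes = serde_cbor::to_vec(&set)?;
    // get_db_from_file now does the equivalent of this, with ? instead of .unwrap()
    let restored: MiniUrlSet = serde_cbor::de::from_slice(&bytes)?;
    assert_eq!(set, restored);
    Ok(())
}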

View file

@@ -3,11 +3,18 @@ pub mod regenerate;
 use std::ops::{Deref, DerefMut};

+use saleor_app_sdk::webhooks::{utils::EitherWebhookType, AsyncWebhookEventType};
 use serde::{Deserialize, Serialize};
 use tinytemplate::TinyTemplate;
 use tracing::debug;

-use crate::app::SitemapConfig;
+use crate::{
+    app::SitemapConfig,
+    queries::event_subjects_updated::{
+        Category, Category2, CategoryCreated, CategoryDeleted, Collection, CollectionCreated,
+        CollectionDeleted, Page, PageCreated, PageDeleted, Product, ProductCreated, ProductDeleted,
+    },
+};

 const SITEMAP_XMLNS: &str = "http://sitemaps.org/schemas/sitemap/0.9";
 const SALEOR_REF_XMLNS: &str = "http://app-sitemap-generator.kremik.sk/xml-schemas/saleor-ref.xsd";
@@ -55,28 +62,45 @@ impl UrlSet {
         .collect()
     }

-    pub fn find_affected(&mut self, id: &str, slug: &str) -> Vec<&mut Url> {
-        self.iter_mut()
-            .filter(|u| {
-                debug!(
-                    "comparing: ( {} == {} && {} != {} ) || ( {:?} == {} && {:?} != {} )",
-                    &u.data.id,
-                    &id,
-                    &u.data.slug,
-                    &slug,
-                    u.related.clone().map(|ud| ud.id),
-                    &id,
-                    u.related.clone().map(|ud| ud.slug),
-                    &slug
-                );
-                (u.data.id == id && u.data.slug != slug)
-                    || (u
-                        .related
-                        .as_ref()
-                        .map_or(false, |ud| ud.id == id && ud.slug != slug))
-            })
-            .collect()
-    }
+    pub fn find_affected(&mut self, id: &str, slug: &str) -> AffectedResult<'_> {
+        let related: Vec<&mut Url> = self.find_related(id);
+        debug!("related urls: {:?}", &related);
+        if related.is_empty() {
+            return AffectedResult::NoneRelated;
+        }
+
+        let affected = related
+            .into_iter()
+            .filter(|u| {
+                (u.data.id == id && u.data.slug != slug)
+                    || u.related
+                        .as_ref()
+                        .map_or(false, |r| (r.id == id && r.slug != slug))
+            })
+            .map(|u| match u.data.id == id {
+                true => AffectedType::Data(u),
+                false => AffectedType::RelatedData(u),
+            })
+            .collect::<Vec<_>>();
+
+        if affected.is_empty() {
+            return AffectedResult::NoneAffected;
+        }
+        AffectedResult::Some(affected)
+    }
+}
+
+#[derive(Debug)]
+pub enum AffectedResult<'a> {
+    Some(Vec<AffectedType<&'a mut Url>>),
+    NoneAffected,
+    NoneRelated,
+}
+
+#[derive(Debug)]
+pub enum AffectedType<T> {
+    Data(T),
+    RelatedData(T),
 }

 impl Deref for UrlSet {
@@ -117,8 +141,153 @@ impl Url {
             related: rel_item,
         })
     }
+
+    pub fn into_event_updated_body(self, slug_postfix: &str) -> (String, EitherWebhookType) {
+        match self.data.typ.clone() {
+            ItemType::Product => {
+                let mut data: ProductCreated = self.into();
+                data.product = data.product.map(|mut p| {
+                    p.slug = p.slug.clone() + slug_postfix;
+                    p
+                });
+                // debug!("{:?}", &data);
+                (
+                    serde_json::to_string_pretty(&data).unwrap(),
+                    EitherWebhookType::Async(AsyncWebhookEventType::ProductUpdated),
+                )
+            }
+            ItemType::Category => {
+                let mut data: CategoryCreated = self.into();
+                data.category = data.category.map(|mut p| {
+                    p.slug = p.slug.clone() + slug_postfix;
+                    p
+                });
+                (
+                    serde_json::to_string_pretty(&data).unwrap(),
+                    EitherWebhookType::Async(AsyncWebhookEventType::CategoryUpdated),
+                )
+            }
+            ItemType::Page => {
+                let mut data: PageCreated = self.into();
+                data.page = data.page.map(|mut p| {
+                    p.slug = p.slug.clone() + slug_postfix;
+                    p
+                });
+                (
+                    serde_json::to_string_pretty(&data).unwrap(),
+                    EitherWebhookType::Async(AsyncWebhookEventType::PageUpdated),
+                )
+            }
+            ItemType::Collection => {
+                let mut data: CollectionCreated = self.into();
+                data.collection = data.collection.map(|mut p| {
+                    p.slug = p.slug.clone() + slug_postfix;
+                    p
+                });
+                (
+                    serde_json::to_string_pretty(&data).unwrap(),
+                    EitherWebhookType::Async(AsyncWebhookEventType::CollectionUpdated),
+                )
+            }
+        }
+    }
 }

+impl From<Url> for ProductCreated {
+    fn from(value: Url) -> Self {
+        Self {
+            product: Some(Product {
+                slug: value.data.slug,
+                id: cynic::Id::new(value.data.id),
+                category: value.related.map(|c| Category {
+                    slug: c.slug,
+                    id: cynic::Id::new(c.id),
+                }),
+            }),
+        }
+    }
+}
+
+impl From<Url> for CategoryCreated {
+    fn from(value: Url) -> Self {
+        Self {
+            category: Some(Category2 {
+                slug: value.data.slug,
+                id: cynic::Id::new(value.data.id),
+            }),
+        }
+    }
+}
+
+impl From<Url> for CollectionCreated {
+    fn from(value: Url) -> Self {
+        Self {
+            collection: Some(Collection {
+                slug: value.data.slug,
+                id: cynic::Id::new(value.data.id),
+            }),
+        }
+    }
+}
+
+impl From<Url> for PageCreated {
+    fn from(value: Url) -> Self {
+        Self {
+            page: Some(Page {
+                slug: value.data.slug,
+                id: cynic::Id::new(value.data.id),
+            }),
+        }
+    }
+}
+
+impl From<Url> for ProductDeleted {
+    fn from(value: Url) -> Self {
+        Self {
+            product: Some(Product {
+                slug: value.data.slug,
+                id: cynic::Id::new(value.data.id),
+                category: value.related.map(|c| Category {
+                    slug: c.slug,
+                    id: cynic::Id::new(c.id),
+                }),
+            }),
+        }
+    }
+}
+
+impl From<Url> for CategoryDeleted {
+    fn from(value: Url) -> Self {
+        Self {
+            category: Some(Category2 {
+                slug: value.data.slug,
+                id: cynic::Id::new(value.data.id),
+            }),
+        }
+    }
+}
+
+impl From<Url> for CollectionDeleted {
+    fn from(value: Url) -> Self {
+        Self {
+            collection: Some(Collection {
+                slug: value.data.slug,
+                id: cynic::Id::new(value.data.id),
+            }),
+        }
+    }
+}
+
+impl From<Url> for PageDeleted {
+    fn from(value: Url) -> Self {
+        Self {
+            page: Some(Page {
+                slug: value.data.slug,
+                id: cynic::Id::new(value.data.id),
+            }),
+        }
+    }
+}
+
 #[derive(thiserror::Error, Debug)]
 pub enum NewUrlError {
     #[error("Some property inside passed data for new url was None, but should've been Some")]

View file

@@ -2,7 +2,11 @@ mod utils;

 use std::time::Duration;

-use crate::{create_app, sitemap::UrlSet};
+use crate::{
+    create_app,
+    queries::event_subjects_updated::{Category, Product, ProductUpdated},
+    sitemap::{ItemType, Url, UrlSet},
+};
 use async_std::task::sleep;
 use axum::{
     body::Body,
@@ -12,12 +16,13 @@ use axum::{
 use rstest::*;
 use saleor_app_sdk::{
     headers::{SALEOR_API_URL_HEADER, SALEOR_EVENT_HEADER},
-    webhooks::utils::EitherWebhookType,
+    webhooks::{utils::EitherWebhookType, AsyncWebhookEventType},
 };
 use serial_test::{parallel, serial};
 use tower::{Service, ServiceExt};
+use tracing::debug;
 use tracing_test::traced_test;
-use utils::{gen_random_url_set, testing_configs};
+use utils::{create_query, gen_random_url_set, testing_configs};

 async fn init_test_app() -> RouterIntoService<Body> {
     if let Err(e) = std::fs::remove_dir_all("./temp/sitemaps") {
@@ -39,7 +44,7 @@ async fn init_test_app() -> RouterIntoService<Body> {
 #[tokio::test]
 #[traced_test]
 #[serial]
-pub async fn index_returns_ok() {
+pub async fn app_runs_and_responses() {
     let mut app = init_test_app().await;

     let response = app
@@ -52,6 +57,116 @@ pub async fn index_returns_ok() {
     assert_eq!(response.status(), StatusCode::OK);
 }

+#[rstest]
+#[tokio::test]
+#[traced_test]
+#[serial]
+async fn update_event_updates_correctly() {
+    let mut app = init_test_app().await;
+    let (_, sitemap_config) = testing_configs();
+
+    let mut evn = gen_random_url_set(50, &sitemap_config);
+    for (body, _, webhook_type) in evn.clone() {
+        app = create_query(app, body, webhook_type).await;
+    }
+
+    //wait for the file to get written
+    sleep(Duration::from_secs(1)).await;
+    let file_url = std::fs::read_to_string("./temp/sitemaps/sitemap.txt").unwrap();
+
+    assert_eq!(
+        file_url,
+        evn.iter()
+            .map(|u| u.1.url.clone())
+            .collect::<Vec<_>>()
+            .join("\n"),
+    );
+
+    /* ======== Now update it and see if it changed correctly ========*/
+    {
+        let (_, update_1, _) = evn
+            .iter_mut()
+            .find(|e| e.1.data.typ == ItemType::Product)
+            // 0.01785820902% chance this will crash, wanna bet? :D
+            .expect("you rolled a 0.01785820902% chance just now, feel proud of yourself");
+
+        //no nice way to do this, I control the templates in test anyways so whatever
+        let q_1 = update_1.clone().into_event_updated_body("_UPDATED");
+        debug!("{:?}", &q_1);
+
+        update_1.data.slug = update_1.clone().data.slug + "_UPDATED";
+        update_1.url = format!(
+            "https://example.com/{}/{}",
+            &update_1.related.as_ref().unwrap().slug,
+            &update_1.data.slug
+        );
+        debug!("{:?}", &update_1.url);
+
+        app = create_query(
+            app,
+            q_1.0,
+            EitherWebhookType::Async(AsyncWebhookEventType::ProductUpdated),
+        )
+        .await;
+
+        sleep(Duration::from_secs(1)).await;
+        let file_url = std::fs::read_to_string("./temp/sitemaps/sitemap.txt").unwrap();
+
+        assert_eq!(
+            file_url,
+            evn.clone()
+                .iter()
+                .map(|u| u.1.url.clone())
+                .collect::<Vec<_>>()
+                .join("\n"),
+        );
+    }
+
+    /* ======== Now update a category and see if all products are correct ========*/
+    let affected_id: String;
+    let affected_slug: String;
+    {
+        let (_, update_2, _) = evn
+            .iter_mut()
+            .find(|e| e.1.data.typ == ItemType::Category)
+            // 0.01785820902% chance this will crash, wanna bet? :D
+            .expect("you rolled a 0.01785820902% chance just now, feel proud of yourself");
+
+        //no nice way to do this, I control the templates in test anyways so whatever
+        let q_2 = update_2.clone().into_event_updated_body("_UPDATED");
+        debug!("{:?}", &q_2);
+
+        app = create_query(
+            app,
+            q_2.0,
+            EitherWebhookType::Async(AsyncWebhookEventType::CategoryUpdated),
+        )
+        .await;
+
+        update_2.data.slug = update_2.clone().data.slug + "_UPDATED";
+        update_2.url = format!("https://example.com/{}", &update_2.data.slug);
+        debug!("{:?}", &update_2.url);
+
+        affected_id = update_2.data.id.clone();
+        affected_slug = update_2.data.slug.clone();
+    }
+
+    evn.iter_mut().for_each(|u| {
+        if u.1.data.typ == ItemType::Product
+            && u.1.related.as_ref().map_or(false, |c| c.id == affected_id)
+        {
+            u.1.url = format!("https://example.com/{}/{}", affected_slug, &u.1.data.slug);
+        }
+    });
+
+    sleep(Duration::from_secs(1)).await;
+    let file_url = std::fs::read_to_string("./temp/sitemaps/sitemap.txt").unwrap();
+
+    assert_eq!(
+        file_url,
+        evn.iter()
+            .map(|u| u.1.url.clone())
+            .collect::<Vec<_>>()
+            .join("\n"),
+    );
+}
+
 #[rstest]
 #[tokio::test]
 #[traced_test]
@@ -87,7 +202,7 @@ async fn updates_sitemap_from_request() {
     assert_eq!(response.status(), StatusCode::OK);

     //wait for the file to get written
-    sleep(Duration::from_secs(3)).await;
+    sleep(Duration::from_secs(1)).await;

     let file_url = std::fs::read_to_string("./temp/sitemaps/sitemap.txt").unwrap();

@@ -97,12 +212,12 @@
 #[rstest]
 #[tokio::test]
 #[traced_test]
-#[parallel]
+#[serial]
 async fn sequence_of_actions_is_preserved() {
     let mut app = init_test_app().await;
     let (_, sitemap_config) = testing_configs();

-    let evn = gen_random_url_set(1000, &sitemap_config);
+    let evn = gen_random_url_set(10, &sitemap_config);
     for (body, _, webhook_type) in evn.clone() {
         let response = app
             .ready()
@@ -129,7 +244,7 @@ async fn sequence_of_actions_is_preserved() {
     }

     //wait for the file to get written
-    sleep(Duration::from_secs(3)).await;
+    sleep(Duration::from_secs(1)).await;

     let file_url = std::fs::read_to_string("./temp/sitemaps/sitemap.txt").unwrap();

View file

@@ -1,3 +1,8 @@
+use axum::{
+    body::Body,
+    http::{Request, Response, StatusCode},
+    routing::RouterIntoService,
+};
 use rand::{
     distributions::{Distribution, Standard},
     seq::SliceRandom,
@@ -6,8 +11,10 @@ use rand::{
 use saleor_app_sdk::{
     apl::AplType,
     config::Config,
+    headers::{SALEOR_API_URL_HEADER, SALEOR_EVENT_HEADER},
     webhooks::{utils::EitherWebhookType, AsyncWebhookEventType},
 };
+use tower::{Service, ServiceExt};
 use tracing::Level;

 use crate::{
@@ -66,6 +73,36 @@ pub fn testing_configs() -> (Config, SitemapConfig) {
     )
 }

+pub async fn create_query(
+    mut app: RouterIntoService<Body>,
+    body: String,
+    webhook: EitherWebhookType,
+) -> RouterIntoService<Body> {
+    let response = app
+        .ready()
+        .await
+        .unwrap()
+        .call(
+            Request::builder()
+                .uri("/api/webhooks")
+                .header(SALEOR_API_URL_HEADER, "https://api.example.com")
+                .header(
+                    SALEOR_EVENT_HEADER,
+                    match webhook {
+                        EitherWebhookType::Sync(s) => s.as_ref().to_string(),
+                        EitherWebhookType::Async(a) => a.as_ref().to_string(),
+                    },
+                )
+                .body(Body::from(body))
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    assert_eq!(response.status(), StatusCode::OK);
+    app
+}
+
 pub struct Action {
     request_body: String,
     url: Url,