Basically whole app in one commit lol

This commit is contained in:
djkato 2024-07-14 22:28:21 +02:00
parent 9ef2313c1e
commit 5fa0a3f4c0
20 changed files with 1550 additions and 647 deletions

View file

@ -20,6 +20,7 @@ envy.workspace = true
tracing.workspace = true
tracing-serde.workspace = true
tracing-subscriber.workspace = true
tracing-test = "0.2.5"
dotenvy.workspace = true
axum.workspace = true
saleor-app-sdk = { workspace = true, features = ["file_apl"] }
@ -44,5 +45,10 @@ serde_cbor = "0.11.2"
# rayon = "1.10.0"
# itertools = "0.13.0"
[dev-dependencies]
random_word = { version = "0.4.3", features = ["en"] }
rand = "0.8.5"
serial_test = "3.1.1"
[build-dependencies]
cynic-codegen.workspace = true

View file

@ -1,9 +1,7 @@
# Using sitemap-generator
To clear the cache, you can run the program with `./sitemap-generator --for-url https://my-saleor-api.com/graphql --cache-clear` or `docker compose --rm app-sitemap-generator sitemap-generator --for-url https://my-saleor-api.com/graphql --cache-clear`
To regenerate the cache, you can run the program with `./sitemap-generator --for-url https://my-saleor-api.com/graphql --cache-regenerate` or `docker compose --rm app-sitemap-generator sitemap-generator --for-url https://my-saleor-api.com/graphql --cache-regenerate`
You can also pass both flags (put `--cache-regenerate` first), which will clear and then regenerate the cache.
Only works for a single website. No locale support and no sitemap-index; outputs only a plain sitemap.txt file. Downside: a limit of 50 000 links. Upside: easy to write c:
Partially supports relations of objects (Category-product), where the sitemap template can use info from both.
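For readers wiring this into their own entrypoint, here is a minimal sketch of how the two flags above could be detected at startup; it assumes plain `std::env::args` and is not necessarily the app's actual CLI handling:

fn cache_flags() -> (bool, bool) {
    // Hedged sketch only; the real binary may parse arguments differently.
    let args: Vec<String> = std::env::args().collect();
    let clear = args.iter().any(|a| a == "--cache-clear");
    let regenerate = args.iter().any(|a| a == "--cache-regenerate");
    (clear, regenerate)
}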
# Unofficial Saleor App Template

View file

@ -10,7 +10,7 @@ use saleor_app_sdk::{config::Config, manifest::AppManifest, SaleorApp};
use serde::{Deserialize, Serialize};
use tracing::level_filters::LevelFilter;
use crate::queries::event_subjects_updated::Event;
use crate::sitemap::event_handler::Event;
// Make our own error that wraps `anyhow::Error`.
pub struct AppError(anyhow::Error);
@ -39,7 +39,7 @@ where
pub fn trace_to_std(config: &Config) -> anyhow::Result<()> {
let filter = EnvFilter::builder()
.with_default_directive(LevelFilter::DEBUG.into())
.with_default_directive(LevelFilter::INFO.into())
.from_env()?
.add_directive(format!("{}={}", env!("CARGO_PKG_NAME"), config.log_level).parse()?);
tracing_subscriber::fmt()
@ -79,6 +79,8 @@ pub struct SitemapConfig {
pub collection_template: String,
#[serde(rename = "sitemap_index_hostname")]
pub index_hostname: String,
#[serde(rename = "sitemap_allowed_host")]
pub allowed_host: String,
}
impl SitemapConfig {

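For context, the `serde(rename = ...)` attributes above map struct fields to environment variables. A minimal sketch of loading this config with envy (which the workspace already depends on); whether the app calls `envy::from_env` exactly like this is an assumption:

// Hedged sketch: envy matches struct fields against env var names, so the
// field `allowed_host` (renamed "sitemap_allowed_host") would be read from
// SITEMAP_ALLOWED_HOST.
let sitemap_config: SitemapConfig = envy::from_env()?;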
View file

@ -5,13 +5,13 @@
dead_code
)]
#![feature(let_chains)]
#![deny(clippy::unwrap_used, clippy::expect_used)]
// #![deny(clippy::unwrap_used, clippy::expect_used)]
mod app;
mod queries;
mod routes;
mod sitemap;
#[cfg(debug_assertions)]
#[cfg(test)]
mod tests;
use axum::Router;
@ -21,14 +21,9 @@ use saleor_app_sdk::{
webhooks::{AsyncWebhookEventType, WebhookManifestBuilder},
SaleorApp,
};
use sitemap::event_handler::EventHandler;
use std::sync::Arc;
use tokio::{
spawn,
sync::{
mpsc::{channel, Receiver},
Mutex,
},
};
use tokio::sync::Mutex;
use tracing::{debug, error, info};
use crate::{
@ -62,16 +57,16 @@ async fn main() {
}
async fn create_app(config: &Config, sitemap_config: SitemapConfig) -> Router {
let saleor_app = SaleorApp::new(&config).unwrap();
let saleor_app = SaleorApp::new(config).unwrap();
debug!("Creating saleor App...");
let app_manifest = AppManifestBuilder::new(&config, cargo_info!())
let app_manifest = AppManifestBuilder::new(config, cargo_info!())
.add_permissions(vec![
AppPermission::ManageProducts,
AppPermission::ManagePages,
])
.add_webhook(
WebhookManifestBuilder::new(&config)
WebhookManifestBuilder::new(config)
.set_query(EVENTS_QUERY)
.add_async_events(vec![
AsyncWebhookEventType::ProductCreated,
@ -92,9 +87,10 @@ async fn create_app(config: &Config, sitemap_config: SitemapConfig) -> Router {
.build();
debug!("Created AppManifest...");
//Task queue
let (sender, receiver) = tokio::sync::mpsc::channel(100);
EventHandler::start(sitemap_config.clone(), receiver);
let app_state = AppState {
task_queue_sender: sender,
sitemap_config,
@ -102,7 +98,7 @@ async fn create_app(config: &Config, sitemap_config: SitemapConfig) -> Router {
config: config.clone(),
target_channel: match dotenvy::var("CHANNEL_SLUG") {
Ok(v) => v,
Err(e) => {
Err(_) => {
error!("Missing channel slug, Saleor will soon deprecate product queries without channel specified.");
"".to_string()
}

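The `channel(100)` plus `EventHandler::start` pair above is the app's task queue: webhook handlers act as producers on the sender half, while the event-handler task is the single consumer, so events are processed in arrival order. A self-contained sketch of the same pattern with simplified types (not app code):

// Minimal producer/consumer sketch mirroring the wiring above.
use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    let (sender, mut receiver) = mpsc::channel::<String>(100);
    // Consumer: a single task drains the queue, preserving event order.
    let consumer = tokio::spawn(async move {
        while let Some(event) = receiver.recv().await {
            println!("handling {event}");
        }
    });
    // Producers: handlers clone the sender and enqueue work.
    sender.send("ProductUpdated".to_string()).await.unwrap();
    drop(sender); // dropping every sender lets the consumer loop end
    consumer.await.unwrap();
}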
View file

@ -101,12 +101,12 @@ pub struct ProductUpdated {
pub product: Option<Product>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct ProductDeleted {
pub product: Option<Product>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct ProductCreated {
pub product: Option<Product>,
}
@ -123,12 +123,12 @@ pub struct PageUpdated {
pub page: Option<Page>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct PageDeleted {
pub page: Option<Page>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct PageCreated {
pub page: Option<Page>,
}
@ -144,12 +144,12 @@ pub struct CollectionUpdated {
pub collection: Option<Collection>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct CollectionDeleted {
pub collection: Option<Collection>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct CollectionCreated {
pub collection: Option<Collection>,
}
@ -165,12 +165,12 @@ pub struct CategoryUpdated {
pub category: Option<Category2>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct CategoryDeleted {
pub category: Option<Category2>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct CategoryCreated {
pub category: Option<Category2>,
}

View file

@ -1,10 +1,5 @@
#[cynic::schema("saleor")]
mod schema {}
pub struct CategorisedProduct {
pub product: Product,
pub category_id: cynic::Id,
}
/*
query getCategoriesInitial {
categories(first: 50) {
@ -38,46 +33,6 @@ query getCategoriesNext($after: String) {
}
}
}
query getCategoryProductsInitial($id: ID!, $channel: String!) {
category(id: $id) {
slug
id
updatedAt
products(first: 50, channel: $channel) {
pageInfo {
hasNextPage
endCursor
}
edges {
node {
id
slug
updatedAt
}
}
totalCount
}
}
}
query getCategoryProductsNext($id: ID!, $after: String!, $channel: String!) {
category(id: $id) {
products(first: 50, after: $after, channel: $channel) {
pageInfo {
hasNextPage
endCursor
}
edges {
node {
id
slug
updatedAt
}
}
}
}
}
*/
#[derive(cynic::QueryVariables, Debug, Clone)]

View file

@ -0,0 +1,108 @@
#[cynic::schema("saleor")]
mod schema {}
/*
query getProductsInitial($id: ID!, $channel: String!) {
category(id: $id) {
slug
id
updatedAt
products(first: 50, channel: $channel) {
pageInfo {
hasNextPage
endCursor
}
edges {
node {
id
slug
updatedAt
category {
id
slug
}
}
}
totalCount
}
}
}
query getProductsNext($after: String!, $channel: String!) {
products(first: 50, after: $after, channel: $channel) {
pageInfo {
hasNextPage
endCursor
}
edges {
node {
id
slug
updatedAt
category {
id
slug
}
}
}
}
}
*/
#[derive(cynic::QueryVariables, Debug)]
pub struct GetProductsInitialVariables<'a> {
pub channel: &'a str,
}
#[derive(cynic::QueryVariables, Debug)]
pub struct GetProductsNextVariables<'a> {
pub after: &'a str,
pub channel: &'a str,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query", variables = "GetProductsInitialVariables")]
pub struct GetProductsInitial {
#[arguments(first: 50, channel: $channel)]
pub products: Option<ProductCountableConnection>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query", variables = "GetProductsNextVariables")]
pub struct GetProductsNext {
#[arguments(first: 50, after: $after, channel: $channel)]
pub products: Option<ProductCountableConnection>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct ProductCountableConnection {
pub page_info: PageInfo,
pub edges: Vec<ProductCountableEdge>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct ProductCountableEdge {
pub node: Product,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct Product {
pub id: cynic::Id,
pub slug: String,
pub updated_at: DateTime,
pub category: Option<Category>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct PageInfo {
pub has_next_page: bool,
pub end_cursor: Option<String>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct Category {
pub id: cynic::Id,
pub slug: String,
}
#[derive(cynic::Scalar, Debug, Clone)]
pub struct DateTime(pub String);
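A usage sketch for these fragments, mirroring the fetchers elsewhere in this commit (cynic's `QueryBuilder` plus surf's `SurfExt`); the URL, channel, and token below are placeholders:

use cynic::{http::SurfExt, QueryBuilder};

// Build the first-page operation and POST it; values are placeholders.
let op = GetProductsInitial::build(GetProductsInitialVariables {
    channel: "default-channel",
});
let res = surf::post("https://my-saleor-api.com/graphql")
    .header("authorization-bearer", "<app token>")
    .run_graphql(op)
    .await;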

View file

@ -1,4 +1,5 @@
pub mod event_subjects_updated;
pub mod get_all_categories_n_products;
pub mod get_all_categories;
pub mod get_all_collections;
pub mod get_all_pages;
pub mod get_all_products;

View file

@ -30,6 +30,8 @@ pub fn create_routes(state: AppState) -> Router {
#[cfg(not(debug_assertions))]
let serve_dir = ServeDir::new("./public").not_found_service(service);
// When working in a workspace, cargo runs relative to the workspace dir, not the app dir.
// This is a dev-only workaround
#[cfg(debug_assertions)]
let serve_dir = ServeDir::new("./sitemap-generator/public").not_found_service(service);
//TODO: Query for everything using the app auth token

View file

@ -1,4 +1,4 @@
use std::{str::FromStr, sync::Arc};
use std::sync::Arc;
use anyhow::Context;
use axum::{
@ -7,28 +7,13 @@ use axum::{
http::{HeaderMap, StatusCode},
};
use cynic::{http::SurfExt, QueryBuilder};
use saleor_app_sdk::{AuthData, AuthToken};
use tinytemplate::TinyTemplate;
use saleor_app_sdk::{headers::SALEOR_API_URL_HEADER, AuthData, AuthToken};
use tokio::spawn;
use tracing::{debug, error, info, trace};
use tracing::{debug, error, info};
use crate::{
app::{AppError, AppState},
queries::{
event_subjects_updated::{
self, CategoryUpdated, CollectionUpdated, PageUpdated, ProductUpdated,
},
get_all_categories_n_products::{
CategorisedProduct, Category3, GetCategoriesInitial, GetCategoriesNext,
GetCategoriesNextVariables, GetCategoryProductsInitial,
GetCategoryProductsInitialVariables, GetCategoryProductsNext,
GetCategoryProductsNextVariables,
},
get_all_collections::{
Collection, GetCollectionsInitial, GetCollectionsNext, GetCollectionsNextVariables,
},
get_all_pages::{self, GetPagesInitial, GetPagesNext, GetPagesNextVariables},
},
sitemap::event_handler::{Event, RegenerateEvent},
};
pub async fn register(
@ -38,13 +23,23 @@ pub async fn register(
) -> Result<StatusCode, AppError> {
debug!(
"/api/register:\nsaleor_api_url: {:?}\nauth_token: {:?}",
headers.get("saleor-api-url"),
auth_token
&headers.get(SALEOR_API_URL_HEADER),
&auth_token
);
if auth_token.auth_token.is_empty() {
return Err(anyhow::anyhow!("missing auth_token").into());
}
if let Some(url) = headers.get(SALEOR_API_URL_HEADER) {
if url.to_str()? != state.sitemap_config.allowed_host {
debug!("register didn't come from allowed host");
return Err(anyhow::anyhow!("Url not in allowed hosts").into());
}
} else {
debug!("no url in header");
return Err(anyhow::anyhow!("Url in header").into());
}
let app = state.saleor_app.lock().await;
let saleor_api_url = headers.get("saleor-api-url").context("missing api field")?;
let saleor_api_url = saleor_api_url.to_str()?.to_owned();
@ -62,290 +57,13 @@ pub async fn register(
//When app registers, start collecting everything of substance
info!("Starting caching and generation process");
let cloned_state = state.clone();
spawn(async move {
match regenerate(cloned_state, saleor_api_url).await {
Ok(_) => info!("Finished caching and regeneration"),
Err(e) => error!("Something went wrong during caching and regeneration, {e}"),
};
});
state
.task_queue_sender
.send(Event::Regenerate(RegenerateEvent {
state: cloned_state,
saleor_api_url,
}))
.await?;
Ok(StatusCode::OK)
}
pub async fn regenerate(state: AppState, saleor_api_url: String) -> anyhow::Result<()> {
info!("regeneration: fetching all categories, products, collections, pages");
let app = state.saleor_app.lock().await;
let auth_data = app.apl.get(&saleor_api_url).await?;
let pages = get_all_pages(&saleor_api_url, &auth_data.token).await?;
let collections = get_all_collections(&saleor_api_url, &auth_data.token).await?;
info!(
"regeneration: found {} products, {} categories, {} pages, {} collections",
0,
0,
pages.len(),
collections.len()
);
info!("regeneration: creating xml data");
info!("regeneration: creating urls");
// write_xml(page_urls, &state, XmlDataType::Page).await?;
// write_xml(collection_urls, &state, XmlDataType::Collection).await?;
// write_xml(category_urls, &state, XmlDataType::Category).await?;
// write_xml(product_urls, &state, XmlDataType::Product).await?;
Ok(())
}
async fn get_all_pages(
saleor_api_url: &str,
token: &str,
) -> anyhow::Result<Vec<get_all_pages::Page>> {
let operation = GetPagesInitial::build(());
let mut all_pages = vec![];
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(operation)
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(pages) = &data.pages
{
debug!("fetched first pages, eg.:{:?}", &pages.edges.first());
all_pages.append(
&mut pages
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
//Keep fetching next page
let mut next_cursor = pages.page_info.end_cursor.clone();
while let Some(cursor) = &mut next_cursor {
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(GetPagesNext::build(GetPagesNextVariables { after: cursor }))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(pages) = &data.pages
{
all_pages.append(
&mut pages
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!("fetched next pages, eg.:{:?}", &pages.edges.first());
next_cursor.clone_from(&pages.page_info.end_cursor);
} else {
error!("Failed fetching next pages! {:?}", &res);
anyhow::bail!("Failed fetching next pages! {:?}", res);
}
}
} else {
error!("Failed fetching initial pages! {:?}", &res);
anyhow::bail!("Failed fetching initial pages! {:?}", res);
};
info!("fetched all pages");
Ok(all_pages)
}
async fn get_all_categories(saleor_api_url: &str, token: &str) -> anyhow::Result<Vec<Category3>> {
debug!("Collecting all categories...");
let operation = GetCategoriesInitial::build(());
let mut all_categories = vec![];
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(operation)
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(categories) = &data.categories
{
all_categories.append(
&mut categories
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched first categories, eg.:{:?}",
&categories.edges.first()
);
//Keep fetching next page
let mut next_cursor = categories.page_info.end_cursor.clone();
while let Some(cursor) = &mut next_cursor {
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(GetCategoriesNext::build(GetCategoriesNextVariables {
after: Some(cursor),
}))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(categories) = &data.categories
{
all_categories.append(
&mut categories
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched next categories, eg.:{:?}",
&categories.edges.first()
);
next_cursor.clone_from(&categories.page_info.end_cursor);
} else {
error!("Failed fetching next categories! {:?}", &res);
anyhow::bail!("Failed fetching next categories! {:?}", res);
}
}
} else {
error!("Failed fetching initial Categories! {:?}", &res);
anyhow::bail!("Failed fetching initial Categories! {:?}", res);
};
info!("All categories collected");
Ok(all_categories)
}
async fn get_all_collections(saleor_api_url: &str, token: &str) -> anyhow::Result<Vec<Collection>> {
debug!("Collecting all Collections...");
let operation = GetCollectionsInitial::build(());
let mut all_collections = vec![];
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(operation)
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(collections) = &data.collections
{
all_collections.append(
&mut collections
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched first collections, eg.:{:?}",
&collections.edges.first()
);
//Keep fetching next page
let mut next_cursor = collections.page_info.end_cursor.clone();
while let Some(cursor) = &mut next_cursor {
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(GetCollectionsNext::build(GetCollectionsNextVariables {
after: Some(cursor),
}))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(collections) = &data.collections
{
all_collections.append(
&mut collections
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched next collections, eg.:{:?}",
&collections.edges.first()
);
next_cursor.clone_from(&collections.page_info.end_cursor);
} else {
error!("Failed fetching next collecnios! {:?}", &res);
anyhow::bail!("Failed fetching next collections! {:?}", res);
}
}
} else {
error!("Failed fetching initial collections! {:?}", &res);
anyhow::bail!("Failed fetching initial collections! {:?}", res);
};
info!("All Collections collected...");
Ok(all_collections)
}
/**
 * Gets all products of a category then assigns them as related
*/
async fn get_all_products(
saleor_api_url: &str,
channel: &str,
token: &str,
main_category: &mut (Category3, Vec<Arc<CategorisedProduct>>),
) -> anyhow::Result<Vec<Arc<CategorisedProduct>>> {
debug!("Collecting all products...");
let operation = GetCategoryProductsInitial::build(GetCategoryProductsInitialVariables {
id: &main_category.0.id,
channel,
});
let mut all_categorised_products: Vec<Arc<CategorisedProduct>> = vec![];
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(operation)
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(category) = &data.category
&& let Some(products) = &category.products
{
all_categorised_products.append(
&mut products
.edges
.iter()
.map(|p| {
Arc::new(CategorisedProduct {
product: p.node.clone(),
category_id: main_category.0.id.clone(),
})
})
.collect::<Vec<_>>(),
);
//Keep fetching next page
debug!("fetched first products, eg: {:?}", products.edges.first());
let mut next_cursor = products.page_info.end_cursor.clone();
while let Some(cursor) = &mut next_cursor {
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(GetCategoryProductsNext::build(
GetCategoryProductsNextVariables {
id: &main_category.0.id,
after: cursor,
channel,
},
))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(category) = &data.category
&& let Some(products) = &category.products
{
all_categorised_products.append(
&mut products
.edges
.iter()
.map(|p| {
Arc::new(CategorisedProduct {
product: p.node.clone(),
category_id: main_category.0.id.clone(),
})
})
.collect::<Vec<_>>(),
);
debug!("fetched next products, eg: {:?}", products.edges.first());
next_cursor.clone_from(&products.page_info.end_cursor);
} else {
error!("Failed fetching initial products! {:?}", &res);
anyhow::bail!("Failed fetching initial products! {:?}", res);
}
}
}
info!("All products collected...");
Ok(all_categorised_products)
}

View file

@ -19,6 +19,7 @@ use crate::{
CollectionUpdated, PageCreated, PageDeleted, PageUpdated, ProductCreated, ProductDeleted,
ProductUpdated,
},
sitemap::event_handler::Event,
};
pub async fn webhooks(
@ -27,53 +28,106 @@ pub async fn webhooks(
data: String,
) -> Result<StatusCode, AppError> {
debug!("/api/webhooks");
//debug!("req: {:?}", data);
//debug!("headers: {:?}", headers);
debug!("req: {:?}", data);
debug!("headers: {:?}", headers);
let url = headers
.get(SALEOR_API_URL_HEADER)
.context("missing saleor api url header")?
.to_str()?
.to_owned();
if url != state.sitemap_config.allowed_host {
debug!("webhook didn't come from allowed host");
return Ok(StatusCode::METHOD_NOT_ALLOWED);
}
let event_type = get_webhook_event_type(&headers)?;
debug!("event type: {:?}", &event_type);
if let EitherWebhookType::Async(a) = event_type {
// TODO: Extract this into a function so You can check what the error was if something fails
match a {
AsyncWebhookEventType::ProductUpdated => {
let product: ProductUpdated = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::ProductUpdated(product))
.await?;
}
AsyncWebhookEventType::ProductCreated => {
let product: ProductCreated = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::ProductCreated(product))
.await?;
}
AsyncWebhookEventType::ProductDeleted => {
let product: ProductDeleted = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::ProductDeleted(product))
.await?;
}
AsyncWebhookEventType::CategoryCreated => {
let category: CategoryCreated = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::CategoryCreated(category))
.await?;
}
AsyncWebhookEventType::CategoryUpdated => {
let category: CategoryUpdated = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::CategoryUpdated(category))
.await?;
}
AsyncWebhookEventType::CategoryDeleted => {
let category: CategoryDeleted = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::CategoryDeleted(category))
.await?;
}
AsyncWebhookEventType::PageCreated => {
let page: PageCreated = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::PageCreated(page))
.await?;
}
AsyncWebhookEventType::PageUpdated => {
let page: PageUpdated = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::PageUpdated(page))
.await?;
}
AsyncWebhookEventType::PageDeleted => {
let page: PageDeleted = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::PageDeleted(page))
.await?;
}
AsyncWebhookEventType::CollectionCreated => {
let collection: CollectionCreated = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::CollectionCreated(collection))
.await?;
}
AsyncWebhookEventType::CollectionUpdated => {
let collection: CollectionUpdated = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::CollectionUpdated(collection))
.await?;
}
AsyncWebhookEventType::CollectionDeleted => {
let collection: CollectionDeleted = serde_json::from_str(&data)?;
state
.task_queue_sender
.send(Event::CollectionDeleted(collection))
.await?;
}
_ => (),
}
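One way the TODO at the top of this match could be addressed, sketched here with a hypothetical `parse_and_send` helper (not part of this commit): deserialize once, wrap in the matching `Event` variant, and enqueue, turning each arm into a one-liner.

// Hypothetical helper; error conversion relies on AppError's blanket From,
// the same way the `?` operators above already do.
async fn parse_and_send<T: serde::de::DeserializeOwned>(
    data: &str,
    state: &AppState,
    wrap: fn(T) -> Event,
) -> Result<(), AppError> {
    let parsed: T = serde_json::from_str(data)?;
    state.task_queue_sender.send(wrap(parsed)).await?;
    Ok(())
}
// e.g. AsyncWebhookEventType::ProductUpdated =>
//          parse_and_send(&data, &state, Event::ProductUpdated).await?,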

View file

@ -1 +0,0 @@

View file

@ -1,68 +1,279 @@
use super::regenerate::regenerate;
use serde::Serialize;
use std::{
fs::{self, read_dir, File},
io::{BufReader, ErrorKind},
path::PathBuf,
fs::{self},
io::ErrorKind,
};
use tinytemplate::TinyTemplate;
use crate::{
app::SitemapConfig,
queries::event_subjects_updated::{Event, Product},
sitemap::{ItemType, Url},
app::{AppState, SitemapConfig},
queries::event_subjects_updated::{
Category2, CategoryCreated, CategoryDeleted, CategoryUpdated, Collection,
CollectionCreated, CollectionDeleted, CollectionUpdated, Page, PageCreated, PageDeleted,
PageUpdated, Product, ProductCreated, ProductDeleted, ProductUpdated,
},
sitemap::Url,
};
use tokio::{sync::mpsc::Receiver, task::JoinHandle};
use tracing::{debug, error, trace, warn};
use tracing::{debug, error, info, trace, warn};
use super::UrlSet;
use super::{ItemData, ItemType, UrlSet};
// The sitemap spec caps a file at 50 000 links (plus a size limit); we warn
// instead of splitting for now.
const MAX_URL_IN_SET: usize = 6000;
const MAX_URL_IN_SET: usize = 50_000;
const DB_FILE_NAME: &str = "db.toml";
const SITEMAP_FILE_NAME: &str = "sitemap.txt";
pub struct EventHandler {
receiver: Receiver<(Event, SitemapConfig)>,
receiver: Receiver<Event>,
sitemap_config: SitemapConfig,
}
#[derive(Debug, Clone)]
pub enum Event {
ProductUpdated(ProductUpdated),
ProductCreated(ProductCreated),
ProductDeleted(ProductDeleted),
CategoryCreated(CategoryCreated),
CategoryUpdated(CategoryUpdated),
CategoryDeleted(CategoryDeleted),
PageCreated(PageCreated),
PageUpdated(PageUpdated),
PageDeleted(PageDeleted),
CollectionCreated(CollectionCreated),
CollectionUpdated(CollectionUpdated),
CollectionDeleted(CollectionDeleted),
Regenerate(RegenerateEvent),
Unknown,
}
#[derive(Debug, Clone)]
pub struct RegenerateEvent {
pub state: AppState,
pub saleor_api_url: String,
}
impl EventHandler {
pub fn start(receiver: Receiver<(Event, SitemapConfig)>) -> JoinHandle<()> {
let s = Self { receiver };
pub fn start(sitemap_config: SitemapConfig, receiver: Receiver<Event>) -> JoinHandle<()> {
let s = Self {
sitemap_config,
receiver,
};
tokio::spawn(s.listen())
}
async fn listen(mut self) {
while let Some((message, sitemap_config)) = self.receiver.recv().await {
while let Some(message) = self.receiver.recv().await {
debug!("received Event: {:?}", &message);
match message {
Event::ProductCreated(product) => {
Event::ProductCreated(product_created) => {
if let Some(product) = product_created.clone().product {
product_updated_or_created(product_created, product, &self.sitemap_config)
.await;
} else {
warn!("Event::ProductCreated/Updated missing data");
}
}
Event::ProductUpdated(product_updated) => {
if let Some(product) = product_updated.clone().product {
product_updated_or_created(product_updated, product, &self.sitemap_config)
.await;
} else {
warn!("Event::ProductCreated/Updated missing data");
}
}
Event::ProductDeleted(product) => {
if let Some(product) = product.product {
product_update_or_create(product, sitemap_config).await;
delete(product.id.inner(), &self.sitemap_config).await;
} else {
warn!("Event::ProductDeleted missing data");
}
warn!("Event::ProductCreated missing product");
}
Event::ProductUpdated(product) => {
if let Some(product) = product.product {
product_update_or_create(product, sitemap_config).await;
Event::CategoryCreated(category_created) => {
if let Some(category) = category_created.clone().category {
category_updated_or_created(
category_created,
category,
&self.sitemap_config,
)
.await;
} else {
warn!("Event::CategoryCreated/Updated missing data");
}
warn!("Event::ProductUpdated missing product");
}
Event::ProductDeleted(product) => {}
Event::CategoryCreated(category) => {}
Event::CategoryUpdated(category) => {}
Event::CategoryDeleted(category) => {}
Event::CollectionCreated(collection) => {}
Event::CollectionUpdated(collection) => {}
Event::CollectionDeleted(collection) => {}
Event::PageCreated(page) => {}
Event::PageUpdated(page) => {}
Event::PageDeleted(page) => {}
Event::Unknown => warn!("Unknown event called"),
Event::CategoryUpdated(category_updated) => {
if let Some(category) = category_updated.clone().category {
category_updated_or_created(
category_updated,
category,
&self.sitemap_config,
)
.await;
} else {
warn!("Event::CategoryCreated/Updated missing data");
}
}
Event::CategoryDeleted(category) => {
if let Some(category) = category.category {
delete(category.id.inner(), &self.sitemap_config).await;
} else {
warn!("Event::CategoryDeleted missing data");
}
}
Event::CollectionCreated(collection_created) => {
if let Some(collection) = collection_created.clone().collection {
collection_updated_or_created(
collection_created,
collection,
&self.sitemap_config,
)
.await;
} else {
warn!("Event::ProductCreated/Updated missing Data");
}
}
Event::CollectionUpdated(collection_updated) => {
if let Some(collection) = collection_updated.clone().collection {
collection_updated_or_created(
collection_updated,
collection,
&self.sitemap_config,
)
.await;
} else {
warn!("Event::ProductCreated/Updated missing Data");
}
}
Event::CollectionDeleted(collection) => {
if let Some(collection) = collection.collection {
delete(collection.id.inner(), &self.sitemap_config).await;
} else {
warn!("Event::ProductDeleted missing data");
}
}
Event::PageCreated(page_created) => {
if let Some(page) = page_created.clone().page {
page_updated_or_created(page_created, page, &self.sitemap_config).await;
} else {
warn!("Event::PageCreated/Updated missing data");
}
}
Event::PageUpdated(page_updated) => {
if let Some(page) = page_updated.clone().page {
page_updated_or_created(page_updated, page, &self.sitemap_config).await;
} else {
warn!("Event::PageCreated/Updated missing data");
}
}
Event::PageDeleted(page) => {
if let Some(page) = page.page {
delete(page.id.inner(), &self.sitemap_config).await;
} else {
warn!("Event::PageDeleted missing data");
}
}
Event::Regenerate(r) => match regenerate(r.state, r.saleor_api_url).await {
Ok(_) => info!("regenerate: Fully created sitemap!"),
Err(e) => error!("regenerate: ERR! {:?}", e),
},
Event::Unknown => (),
}
debug!("Event succesfully handled");
}
}
}
async fn product_delete(product: Product, sitemap_config: SitemapConfig) {
let mut url_set = match get_from_file(&sitemap_config.target_folder).await {
/* =============== Event handlers =============== */
async fn product_updated_or_created<T: Serialize>(
request: T,
product: Product,
sitemap_config: &SitemapConfig,
) {
update_or_create(
request,
sitemap_config,
ItemData {
id: product.id.inner().to_owned(),
slug: product.slug,
typ: ItemType::Product,
},
product.category.map(|c| ItemData {
slug: c.slug,
typ: ItemType::Category,
id: c.id.inner().to_owned(),
}),
)
.await;
}
async fn category_updated_or_created<T: Serialize>(
request: T,
category: Category2,
sitemap_config: &SitemapConfig,
) {
update_or_create(
request,
sitemap_config,
ItemData {
id: category.id.inner().to_owned(),
slug: category.slug,
typ: ItemType::Category,
},
None,
)
.await;
}
async fn page_updated_or_created<T: Serialize>(
request: T,
page: Page,
sitemap_config: &SitemapConfig,
) {
update_or_create(
request,
sitemap_config,
ItemData {
id: page.id.inner().to_owned(),
slug: page.slug,
typ: ItemType::Page,
},
None,
)
.await;
}
async fn collection_updated_or_created<T: Serialize>(
request: T,
collection: Collection,
sitemap_config: &SitemapConfig,
) {
update_or_create(
request,
sitemap_config,
ItemData {
id: collection.id.inner().to_owned(),
slug: collection.slug,
typ: ItemType::Collection,
},
None,
)
.await;
}
/* ============= URL Manipulations ================ */
async fn update_or_create<T: Serialize>(
data: T,
sitemap_config: &SitemapConfig,
item: ItemData,
rel_item: Option<ItemData>,
) {
let mut url_set = match get_db_from_file(&sitemap_config.target_folder).await {
Ok(u) => u,
Err(e) => match e {
UrlSetFileOperationsErr::IoResult(e) => match e.kind() {
@ -82,40 +293,12 @@ async fn product_delete(product: Product, sitemap_config: SitemapConfig) {
},
};
url_set.flush_related(product.id.inner());
write_to_file(&url_set, &sitemap_config.target_folder)
.await
.unwrap();
}
async fn product_update_or_create(product: Product, sitemap_config: SitemapConfig) {
let mut url_set = match get_from_file(&sitemap_config.target_folder).await {
Ok(u) => u,
Err(e) => match e {
UrlSetFileOperationsErr::IoResult(e) => match e.kind() {
ErrorKind::NotFound => UrlSet::new(),
_ => {
error!("File errror: {:?}\n won't crash, but probably broken.", e);
return;
}
},
UrlSetFileOperationsErr::DeError(e) => {
error!(
"DE error: {:?}\n Won't crash, but something went badly wrong",
e
);
return;
}
},
};
let mut affected_urls = url_set.find_affected(product.id.inner(), &product.slug);
let mut affected_urls = url_set.find_affected(&item.id, &item.slug);
debug!("affected urls: {:?}", &affected_urls);
if affected_urls.len() == 0 {
trace!("Product doesn't exist in url_set yet");
url_set.push(Url::new_product(&sitemap_config.product_template, product).unwrap());
if affected_urls.is_empty() {
trace!("{:?} doesn't exist in url_set yet", &item.slug);
url_set.push(Url::new(data, sitemap_config, item, rel_item).unwrap());
} else {
// Update affected urls
affected_urls.iter_mut().for_each(|url| {
@ -124,24 +307,60 @@ async fn product_update_or_create(product: Product, sitemap_config: SitemapConfi
.add_template("product", &sitemap_config.product_template)
.expect("Check your url templates!");
let new_loc = templater
.render("product", &product)
.render("product", &data)
.expect("Check your url templates!");
debug!("updated `{}` to `{}`", &url.url, new_loc);
url.url = new_loc;
});
}
write_to_file(&url_set, &sitemap_config.target_folder)
write_db_to_file(&url_set, &sitemap_config.target_folder)
.await
.unwrap();
write_url_set_to_file(&url_set, &sitemap_config.target_folder)
.await
.unwrap();
}
async fn get_from_file(target_folder: &str) -> Result<UrlSet, UrlSetFileOperationsErr> {
async fn delete(id: &str, sitemap_config: &SitemapConfig) {
let mut url_set = match get_db_from_file(&sitemap_config.target_folder).await {
Ok(u) => u,
Err(e) => match e {
UrlSetFileOperationsErr::IoResult(e) => match e.kind() {
ErrorKind::NotFound => UrlSet::new(),
_ => {
error!("File errror: {:?}\n won't crash, but probably broken.", e);
return;
}
},
UrlSetFileOperationsErr::DeError(e) => {
error!(
"DE error: {:?}\n Won't crash, but something went badly wrong",
e
);
return;
}
},
};
url_set.flush_related(id);
write_db_to_file(&url_set, &sitemap_config.target_folder)
.await
.unwrap();
write_url_set_to_file(&url_set, &sitemap_config.target_folder)
.await
.unwrap();
}
/* =================== File and SerDe operations ========================= */
async fn get_db_from_file(target_folder: &str) -> Result<UrlSet, UrlSetFileOperationsErr> {
let urls: UrlSet =
serde_cbor::de::from_slice(&std::fs::read(format!("{target_folder}/{DB_FILE_NAME}"))?)?;
serde_json::de::from_slice(&std::fs::read(format!("{target_folder}/{DB_FILE_NAME}"))?)
.unwrap();
Ok(urls)
}
async fn write_to_file(
pub async fn write_db_to_file(
url_set: &UrlSet,
target_folder: &str,
) -> Result<(), UrlSetFileOperationsErr> {
@ -151,7 +370,27 @@ async fn write_to_file(
}
fs::write(
format!("{target_folder}/{DB_FILE_NAME}"),
&serde_cbor::to_vec(url_set)?,
&serde_json::to_vec(url_set).unwrap(),
)?;
Ok(())
}
pub async fn write_url_set_to_file(
url_set: &UrlSet,
target_folder: &str,
) -> Result<(), UrlSetFileOperationsErr> {
if url_set.len() > MAX_URL_IN_SET {
// return Err(UrlSetFileOperationsErr::UrlSetTooLong(url_set.len()));
warn!("Urlset exeeded {MAX_URL_IN_SET} links, search engines might start to complain!");
}
fs::write(
format!("{target_folder}/{SITEMAP_FILE_NAME}"),
url_set
.iter()
.map(|u| u.url.clone())
.collect::<Vec<_>>()
.join("\n"),
)?;
Ok(())
}

View file

@ -1,18 +1,12 @@
mod category;
mod collection;
mod event_handler;
mod page;
mod product;
pub mod event_handler;
pub mod regenerate;
use std::ops::{Deref, DerefMut};
use serde::{Deserialize, Serialize};
use tinytemplate::TinyTemplate;
use crate::{
app::SitemapConfig,
queries::event_subjects_updated::{Category, Collection, Page, Product, ProductUpdated},
};
use crate::app::SitemapConfig;
const SITEMAP_XMLNS: &str = "http://sitemaps.org/schemas/sitemap/0.9";
const SALEOR_REF_XMLNS: &str = "http://app-sitemap-generator.kremik.sk/xml-schemas/saleor-ref.xsd";
@ -86,83 +80,28 @@ impl DerefMut for UrlSet {
}
impl Url {
pub fn new_product(template: &str, product: Product) -> Result<Self, NewUrlError> {
let category = product
.category
.as_ref()
.ok_or(NewUrlError::MissingData)?
.clone();
let data = ItemData {
id: product.id.inner().to_owned(),
slug: product.slug.clone(),
typ: ItemType::Product,
};
let related = Some(ItemData {
id: category.id.inner().to_owned(),
slug: category.slug,
typ: ItemType::Category,
});
pub fn new<T: Serialize>(
data: T,
sitemap_config: &SitemapConfig,
item: ItemData,
rel_item: Option<ItemData>,
) -> Result<Self, NewUrlError> {
let mut tt = TinyTemplate::new();
tt.add_template("t", template)?;
let url = tt.render("t", &product)?;
Ok(Self { url, data, related })
}
pub fn new_category(template: &str, category: Category) -> Result<Self, NewUrlError> {
let data = ItemData {
id: category.id.inner().to_owned(),
slug: category.slug.clone(),
typ: ItemType::Category,
};
let mut tt = TinyTemplate::new();
tt.add_template("t", template)?;
let url = tt.render("t", &category)?;
tt.add_template(
"t",
match item.typ {
ItemType::Category => &sitemap_config.category_template,
ItemType::Page => &sitemap_config.pages_template,
ItemType::Collection => &sitemap_config.collection_template,
ItemType::Product => &sitemap_config.product_template,
},
)?;
let url = tt.render("t", &data)?;
Ok(Self {
url,
data,
related: None,
})
}
pub fn new_collection(template: &str, collection: Collection) -> Result<Self, NewUrlError> {
let data = ItemData {
id: collection.id.inner().to_owned(),
slug: collection.slug.clone(),
typ: ItemType::Collection,
};
let mut tt = TinyTemplate::new();
tt.add_template("t", template);
let url = tt.render("t", &collection)?;
Ok(Self {
url,
data,
related: None,
})
}
pub fn new_page(template: &str, page: Page) -> Result<Self, NewUrlError> {
let data = ItemData {
id: page.id.inner().to_owned(),
slug: page.slug.clone(),
typ: ItemType::Page,
};
let mut tt = TinyTemplate::new();
tt.add_template("t", template);
let url = tt.render("t", &page)?;
Ok(Self {
url,
data,
related: None,
data: item,
related: rel_item,
})
}
}
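A usage sketch for the unified constructor above, using the category template from the test config in this commit (`https://example.com/{category.slug}`); the id and slug are made up:

// Hedged usage sketch; the payload only needs to expose whatever fields the
// chosen template dereferences ({category.slug} here).
let url = Url::new(
    CategoryUpdated {
        category: Some(Category2 {
            id: cynic::Id::new("category1"),
            slug: "shoes".to_owned(),
        }),
    },
    &sitemap_config,
    ItemData {
        id: "category1".to_owned(),
        slug: "shoes".to_owned(),
        typ: ItemType::Category,
    },
    None,
)?;
// url.url is now "https://example.com/shoes"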

View file

@ -1 +0,0 @@

View file

@ -1 +0,0 @@

View file

@ -0,0 +1,415 @@
use cynic::{http::SurfExt, QueryBuilder};
use tracing::{debug, error, info};
use crate::{
app::AppState,
queries::{
event_subjects_updated::{
CategoryCreated, CollectionCreated, Page, PageCreated, ProductCreated,
},
get_all_categories::{
Category3, GetCategoriesInitial, GetCategoriesNext, GetCategoriesNextVariables,
},
get_all_collections::{
Collection, GetCollectionsInitial, GetCollectionsNext, GetCollectionsNextVariables,
},
get_all_pages::{self, GetPagesInitial, GetPagesNext, GetPagesNextVariables},
get_all_products::{
GetProductsInitial, GetProductsInitialVariables, GetProductsNext,
GetProductsNextVariables, Product,
},
},
sitemap::{
event_handler::{write_db_to_file, write_url_set_to_file},
ItemData, ItemType, Url, UrlSet,
},
};
pub async fn regenerate(state: AppState, saleor_api_url: String) -> anyhow::Result<()> {
info!("regeneration: fetching all categories, products, collections, pages");
let app = state.saleor_app.lock().await;
let auth_data = app.apl.get(&saleor_api_url).await?;
let pages = get_all_pages(&saleor_api_url, &auth_data.token).await?;
let collections = get_all_collections(&saleor_api_url, &auth_data.token).await?;
let categories = get_all_categories(&saleor_api_url, &auth_data.token).await?;
let products =
get_all_products(&saleor_api_url, &state.target_channel, &auth_data.token).await?;
info!(
"regeneration: found {} products, {} categories, {} pages, {} collections",
products.len(),
categories.len(),
pages.len(),
collections.len()
);
info!("regeneration: creating sitemap data");
let mut url_set = UrlSet::new();
url_set.urls.append(
&mut pages
.into_iter()
.filter_map(|p| {
match Url::new(
PageCreated {
page: Some(Page {
id: p.id.clone(),
slug: p.slug.clone(),
}),
},
&state.sitemap_config,
ItemData {
id: p.id.inner().to_owned(),
slug: p.slug.clone(),
typ: ItemType::Page,
},
None,
) {
Ok(u) => Some(u),
Err(e) => {
error!("Error creating Url from page {:?}, {:?}", &p, e);
None
}
}
})
.collect::<Vec<_>>(),
);
url_set.urls.append(
&mut collections
.into_iter()
.filter_map(|p| {
match Url::new(
CollectionCreated {
collection: Some(crate::queries::event_subjects_updated::Collection {
id: p.id.clone(),
slug: p.slug.clone(),
}),
},
&state.sitemap_config,
ItemData {
id: p.id.inner().to_owned(),
slug: p.slug.clone(),
typ: ItemType::Collection,
},
None,
) {
Ok(u) => Some(u),
Err(e) => {
error!("Error creating Url from collection {:?}, {:?}", &p, e);
None
}
}
})
.collect::<Vec<_>>(),
);
url_set.urls.append(
&mut categories
.into_iter()
.filter_map(|p| {
match Url::new(
CategoryCreated {
category: Some(crate::queries::event_subjects_updated::Category2 {
id: p.id.clone(),
slug: p.slug.clone(),
}),
},
&state.sitemap_config,
ItemData {
id: p.id.inner().to_owned(),
slug: p.slug.clone(),
typ: ItemType::Category,
},
None,
) {
Ok(u) => Some(u),
Err(e) => {
error!("Error creating Url from category {:?}, {:?}", &p, e);
None
}
}
})
.collect::<Vec<_>>(),
);
url_set.urls.append(
&mut products
.into_iter()
.filter_map(|p| {
match Url::new(
ProductCreated {
product: Some(crate::queries::event_subjects_updated::Product {
id: p.id.clone(),
slug: p.slug.clone(),
category: p.category.clone().map(|c| {
crate::queries::event_subjects_updated::Category {
slug: c.slug,
id: c.id,
}
}),
}),
},
&state.sitemap_config,
ItemData {
id: p.id.inner().to_owned(),
slug: p.slug.clone(),
typ: ItemType::Product,
},
None,
) {
Ok(u) => Some(u),
Err(e) => {
error!("Error creating Url from product{:?}, {:?}", &p, e);
None
}
}
})
.collect::<Vec<_>>(),
);
info!("regeneration: creating sitemap file");
write_db_to_file(&url_set, &state.sitemap_config.target_folder).await?;
write_url_set_to_file(&url_set, &state.sitemap_config.target_folder).await?;
debug!("Wrote all files to disk");
Ok(())
}
async fn get_all_pages(
saleor_api_url: &str,
token: &str,
) -> anyhow::Result<Vec<get_all_pages::Page>> {
let operation = GetPagesInitial::build(());
let mut all_pages = vec![];
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(operation)
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(pages) = &data.pages
{
debug!("fetched first pages, eg.:{:?}", &pages.edges.first());
all_pages.append(
&mut pages
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
//Keep fetching next page
let mut next_cursor = pages.page_info.end_cursor.clone();
while let Some(cursor) = &mut next_cursor {
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(GetPagesNext::build(GetPagesNextVariables { after: cursor }))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(pages) = &data.pages
{
all_pages.append(
&mut pages
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!("fetched next pages, eg.:{:?}", &pages.edges.first());
next_cursor.clone_from(&pages.page_info.end_cursor);
} else {
error!("Failed fetching next pages! {:?}", &res);
anyhow::bail!("Failed fetching next pages! {:?}", res);
}
}
} else {
error!("Failed fetching initial pages! {:?}", &res);
anyhow::bail!("Failed fetching initial pages! {:?}", res);
};
info!("fetched all pages");
Ok(all_pages)
}
async fn get_all_categories(saleor_api_url: &str, token: &str) -> anyhow::Result<Vec<Category3>> {
debug!("Collecting all categories...");
let operation = GetCategoriesInitial::build(());
let mut all_categories = vec![];
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(operation)
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(categories) = &data.categories
{
all_categories.append(
&mut categories
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched first categories, eg.:{:?}",
&categories.edges.first()
);
//Keep fetching next page
let mut next_cursor = categories.page_info.end_cursor.clone();
while let Some(cursor) = &mut next_cursor {
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(GetCategoriesNext::build(GetCategoriesNextVariables {
after: Some(cursor),
}))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(categories) = &data.categories
{
all_categories.append(
&mut categories
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched next categories, eg.:{:?}",
&categories.edges.first()
);
next_cursor.clone_from(&categories.page_info.end_cursor);
} else {
error!("Failed fetching next categories! {:?}", &res);
anyhow::bail!("Failed fetching next categories! {:?}", res);
}
}
} else {
error!("Failed fetching initial Categories! {:?}", &res);
anyhow::bail!("Failed fetching initial Categories! {:?}", res);
};
info!("All categories collected");
Ok(all_categories)
}
async fn get_all_collections(saleor_api_url: &str, token: &str) -> anyhow::Result<Vec<Collection>> {
debug!("Collecting all Collections...");
let operation = GetCollectionsInitial::build(());
let mut all_collections = vec![];
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(operation)
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(collections) = &data.collections
{
all_collections.append(
&mut collections
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched first collections, eg.:{:?}",
&collections.edges.first()
);
//Keep fetching next page
let mut next_cursor = collections.page_info.end_cursor.clone();
while let Some(cursor) = &mut next_cursor {
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(GetCollectionsNext::build(GetCollectionsNextVariables {
after: Some(cursor),
}))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(collections) = &data.collections
{
all_collections.append(
&mut collections
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched next collections, eg.:{:?}",
&collections.edges.first()
);
next_cursor.clone_from(&collections.page_info.end_cursor);
} else {
error!("Failed fetching next collecnios! {:?}", &res);
anyhow::bail!("Failed fetching next collections! {:?}", res);
}
}
} else {
error!("Failed fetching initial collections! {:?}", &res);
anyhow::bail!("Failed fetching initial collections! {:?}", res);
};
info!("All Collections collected...");
Ok(all_collections)
}
/**
 * Gets all products for a channel, including each product's category for relations
*/
async fn get_all_products(
saleor_api_url: &str,
channel: &str,
token: &str,
) -> anyhow::Result<Vec<Product>> {
debug!("Collecting all products...");
let operation = GetProductsInitial::build(GetProductsInitialVariables { channel });
let mut all_categorised_products: Vec<Product> = vec![];
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(operation)
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(products) = &data.products
{
all_categorised_products.append(
&mut products
.edges
.clone()
.into_iter()
.map(|p| p.node)
.collect::<Vec<_>>(),
);
//Keep fetching next page
debug!("fetched first products, eg: {:?}", products.edges.first());
let mut next_cursor = products.page_info.end_cursor.clone();
while let Some(cursor) = &mut next_cursor {
let res = surf::post(saleor_api_url)
.header("authorization-bearer", token)
.run_graphql(GetProductsNext::build(GetProductsNextVariables {
after: cursor,
channel,
}))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(products) = &data.products
{
all_categorised_products.append(
&mut products
.edges
.clone()
.into_iter()
.map(|p| p.node)
.collect::<Vec<_>>(),
);
debug!("fetched next products, eg: {:?}", products.edges.first());
next_cursor.clone_from(&products.page_info.end_cursor);
} else {
error!("Failed fetching initial products! {:?}", &res);
anyhow::bail!("Failed fetching initial products! {:?}", res);
}
}
}
info!("All products collected...");
Ok(all_categorised_products)
}
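The four fetchers above repeat the same cursor loop; a hedged sketch of how that shape could be factored out (`paginate` is hypothetical, not part of this commit):

// Hypothetical generic cursor loop; each fetcher would pass a closure that
// runs one GraphQL request and returns (items, end_cursor).
async fn paginate<T, F, Fut>(mut fetch_page: F) -> anyhow::Result<Vec<T>>
where
    F: FnMut(Option<String>) -> Fut,
    Fut: std::future::Future<Output = anyhow::Result<(Vec<T>, Option<String>)>>,
{
    let mut all = Vec::new();
    let mut cursor: Option<String> = None;
    loop {
        let (mut items, next) = fetch_page(cursor.take()).await?;
        all.append(&mut items);
        match next {
            Some(c) => cursor = Some(c),
            None => break,
        }
    }
    Ok(all)
}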

View file

@ -1,61 +1,34 @@
use crate::{
app::{trace_to_std, SitemapConfig},
create_app,
queries::event_subjects_updated::{Category, Product, ProductUpdated},
sitemap::{Url, UrlSet},
};
mod utils;
use std::time::Duration;
use crate::{create_app, sitemap::UrlSet};
use async_std::task::sleep;
use axum::{
body::Body,
extract::path::ErrorKind,
http::{Request, StatusCode},
routing::RouterIntoService,
Json, Router,
};
use rstest::*;
use saleor_app_sdk::{apl::AplType, config::Config};
use tower::{MakeService, Service, ServiceExt};
use tracing::Level;
fn init_tracing() {
let config = Config {
apl: AplType::File,
apl_url: "redis://localhost:6379".to_string(),
log_level: Level::TRACE,
app_api_base_url: "http://localhost:3000".to_string(),
app_iframe_base_url: "http://localhost:3000".to_string(),
required_saleor_version: "^3.13".to_string(),
use saleor_app_sdk::{
headers::{SALEOR_API_URL_HEADER, SALEOR_EVENT_HEADER},
webhooks::{utils::EitherWebhookType, AsyncWebhookEventType},
};
trace_to_std(&config).unwrap();
}
use serial_test::{parallel, serial};
use tower::{Service, ServiceExt};
use tracing_test::traced_test;
use utils::{gen_random_url_set, init_tracing, testing_configs};
async fn init_test_app() -> RouterIntoService<Body> {
match std::fs::remove_dir_all("./temp/sitemaps") {
Err(e) => match e.kind() {
if let Err(e) = std::fs::remove_dir_all("./temp/sitemaps") {
match e.kind() {
std::io::ErrorKind::NotFound => (),
_ => panic!("{:?}", e),
},
_ => (),
}
};
std::fs::create_dir_all("./temp/sitemaps").unwrap();
std::env::set_var("APP_API_BASE_URL", "http://localhost:3000");
let config = Config {
apl: AplType::File,
apl_url: "redis://localhost:6379".to_string(),
log_level: Level::TRACE,
app_api_base_url: "http://localhost:3000".to_string(),
app_iframe_base_url: "http://localhost:3000".to_string(),
required_saleor_version: "^3.13".to_string(),
};
let sitemap_config = SitemapConfig {
target_folder: "./temp/sitemaps".to_string(),
pages_template: "https://example.com/{page.slug}".to_string(),
index_hostname: "https://example.com".to_string(),
product_template: "https://example.com/{product.category.slug}/{product.slug}".to_string(),
category_template: "https://example.com/{category.slug}".to_string(),
collection_template: "https://example.com/collection/{collection.slug}".to_string(),
};
let (config, sitemap_config) = testing_configs();
create_app(&config, sitemap_config)
.await
@ -63,7 +36,10 @@ async fn init_test_app() -> RouterIntoService<Body> {
}
#[rstest]
async fn index_returns_ok() {
#[tokio::test]
#[traced_test]
#[serial]
pub async fn index_returns_ok() {
let mut app = init_test_app().await;
let response = app
@ -77,19 +53,15 @@ async fn index_returns_ok() {
}
#[rstest]
async fn updates_xml_from_product() {
#[tokio::test]
#[traced_test]
#[serial]
async fn updates_sitemap_from_request() {
let mut app = init_test_app().await;
let (_, sitemap_config) = testing_configs();
let product_updated = ProductUpdated {
product: Some(Product {
id: cynic::Id::new("product1".to_owned()),
slug: "product1slug".to_owned(),
category: Some(Category {
slug: "category1slug".to_owned(),
id: cynic::Id::new("category1".to_owned()),
}),
}),
};
let evn = gen_random_url_set(1, &sitemap_config);
let (body, url, webhook_type) = evn.get(0).cloned().unwrap();
let response = app
.ready()
@ -98,9 +70,15 @@ async fn updates_xml_from_product() {
.call(
Request::builder()
.uri("/api/webhooks")
.body(Body::from(
serde_json::to_string_pretty(&product_updated).unwrap(),
))
.header(SALEOR_API_URL_HEADER, "https://api.example.com")
.header(
SALEOR_EVENT_HEADER,
match webhook_type {
EitherWebhookType::Sync(s) => s.as_ref().to_string(),
EitherWebhookType::Async(a) => a.as_ref().to_string(),
},
)
.body(Body::from(body))
.unwrap(),
)
.await
@ -108,65 +86,72 @@ async fn updates_xml_from_product() {
assert_eq!(response.status(), StatusCode::OK);
let xml: UrlSet =
serde_json::from_str(&std::fs::read_to_string("./temp/sitemaps/1.xml").unwrap()).unwrap();
//wait for the file to get written
sleep(Duration::from_secs(3)).await;
let mut webhook_url_set = UrlSet::new();
webhook_url_set.urls = vec![Url::new_product(
"https://example.com/{product.category.slug}/{product.slug}",
product_updated.product.unwrap(),
)
.unwrap()];
let file_url = std::fs::read_to_string("./temp/sitemaps/sitemap.txt").unwrap();
assert_eq!(xml, webhook_url_set);
assert_eq!(file_url, url.url);
}
#[rstest]
#[tokio::test]
#[traced_test]
#[parallel]
async fn sequence_of_actions_is_preserved() {
let mut app = init_test_app().await;
let (_, sitemap_config) = testing_configs();
let evn = gen_random_url_set(1000, &sitemap_config);
for (body, _, webhook_type) in evn.clone() {
let response = app
.ready()
.await
.unwrap()
.call(
Request::builder()
.uri("/api/webhooks")
.header(SALEOR_API_URL_HEADER, "https://api.example.com")
.header(
SALEOR_EVENT_HEADER,
match webhook_type {
EitherWebhookType::Sync(s) => s.as_ref().to_string(),
EitherWebhookType::Async(a) => a.as_ref().to_string(),
},
)
.body(Body::from(body))
.unwrap(),
)
.await
.unwrap();
assert_eq!(response.status(), StatusCode::OK);
}
//wait for the file to get written
sleep(Duration::from_secs(3)).await;
let file_url = std::fs::read_to_string("./temp/sitemaps/sitemap.txt").unwrap();
assert_eq!(
file_url,
evn.iter()
.map(|u| u.1.url.clone())
.collect::<Vec<_>>()
.join("\n"),
);
}
#[rstest]
#[traced_test]
fn urlset_serialisation_isnt_lossy() {
std::env::set_var("APP_API_BASE_URL", "http://localhost:3000");
let sitemap_config = SitemapConfig {
target_folder: "./temp/sitemaps".to_string(),
pages_template: "https://example.com/{page.slug}".to_string(),
index_hostname: "https://example.com".to_string(),
product_template: "https://example.com/{product.category.slug}/{product.slug}".to_string(),
category_template: "https://example.com/{category.slug}".to_string(),
collection_template: "https://example.com/collection/{collection.slug}".to_string(),
};
let (_, sitemap_config) = testing_configs();
init_tracing();
let product1 = Product {
id: cynic::Id::new("product1".to_owned()),
slug: "product1slug".to_owned(),
category: Some(Category {
slug: "category1slug".to_owned(),
id: cynic::Id::new("category1".to_owned()),
}),
};
let product2 = Product {
id: cynic::Id::new("product2".to_owned()),
slug: "product2slug".to_owned(),
category: Some(Category {
slug: "category2slug".to_owned(),
id: cynic::Id::new("category2".to_owned()),
}),
};
let urls = gen_random_url_set(100, &sitemap_config);
let mut url_set = UrlSet::new();
url_set.urls = vec![
Url::new_category(
&sitemap_config.category_template,
product1.category.clone().unwrap(),
)
.unwrap(),
Url::new_product(&sitemap_config.product_template, product1).unwrap(),
Url::new_category(
&sitemap_config.category_template,
product2.category.clone().unwrap(),
)
.unwrap(),
Url::new_product(&sitemap_config.product_template, product2).unwrap(),
];
url_set.urls = urls.into_iter().map(|u| u.1).collect();
let file_str = serde_cbor::to_vec(&url_set).unwrap();
let deserialized_url_set: UrlSet = serde_cbor::de::from_slice(&file_str).unwrap();
assert_eq!(url_set, deserialized_url_set);

View file

@ -0,0 +1,489 @@
use rand::{
distributions::{Distribution, Standard},
seq::SliceRandom,
Rng,
};
use saleor_app_sdk::{
apl::AplType,
config::Config,
webhooks::{utils::EitherWebhookType, AsyncWebhookEventType, SyncWebhookEventType},
};
use tracing::Level;
use crate::{
app::{trace_to_std, SitemapConfig},
queries::event_subjects_updated::{
Category, Category2, CategoryUpdated, Collection, CollectionUpdated, Page, PageUpdated,
Product, ProductCreated, ProductUpdated,
},
sitemap::{ItemData, ItemType, Url},
};
pub fn init_tracing() {
let config = Config {
apl: AplType::File,
apl_url: "redis://localhost:6379".to_string(),
log_level: Level::TRACE,
app_api_base_url: "http://localhost:3000".to_string(),
app_iframe_base_url: "http://localhost:3000".to_string(),
required_saleor_version: "^3.13".to_string(),
};
trace_to_std(&config).unwrap();
}
impl Distribution<ItemType> for Standard {
fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> ItemType {
match rng.gen_range(0..5) {
0 | 1 => ItemType::Category,
2 => ItemType::Page,
3 => ItemType::Product,
4 => ItemType::Collection,
_ => ItemType::Product,
}
}
}
pub fn testing_configs() -> (Config, SitemapConfig) {
(
Config {
apl: AplType::File,
apl_url: "redis://localhost:6379".to_string(),
log_level: Level::TRACE,
app_api_base_url: "http://localhost:3000".to_string(),
app_iframe_base_url: "http://localhost:3000".to_string(),
required_saleor_version: "^3.13".to_string(),
},
SitemapConfig {
allowed_host: "https://api.example.com".to_string(),
target_folder: "./temp/sitemaps".to_string(),
pages_template: "https://example.com/{page.slug}".to_string(),
index_hostname: "https://example.com".to_string(),
product_template: "https://example.com/{product.category.slug}/{product.slug}"
.to_string(),
category_template: "https://example.com/{category.slug}".to_string(),
collection_template: "https://example.com/collection/{collection.slug}".to_string(),
},
)
}
pub struct Action {
request_body: String,
url: Url,
webhook_type: EitherWebhookType,
action_type: ActionType,
}
#[derive(PartialEq, Eq, Clone)]
pub enum ActionType {
Create,
Update,
Delete,
}
impl Distribution<ActionType> for Standard {
fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> ActionType {
match rng.gen_range(0..4) {
1 => ActionType::Update,
2 => ActionType::Delete,
_ => ActionType::Create,
}
}
}
// pub fn gen_random_actions(
// len: usize,
// sitemap_config: &SitemapConfig,
// unwanted_actions: Vec<ActionType>,
// ) -> Vec<Action> {
// let mut res: Vec<Action> = vec![];
// for _ in 0..len {
// let mut slug = random_word::gen(random_word::Lang::En).to_owned();
// let mut id = cynic::Id::new(slug.to_owned() + "_ID");
//
// let mut rel_slug = random_word::gen(random_word::Lang::En).to_owned();
// let mut rel_id = cynic::Id::new(rel_slug.to_owned() + "_ID");
//
// let mut action_type = rand::random::<ActionType>();
//
// while unwanted_actions.contains(&action_type) {
// action_type = rand::random::<ActionType>();
// }
//
// match rand::random::<ItemType>() {
// ItemType::Product => {
// // If there is a category url already, use that for relation instead of always a
// let mut is_using_existing_category = false;
// // new one
// if res
// .iter()
// .find(|r| r.url.data.typ == ItemType::Category)
// .is_some()
// {
// match rand::random::<bool>() {
// true => loop {
// let r = res.choose(&mut rand::thread_rng()).unwrap().clone();
// if r.url.data.typ == ItemType::Category {
// rel_slug = r.url.data.slug;
// rel_id = cynic::Id::new(r.url.data.id);
// is_using_existing_category = true;
// break;
// }
// },
// false => (),
// };
// }
// let product_updated: String = match action_type {
// ActionType::Create => serde_json::to_string_pretty(&ProductCreated {
// product: Some(Product {
// id: id.clone(),
// slug: slug.clone(),
// category: Some(Category {
// slug: rel_slug.clone(),
// id: rel_id.clone(),
// }),
// }),
// })
// .unwrap(),
// ActionType::Update => {
// let p;
// loop {
// let c = res.choose(&mut rand::thread_rng()).unwrap().clone();
// if c.action_type != ActionType::Delete {
// p = c;
// break;
// }
// }
// serde_json::to_string_pretty(&ProductUpdated {
// product: Some(Product {
// id: cynic::Id::new(p.url.data.id),
// slug: p.url.data.slug.clone(),
// category: p.url.related.map(|c| Category {
// slug: c.slug.clone(),
// id: cynic::Id::new(c.id),
// }),
// }),
// })
// }
// .unwrap(),
// ActionType::Delete => {
// let p;
// loop {
// let c = res.choose(&mut rand::thread_rng()).unwrap().clone();
// if c.action_type != ActionType::Delete {
// p = c;
// break;
// }
// }
// serde_json::to_string_pretty(&ProductUpdated {
// product: Some(Product {
// id: id.clone(),
// slug: slug.clone(),
// category: Some(Category {
// slug: rel_slug.clone(),
// id: rel_id.clone(),
// }),
// }),
// })
// .unwrap()}
// };
// let url = Url::new(
// product_updated.clone(),
// &sitemap_config,
// ItemData {
// id: id.clone().inner().to_owned(),
// slug: slug.clone(),
// typ: ItemType::Product,
// },
// Some(ItemData {
// id: rel_id.inner().to_owned(),
// slug: rel_slug.clone(),
// typ: ItemType::Category,
// }),
// )
// .unwrap();
//
// if !is_using_existing_category {
// let category_updated = CategoryUpdated {
// category: Some(Category2 {
// id: rel_id.clone(),
// slug: rel_slug.clone(),
// }),
// };
//
// let cat_url = Url::new(
// category_updated.clone(),
// &sitemap_config,
// ItemData {
// id: id.clone().inner().to_owned(),
// slug: slug.clone(),
// typ: ItemType::Category,
// },
// None,
// )
// .unwrap();
// res.push((
// serde_json::to_string_pretty(&category_updated).unwrap(),
// cat_url,
// EitherWebhookType::Async(AsyncWebhookEventType::CategoryCreated),
// ));
// }
//
// res.push((
// serde_json::to_string_pretty(&product_updated).unwrap(),
// url,
// EitherWebhookType::Async(AsyncWebhookEventType::ProductCreated),
// ));
// }
// ItemType::Category => {
// let category_updated = CategoryUpdated {
// category: Some(Category2 {
// id: id.clone(),
// slug: slug.clone(),
// }),
// };
//
// let url = Url::new(
// category_updated.clone(),
// &sitemap_config,
// ItemData {
// id: id.clone().inner().to_owned(),
// slug: slug.clone(),
// typ: ItemType::Category,
// },
// None,
// )
// .unwrap();
// res.push((
// serde_json::to_string_pretty(&category_updated).unwrap(),
// url,
// EitherWebhookType::Async(AsyncWebhookEventType::CategoryCreated),
// ));
// }
// ItemType::Collection => {
// let collection_updated = CollectionUpdated {
// collection: Some(Collection {
// id: id.clone(),
// slug: slug.clone(),
// }),
// };
//
// let url = Url::new(
// collection_updated.clone(),
// &sitemap_config,
// ItemData {
// id: id.clone().inner().to_owned(),
// slug: slug.clone(),
// typ: ItemType::Collection,
// },
// None,
// )
// .unwrap();
// res.push((
// serde_json::to_string_pretty(&collection_updated).unwrap(),
// url,
// EitherWebhookType::Async(AsyncWebhookEventType::CollectionCreated),
// ));
// }
// ItemType::Page => {
// let page_updated = PageUpdated {
// page: Some(Page {
// id: id.clone(),
// slug: slug.clone(),
// }),
// };
//
// let url = Url::new(
// page_updated.clone(),
// &sitemap_config,
// ItemData {
// id: id.clone().inner().to_owned(),
// slug: slug.clone(),
// typ: ItemType::Page,
// },
// None,
// )
// .unwrap();
// res.push((
// serde_json::to_string_pretty(&page_updated).unwrap(),
// url,
// EitherWebhookType::Async(AsyncWebhookEventType::PageCreated),
// ));
// }
// }
// }
// res
// }
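/// Generates `len` random create events. For each event it returns the
/// serialized webhook body, the `Url` it should map to, and the matching
/// webhook type; products may also emit an extra category event, so the
/// result can be longer than `len`. A minimal usage sketch:
///
///     let (_, sitemap_config) = testing_configs();
///     let urls = gen_random_url_set(100, &sitemap_config);
///     assert!(urls.len() >= 100);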
pub fn gen_random_url_set(
len: usize,
sitemap_config: &SitemapConfig,
) -> Vec<(String, Url, EitherWebhookType)> {
let mut res: Vec<(String, Url, EitherWebhookType)> = vec![];
for _ in 0..len {
let slug = random_word::gen(random_word::Lang::En).to_owned();
let id = cynic::Id::new(slug.to_owned() + "_ID");
let mut rel_slug = random_word::gen(random_word::Lang::En).to_owned();
let mut rel_id = cynic::Id::new(rel_slug.to_owned() + "_ID");
match rand::random::<ItemType>() {
ItemType::Product => {
// If there is a category url already, sometimes use it for the relation
// instead of always creating a new one
let mut is_using_existing_category = false;
if res.iter().any(|r| r.1.data.typ == ItemType::Category) && rand::random::<bool>() {
// Keep sampling until an existing category entry comes up
loop {
let r = res.choose(&mut rand::thread_rng()).unwrap().clone();
if r.1.data.typ == ItemType::Category {
rel_slug = r.1.data.slug;
rel_id = cynic::Id::new(r.1.data.id);
is_using_existing_category = true;
break;
}
}
}
let product_updated = ProductUpdated {
product: Some(Product {
id: id.clone(),
slug: slug.clone(),
category: Some(Category {
slug: rel_slug.clone(),
id: rel_id.clone(),
}),
}),
};
let url = Url::new(
product_updated.clone(),
sitemap_config,
ItemData {
id: id.inner().to_owned(),
slug: slug.clone(),
typ: ItemType::Product,
},
Some(ItemData {
id: rel_id.inner().to_owned(),
slug: rel_slug.clone(),
typ: ItemType::Category,
}),
)
.unwrap();
if !is_using_existing_category {
let category_updated = CategoryUpdated {
category: Some(Category2 {
id: rel_id.clone(),
slug: rel_slug.clone(),
}),
};
let cat_url = Url::new(
category_updated.clone(),
sitemap_config,
ItemData {
// This ItemData describes the related category, so use rel_id/rel_slug to
// match the CategoryUpdated body above
id: rel_id.inner().to_owned(),
slug: rel_slug.clone(),
typ: ItemType::Category,
},
None,
)
.unwrap();
res.push((
serde_json::to_string_pretty(&category_updated).unwrap(),
cat_url,
EitherWebhookType::Async(AsyncWebhookEventType::CategoryCreated),
));
}
res.push((
serde_json::to_string_pretty(&product_updated).unwrap(),
url,
EitherWebhookType::Async(AsyncWebhookEventType::ProductCreated),
));
}
ItemType::Category => {
let category_updated = CategoryUpdated {
category: Some(Category2 {
id: id.clone(),
slug: slug.clone(),
}),
};
let url = Url::new(
category_updated.clone(),
sitemap_config,
ItemData {
id: id.inner().to_owned(),
slug: slug.clone(),
typ: ItemType::Category,
},
None,
)
.unwrap();
res.push((
serde_json::to_string_pretty(&category_updated).unwrap(),
url,
EitherWebhookType::Async(AsyncWebhookEventType::CategoryCreated),
));
}
ItemType::Collection => {
let collection_updated = CollectionUpdated {
collection: Some(Collection {
id: id.clone(),
slug: slug.clone(),
}),
};
let url = Url::new(
collection_updated.clone(),
sitemap_config,
ItemData {
id: id.inner().to_owned(),
slug: slug.clone(),
typ: ItemType::Collection,
},
None,
)
.unwrap();
res.push((
serde_json::to_string_pretty(&collection_updated).unwrap(),
url,
EitherWebhookType::Async(AsyncWebhookEventType::CollectionCreated),
));
}
ItemType::Page => {
let page_updated = PageUpdated {
page: Some(Page {
id: id.clone(),
slug: slug.clone(),
}),
};
let url = Url::new(
page_updated.clone(),
sitemap_config,
ItemData {
id: id.inner().to_owned(),
slug: slug.clone(),
typ: ItemType::Page,
},
None,
)
.unwrap();
res.push((
serde_json::to_string_pretty(&page_updated).unwrap(),
url,
EitherWebhookType::Async(AsyncWebhookEventType::PageCreated),
));
}
}
}
res
}