starting sitemap rewrite

Djkáťo 2024-07-03 16:07:04 +02:00
parent b8ae6fcb67
commit c986b7feeb
28 changed files with 524 additions and 37180 deletions

.env

@@ -1,8 +1,8 @@
## COMMON VARIABLES FOR ALL APPS
REQUIRED_SALEOR_VERSION="^3.13"
# only sets port, the host is always 0.0.0.0 (listens to everything). Set this to docker-compose service name
-APP_API_BASE_URL="http://10.100.110.21:3000"
-APP_IFRAME_BASE_URL="http://10.100.110.21:3000"
+APP_API_BASE_URL="http://10.100.110.27:3000"
+APP_IFRAME_BASE_URL="http://10.100.110.27:3000"
APL="Redis"
APL_URL="redis://localhost:6380/2"
LOG_LEVEL="DEBUG"
@@ -28,3 +28,4 @@ ACTIVE_PAYMENT_METHODS="cod,cash,transfer"
LOCALE="Sk"
# uses https://crates.io/crates/iso_currency
CURRENCIES="EUR"
+COD_EXTRA_PRICE_AS_PRODUCT_SLUG="dobierka"

Cargo.lock (generated)

@@ -370,6 +370,7 @@ checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf"
dependencies = [
"async-trait",
"axum-core",
+"axum-macros",
"bytes 1.5.0",
"futures-util",
"http 1.1.0",
@@ -418,6 +419,18 @@ dependencies = [
"tracing",
]

+[[package]]
+name = "axum-macros"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00c055ee2d014ae5981ce1016374e8213682aa14d9bf40e48ab48b5f3ef20eaa"
+dependencies = [
+"heck",
+"proc-macro2",
+"quote",
+"syn 2.0.48",
+]
+
[[package]]
name = "backtrace"
version = "0.3.69"
@@ -3017,9 +3030,9 @@ checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae"
[[package]]
name = "quick-xml"
-version = "0.31.0"
+version = "0.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
+checksum = "6f24d770aeca0eacb81ac29dfbc55ebcc09312fdd1f8bbecdc7e4a84e000e3b4"
dependencies = [
"memchr",
"serde",
@@ -3134,6 +3147,26 @@ dependencies = [
"rand_core 0.5.1",
]

+[[package]]
+name = "rayon"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
+dependencies = [
+"either",
+"rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
+dependencies = [
+"crossbeam-deque",
+"crossbeam-utils",
+]
+
[[package]]
name = "redis"
version = "0.25.3"
@@ -3880,12 +3913,12 @@ dependencies = [
"num-bigint",
"num-traits",
"thiserror",
-"time 0.3.34",
+"time 0.3.36",
]

[[package]]
name = "sitemap-generator"
-version = "0.1.0"
+version = "1.0.0"
dependencies = [
"anyhow",
"axum",
@@ -3898,12 +3931,11 @@ dependencies = [
"flate2",
"pico-args",
"quick-xml",
-"redis",
+"rayon",
"saleor-app-sdk",
"serde",
"serde_cbor",
"serde_json",
-"sitemap-rs",
"surf",
"tera",
"tinytemplate",
@@ -3915,16 +3947,6 @@ dependencies = [
"tracing-subscriber",
]

-[[package]]
-name = "sitemap-rs"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88cc73a9aac975541c9054e74ceae8d8ee85edc89a322404c275c1d100fffa51"
-dependencies = [
-"chrono",
-"xml-builder",
-]
-
[[package]]
name = "slab"
version = "0.4.9"
@@ -4284,9 +4306,9 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.34"
+version = "0.3.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749"
+checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
dependencies = [
"deranged",
"itoa",
@@ -4294,7 +4316,7 @@ dependencies = [
"powerfmt",
"serde",
"time-core",
-"time-macros 0.2.17",
+"time-macros 0.2.18",
]

[[package]]
@@ -4315,9 +4337,9 @@ dependencies = [
[[package]]
name = "time-macros"
-version = "0.2.17"
+version = "0.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774"
+checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
dependencies = [
"num-conv",
"time-core",
@@ -5161,12 +5183,6 @@ dependencies = [
"tap",
]

-[[package]]
-name = "xml-builder"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efc4f1a86af7800dfc4056c7833648ea4515ae21502060b5c98114d828f5333b"
-
[[package]]
name = "xxhash-rust"
version = "0.8.10"


@@ -51,6 +51,7 @@ serde-wasm-bindgen = { version = "0.6.5", optional = true }
bus = { version = "2.4.1", optional = true }

[dependencies.web-sys]
+optional = true
workspace = true
features = [
"Window",
@@ -74,4 +75,9 @@ middleware = [
redis_apl = ["dep:redis"]
webhook_utils = ["dep:http"]
tracing = ["dep:tracing", "dep:tracing-subscriber"]
-bridge = ["dep:wasm-bindgen", "dep:bus", "dep:serde-wasm-bindgen"]
+bridge = [
+    "dep:wasm-bindgen",
+    "dep:bus",
+    "dep:serde-wasm-bindgen",
+    "dep:web-sys",
+]
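Making web-sys an optional dependency and adding "dep:web-sys" to the bridge feature means the SDK only compiles its browser-facing code when the wasm bridge is actually requested. A minimal sketch of the gating pattern this enables (the function below is illustrative, not part of the SDK):

#[cfg(feature = "bridge")]
pub fn dashboard_origin() -> Option<String> {
    // web_sys::window() returns None outside a browser context
    web_sys::window().map(|w| w.origin())
}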


@@ -69,6 +69,7 @@ pub struct AppState {
    pub config: Config,
    pub manifest: AppManifest,
    pub active_payment_methods: Vec<ActivePaymentMethod>,
+    pub cod_extra_price_as_product_slug: Option<String>,
}

pub fn get_active_payment_methods_from_env() -> anyhow::Result<Vec<ActivePaymentMethod>> {


@@ -93,6 +93,7 @@ async fn main() -> anyhow::Result<()> {
        manifest: app_manifest,
        config: config.clone(),
        saleor_app: Arc::new(Mutex::new(saleor_app)),
+        cod_extra_price_as_product_slug: std::env::var("COD_EXTRA_PRICE_AS_PRODUCT_SLUG").ok()
    };

    let app = create_routes(app_state);


@@ -139,6 +139,18 @@ async fn create_response(
    // .await?
    // .token;

+    if payment_method == PaymentMethodType::COD {
+        match session_data.source_object {
+            OrderOrCheckout::Order(o) => {
+                o.collection_point_name;
+            }
+            OrderOrCheckout::Checkout(c) => {
+                c.delivery_method;
+            }
+            _ => error!("session_data.source_object is neither Order or Checkout")
+        }
+    }
+
    let str_payment_method =
        serde_json::to_string(&TransactionInitializeSessionData { payment_method })?;


@@ -1,6 +1,6 @@
[package]
name = "sitemap-generator"
-version = "0.1.0"
+version = "1.0.0"
edition = "2021"
authors = ["Djkáťo <djkatovfx@gmail.com>"]
description = "Creates and keeps Sitemap.xml uptodate with Saleor."
@@ -16,11 +16,6 @@ anyhow.workspace = true
serde.workspace = true
serde_json.workspace = true
tokio = { workspace = true, features = ["full"] }
-redis = { workspace = true, features = [
-    "aio",
-    "tokio-comp",
-    "connection-manager",
-] }
envy.workspace = true
tracing.workspace = true
tracing-serde.workspace = true
@@ -35,13 +30,13 @@ cynic = { workspace = true, features = ["http-surf"] }
cynic-codegen.workspace = true
tera = { version = "1.19.1", default-features = false }
fd-lock = "4.0.2"
-quick-xml = { version = "0.31.0", features = ["serialize"] }
+quick-xml = { version = "0.34.0", features = ["serialize"] }
flate2 = "1.0.28"
tinytemplate = "1.2.1"
-sitemap-rs = "0.2.1"
chrono = { version = "0.4.34", features = ["serde"] }
serde_cbor = "0.11.2"
pico-args = "0.5.0"
+rayon = "1.10.0"

[build-dependencies]
cynic-codegen.workspace = true
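rayon takes redis's place in the dependency list, which suggests the rewrite trades the Redis cache for parallel, in-memory generation of sitemap entries. A rough sketch of that idea (the function and its template handling are illustrative only, not the app's actual code):

use rayon::prelude::*;

// Render one URL per product slug across all cores.
fn render_product_urls(slugs: &[String], template: &str) -> Vec<String> {
    slugs
        .par_iter() // rayon's drop-in parallel iterator
        .map(|slug| template.replace("{slug}", slug))
        .collect()
}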


@@ -1,6 +1,6 @@
fn main() {
    cynic_codegen::register_schema("saleor")
-        .from_sdl_file("schema/schema.graphql")
+        .from_sdl_file("../schema.graphql")
        .unwrap()
        .as_default()
        .unwrap();


@@ -0,0 +1,14 @@
+<?xml version="1.0"?>
+<!-- change the target namespace for your app I think? Not sure -->
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" targetNamespace="http://app-sitemap-generator.kremik.sk/schemas/saleor-ref.xsd">
+    <xs:element name="ref">
+        <xs:complexType>
+            <xs:sequence>
+                <xs:element name="id" type="xs:string"/>
+                <xs:element name="category-id" type="xs:string" minOccurs="0"/>
+            </xs:sequence>
+        </xs:complexType>
+    </xs:element>
+</xs:schema>
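This schema describes a small <ref> marker carrying a Saleor ID and an optional category ID. With quick-xml's serialize feature (enabled in the Cargo.toml above), a matching Rust type could be sketched like this; the struct name and the sample ID are illustrative, not taken from the app:

use serde::Serialize;

#[derive(Serialize)]
#[serde(rename = "ref")]
struct SaleorRef {
    id: String,
    // minOccurs="0" in the schema, so optional here
    #[serde(rename = "category-id", skip_serializing_if = "Option::is_none")]
    category_id: Option<String>,
}

fn main() {
    let r = SaleorRef {
        id: "UHJvZHVjdDox".into(),
        category_id: None,
    };
    // prints: <ref><id>UHJvZHVjdDox</id></ref>
    println!("{}", quick_xml::se::to_string(&r).unwrap());
}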


@ -0,0 +1,5 @@
[toolchain]
channel = "nightly-2024-06-20"
## Toggle to this one for sdk releases
# channel = "stable"
targets = ["x86_64-unknown-linux-gnu"]

File diff suppressed because it is too large


@@ -1,16 +1,15 @@
-use anyhow::bail;
use axum::{
    http::StatusCode,
    response::{IntoResponse, Response},
};
-use chrono::{DateTime, FixedOffset};
-use std::{sync::Arc, time::Duration};
+use std::sync::Arc;
+use tokio::sync::mpsc::Sender;
use tracing_subscriber::EnvFilter;

-use redis::{AsyncCommands, Client};
use saleor_app_sdk::{config::Config, manifest::AppManifest, SaleorApp};
use serde::{Deserialize, Serialize};
-use tracing::{debug, info, level_filters::LevelFilter};
+use tracing::level_filters::LevelFilter;

// Make our own error that wraps `anyhow::Error`.
pub struct AppError(anyhow::Error);
@@ -56,12 +55,12 @@ pub fn trace_to_std(config: &Config) -> anyhow::Result<()> {
*/
#[derive(Debug, Clone)]
pub struct AppState {
-    pub xml_cache: Arc<tokio::sync::Mutex<XmlCache>>,
    pub saleor_app: Arc<tokio::sync::Mutex<SaleorApp>>,
    pub config: Config,
    pub target_channel: String,
    pub sitemap_config: SitemapConfig,
    pub manifest: AppManifest,
+    pub task_queue_sender: Sender<EventType>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -86,82 +85,3 @@ impl SitemapConfig {
        envy::from_env::<SitemapConfig>()
    }
}
#[derive(Debug)]
pub struct XmlCache {
client: Client,
app_api_base_url: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct XmlData {
pub id: cynic::Id,
pub slug: String,
pub relations: Vec<cynic::Id>,
pub data_type: XmlDataType,
pub last_modified: DateTime<FixedOffset>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub enum XmlDataType {
Category,
Product,
Page,
Collection,
}
impl XmlCache {
pub fn new(redis_url: &str, app_api_base_url: &str) -> anyhow::Result<Self> {
debug!("creating XmlCache...");
let client = redis::Client::open(redis_url)?;
let mut conn = client.get_connection_with_timeout(Duration::from_secs(3))?;
let val: Result<String, redis::RedisError> =
redis::cmd("INFO").arg("server").query(&mut conn);
match val {
Ok(_) => Ok(Self {
client,
app_api_base_url: app_api_base_url.to_owned(),
}),
Err(e) => bail!("failed redis connection(XmlCache), {:?}", e),
}
}
/**
* ONLY USE IF YOU KNOW WHAT YOU'RE DOING! Will flush entire cache, run regenerate() from
* webhooks to renew.
*/
pub async fn delete_all(&self, saleor_api_url: &str) -> anyhow::Result<()> {
debug!("xml data delete_cache()");
let mut conn = self.client.get_multiplexed_async_connection().await?;
conn.del(self.prepare_key(saleor_api_url)).await?;
info!("sucessful cache wipe");
Ok(())
}
pub async fn get_all(&self, saleor_api_url: &str) -> anyhow::Result<Vec<XmlData>> {
debug!("xml data get_all()");
let mut conn = self.client.get_multiplexed_async_connection().await?;
let res: Vec<u8> = conn.get(self.prepare_key(saleor_api_url)).await?;
let cache: Vec<XmlData> = serde_cbor::from_slice(&res)?;
info!("sucessful cache get");
Ok(cache)
}
pub async fn set(&self, data: Vec<XmlData>, saleor_api_url: &str) -> anyhow::Result<()> {
debug!("xml data set()");
let mut conn = self.client.get_multiplexed_async_connection().await?;
conn.set(self.prepare_key(saleor_api_url), serde_cbor::to_vec(&data)?)
.await?;
info!("sucessful cache set");
Ok(())
}
pub fn prepare_key(&self, saleor_api_url: &str) -> String {
let key = format!("{}:{saleor_api_url}", self.app_api_base_url);
key
}
}


@@ -9,6 +9,8 @@
mod app;
mod queries;
mod routes;
+mod sitemap;
+mod test;

use saleor_app_sdk::{
    config::Config,
@@ -17,11 +19,17 @@ use saleor_app_sdk::{
    SaleorApp,
};
use std::sync::Arc;
-use tokio::sync::Mutex;
+use tokio::{
+    spawn,
+    sync::{
+        mpsc::{channel, Receiver},
+        Mutex,
+    },
+};
use tracing::{debug, error, info};

use crate::{
-    app::{trace_to_std, AppState, SitemapConfig, XmlCache},
+    app::{trace_to_std, AppState, SitemapConfig},
    queries::event_subjects_updated::EVENTS_QUERY,
    routes::{create_routes, register::regenerate},
};
@@ -62,12 +70,13 @@ async fn main() -> anyhow::Result<()> {
    )
    .build();
    debug!("Created AppManifest...");
+    //Task queue
+    let (sender, receiver) = tokio::sync::mpsc::channel(100);
    let app_state = AppState {
+        task_queue_sender: sender,
        sitemap_config,
-        xml_cache: Arc::new(Mutex::new(XmlCache::new(
-            &config.apl_url,
-            &config.app_api_base_url,
-        )?)),
        manifest: app_manifest,
        config: config.clone(),
        target_channel: match dotenvy::var("CHANNEL_SLUG") {
@@ -81,24 +90,6 @@
    };
    debug!("Created AppState...");

-    {
-        // either clear the cache, regenerate or both from command args
-        let mut pargs = pico_args::Arguments::from_env();
-        if let Some(for_url) = pargs.opt_value_from_str::<_, String>("--for-url")? {
-            if pargs.contains("--cache-clear") {
-                let xml_cache = app_state.xml_cache.lock().await;
-                xml_cache.delete_all(&for_url).await?;
-                debug!("Cleared Xml Cache for {for_url}");
-            }
-            if pargs.contains("--cache-regenerate") {
-                regenerate(app_state.clone(), for_url).await?;
-            }
-            std::process::exit(0)
-        }
-    }
-
    let app = create_routes(app_state);
    let listener = tokio::net::TcpListener::bind(
        "0.0.0.0:".to_owned()


@@ -90,111 +90,111 @@ fragment BaseProduct on Product {
}
"#;

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Subscription")]
pub struct QueryProductsChanged {
    pub event: Option<Event>,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct ProductUpdated {
    pub product: Option<Product>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct ProductDeleted {
    pub product: Option<Product>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct ProductCreated {
    pub product: Option<Product>,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct Product {
    pub id: cynic::Id,
    pub slug: String,
    pub category: Option<Category>,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct PageUpdated {
    pub page: Option<Page>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct PageDeleted {
    pub page: Option<Page>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct PageCreated {
    pub page: Option<Page>,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct Page {
    pub slug: String,
    pub id: cynic::Id,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct CollectionUpdated {
    pub collection: Option<Collection>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct CollectionDeleted {
    pub collection: Option<Collection>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct CollectionCreated {
    pub collection: Option<Collection>,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct Collection {
    pub id: cynic::Id,
    pub slug: String,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct CategoryUpdated {
    pub category: Option<Category2>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct CategoryDeleted {
    pub category: Option<Category2>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct CategoryCreated {
    pub category: Option<Category2>,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct Category {
    pub slug: String,
    pub id: cynic::Id,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
#[cynic(graphql_type = "Category")]
pub struct Category2 {
    pub id: cynic::Id,
    pub slug: String,
}

-#[derive(cynic::QueryFragment, Debug, Serialize)]
+#[derive(cynic::QueryFragment, Debug, Serialize, Clone)]
pub struct PageInfo {
    pub end_cursor: Option<String>,
    pub has_next_page: bool,
}

-#[derive(cynic::InlineFragments, Debug)]
+#[derive(cynic::InlineFragments, Debug, Clone)]
pub enum Event {
    ProductUpdated(ProductUpdated),
    ProductCreated(ProductCreated),


@@ -80,25 +80,25 @@ query getCategoryProductsNext($id: ID!, $after: String!, $channel: String!) {
}
*/

-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
pub struct GetCategoriesNextVariables<'a> {
    pub after: Option<&'a str>,
}

-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
pub struct GetCategoryProductsInitialVariables<'a> {
    pub channel: &'a str,
    pub id: &'a cynic::Id,
}

-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
pub struct GetCategoryProductsNextVariables<'a> {
    pub after: &'a str,
    pub channel: &'a str,
    pub id: &'a cynic::Id,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(
    graphql_type = "Query",
    variables = "GetCategoryProductsInitialVariables"
@@ -108,28 +108,28 @@ pub struct GetCategoryProductsInitial {
    pub category: Option<Category>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Query", variables = "GetCategoryProductsNextVariables")]
pub struct GetCategoryProductsNext {
    #[arguments(id: $id)]
    pub category: Option<Category2>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Query", variables = "GetCategoriesNextVariables")]
pub struct GetCategoriesNext {
    #[arguments(first: 50, after: $after)]
    pub categories: Option<CategoryCountableConnection>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Query")]
pub struct GetCategoriesInitial {
    #[arguments(first: 50)]
    pub categories: Option<CategoryCountableConnection2>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "CategoryCountableConnection")]
pub struct CategoryCountableConnection2 {
    pub total_count: Option<i32>,
@@ -137,13 +137,13 @@ pub struct CategoryCountableConnection2 {
    pub edges: Vec<CategoryCountableEdge>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct CategoryCountableConnection {
    pub page_info: PageInfo,
    pub edges: Vec<CategoryCountableEdge>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct CategoryCountableEdge {
    pub node: Category3,
}

@@ -156,7 +156,7 @@ pub struct Category3 {
    pub slug: String,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(variables = "GetCategoryProductsInitialVariables")]
pub struct Category {
    pub slug: String,
@@ -166,14 +166,14 @@ pub struct Category {
    pub products: Option<ProductCountableConnection>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct ProductCountableConnection {
    pub page_info: PageInfo,
    pub edges: Vec<ProductCountableEdge>,
    pub total_count: Option<i32>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(
    graphql_type = "Category",
    variables = "GetCategoryProductsNextVariables"
@@ -183,14 +183,14 @@ pub struct Category2 {
    pub products: Option<ProductCountableConnection2>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "ProductCountableConnection")]
pub struct ProductCountableConnection2 {
    pub page_info: PageInfo,
    pub edges: Vec<ProductCountableEdge>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct ProductCountableEdge {
    pub node: Product,
}

@@ -202,7 +202,7 @@ pub struct Product {
    pub updated_at: DateTime,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct PageInfo {
    pub has_next_page: bool,
    pub end_cursor: Option<String>,


@@ -1,26 +1,26 @@
#[cynic::schema("saleor")]
mod schema {}

-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
pub struct GetCollectionsNextVariables<'a> {
    pub after: Option<&'a str>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Query", variables = "GetCollectionsNextVariables")]
pub struct GetCollectionsNext {
    #[arguments(first: 50, after: $after)]
    pub collections: Option<CollectionCountableConnection>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Query")]
pub struct GetCollectionsInitial {
    #[arguments(first: 50)]
    pub collections: Option<CollectionCountableConnection2>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "CollectionCountableConnection")]
pub struct CollectionCountableConnection2 {
    pub total_count: Option<i32>,
@@ -28,18 +28,18 @@ pub struct CollectionCountableConnection2 {
    pub edges: Vec<CollectionCountableEdge>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct CollectionCountableConnection {
    pub page_info: PageInfo,
    pub edges: Vec<CollectionCountableEdge>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct CollectionCountableEdge {
    pub node: Collection,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct PageInfo {
    pub has_next_page: bool,
    pub end_cursor: Option<String>,


@@ -36,26 +36,26 @@ query getPagesNext($after: String!) {
}
*/

-#[derive(cynic::QueryVariables, Debug)]
+#[derive(cynic::QueryVariables, Debug, Clone)]
pub struct GetPagesNextVariables<'a> {
    pub after: &'a str,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Query", variables = "GetPagesNextVariables")]
pub struct GetPagesNext {
    #[arguments(first: 50, after: $after)]
    pub pages: Option<PageCountableConnection>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Query")]
pub struct GetPagesInitial {
    #[arguments(first: 50)]
    pub pages: Option<PageCountableConnection2>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "PageCountableConnection")]
pub struct PageCountableConnection2 {
    pub total_count: Option<i32>,
@@ -63,18 +63,18 @@ pub struct PageCountableConnection2 {
    pub edges: Vec<PageCountableEdge>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct PageCountableConnection {
    pub page_info: PageInfo,
    pub edges: Vec<PageCountableEdge>,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct PageCountableEdge {
    pub node: Page,
}

-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct PageInfo {
    pub has_next_page: bool,
    pub end_cursor: Option<String>,


@@ -25,9 +25,8 @@ pub fn create_routes(state: AppState) -> Router {
    //TODO : Fix this relative path issue in workspaces
    let serve_dir = ServeDir::new("./public").not_found_service(service);

-    //TODO Query for everything using the app auth token
-    //TODO "Failed fetching initial products: More than one channel exists, please spocify which
-    //one"
+    //TODO: Query for everything using the app auth token
+    //TODO: "Failed fetching initial products: More than one channel exists, please spocify which one"

    Router::new()
        .route("/api/webhooks", any(webhooks))
        .layer(middleware::from_fn(webhook_signature_verifier))


@@ -8,13 +8,12 @@ use axum::{
};
use cynic::{http::SurfExt, QueryBuilder};
use saleor_app_sdk::{AuthData, AuthToken};
-use sitemap_rs::url::Url;
use tinytemplate::TinyTemplate;
use tokio::spawn;
use tracing::{debug, error, info, trace};

use crate::{
-    app::{AppError, AppState, XmlData, XmlDataType},
+    app::{AppError, AppState},
    queries::{
        event_subjects_updated::{
            self, CategoryUpdated, CollectionUpdated, PageUpdated, ProductUpdated,
@@ -30,7 +29,6 @@
        },
        get_all_pages::{self, GetPagesInitial, GetPagesNext, GetPagesNextVariables},
    },
-    routes::webhooks::write_xml,
};

pub async fn register(
@@ -75,206 +73,24 @@ pub async fn register(
pub async fn regenerate(state: AppState, saleor_api_url: String) -> anyhow::Result<()> {
info!("regeneration: fetching all categories, products, collections, pages");
-let xml_cache = state.xml_cache.lock().await;
let app = state.saleor_app.lock().await;
let auth_data = app.apl.get(&saleor_api_url).await?;
let mut categories: Vec<(Category3, Vec<Arc<CategorisedProduct>>)> =
get_all_categories(&saleor_api_url, &auth_data.token)
.await?
.into_iter()
.map(|c| (c, vec![]))
.collect();
let mut products = vec![];
// If there are no products, append this empty array
let mut empty_products = vec![];
for category in categories.iter_mut() {
products.append(
match &mut get_all_products(
&saleor_api_url,
&state.target_channel,
&auth_data.token,
category,
)
.await
{
Ok(p) => p,
Err(e) => {
info!("Category {} has no products, {e}", category.0.slug);
&mut empty_products
}
},
);
}
let pages = get_all_pages(&saleor_api_url, &auth_data.token).await?;
let collections = get_all_collections(&saleor_api_url, &auth_data.token).await?;
info!(
"regeneration: found {} products, {} categories, {} pages, {} collections",
-products.len(),
-categories.len(),
+0,
+0,
pages.len(),
collections.len()
);
-info!("regeneration: creating xml data and caching it");
+info!("regeneration: creating xml data");
let mut xml_data = vec![];
xml_data.append(
&mut categories
.into_iter()
.map(|c| XmlData {
slug: c.0.slug,
last_modified: chrono::DateTime::<chrono::Utc>::from_str(&c.0.updated_at.0)
.map_or(chrono::offset::Utc::now().fixed_offset(), |d| {
d.fixed_offset()
}),
id: c.0.id,
relations: c.1.iter().map(|p| p.product.id.clone()).collect::<Vec<_>>(),
data_type: XmlDataType::Category,
})
.collect::<Vec<_>>(),
);
xml_data.append(
&mut products
.into_iter()
.map(|p| XmlData {
data_type: XmlDataType::Product,
relations: vec![p.category_id.clone()],
id: p.product.id.clone(),
last_modified: chrono::DateTime::<chrono::Utc>::from_str(&p.product.updated_at.0)
.map_or(chrono::offset::Utc::now().fixed_offset(), |d| {
d.fixed_offset()
}),
slug: p.product.slug.clone(),
})
.collect(),
);
xml_data.append(
&mut pages
.into_iter()
.map(|p| XmlData {
data_type: XmlDataType::Page,
relations: vec![],
id: p.id.clone(),
last_modified: match p.published_at {
Some(d) => chrono::DateTime::<chrono::Utc>::from_str(&d.0)
.map_or(chrono::offset::Utc::now().fixed_offset(), |d| {
d.fixed_offset()
}),
None => chrono::offset::Utc::now().fixed_offset(),
},
slug: p.slug.clone(),
})
.collect(),
);
xml_data.append(
&mut collections
.into_iter()
.map(|c| XmlData {
slug: c.slug,
last_modified: chrono::offset::Utc::now().fixed_offset(),
id: c.id,
relations: vec![],
data_type: XmlDataType::Category,
})
.collect::<Vec<_>>(),
);
xml_cache.set(xml_data.clone(), &saleor_api_url).await?;
info!("regeneration: xml_cache was set");
//create urls
info!("regeneration: creating urls"); info!("regeneration: creating urls");
let mut page_urls = vec![]; // write_xml(page_urls, &state, XmlDataType::Page).await?;
let mut product_urls = vec![]; // write_xml(collection_urls, &state, XmlDataType::Collection).await?;
let mut category_urls = vec![]; // write_xml(category_urls, &state, XmlDataType::Category).await?;
let mut collection_urls = vec![]; // write_xml(product_urls, &state, XmlDataType::Product).await?;
for x in xml_data.iter() {
match x.data_type {
XmlDataType::Page => {
let mut tt = TinyTemplate::new();
tt.add_template("page_url", &state.sitemap_config.pages_template)?;
let context = PageUpdated {
page: Some(event_subjects_updated::Page {
slug: x.slug.clone(),
id: x.id.clone(),
}),
};
let page_url = Url::builder(tt.render("page_url", &context)?)
.last_modified(x.last_modified)
.build()?;
trace!("Created Page url: {}", &page_url.location);
page_urls.push(page_url);
}
XmlDataType::Product => {
let mut tt = TinyTemplate::new();
tt.add_template("product_url", &state.sitemap_config.product_template)?;
let context = ProductUpdated {
product: Some(event_subjects_updated::Product {
id: x.id.clone(),
slug: x.slug.clone(),
category: match xml_data.iter().find(|all| {
x.relations
.iter()
.any(|rel| all.id == *rel && all.data_type == XmlDataType::Category)
}) {
Some(c) => Some(event_subjects_updated::Category {
slug: c.slug.clone(),
id: c.id.clone(),
}),
None => Some(event_subjects_updated::Category {
slug: "unknown".to_owned(),
id: cynic::Id::new("unknown".to_owned()),
}),
},
}),
};
let product_url = Url::builder(tt.render("product_url", &context)?)
.last_modified(x.last_modified)
.build()?;
trace!("Created Page url: {}", &product_url.location);
product_urls.push(product_url);
}
XmlDataType::Category => {
let mut tt = TinyTemplate::new();
tt.add_template("category_url", &state.sitemap_config.category_template)?;
let context = CategoryUpdated {
category: Some(event_subjects_updated::Category2 {
id: x.id.clone(),
slug: x.slug.clone(),
}),
};
let category_url = Url::builder(tt.render("category_url", &context)?)
.last_modified(x.last_modified)
.build()?;
trace!("Created category url: {}", &category_url.location);
category_urls.push(category_url);
}
XmlDataType::Collection => {
let mut tt = TinyTemplate::new();
tt.add_template("coll_url", &state.sitemap_config.collection_template)?;
let context = CollectionUpdated {
collection: Some(event_subjects_updated::Collection {
slug: x.slug.clone(),
id: x.id.clone(),
}),
};
let collection_url = Url::builder(tt.render("coll_url", &context)?)
.last_modified(x.last_modified)
.build()?;
trace!("Created collection url: {}", &collection_url.location);
collection_urls.push(collection_url);
}
}
}
write_xml(page_urls, &state, XmlDataType::Page).await?;
write_xml(collection_urls, &state, XmlDataType::Collection).await?;
write_xml(category_urls, &state, XmlDataType::Category).await?;
write_xml(product_urls, &state, XmlDataType::Product).await?;
Ok(())
}


@@ -1,11 +1,8 @@
-use tokio::{fs::File, io::AsyncWriteExt};
use anyhow::Context;
use axum::{
    extract::State,
    http::{HeaderMap, StatusCode},
};
-use chrono::{DateTime, Utc};
use saleor_app_sdk::{
    headers::SALEOR_API_URL_HEADER,
    webhooks::{
@@ -13,16 +10,14 @@ use saleor_app_sdk::{
        AsyncWebhookEventType,
    },
};
-use sitemap_rs::{sitemap::Sitemap, sitemap_index::SitemapIndex, url::Url, url_set::UrlSet};
-use tinytemplate::TinyTemplate;
-use tokio::spawn;
-use tracing::{debug, error, info};
+use tracing::{debug, info};

use crate::{
-    app::{AppError, AppState, XmlData, XmlDataType},
+    app::{AppError, AppState},
    queries::event_subjects_updated::{
-        Category, Category2, CategoryUpdated, Collection, CollectionUpdated, Page, PageUpdated,
-        Product, ProductUpdated,
+        CategoryCreated, CategoryDeleted, CategoryUpdated, CollectionCreated, CollectionDeleted,
+        CollectionUpdated, PageCreated, PageDeleted, PageUpdated, ProductCreated, ProductDeleted,
+        ProductUpdated,
    },
};
@@ -44,47 +39,42 @@ pub async fn webhooks(
    if let EitherWebhookType::Async(a) = event_type {
        // TODO: Extract this into a function so You can check what the error was if something fails
        match a {
-            AsyncWebhookEventType::ProductUpdated
-            | AsyncWebhookEventType::ProductCreated
-            | AsyncWebhookEventType::ProductDeleted => {
+            AsyncWebhookEventType::ProductUpdated => {
                let product: ProductUpdated = serde_json::from_str(&data)?;
-                spawn(async move {
-                    if let Err(e) = update_sitemap_product(product, &url, state).await {
-                        error!("Error processing Product, e: {:?}", e);
-                    }
-                });
            }
+            AsyncWebhookEventType::ProductCreated => {
+                let product: ProductCreated = serde_json::from_str(&data)?;
+            }
+            AsyncWebhookEventType::ProductDeleted => {
+                let product: ProductDeleted = serde_json::from_str(&data)?;
+            }
+            AsyncWebhookEventType::CategoryCreated => {
+                let category: CategoryCreated = serde_json::from_str(&data)?;
+            }
-            AsyncWebhookEventType::CategoryCreated
-            | AsyncWebhookEventType::CategoryUpdated
-            | AsyncWebhookEventType::CategoryDeleted => {
+            AsyncWebhookEventType::CategoryUpdated => {
                let category: CategoryUpdated = serde_json::from_str(&data)?;
-                spawn(async move {
-                    if let Err(e) = update_sitemap_category(category, &url, state).await {
-                        error!("Error processing Category, e: {:?}", e);
-                    }
-                });
            }
+            AsyncWebhookEventType::CategoryDeleted => {
+                let category: CategoryDeleted = serde_json::from_str(&data)?;
+            }
+            AsyncWebhookEventType::PageCreated => {
+                let page: PageCreated = serde_json::from_str(&data)?;
+            }
-            AsyncWebhookEventType::PageCreated
-            | AsyncWebhookEventType::PageUpdated
-            | AsyncWebhookEventType::PageDeleted => {
+            AsyncWebhookEventType::PageUpdated => {
                let page: PageUpdated = serde_json::from_str(&data)?;
-                spawn(async move {
-                    if let Err(e) = update_sitemap_page(page, &url, state).await {
-                        error!("Error processing Page, e: {:?}", e);
-                    }
-                });
            }
+            AsyncWebhookEventType::PageDeleted => {
+                let page: PageDeleted = serde_json::from_str(&data)?;
+            }
+            AsyncWebhookEventType::CollectionCreated => {
+                let collection: CollectionCreated = serde_json::from_str(&data)?;
+            }
-            AsyncWebhookEventType::CollectionCreated
-            | AsyncWebhookEventType::CollectionUpdated
-            | AsyncWebhookEventType::CollectionDeleted => {
+            AsyncWebhookEventType::CollectionUpdated => {
                let collection: CollectionUpdated = serde_json::from_str(&data)?;
-                spawn(async move {
-                    if let Err(e) = update_sitemap_collection(collection, &url, state).await {
-                        error!("Error processing Collection, e: {:?}", e);
-                    }
-                });
            }
+            AsyncWebhookEventType::CollectionDeleted => {
+                let collection: CollectionDeleted = serde_json::from_str(&data)?;
+            }
            _ => (),
        }
    }
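As rewritten, each arm only deserializes its payload and then drops it; the obvious next step is handing the parsed event to the task queue whose Sender now lives in AppState. A hedged sketch of that hand-off (the EventType wrapper is assumed, it is not defined in this commit):

use tokio::sync::mpsc::Sender;

// Forward a parsed webhook payload to the sitemap worker.
async fn enqueue<T>(sender: &Sender<T>, event: T) {
    if sender.send(event).await.is_err() {
        // the receiver was dropped, so the worker has shut down
        debug!("sitemap worker is gone; dropping event");
    }
}

Each match arm would then end with something like enqueue(&state.task_queue_sender, EventType::ProductUpdated(product)).await.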
@@ -93,540 +83,126 @@
Ok(StatusCode::OK)
}

+// pub async fn write_xml(
+//     urls: Vec<Url>,
+//     state: &AppState,
+//     type_group: XmlDataType,
+// ) -> anyhow::Result<()> {
+//     //Acquire lock first, so only one write_xml function can start computing
+//     let mut f = File::options()
+//         .create(true)
+//         .write(true)
+//         .open(format!(
+//             "{}/sitemap-{:?}-0.xml",
+//             state.sitemap_config.target_folder, type_group
+//         ))
+//         .await?;
+//     let mut sitemap_urls: Vec<Url> = vec![];
+//     for url in urls.clone() {
+//         sitemap_urls.push(url);
+//     }
+//     let url_set: UrlSet = UrlSet::new(sitemap_urls)?;
+//     debug!("Writing xml into file");
+//
+//     //f.set_len(0)?;
+//     let mut buf = Vec::<u8>::new();
+//     url_set.write(&mut buf)?;
+//     //TODO: Gzip the buffer before testing size. Size limit per sitemap should be ~= 10mb
+//
+//     //now check if buffer's over limit, else slice em up into multiple sitemaps
+//     let len = buf.len() * std::mem::size_of::<u8>();
+//     if len > 200000 {
+//         let file_amount = (len as f32 / 150000_f32).ceil() as usize;
+//         let sliced_urls: Vec<&[Url]> = urls.chunks(file_amount).collect();
+//
+//         let mut sitemaps: Vec<UrlSet> = vec![];
+//         for urls in sliced_urls {
+//             for url in urls.iter().cloned() {
+//                 let sitemap_urls = vec![url];
+//                 sitemaps.push(UrlSet::new(sitemap_urls)?);
+//             }
+//         }
+//
+//         for (i, sitemap) in sitemaps.into_iter().enumerate() {
+//             let mut new_buf = Vec::<u8>::new();
+//             sitemap.write(&mut new_buf)?;
+//             let len = new_buf.len() * std::mem::size_of::<u8>();
+//             if len > 200000 {
+//                 error!("Sitemap is too big even after splitting. Gosh I wish I was better at math")
+//             }
+//             let mut f = File::options()
+//                 .create(true)
+//                 .write(true)
+//                 .open(format!(
+//                     "{}/sitemap-{:?}-{i}.xml",
+//                     state.sitemap_config.target_folder, type_group
+//                 ))
+//                 .await?;
+//             f.write_all(&new_buf).await?;
+//         }
+//     } else {
+//         f.write_all(&buf).await?;
+//     }
+//     //let mut gzip = GzEncoder::new(f, Compression::default());
+//     update_sitemap_index(state).await?;
+//     Ok(())
+// }
+//
+// async fn update_sitemap_index(state: &AppState) -> anyhow::Result<()> {
+//     use std::fs::read_dir;
+//     let dir = read_dir(&state.sitemap_config.target_folder)?;
+//     let paths = dir
+//         .filter_map(|f| f.ok())
+//         .map(|e| e.path())
+//         .filter_map(|path| {
+//             if path
+//                 .extension()
+//                 .map_or(false, |ext| ext == "xml" || ext == "gz")
+//                 && !path.to_string_lossy().to_string().contains("sitemap_index")
+//             {
+//                 Some(path)
+//             } else {
+//                 None
+//             }
+//         })
+//         .collect::<Vec<_>>();
+//
+//     let sitemaps: Vec<Sitemap> = paths
+//         .into_iter()
+//         .filter_map(|p| {
+//             if let Some(file_name) = p.file_name() {
+//                 Some(Sitemap::new(
+//                     format!(
+//                         "{}/{}",
+//                         state.sitemap_config.index_hostname,
+//                         file_name.to_string_lossy()
+//                     ),
+//                     p.metadata().map_or(None, |meta| {
+//                         meta.modified().map_or(None, |modified| {
+//                             let dt_utc: DateTime<Utc> = modified.into();
+//                             Some(dt_utc.fixed_offset())
+//                         })
+//                     }),
+//                 ))
+//             } else {
+//                 error!("file dissapeared or broke during sitemap_index construction");
+//                 None
+//             }
+//         })
+//         .collect::<Vec<_>>();
+//     let sitemap_index = SitemapIndex::new(sitemaps)?;
+//     let mut file = File::options()
+//         .create(true)
+//         .write(true)
+//         .open(format!(
+//             "{}/sitemap_index.xml",
+//             state.sitemap_config.target_folder
+//         ))
+//         .await?;
+//
+//     let mut buf = Vec::<u8>::new();
+//     sitemap_index.write(&mut buf)?;
+//     file.write_all(&buf).await?;
+//
+//     Ok(())
+// }

async fn update_sitemap_product(
product: ProductUpdated,
saleor_api_url: &str,
state: AppState,
) -> anyhow::Result<()> {
debug!("Product got changed!, {:?}", &product);
if let Some(product) = product.product {
// Update or add the product
let xml_cache = state.xml_cache.lock().await;
let mut xml_data = match xml_cache.get_all(saleor_api_url).await {
Ok(d) => d,
Err(e) => {
error!("Error, {:?}. no xml cache present?", e);
vec![]
}
};

//find the product in xml data and update / create it
let mut new_data = vec![];
let cloned_xml_data = xml_data.clone();
//debug!("{:?}", xml_data);
match xml_data
.iter_mut()
.find(|x| x.id == product.id && x.data_type == XmlDataType::Product)
{
Some(x) => {
//Check if the slug or category.slug has changed, else ignore the change and continue
debug!("{} == {}", x.slug, product.slug);
if x.slug == product.slug {
match &product.category {
Some(c) => {
if let Some(xml_c) = cloned_xml_data
.iter()
.find(|d| d.id == c.id && d.data_type == XmlDataType::Category)
{
if xml_c.slug == c.slug {
debug!("Products url didn't change, skipping...");
return Ok(());
}
}
}
None => {
debug!("Products url didn't change, skipping...");
return Ok(());
}
}
}
debug!(
"changed product {} found in xml_data, updating...",
product.slug
);
x.slug.clone_from(&product.slug);
x.relations = match &product.category {
Some(c) => vec![c.id.clone()],
None => vec![],
};
x.last_modified = chrono::offset::Utc::now().fixed_offset();
}
None => {
debug!(
"changed product {} not found in xml_data, adding...",
product.slug
);
new_data.push(XmlData {
last_modified: chrono::offset::Utc::now().fixed_offset(),
relations: match &product.category {
Some(c) => {
vec![c.id.clone()]
}
None => vec![],
},
id: product.id.clone(),
data_type: XmlDataType::Product,
slug: product.slug.clone(),
});
}
};
//See if produts category exists
if let Some(c) = &product.category {
if let Some(xml_cat) = xml_data
.iter_mut()
.find(|x| x.id == c.id && x.data_type == XmlDataType::Category)
{
xml_cat.slug.clone_from(&c.slug);
xml_cat.last_modified = chrono::offset::Utc::now().fixed_offset();
// If the category exists but product isn't in relation to it yet,
// add it
if !xml_cat.relations.iter().any(|c| *c == product.id) {
xml_cat.relations.push(product.id.clone());
}
//if cat isn't in xml data, add it
} else {
new_data.push(XmlData {
last_modified: chrono::offset::Utc::now().fixed_offset(),
id: c.id.clone(),
slug: c.slug.clone(),
data_type: XmlDataType::Category,
relations: vec![product.id.clone()],
})
}
}
xml_data.append(&mut new_data);
//create urls
let mut urls = vec![];
for x in xml_data.iter() {
if x.data_type == XmlDataType::Product {
let mut tt = TinyTemplate::new();
tt.add_template("product_url", &state.sitemap_config.product_template)?;
let context = ProductUpdated {
product: Some(Product {
id: x.id.clone(),
slug: x.slug.clone(),
category: match xml_data.iter().find(|all| {
x.relations
.iter()
.any(|rel| all.id == *rel && all.data_type == XmlDataType::Category)
}) {
Some(c) => Some(Category {
slug: c.slug.clone(),
id: c.id.clone(),
}),
None => Some(Category {
slug: "unknown".to_owned(), // }
id: cynic::Id::new("unknown".to_owned()),
}),
},
}),
};
urls.push(
Url::builder(tt.render("product_url", &context)?)
.last_modified(x.last_modified)
.build()?,
);
}
}
//debug!("new urls:{:?}", &urls);
write_xml(urls, &state, XmlDataType::Product).await?;
xml_cache.set(xml_data, saleor_api_url).await?;
} else {
error!("Failed to update product, e: {:?}", product);
anyhow::bail!("product not present in in webhook");
}
info!("Sitemap updated, cause: product");
Ok(())
}
async fn update_sitemap_category(
category: CategoryUpdated,
saleor_api_url: &str,
state: AppState,
) -> anyhow::Result<()> {
if let Some(category) = category.category {
let xml_cache = state.xml_cache.lock().await;
let mut xml_data = xml_cache.get_all(saleor_api_url).await?;
let mut affected_product_ids = vec![];
let mut new_xml_data = vec![];
//check if template of product includes categories in url
let is_category_in_product_url = state.sitemap_config.product_template.contains("category");
match xml_data
.iter_mut()
.find(|c| c.id == category.id && c.data_type == XmlDataType::Category)
{
Some(xml_c) => {
// if it changed, update
if xml_c.slug == category.slug {
debug!("Category url didn't change, skipping...");
return Ok(());
}
debug!("Category url changed, updating...");
xml_c.slug.clone_from(&category.slug);
xml_c.last_modified = chrono::offset::Utc::now().fixed_offset();
if is_category_in_product_url {
debug!("{} products affected by change", affected_product_ids.len());
affected_product_ids.append(&mut xml_c.relations.clone());
}
}
None => {
//Add category if it doesn't exist
debug!("Category not found in cache, adding...");
new_xml_data.push(XmlData {
relations: vec![],
last_modified: chrono::offset::Utc::now().fixed_offset(),
data_type: XmlDataType::Category,
slug: category.slug.clone(),
id: category.id.clone(),
})
}
}
//update affected products' last_modified
if is_category_in_product_url {
for prod_id in affected_product_ids {
if let Some(xml_prod) = xml_data
.iter_mut()
.find(|p| p.id == prod_id && p.data_type == XmlDataType::Product)
{
match xml_prod.relations.iter().find(|c| *c == &category.id) {
Some(_) => {
xml_prod.last_modified = chrono::offset::Utc::now().fixed_offset();
}
None => {
debug!("product in categories relation doesn't have the same relation back, what happened? Fixing...");
xml_prod.relations = vec![category.id.clone()];
xml_prod.last_modified = chrono::offset::Utc::now().fixed_offset();
}
};
}
}
}
xml_data.append(&mut new_xml_data);
let mut category_urls = vec![];
let mut product_urls = vec![];
//Create urls
for x in xml_data.iter() {
let mut tt = TinyTemplate::new();
if is_category_in_product_url && x.data_type == XmlDataType::Product {
tt.add_template("product_url", &state.sitemap_config.product_template)?;
//If current xml products category is this changed category, just use that instead
//of searching for it again
let context = ProductUpdated {
product: match x.relations.iter().find(|c| *c == &category.id) {
Some(_) => Some(Product {
id: x.id.clone(),
slug: x.slug.clone(),
category: Some(Category {
slug: category.slug.clone(),
id: category.id.clone(),
}),
}),
None => Some(Product {
id: x.id.clone(),
slug: x.slug.clone(),
category: match xml_data.iter().find(|all| {
x.relations.iter().any(|rel| {
all.id == *rel && all.data_type == XmlDataType::Category
})
}) {
Some(c) => Some(Category {
slug: c.slug.clone(),
id: c.id.clone(),
}),
None => Some(Category {
slug: "unknown".to_owned(),
id: cynic::Id::new("unknown".to_owned()),
}),
},
}),
},
};
product_urls.push(
Url::builder(tt.render("product_url", &context)?)
.last_modified(x.last_modified)
.build()?,
);
}
if x.data_type == XmlDataType::Category {
tt.add_template("category_url", &state.sitemap_config.category_template)?;
let context = CategoryUpdated {
category: Some(Category2 {
id: x.id.clone(),
slug: x.slug.clone(),
}),
};
category_urls.push(
Url::builder(tt.render("category_url", &context)?)
.last_modified(x.last_modified)
.build()?,
);
}
}
//and write
if is_category_in_product_url {
write_xml(product_urls, &state, XmlDataType::Product).await?;
}
write_xml(category_urls, &state, XmlDataType::Category).await?;
xml_cache.set(xml_data, saleor_api_url).await?;
} else {
error!("Failed to update category, e:{:?}", category);
anyhow::bail!("Category not present in webhook");
}
info!("Sitemap updated, cause: category");
Ok(())
}
async fn update_sitemap_collection(
collection: CollectionUpdated,
saleor_api_url: &str,
state: AppState,
) -> anyhow::Result<()> {
if let Some(collection) = collection.collection {
let xml_cache = state.xml_cache.lock().await;
let mut xml_data = xml_cache.get_all(saleor_api_url).await?;
let mut new_xml_data = vec![];
match xml_data
.iter_mut()
.find(|c| c.id == collection.id && c.data_type == XmlDataType::Collection)
{
Some(xml_col) => {
if xml_col.slug == collection.slug {
debug!("Collection url didn't change, skipping");
return Ok(());
}
xml_col.slug = collection.slug;
xml_col.last_modified = chrono::offset::Utc::now().fixed_offset();
}
None => {
debug!("Collection not cached, adding...");
new_xml_data.push(XmlData {
slug: collection.slug,
id: collection.id,
last_modified: chrono::offset::Utc::now().fixed_offset(),
relations: vec![],
data_type: XmlDataType::Collection,
})
}
}
xml_data.append(&mut new_xml_data);
//create urls
let mut collection_urls = vec![];
for xml_col in xml_data.iter() {
if xml_col.data_type == XmlDataType::Collection {
let mut tt = TinyTemplate::new();
tt.add_template("collection_url", &state.sitemap_config.collection_template)?;
let context = CollectionUpdated {
collection: Some(Collection {
slug: xml_col.slug.clone(),
id: xml_col.id.clone(),
}),
};
collection_urls.push(
Url::builder(tt.render("collection_url", &context)?)
.last_modified(xml_col.last_modified)
.build()?,
);
}
}
write_xml(collection_urls, &state, XmlDataType::Collection).await?;
xml_cache.set(xml_data, saleor_api_url).await?;
} else {
error!("Failed to update collection, e:{:?}", collection);
anyhow::bail!("Collection not present in webhook");
}
info!("Sitemap updated, cause: collection");
Ok(())
}
async fn update_sitemap_page(
page: PageUpdated,
saleor_api_url: &str,
state: AppState,
) -> anyhow::Result<()> {
if let Some(page) = page.page {
let xml_cache = state.xml_cache.lock().await;
let mut xml_data = xml_cache.get_all(saleor_api_url).await?;
let mut new_xml_data = vec![];
match xml_data
.iter_mut()
.find(|p| p.id == page.id && p.data_type == XmlDataType::Page)
{
Some(xml_page) => {
if xml_page.slug == page.slug {
debug!("Page url didn't change, skipping");
return Ok(());
}
xml_page.slug = page.slug;
xml_page.last_modified = chrono::offset::Utc::now().fixed_offset();
}
None => {
debug!("Page not cached, adding...");
new_xml_data.push(XmlData {
slug: page.slug,
id: page.id,
last_modified: chrono::offset::Utc::now().fixed_offset(),
relations: vec![],
data_type: XmlDataType::Page,
})
}
}
xml_data.append(&mut new_xml_data);
//create urls
let mut page_urls = vec![];
for xml_page in xml_data.iter() {
if xml_page.data_type == XmlDataType::Page {
let mut tt = TinyTemplate::new();
tt.add_template("page_url", &state.sitemap_config.pages_template)?;
let context = PageUpdated {
page: Some(Page {
slug: xml_page.slug.clone(),
id: xml_page.id.clone(),
}),
};
page_urls.push(
Url::builder(tt.render("page_url", &context)?)
.last_modified(xml_page.last_modified)
.build()?,
);
}
}
write_xml(page_urls, &state, XmlDataType::Page).await?;
xml_cache.set(xml_data, saleor_api_url).await?;
} else {
error!("Failed to update Page, e:{:?}", page);
anyhow::bail!("Page not present in webhook");
}
info!("Sitemap updated, cause: Page");
Ok(())
}
pub async fn write_xml(
urls: Vec<Url>,
state: &AppState,
type_group: XmlDataType,
) -> anyhow::Result<()> {
//Write everything into one file first; if the buffer ends up over the size limit, it gets split below
let mut f = File::options()
.create(true)
.write(true)
.truncate(true)
.open(format!(
"{}/sitemap-{:?}-0.xml",
state.sitemap_config.target_folder, type_group
))
.await?;
let url_set: UrlSet = UrlSet::new(urls.clone())?;
debug!("Writing xml into file");
let mut buf = Vec::<u8>::new();
url_set.write(&mut buf)?;
//TODO: Gzip the buffer before testing size. Size limit per sitemap should be ~= 10mb
//If the buffer is over the limit, slice the urls up into multiple sitemaps
let len = buf.len();
if len > 200_000 {
//ceil() so the last, partially filled file still counts
let file_amount = (len as f32 / 150_000_f32).ceil() as usize;
//chunks() takes a chunk size, not a chunk count, so work out urls-per-file first
let chunk_size = urls.len().div_ceil(file_amount);
let mut sitemaps: Vec<UrlSet> = vec![];
for chunk in urls.chunks(chunk_size) {
sitemaps.push(UrlSet::new(chunk.to_vec())?);
}
for (i, sitemap) in sitemaps.into_iter().enumerate() {
let mut new_buf = Vec::<u8>::new();
sitemap.write(&mut new_buf)?;
let len = new_buf.len();
if len > 200_000 {
error!("Sitemap is too big even after splitting");
}
let mut f = File::options()
.create(true)
.write(true)
.truncate(true)
.open(format!(
"{}/sitemap-{:?}-{i}.xml",
state.sitemap_config.target_folder, type_group
))
.await?;
f.write_all(&new_buf).await?;
}
} else {
f.write_all(&buf).await?;
}
//let mut gzip = GzEncoder::new(f, Compression::default());
update_sitemap_index(state).await?;
Ok(())
}
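//A sketch for the gzip TODO in write_xml, using the flate2 crate that is
//already a dependency. Assumption (per the commented-out GzEncoder line): the
//plan is to compress the buffer first and size-check the compressed bytes.
#[allow(dead_code)]
fn gzip(buf: &[u8]) -> std::io::Result<Vec<u8>> {
use flate2::{write::GzEncoder, Compression};
use std::io::Write;
//Compress into an in-memory Vec so the caller can check len() before writing
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
encoder.write_all(buf)?;
encoder.finish()
}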
async fn update_sitemap_index(state: &AppState) -> anyhow::Result<()> {
use std::fs::read_dir;
let dir = read_dir(&state.sitemap_config.target_folder)?;
let paths = dir
.filter_map(|f| f.ok())
.map(|e| e.path())
.filter_map(|path| {
if path
.extension()
.map_or(false, |ext| ext == "xml" || ext == "gz")
&& !path.to_string_lossy().to_string().contains("sitemap_index")
{
Some(path)
} else {
None
}
})
.collect::<Vec<_>>();
let sitemaps: Vec<Sitemap> = paths
.into_iter()
.filter_map(|p| {
if let Some(file_name) = p.file_name() {
Some(Sitemap::new(
format!(
"{}/{}",
state.sitemap_config.index_hostname,
file_name.to_string_lossy()
),
p.metadata().ok().and_then(|meta| {
meta.modified().ok().map(|modified| {
let dt_utc: DateTime<Utc> = modified.into();
dt_utc.fixed_offset()
})
}),
))
} else {
error!("file dissapeared or broke during sitemap_index construction");
None
}
})
.collect::<Vec<_>>();
let sitemap_index = SitemapIndex::new(sitemaps)?;
let mut file = File::options()
.create(true)
.write(true)
.truncate(true)
.open(format!(
"{}/sitemap_index.xml",
state.sitemap_config.target_folder
))
.await?;
let mut buf = Vec::<u8>::new();
sitemap_index.write(&mut buf)?;
file.write_all(&buf).await?;
Ok(())
}
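//For orientation, the generated index follows the standard sitemapindex shape;
//hostname and file names depend on config, lastmod values are illustrative:
//
//  <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
//      <sitemap>
//          <loc>https://example.com/sitemap-Product-0.xml</loc>
//          <lastmod>2024-07-03T16:07:04+02:00</lastmod>
//      </sitemap>
//  </sitemapindex>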


@ -0,0 +1,60 @@
use rayon::prelude::*;
use std::{
fs::{read_dir, File},
io::BufReader,
};
use crate::queries::event_subjects_updated::Event;
use tokio::{sync::mpsc::Receiver, task::JoinHandle};
use tracing::warn;
use super::UrlSet;
pub struct EventHandler {
receiver: Receiver<Event>,
}
impl EventHandler {
pub fn start(receiver: Receiver<Event>) -> JoinHandle<()> {
let s = Self { receiver };
tokio::spawn(s.listen())
}
async fn listen(mut self) {
while let Some(message) = self.receiver.recv().await {
match message {
//TODO: handle each event; underscore bindings silence unused warnings for now
Event::ProductCreated(_product) => {}
Event::ProductUpdated(_product) => {}
Event::ProductDeleted(_product) => {}
Event::CategoryCreated(_category) => {}
Event::CategoryUpdated(_category) => {}
Event::CategoryDeleted(_category) => {}
Event::CollectionCreated(_collection) => {}
Event::CollectionUpdated(_collection) => {}
Event::CollectionDeleted(_collection) => {}
Event::PageCreated(_page) => {}
Event::PageUpdated(_page) => {}
Event::PageDeleted(_page) => {}
Event::Unknown => warn!("Unknown event called"),
}
}
}
}
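//Hypothetical wiring sketch (not part of this commit): the webhook route would
//keep the Sender half and forward parsed events; capacity 100 is an arbitrary pick.
#[allow(dead_code)]
async fn wiring_sketch() {
let (sender, receiver) = tokio::sync::mpsc::channel::<Event>(100);
let _handle = EventHandler::start(receiver);
sender.send(Event::Unknown).await.ok();
}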
async fn read_xmls() -> Vec<UrlSet> {
let paths = read_dir(std::env::var("SITEMAP_TARGET_FOLDER").unwrap()).unwrap();
//Parse every sitemap file in the target folder in parallel via rayon
let all_urls: Vec<UrlSet> = paths
.par_bridge()
.filter_map(|path| {
if let Ok(path) = path {
if path.path().is_file() {
let file = File::open(path.path()).expect("Unable to open file");
let reader = BufReader::new(file);
return Some(quick_xml::de::from_reader(reader).unwrap());
}
}
None
})
.collect();
all_urls
}


@ -0,0 +1,110 @@
mod category;
mod collection;
mod event_handler;
mod page;
mod product;
use chrono::{DateTime, FixedOffset, SubsecRound};
use quick_xml::DeError;
use serde::{Deserialize, Serialize};
const SITEMAP_XMLNS: &str = "http://www.sitemaps.org/schemas/sitemap/0.9";
const SALEOR_REF_XMLNS: &str = "http://app-sitemap-generator.kremik.sk/xml-schemas/saleor-ref.xsd";
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
#[serde(rename = "urlset")]
pub struct UrlSet {
#[serde(rename = "@xmlns:saleor")]
xmlns_saleor: String,
#[serde(rename = "@xmlns")]
xmlns: String,
pub url: Vec<Url>,
}
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
pub struct Url {
pub loc: String,
pub lastmod: DateTime<FixedOffset>,
#[serde(rename = "saleor:ref")]
pub saleor_ref: SaleorRef,
}
pub enum RefType {
Product,
}
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
pub struct SaleorRef {
#[serde(rename = "saleor:product-id")]
#[serde(skip_serializing_if = "Option::is_none")]
pub product_id: Option<String>,
#[serde(rename = "saleor:category-id")]
#[serde(skip_serializing_if = "Option::is_none")]
pub category_id: Option<String>,
}
impl UrlSet {
/**
Includes the xml version header
*/
pub fn to_file(&self) -> Result<String, DeError> {
let init = quick_xml::se::to_string(self)?;
Ok(r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string() + "\n" + &init)
}
/**
Sets the static xmlns defaults; the saleor schema url is derived from APP_API_BASE_URL
*/
pub fn new() -> Self {
let mut base_url = std::env::var("APP_API_BASE_URL").unwrap();
//XML namespace URIs are plain identifiers and conventionally use http, not https
if base_url.starts_with("https") {
base_url = base_url.replacen("https", "http", 1);
}
//A trailing / would break the schema path below
if base_url.ends_with('/') {
base_url.pop();
}
let xmlns_saleor = format!("{base_url}/schemas/saleor-ref.xsd");
Self {
xmlns: SITEMAP_XMLNS.to_string(),
xmlns_saleor,
url: vec![],
}
}
}
impl Url {
pub fn new_generic_url(id: String, slug: String) -> Self {
Self {
saleor_ref: SaleorRef {
product_id: None,
category_id: Some(id),
},
lastmod: chrono::offset::Utc::now().fixed_offset().round_subsecs(1),
// TODO: have the configured template string determine the url
loc: format!("https://example.com/{slug}"),
}
}
pub fn new_product_url(
category_id: String,
product_id: String,
category_slug: String,
product_slug: String,
) -> Self {
Self {
// TODO: have the configured template string determine the url
loc: format!("https://example.com/{category_slug}/{product_slug}"),
lastmod: chrono::offset::Utc::now().fixed_offset().round_subsecs(1),
saleor_ref: SaleorRef {
product_id: Some(product_id),
category_id: Some(category_id),
},
}
}
}
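//For orientation, one product url from this module serialises roughly as below
//(lastmod is runtime-dependent; quick_xml does not pretty-print):
//
//  <url>
//      <loc>https://example.com/category1/product1</loc>
//      <lastmod>2024-07-03T16:07:04+02:00</lastmod>
//      <saleor:ref>
//          <saleor:product-id>product1coolid</saleor:product-id>
//          <saleor:category-id>category1coolid</saleor:category-id>
//      </saleor:ref>
//  </url>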


@ -0,0 +1,28 @@
#[cfg(test)]
mod test {
use crate::sitemap::{Url, UrlSet};
#[test]
fn urlset_serialisation_isnt_lossy() {
//UrlSet::new() reads APP_API_BASE_URL, so set it for the test
std::env::set_var("APP_API_BASE_URL", "http://localhost:3000");
let mut url_set = UrlSet::new();
url_set.url.append(&mut vec![
Url::new_generic_url("category1coolid".to_string(), "category1".to_string()),
Url::new_generic_url("category2coolid".to_string(), "category2".to_string()),
Url::new_product_url(
"category1coolid".to_string(),
"product1coolid".to_string(),
"category1".to_string(),
"product1".to_string(),
),
Url::new_product_url(
"category2coolid".to_string(),
"product2coolid".to_string(),
"category2".to_string(),
"product2".to_string(),
),
]);
let file_str = url_set.to_file().unwrap();
let deserialized_url_set: UrlSet = quick_xml::de::from_str(&file_str).unwrap();
assert_eq!(url_set, deserialized_url_set);
}
}