add regeneration to sitemap, other fixes

This commit is contained in:
Djkáťo 2024-03-09 19:59:15 +01:00
parent 2402c573ac
commit ee34b1bf22
20 changed files with 1608 additions and 464 deletions

.tokeinore (new file)

@@ -0,0 +1,6 @@
/target
temp
**/*.graphql
app-template/src/schema.graphql
PolyForm-Noncommercial-1.0.0.md
app logo template.xcf

Cargo.lock (generated)

File diff suppressed because it is too large.


@@ -1,22 +1,26 @@
[workspace]
-members = ["sdk", "app-template", "sitemap-generator"]
members = ["sdk", "app-template", "sitemap-generator", "simple-payment-gateway"]
resolver = "2"

[workspace.dependencies]
anyhow = "1.0.79"
-cynic = {version="3.4.3", features = ["http-surf"]}
cynic = { version = "3.4.3", features = ["http-surf"] }
surf = "2.3.2"
serde = "1.0.196"
serde_json = "1.0.113"
-tokio = {version = "1.36.0", features = ["full"]}
tokio = { version = "1.36.0", features = ["full"] }
-redis = { version = "0.23.0", features = ["aio", "tokio-comp", "connection-manager"] }
redis = { version = "0.23.0", features = [
    "aio",
    "tokio-comp",
    "connection-manager",
] }
envy = "0.4.2"
tracing = "0.1.40"
tracing-serde = "0.1.3"
-tracing-subscriber = { version = "0.3.18" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
dotenvy = "0.15.7"
axum = "0.7.4"
-saleor-app-sdk = {path = "sdk"}
saleor-app-sdk = { path = "sdk" }
tower = { version = "0.4.13", features = ["util"] }
tower-http = { version = "0.5.2", features = ["fs", "trace"] }
-cynic-codegen= "3.4.3"
cynic-codegen = "3.4.3"


@@ -1,9 +1,11 @@
use axum::{
    handler::HandlerWithoutStateExt,
    http::StatusCode,
    middleware,
    routing::{get, post},
    Router,
};
use saleor_app_sdk::middleware::verify_webhook_signature::webhook_signature_verifier;
use tower_http::services::ServeDir;

use crate::app::AppState;

@@ -23,7 +25,9 @@ pub fn create_routes(state: AppState) -> Router {
    let serve_dir = ServeDir::new("saleor-app-template/public").not_found_service(service);

    Router::new()
        .layer(middleware::from_fn(webhook_signature_verifier))
        //handles just path, eg. localhost:3000/
        .route("/api/webhooks", post(webhooks))
        .route(
            "/",
            get(|| async { "Your app got installed successfully!" }),

@@ -32,6 +36,5 @@ pub fn create_routes(state: AppState) -> Router {
        .fallback_service(serve_dir)
        .route("/api/manifest", get(manifest))
        .route("/api/register", post(register))
-        .route("/api/webhooks", post(webhooks))
        .with_state(state)
}
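A note on ordering here: in axum, `.layer(...)` wraps only the routes that were added to the `Router` *before* the call, so where the webhook route sits relative to the layer decides whether the signature check actually runs on it (in this hunk the layer precedes the route; the sitemap-generator version below puts the route first). A minimal sketch of scoping a middleware to a single route, with placeholder handlers that are not part of this commit:

```rust
use axum::{
    extract::Request,
    middleware::{self, Next},
    response::Response,
    routing::post,
    Router,
};

// Placeholder handlers, for illustration only.
async fn webhooks() -> &'static str {
    "ok"
}
async fn manifest() -> &'static str {
    "manifest"
}

// Pass-through middleware standing in for webhook_signature_verifier.
async fn verifier(request: Request, next: Next) -> Response {
    next.run(request).await
}

pub fn create_routes() -> Router {
    Router::new()
        // Added before the layer, so the verifier wraps it...
        .route("/api/webhooks", post(webhooks))
        .layer(middleware::from_fn(verifier))
        // ...added after the layer, so the verifier does not run for it.
        .route("/api/manifest", post(manifest))
}
```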


@@ -5,7 +5,7 @@ version = "0.1.0"
edition = "2021"
description = "Unofficial Saleor App SDK like library, made to work with rust."
keywords = ["saleor", "sdk", "plugin"]
-categories = [ "api-bindings", "web-programming::http-server"]
categories = ["api-bindings", "web-programming::http-server"]
homepage = "https://github.com/djkato/saleor-app-rs-template"
repository = "https://github.com/djkato/saleor-app-rs-template"
documentation = "https://github.com/djkato/saleor-app-rs-template"

@@ -13,18 +13,23 @@ license = "MIT OR Apache-2.0"
[dependencies]
anyhow.workspace = true
-redis = { workspace=true, features = ["aio", "tokio-comp", "connection-manager"] }
redis = { workspace = true, features = [
    "aio",
    "tokio-comp",
    "connection-manager",
] }
serde.workspace = true
axum.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
serde_json.workspace = true
envy.workspace = true
dotenvy.workspace = true
-async-trait = "0.1.77"
-jose-jwk = "0.1.2"
tower = { workspace = true }
-jose-jws = "0.1.2"
reqwest = { version = "0.11.24", features = ["json"] }
jsonwebtoken = "9.2.0"
async-trait = "0.1.77"
http = "1.0.0"
-jose-b64 = {version = "0.1.2", features =["serde"] }
url = "2.5.0"
strum = "0.26.0"
strum_macros = "0.26.1"


@@ -1,4 +1,82 @@
-/*
use axum::{body, extract::Request, http::StatusCode, middleware::Next, response::Response};
use serde_json::Value;

use jsonwebtoken::{crypto, Algorithm, DecodingKey};
use tracing::{debug, error};

use crate::headers::{SALEOR_API_URL_HEADER, SALEOR_SIGNATURE_HEADER};

pub async fn webhook_signature_verifier(request: Request, next: Next) -> Response {
    let unauthorized = Response::builder()
        .status(StatusCode::UNAUTHORIZED)
        .body(body::Body::from("Not authenticated\n"))
        .unwrap();

    let jwks_url = request
        .headers()
        .get(SALEOR_API_URL_HEADER)
        .map_or(None, |h| {
            h.to_str()
                .map_or(None, |h| url::Url::parse(h).map_or(None, |h| Some(h)))
        });

    //get jwk from saleor api
    let jwks: Value = 'block: {
        if let Some(mut jwks_url) = jwks_url {
            jwks_url.set_path("/.well-known/jwks.json");
            if let Ok(get_res) = reqwest::get(jwks_url).await {
                if let Ok(val) = get_res.json::<Value>().await {
                    break 'block val;
                }
            }
        }
        error!("Saleor webhook signature not verified, failed fetching jwks from saleor");
        return unauthorized;
    };

    let nstr = jwks["keys"][0]["n"].as_str().unwrap();
    let estr = jwks["keys"][0]["e"].as_str().unwrap();
    let pubkey = DecodingKey::from_rsa_components(&nstr, &estr).unwrap();

    let (parts, body) = request.into_parts();
    let payload = body::to_bytes(body, usize::MAX).await.unwrap();

    if let Some(is_verified) = parts
        .headers
        .get(SALEOR_SIGNATURE_HEADER)
        .and_then(|sig| sig.to_str().ok())
        .and_then(|sig| {
            let parts: Vec<&str> = sig.split('.').collect();
            match parts.as_slice() {
                [protected, _, signature] => Some((*protected, *signature)),
                _ => None,
            }
        })
        .and_then(|(protected, signature)| {
            let mut msg: Vec<u8> = Vec::new();
            msg.extend_from_slice(format!("{}.", protected).as_bytes());
            msg.extend_from_slice(&payload);
            crypto::verify(signature, &msg, &pubkey, Algorithm::RS256).ok()
        })
    {
        match is_verified {
            true => {
                debug!("Saleor webhook signature verified");
                next.run(Request::from_parts(parts, payload.into())).await
            }
            false => {
                error!("Saleor webhook signature not correct");
                unauthorized
            }
        }
    } else {
        error!("Saleor webhook signature not verified, error parsing headers");
        unauthorized
    }
}

/* OLD
use http::{Request, Response};
use std::task::{Context, Poll};
use tower::Service;
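For reference, the value in the saleor-signature header is a detached JWS of the form `<protected>..<signature>`, with the raw request body standing in for the payload segment, which is what the split-and-concatenate dance above implements. A condensed sketch of the same check outside axum, using the `jsonwebtoken` calls this commit introduces (the RSA components `n`/`e` are assumed to come from the first key of the fetched JWKS):

```rust
use jsonwebtoken::{crypto, Algorithm, DecodingKey};

// Returns true when `sig_header` ("<protected>..<signature>") matches `body`
// under the RSA key given by its JWK components `n` and `e`.
fn verify_saleor_signature(sig_header: &str, body: &[u8], n: &str, e: &str) -> bool {
    let Some((protected, signature)) = sig_header.split_once("..") else {
        return false;
    };
    let Ok(key) = DecodingKey::from_rsa_components(n, e) else {
        return false;
    };
    // The JWS signing input is "<protected>.<payload>"; with an unencoded
    // detached payload, the raw body fills the payload slot.
    let mut message = Vec::from(protected.as_bytes());
    message.push(b'.');
    message.extend_from_slice(body);
    crypto::verify(signature, &message, &key, Algorithm::RS256).unwrap_or(false)
}
```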


@@ -0,0 +1,6 @@
[package]
name = "simple-payment-gateway"
version = "0.1.0"
edition = "2021"

[dependencies]


@@ -0,0 +1,3 @@
fn main() {
    println!("Hello, world!");
}


@@ -6,11 +6,12 @@ use axum::{
use chrono::{DateTime, FixedOffset};
use fd_lock::RwLock;
use std::{fs::File, sync::Arc, time::Duration};
use tracing_subscriber::EnvFilter;

use redis::{AsyncCommands, Client, RedisError};
use saleor_app_sdk::{config::Config, manifest::AppManifest, SaleorApp};
use serde::{Deserialize, Serialize};
-use tracing::{debug, info};
use tracing::{debug, info, level_filters::LevelFilter};

// Make our own error that wraps `anyhow::Error`.
pub struct AppError(anyhow::Error);

@@ -37,9 +38,20 @@ where
}

pub fn trace_to_std(config: &Config) {
    let filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::DEBUG.into())
        .from_env()
        .unwrap()
        .add_directive(
            format!("{}={}", env!("CARGO_PKG_NAME"), config.log_level)
                .parse()
                .unwrap(),
        );
    tracing_subscriber::fmt()
-        .with_max_level(config.log_level)
-        .with_target(false)
        .with_env_filter(filter)
        .with_target(true)
        .compact()
        .init();
}
} }
@@ -49,12 +61,7 @@ pub fn trace_to_std(config: &Config) {
*/
#[derive(Debug, Clone)]
pub struct AppState {
-    pub sitemap_file_products: Vec<Arc<RwLock<File>>>,
-    pub sitemap_file_categories: Vec<Arc<RwLock<File>>>,
-    pub sitemap_file_collections: Vec<Arc<RwLock<File>>>,
-    pub sitemap_file_pages: Vec<Arc<RwLock<File>>>,
-    pub sitemap_file_index: Arc<RwLock<File>>,
-    pub xml_cache: XmlCache,
    pub xml_cache: Arc<tokio::sync::Mutex<XmlCache>>,
    pub saleor_app: Arc<tokio::sync::Mutex<SaleorApp>>,
    pub config: Config,
    pub sitemap_config: SitemapConfig,
@@ -71,6 +78,8 @@ pub struct SitemapConfig {
    pub category_template: String,
    #[serde(rename = "sitemap_pages_template")]
    pub pages_template: String,
    #[serde(rename = "sitemap_collection_template")]
    pub collection_template: String,
    #[serde(rename = "sitemap_index_hostname")]
    pub index_hostname: String,
}
@@ -83,7 +92,7 @@ impl SitemapConfig {
    }
}

-#[derive(Debug, Clone)]
#[derive(Debug)]
pub struct XmlCache {
    client: Client,
    app_api_base_url: String,
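The switch from a plain `XmlCache` field to `Arc<tokio::sync::Mutex<XmlCache>>` (and the dropped `Clone` derive) is what lets the webhook handlers and the spawned regeneration task share one cache from a cloned `AppState`. A minimal sketch of the pattern, with the cache reduced to a stub:

```rust
use std::sync::Arc;
use tokio::sync::Mutex;

#[derive(Debug, Default)]
struct XmlCacheStub {
    entries: Vec<String>,
}

#[tokio::main]
async fn main() {
    let cache = Arc::new(Mutex::new(XmlCacheStub::default()));

    // Each task clones the Arc; the Mutex serialises access to the cache.
    let writer = Arc::clone(&cache);
    let handle = tokio::spawn(async move {
        writer.lock().await.entries.push("sitemap entry".to_owned());
    });

    handle.await.expect("task panicked");
    assert_eq!(cache.lock().await.entries.len(), 1);
}
```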


@@ -1,18 +1,19 @@
#![feature(let_chains)]
#![deny(clippy::unwrap_used, clippy::expect_used)]
mod app;
mod queries;
mod routes;

use anyhow::Context;
-use fd_lock::RwLock;
use saleor_app_sdk::{
    config::Config,
    manifest::{AppManifest, AppPermission},
    webhooks::{AsyncWebhookEventType, WebhookManifest},
    SaleorApp,
};
-use std::{fs::File, sync::Arc};
use std::sync::Arc;
use tokio::sync::Mutex;
-use tracing::debug;
use tracing::{debug, info};

use crate::{
    app::{trace_to_std, AppState, SitemapConfig, XmlCache},

@@ -57,27 +58,24 @@ async fn main() -> anyhow::Result<()> {
        )
        .build();
    debug!("Created AppManifest...");
    debug!("{}/sitemap_index.xml.gz", sitemap_config.target_folder);
    let app_state = AppState {
-        sitemap_file_index: Arc::new(RwLock::new(File::options().write(true).create(true).open(
-            format!("{}/sitemap_index.xml", sitemap_config.target_folder),
-        )?)),
-        sitemap_file_products: vec![],
-        sitemap_file_categories: vec![],
-        sitemap_file_collections: vec![],
-        sitemap_file_pages: vec![],
        sitemap_config,
-        xml_cache: XmlCache::new(&config.apl_url, &config.app_api_base_url)?,
        xml_cache: Arc::new(Mutex::new(XmlCache::new(
            &config.apl_url,
            &config.app_api_base_url,
        )?)),
        manifest: app_manifest,
        config: config.clone(),
        saleor_app: Arc::new(Mutex::new(saleor_app)),
    };
    debug!("Created AppState...");
-    app_state
-        .xml_cache
    {
        let xml_cache = app_state.xml_cache.lock().await;
        xml_cache
            .delete_all("http://localhost:8000/graphpl/")
            .await?;
        debug!("Cleared Xml Cache");
    }

    let app = create_routes(app_state);
    let listener = tokio::net::TcpListener::bind(

@@ -89,7 +87,7 @@ async fn main() -> anyhow::Result<()> {
        .context("APP_API_BASE_URL invalid format")?,
    )
    .await?;
-    tracing::debug!("listening on {}", listener.local_addr().unwrap());
    info!("listening on {}", listener.local_addr().unwrap());
    match axum::serve(listener, app).await {
        Ok(o) => Ok(o),
        Err(e) => anyhow::bail!(e),


@@ -78,18 +78,6 @@ subscription QueryProductsChanged {
fragment BaseCategory on Category {
  id
  slug
-  products(first: 100) {
-    pageInfo {
-      endCursor
-      hasNextPage
-    }
-    edges {
-      node {
-        id
-        slug
-      }
-    }
-  }
}

fragment BaseProduct on Product {

@@ -130,7 +118,7 @@ pub struct Product {
    pub category: Option<Category>,
}

-#[derive(cynic::QueryFragment, Debug)]
#[derive(cynic::QueryFragment, Debug, Serialize)]
pub struct PageUpdated {
    pub page: Option<Page>,
}

@@ -145,13 +133,13 @@ pub struct PageCreated {
    pub page: Option<Page>,
}

-#[derive(cynic::QueryFragment, Debug)]
#[derive(cynic::QueryFragment, Debug, Serialize)]
pub struct Page {
    pub slug: String,
    pub id: cynic::Id,
}

-#[derive(cynic::QueryFragment, Debug)]
#[derive(cynic::QueryFragment, Debug, Serialize)]
pub struct CollectionUpdated {
    pub collection: Option<Collection>,
}

@@ -166,7 +154,7 @@ pub struct CollectionCreated {
    pub collection: Option<Collection>,
}

-#[derive(cynic::QueryFragment, Debug)]
#[derive(cynic::QueryFragment, Debug, Serialize)]
pub struct Collection {
    pub id: cynic::Id,
    pub slug: String,

@@ -198,26 +186,6 @@ pub struct Category {
pub struct Category2 {
    pub id: cynic::Id,
    pub slug: String,
-    #[arguments(first: 100)]
-    pub products: Option<ProductCountableConnection>,
-}
-
-#[derive(cynic::QueryFragment, Debug, Serialize)]
-pub struct ProductCountableConnection {
-    pub page_info: PageInfo,
-    pub edges: Vec<ProductCountableEdge>,
-}
-
-#[derive(cynic::QueryFragment, Debug, Serialize)]
-pub struct ProductCountableEdge {
-    pub node: Product2,
-}
-
-#[derive(cynic::QueryFragment, Debug, Serialize)]
-#[cynic(graphql_type = "Product")]
-pub struct Product2 {
-    pub id: cynic::Id,
-    pub slug: String,
}

#[derive(cynic::QueryFragment, Debug, Serialize)]


@@ -0,0 +1,209 @@
#[cynic::schema("saleor")]
mod schema {}
pub struct CategorisedProduct {
pub product: Product,
pub category_id: cynic::Id,
}
/*
query getCategoriesInitial {
categories(first: 50) {
totalCount
pageInfo {
hasNextPage
endCursor
}
edges {
node {
updatedAt
id
slug
}
}
}
}
query getCategoriesNext($after: String) {
categories(first: 50, after: $after) {
pageInfo {
hasNextPage
endCursor
}
edges {
node {
updatedAt
id
slug
}
}
}
}
query getCategoryProductsInitial($id: ID!) {
category(id: $id) {
slug
id
updatedAt
products(first: 50) {
pageInfo {
hasNextPage
endCursor
}
edges {
node {
id
slug
updatedAt
}
}
totalCount
}
}
}
query getCategoryProductsNext($id: ID!, $after: String!) {
category(id: $id) {
products(first: 50, after: $after) {
pageInfo {
hasNextPage
endCursor
}
edges {
node {
id
slug
updatedAt
}
}
}
}
}
*/
#[derive(cynic::QueryVariables, Debug)]
pub struct GetCategoryProductsInitialVariables<'a> {
pub id: &'a cynic::Id,
}
#[derive(cynic::QueryVariables, Debug)]
pub struct GetCategoryProductsNextVariables<'a> {
pub after: &'a str,
pub id: &'a cynic::Id,
}
#[derive(cynic::QueryVariables, Debug)]
pub struct GetCategoriesNextVariables<'a> {
pub after: Option<&'a str>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(
graphql_type = "Query",
variables = "GetCategoryProductsInitialVariables"
)]
pub struct GetCategoryProductsInitial {
#[arguments(id: $id)]
pub category: Option<Category>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query", variables = "GetCategoryProductsNextVariables")]
pub struct GetCategoryProductsNext {
#[arguments(id: $id)]
pub category: Option<Category2>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query", variables = "GetCategoriesNextVariables")]
pub struct GetCategoriesNext {
#[arguments(first: 50, after: $after)]
pub categories: Option<CategoryCountableConnection>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query")]
pub struct GetCategoriesInitial {
#[arguments(first: 50)]
pub categories: Option<CategoryCountableConnection2>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "CategoryCountableConnection")]
pub struct CategoryCountableConnection2 {
pub total_count: Option<i32>,
pub page_info: PageInfo,
pub edges: Vec<CategoryCountableEdge>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct CategoryCountableConnection {
pub page_info: PageInfo,
pub edges: Vec<CategoryCountableEdge>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct CategoryCountableEdge {
pub node: Category3,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
#[cynic(graphql_type = "Category")]
pub struct Category3 {
pub updated_at: DateTime,
pub id: cynic::Id,
pub slug: String,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct Category {
pub slug: String,
pub id: cynic::Id,
pub updated_at: DateTime,
#[arguments(first: 50)]
pub products: Option<ProductCountableConnection>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct ProductCountableConnection {
pub page_info: PageInfo,
pub edges: Vec<ProductCountableEdge>,
pub total_count: Option<i32>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(
graphql_type = "Category",
variables = "GetCategoryProductsNextVariables"
)]
pub struct Category2 {
#[arguments(first: 50, after: $after)]
pub products: Option<ProductCountableConnection2>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "ProductCountableConnection")]
pub struct ProductCountableConnection2 {
pub page_info: PageInfo,
pub edges: Vec<ProductCountableEdge>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct ProductCountableEdge {
pub node: Product,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct Product {
pub id: cynic::Id,
pub slug: String,
pub updated_at: DateTime,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct PageInfo {
pub has_next_page: bool,
pub end_cursor: Option<String>,
}
#[derive(cynic::Scalar, Debug, Clone)]
pub struct DateTime(pub String);
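These fragments follow cynic's cursor-pagination shape: an initial query, then a `*Next` query re-issued with `endCursor` until `hasNextPage` comes back false. A condensed sketch of how register.rs drives them over surf (the endpoint value is hypothetical; the real version adds logging and richer error reporting):

```rust
use cynic::{http::SurfExt, QueryBuilder};

// Fetch every category by walking the cursor, mirroring get_all_categories.
async fn fetch_all_categories(api: &str) -> anyhow::Result<Vec<Category3>> {
    let mut all: Vec<Category3> = vec![];

    let first = surf::post(api)
        .run_graphql(GetCategoriesInitial::build(()))
        .await
        .map_err(|e| anyhow::anyhow!("initial query failed: {e}"))?;
    let Some(conn) = first.data.and_then(|d| d.categories) else {
        anyhow::bail!("initial query returned no categories");
    };
    all.extend(conn.edges.iter().map(|e| e.node.clone()));
    let mut cursor = conn.page_info.end_cursor;

    while let Some(after) = cursor {
        let next = surf::post(api)
            .run_graphql(GetCategoriesNext::build(GetCategoriesNextVariables {
                after: Some(after.as_str()),
            }))
            .await
            .map_err(|e| anyhow::anyhow!("next-page query failed: {e}"))?;
        let Some(conn) = next.data.and_then(|d| d.categories) else {
            anyhow::bail!("next-page query returned no categories");
        };
        all.extend(conn.edges.iter().map(|e| e.node.clone()));
        // Stop when the server reports no further page.
        cursor = match conn.page_info.has_next_page {
            true => conn.page_info.end_cursor,
            false => None,
        };
    }
    Ok(all)
}
```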


@@ -0,0 +1,52 @@
#[cynic::schema("saleor")]
mod schema {}
#[derive(cynic::QueryVariables, Debug)]
pub struct GetCollectionsNextVariables<'a> {
pub after: Option<&'a str>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query", variables = "GetCollectionsNextVariables")]
pub struct GetCollectionsNext {
#[arguments(first: 50, after: $after)]
pub collections: Option<CollectionCountableConnection>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query")]
pub struct GetCollectionsInitial {
#[arguments(first: 50)]
pub collections: Option<CollectionCountableConnection2>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "CollectionCountableConnection")]
pub struct CollectionCountableConnection2 {
pub total_count: Option<i32>,
pub page_info: PageInfo,
pub edges: Vec<CollectionCountableEdge>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct CollectionCountableConnection {
pub page_info: PageInfo,
pub edges: Vec<CollectionCountableEdge>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct CollectionCountableEdge {
pub node: Collection,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct PageInfo {
pub has_next_page: bool,
pub end_cursor: Option<String>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct Collection {
pub id: cynic::Id,
pub slug: String,
}


@@ -0,0 +1,91 @@
#[cynic::schema("saleor")]
mod schema {}
/*
query getPagesInitial {
pages(first: 50) {
totalCount
pageInfo {
hasNextPage
endCursor
}
edges {
node {
publishedAt
id
slug
}
}
}
}
query getPagesNext($after: String!) {
pages(first: 50, after: $after) {
pageInfo {
hasNextPage
endCursor
}
edges {
node {
publishedAt
id
slug
}
}
}
}
*/
#[derive(cynic::QueryVariables, Debug)]
pub struct GetPagesNextVariables<'a> {
pub after: &'a str,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query", variables = "GetPagesNextVariables")]
pub struct GetPagesNext {
#[arguments(first: 50, after: $after)]
pub pages: Option<PageCountableConnection>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "Query")]
pub struct GetPagesInitial {
#[arguments(first: 50)]
pub pages: Option<PageCountableConnection2>,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(graphql_type = "PageCountableConnection")]
pub struct PageCountableConnection2 {
pub total_count: Option<i32>,
pub page_info: PageInfo,
pub edges: Vec<PageCountableEdge>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct PageCountableConnection {
pub page_info: PageInfo,
pub edges: Vec<PageCountableEdge>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct PageCountableEdge {
pub node: Page,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct PageInfo {
pub has_next_page: bool,
pub end_cursor: Option<String>,
}
#[derive(cynic::QueryFragment, Debug, Clone)]
pub struct Page {
pub published_at: Option<DateTime>,
pub id: cynic::Id,
pub slug: String,
}
#[derive(cynic::Scalar, Debug, Clone)]
pub struct DateTime(pub String);


@@ -1,2 +1,4 @@
pub mod event_subjects_updated;
-pub mod product_metadata_update;
pub mod get_all_categories_n_products;
pub mod get_all_collections;
pub mod get_all_pages;


@@ -1,75 +0,0 @@
#[cynic::schema("saleor")]
mod schema {}
#[derive(cynic::QueryVariables, Debug)]
pub struct UpdateProductMetadataVariables<'a> {
pub metadata: Option<Vec<MetadataInput<'a>>>,
pub product_id: &'a cynic::Id,
}
#[derive(cynic::QueryFragment, Debug)]
#[cynic(
graphql_type = "Mutation",
variables = "UpdateProductMetadataVariables"
)]
pub struct UpdateProductMetadata {
#[arguments(id: $product_id, input: { metadata: $metadata })]
pub product_update: Option<ProductUpdate>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct ProductUpdate {
pub errors: Vec<ProductError>,
pub product: Option<Product>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct Product {
pub id: cynic::Id,
pub metadata: Vec<MetadataItem>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct ProductError {
pub field: Option<String>,
pub message: Option<String>,
pub code: ProductErrorCode,
pub attributes: Option<Vec<cynic::Id>>,
pub values: Option<Vec<cynic::Id>>,
}
#[derive(cynic::QueryFragment, Debug)]
pub struct MetadataItem {
pub key: String,
pub value: String,
}
#[derive(cynic::Enum, Clone, Copy, Debug)]
pub enum ProductErrorCode {
AlreadyExists,
AttributeAlreadyAssigned,
AttributeCannotBeAssigned,
AttributeVariantsDisabled,
MediaAlreadyAssigned,
DuplicatedInputItem,
GraphqlError,
Invalid,
InvalidPrice,
ProductWithoutCategory,
NotProductsImage,
NotProductsVariant,
NotFound,
Required,
Unique,
VariantNoDigitalContent,
CannotManageProductWithoutVariant,
ProductNotAssignedToChannel,
UnsupportedMediaProvider,
PreorderVariantCannotBeDeactivated,
}
#[derive(cynic::InputObject, Debug)]
pub struct MetadataInput<'a> {
pub key: &'a str,
pub value: &'a str,
}


@@ -1,5 +1,5 @@
use axum::{extract::State, Json};
-use saleor_app_sdk::{manifest::AppManifest};
use saleor_app_sdk::manifest::AppManifest;

use crate::app::{AppError, AppState};


@@ -1,9 +1,11 @@
use axum::{
    handler::HandlerWithoutStateExt,
    http::StatusCode,
    middleware,
    routing::{any, get, post},
    Router,
};
use saleor_app_sdk::middleware::verify_webhook_signature::webhook_signature_verifier;
use tower_http::services::ServeDir;

use crate::app::AppState;

@@ -24,6 +26,8 @@ pub fn create_routes(state: AppState) -> Router {
    let serve_dir = ServeDir::new("./sitemap-generator/public").not_found_service(service);

    Router::new()
        .route("/api/webhooks", any(webhooks))
        .layer(middleware::from_fn(webhook_signature_verifier))
        //handles just path, eg. localhost:3000/
        .route(
            "/",

@@ -33,6 +37,5 @@ pub fn create_routes(state: AppState) -> Router {
        .fallback_service(serve_dir)
        .route("/api/manifest", get(manifest))
        .route("/api/register", post(register))
-        .route("/api/webhooks", any(webhooks))
        .with_state(state)
}


@@ -1,13 +1,38 @@
use std::{rc::Rc, str::FromStr, sync::Arc};

use anyhow::Context;
use axum::{
extract::Json,
extract::State,
http::{HeaderMap, StatusCode},
};
use chrono::TimeZone;
use cynic::{http::SurfExt, QueryBuilder};
use saleor_app_sdk::{AuthData, AuthToken};
-use tracing::{debug, info};
use sitemap_rs::url::Url;
use tinytemplate::TinyTemplate;
use tokio::spawn;
use tracing::{debug, error, info, trace};

-use crate::app::{AppError, AppState};
use crate::{
app::{AppError, AppState, XmlData, XmlDataType},
queries::{
event_subjects_updated::{
self, CategoryUpdated, CollectionUpdated, PageUpdated, ProductUpdated,
},
get_all_categories_n_products::{
CategorisedProduct, Category, Category3, GetCategoriesInitial, GetCategoriesNext,
GetCategoriesNextVariables, GetCategoryProductsInitial,
GetCategoryProductsInitialVariables, GetCategoryProductsNext,
GetCategoryProductsNextVariables,
},
get_all_collections::{
Collection, GetCollectionsInitial, GetCollectionsNext, GetCollectionsNextVariables,
},
get_all_pages::{self, GetPagesInitial, GetPagesNext, GetPagesNextVariables},
},
routes::webhooks::write_xml,
};

pub async fn register(
headers: HeaderMap,

@@ -29,12 +54,474 @@ pub async fn register(
let auth_data = AuthData {
jwks: None,
token: auth_token.auth_token,
-domain: Some(state.config.app_api_base_url),
domain: Some(state.config.app_api_base_url.clone()),
-app_id: state.manifest.id,
app_id: state.manifest.id.clone(),
saleor_api_url: saleor_api_url.clone(),
};
app.apl.set(auth_data).await?;
info!("registered app for {:?}", &saleor_api_url);

//When app registers, start collecting everything of substance
info!("Starting caching and generation process");
let cloned_state = state.clone();
spawn(async move {
match regenerate(cloned_state, saleor_api_url).await {
Ok(_) => info!("Finished caching and regeneration"),
Err(e) => error!("Something went wrong during caching and regeneration, {e}"),
};
});
Ok(StatusCode::OK)
}
pub async fn regenerate(state: AppState, saleor_api_url: String) -> anyhow::Result<()> {
info!("regeneration: fetching all categories, products, collections, pages");
let xml_cache = state.xml_cache.lock().await;
let mut categories: Vec<(Category3, Vec<Arc<CategorisedProduct>>)> =
get_all_categories(&saleor_api_url)
.await?
.into_iter()
.map(|c| (c, vec![]))
.collect();
let mut products = vec![];
for category in categories.iter_mut() {
products.append(&mut get_all_products(&saleor_api_url, category).await?);
}
let pages = get_all_pages(&saleor_api_url).await?;
let collections = get_all_collections(&saleor_api_url).await?;
info!(
"regeneration: found {} products, {} categories, {} pages, {} collections",
products.len(),
categories.len(),
pages.len(),
collections.len()
);
info!("regeneration: creating xml data and caching it");
let mut xml_data = vec![];
xml_data.append(
&mut categories
.into_iter()
.map(|c| XmlData {
slug: c.0.slug,
last_modified: chrono::DateTime::<chrono::Utc>::from_str(&c.0.updated_at.0)
.map_or(chrono::offset::Utc::now().fixed_offset(), |d| {
d.fixed_offset()
}),
id: c.0.id,
relations: c.1.iter().map(|p| p.product.id.clone()).collect::<Vec<_>>(),
data_type: XmlDataType::Category,
})
.collect::<Vec<_>>(),
);
xml_data.append(
&mut products
.into_iter()
.map(|p| XmlData {
data_type: XmlDataType::Product,
relations: vec![p.category_id.clone()],
id: p.product.id.clone(),
last_modified: chrono::DateTime::<chrono::Utc>::from_str(&p.product.updated_at.0)
.map_or(chrono::offset::Utc::now().fixed_offset(), |d| {
d.fixed_offset()
}),
slug: p.product.slug.clone(),
})
.collect(),
);
xml_data.append(
&mut pages
.into_iter()
.map(|p| XmlData {
data_type: XmlDataType::Page,
relations: vec![],
id: p.id.clone(),
last_modified: match p.published_at {
Some(d) => chrono::DateTime::<chrono::Utc>::from_str(&d.0)
.map_or(chrono::offset::Utc::now().fixed_offset(), |d| {
d.fixed_offset()
}),
None => chrono::offset::Utc::now().fixed_offset(),
},
slug: p.slug.clone(),
})
.collect(),
);
xml_data.append(
&mut collections
.into_iter()
.map(|c| XmlData {
slug: c.slug,
last_modified: chrono::offset::Utc::now().fixed_offset(),
id: c.id,
relations: vec![],
data_type: XmlDataType::Collection,
})
.collect::<Vec<_>>(),
);
xml_cache.set(xml_data.clone(), &saleor_api_url).await?;
info!("regeneration: xml_cache was set");
//create urls
info!("regeneration: creating urls");
let mut page_urls = vec![];
let mut product_urls = vec![];
let mut category_urls = vec![];
let mut collection_urls = vec![];
for x in xml_data.iter() {
match x.data_type {
XmlDataType::Page => {
let mut tt = TinyTemplate::new();
tt.add_template("page_url", &state.sitemap_config.pages_template)?;
let context = PageUpdated {
page: Some(event_subjects_updated::Page {
slug: x.slug.clone(),
id: x.id.clone(),
}),
};
let page_url = Url::builder(tt.render("page_url", &context)?)
.last_modified(x.last_modified)
.build()?;
trace!("Created Page url: {}", &page_url.location);
page_urls.push(page_url);
}
XmlDataType::Product => {
let mut tt = TinyTemplate::new();
tt.add_template("product_url", &state.sitemap_config.product_template)?;
let context = ProductUpdated {
product: Some(event_subjects_updated::Product {
id: x.id.clone(),
slug: x.slug.clone(),
category: match xml_data.iter().find(|all| {
x.relations
.iter()
.find(|rel| {
all.id == **rel && all.data_type == XmlDataType::Category
})
.is_some()
}) {
Some(c) => Some(event_subjects_updated::Category {
slug: c.slug.clone(),
id: c.id.clone(),
}),
None => Some(event_subjects_updated::Category {
slug: "unknown".to_owned(),
id: cynic::Id::new("unknown".to_owned()),
}),
},
}),
};
let product_url = Url::builder(tt.render("product_url", &context)?)
.last_modified(x.last_modified)
.build()?;
trace!("Created Page url: {}", &product_url.location);
product_urls.push(product_url);
}
XmlDataType::Category => {
let mut tt = TinyTemplate::new();
tt.add_template("category_url", &state.sitemap_config.category_template)?;
let context = CategoryUpdated {
category: Some(event_subjects_updated::Category2 {
id: x.id.clone(),
slug: x.slug.clone(),
}),
};
let category_url = Url::builder(tt.render("category_url", &context)?)
.last_modified(x.last_modified)
.build()?;
trace!("Created category url: {}", &category_url.location);
category_urls.push(category_url);
}
XmlDataType::Collection => {
let mut tt = TinyTemplate::new();
tt.add_template("coll_url", &state.sitemap_config.collection_template)?;
let context = CollectionUpdated {
collection: Some(event_subjects_updated::Collection {
slug: x.slug.clone(),
id: x.id.clone(),
}),
};
let collection_url = Url::builder(tt.render("coll_url", &context)?)
.last_modified(x.last_modified)
.build()?;
trace!("Created collection url: {}", &collection_url.location);
collection_urls.push(collection_url);
}
}
}
write_xml(page_urls, &state, XmlDataType::Page).await?;
write_xml(collection_urls, &state, XmlDataType::Collection).await?;
write_xml(category_urls, &state, XmlDataType::Category).await?;
write_xml(product_urls, &state, XmlDataType::Product).await?;
Ok(())
}
async fn get_all_pages(saleor_api_url: &str) -> anyhow::Result<Vec<get_all_pages::Page>> {
let operation = GetPagesInitial::build(());
let mut all_pages = vec![];
let res = surf::post(&saleor_api_url).run_graphql(operation).await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(pages) = &data.pages
{
debug!("fetched first pages, eg.:{:?}", &pages.edges.get(0));
all_pages.append(
&mut pages
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
//Keep fetching next page
let mut next_cursor = pages.page_info.end_cursor.clone();
loop {
if let Some(cursor) = &mut next_cursor {
let res = surf::post(&saleor_api_url)
.run_graphql(GetPagesNext::build(GetPagesNextVariables {
after: &cursor,
}))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(pages) = &data.pages
{
all_pages.append(
&mut pages
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!("fetched next pages, eg.:{:?}", &pages.edges.get(0));
if !pages.page_info.has_next_page {
break;
}
next_cursor = pages.page_info.end_cursor.clone();
} else {
error!("Failed fetching initial pages! {:?}", &res);
anyhow::bail!("Failed fetching initial pages! {:?}", res);
}
} else {
break;
}
}
} else {
error!("Failed fetching initial pages! {:?}", &res);
anyhow::bail!("Failed fetching initial pages! {:?}", res);
};
info!("fetched all pages");
Ok(all_pages)
}
async fn get_all_categories(saleor_api_url: &str) -> anyhow::Result<Vec<Category3>> {
debug!("Collecting all categories...");
let operation = GetCategoriesInitial::build(());
let mut all_categories = vec![];
let res = surf::post(&saleor_api_url).run_graphql(operation).await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(categories) = &data.categories
{
all_categories.append(
&mut categories
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched first categories, eg.:{:?}",
&categories.edges.get(0)
);
//Keep fetching next page
let mut next_cursor = categories.page_info.end_cursor.clone();
loop {
if let Some(cursor) = &mut next_cursor {
let res = surf::post(&saleor_api_url)
.run_graphql(GetCategoriesNext::build(GetCategoriesNextVariables {
after: Some(&cursor),
}))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(categories) = &data.categories
{
all_categories.append(
&mut categories
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched first categories, eg.:{:?}",
&categories.edges.get(0)
);
if !categories.page_info.has_next_page {
break;
}
next_cursor = categories.page_info.end_cursor.clone();
} else {
error!("Failed fetching initial pages! {:?}", &res);
anyhow::bail!("Failed fetching initial pages! {:?}", res);
}
} else {
break;
}
}
} else {
error!("Failed fetching initial pages! {:?}", &res);
anyhow::bail!("Failed fetching initial pages! {:?}", res);
};
info!("All categories collected");
Ok(all_categories)
}
async fn get_all_collections(saleor_api_url: &str) -> anyhow::Result<Vec<Collection>> {
debug!("Collecting all Collections...");
let operation = GetCollectionsInitial::build(());
let mut all_collections = vec![];
let res = surf::post(&saleor_api_url).run_graphql(operation).await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(collections) = &data.collections
{
all_collections.append(
&mut collections
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched first collections, eg.:{:?}",
&collections.edges.get(0)
);
//Keep fetching next page
let mut next_cursor = collections.page_info.end_cursor.clone();
loop {
if let Some(cursor) = &mut next_cursor {
let res = surf::post(&saleor_api_url)
.run_graphql(GetCollectionsNext::build(GetCollectionsNextVariables {
after: Some(&cursor),
}))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(collections) = &data.collections
{
all_collections.append(
&mut collections
.edges
.iter()
.map(|p| p.node.clone())
.collect::<Vec<_>>(),
);
debug!(
"fetched next collections, eg.:{:?}",
&collections.edges.get(0)
);
if !collections.page_info.has_next_page {
break;
}
next_cursor = collections.page_info.end_cursor.clone();
} else {
error!("Failed fetching initial collecnios! {:?}", &res);
anyhow::bail!("Failed fetching initial collections! {:?}", res);
}
} else {
break;
}
}
} else {
error!("Failed fetching initial collections! {:?}", &res);
anyhow::bail!("Failed fetching initial collections! {:?}", res);
};
info!("All Collections collected...");
Ok(all_collections)
}
/**
* Gets all products of a category then assigns them as related
*/
async fn get_all_products(
saleor_api_url: &str,
main_category: &mut (Category3, Vec<Arc<CategorisedProduct>>),
) -> anyhow::Result<Vec<Arc<CategorisedProduct>>> {
debug!("Collecting all products...");
let operation = GetCategoryProductsInitial::build(GetCategoryProductsInitialVariables {
id: &main_category.0.id,
});
let mut all_categorised_products: Vec<Arc<CategorisedProduct>> = vec![];
let res = surf::post(&saleor_api_url).run_graphql(operation).await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(category) = &data.category
&& let Some(products) = &category.products
{
all_categorised_products.append(
&mut products
.edges
.iter()
.map(|p| {
Arc::new(CategorisedProduct {
product: p.node.clone(),
category_id: main_category.0.id.clone(),
})
})
.collect::<Vec<_>>(),
);
//Keep fetching next page
debug!("fetched first products, eg: {:?}", products.edges.get(0));
let mut next_cursor = products.page_info.end_cursor.clone();
loop {
if let Some(cursor) = &mut next_cursor {
let res = surf::post(&saleor_api_url)
.run_graphql(GetCategoryProductsNext::build(
GetCategoryProductsNextVariables {
id: &main_category.0.id,
after: &cursor,
},
))
.await;
if let Ok(query) = &res
&& let Some(data) = &query.data
&& let Some(category) = &data.category
&& let Some(products) = &category.products
{
all_categorised_products.append(
&mut products
.edges
.iter()
.map(|p| {
Arc::new(CategorisedProduct {
product: p.node.clone(),
category_id: main_category.0.id.clone(),
})
})
.collect::<Vec<_>>(),
);
debug!("fetched next products, eg: {:?}", products.edges.get(0));
if !products.page_info.has_next_page {
break;
}
next_cursor = products.page_info.end_cursor.clone();
} else {
error!("Failed fetching initial products! {:?}", &res);
anyhow::bail!("Failed fetching initial products! {:?}", res);
}
} else {
break;
}
}
} else {
error!("Failed fetching initial products! {:?}", &res);
anyhow::bail!("Failed fetching initial products! {:?}", res);
};
info!("All products collected...");
Ok(all_categorised_products)
}
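For readers following regenerate() above: the `XmlData`/`XmlDataType` cache records it builds are defined in app.rs and do not appear in this diff. A hedged sketch of their shape, reconstructed purely from how the fields are used here (the real definitions may differ, e.g. in derives):

```rust
use chrono::{DateTime, FixedOffset};

// What kind of storefront object a cached sitemap entry describes.
#[derive(Debug, Clone, PartialEq)]
pub enum XmlDataType {
    Category,
    Collection,
    Page,
    Product,
}

// One cached sitemap entry; `relations` links products to their category
// (and vice versa) so slug changes can cascade to related URLs.
#[derive(Debug, Clone)]
pub struct XmlData {
    pub id: cynic::Id,
    pub slug: String,
    pub relations: Vec<cynic::Id>,
    pub last_modified: DateTime<FixedOffset>,
    pub data_type: XmlDataType,
}
```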


@@ -27,8 +27,8 @@ use tracing::{debug, error, info};
use crate::{
    app::{AppError, AppState, XmlData, XmlDataType},
    queries::event_subjects_updated::{
-        Category, Category2, CategoryUpdated, CollectionUpdated, PageInfo, PageUpdated, Product,
-        Product2, ProductCountableConnection, ProductCountableEdge, ProductUpdated,
        Category, Category2, CategoryUpdated, Collection, CollectionUpdated, Page, PageInfo,
        PageUpdated, Product, ProductUpdated,
    },
};

@@ -53,25 +53,41 @@ pub async fn webhooks(
        | AsyncWebhookEventType::ProductCreated
        | AsyncWebhookEventType::ProductDeleted => {
            let product: ProductUpdated = serde_json::from_str(&data)?;
-            spawn(async move { update_sitemap_product(product, &url, state).await });
            spawn(async move {
                if let Err(e) = update_sitemap_product(product, &url, state).await {
                    error!("Error processing Product, e: {:?}", e);
                }
            });
        }
        AsyncWebhookEventType::CategoryCreated
        | AsyncWebhookEventType::CategoryUpdated
        | AsyncWebhookEventType::CategoryDeleted => {
            let category: CategoryUpdated = serde_json::from_str(&data)?;
-            spawn(async move { update_sitemap_category(category, &url, state).await });
            spawn(async move {
                if let Err(e) = update_sitemap_category(category, &url, state).await {
                    error!("Error processing Category, e: {:?}", e);
                }
            });
        }
        AsyncWebhookEventType::PageCreated
        | AsyncWebhookEventType::PageUpdated
        | AsyncWebhookEventType::PageDeleted => {
            let page: PageUpdated = serde_json::from_str(&data)?;
-            spawn(async move { update_sitemap_page(page, &url, state).await });
            spawn(async move {
                if let Err(e) = update_sitemap_page(page, &url, state).await {
                    error!("Error processing Page, e: {:?}", e);
                }
            });
        }
        AsyncWebhookEventType::CollectionCreated
        | AsyncWebhookEventType::CollectionUpdated
        | AsyncWebhookEventType::CollectionDeleted => {
            let collection: CollectionUpdated = serde_json::from_str(&data)?;
-            spawn(async move { update_sitemap_collection(collection, &url, state).await });
            spawn(async move {
                if let Err(e) = update_sitemap_collection(collection, &url, state).await {
                    error!("Error processing Collection, e: {:?}", e);
                }
            });
        }
        _ => (),

@@ -91,7 +107,8 @@ async fn update_sitemap_product(
    debug!("Product got changed!, {:?}", &product);
    if let Some(product) = product.product {
        // Update or add the product
-        let mut xml_data = match state.xml_cache.get_all(saleor_api_url).await {
        let xml_cache = state.xml_cache.lock().await;
        let mut xml_data = match xml_cache.get_all(saleor_api_url).await {
            Ok(d) => d,
            Err(e) => {
                error!("Error, {:?}. no xml cache present?", e);

@@ -218,13 +235,17 @@ async fn update_sitemap_product(
                },
            }),
        };
-        urls.push(tt.render("product_url", &context)?);
        urls.push(
            Url::builder(tt.render("product_url", &context)?)
                .last_modified(x.last_modified)
                .build()?,
        );
        }
    }
    //debug!("new urls:{:?}", &urls);

    write_xml(urls, &state, XmlDataType::Product).await?;
-    state.xml_cache.set(xml_data, saleor_api_url).await?;
    xml_cache.set(xml_data, saleor_api_url).await?;
    } else {
        error!("Failed to update product, e: {:?}", product);
        anyhow::bail!("product not present in webhook");

@@ -239,7 +260,8 @@ async fn update_sitemap_category(
    state: AppState,
) -> anyhow::Result<()> {
    if let Some(category) = category.category {
-        let mut xml_data = state.xml_cache.get_all(saleor_api_url).await?;
        let xml_cache = state.xml_cache.lock().await;
        let mut xml_data = xml_cache.get_all(saleor_api_url).await?;
        let mut affected_product_ids = vec![];
        let mut new_xml_data = vec![];
        //check if template of product includes categories in url

@@ -346,7 +368,11 @@ async fn update_sitemap_category(
            };
        }
    }
-    product_urls.push(tt.render("product_url", &context)?);
    product_urls.push(
        Url::builder(tt.render("product_url", &context)?)
            .last_modified(x.last_modified)
            .build()?,
    );
    }
    if x.data_type == XmlDataType::Category {
        tt.add_template("category_url", &state.sitemap_config.category_template)?;

@@ -354,10 +380,13 @@ async fn update_sitemap_category(
        category: Some(Category2 {
            id: x.id.clone(),
            slug: x.slug.clone(),
-            products: None,
        }),
    };
-    category_urls.push(tt.render("category_url", &context)?);
    category_urls.push(
        Url::builder(tt.render("category_url", &context)?)
            .last_modified(x.last_modified)
            .build()?,
    );
    }
}
//and write

@@ -365,6 +394,7 @@ async fn update_sitemap_category(
        write_xml(product_urls, &state, XmlDataType::Product).await?;
    }
    write_xml(category_urls, &state, XmlDataType::Category).await?;
    xml_cache.set(xml_data, saleor_api_url).await?;
    } else {
        error!("Failed to update category, e:{:?}", category);
        anyhow::bail!("Category not present in webhook");

@@ -377,20 +407,133 @@ async fn update_sitemap_collection(
    saleor_api_url: &str,
    state: AppState,
) -> anyhow::Result<()> {
    if let Some(collection) = collection.collection {
        let xml_cache = state.xml_cache.lock().await;
        let mut xml_data = xml_cache.get_all(saleor_api_url).await?;
        let mut new_xml_data = vec![];
        match xml_data
            .iter_mut()
            .find(|c| c.id == collection.id && c.data_type == XmlDataType::Collection)
        {
            Some(xml_col) => {
                if xml_col.slug == collection.slug {
                    debug!("Collection url didn't change, skipping");
                    return Ok(());
                }
                xml_col.slug = collection.slug;
                xml_col.last_modified = chrono::offset::Utc::now().fixed_offset();
            }
            None => {
                debug!("Collection not cached, adding...");
                new_xml_data.push(XmlData {
                    slug: collection.slug,
                    id: collection.id,
                    last_modified: chrono::offset::Utc::now().fixed_offset(),
                    relations: vec![],
                    data_type: XmlDataType::Collection,
                })
            }
        }
        xml_data.append(&mut new_xml_data);

        //create urls
        let mut collection_urls = vec![];
        for xml_col in xml_data.iter() {
            if xml_col.data_type == XmlDataType::Collection {
                let mut tt = TinyTemplate::new();
                tt.add_template("collection_url", &state.sitemap_config.collection_template)?;
                let context = CollectionUpdated {
                    collection: Some(Collection {
                        slug: xml_col.slug.clone(),
                        id: xml_col.id.clone(),
                    }),
                };
                collection_urls.push(
                    Url::builder(tt.render("collection_url", &context)?)
                        .last_modified(xml_col.last_modified)
                        .build()?,
                );
            }
        }
        write_xml(collection_urls, &state, XmlDataType::Collection).await?;
        xml_cache.set(xml_data, saleor_api_url).await?;
    } else {
        error!("Failed to update collection, e:{:?}", collection);
        anyhow::bail!("Collection not present in webhook");
    }
    info!("Sitemap updated, cause: collection");
-    todo!()
    Ok(())
}

async fn update_sitemap_page(
    page: PageUpdated,
    saleor_api_url: &str,
    state: AppState,
) -> anyhow::Result<()> {
-    info!("Sitemap updated, cause: collection");
-    todo!()
    if let Some(page) = page.page {
        let xml_cache = state.xml_cache.lock().await;
        let mut xml_data = xml_cache.get_all(saleor_api_url).await?;
        let mut new_xml_data = vec![];
        match xml_data
            .iter_mut()
            .find(|p| p.id == page.id && p.data_type == XmlDataType::Page)
        {
            Some(xml_page) => {
                if xml_page.slug == page.slug {
                    debug!("Page url didn't change, skipping");
                    return Ok(());
                }
                xml_page.slug = page.slug;
                xml_page.last_modified = chrono::offset::Utc::now().fixed_offset();
            }
            None => {
                debug!("Page not cached, adding...");
                new_xml_data.push(XmlData {
                    slug: page.slug,
                    id: page.id,
                    last_modified: chrono::offset::Utc::now().fixed_offset(),
                    relations: vec![],
                    data_type: XmlDataType::Page,
                })
            }
        }
        xml_data.append(&mut new_xml_data);

        //create urls
        let mut page_urls = vec![];
        for xml_page in xml_data.iter() {
            if xml_page.data_type == XmlDataType::Page {
                let mut tt = TinyTemplate::new();
                tt.add_template("page_url", &state.sitemap_config.pages_template)?;
                let context = PageUpdated {
                    page: Some(Page {
                        slug: xml_page.slug.clone(),
                        id: xml_page.id.clone(),
                    }),
                };
                page_urls.push(
                    Url::builder(tt.render("page_url", &context)?)
                        .last_modified(xml_page.last_modified)
                        .build()?,
                );
            }
        }
        write_xml(page_urls, &state, XmlDataType::Page).await?;
        xml_cache.set(xml_data, saleor_api_url).await?;
    } else {
        error!("Failed to update Page, e:{:?}", page);
        anyhow::bail!("Page not present in webhook");
    }
    info!("Sitemap updated, cause: Page");
    Ok(())
}

-async fn write_xml(
-    urls: Vec<String>,
pub async fn write_xml(
    urls: Vec<Url>,
    state: &AppState,
    type_group: XmlDataType,
) -> anyhow::Result<()> {

@@ -405,12 +548,7 @@ async fn write_xml(
        .await?;
    let mut sitemap_urls: Vec<Url> = vec![];
    for url in urls.clone() {
-        sitemap_urls.push(
-            Url::builder(url)
-                .change_frequency(ChangeFrequency::Weekly)
-                .last_modified(chrono::offset::Utc::now().fixed_offset())
-                .build()?,
-        );
        sitemap_urls.push(url);
    }
    let url_set: UrlSet = UrlSet::new(sitemap_urls)?;
    debug!("Writing xml into file");

@@ -424,18 +562,13 @@ async fn write_xml(
    let len = buf.len() * std::mem::size_of::<u8>();
    if len > 200000 {
        let file_amount = (len as f32 / 150000 as f32).ceil() as usize;
-        let sliced_urls: Vec<&[String]> = urls.chunks(file_amount).collect();
        let sliced_urls: Vec<&[Url]> = urls.chunks(file_amount).collect();

        let mut sitemaps: Vec<UrlSet> = vec![];
        for urls in sliced_urls {
-            for url in urls {
            for url in urls.iter().cloned() {
                let mut sitemap_urls: Vec<Url> = vec![];
-                sitemap_urls.push(
-                    Url::builder(url.to_owned())
-                        .change_frequency(ChangeFrequency::Weekly)
-                        .last_modified(chrono::offset::Utc::now().fixed_offset())
-                        .build()?,
-                );
                sitemap_urls.push(url);
                sitemaps.push(UrlSet::new(sitemap_urls)?);
            }
        }

@@ -475,7 +608,7 @@ async fn update_sitemap_index(state: &AppState) -> anyhow::Result<()> {
            if path
                .extension()
                .map_or(false, |ext| ext == "xml" || ext == "gz")
-                && !path.to_string_lossy().to_string().contains("sitemap_index")
                && !path.to_string_lossy().to_string().contains("sitemap-index")
            {
                Some(path)
            } else {
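One reviewer-level footnote on the splitting logic in write_xml above: `slice::chunks(n)` yields pieces of *length* `n`, not `n` pieces, so chunking by `file_amount` produces many files of `file_amount` URLs each rather than `file_amount` files. A hedged sketch of splitting into a target number of roughly even files (`div_ceil` is stable since Rust 1.73; this is a suggested shape, not what the commit ships):

```rust
use sitemap_rs::url::Url;

// Split `urls` into at most `file_amount` slices of roughly equal length.
fn split_into_files(urls: &[Url], file_amount: usize) -> Vec<&[Url]> {
    // Guard against zero to avoid a division by zero / chunks(0) panic.
    let per_file = urls.len().div_ceil(file_amount.max(1));
    urls.chunks(per_file.max(1)).collect()
}
```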