sitemap: make products and categories work

parent ba432d9aa5
commit 2402c573ac

13 changed files with 437 additions and 90 deletions
.env (3 changes)

@@ -12,4 +12,5 @@ SITEMAP_CATEGORY_TEMPLATE="https://example.com/{category.slug}"
 SITEMAP_COLLECTION_TEMPLATE="https://example.com/collection/{collection.slug}"
 # Available fields can be found in ./sitemap-generator/src/queries/event_subjects_updated.rs: PageUpdate
 SITEMAP_PAGES_TEMPLATE="https://example.com/{page.slug}"
-SITEMAP_INDEX_HOSTNAME="https://example.com/"
+# Without trailing "/"!
+SITEMAP_INDEX_HOSTNAME="https://example.com"
@@ -11,4 +11,5 @@ SITEMAP_CATEGORY_TEMPLATE="https://example.com/{category.slug}"
 SITEMAP_COLLECTION_TEMPLATE="https://example.com/collection/{collection.slug}"
 # Available fields can be found in ./sitemap-generator/src/queries/event_subjects_updated.rs: PageUpdate
 SITEMAP_PAGES_TEMPLATE="https://example.com/{page.slug}"
-SITEMAP_INDEX_HOSTNAME="https://example.com/"
+# Without trailing "/"!
+SITEMAP_INDEX_HOSTNAME="https://example.com"
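The `{category.slug}`-style placeholders above are TinyTemplate syntax: the generator registers each template string and renders it with a Serialize-derived webhook payload as context, as webhooks.rs does below. A minimal, self-contained sketch of that mechanism, using stand-in context types rather than the app's real ones:

```rust
use serde::Serialize;
use tinytemplate::TinyTemplate;

// Stand-ins for the query-fragment types, which derive Serialize further down.
#[derive(Serialize)]
struct Category {
    slug: String,
}

#[derive(Serialize)]
struct Context {
    category: Category,
}

fn main() -> Result<(), tinytemplate::error::Error> {
    let mut tt = TinyTemplate::new();
    // Same placeholder syntax as SITEMAP_CATEGORY_TEMPLATE above.
    tt.add_template("category_url", "https://example.com/{category.slug}")?;
    let url = tt.render(
        "category_url",
        &Context {
            category: Category {
                slug: "shoes".to_owned(),
            },
        },
    )?;
    assert_eq!(url, "https://example.com/shoes");
    Ok(())
}
```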
.gitignore (vendored, 5 changes)

@@ -1,2 +1,7 @@
 /target
+.env
+temp
+temp/**.*
 
+# Allow
+!.env.example
Cargo.lock (generated, 18 changes)

@@ -485,6 +485,7 @@ dependencies = [
  "iana-time-zone",
  "js-sys",
  "num-traits",
+ "serde",
  "wasm-bindgen",
  "windows-targets 0.52.4",
 ]
@@ -1241,6 +1242,12 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "half"
+version = "1.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403"
+
 [[package]]
 name = "hashbrown"
 version = "0.14.3"
@@ -2476,6 +2483,16 @@ dependencies = [
  "serde_derive",
 ]
 
+[[package]]
+name = "serde_cbor"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5"
+dependencies = [
+ "half",
+ "serde",
+]
+
 [[package]]
 name = "serde_derive"
 version = "1.0.196"
@@ -2615,6 +2632,7 @@ dependencies = [
  "redis",
  "saleor-app-sdk",
  "serde",
+ "serde_cbor",
  "serde_json",
  "sitemap-rs",
  "surf",
@@ -2,6 +2,7 @@ mod app;
 mod queries;
 mod routes;
 
+use anyhow::Context;
 use saleor_app_sdk::{
     config::Config,
     manifest::{AppManifest, AppPermission},
@@ -69,18 +70,20 @@ async fn main() -> anyhow::Result<()> {
         .build();
     let app_state = AppState {
         manifest: app_manifest,
-        config,
+        config: config.clone(),
         saleor_app: Arc::new(Mutex::new(saleor_app)),
     };
     let app = create_routes(app_state);
-    /* Router::new()
-    .route("/api/manifest", get(manifest))
-    .route("/api/register", post(register))
-    .with_state(app_state);
-    */
 
-    // let app = create_routes(app_state);
-    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
+    let listener = tokio::net::TcpListener::bind(
+        &config
+            .app_api_base_url
+            .split("//")
+            .collect::<Vec<_>>()
+            .get(1)
+            .context("APP_API_BASE_URL invalid format")?,
+    )
+    .await?;
     tracing::debug!("listening on {}", listener.local_addr().unwrap());
     match axum::serve(listener, app).await {
         Ok(o) => Ok(o),
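The hard-coded `0.0.0.0:3000` bind above is replaced by an address derived from `APP_API_BASE_URL`: whatever follows the scheme's `//` is handed to `TcpListener::bind`. A standalone sketch of just that parsing step; the function name is illustrative:

```rust
use anyhow::Context;

// Everything after the scheme separator, e.g. "http://0.0.0.0:3000" -> "0.0.0.0:3000".
fn bind_addr(app_api_base_url: &str) -> anyhow::Result<String> {
    Ok(app_api_base_url
        .split("//")
        .collect::<Vec<_>>()
        .get(1)
        .context("APP_API_BASE_URL invalid format")?
        .to_string())
}

fn main() -> anyhow::Result<()> {
    assert_eq!(bind_addr("http://0.0.0.0:3000")?, "0.0.0.0:3000");
    // No "//" present: .get(1) is None and the context error is returned.
    assert!(bind_addr("no-scheme-here").is_err());
    Ok(())
}
```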
@@ -30,7 +30,7 @@ pub async fn register(
         jwks: None,
         token: auth_token.auth_token,
         domain: Some(state.config.app_api_base_url),
-        app_id: state.config.saleor_app_id,
+        app_id: state.manifest.id,
         saleor_api_url: saleor_api_url.clone(),
     };
     app.apl.set(auth_data).await?;
@@ -1,8 +1,6 @@
-/*use http::{Request, Response};
-use std::{
-    str::Bytes,
-    task::{Context, Poll},
-};
+/*
+use http::{Request, Response};
+use std::task::{Context, Poll};
 use tower::Service;
 
 use crate::headers::SALEOR_SIGNATURE_HEADER;
@@ -31,18 +29,20 @@ where
     }
 
     fn call(&mut self, mut req: Request<ReqBody>) -> Self::Future {
+        /*
         if let Some(signature_header) = req.headers().get(SALEOR_SIGNATURE_HEADER) {
-            let b = req.body_mut().data();
            if let Ok(saleor_signature) = signature_header.to_str() {
                let split: Vec<&str> = saleor_signature.split(".").collect();
                let header = split.get(0);
                let signature = split.get(2);
                if let Some(signature) = signature {
                    /*
                    let jws = jose_jws::Signature {
                        signature: signature.parse().unwrap(),
                        header:,
                        protected: None,
                    };
                    */
                }
            }
+        /*
@@ -53,7 +53,6 @@ where
            */
-        }
-        self.inner.call(req)
+        */
         todo!()
     }
-}*/
+}
 */
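Context for the commented-out verification above: Saleor's `saleor-signature` header carries a JWS with a detached payload (`header..signature`, with an empty middle segment), which is why the code reads parts 0 and 2 of the dot-split. A minimal sketch of that parsing alone; the helper name is made up:

```rust
// Split "protected-header..signature" into its first and third segments.
fn split_jws(saleor_signature: &str) -> Option<(&str, &str)> {
    let split: Vec<&str> = saleor_signature.split('.').collect();
    match (split.first(), split.get(2)) {
        (Some(header), Some(signature)) => Some((*header, *signature)),
        _ => None,
    }
}

fn main() {
    let (header, signature) = split_jws("eyJhbGciOiJSUzI1NiJ9..c2ln").unwrap();
    assert_eq!(header, "eyJhbGciOiJSUzI1NiJ9");
    assert_eq!(signature, "c2ln");
}
```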
@@ -39,7 +39,8 @@ quick-xml = { version = "0.31.0", features = ["serialize"] }
 flate2 = "1.0.28"
 tinytemplate = "1.2.1"
 sitemap-rs = "0.2.1"
-chrono = "0.4.34"
+chrono = { version = "0.4.34", features = ["serde"] }
+serde_cbor = "0.11.2"
 
 [build-dependencies]
 cynic-codegen.workspace = true
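The new `serde` feature on chrono is what lets a `DateTime<FixedOffset>` field (like `XmlData::last_modified` in app.rs below) pass through serde_cbor. A small round-trip sketch under that assumption:

```rust
use chrono::{DateTime, FixedOffset, Utc};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct Entry {
    // Serialize/Deserialize on DateTime come from chrono's "serde" feature.
    last_modified: DateTime<FixedOffset>,
}

fn main() -> anyhow::Result<()> {
    let entry = Entry {
        last_modified: Utc::now().fixed_offset(),
    };
    let bytes = serde_cbor::to_vec(&entry)?;
    let back: Entry = serde_cbor::from_slice(&bytes)?;
    assert_eq!(entry.last_modified, back.last_modified);
    Ok(())
}
```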
@@ -3,10 +3,11 @@ use axum::{
     http::StatusCode,
     response::{IntoResponse, Response},
 };
+use chrono::{DateTime, FixedOffset};
 use fd_lock::RwLock;
 use std::{fs::File, sync::Arc, time::Duration};
 
-use redis::{AsyncCommands, Client};
+use redis::{AsyncCommands, Client, RedisError};
 use saleor_app_sdk::{config::Config, manifest::AppManifest, SaleorApp};
 use serde::{Deserialize, Serialize};
 use tracing::{debug, info};
@@ -88,15 +89,16 @@ pub struct XmlCache {
     app_api_base_url: String,
 }
 
-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct XmlData {
     pub id: cynic::Id,
     pub slug: String,
     pub relations: Vec<cynic::Id>,
     pub data_type: XmlDataType,
+    pub last_modified: DateTime<FixedOffset>,
 }
 
-#[derive(Serialize, Deserialize, Debug, PartialEq)]
+#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
 pub enum XmlDataType {
     Category,
     Product,
@@ -121,11 +123,24 @@ impl XmlCache {
         }
     }
 
+    /**
+     * ONLY USE IF YOU KNOW WHAT YOU'RE DOING! Will flush entire cache, run regenerate() from
+     * webhooks to renew.
+     */
+    pub async fn delete_all(&self, saleor_api_url: &str) -> anyhow::Result<()> {
+        debug!("xml data delete_cache()");
+        let mut conn = self.client.get_async_connection().await?;
+        conn.del(self.prepare_key(saleor_api_url)).await?;
+
+        info!("successful cache wipe");
+        Ok(())
+    }
+
     pub async fn get_all(&self, saleor_api_url: &str) -> anyhow::Result<Vec<XmlData>> {
         debug!("xml data get_all()");
         let mut conn = self.client.get_async_connection().await?;
-        let res: String = conn.get(self.prepare_key(saleor_api_url)).await?;
-        let cache: Vec<XmlData> = serde_json::from_str(&res)?;
+        let res: Vec<u8> = conn.get(self.prepare_key(saleor_api_url)).await?;
+        let cache: Vec<XmlData> = serde_cbor::from_slice(&res)?;
 
         info!("successful cache get");
 
@@ -133,12 +148,9 @@ impl XmlCache {
     }
 
     pub async fn set(&self, data: Vec<XmlData>, saleor_api_url: &str) -> anyhow::Result<()> {
-        debug!("xml data set(), {:?}", data);
+        debug!("xml data set()");
         let mut conn = self.client.get_async_connection().await?;
-        conn.set(
-            self.prepare_key(saleor_api_url),
-            serde_json::to_string(&data)?,
-        )
+        conn.set(self.prepare_key(saleor_api_url), serde_cbor::to_vec(&data)?)
             .await?;
         info!("successful cache set");
         Ok(())
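With this change the cache stores raw CBOR bytes instead of JSON strings; redis's argument/value traits accept `Vec<u8>` directly, so no string layer is needed. A trimmed sketch of the new round trip, assuming a Redis instance at the default local URL and a stand-in `XmlData`:

```rust
use redis::AsyncCommands;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
struct XmlData {
    slug: String,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let client = redis::Client::open("redis://127.0.0.1/")?;
    let mut conn = client.get_async_connection().await?;

    let data = vec![XmlData { slug: "shoes".to_owned() }];
    // What set() now writes: raw CBOR bytes instead of a JSON string.
    let _: () = conn.set("sitemap:example", serde_cbor::to_vec(&data)?).await?;
    // What get_all() now reads back.
    let bytes: Vec<u8> = conn.get("sitemap:example").await?;
    let cache: Vec<XmlData> = serde_cbor::from_slice(&bytes)?;
    assert_eq!(cache, data);
    Ok(())
}
```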
@@ -74,6 +74,11 @@ async fn main() -> anyhow::Result<()> {
         saleor_app: Arc::new(Mutex::new(saleor_app)),
     };
     debug!("Created AppState...");
+    app_state
+        .xml_cache
+        .delete_all("http://localhost:8000/graphpl/")
+        .await?;
+
     let app = create_routes(app_state);
     let listener = tokio::net::TcpListener::bind(
         &config
@@ -172,7 +172,7 @@ pub struct Collection {
     pub slug: String,
 }
 
-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Serialize)]
 pub struct CategoryUpdated {
     pub category: Option<Category2>,
 }
@@ -193,7 +193,7 @@ pub struct Category {
     pub id: cynic::Id,
 }
 
-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Serialize)]
 #[cynic(graphql_type = "Category")]
 pub struct Category2 {
     pub id: cynic::Id,
@@ -202,25 +202,25 @@ pub struct Category2 {
     pub products: Option<ProductCountableConnection>,
 }
 
-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Serialize)]
 pub struct ProductCountableConnection {
     pub page_info: PageInfo,
     pub edges: Vec<ProductCountableEdge>,
 }
 
-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Serialize)]
 pub struct ProductCountableEdge {
     pub node: Product2,
 }
 
-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Serialize)]
 #[cynic(graphql_type = "Product")]
 pub struct Product2 {
     pub id: cynic::Id,
     pub slug: String,
 }
 
-#[derive(cynic::QueryFragment, Debug)]
+#[derive(cynic::QueryFragment, Debug, Serialize)]
 pub struct PageInfo {
     pub end_cursor: Option<String>,
     pub has_next_page: bool,
@@ -1,12 +1,11 @@
-use std::{fs::File, io::Write};
+use tokio::{fs::File, io::AsyncWriteExt};
 
 use anyhow::Context;
 use axum::{
     extract::State,
     http::{HeaderMap, StatusCode},
 };
-use chrono::TimeZone;
-use fd_lock::RwLock;
+use chrono::{DateTime, Utc};
 use flate2::{write::GzEncoder, Compression};
 use saleor_app_sdk::{
     headers::SALEOR_API_URL_HEADER,
@@ -16,6 +15,8 @@ use saleor_app_sdk::{
     },
 };
 use sitemap_rs::{
+    sitemap::Sitemap,
+    sitemap_index::SitemapIndex,
     url::{ChangeFrequency, Url},
     url_set::UrlSet,
 };
@@ -26,7 +27,8 @@ use tracing::{debug, error, info};
 use crate::{
     app::{AppError, AppState, XmlData, XmlDataType},
     queries::event_subjects_updated::{
-        Category, CategoryUpdated, CollectionUpdated, PageUpdated, Product, ProductUpdated,
+        Category, Category2, CategoryUpdated, CollectionUpdated, PageInfo, PageUpdated, Product,
+        Product2, ProductCountableConnection, ProductCountableEdge, ProductUpdated,
     },
 };
 
@@ -36,8 +38,8 @@ pub async fn webhooks(
     data: String,
 ) -> Result<StatusCode, AppError> {
     debug!("/api/webhooks");
-    debug!("req: {:?}", data);
-    debug!("headers: {:?}", headers);
+    //debug!("req: {:?}", data);
+    //debug!("headers: {:?}", headers);
 
     let url = headers
         .get(SALEOR_API_URL_HEADER)
@@ -77,7 +79,7 @@ pub async fn webhooks(
         _ => (),
     }
 
-    info!("got webhooks!");
+    info!("webhook processed");
     Ok(StatusCode::OK)
 }
 
@@ -89,11 +91,44 @@ async fn update_sitemap_product(
     debug!("Product got changed!, {:?}", &product);
     if let Some(product) = product.product {
-        // Update or add the product
-        // TODO: when there are no keys, this will error. Work around that
-        let mut xml_data = state.xml_cache.get_all(saleor_api_url).await?;
+        let mut xml_data = match state.xml_cache.get_all(saleor_api_url).await {
+            Ok(d) => d,
+            Err(e) => {
+                error!("Error, {:?}. no xml cache present?", e);
+                vec![]
+            }
+        };
 
         //find the product in xml data and update / create it
         let mut new_data = vec![];
-        for x in xml_data.iter_mut() {
-            if x.id == product.id && x.data_type == XmlDataType::Product {
+        let cloned_xml_data = xml_data.clone();
+        //debug!("{:?}", xml_data);
+        match xml_data
+            .iter_mut()
+            .find(|x| x.id == product.id && x.data_type == XmlDataType::Product)
+        {
+            Some(x) => {
+                //Check if the slug or category.slug has changed, else ignore the change and continue
+                debug!("{} == {}", x.slug, product.slug);
+                if x.slug == product.slug {
+                    match &product.category {
+                        Some(c) => {
+                            if let Some(xml_c) = cloned_xml_data
+                                .iter()
+                                .find(|d| d.id == c.id && d.data_type == XmlDataType::Category)
+                            {
+                                if xml_c.slug == c.slug {
+                                    debug!("Products url didn't change, skipping...");
+                                    return Ok(());
+                                }
+                            }
+                        }
+                        None => {
+                            debug!("Products url didn't change, skipping...");
+                            return Ok(());
+                        }
+                    }
+                }
                 debug!(
                     "changed product {} found in xml_data, updating...",
                     product.slug
@@ -103,24 +138,57 @@ async fn update_sitemap_product(
                     Some(c) => vec![c.id.clone()],
                     None => vec![],
                 };
-            } else {
+                x.last_modified = chrono::offset::Utc::now().fixed_offset();
+            }
+            None => {
                 debug!(
                     "changed product {} not found in xml_data, adding...",
                     product.slug
                 );
                 new_data.push(XmlData {
+                    last_modified: chrono::offset::Utc::now().fixed_offset(),
                     relations: match &product.category {
-                        Some(c) => vec![c.id.clone()],
+                        Some(c) => {
+                            vec![c.id.clone()]
+                        }
                        None => vec![],
                    },
                    id: product.id.clone(),
                    data_type: XmlDataType::Product,
                    slug: product.slug.clone(),
-                })
+                });
             }
+        };
+        //See if products category exists
+        if let Some(c) = &product.category {
+            if let Some(xml_cat) = xml_data
+                .iter_mut()
+                .find(|x| x.id == c.id && x.data_type == XmlDataType::Category)
+            {
+                xml_cat.slug = c.slug.clone();
+                xml_cat.last_modified = chrono::offset::Utc::now().fixed_offset();
+                // If the category exists but product isn't in relation to it yet,
+                // add it
+                if xml_cat
+                    .relations
+                    .iter()
+                    .find(|c| **c == product.id)
+                    .is_none()
+                {
+                    xml_cat.relations.push(product.id.clone());
+                }
+                //if cat isn't in xml data, add it
+            } else {
+                new_data.push(XmlData {
+                    last_modified: chrono::offset::Utc::now().fixed_offset(),
+                    id: c.id.clone(),
+                    slug: c.slug.clone(),
+                    data_type: XmlDataType::Category,
+                    relations: vec![product.id.clone()],
+                })
+            }
         }
         xml_data.append(&mut new_data);
+        debug!("new xml_data : {:?}", &xml_data);
         //create urls
         let mut urls = vec![];
         for x in xml_data.iter() {
@@ -131,49 +199,37 @@ async fn update_sitemap_product(
                 product: Some(Product {
                     id: x.id.clone(),
                     slug: x.slug.clone(),
-                    category: match x.relations.is_empty() {
-                        false => {
-                            let data = xml_data
-                                .iter()
-                                .find(|d| x.relations.iter().find(|r| **r == d.id).is_some());
-                            match data {
-                                Some(d) => Some(Category {
-                                    slug: d.slug.clone(),
-                                    id: d.id.clone(),
-                                }),
-                                None => Some(Category {
-                                    slug: "unknown".to_owned(),
-                                    id: cynic::Id::new("unknown".to_owned()),
-                                }),
-                            }
-                        }
-                        true => Some(Category {
-                            slug: "unknown".to_owned(),
-                            id: cynic::Id::new("unknown".to_owned()),
-                        }),
-                    },
+                    category: match xml_data.iter().find(|all| {
+                        x.relations
+                            .iter()
+                            .find(|rel| {
+                                all.id == **rel && all.data_type == XmlDataType::Category
+                            })
+                            .is_some()
+                    }) {
+                        Some(c) => Some(Category {
+                            slug: c.slug.clone(),
+                            id: c.id.clone(),
+                        }),
+                        None => Some(Category {
+                            slug: "unknown".to_owned(),
+                            id: cynic::Id::new("unknown".to_owned()),
+                        }),
+                    },
                 }),
             };
             urls.push(tt.render("product_url", &context)?);
         }
     }
-        debug!("new urls:{:?}", &urls);
+        //debug!("new urls:{:?}", &urls);
 
-        write_xml(
-            urls,
-            RwLock::new(
-                File::options()
-                    .create(true)
-                    .write(true)
-                    .open("./sitemap.xml")?,
-            ),
-        )
-        .await?;
+        write_xml(urls, &state, XmlDataType::Product).await?;
         state.xml_cache.set(xml_data, saleor_api_url).await?;
     } else {
         error!("Failed to update product, e: {:?}", product);
-        anyhow::bail!("product not present in body");
+        anyhow::bail!("product not present in webhook");
     }
-    debug!("Sitemap updated");
+    info!("Sitemap updated, cause: product");
     Ok(())
 }
 
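The `category: match xml_data.iter().find(...)` expression above is the heart of the rewrite: pick the first cached entry that is a Category and appears in the product's relations, falling back to an "unknown" slug. The same lookup extracted into a function, with stand-in types and `.any()` in place of `.find(...).is_some()`:

```rust
#[derive(Debug, Clone, PartialEq)]
enum XmlDataType {
    Category,
    Product,
}

#[derive(Debug, Clone)]
struct XmlData {
    id: String,
    slug: String,
    relations: Vec<String>,
    data_type: XmlDataType,
}

// First cached entry that is a Category and appears in the product's relations.
fn category_of<'a>(product: &XmlData, xml_data: &'a [XmlData]) -> Option<&'a XmlData> {
    xml_data.iter().find(|all| {
        product
            .relations
            .iter()
            .any(|rel| all.id == *rel && all.data_type == XmlDataType::Category)
    })
}

fn main() {
    let cat = XmlData {
        id: "cat-1".into(),
        slug: "shoes".into(),
        relations: vec!["prod-1".into()],
        data_type: XmlDataType::Category,
    };
    let prod = XmlData {
        id: "prod-1".into(),
        slug: "sneaker".into(),
        relations: vec!["cat-1".into()],
        data_type: XmlDataType::Product,
    };
    let data = vec![cat.clone(), prod.clone()];
    assert_eq!(
        category_of(&prod, &data).map(|c| c.slug.as_str()),
        Some("shoes")
    );
}
```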
@@ -182,13 +238,146 @@ async fn update_sitemap_category(
     saleor_api_url: &str,
     state: AppState,
 ) -> anyhow::Result<()> {
-    todo!()
+    if let Some(category) = category.category {
+        let mut xml_data = state.xml_cache.get_all(saleor_api_url).await?;
+        let mut affected_product_ids = vec![];
+        let mut new_xml_data = vec![];
+        //check if template of product includes categories in url
+        let is_category_in_product_url = state.sitemap_config.product_template.contains("category");
+        match xml_data
+            .iter_mut()
+            .find(|c| c.id == category.id && c.data_type == XmlDataType::Category)
+        {
+            Some(xml_c) => {
+                // if it changed, update
+                if xml_c.slug == category.slug {
+                    debug!("Category url didn't change, skipping...");
+                    return Ok(());
+                }
+                debug!("Category url changed, updating...");
+                xml_c.slug = category.slug.clone();
+                xml_c.last_modified = chrono::offset::Utc::now().fixed_offset();
+                if is_category_in_product_url {
+                    debug!("{} products affected by change", affected_product_ids.len());
+                    affected_product_ids.append(&mut xml_c.relations.clone());
+                }
+            }
+            None => {
+                //Add category if it doesn't exist
+                debug!("Category not found in cache, adding...");
+                new_xml_data.push(XmlData {
+                    relations: vec![],
+                    last_modified: chrono::offset::Utc::now().fixed_offset(),
+                    data_type: XmlDataType::Category,
+                    slug: category.slug.clone(),
+                    id: category.id.clone(),
+                })
+            }
+        }
+        //update affected products' last_modified
+        if is_category_in_product_url {
+            for prod_id in affected_product_ids {
+                if let Some(xml_prod) = xml_data
+                    .iter_mut()
+                    .find(|p| p.id == prod_id && p.data_type == XmlDataType::Product)
+                {
+                    match xml_prod.relations.iter().find(|c| *c == &category.id) {
+                        Some(_) => {
+                            xml_prod.last_modified = chrono::offset::Utc::now().fixed_offset();
+                        }
+                        None => {
+                            debug!("product in categories relation doesn't have the same relation back, what happened? Fixing...");
+                            xml_prod.relations = vec![category.id.clone()];
+                            xml_prod.last_modified = chrono::offset::Utc::now().fixed_offset();
+                        }
+                    };
+                }
+            }
+        }
+
+        xml_data.append(&mut new_xml_data);
+        let mut category_urls = vec![];
+        let mut product_urls = vec![];
+        //Create urls
+        for x in xml_data.iter() {
+            let mut tt = TinyTemplate::new();
+            if is_category_in_product_url && x.data_type == XmlDataType::Product {
+                tt.add_template("product_url", &state.sitemap_config.product_template)?;
+                let context;
+                //If current xml products category is this changed category, just use that instead
+                //of searching for it again
+                match x.relations.iter().find(|c| *c == &category.id) {
+                    Some(_) => {
+                        context = ProductUpdated {
+                            product: Some(Product {
+                                id: x.id.clone(),
+                                slug: x.slug.clone(),
+                                category: Some(Category {
+                                    slug: category.slug.clone(),
+                                    id: category.id.clone(),
+                                }),
+                            }),
+                        };
+                    }
+                    None => {
+                        context = ProductUpdated {
+                            product: Some(Product {
+                                id: x.id.clone(),
+                                slug: x.slug.clone(),
+                                category: match xml_data.iter().find(|all| {
+                                    x.relations
+                                        .iter()
+                                        .find(|rel| {
+                                            all.id == **rel
+                                                && all.data_type == XmlDataType::Category
+                                        })
+                                        .is_some()
+                                }) {
+                                    Some(c) => Some(Category {
+                                        slug: c.slug.clone(),
+                                        id: c.id.clone(),
+                                    }),
+                                    None => Some(Category {
+                                        slug: "unknown".to_owned(),
+                                        id: cynic::Id::new("unknown".to_owned()),
+                                    }),
+                                },
+                            }),
+                        };
+                    }
+                }
+                product_urls.push(tt.render("product_url", &context)?);
+            }
+            if x.data_type == XmlDataType::Category {
+                tt.add_template("category_url", &state.sitemap_config.category_template)?;
+                let context = CategoryUpdated {
+                    category: Some(Category2 {
+                        id: x.id.clone(),
+                        slug: x.slug.clone(),
+                        products: None,
+                    }),
+                };
+                category_urls.push(tt.render("category_url", &context)?);
+            }
+        }
+        //and write
+        if is_category_in_product_url {
+            write_xml(product_urls, &state, XmlDataType::Product).await?;
+        }
+        write_xml(category_urls, &state, XmlDataType::Category).await?;
+    } else {
+        error!("Failed to update category, e:{:?}", category);
+        anyhow::bail!("Category not present in webhook");
+    }
+    info!("Sitemap updated, cause: category");
+    Ok(())
 }
 async fn update_sitemap_collection(
     collection: CollectionUpdated,
     saleor_api_url: &str,
     state: AppState,
 ) -> anyhow::Result<()> {
+    info!("Sitemap updated, cause: collection");
     todo!()
 }
 async fn update_sitemap_page(
@@ -196,13 +385,26 @@ async fn update_sitemap_page(
     saleor_api_url: &str,
     state: AppState,
 ) -> anyhow::Result<()> {
+    info!("Sitemap updated, cause: collection");
     todo!()
 }
 
-async fn write_xml(urls: Vec<String>, mut file: RwLock<File>) -> anyhow::Result<()> {
-    let mut f = file.write()?;
+async fn write_xml(
+    urls: Vec<String>,
+    state: &AppState,
+    type_group: XmlDataType,
+) -> anyhow::Result<()> {
+    //Acquire lock first, so only one write_xml function can start computing
+    let mut f = File::options()
+        .create(true)
+        .write(true)
+        .open(format!(
+            "{}/sitemap-{:?}-0.xml",
+            state.sitemap_config.target_folder, type_group
+        ))
+        .await?;
     let mut sitemap_urls: Vec<Url> = vec![];
-    for url in urls {
+    for url in urls.clone() {
         sitemap_urls.push(
             Url::builder(url)
                 .change_frequency(ChangeFrequency::Weekly)
@@ -212,10 +414,110 @@ async fn write_xml(urls: Vec<String>, mut file: RwLock<File>) -> anyhow::Result<
     }
     let url_set: UrlSet = UrlSet::new(sitemap_urls)?;
     debug!("Writing xml into file");
-    f.set_len(0)?;
 
+    //f.set_len(0)?;
     let mut buf = Vec::<u8>::new();
     url_set.write(&mut buf)?;
-    f.write_all(&buf)?;
-    //let mut gzip = GzEncoder::new(f, Compression::default());
-    todo!()
+    //TODO: Gzip the buffer before testing size. Size limit per sitemap should be ~= 10mb
+
+    //now check if buffer's over limit, else slice em up into multiple sitemaps
+    let len = buf.len() * std::mem::size_of::<u8>();
+    if len > 200000 {
+        let file_amount = (len as f32 / 150000 as f32).ceil() as usize;
+        let sliced_urls: Vec<&[String]> = urls.chunks(file_amount).collect();
+
+        let mut sitemaps: Vec<UrlSet> = vec![];
+        for urls in sliced_urls {
+            for url in urls {
+                let mut sitemap_urls: Vec<Url> = vec![];
+                sitemap_urls.push(
+                    Url::builder(url.to_owned())
+                        .change_frequency(ChangeFrequency::Weekly)
+                        .last_modified(chrono::offset::Utc::now().fixed_offset())
+                        .build()?,
+                );
+                sitemaps.push(UrlSet::new(sitemap_urls)?);
+            }
+        }
+
+        for (i, sitemap) in sitemaps.into_iter().enumerate() {
+            let mut new_buf = Vec::<u8>::new();
+            sitemap.write(&mut new_buf)?;
+            let len = new_buf.len() * std::mem::size_of::<u8>();
+            if len > 200000 {
+                error!("Sitemap is too big even after splitting. Gosh I wish I was better at math")
+            }
+            let mut f = File::options()
+                .create(true)
+                .write(true)
+                .open(format!(
+                    "{}/sitemap-{:?}-{i}.xml",
+                    state.sitemap_config.target_folder, type_group
+                ))
+                .await?;
+            f.write_all(&new_buf).await?;
+        }
+    } else {
+        f.write_all(&buf).await?;
+    }
+    //let mut gzip = GzEncoder::new(f, Compression::default());
+    update_sitemap_index(state).await?;
     Ok(())
 }
 
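One caveat on the splitting branch above: `chunks(file_amount)` takes a chunk size, not a number of chunks, and the inner loop builds a single-URL `UrlSet` per URL rather than one per chunk, so the output will not land near the ~150 kB target. A sketch of the presumably intended division, purely illustrative:

```rust
// Split `urls` into `file_amount` roughly equal chunks (the last may be smaller).
fn split_for_sitemaps(urls: &[String], file_amount: usize) -> Vec<Vec<String>> {
    let chunk_size = urls.len().div_ceil(file_amount.max(1)).max(1);
    urls.chunks(chunk_size).map(|c| c.to_vec()).collect()
}

fn main() {
    let urls: Vec<String> = (0..10).map(|i| format!("https://example.com/{i}")).collect();
    let files = split_for_sitemaps(&urls, 3);
    assert_eq!(files.len(), 3); // chunks of 4, 4, and 2 URLs
    assert_eq!(files[0].len(), 4);
}
```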
+async fn update_sitemap_index(state: &AppState) -> anyhow::Result<()> {
+    use std::fs::read_dir;
+    let dir = read_dir(&state.sitemap_config.target_folder)?;
+    let paths = dir
+        .filter_map(|f| f.ok())
+        .map(|e| e.path())
+        .filter_map(|path| {
+            if path
+                .extension()
+                .map_or(false, |ext| ext == "xml" || ext == "gz")
+                && !path.to_string_lossy().to_string().contains("sitemap_index")
+            {
+                Some(path)
+            } else {
+                None
+            }
+        })
+        .collect::<Vec<_>>();
+
+    let sitemaps: Vec<Sitemap> = paths
+        .into_iter()
+        .map(|p| {
+            Sitemap::new(
+                format!(
+                    "{}/{}",
+                    state.sitemap_config.index_hostname,
+                    p.file_name()
+                        .expect("file disappeared or broke during sitemap-index construction")
+                        .to_string_lossy()
+                        .to_string()
+                ),
+                p.metadata().map_or(None, |meta| {
+                    meta.modified().map_or(None, |modified| {
+                        let dt_utc: DateTime<Utc> = modified.into();
+                        Some(dt_utc.fixed_offset())
+                    })
+                }),
+            )
+        })
+        .collect::<Vec<_>>();
+    let sitemap_index = SitemapIndex::new(sitemaps)?;
+    let mut file = File::options()
+        .create(true)
+        .write(true)
+        .open(format!(
+            "{}/sitemap-index.xml",
+            state.sitemap_config.target_folder
+        ))
+        .await?;
+
+    let mut buf = Vec::<u8>::new();
+    sitemap_index.write(&mut buf)?;
+    file.write_all(&mut buf).await?;
+
+    Ok(())
+}
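The index step uses sitemap_rs just as above: one `Sitemap` per generated file URL, collected into a `SitemapIndex` and serialized to a byte buffer. The same flow extracted into a standalone sketch; hostname and file names are placeholders:

```rust
use sitemap_rs::{sitemap::Sitemap, sitemap_index::SitemapIndex};

// Build the sitemap-index XML for a set of already-written sitemap files.
fn build_index(hostname: &str, file_names: &[&str]) -> anyhow::Result<Vec<u8>> {
    let sitemaps: Vec<Sitemap> = file_names
        .iter()
        .map(|name| Sitemap::new(format!("{hostname}/{name}"), None))
        .collect();
    let index = SitemapIndex::new(sitemaps)?;
    let mut buf = Vec::new();
    index.write(&mut buf)?;
    Ok(buf)
}

fn main() -> anyhow::Result<()> {
    let xml = build_index(
        "https://example.com",
        &["sitemap-Product-0.xml", "sitemap-Category-0.xml"],
    )?;
    println!("{}", String::from_utf8(xml)?);
    Ok(())
}
```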