Compare commits

...

8 commits

Author SHA1 Message Date
Lukasz Ostrowski 55156782cf trying to connect pino but failed 2023-08-03 10:27:48 +02:00
Lukasz Ostrowski c7d0619dca demo setup with otel traces 2023-08-03 09:58:12 +02:00
Lukasz Ostrowski 2393100b6c otel 2023-08-03 09:15:22 +02:00
Lukasz Ostrowski 51de258d6e display history of indexed jobs 2023-07-03 16:37:55 +02:00
Lukasz Ostrowski f6a98e0e7e Add table for storing jobs in db 2023-06-07 13:51:26 +02:00
Lukasz Ostrowski 2bd659a1b4 Add indexing job model 2023-06-07 12:56:23 +02:00
Lukasz Ostrowski d84744bfab Cleanup 2023-06-07 11:09:05 +02:00
Lukasz Ostrowski 7240f6efa7 Add docker, prisma and worker POC
Add docker-compose with Postgres

Install Prisma and generate empty schema

Install Prisma client

Add app config model and migration

Add repository for Algolia Configuration

Migrate metadata to postgres

Replace webhooks metadata with Prisma

Add worker and skeleton code

Implement worker job and remove it from the frontend

Attempt to display jobs list

Worker utils

Run worker in the same thread on dev

Run worker in the same thread on dev

Build scripts

fix dev mode

Dockerfiles

prod dockerfiles

docker wip

docker wip

wip working docker

wip working docker

wip - working without prisma migrate
2023-06-07 11:07:37 +02:00
63 changed files with 2945 additions and 839 deletions

.dockerignore (new file)

@@ -0,0 +1,2 @@
node_modules/
**/.env

.gitignore

@@ -41,3 +41,4 @@ apps/**/generated
.eslintcache
.sentryclirc
.vscode/

@@ -17,7 +17,7 @@
"@material-ui/core": "^4.12.4",
"@material-ui/icons": "^4.11.3",
"@material-ui/lab": "4.0.0-alpha.61",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/macaw-ui": "^0.7.2",
"@sentry/nextjs": "^7.43.0",

@@ -13,7 +13,7 @@
},
"dependencies": {
"@mailchimp/mailchimp_marketing": "^3.0.80",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/macaw-ui": "0.8.0-pre.84",
"@sentry/nextjs": "^7.52.1",

@@ -15,7 +15,7 @@
"@material-ui/core": "^4.12.4",
"@material-ui/icons": "^4.11.3",
"@material-ui/lab": "4.0.0-alpha.61",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/macaw-ui": "^0.7.2",
"@sentry/nextjs": "^7.39.0",

@@ -14,7 +14,7 @@
"dependencies": {
"@hookform/resolvers": "^3.1.0",
"@monaco-editor/react": "^4.4.6",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/apps-ui": "workspace:*",
"@saleor/macaw-ui": "0.8.0-pre.84",

@@ -13,7 +13,7 @@
},
"dependencies": {
"@hookform/resolvers": "^3.1.0",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/macaw-ui": "0.8.0-pre.84",
"@sentry/nextjs": "^7.36.0",

@@ -14,7 +14,7 @@
"@material-ui/core": "^4.12.4",
"@material-ui/icons": "^4.11.3",
"@material-ui/lab": "4.0.0-alpha.61",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/macaw-ui": "^0.7.2",
"@sentry/nextjs": "^7.36.0",

@@ -14,7 +14,7 @@
"@material-ui/core": "^4.12.4",
"@material-ui/icons": "^4.11.3",
"@material-ui/lab": "4.0.0-alpha.61",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/macaw-ui": "^0.7.2",
"@urql/exchange-auth": "^1.0.0",

@@ -14,7 +14,7 @@
"dependencies": {
"@aws-sdk/client-s3": "^3.332.0",
"@hookform/resolvers": "^3.1.0",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/apps-ui": "workspace:*",
"@saleor/macaw-ui": "0.8.0-pre.84",

@@ -0,0 +1,2 @@
node_modules/
**/.env

@@ -2,3 +2,9 @@
SECRET_KEY=
APP_LOG_LEVEL=info
DATABASE_URL=postgresql://USER:PASSWORD@HOST:PORT/DATABASE
# Conditionally run the worker inside the Next.js process using the instrumentation hook (src/instrumentation.ts)
# This is handy for development, but in production the worker should be a separate process
RUN_WORKER_IN_NEXT_PROCESS=false

apps/search/.gitignore (new file)

@@ -0,0 +1 @@
worker-dist/

@@ -0,0 +1,67 @@
# Source
# https://turbo.build/repo/docs/handbook/deploying-with-docker#example
# TODO https://pnpm.io/cli/fetch
FROM node:18 AS base
FROM base AS builder
#RUN apk add --no-cache libc6-compat
#RUN apk update
# Set working directory
WORKDIR /app
RUN yarn global add turbo@1.9.1
RUN yarn global add pnpm@8.2.0
# Copy entire monorepo
COPY . .
RUN turbo prune --scope="saleor-app-search" --docker
# Add lockfile and package.json's of isolated subworkspace
FROM base AS installer
WORKDIR /app
RUN yarn global add pnpm@8.2.0
ARG DATABASE_URL
ENV DATABASE_URL=${DATABASE_URL}
COPY .gitignore .gitignore
COPY --from=builder /app/out/full/ .
#COPY --from=builder /app/out/json/ .
COPY --from=builder /app/out/pnpm-lock.yaml ./pnpm-lock.yaml
COPY --from=builder /app/out/pnpm-workspace.yaml ./pnpm-workspace.yaml
RUN pnpm install --frozen-lockfile
COPY turbo.json turbo.json
RUN pnpm turbo run build:app --filter="saleor-app-search"
FROM base AS runner
WORKDIR /app
ARG DATABASE_URL
ENV DATABASE_URL=${DATABASE_URL}
# Don't run production as root
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs
USER nextjs
COPY --from=installer /app/apps/search/next.config.js .
COPY --from=installer /app/apps/search/package.json .
# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=installer --chown=nextjs:nodejs /app/apps/search/.next/standalone ./
COPY --from=installer --chown=nextjs:nodejs /app/apps/search/.next/static ./apps/search/.next/static
COPY --from=installer --chown=nextjs:nodejs /app/apps/search/public ./apps/search/public
COPY --from=installer --chown=nextjs:nodejs /app/apps/search/prisma ./apps/search/prisma
CMD ["node", "apps/search/server.js"]
# TODO Another entrypoint for worker

@@ -15,6 +15,9 @@ const isSentryPropertiesInEnvironment =
*/
const moduleExports = {
reactStrictMode: true,
experimental: {
instrumentationHook: true
},
images: {
remotePatterns: [
{
@@ -37,6 +40,7 @@ const moduleExports = {
disableServerWebpackPlugin: !isSentryPropertiesInEnvironment,
disableClientWebpackPlugin: !isSentryPropertiesInEnvironment,
},
output: "standalone",
};
const sentryWebpackPluginOptions = {

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -0,0 +1,22 @@
# OpenTelemetry Collector Demo
This directory contains a very simple OpenTelemetry setup. It was copied from [`open-telemetry/opentelemetry-collector-contrib`](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/examples/demo).
We modified it so it can be used for developing your Next.js application without any changes. Just run the following command to start all the services provided here:
```shell
docker-compose up -d
```
That will expose the following backends:
- Jaeger at http://0.0.0.0:16686
- Zipkin at http://0.0.0.0:9411
- Prometheus at http://0.0.0.0:9090
Notes:
- It may take some time for application metrics to appear on the Prometheus dashboard.
To clean up the demo's Docker containers, run `docker-compose down`.
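A small sketch of pointing the app's tracing at this collector, assuming the `@opentelemetry/exporter-trace-otlp-http` exporter that the app adds later in this diff:

```ts
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";

// Targets the collector's OTLP HTTP receiver (port 4318, mapped in the
// compose file that follows). Omitting `url` uses this same endpoint by default.
const exporter = new OTLPTraceExporter({
  url: "http://localhost:4318/v1/traces",
});
```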

@@ -0,0 +1,46 @@
version: "2"
services:
# Jaeger
jaeger-all-in-one:
image: jaegertracing/all-in-one:latest
restart: always
ports:
- "16686:16686"
- "14268"
- "14250"
# Zipkin
zipkin-all-in-one:
image: openzipkin/zipkin:latest
restart: always
ports:
- "9411:9411"
# Collector
otel-collector:
image: ${OTELCOL_IMG}
restart: always
command: ["--config=/etc/otel-collector-config.yaml", "${OTELCOL_ARGS}"]
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
ports:
- "1888:1888" # pprof extension
- "8888:8888" # Prometheus metrics exposed by the collector
- "8889:8889" # Prometheus exporter metrics
- "13133:13133" # health_check extension
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
- "55679:55679" # zpages extension
depends_on:
- jaeger-all-in-one
- zipkin-all-in-one
prometheus:
container_name: prometheus
image: prom/prometheus:latest
restart: always
volumes:
- ./prometheus.yaml:/etc/prometheus/prometheus.yml
ports:
- "9090:9090"

@@ -0,0 +1,44 @@
receivers:
otlp:
protocols:
grpc:
http:
exporters:
prometheus:
endpoint: "0.0.0.0:8889"
const_labels:
label1: value1
logging:
zipkin:
endpoint: "http://zipkin-all-in-one:9411/api/v2/spans"
format: proto
jaeger:
endpoint: jaeger-all-in-one:14250
tls:
insecure: true
processors:
batch:
extensions:
health_check:
pprof:
endpoint: :1888
zpages:
endpoint: :55679
service:
extensions: [pprof, zpages, health_check]
pipelines:
traces:
receivers: [otlp]
processors: [batch]
exporters: [logging, zipkin, jaeger]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [logging, prometheus]
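With this pipeline, any span received over OTLP is forwarded to Jaeger and Zipkin. A hedged smoke-test sketch, assuming the NodeSDK from `apps/search/src/otel.ts` (later in this diff) has already been started:

```ts
import { trace } from "@opentelemetry/api";

// Emits a single test span; the traces pipeline above should forward it
// to Jaeger (http://0.0.0.0:16686) and Zipkin (http://0.0.0.0:9411).
const tracer = trace.getTracer("collector-smoke-test");

tracer.startActiveSpan("smoke-test", (span) => {
  span.setAttribute("demo", true);
  span.end();
});
```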

@@ -0,0 +1,6 @@
scrape_configs:
- job_name: 'otel-collector'
scrape_interval: 10s
static_configs:
- targets: ['otel-collector:8889']
- targets: ['otel-collector:8888']

@@ -2,18 +2,33 @@
"name": "saleor-app-search",
"version": "1.9.3",
"scripts": {
"build": "pnpm generate && next build",
"dev": "pnpm generate && NODE_OPTIONS='--inspect' next dev",
"build": "concurrently \"pnpm build:app\" \"pnpm build:worker\"",
"build:app": "pnpm generate && next build",
"build:worker": "pnpm generate && tsup src/worker/runner.ts --outDir worker-dist",
"dev": "concurrently \"pnpm dev:app\" \"pnpm dev:worker\"",
"dev:app": "pnpm generate && NODE_OPTIONS='--inspect' next dev",
"dev:worker": "pnpm generate && tsx src/worker/runner.ts --watch",
"fetch-schema": "curl https://raw.githubusercontent.com/saleor/saleor/${npm_package_saleor_schemaVersion}/saleor/graphql/schema.graphql > graphql/schema.graphql",
"generate": "graphql-codegen",
"generate": "graphql-codegen && npm run prisma:generate",
"lint": "next lint",
"lint:fix": "eslint --fix .",
"start": "next start",
"prisma:generate": "prisma generate",
"start:app": "next start",
"start:worker": "node worker-dist/runner.js",
"test": "vitest"
},
"dependencies": {
"@hookform/resolvers": "^3.1.0",
"@saleor/app-sdk": "0.39.1",
"@opentelemetry/api": "^1.4.1",
"@opentelemetry/exporter-trace-otlp-http": "^0.41.1",
"@opentelemetry/instrumentation": "^0.41.1",
"@opentelemetry/instrumentation-pino": "^0.34.0",
"@opentelemetry/resources": "^1.15.1",
"@opentelemetry/sdk-node": "^0.41.1",
"@opentelemetry/sdk-trace-node": "^1.15.1",
"@opentelemetry/semantic-conventions": "^1.15.1",
"@prisma/client": "^4.15.0",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/apps-ui": "workspace:*",
"@saleor/macaw-ui": "^0.8.0-pre.84",
@@ -24,12 +39,16 @@
"algoliasearch": "4.14.2",
"clsx": "^1.2.1",
"debug": "^4.3.4",
"dotenv": "^16.1.4",
"graphile-worker": "^0.13.0",
"graphql": "16.6.0",
"graphql-tag": "^2.12.6",
"next": "13.3.0",
"next-urql": "4.0.0",
"pino": "^8.14.1",
"pino-opentelemetry-transport": "^0.1.0",
"pino-pretty": "^10.0.0",
"prisma": "^4.15.0",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-helmet": "^6.1.0",
@@ -51,9 +70,12 @@
"@types/react": "~18.0.38",
"@types/react-dom": "^18.0.8",
"@vitejs/plugin-react": "4.0.0",
"concurrently": "^8.1.0",
"eslint": "8.42.0",
"eslint-config-saleor": "workspace:*",
"node-mocks-http": "^1.12.2",
"tsup": "^6.7.0",
"tsx": "^3.12.7",
"typescript": "5.1.3",
"vite": "4.3.9",
"vitest": "0.31.3"

@@ -0,0 +1,13 @@
-- CreateTable
CREATE TABLE "AlgoliaConfiguration" (
"id" SERIAL NOT NULL,
"appId" TEXT NOT NULL,
"indexNamePrefix" TEXT,
"secretKey" TEXT NOT NULL,
"saleorApiUrl" TEXT NOT NULL,
CONSTRAINT "AlgoliaConfiguration_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "AlgoliaConfiguration_saleorApiUrl_key" ON "AlgoliaConfiguration"("saleorApiUrl");

@@ -0,0 +1,13 @@
-- CreateTable
CREATE TABLE "IndexJob" (
"id" SERIAL NOT NULL,
"jobId" INTEGER NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"createdBy" TEXT NOT NULL,
"status" TEXT NOT NULL,
CONSTRAINT "IndexJob_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "IndexJob_jobId_key" ON "IndexJob"("jobId");

@@ -0,0 +1,8 @@
/*
Warnings:
- Added the required column `ownerSaleor` to the `IndexJob` table without a default value. This is not possible if the table is not empty.
*/
-- AlterTable
ALTER TABLE "IndexJob" ADD COLUMN "ownerSaleor" TEXT NOT NULL;

@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"

@@ -0,0 +1,28 @@
// This is your Prisma schema file,
// learn more about it in the docs: https://pris.ly/d/prisma-schema
generator client {
provider = "prisma-client-js"
}
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
model AlgoliaConfiguration {
id Int @id @default(autoincrement())
appId String
indexNamePrefix String?
secretKey String // TODO encryption
saleorApiUrl String @unique // Reference - maybe a unique ID instead? As-is, two apps installed on the same Saleor share this config
}
model IndexJob {
id Int @id @default(autoincrement())
jobId Int @unique // todo make it ID instead?
createdAt DateTime @default(now())
createdBy String
status String // probably enum
ownerSaleor String // maybe we should have a table with installed Saleor instances
}
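Because `jobId` is marked `@unique`, the generated client can address rows by it directly. A minimal sketch, assuming the client generated from this schema:

```ts
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// jobId is @unique, so the generated client accepts it as a findUnique selector.
async function getJobByGraphileId(jobId: number) {
  return prisma.indexJob.findUnique({ where: { jobId } });
}
```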

@@ -37,15 +37,6 @@ switch (aplType) {
}
}
if (!process.env.SECRET_KEY && process.env.NODE_ENV === "production") {
throw new Error(
"For production deployment SECRET_KEY is mandatory to use EncryptedSettingsManager."
);
}
// Use placeholder value for the development
export const settingsManagerSecretKey = process.env.SECRET_KEY || "CHANGE_ME";
export const saleorApp = new SaleorApp({
apl,
});

@@ -0,0 +1,52 @@
import React from "react";
import { Prisma } from "@prisma/client";
import { useAuthenticatedFetch } from "@saleor/app-sdk/app-bridge";
import { useQuery } from "react-query";
import { Box, Text } from "@saleor/macaw-ui/next";
const fetchJobs = (fetch: typeof window.fetch) =>
fetch("/api/jobs").then((r: any) => r.json() as Prisma.IndexJob[]);
export const ImportHistory = () => {
const fetch = useAuthenticatedFetch();
const { data } = useQuery<Prisma.IndexJob[]>({
queryFn: () => fetchJobs(fetch),
refetchInterval: 5000,
});
if (!data) {
return null;
}
return (
<Box marginTop={8} as={"table"} width={"100%"}>
<Box as={"thead"}>
<td>
<Text>Job ID</Text>
</td>
<td>
<Text>Created at</Text>
</td>
<td>
<Text>Createdy by</Text>
</td>
<td>
<Text>Status</Text>
</td>
</Box>
{data.map((job) => (
<tr key={job.jobId}>
<td>{job.jobId}</td>
<td>
{Intl.DateTimeFormat(["en"], { timeStyle: "medium", dateStyle: "medium" }).format(
new Date(job.createdAt)
)}
</td>
<td>{job.createdBy}</td>
<td>{job.status}</td>
</tr>
))}
</Box>
);
};

@@ -1,19 +1,14 @@
import { Box, Button, Text } from "@saleor/macaw-ui/next";
import React, { useCallback, useEffect, useMemo, useState } from "react";
import React, { useEffect, useMemo, useState } from "react";
import { AlgoliaSearchProvider } from "../lib/algolia/algoliaSearchProvider";
import { useConfiguration } from "../lib/configuration";
import { Products, useQueryAllProducts } from "./useQueryAllProducts";
import { useWebhooksStatus } from "../lib/useWebhooksStatus";
const BATCH_SIZE = 100;
import { useAuthenticatedFetch } from "@saleor/app-sdk/app-bridge";
import { ImportHistory } from "./ImportHistory";
export const ImportProductsToAlgolia = () => {
const [algoliaConfigured, setAlgoliaConfigured] = useState<null | boolean>(null);
const [started, setStarted] = useState(false);
const [currentProductIndex, setCurrentProductIndex] = useState(0);
const [isAlgoliaImporting, setIsAlgoliaImporting] = useState(false);
const fetch = useAuthenticatedFetch();
const products = useQueryAllProducts(!started);
const [algoliaConfigured, setAlgoliaConfigured] = useState<null | boolean>(null);
const algoliaConfiguration = useConfiguration();
@@ -32,10 +27,6 @@ export const ImportProductsToAlgolia = () => {
algoliaConfiguration?.data?.secretKey,
]);
const importProducts = useCallback(() => {
setStarted(true);
}, []);
useEffect(() => {
if (searchProvider) {
searchProvider
@@ -45,40 +36,19 @@
}
}, [searchProvider]);
useEffect(() => {
if (!searchProvider || isAlgoliaImporting || products.length <= currentProductIndex) {
return;
}
(async () => {
setIsAlgoliaImporting(true);
const productsBatchStartIndex = currentProductIndex;
const productsBatchEndIndex = Math.min(currentProductIndex + BATCH_SIZE, products.length);
const productsBatch = products.slice(productsBatchStartIndex, productsBatchEndIndex);
await searchProvider.updatedBatchProducts(productsBatch);
setIsAlgoliaImporting(false);
setCurrentProductIndex(productsBatchEndIndex);
})();
}, [searchProvider, currentProductIndex, isAlgoliaImporting, products]);
return (
<Box __cursor={started ? "wait" : "auto"}>
<Box>
{searchProvider && algoliaConfigured ? (
<Box>
<Text variant={"heading"} as={"p"} marginBottom={4}>
Importing products & variants
</Text>
<Text as={"p"}>
Trigger initial indexing for products catalogue. It can take few minutes.{" "}
Trigger initial indexing for products catalogue. It can take a few minutes and will run in
the background.
</Text>
<Text marginBottom={8} variant={"bodyStrong"}>
Do not close the app - its running client-side
</Text>
<Box display={"flex"} justifyContent={"flex-end"}>
<Button disabled={started || !searchProvider} onClick={importProducts}>
Start importing
</Button>
<Box display={"flex"} justifyContent={"flex-end"} marginTop={13}>
<Button onClick={() => fetch("/api/index-products")}>Start importing</Button>
</Box>
</Box>
) : (
@@ -89,32 +59,7 @@
<Text>Configure Algolia first</Text>
</Box>
)}
{started && (
<div
style={{
marginTop: "20px",
display: "flex",
flexDirection: "column",
alignItems: "center",
}}
>
{countVariants(products, currentProductIndex)} /{" "}
{countVariants(products, products.length)}
<progress
value={currentProductIndex}
max={products.length}
style={{
height: "30px",
width: "500px",
maxWidth: "100%",
}}
/>
</div>
)}
<ImportHistory />
</Box>
);
};
const countVariants = (products: Products, index: number) =>
products.slice(0, index).reduce((acc, p) => acc + (p.variants?.length ?? 0), 0);

@@ -1,78 +0,0 @@
import { useAppBridge } from "@saleor/app-sdk/app-bridge";
import { useEffect, useState } from "react";
import {
ChannelsDocument,
ProductsDataForImportDocument,
ProductsDataForImportQuery,
} from "../../generated/graphql";
import { nextClient } from "../lib/graphql";
const PER_PAGE = 100;
export type Products = NonNullable<
ProductsDataForImportQuery["products"]
>["edges"][number]["node"][];
export const useQueryAllProducts = (paused: boolean) => {
const { appBridgeState } = useAppBridge();
const saleorApiUrl = appBridgeState?.saleorApiUrl!;
const [products, setProducts] = useState<Products>([]);
useEffect(() => {
if (paused) {
return;
}
if (!appBridgeState?.token) {
return;
}
const token = appBridgeState.token;
const client = nextClient(saleorApiUrl, () => Promise.resolve({ token }));
if (!client) {
return;
}
const getChannels = () => client.query(ChannelsDocument, {}).toPromise();
const getProducts = async (channelSlug: string, cursor: string): Promise<void> => {
const response = await client
.query(ProductsDataForImportDocument, {
after: cursor,
first: PER_PAGE,
channel: channelSlug!,
})
.toPromise();
const newProducts = response?.data?.products?.edges.map((e) => e.node) ?? [];
if (newProducts.length > 0) {
setProducts((ps) => [...ps, ...newProducts]);
}
if (
response?.data?.products?.pageInfo.hasNextPage &&
response?.data?.products?.pageInfo.endCursor
) {
// get next page of products
return getProducts(channelSlug, response.data.products?.pageInfo.endCursor);
} else {
// do nothing
return;
}
};
(async () => {
const channels = await getChannels();
// get all products for each channel
await channels.data?.channels?.reduce(async (acc, channel) => {
await acc;
await getProducts(channel.slug, "");
}, Promise.resolve());
})();
}, [appBridgeState?.token, saleorApiUrl, paused]);
return products;
};

@@ -0,0 +1,9 @@
import { PrismaClient } from "@prisma/client";
export const prisma = new PrismaClient();
export type Prisma = typeof prisma;
// todo verify if this is enough cleanup
process.on("exit", () => {
prisma.$disconnect();
});
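On the cleanup TODO: Node's `'exit'` event does not wait for promises, so the `$disconnect()` above may never settle. A hedged alternative is to hook a termination signal, which still allows async work:

```ts
import { prisma } from "./prisma"; // the module above

// 'exit' handlers must be synchronous; a SIGTERM handler can still await
// the disconnect before exiting.
process.on("SIGTERM", async () => {
  await prisma.$disconnect();
  process.exit(0);
});
```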

@@ -0,0 +1,42 @@
import { prisma, Prisma } from "../../db/prisma";
export class AlgoliaConfigurationRepository {
constructor(private prisma: Prisma) {}
getConfiguration(saleorApiUrl: string) {
return this.prisma.algoliaConfiguration.findFirst({
where: {
saleorApiUrl: saleorApiUrl,
},
});
}
setConfiguration(
saleorApiUrl: string,
configuration: {
appId: string;
indexNamePrefix?: string;
secretKey: string;
}
) {
return this.prisma.algoliaConfiguration.upsert({
where: {
saleorApiUrl: saleorApiUrl,
},
create: {
saleorApiUrl,
appId: configuration.appId,
indexNamePrefix: configuration.indexNamePrefix,
secretKey: configuration.secretKey,
},
update: {
saleorApiUrl,
appId: configuration.appId,
indexNamePrefix: configuration.indexNamePrefix,
secretKey: configuration.secretKey,
},
});
}
}
export const algoliaConfigurationRepository = new AlgoliaConfigurationRepository(prisma);
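Because the constructor takes the client as a dependency, tests can pass a stub instead of the real Prisma client, with no module-level mocks. A sketch with a hypothetical hand-rolled stub:

```ts
import { AlgoliaConfigurationRepository } from "./AlgoliaConfigurationRepository";

// Hypothetical stub: only the method under test has to exist.
const prismaStub = {
  algoliaConfiguration: {
    findFirst: async () => ({
      id: 1,
      appId: "test-app",
      indexNamePrefix: null,
      secretKey: "test-secret",
      saleorApiUrl: "https://example.saleor.cloud/graphql/",
    }),
  },
} as any;

const repository = new AlgoliaConfigurationRepository(prismaStub);
```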

@@ -0,0 +1,55 @@
import { prisma, Prisma } from "../../db/prisma";
export class IndexingJobRepository {
constructor(private prisma: Prisma) {}
getJob(jobId: number, saleorApiUrl: string) {
return this.prisma.indexJob.findFirst({
where: {
AND: {
ownerSaleor: saleorApiUrl,
jobId,
},
},
});
}
getJobs(saleorApiUrl: string) {
return this.prisma.indexJob.findMany({
where: {
ownerSaleor: saleorApiUrl,
},
});
}
createPendingJob(
saleorApiUrl: string,
job: {
jobId: number;
createdByEmail: string;
}
) {
return this.prisma.indexJob.create({
data: {
ownerSaleor: saleorApiUrl,
jobId: job.jobId,
createdBy: job.createdByEmail,
status: "PENDING",
},
});
}
// todo should repository verify saleorApiUrl for protection?
updateJobStatus(saleorApiUrl: string, jobId: number, status: "ERROR" | "SUCCESS") {
return this.prisma.indexJob.update({
where: {
jobId: jobId,
},
data: {
status,
},
});
}
}
export const indexingJobRepository = new IndexingJobRepository(prisma);
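On the TODO above: `updateJobStatus` filters by `jobId` alone, so in principle a caller could touch another tenant's row. A hedged variant that enforces the owner in the same query:

```ts
import { prisma } from "../../db/prisma";

// Sketch: scope the status update to the owning Saleor instance.
// updateMany is used because update()'s unique `where` cannot combine the
// unique jobId with the non-unique ownerSaleor column.
function updateJobStatusScoped(
  saleorApiUrl: string,
  jobId: number,
  status: "ERROR" | "SUCCESS"
) {
  return prisma.indexJob.updateMany({
    where: { jobId, ownerSaleor: saleorApiUrl },
    data: { status },
  });
}
```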

@@ -0,0 +1,22 @@
import { prisma } from "./db/prisma";
export const register = async () => {
if (process.env.RUN_WORKER_IN_NEXT_PROCESS === "true" && process.env.NEXT_RUNTIME === "nodejs") {
console.log("RUN_WORKER_IN_NEXT_PROCESS env is set, will inject worker to Next.js process");
/**
* Next does not refresh this file, so it will be hard to develop
*/
await import("./worker/runner").catch();
}
if (process.env.NEXT_RUNTIME === "nodejs") {
await import("./otel.ts");
prisma.$connect().catch((e: any) => {
console.error(e);
console.error("Cant connect to database, will exit");
process.exit(1);
});
}
};

@@ -1,64 +1,20 @@
import { AuthData } from "@saleor/app-sdk/APL";
import { createDebug } from "../debug";
import { createClient } from "../graphql";
import { createSettingsManager } from "../metadata";
import { algoliaConfigurationRepository } from "../../domain/algolia-configuration/AlgoliaConfigurationRepository";
interface GetAlgoliaConfigurationArgs {
authData: AuthData;
}
const debug = createDebug("getAlgoliaConfiguration");
export const getAlgoliaConfiguration = async ({ authData }: GetAlgoliaConfigurationArgs) => {
const client = createClient(authData.saleorApiUrl, async () =>
Promise.resolve({ token: authData.token })
const configuration = await algoliaConfigurationRepository.getConfiguration(
authData.saleorApiUrl
);
const settings = createSettingsManager(client);
try {
const secretKey = await settings.get("secretKey", authData.domain);
if (!secretKey?.length) {
return {
errors: [
{
message:
"Missing secret key to the Algolia API. Please, configure the application first.",
},
],
return configuration
? {
settings: configuration,
}
: {
errors: [{ message: "Configuration doesnt exist" }],
};
}
const appId = await settings.get("appId", authData.domain);
if (!appId?.length) {
return {
errors: [
{
message: "Missing App ID to the Algolia API. Please, configure the application first.",
},
],
};
}
const indexNamePrefix = (await settings.get("indexNamePrefix", authData.domain)) || "";
debug("Configuration fetched");
return {
settings: {
appId,
secretKey,
indexNamePrefix,
},
};
} catch (error) {
debug("Unexpected error during fetching the configuration");
if (error instanceof Error) {
debug(error.message);
}
return {
errors: [{ message: "Couldn't fetch the settings from the API" }],
};
}
};

@@ -1,13 +1,9 @@
import { createLogger as _createLogger } from "@saleor/apps-shared";
import pino from "pino";
/**
* Extend factory to add more settings specific for the app
*/
export const logger = _createLogger(
{},
{
redact: ["token", "secretKey"],
}
);
const otelTransport = pino.transport({
target: "pino-opentelemetry-transport",
});
export const logger = pino(otelTransport);
export const createLogger = logger.child.bind(logger);

@@ -1,84 +0,0 @@
import { EncryptedMetadataManager, MetadataEntry, SettingsManager } from "@saleor/app-sdk/settings-manager";
import { FetchAppDetailsDocument, FetchAppDetailsQuery, UpdateAppMetadataDocument } from "../../generated/graphql";
import { settingsManagerSecretKey } from "../../saleor-app";
import { SimpleGraphqlClient } from "./graphql";
/*
* Function is using urql graphql client to fetch all available metadata.
* Before returning query result, we are transforming response to list of objects with key and value fields
* which can be used by the manager.
* Result of this query is cached by the manager.
*/
export async function fetchAllMetadata(client: SimpleGraphqlClient): Promise<MetadataEntry[]> {
const { error, data } = await client
.query<FetchAppDetailsQuery>(FetchAppDetailsDocument, {})
.toPromise();
if (error) {
console.debug("Error during fetching the metadata: ", error);
return [];
}
return data?.app?.privateMetadata.map((md) => ({ key: md.key, value: md.value })) || [];
}
/*
* Mutate function takes urql client and metadata entries, and construct mutation to the API.
* Before data are send, additional query for required App ID is made.
* The manager will use updated entries returned by this mutation to update it's cache.
*/
export async function mutateMetadata(client: SimpleGraphqlClient, metadata: MetadataEntry[]) {
// to update the metadata, ID is required
const { error: idQueryError, data: idQueryData } = await client
.query(FetchAppDetailsDocument, {})
.toPromise();
if (idQueryError) {
console.debug("Could not fetch the app id: ", idQueryError);
throw new Error(
"Could not fetch the app id. Please check if auth data for the client are valid."
);
}
const appId = idQueryData?.app?.id;
if (!appId) {
console.debug("Missing app id");
throw new Error("Could not fetch the app ID");
}
const { error: mutationError, data: mutationData } = await client
.mutation(UpdateAppMetadataDocument, {
id: appId,
input: metadata,
})
.toPromise();
if (mutationError) {
console.debug("Mutation error: ", mutationError);
throw new Error(`Mutation error: ${mutationError.message}`);
}
return (
mutationData?.updatePrivateMetadata?.item?.privateMetadata.map((md) => ({
key: md.key,
value: md.value,
})) || []
);
}
export const createSettingsManager = (client: SimpleGraphqlClient): SettingsManager => {
/*
* EncryptedMetadataManager gives you interface to manipulate metadata and cache values in memory.
* We recommend it for production, because all values are encrypted.
* If your use case require plain text values, you can use MetadataManager.
*/
return new EncryptedMetadataManager({
// Secret key should be randomly created for production and set as environment variable
encryptionKey: settingsManagerSecretKey,
fetchMetadata: () => fetchAllMetadata(client),
mutateMetadata: (metadata) => mutateMetadata(client, metadata),
});
};

apps/search/src/otel.ts (new file)

@@ -0,0 +1,27 @@
import { trace } from "@opentelemetry/api";
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";
import { Resource } from "@opentelemetry/resources";
import { NodeSDK } from "@opentelemetry/sdk-node";
import { SimpleSpanProcessor } from "@opentelemetry/sdk-trace-node";
import { SemanticResourceAttributes } from "@opentelemetry/semantic-conventions";
const { PinoInstrumentation } = require("@opentelemetry/instrumentation-pino");
const sdk = new NodeSDK({
resource: new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: "saleor.app.search",
}),
spanProcessor: new SimpleSpanProcessor(new OTLPTraceExporter()),
instrumentations: [
new PinoInstrumentation({
// Optional hook to insert additional context to log object.
logHook: (span, record, level) => {
record["resource.service.name"] = "saleor.app.search";
},
}),
// other instrumentations
],
});
sdk.start();
export const tracer = trace.getTracer("saleor.app.search");
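`SimpleSpanProcessor` exports each span synchronously as it ends, which suits a demo; production setups usually batch. A sketch of the swap, assuming the same OTLP exporter:

```ts
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-http";
import { BatchSpanProcessor } from "@opentelemetry/sdk-trace-node";

// Batches spans in memory and exports them periodically instead of per span.
const spanProcessor = new BatchSpanProcessor(new OTLPTraceExporter());
```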

@@ -1,45 +1,20 @@
import type { NextApiRequest, NextApiResponse } from "next";
import { SettingsManager } from "@saleor/app-sdk/settings-manager";
import { createClient } from "../../lib/graphql";
import { createSettingsManager } from "../../lib/metadata";
import { saleorApp } from "../../../saleor-app";
import { createProtectedHandler, ProtectedHandlerContext } from "@saleor/app-sdk/handlers/next";
import { createLogger } from "../../lib/logger";
import { AppConfigurationFields } from "../../domain/configuration";
import { createLogger, logger as otelLogger } from "../../lib/logger";
import { AppConfigurationFields, AppConfigurationSchema } from "../../domain/configuration";
import { AlgoliaSearchProvider } from "../../lib/algolia/algoliaSearchProvider";
import { WebhookActivityTogglerService } from "../../domain/WebhookActivityToggler.service";
import { algoliaConfigurationRepository } from "../../domain/algolia-configuration/AlgoliaConfigurationRepository";
import { tracer } from "../../otel";
const logger = createLogger({
handler: "api/configuration",
});
export interface SettingsApiResponse {
success: boolean;
data?: AppConfigurationFields;
}
const sendResponse = async (
res: NextApiResponse<SettingsApiResponse>,
statusCode: number,
settings: SettingsManager,
domain: string
) => {
const data = {
secretKey: (await settings.get("secretKey", domain)) || "",
appId: (await settings.get("appId", domain)) || "",
indexNamePrefix: (await settings.get("indexNamePrefix", domain)) || "",
};
logger.debug(data, "Will return following settings");
res.status(statusCode).json({
success: statusCode === 200,
data,
});
};
export const handler = async (
req: NextApiRequest,
res: NextApiResponse,
@@ -53,56 +28,77 @@ export const handler = async (
const client = createClient(saleorApiUrl, async () => Promise.resolve({ token: token }));
const settings = createSettingsManager(client);
const domain = new URL(saleorApiUrl).host;
// todo extract endpoints, add trpc
if (req.method === "GET") {
logger.debug("Returning configuration");
logger.info("Returning configuration");
await sendResponse(res, 200, settings, domain);
return;
const configuration = await algoliaConfigurationRepository.getConfiguration(saleorApiUrl);
return configuration
? res.status(200).send({
success: true,
data: AppConfigurationSchema.parse(configuration), // todo probably remove Zod at this point
})
: res.status(404).send({
success: false,
});
} else if (req.method === "POST") {
logger.debug("Updating the configuration");
tracer.startActiveSpan("update-configuration", async (span) => {
console.log("test log console");
otelLogger.warn("test warn");
const { appId, secretKey, indexNamePrefix } = JSON.parse(req.body) as AppConfigurationFields;
span.addEvent("update-configuration POST", {
foo: "BAR",
});
const algoliaClient = new AlgoliaSearchProvider({
appId,
apiKey: secretKey,
indexNamePrefix: indexNamePrefix,
logger.info("Updating the configuration - log logger pino");
const { appId, secretKey, indexNamePrefix } = JSON.parse(req.body) as AppConfigurationFields;
const algoliaClient = new AlgoliaSearchProvider({
appId,
apiKey: secretKey,
indexNamePrefix: indexNamePrefix,
});
try {
logger.info("Will ping Algolia");
await algoliaClient.ping();
logger.debug("Algolia connection is ok. Will save settings");
const configuration = await algoliaConfigurationRepository.setConfiguration(saleorApiUrl, {
appId,
secretKey,
indexNamePrefix,
});
logger.debug("Settings set");
const webhooksToggler = new WebhookActivityTogglerService(ctx.authData.appId, client);
await webhooksToggler.enableOwnWebhooks();
logger.debug("Webhooks enabled");
res.status(200).send({
success: true,
data: AppConfigurationSchema.parse(configuration), // todo probably remove Zod at this point
});
span.end();
return;
} catch (e) {
return res.status(400).end();
}
return;
});
} else {
logger.error("Method not supported");
try {
logger.debug("Will ping Algolia");
await algoliaClient.ping();
logger.debug("Algolia connection is ok. Will save settings");
await settings.set([
{ key: "secretKey", value: secretKey || "", domain },
{ key: "appId", value: appId || "", domain },
{ key: "indexNamePrefix", value: indexNamePrefix || "", domain },
]);
logger.debug("Settings set");
const webhooksToggler = new WebhookActivityTogglerService(ctx.authData.appId, client);
await webhooksToggler.enableOwnWebhooks();
logger.debug("Webhooks enabled");
} catch (e) {
return res.status(400).end();
}
await sendResponse(res, 200, settings, domain);
return;
return res.status(405).end();
}
logger.error("Method not supported");
res.status(405).end();
};
export default createProtectedHandler(handler, saleorApp.apl, ["MANAGE_APPS", "MANAGE_PRODUCTS"]);
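One wrinkle in the POST branch above: `span.end()` runs only on the success path, so spans from the `catch` branch never close. A hedged helper shape that ends the span on every path:

```ts
import { trace } from "@opentelemetry/api";

const tracer = trace.getTracer("saleor.app.search");

// Sketch: the span ends on every path, including the error branch.
async function withSpan<T>(name: string, fn: () => Promise<T>): Promise<T> {
  return tracer.startActiveSpan(name, async (span) => {
    try {
      return await fn();
    } finally {
      span.end();
    }
  });
}
```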

@@ -0,0 +1,24 @@
import { createProtectedHandler } from "@saleor/app-sdk/handlers/next";
import { saleorApp } from "../../../saleor-app";
import { runIndexSaleorProducts } from "../../worker/index-saleor-products/index-saleor-products";
import { indexingJobRepository } from "../../domain/indexing-job/IndexingJobRepository";
export default createProtectedHandler(
async (req, res, ctx) => {
const job = await runIndexSaleorProducts({
saleorApiUrl: ctx.authData.saleorApiUrl,
}); //todo handle error
console.log("Added job");
console.log(job.id);
await indexingJobRepository.createPendingJob(ctx.authData.saleorApiUrl, {
jobId: Number(job.id),
createdByEmail: ctx.user.email,
});
return res.status(200).end();
},
saleorApp.apl,
["MANAGE_APPS", "MANAGE_PRODUCTS"]
);

@@ -0,0 +1,13 @@
import { createProtectedHandler } from "@saleor/app-sdk/handlers/next";
import { saleorApp } from "../../../saleor-app";
import { indexingJobRepository } from "../../domain/indexing-job/IndexingJobRepository";
export default createProtectedHandler(
async (req, res, ctx) => {
const jobs = await indexingJobRepository.getJobs(ctx.authData.saleorApiUrl);
return res.json(jobs);
},
saleorApp.apl,
["MANAGE_APPS"]
);

@@ -3,15 +3,16 @@ import { saleorApp } from "../../../saleor-app";
import { createClient, SimpleGraphqlClient } from "../../lib/graphql";
import { FetchOwnWebhooksDocument } from "../../../generated/graphql";
import { AlgoliaSearchProvider } from "../../lib/algolia/algoliaSearchProvider";
import { createSettingsManager } from "../../lib/metadata";
import {
IWebhookActivityTogglerService,
WebhookActivityTogglerService,
} from "../../domain/WebhookActivityToggler.service";
import { createLogger } from "../../lib/logger";
import { SettingsManager } from "@saleor/app-sdk/settings-manager";
import { Client } from "urql";
import { SearchProvider } from "../../lib/searchProvider";
import {
AlgoliaConfigurationRepository,
algoliaConfigurationRepository,
} from "../../domain/algolia-configuration/AlgoliaConfigurationRepository";
const logger = createLogger({
service: "webhooksStatusHandler",
@@ -21,7 +22,7 @@ const logger = createLogger({
* Simple dependency injection - factory injects all services, in tests everything can be configured without mocks
*/
type FactoryProps = {
settingsManagerFactory: (client: SimpleGraphqlClient) => SettingsManager;
algoliaConfigurationRepository: Pick<AlgoliaConfigurationRepository, "getConfiguration">;
webhookActivityTogglerFactory: (
appId: string,
client: SimpleGraphqlClient
@@ -32,7 +33,7 @@ type FactoryProps = {
export const webhooksStatusHandlerFactory =
({
settingsManagerFactory,
algoliaConfigurationRepository,
webhookActivityTogglerFactory,
algoliaSearchProviderFactory,
graphqlClientFactory,
@@ -43,25 +44,17 @@
*/
const client = graphqlClientFactory(authData.saleorApiUrl, authData.token);
const webhooksToggler = webhookActivityTogglerFactory(authData.appId, client);
const settingsManager = settingsManagerFactory(client);
const domain = new URL(authData.saleorApiUrl).host;
const [secretKey, appId] = await Promise.all([
settingsManager.get("secretKey", domain),
settingsManager.get("appId", domain),
]);
const settings = { secretKey, appId };
logger.debug(settings, "fetched settings");
const configuration = await algoliaConfigurationRepository.getConfiguration(
authData.saleorApiUrl
);
/**
* If settings are incomplete, disable webhooks
*
* TODO Extract config operations to domain/
*/
if (!settings.appId || !settings.secretKey) {
if (!configuration) {
logger.debug("Settings not set, will disable webhooks");
await webhooksToggler.disableOwnWebhooks();
@@ -69,7 +62,10 @@
/**
* Otherwise, if settings are set, check in Algolia if tokens are valid
*/
const algoliaService = algoliaSearchProviderFactory(settings.appId, settings.secretKey);
const algoliaService = algoliaSearchProviderFactory(
configuration.appId,
configuration.secretKey
);
try {
logger.debug("Settings set, will ping Algolia");
@@ -105,7 +101,7 @@
export default createProtectedHandler(
webhooksStatusHandlerFactory({
settingsManagerFactory: createSettingsManager,
algoliaConfigurationRepository: algoliaConfigurationRepository,
webhookActivityTogglerFactory: function (appId, client) {
return new WebhookActivityTogglerService(appId, client);
},

@@ -41,15 +41,17 @@ export const handler: NextWebhookApiHandler<ProductCreated> = async (req, res, c
logger.warn("Aborting due to lack of settings");
logger.debug(errors);
const error = (errors && errors[0] && errors[0].message) ?? "Unknown error";
return res.status(400).json({
message: errors[0].message,
message: error,
});
}
const searchProvider = new AlgoliaSearchProvider({
appId: settings.appId,
apiKey: settings.secretKey,
indexNamePrefix: settings.indexNamePrefix,
indexNamePrefix: settings.indexNamePrefix ?? undefined,
});
const { product } = context.payload;

@@ -42,15 +42,17 @@ export const handler: NextWebhookApiHandler<ProductDeleted> = async (req, res, c
logger.warn("Aborting due to lack of settings");
logger.debug(errors);
const error = (errors && errors[0] && errors[0].message) ?? "Unknown error";
return res.status(400).json({
message: errors[0].message,
message: error,
});
}
const searchProvider = new AlgoliaSearchProvider({
appId: settings.appId,
apiKey: settings.secretKey,
indexNamePrefix: settings.indexNamePrefix,
indexNamePrefix: settings.indexNamePrefix ?? undefined,
});
const { product } = context.payload;

@@ -41,15 +41,17 @@ export const handler: NextWebhookApiHandler<ProductUpdated> = async (req, res, c
logger.warn("Aborting due to lack of settings");
logger.debug(errors);
const error = (errors && errors[0] && errors[0].message) ?? "Unknown error";
return res.status(400).json({
message: errors[0].message,
message: error,
});
}
const searchProvider = new AlgoliaSearchProvider({
appId: settings.appId,
apiKey: settings.secretKey,
indexNamePrefix: settings.indexNamePrefix,
indexNamePrefix: settings.indexNamePrefix ?? undefined,
});
const { product } = context.payload;

@@ -44,15 +44,17 @@ export const handler: NextWebhookApiHandler<ProductVariantCreated> = async (req,
if (errors?.length || !settings) {
logger.warn("Aborting due to lack of settings");
logger.debug(errors);
const error = (errors && errors[0] && errors[0].message) ?? "Unknown error";
return res.status(400).json({
message: errors[0].message,
message: error,
});
}
const searchProvider = new AlgoliaSearchProvider({
appId: settings.appId,
apiKey: settings.secretKey,
indexNamePrefix: settings.indexNamePrefix,
indexNamePrefix: settings.indexNamePrefix ?? undefined,
});
const { productVariant } = context.payload;

@@ -45,15 +45,17 @@ export const handler: NextWebhookApiHandler<ProductVariantDeleted> = async (req,
logger.warn("Aborting due to lack of settings");
logger.debug(errors);
const error = (errors && errors[0] && errors[0].message) ?? "Unknown error";
return res.status(400).json({
message: errors[0].message,
message: error,
});
}
const searchProvider = new AlgoliaSearchProvider({
appId: settings.appId,
apiKey: settings.secretKey,
indexNamePrefix: settings.indexNamePrefix,
indexNamePrefix: settings.indexNamePrefix ?? undefined,
});
const { productVariant } = context.payload;

@@ -45,15 +45,17 @@ export const handler: NextWebhookApiHandler<ProductVariantUpdated> = async (req,
logger.warn("Aborting due to lack of settings");
logger.debug(errors);
const error = (errors && errors[0] && errors[0].message) ?? "Unknown error";
return res.status(400).json({
message: errors[0].message,
message: error,
});
}
const searchProvider = new AlgoliaSearchProvider({
appId: settings.appId,
apiKey: settings.secretKey,
indexNamePrefix: settings.indexNamePrefix,
indexNamePrefix: settings.indexNamePrefix ?? undefined,
});
const { productVariant } = context.payload;

@@ -0,0 +1,93 @@
import { saleorApp } from "../../../saleor-app";
import { createClient } from "../../lib/graphql";
import { algoliaConfigurationRepository } from "../../domain/algolia-configuration/AlgoliaConfigurationRepository";
import { AlgoliaSearchProvider } from "../../lib/algolia/algoliaSearchProvider";
import {
ChannelsDocument,
ProductsDataForImportDocument,
ProductsDataForImportQuery,
} from "../../../generated/graphql";
import { Client } from "urql";
export type Products = NonNullable<
ProductsDataForImportQuery["products"]
>["edges"][number]["node"][];
const getChannels = (client: Client) => client.query(ChannelsDocument, {}).toPromise();
const PER_PAGE = 100;
/**
* TODO - refactor and split into small, tested chunks; out of scope for a POC
*/
export const getProductsAndSendToAlgolia = async (saleorApiUrl: string) => {
let products: Products = [];
const authData = await saleorApp.apl.get(saleorApiUrl);
if (!authData) {
throw new Error(`No auth data found for ${saleorApiUrl}`);
}
const client = createClient(authData.saleorApiUrl, async () => ({
token: authData.token,
}));
const getProducts = async (channelSlug: string, cursor: string): Promise<void> => {
const response = await client
.query(ProductsDataForImportDocument, {
after: cursor,
first: PER_PAGE,
channel: channelSlug!,
})
.toPromise();
const newProducts = response?.data?.products?.edges.map((e) => e.node) ?? [];
if (newProducts.length > 0) {
products = [...products, ...newProducts];
}
if (
response?.data?.products?.pageInfo.hasNextPage &&
response?.data?.products?.pageInfo.endCursor
) {
// get next page of products
return getProducts(channelSlug, response.data.products?.pageInfo.endCursor);
} else {
// do nothing
return;
}
};
await (async () => {
const channels = await getChannels(client);
// get all products for each channel
await channels.data?.channels?.reduce(async (acc, channel) => {
await acc;
await getProducts(channel.slug, "");
}, Promise.resolve());
})();
const configuration = await algoliaConfigurationRepository.getConfiguration(
authData.saleorApiUrl
); // todo handle error
const algolia = new AlgoliaSearchProvider({
appId: configuration!.appId,
apiKey: configuration!.secretKey,
indexNamePrefix: configuration!.indexNamePrefix ?? undefined,
});
let currentProductIndex = 0;
await (async () => {
const productsBatchStartIndex = currentProductIndex;
const productsBatchEndIndex = Math.min(currentProductIndex + PER_PAGE, products.length);
const productsBatch = products.slice(productsBatchStartIndex, productsBatchEndIndex);
await algolia.updatedBatchProducts(productsBatch);
currentProductIndex = productsBatchEndIndex;
})();
};
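As written, the final IIFE runs once, so only the first `PER_PAGE` products reach Algolia; the removed client-side version advanced the index across renders. A sketch of a full pass, reusing `products`, `algolia`, and `PER_PAGE` from the function above:

```ts
// Sketch: send every batch, not just the first one (runs inside the same
// async function, so `await` is available).
let index = 0;

while (index < products.length) {
  const end = Math.min(index + PER_PAGE, products.length);

  await algolia.updatedBatchProducts(products.slice(index, end));
  index = end;
}
```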

@@ -0,0 +1,49 @@
import { Task } from "graphile-worker/dist/interfaces";
import { z } from "zod";
import { getProductsAndSendToAlgolia } from "./get-products-and-send-to-algolia";
import { getWorkerUtils } from "../worker-utils";
import { indexingJobRepository } from "../../domain/indexing-job/IndexingJobRepository";
const payloadSchema = z.object({
saleorApiUrl: z.string().url(),
});
/**
* TODO Is it secure to pass only saleorApiUrl
*
* TODO Refactor to extract all product fetching etc
*/
export const IndexSaleorProducts: Task = async (payload, helpers) => {
/**
* Parse payload - in graphile it's always unknown, so this is a good place to ensure it's correct
*/
const typedPayload = payloadSchema.parse(payload);
/**
* Perform some business logic
*/
await getProductsAndSendToAlgolia(typedPayload.saleorApiUrl);
await indexingJobRepository.updateJobStatus(
typedPayload.saleorApiUrl,
Number(helpers.job.id),
"SUCCESS"
);
};
export const IndexSaleorProductsJobName = "IndexSaleorProducts";
/**
* Factory that pushes the job to the worker
*
* https://github.com/graphile/worker#makeworkerutilsoptions-workerutilsoptions-promiseworkerutils
*/
export const runIndexSaleorProducts = async (payload: z.infer<typeof payloadSchema>) => {
const utils = await getWorkerUtils();
await utils.migrate();
return utils.addJob(IndexSaleorProductsJobName, payload).finally(() => {
return utils.release();
});
};

@@ -0,0 +1,35 @@
import { IndexSaleorProducts } from "./index-saleor-products/index-saleor-products";
require("dotenv").config();
const { run } = require("graphile-worker");
/**
* todo probably use another DB so Prisma will not destroy the queue?
*
* how will it expose itself to Kubernetes?
*/
async function main() {
// Run a worker to execute jobs:
const runner = await run({
connectionString: process.env.DATABASE_URL as string,
concurrency: 5,
// Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc
noHandleSignals: false,
pollInterval: 1000,
taskList: {
IndexSaleorProducts,
},
});
runner.events.on("job:error", ({ job, error, worker }) => {
// todo try to run prisma here
console.log(job);
});
await runner.promise;
}
main().catch((err) => {
console.error(err);
process.exit(1);
});
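One way to close the `job:error` TODO is to persist the failure on the job row. A hedged sketch, reusing `runner` from `main()` above and assuming the payload carries `saleorApiUrl` as the task schema requires:

```ts
import { indexingJobRepository } from "../domain/indexing-job/IndexingJobRepository";

// Sketch: mark the row as failed when graphile-worker reports a job error.
runner.events.on("job:error", async ({ job }) => {
  const { saleorApiUrl } = job.payload as { saleorApiUrl: string };

  await indexingJobRepository.updateJobStatus(saleorApiUrl, Number(job.id), "ERROR");
});
```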

@@ -0,0 +1,18 @@
import { makeWorkerUtils, WorkerUtils } from "graphile-worker";
/**
* Ensure Singleton
*/
let _workerUtils: WorkerUtils | null = null;
export const getWorkerUtils = async () => {
if (_workerUtils) {
return _workerUtils;
}
_workerUtils = await makeWorkerUtils({
connectionString: process.env.DATABASE_URL,
});
return _workerUtils;
};
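A caveat with this cache: `runIndexSaleorProducts` calls `utils.release()` in its `finally`, but `_workerUtils` keeps the released instance, so the next `getWorkerUtils()` call returns a dead pool. A hedged companion that clears the cache on release:

```ts
// Sketch: release and forget the cached instance so the next call rebuilds it.
export const releaseWorkerUtils = async () => {
  if (_workerUtils) {
    await _workerUtils.release();
    _workerUtils = null;
  }
};
```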

@@ -16,5 +16,5 @@
"incremental": true
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
"exclude": ["node_modules"]
"exclude": ["node_modules", "src/worker/runner.ts", "src/worker/worker-utils.ts"]
}

@@ -15,7 +15,10 @@
"SENTRY_PROJECT",
"SENTRY_DSN",
"SENTRY_ORG",
"NEXT_PUBLIC_VERCEL_ENV"
"NEXT_PUBLIC_VERCEL_ENV",
"DATABASE_URL",
"RUN_WORKER_IN_NEXT_PROCESS",
"NEXT_RUNTIME"
]
}
}

@@ -14,7 +14,7 @@
"@material-ui/core": "^4.12.4",
"@material-ui/icons": "^4.11.3",
"@material-ui/lab": "4.0.0-alpha.61",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/macaw-ui": "^0.7.2",
"@sentry/nextjs": "^7.30.0",

@@ -16,7 +16,7 @@
"@material-ui/core": "^4.12.4",
"@material-ui/icons": "^4.11.3",
"@material-ui/lab": "4.0.0-alpha.61",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/apps-shared": "workspace:*",
"@saleor/apps-ui": "workspace:*",
"@saleor/macaw-ui": "^0.7.2",

docker-compose.db.yml (new file)

@@ -0,0 +1,23 @@
# This compose file contains only the database required for local development.
# The app itself can be run locally, outside of a container
#
# TODO Include app and worker services for local development with fullstack setup (and prod setup)
version: '3'
services:
postgres:
container_name: search_app_postgres
image: postgres # Todo maybe some alpine image?
environment:
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
PGDATA: /data/postgres
volumes:
- postgres:/data/postgres
ports:
- "5432:5432"
restart: unless-stopped
volumes:
postgres:

@@ -0,0 +1,57 @@
version: '3'
services:
postgres:
container_name: search_app_postgres
image: postgres # Todo maybe some alpine image?
environment:
POSTGRES_USER: ${POSTGRES_USER:-postgres}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
PGDATA: /data/postgres
volumes:
- postgres:/data/postgres
ports:
- "5432:5432"
restart: unless-stopped
networks:
- saleor-app-search
saleor-app-search:
container_name: saleor-app-search
depends_on:
- postgres
build:
context: .
dockerfile: apps/search/app.prod.Dockerfile
args:
DATABASE_URL: "postgres://postgres:postgres@postgres/postgres"
environment:
APP_DEBUG: "info"
DATABASE_URL: "postgres://postgres:postgres@postgres/postgres"
restart: always
# command: TODO
# - npx prisma migrate deploy
# - node apps/search/server.js
# entrypoint: ["/bin/bash", "./apps/search/run-app.sh"]
ports:
- 3000:3000
networks:
- saleor-app-search
# saleor-app-search-worker:
# container_name: saleor-app-search-worker
# depends_on:
# - postgres
# build:
# context: . # In examples it's ./next-app; we could do that too (todo)
# dockerfile: worker.prod.Dockerfile
# environment:
# DATABASE_URL: "postgres://postgres:postgres@postgres/postgres"
# restart: always
# networks:
# - saleor-app-search
volumes:
postgres:
networks:
saleor-app-search:
driver: bridge

@@ -15,7 +15,7 @@
"@material-ui/core": "^4.12.4",
"@material-ui/icons": "^4.11.3",
"@material-ui/lab": "4.0.0-alpha.61",
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/macaw-ui": "^0.7.2",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",

@@ -9,7 +9,7 @@
"typescript": "5.1.3"
},
"devDependencies": {
"@saleor/app-sdk": "0.39.1",
"@saleor/app-sdk": "0.41.0",
"@saleor/macaw-ui": "0.8.0-pre.84",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",

File diff suppressed because it is too large

@@ -8,6 +8,11 @@
"dependsOn": ["^build"],
"outputs": ["dist/**", ".next/**"]
},
"build:app": {
"env": ["NEXT_PUBLIC_VERCEL_ENV"],
"dependsOn": ["^build"],
"outputs": ["dist/**", ".next/**"]
},
"lint": {
"inputs": ["src"],
"outputs": []