leca 2025-01-29 21:28:17 +03:00
parent 6dafe854b6
commit ec7b733ff6
10 changed files with 963 additions and 200 deletions

3 .gitignore vendored

@@ -130,3 +130,6 @@ dist
 .yarn/install-state.gz
 .pnp.*
+build/
+.env
+service-account-file.json

10 Dockerfile Normal file

@@ -0,0 +1,10 @@
FROM node:22-bullseye
WORKDIR /opt/app
COPY package.json package-lock.json .env eslint.config.mjs tsconfig.json db_schema.sql ./
COPY src ./src
RUN npm i
CMD ["npm", "run", "start"]

8 db_schema.sql Normal file

@@ -0,0 +1,8 @@
CREATE TABLE IF NOT EXISTS warehouses (
  name VARCHAR(64) PRIMARY KEY,
  box_delivery_and_storage_expr INTEGER,
  box_delivery_base INTEGER,
  box_delivery_liter INTEGER,
  box_storage_base REAL,
  box_storage_liter REAL
);
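The name column is the primary key, so repeated imports update a warehouse's tariffs in place instead of inserting duplicates. A minimal sketch of the matching upsert with knex, the same insert/onConflict/merge pattern src/index.ts uses below; the row values here are illustrative:

import knex from 'knex';

// Connection settings as in src/index.ts (DB_HOST, DB_PORT, DB_USER, DB_PASS, DB_NAME).
const db = knex({
  client: 'pg',
  connection: {
    host: process.env.DB_HOST,
    port: Number(process.env.DB_PORT),
    user: process.env.DB_USER,
    password: process.env.DB_PASS,
    database: process.env.DB_NAME
  }
});

// Insert a warehouse row, or merge the new values into the existing row with the same name.
await db('warehouses')
  .insert({ name: 'Коледино', box_delivery_base: 48, box_storage_base: 0.1 })
  .onConflict('name')
  .merge();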

22 docker-compose.yml Normal file

@@ -0,0 +1,22 @@
services:
  database:
    image: 'postgres:15'
    env_file: .env
    ports:
      - 5432:5432
    volumes:
      - ./data/db:/var/lib/postgresql/data
    environment:
      POSTGRES_USER: ${DB_USER}
      POSTGRES_PASSWORD: ${DB_PASS}
      POSTGRES_DB: ${DB_NAME}
    healthcheck:
      test: pg_isready -U $${DB_USER} -d $${DB_NAME}
  api_script:
    build:
      context: .
      dockerfile: Dockerfile
    env_file: .env
    depends_on:
      - database
    restart: on-failure

929 package-lock.json generated
File diff suppressed because it is too large.

package.json

@@ -5,7 +5,8 @@
   "main": "index.js",
   "type": "module",
   "scripts": {
-    "dev":"nodemon src/index.js"
+    "start": "npx tsx --env-file=.env --watch src/index.ts",
+    "build": "npx tsc src/index.ts"
   },
   "imports": {
     "#*": [
@@ -18,9 +19,13 @@
     "axios": "^1.6.2",
     "dotenv": "^16.3.1",
     "express": "^4.18.2",
+    "googleapis": "^144.0.0",
     "knex": "^3.0.1",
     "log4js": "^6.9.1",
-    "pg": "^8.11.3",
+    "pg": "^8.13.1",
+    "tsc": "^2.0.4",
+    "tsx": "^4.19.2",
+    "typescript": "^5.7.3",
     "zod": "^3.23.8"
   },
   "devDependencies": {
@@ -38,7 +43,6 @@
     "eslint-plugin-prettier": "^5.2.1",
     "eslint-plugin-unused-imports": "^4.1.4",
     "jest": "^29.7.0",
-    "nodemon": "^3.1.9",
     "prettier": "^3.3.3",
     "prettier-plugin-jsdoc": "^1.3.0",
     "prettier-plugin-sql": "^0.18.1"

11 sample.env Normal file

@@ -0,0 +1,11 @@
DB_HOST=localhost
DB_PORT=5432
DB_USER=api_user
DB_NAME=api_db
DB_PASS=S0M3S7R0NGPA$$W0RD
#in seconds
TIME_BETWEEN_REQUESTS=10
WB_TOKEN=TOKEN_HERE
SPREADSHEETS_NAME=stocks_coefs
#in format id1:id2:id3:.....
SPREADSHEETS_IDS=
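A short sketch of how src/index.ts consumes these variables (the fallback defaults here are illustrative, not part of the script):

// SPREADSHEETS_IDS is a colon-separated list of spreadsheet ids,
// TIME_BETWEEN_REQUESTS is a number of seconds.
const spreadsheetIds: string[] = process.env.SPREADSHEETS_IDS?.split(':') ?? [];
const intervalMs = Number.parseInt(process.env.TIME_BETWEEN_REQUESTS ?? '10') * 1000;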


165 src/index.ts Normal file

@@ -0,0 +1,165 @@
import axios from 'axios';
import knex from 'knex';
import fs from 'fs';
import { google } from 'googleapis';
/**
 * Warehouse tariff record as stored in the warehouses table.
 * The fields mirror the WB box tariffs API response, converted to numbers.
 */
type Warehouse = {
  name: string,
  box_delivery_and_storage_expr: number,
  box_delivery_base: number,
  box_delivery_liter: number,
  box_storage_base: number,
  box_storage_liter: number
};
const dbClient = knex({
  client: "pg",
  connection: {
    host: process.env.DB_HOST,
    port: Number.parseInt(process.env.DB_PORT!),
    user: process.env.DB_USER,
    password: process.env.DB_PASS,
    database: process.env.DB_NAME
  }
});
/**
 * Executes ./db_schema.sql, which contains the DB schema as "CREATE ... IF NOT EXISTS" statements.
 */
const initDb = async () => {
  console.log("Starting database initialization.");
  const initializeQuery = fs.readFileSync('./db_schema.sql').toString();
  await dbClient.raw(initializeQuery);
  console.log("Database initialization ended.");
};
/**
 * Requests the box tariffs from the WB API and upserts the data into the DB.
 */
const request = async () => {
  console.log("starting request");
  // Mocked API response; the real request is commented out below.
  const data = {
    response: {
      data: {
        dtNextBox: "2024-02-01",
        dtTillMax: "2024-03-31",
        warehouseList: [
          {
            boxDeliveryAndStorageExpr: "160",
            boxDeliveryBase: "48",
            boxDeliveryLiter: "11,2",
            boxStorageBase: "0,1",
            boxStorageLiter: "0,1",
            warehouseName: "Коледино"
          }
        ]
      }
    }
  };
  /*
  const data = await axios.get(`https://common-api.wildberries.ru/api/v1/tariffs/box`, {
    params: {
      date: new Date().toISOString().split('T')[0]
    },
    headers: {
      "Authorization": `Bearer ${process.env.WB_TOKEN}`
    }
  });
  */
  const warehouses: Warehouse[] = [];
  data.response.data.warehouseList.forEach(warehouse => {
    const warehouseData: Warehouse = {
      name: warehouse.warehouseName,
      box_delivery_and_storage_expr: Number.parseInt(warehouse.boxDeliveryAndStorageExpr),
      box_delivery_base: Number.parseInt(warehouse.boxDeliveryBase),
      box_delivery_liter: Number.parseInt(warehouse.boxDeliveryLiter),
      // WB returns decimals with a comma separator, e.g. "0,1".
      box_storage_base: Number.parseFloat(warehouse.boxStorageBase.replace(",", ".")),
      box_storage_liter: Number.parseFloat(warehouse.boxStorageLiter.replace(",", "."))
    };
    warehouses.push(warehouseData);
    dbClient("warehouses")
      .insert(warehouseData)
      .onConflict("name")
      .merge()
      .catch(e => console.log(e));
  });
  return warehouses;
};
/**
 * Exports the warehouse data to the Google Sheets spreadsheets listed in SPREADSHEETS_IDS.
 */
const exportData = async (warehouses: Warehouse[]) => {
  const auth = new google.auth.GoogleAuth({
    keyFile: "./service-account-file.json",
    scopes: ["https://www.googleapis.com/auth/spreadsheets"]
  });
  const sheets = google.sheets({
    version: "v4",
    auth
  });
  const spreadsheetsIds = process.env.SPREADSHEETS_IDS?.split(':');
  warehouses.forEach(async (warehouse, index) => {
    const headerValues = [
      ['Склад', 'box delivery and storage expr', 'box delivery base', 'box delivery liter', 'box storage base', 'box storage liter']
    ];
    spreadsheetsIds?.forEach(async spreadsheetId => {
      // sheets.spreadsheets.values.clear();
      const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
      // Print the header into row 1 (columns A..F).
      await sheets.spreadsheets.values.update({
        spreadsheetId,
        range: `${process.env.SPREADSHEETS_NAME}!${alphabet[0]}1:${alphabet[headerValues[0].length - 1]}1`,
        valueInputOption: "RAW",
        resource: {
          values: headerValues
        }
      });
      // Each warehouse gets its own row below the header.
      const values = [
        [warehouse.name, warehouse.box_delivery_and_storage_expr, warehouse.box_delivery_base, warehouse.box_delivery_liter, warehouse.box_storage_base, warehouse.box_storage_liter]
      ];
      await sheets.spreadsheets.values.update({
        spreadsheetId,
        range: `${process.env.SPREADSHEETS_NAME}!${alphabet[0]}${index + 2}:${alphabet[values[0].length - 1]}${index + 2}`,
        valueInputOption: "RAW",
        resource: {
          values
        }
      });
    });
  });
};
/**
 * Initializes the DB, then runs request() and exportData() immediately and on an
 * interval read from TIME_BETWEEN_REQUESTS in the .env file.
 */
const start = async () => {
  await initDb();
  const warehouses = await request();
  exportData(warehouses);
  setInterval(async () => {
    const warehouses = await request();
    exportData(warehouses);
  }, Number.parseInt(process.env.TIME_BETWEEN_REQUESTS!) * 1000);
};

await start();
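The ranges in exportData are built by hand from the alphabet string: the header goes into row 1, and warehouse number index goes into row index + 2, columns A through F. A hedged sketch of the same arithmetic as a standalone helper (a1Row, sheetName, rowIndex, and width are illustrative names, not part of the commit):

const a1Row = (sheetName: string, rowIndex: number, width: number): string => {
  const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
  // Row 1 holds the header, so data row 0 lands on spreadsheet row 2.
  const row = rowIndex + 2;
  return `${sheetName}!A${row}:${alphabet[width - 1]}${row}`;
};

// a1Row("stocks_coefs", 0, 6) === "stocks_coefs!A2:F2"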

tsconfig.json

@@ -10,8 +10,9 @@
     "esModuleInterop": true,
     "allowJs": true,
     "checkJs": true,
-    "noEmit": true,
-    "skipLibCheck": true
+    "noEmit": false,
+    "skipLibCheck": true,
+    "outDir": "./build"
   },
   "include": ["src/**/*"],
   "exclude": ["node_modules", "src/**/*.test.*", "src/**/*.spec.*"]