Merge branch 'main' of https://git.cnapp.co.in/gitadmin/api
This commit is contained in:
commit
53a020c3cf
4
app.js
4
app.js
@ -208,6 +208,8 @@ import AnnouncementRoute from './resources/Announcement/announcementRouter.js'
|
||||
|
||||
//Stock
|
||||
import Stock from "./resources/Stock/StockRoute.js";
|
||||
//Reports
|
||||
import Report from "./resources/Reports/ReportRoute.js";
|
||||
app.use("/api/v1", user);
|
||||
|
||||
//Product
|
||||
@ -313,4 +315,6 @@ app.use('/api/transporter',Transporter)
|
||||
app.use("/api", Stock);
|
||||
// Email CMS
|
||||
// app.use("/api", RegisterEmail);
|
||||
//Reports
|
||||
app.use("/api/report",Report);
|
||||
export default app;
|
||||
|
@ -35,9 +35,9 @@ export const addBrand = async (req, res) => {
|
||||
// Get all Brands
|
||||
export const getBrands = async (req, res) => {
|
||||
try {
|
||||
const PAGE_SIZE = parseInt(req.query.show) || 10;
|
||||
const page = parseInt(req.query.page) || 1;
|
||||
const skip = (page - 1) * PAGE_SIZE;
|
||||
// const PAGE_SIZE = parseInt(req.query.show) || 10;
|
||||
// const page = parseInt(req.query.page) || 1;
|
||||
// const skip = (page - 1) * PAGE_SIZE;
|
||||
let filter = {};
|
||||
|
||||
// Search by brandName if provided
|
||||
@ -52,8 +52,8 @@ export const getBrands = async (req, res) => {
|
||||
|
||||
// Fetch brands with pagination and filtering
|
||||
const brands = await BrandModel.find(filter)
|
||||
.limit(PAGE_SIZE)
|
||||
.skip(skip)
|
||||
// .limit(PAGE_SIZE)
|
||||
// .skip(skip)
|
||||
.sort({ createdAt: -1 })
|
||||
.exec();
|
||||
|
||||
@ -61,8 +61,8 @@ export const getBrands = async (req, res) => {
|
||||
res.status(200).json({
|
||||
success: true,
|
||||
total_data: total,
|
||||
total_pages: Math.ceil(total / PAGE_SIZE),
|
||||
current_page: page,
|
||||
// total_pages: Math.ceil(total / PAGE_SIZE),
|
||||
// current_page: page,
|
||||
brands,
|
||||
});
|
||||
} catch (error) {
|
||||
|
@ -35,9 +35,9 @@ export const addCategory = async (req, res) => {
|
||||
// Get all Categories
|
||||
export const getCategories = async (req, res) => {
|
||||
try {
|
||||
const PAGE_SIZE = parseInt(req.query.show) || 10;
|
||||
const page = parseInt(req.query.page) || 1;
|
||||
const skip = (page - 1) * PAGE_SIZE;
|
||||
// const PAGE_SIZE = parseInt(req.query.show) || 10;
|
||||
// const page = parseInt(req.query.page) || 1;
|
||||
// const skip = (page - 1) * PAGE_SIZE;
|
||||
let filter = {};
|
||||
|
||||
// Handle filtering by categoryName
|
||||
@ -52,8 +52,8 @@ export const getCategories = async (req, res) => {
|
||||
|
||||
// Fetch the categories with pagination
|
||||
const categories = await CategoryModel.find(filter)
|
||||
.limit(PAGE_SIZE)
|
||||
.skip(skip)
|
||||
// .limit(PAGE_SIZE)
|
||||
// .skip(skip)
|
||||
.sort({ createdAt: -1 })
|
||||
.exec();
|
||||
|
||||
@ -61,8 +61,8 @@ export const getCategories = async (req, res) => {
|
||||
res.status(200).json({
|
||||
success: true,
|
||||
total_data: total,
|
||||
total_pages: Math.ceil(total / PAGE_SIZE),
|
||||
current_page: page,
|
||||
// total_pages: Math.ceil(total / PAGE_SIZE),
|
||||
// current_page: page,
|
||||
categories,
|
||||
});
|
||||
} catch (error) {
|
||||
|
202
resources/Reports/OpeningInventoryReports.js
Normal file
202
resources/Reports/OpeningInventoryReports.js
Normal file
@ -0,0 +1,202 @@
|
||||
import mongoose from "mongoose";
|
||||
import { Product } from "../Products/ProductModel.js";
|
||||
import { PDStock } from "../Stock/PdStockModel.js";
|
||||
import { RDStock } from "../Stock/RdStockModel.js";
|
||||
|
||||
/**
 * Report: products with the number of PDs and RDs that hold a non-zero
 * OPENING INVENTORY for each product (matched by SKU). Paginated.
 *
 * Query params:
 *   page     - page number (default 1)
 *   show     - page size (default 10)
 *   name     - optional case-insensitive product-name filter
 *   category - optional category ObjectId filter
 *   brand    - optional brand ObjectId filter
 *
 * Responds 200 with { success, data, pagination } or 500 on failure.
 */
export const getProductsWithOpenInventoryInfo = async (req, res) => {
  try {
    const { page = 1, show = 10, name, category, brand } = req.query;

    // Pagination setup — fall back to defaults on non-numeric input so
    // skip/limit never become NaN (which would silently break the query).
    const limit = parseInt(show, 10) || 10;
    const currentPage = parseInt(page, 10) || 1;
    const skip = (currentPage - 1) * limit;

    // Search filters
    const searchCriteria = {};
    if (name) searchCriteria.name = { $regex: name, $options: "i" };
    // FIX: mongoose.Types.ObjectId is a class; invoking it without `new`
    // throws ("Class constructor ObjectId cannot be invoked without 'new'")
    // on Mongoose 7+. `new` works on all versions.
    if (category) searchCriteria.category = new mongoose.Types.ObjectId(category);
    if (brand) searchCriteria.brand = new mongoose.Types.ObjectId(brand);

    // Step 1: Fetch the filtered page of products with brand/category names.
    const products = await Product.find(searchCriteria)
      .skip(skip)
      .limit(limit)
      .populate("brand", "brandName") // Populate brandName
      .populate("category", "categoryName") // Populate categoryName
      .exec();

    // Step 2: Collect SKUs so distributor stock documents can be matched.
    const productSKUs = products.map((product) => product.SKU);

    // Shared pipeline builder: counts, per SKU, how many distributor stock
    // documents carry a non-zero opening inventory for that SKU.
    const countBySku = (countField) => [
      { $unwind: "$products" },
      {
        $match: {
          "products.openingInventory": { $gt: 0 },
          "products.SKU": { $in: productSKUs },
        },
      },
      {
        $group: {
          _id: "$products.SKU",
          [countField]: { $sum: 1 },
        },
      },
    ];

    // Steps 3-4 (+ total count): these three queries are independent,
    // so run them in parallel instead of awaiting them one by one.
    const [pdOIs, rdOIs, totalCount] = await Promise.all([
      PDStock.aggregate(countBySku("pdCount")),
      RDStock.aggregate(countBySku("rdCount")),
      Product.countDocuments(searchCriteria),
    ]);

    // Step 5: map SKU -> count for O(1) lookups while assembling rows.
    const pdMap = {};
    pdOIs.forEach((pd) => {
      pdMap[pd._id] = pd.pdCount;
    });

    const rdMap = {};
    rdOIs.forEach((rd) => {
      rdMap[rd._id] = rd.rdCount;
    });

    // Step 6: Combine product info with PD/RD counts using SKU.
    const productData = products.map((product) => ({
      SKU: product.SKU,
      name: product.name,
      brand: product.brand?.brandName || "N/A", // populated brandName
      category: product.category?.categoryName || "N/A", // populated categoryName
      allPDs: pdMap[product.SKU] || 0,
      allRDs: rdMap[product.SKU] || 0,
      allPdAndRd: (pdMap[product.SKU] || 0) + (rdMap[product.SKU] || 0),
    }));

    // Step 7: Respond with the paginated results and total count.
    res.status(200).json({
      success: true,
      data: productData,
      pagination: {
        total: totalCount,
        page: currentPage,
        pages: Math.ceil(totalCount / limit),
      },
    });
  } catch (error) {
    res.status(500).json({ success: false, message: error.message });
  }
};
|
||||
|
||||
/**
 * Report: products with the number of PDs and RDs that hold non-zero
 * CURRENT STOCK for each product (matched by SKU). Paginated.
 *
 * Query params:
 *   page     - page number (default 1)
 *   show     - page size (default 10)
 *   name     - optional case-insensitive product-name filter
 *   category - optional category ObjectId filter
 *   brand    - optional brand ObjectId filter
 *
 * Responds 200 with { success, data, pagination } or 500 on failure.
 */
export const getProductsWithStockInfo = async (req, res) => {
  try {
    const { page = 1, show = 10, name, category, brand } = req.query;

    // Pagination setup — fall back to defaults on non-numeric input so
    // skip/limit never become NaN (which would silently break the query).
    const limit = parseInt(show, 10) || 10;
    const currentPage = parseInt(page, 10) || 1;
    const skip = (currentPage - 1) * limit;

    // Search filters
    const searchCriteria = {};
    if (name) searchCriteria.name = { $regex: name, $options: "i" };
    // FIX: mongoose.Types.ObjectId is a class; invoking it without `new`
    // throws ("Class constructor ObjectId cannot be invoked without 'new'")
    // on Mongoose 7+. `new` works on all versions.
    if (category) searchCriteria.category = new mongoose.Types.ObjectId(category);
    if (brand) searchCriteria.brand = new mongoose.Types.ObjectId(brand);

    // Step 1: Fetch the filtered page of products with brand/category names.
    const products = await Product.find(searchCriteria)
      .skip(skip)
      .limit(limit)
      .populate("brand", "brandName") // Populate brandName
      .populate("category", "categoryName") // Populate categoryName
      .exec();

    // Step 2: Collect SKUs so distributor stock documents can be matched.
    const productSKUs = products.map((product) => product.SKU);

    // Shared pipeline builder: counts, per SKU, how many distributor stock
    // documents carry non-zero current Stock for that SKU.
    const countBySku = (countField) => [
      { $unwind: "$products" },
      {
        $match: {
          "products.Stock": { $gt: 0 },
          "products.SKU": { $in: productSKUs },
        },
      },
      {
        $group: {
          _id: "$products.SKU",
          [countField]: { $sum: 1 },
        },
      },
    ];

    // Steps 3-4 (+ total count): these three queries are independent,
    // so run them in parallel instead of awaiting them one by one.
    const [pdStocks, rdStocks, totalCount] = await Promise.all([
      PDStock.aggregate(countBySku("pdCount")),
      RDStock.aggregate(countBySku("rdCount")),
      Product.countDocuments(searchCriteria),
    ]);

    // Step 5: map SKU -> count for O(1) lookups while assembling rows.
    const pdMap = {};
    pdStocks.forEach((pd) => {
      pdMap[pd._id] = pd.pdCount;
    });

    const rdMap = {};
    rdStocks.forEach((rd) => {
      rdMap[rd._id] = rd.rdCount;
    });

    // Step 6: Combine product info with PD/RD counts using SKU.
    const productData = products.map((product) => ({
      SKU: product.SKU,
      name: product.name,
      brand: product.brand?.brandName || "N/A", // populated brandName
      category: product.category?.categoryName || "N/A", // populated categoryName
      allPDs: pdMap[product.SKU] || 0,
      allRDs: rdMap[product.SKU] || 0,
      allPdAndRd: (pdMap[product.SKU] || 0) + (rdMap[product.SKU] || 0),
    }));

    // Step 7: Respond with the paginated results and total count.
    res.status(200).json({
      success: true,
      data: productData,
      pagination: {
        total: totalCount,
        page: currentPage,
        pages: Math.ceil(totalCount / limit),
      },
    });
  } catch (error) {
    res.status(500).json({ success: false, message: error.message });
  }
};
|
23
resources/Reports/ReportRoute.js
Normal file
23
resources/Reports/ReportRoute.js
Normal file
@ -0,0 +1,23 @@
|
||||
import express from "express";
|
||||
import {
|
||||
getProductsWithStockInfo,
|
||||
getProductsWithOpenInventoryInfo,
|
||||
} from "./OpeningInventoryReports.js";
|
||||
|
||||
import { isAuthenticatedUser, authorizeRoles } from "../../middlewares/auth.js";
|
||||
|
||||
// Report router — both endpoints require a logged-in admin.
const router = express.Router();

// GET /opening-inventory: products with PD/RD opening-inventory counts.
router.get(
  "/opening-inventory",
  isAuthenticatedUser,
  authorizeRoles("admin"),
  getProductsWithOpenInventoryInfo
);

// GET /stock: products with PD/RD current-stock counts.
router.get(
  "/stock",
  isAuthenticatedUser,
  authorizeRoles("admin"),
  getProductsWithStockInfo
);

export default router;
|
@ -129,7 +129,7 @@ export const getSingleUserSippingAddress = async (req, res) => {
|
||||
res.status(201).json({
|
||||
success: true,
|
||||
UserShippingAddress,
|
||||
message: "All User Shipping Address Fetched",
|
||||
message: "User Shipping Address Fetched",
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
|
@ -152,7 +152,7 @@ export const getSingleUserSippingAddress = async (req, res) => {
|
||||
res.status(201).json({
|
||||
success: true,
|
||||
UserShippingAddress,
|
||||
message: "All User Shipping Address Fetched",
|
||||
message: "User Shipping Address Fetched",
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
|
@ -9,6 +9,7 @@ const ProductRecordSchema = new mongoose.Schema({
|
||||
},
|
||||
SKU: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
productName: {
|
||||
type: String,
|
||||
|
@ -7,6 +7,10 @@ const ProductRecordSchema = new mongoose.Schema({
|
||||
ref: "Product",
|
||||
required: true,
|
||||
},
|
||||
SKU: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
Stock: {
|
||||
type: Number,
|
||||
default: 0,
|
||||
|
@ -6,7 +6,7 @@ import XLSX from "xlsx";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
|
||||
export const uploadOpeningInventory = async (req, res) => {
|
||||
export const uploadOpeningInventorypd = async (req, res) => {
|
||||
try {
|
||||
// Ensure valid user ID
|
||||
if (!mongoose.Types.ObjectId.isValid(req.user._id)) {
|
||||
@ -89,7 +89,10 @@ export const uploadOpeningInventory = async (req, res) => {
|
||||
// Validate required fields
|
||||
if (!item.SKU) missingFields.add("SKU");
|
||||
if (!item.productName) missingFields.add("Product Name");
|
||||
if (item.openingInventory === null || item.openingInventory === undefined) {
|
||||
if (
|
||||
item.openingInventory === null ||
|
||||
item.openingInventory === undefined
|
||||
) {
|
||||
missingFields.add("Opening Inventory (Qty)");
|
||||
}
|
||||
// Combine all errors into a single message
|
||||
@ -219,6 +222,203 @@ export const uploadOpeningInventory = async (req, res) => {
|
||||
res.status(500).json({ message: "Internal Server Error" });
|
||||
}
|
||||
};
|
||||
/**
 * Bulk-uploads opening inventory for an RD (retail distributor) from an
 * Excel spreadsheet with columns: "SKU", "Product Name",
 * "Opening Inventory (Qty)".
 *
 * Route param: userId — the RD whose RDStock document is created/updated.
 * For each row, the product's openingInventory AND Stock are set to the
 * uploaded quantity; products missing from the sheet are back-filled with
 * zeros so the stock document covers the whole catalog.
 *
 * Responds 200 with { newlyCreated, updatedOpeningInventories, errors },
 * 400 on bad input, 500 on unexpected failure.
 */
export const uploadOpeningInventoryrd = async (req, res) => {
  // FIX: declared outside the try so the catch block can clean the file
  // up. Previously this was a `const` inside the try, so referencing it
  // in the catch threw a ReferenceError and masked the original error.
  let filePath;
  try {
    // Ensure valid user ID
    if (!mongoose.Types.ObjectId.isValid(req.user._id)) {
      return res.status(400).json({ message: "Please login again" });
    }

    // Ensure file is uploaded
    if (!req.files || !req.files.file) {
      return res.status(400).json({ message: "No file uploaded" });
    }

    const file = req.files.file;
    filePath = path.join("public", "uploads", file.name);

    // Ensure 'uploads' directory exists
    if (!fs.existsSync(path.dirname(filePath))) {
      fs.mkdirSync(path.dirname(filePath), { recursive: true });
    }

    // Move the file from temp to the uploads directory
    await file.mv(filePath);

    // Parse the spreadsheet into an array-of-rows (header: 1 keeps the
    // first row as raw headers rather than object keys).
    const fileBuffer = fs.readFileSync(filePath);
    const workbook = XLSX.read(fileBuffer, { type: "buffer" });
    const sheetName = workbook.SheetNames[0];
    const worksheet = workbook.Sheets[sheetName];
    const data = XLSX.utils.sheet_to_json(worksheet, { header: 1 });

    if (data.length <= 1) {
      return res
        .status(400)
        .json({ message: "Empty spreadsheet or no data found" });
    }

    const headers = data[0];

    // Map spreadsheet column headers to schema field names.
    const headerMapping = {
      SKU: "SKU",
      "Product Name": "productName",
      "Opening Inventory (Qty)": "openingInventory",
    };

    const requiredHeaders = Object.keys(headerMapping);
    if (!requiredHeaders.every((header) => headers.includes(header))) {
      return res
        .status(400)
        .json({ message: "Missing required columns in spreadsheet" });
    }

    const errors = [];
    const newlyCreated = [];
    const updatedOpeningInventories = [];

    // Fetch the user's stock or create it if it doesn't exist yet.
    let stock = await RDStock.findOne({ userId: req.params.userId });
    if (!stock) {
      stock = new RDStock({ userId: req.params.userId, products: [] });
    }

    for (let i = 1; i < data.length; i++) {
      const row = data[i];
      // Skip empty rows
      if (row.every((cell) => cell === undefined || cell === "")) {
        continue;
      }

      // Build an item object keyed by schema field names.
      const item = {};
      headers.forEach((header, index) => {
        if (headerMapping[header]) {
          item[headerMapping[header]] =
            row[index] !== undefined ? row[index] : "";
        }
      });
      // Initialize error tracking for each item
      const missingFields = new Set();
      const validationErrors = new Set();

      // Validate required fields (0 is a legal quantity, so only
      // null/undefined/"" count as missing).
      if (!item.SKU) missingFields.add("SKU");
      if (!item.productName) missingFields.add("Product Name");
      if (
        item.openingInventory === null ||
        item.openingInventory === undefined ||
        item.openingInventory === ""
      ) {
        missingFields.add("Opening Inventory (Qty)");
      }
      // Combine all errors into a single message
      let errorMessage = "";
      if (missingFields.size > 0) {
        errorMessage += `Missing fields: ${Array.from(missingFields).join(
          ", "
        )}. `;
      }
      const product = await Product.findOne({ SKU: item.SKU });
      if (!product) {
        validationErrors.add("Product not found");
      }
      if (validationErrors.size > 0) {
        errorMessage += `Validation errors: ${Array.from(validationErrors).join(
          ", "
        )}.`;
      }

      // If there are errors, record the row and move on.
      if (errorMessage.trim()) {
        errors.push({
          SKU: item.SKU,
          productName: item.productName,
          openingInventory: item.openingInventory,
          message: errorMessage.trim(),
        });
        continue;
      }

      // Cast to a number to handle leading zeros and ensure numeric
      // comparisons below.
      const newOpeningInventory = Number(item.openingInventory);
      // Check if the product already exists in the user's stock.
      const existingProduct = stock.products.find(
        (p) => p.SKU === item.SKU.toString()
      );
      if (existingProduct) {
        // Update only when the inventory actually changed.
        if (Number(existingProduct.openingInventory) !== newOpeningInventory) {
          existingProduct.openingInventory = newOpeningInventory;
          existingProduct.Stock = newOpeningInventory;
          updatedOpeningInventories.push({
            SKU: existingProduct.SKU,
            updatedFields: "openingInventory",
            openingInventory: newOpeningInventory,
            productName: existingProduct.productName,
          });
        }
      } else {
        // Create a new product entry in the stock document.
        stock.products.push({
          productid: product._id,
          SKU: item.SKU,
          productName: item.productName,
          openingInventory: item.openingInventory,
          Stock: item.openingInventory,
        });
        newlyCreated.push({
          SKU: item.SKU,
          productName: item.productName,
          openingInventory: item.openingInventory,
        });
      }
    }

    // Ensure every product from the Product collection appears in this
    // RDStock document (back-fill missing ones with zero inventory).
    // A Set of known SKUs avoids an O(n*m) scan over stock.products.
    const allProducts = await Product.find({});
    const stockedSKUs = new Set(stock.products.map((p) => p.SKU));
    for (const product of allProducts) {
      if (!stockedSKUs.has(product.SKU)) {
        stock.products.push({
          productid: product._id,
          SKU: product.SKU,
          productName: product.name,
          openingInventory: 0,
          Stock: 0,
        });
        newlyCreated.push({
          SKU: product.SKU,
          productName: product.name,
          openingInventory: 0,
        });
        stockedSKUs.add(product.SKU);
      }
    }

    // Save the updated stock
    await stock.save();

    // Clean up the uploaded file
    if (fs.existsSync(filePath)) {
      fs.unlinkSync(filePath);
    }

    res.status(200).json({
      message: "File processed successfully",
      newlyCreated,
      updatedOpeningInventories,
      errors,
    });
  } catch (error) {
    console.error(error);
    // Best-effort cleanup — filePath may not have been assigned yet if
    // the failure happened before the upload was accepted.
    if (filePath && fs.existsSync(filePath)) {
      fs.unlinkSync(filePath);
    }
    res.status(500).json({ message: "Internal Server Error" });
  }
};
|
||||
export const getProductsAndStockByPD = async (req, res) => {
|
||||
try {
|
||||
const userId = req.params.userId || req.user._id;
|
||||
@ -392,14 +592,18 @@ export const getProductsAndStockByRD = async (req, res) => {
|
||||
const stockMap = {};
|
||||
if (userStock && userStock.products) {
|
||||
userStock.products.forEach((product) => {
|
||||
stockMap[product.productid.toString()] = product.Stock;
|
||||
stockMap[product.productid.toString()] = {
|
||||
Stock: product.Stock,
|
||||
openingInventory: product.openingInventory,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Combine products with their respective stock
|
||||
const productsWithStock = products.map((product) => ({
|
||||
...product,
|
||||
stock: stockMap[product._id.toString()] || 0,
|
||||
stock: stockMap[product._id.toString()]?.Stock || 0,
|
||||
openingInventory: stockMap[product._id.toString()]?.openingInventory || 0,
|
||||
}));
|
||||
|
||||
// Get total count for pagination purposes
|
||||
@ -568,7 +772,7 @@ export const getStockPD = async (req, res) => {
|
||||
// RD inventory
|
||||
export const createOrUpdateInventoryForRD = async (req, res) => {
|
||||
const userId = req.body.userId ? req.body.userId : req.user._id;
|
||||
console.log("res came here ");
|
||||
// console.log("res came here ");
|
||||
try {
|
||||
const { products } = req.body;
|
||||
const allProducts = await Product.find({}, "_id SKU name");
|
||||
|
@ -7,7 +7,8 @@ import {
|
||||
getProductsAndStockByRD,
|
||||
getStockPD,
|
||||
getStockRD,
|
||||
uploadOpeningInventory,
|
||||
uploadOpeningInventorypd,
|
||||
uploadOpeningInventoryrd,
|
||||
} from "./StockController.js";
|
||||
import { authorizeRoles, isAuthenticatedUser } from "../../middlewares/auth.js";
|
||||
import { isAuthenticatedRD } from "../../middlewares/rdAuth.js";
|
||||
@ -15,10 +16,16 @@ const router = express.Router();
|
||||
|
||||
// Routes
|
||||
router.post(
|
||||
"/openinginventories/upload/:userId",
|
||||
"/openinginventories/pd/upload/:userId",
|
||||
isAuthenticatedUser,
|
||||
authorizeRoles("admin"),
|
||||
uploadOpeningInventory
|
||||
uploadOpeningInventorypd
|
||||
);
|
||||
router.post(
|
||||
"/openinginventories/rd/upload/:userId",
|
||||
isAuthenticatedUser,
|
||||
authorizeRoles("admin"),
|
||||
uploadOpeningInventoryrd
|
||||
);
|
||||
router.get("/pd/stock/:userId", isAuthenticatedUser, getProductsAndStockByPD);
|
||||
router.get("/pd/stock", isAuthenticatedUser, getStockPD);
|
||||
|
Loading…
Reference in New Issue
Block a user