Merge pull request #40678 from frappe/version-15-hotfix

chore: release v15
This commit is contained in:
Deepesh Garg
2024-03-27 08:56:26 +05:30
committed by GitHub
54 changed files with 3246 additions and 1016 deletions

View File

@@ -0,0 +1,100 @@
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
frappe.ui.form.on("Bisect Accounting Statements", {
	onload(frm) {
		frm.trigger("render_heatmap");
	},

	refresh(frm) {
		// Toolbar actions: each button simply fires the matching form event.
		const buttons = [
			[__("Bisect Left"), "bisect_left"],
			[__("Bisect Right"), "bisect_right"],
			[__("Up"), "move_up"],
			[__("Build Tree"), "build_tree"],
		];
		buttons.forEach(([label, event]) => {
			frm.add_custom_button(label, () => frm.trigger(event));
		});
	},

	render_heatmap(frm) {
		const wrapper = frm.get_field("bisect_heatmap").$wrapper;
		wrapper.addClass("bisect_heatmap_location");

		// One heatmap cell per day between from_date and to_date; cells inside
		// the currently bisected window get value 1.0, the rest 0.0.
		const DAY_IN_MS = 24 * 60 * 60 * 1000;
		const rangeStart = new Date(frm.doc.from_date).getTime();
		const rangeEnd = new Date(frm.doc.to_date).getTime();
		const bisectStart = new Date(frm.doc.current_from_date).getTime();
		const bisectEnd = new Date(frm.doc.current_to_date).getTime();

		const datapoints = {};
		for (let ts = rangeStart; ts <= rangeEnd; ts += DAY_IN_MS) {
			// frappe.Chart heatmaps key data points by epoch seconds.
			const epochSeconds = ts / 1000;
			datapoints[epochSeconds] = bisectStart <= ts && ts <= bisectEnd ? 1.0 : 0.0;
		}

		new frappe.Chart(".bisect_heatmap_location", {
			type: "heatmap",
			data: {
				dataPoints: datapoints,
				start: new Date(frm.doc.from_date),
				end: new Date(frm.doc.to_date),
			},
			countLabel: "Bisecting",
			discreteDomains: 1,
		});
	},

	bisect_left(frm) {
		run_doc_method(frm, "bisect_left", __("Bisecting Left ..."));
	},

	bisect_right(frm) {
		run_doc_method(frm, "bisect_right", __("Bisecting Right ..."));
	},

	move_up(frm) {
		run_doc_method(frm, "move_up", __("Moving up in tree ..."));
	},

	build_tree(frm) {
		run_doc_method(frm, "build_tree", __("Rebuilding BTree for period ..."));
	},
});

// Invoke a whitelisted method on the document with a freeze overlay, then
// re-render the heatmap to reflect the new bisection window.
function run_doc_method(frm, method, freeze_message) {
	frm.call({
		doc: frm.doc,
		method,
		freeze: true,
		freeze_message,
		callback: () => frm.trigger("render_heatmap"),
	});
}

View File

@@ -0,0 +1,194 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2023-09-15 21:28:28.054773",
"default_view": "List",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"section_break_cvfg",
"company",
"column_break_hcam",
"from_date",
"column_break_qxbi",
"to_date",
"column_break_iwny",
"algorithm",
"section_break_8ph9",
"current_node",
"section_break_ngid",
"bisect_heatmap",
"section_break_hmsy",
"bisecting_from",
"current_from_date",
"column_break_uqyd",
"bisecting_to",
"current_to_date",
"section_break_hbyo",
"heading_cppb",
"p_l_summary",
"column_break_aivo",
"balance_sheet_summary",
"b_s_summary",
"column_break_gvwx",
"difference_heading",
"difference"
],
"fields": [
{
"fieldname": "column_break_qxbi",
"fieldtype": "Column Break"
},
{
"fieldname": "from_date",
"fieldtype": "Datetime",
"label": "From Date"
},
{
"fieldname": "to_date",
"fieldtype": "Datetime",
"label": "To Date"
},
{
"default": "BFS",
"fieldname": "algorithm",
"fieldtype": "Select",
"label": "Algorithm",
"options": "BFS\nDFS"
},
{
"fieldname": "column_break_iwny",
"fieldtype": "Column Break"
},
{
"fieldname": "current_node",
"fieldtype": "Link",
"label": "Current Node",
"options": "Bisect Nodes"
},
{
"fieldname": "section_break_hmsy",
"fieldtype": "Section Break"
},
{
"fieldname": "current_from_date",
"fieldtype": "Datetime",
"read_only": 1
},
{
"fieldname": "current_to_date",
"fieldtype": "Datetime",
"read_only": 1
},
{
"fieldname": "column_break_uqyd",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_hbyo",
"fieldtype": "Section Break"
},
{
"fieldname": "p_l_summary",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "b_s_summary",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "difference",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "column_break_aivo",
"fieldtype": "Column Break"
},
{
"fieldname": "column_break_gvwx",
"fieldtype": "Column Break"
},
{
"fieldname": "company",
"fieldtype": "Link",
"label": "Company",
"options": "Company"
},
{
"fieldname": "column_break_hcam",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_ngid",
"fieldtype": "Section Break"
},
{
"fieldname": "section_break_8ph9",
"fieldtype": "Section Break",
"hidden": 1
},
{
"fieldname": "bisect_heatmap",
"fieldtype": "HTML",
"label": "Heatmap"
},
{
"fieldname": "heading_cppb",
"fieldtype": "Heading",
"label": "Profit and Loss Summary"
},
{
"fieldname": "balance_sheet_summary",
"fieldtype": "Heading",
"label": "Balance Sheet Summary"
},
{
"fieldname": "difference_heading",
"fieldtype": "Heading",
"label": "Difference"
},
{
"fieldname": "bisecting_from",
"fieldtype": "Heading",
"label": "Bisecting From"
},
{
"fieldname": "bisecting_to",
"fieldtype": "Heading",
"label": "Bisecting To"
},
{
"fieldname": "section_break_cvfg",
"fieldtype": "Section Break"
}
],
"hide_toolbar": 1,
"index_web_pages_for_search": 1,
"issingle": 1,
"links": [],
"modified": "2023-12-01 16:49:54.073890",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Bisect Accounting Statements",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "Administrator",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,226 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import datetime
from collections import deque
from math import floor
import frappe
from dateutil.relativedelta import relativedelta
from frappe import _
from frappe.model.document import Document
from frappe.utils import getdate
from frappe.utils.data import guess_date_format
class BisectAccountingStatements(Document):
	"""Bisect the period [from_date, to_date] to locate accounting discrepancies.

	Builds a binary tree of date intervals ("Bisect Nodes" documents) over the
	chosen period and lets the user walk it (bisect left / bisect right / up),
	computing or fetching the Profit and Loss and Balance Sheet summaries for
	the interval at the current node.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		algorithm: DF.Literal["BFS", "DFS"]
		b_s_summary: DF.Float
		company: DF.Link | None
		current_from_date: DF.Datetime | None
		current_node: DF.Link | None
		current_to_date: DF.Datetime | None
		difference: DF.Float
		from_date: DF.Datetime | None
		p_l_summary: DF.Float
		to_date: DF.Datetime | None
	# end: auto-generated types

	def validate(self):
		self.validate_dates()

	def validate_dates(self):
		"""Throw a validation error if from_date falls after to_date."""
		if getdate(self.from_date) > getdate(self.to_date):
			frappe.throw(
				_("From Date: {0} cannot be greater than To date: {1}").format(
					frappe.bold(self.from_date), frappe.bold(self.to_date)
				)
			)

	def _make_root_node(self, from_date: datetime, to_date: datetime):
		"""Create and insert the root 'Bisect Nodes' doc spanning the whole period."""
		node = frappe.new_doc("Bisect Nodes")
		node.root = None
		node.period_from_date = from_date
		node.period_to_date = to_date
		node.insert()
		return node

	def _expand_node(self, cur_node) -> list:
		"""Split cur_node's period in half into left/right child nodes.

		Returns [left_node, right_node], or an empty list when the period is a
		single day and cannot be bisected further. Saves cur_node only when
		children were created (mirrors the previous traversal behaviour).
		"""
		delta = cur_node.period_to_date - cur_node.period_from_date
		if delta.days == 0:
			return []

		cur_floor = floor(delta.days / 2)
		next_to_date = cur_node.period_from_date + relativedelta(days=+cur_floor)

		left_node = frappe.new_doc("Bisect Nodes")
		left_node.period_from_date = cur_node.period_from_date
		left_node.period_to_date = next_to_date
		left_node.root = cur_node.name
		left_node.generated = False
		left_node.insert()
		cur_node.left_child = left_node.name

		# Right half starts the day after the left half ends, so the two
		# children partition the parent period without overlap.
		next_from_date = cur_node.period_from_date + relativedelta(days=+(cur_floor + 1))
		right_node = frappe.new_doc("Bisect Nodes")
		right_node.period_from_date = next_from_date
		right_node.period_to_date = cur_node.period_to_date
		right_node.root = cur_node.name
		right_node.generated = False
		right_node.insert()
		cur_node.right_child = right_node.name

		cur_node.save()
		return [left_node, right_node]

	def bfs(self, from_date: datetime, to_date: datetime):
		"""Build the interval tree breadth-first."""
		period_queue = deque([self._make_root_node(from_date, to_date)])
		while period_queue:
			cur_node = period_queue.popleft()
			period_queue.extend(self._expand_node(cur_node))

	def dfs(self, from_date: datetime, to_date: datetime):
		"""Build the interval tree depth-first."""
		period_stack = [self._make_root_node(from_date, to_date)]
		while period_stack:
			cur_node = period_stack.pop()
			period_stack.extend(self._expand_node(cur_node))

	@frappe.whitelist()
	def build_tree(self):
		"""Rebuild the interval tree for [from_date, to_date] and reset to the root."""
		frappe.db.delete("Bisect Nodes")

		# Dates are stored on the doc as strings; parse them back to datetime.
		# NOTE(review): the format is guessed from from_date only — assumes
		# to_date uses the same format.
		dt_format = guess_date_format(self.from_date)
		from_date = datetime.datetime.strptime(self.from_date, dt_format)
		to_date = datetime.datetime.strptime(self.to_date, dt_format)

		if self.algorithm == "BFS":
			self.bfs(from_date, to_date)
		if self.algorithm == "DFS":
			self.dfs(from_date, to_date)

		# Point at the root and update the current window BEFORE summarizing;
		# previously the summary was computed with the stale (or unset)
		# current_from_date/current_to_date.
		root = frappe.db.get_all("Bisect Nodes", filters={"root": ["is", "not set"]})[0]
		self.current_node = root.name
		self.current_from_date = self.from_date
		self.current_to_date = self.to_date
		self.get_report_summary()
		self.save()

	def get_report_summary(self):
		"""Run both financial reports for the current window and store the summaries."""
		filters = {
			"company": self.company,
			"filter_based_on": "Date Range",
			"period_start_date": self.current_from_date,
			"period_end_date": self.current_to_date,
			"periodicity": "Yearly",
		}
		# Index 5 of the script report result carries the primitive summary value.
		pl_summary = frappe.get_doc("Report", "Profit and Loss Statement")
		self.p_l_summary = pl_summary.execute_script_report(filters=filters)[5]
		bs_summary = frappe.get_doc("Report", "Balance Sheet")
		self.b_s_summary = bs_summary.execute_script_report(filters=filters)[5]
		self.difference = abs(self.p_l_summary - self.b_s_summary)

	def update_node(self):
		"""Persist the freshly computed summaries onto the current node."""
		current_node = frappe.get_doc("Bisect Nodes", self.current_node)
		current_node.balance_sheet_summary = self.b_s_summary
		current_node.profit_loss_summary = self.p_l_summary
		current_node.difference = self.difference
		current_node.generated = True
		current_node.save()

	def current_node_has_summary_info(self):
		"Assertion method"
		return frappe.db.get_value("Bisect Nodes", self.current_node, "generated")

	def fetch_summary_info_from_current_node(self):
		"""Copy previously computed summaries from the current node onto the doc.

		Fixed: the two summary fields were cross-assigned (P&L received the
		balance-sheet value and vice versa).
		"""
		current_node = frappe.get_doc("Bisect Nodes", self.current_node)
		self.p_l_summary = current_node.profit_loss_summary
		self.b_s_summary = current_node.balance_sheet_summary
		self.difference = abs(self.p_l_summary - self.b_s_summary)

	def fetch_or_calculate(self):
		"""Use cached node summaries when present, otherwise compute and cache them."""
		if self.current_node_has_summary_info():
			self.fetch_summary_info_from_current_node()
		else:
			self.get_report_summary()
			self.update_node()

	def _move_to(self, node_name, missing_msg):
		"""Move the cursor to node_name, or show missing_msg when there is no node."""
		if node_name is None:
			frappe.msgprint(missing_msg)
			return
		node = frappe.get_doc("Bisect Nodes", node_name)
		self.current_node = node_name
		self.current_from_date = node.period_from_date
		self.current_to_date = node.period_to_date
		self.fetch_or_calculate()
		self.save()

	@frappe.whitelist()
	def bisect_left(self):
		"""Descend into the earlier half of the current period."""
		if self.current_node is not None:
			cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
			self._move_to(cur_node.left_child, _("No more children on Left"))

	@frappe.whitelist()
	def bisect_right(self):
		"""Descend into the later half of the current period."""
		if self.current_node is not None:
			cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
			self._move_to(cur_node.right_child, _("No more children on Right"))

	@frappe.whitelist()
	def move_up(self):
		"""Climb back to the parent interval."""
		if self.current_node is not None:
			cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
			self._move_to(cur_node.root, _("Reached Root"))

View File

@@ -0,0 +1,9 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests.utils import FrappeTestCase
class TestBisectAccountingStatements(FrappeTestCase):
	"""Placeholder test case for the 'Bisect Accounting Statements' doctype.

	No tests yet; kept so the doctype has a discoverable test module.
	"""

	pass

View File

@@ -0,0 +1,8 @@
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// frappe.ui.form.on("Bisect Nodes", {
// refresh(frm) {
// },
// });

View File

@@ -0,0 +1,97 @@
{
"actions": [],
"autoname": "autoincrement",
"creation": "2023-09-27 14:56:38.112462",
"default_view": "List",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"root",
"left_child",
"right_child",
"period_from_date",
"period_to_date",
"difference",
"balance_sheet_summary",
"profit_loss_summary",
"generated"
],
"fields": [
{
"fieldname": "root",
"fieldtype": "Link",
"label": "Root",
"options": "Bisect Nodes"
},
{
"fieldname": "left_child",
"fieldtype": "Link",
"label": "Left Child",
"options": "Bisect Nodes"
},
{
"fieldname": "right_child",
"fieldtype": "Link",
"label": "Right Child",
"options": "Bisect Nodes"
},
{
"fieldname": "period_from_date",
"fieldtype": "Datetime",
"label": "Period_from_date"
},
{
"fieldname": "period_to_date",
"fieldtype": "Datetime",
"label": "Period To Date"
},
{
"fieldname": "difference",
"fieldtype": "Float",
"label": "Difference"
},
{
"fieldname": "balance_sheet_summary",
"fieldtype": "Float",
"label": "Balance Sheet Summary"
},
{
"fieldname": "profit_loss_summary",
"fieldtype": "Float",
"label": "Profit and Loss Summary"
},
{
"default": "0",
"fieldname": "generated",
"fieldtype": "Check",
"label": "Generated"
}
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2023-12-01 17:46:12.437996",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Bisect Nodes",
"naming_rule": "Autoincrement",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,29 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class BisectNodes(Document):
	"""A node in the 'Bisect Accounting Statements' interval tree.

	Each node holds a date interval (period_from_date..period_to_date), links
	to its parent (root) and children (left_child/right_child), and caches the
	report summaries computed for that interval once `generated` is set.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		balance_sheet_summary: DF.Float
		difference: DF.Float
		generated: DF.Check
		left_child: DF.Link | None
		name: DF.Int | None
		period_from_date: DF.Datetime | None
		period_to_date: DF.Datetime | None
		profit_loss_summary: DF.Float
		right_child: DF.Link | None
		root: DF.Link | None
	# end: auto-generated types

	pass

View File

@@ -0,0 +1,9 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests.utils import FrappeTestCase
class TestBisectNodes(FrappeTestCase):
	"""Placeholder test case for the 'Bisect Nodes' doctype. No tests yet."""

	pass

View File

@@ -141,7 +141,8 @@ class PeriodClosingVoucher(AccountsController):
previous_fiscal_year = get_fiscal_year(last_year_closing, company=self.company, boolean=True)
if previous_fiscal_year and not frappe.db.exists(
"GL Entry", {"posting_date": ("<=", last_year_closing), "company": self.company}
"GL Entry",
{"posting_date": ("<=", last_year_closing), "company": self.company, "is_cancelled": 0},
):
return

View File

@@ -0,0 +1,58 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2024-02-04 10:53:32.307930",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"doctype_name",
"docfield_name",
"no_of_docs",
"done"
],
"fields": [
{
"fieldname": "doctype_name",
"fieldtype": "Link",
"in_list_view": 1,
"label": "DocType",
"options": "DocType",
"read_only": 1,
"reqd": 1
},
{
"fieldname": "docfield_name",
"fieldtype": "Data",
"label": "DocField",
"read_only": 1
},
{
"fieldname": "no_of_docs",
"fieldtype": "Int",
"in_list_view": 1,
"label": "No of Docs",
"read_only": 1
},
{
"default": "0",
"fieldname": "done",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Done",
"read_only": 1
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2024-02-05 17:35:09.556054",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Transaction Deletion Record Details",
"owner": "Administrator",
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,26 @@
# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TransactionDeletionRecordDetails(Document):
	"""Child-table row of a Transaction Deletion Record.

	Records, per doctype (and optionally per docfield), how many documents are
	slated for deletion and whether that deletion step has completed.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		docfield_name: DF.Data | None
		doctype_name: DF.Link
		done: DF.Check
		no_of_docs: DF.Int
		parent: DF.Data
		parentfield: DF.Data
		parenttype: DF.Data
	# end: auto-generated types

	pass

View File

@@ -97,11 +97,11 @@ def execute(filters=None):
chart = get_chart_data(filters, columns, asset, liability, equity)
report_summary = get_report_summary(
report_summary, primitive_summary = get_report_summary(
period_list, asset, liability, equity, provisional_profit_loss, currency, filters
)
return columns, data, message, chart, report_summary
return columns, data, message, chart, report_summary, primitive_summary
def get_provisional_profit_loss(
@@ -217,7 +217,7 @@ def get_report_summary(
"datatype": "Currency",
"currency": currency,
},
]
], (net_asset - net_liability + net_equity)
def get_chart_data(filters, columns, asset, liability, equity):

View File

@@ -669,20 +669,20 @@ class GrossProfitGenerator(object):
elif row.sales_order and row.so_detail:
incoming_amount = self.get_buying_amount_from_so_dn(row.sales_order, row.so_detail, item_code)
if incoming_amount:
return incoming_amount
return flt(row.qty) * incoming_amount
else:
return flt(row.qty) * self.get_average_buying_rate(row, item_code)
return flt(row.qty) * self.get_average_buying_rate(row, item_code)
def get_buying_amount_from_so_dn(self, sales_order, so_detail, item_code):
from frappe.query_builder.functions import Sum
from frappe.query_builder.functions import Avg
delivery_note_item = frappe.qb.DocType("Delivery Note Item")
query = (
frappe.qb.from_(delivery_note_item)
.select(Sum(delivery_note_item.incoming_rate * delivery_note_item.stock_qty))
.select(Avg(delivery_note_item.incoming_rate))
.where(delivery_note_item.docstatus == 1)
.where(delivery_note_item.item_code == item_code)
.where(delivery_note_item.against_sales_order == sales_order)

View File

@@ -460,3 +460,95 @@ class TestGrossProfit(FrappeTestCase):
}
gp_entry = [x for x in data if x.parent_invoice == sinv.name]
self.assertDictContainsSubset(expected_entry, gp_entry[0])
def test_different_rates_in_si_and_dn(self):
	"""
	Test gp calculation when invoice and delivery note differ in qty and aren't connected

	SO -- INV
	|
	DN
	"""
	# Imports moved below the docstring: previously an import preceded the
	# string literal, making it a no-op expression instead of the docstring.
	from erpnext.selling.doctype.sales_order.sales_order import (
		make_delivery_note,
		make_sales_invoice,
	)
	from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order

	# Receive 3 + 10 units into stock at a valuation rate of 700.
	se = make_stock_entry(
		company=self.company,
		item_code=self.item,
		target=self.warehouse,
		qty=3,
		basic_rate=700,
		do_not_submit=True,
	)
	item = se.items[0]
	se.append(
		"items",
		{
			"item_code": item.item_code,
			"s_warehouse": item.s_warehouse,
			"t_warehouse": item.t_warehouse,
			"qty": 10,
			"basic_rate": 700,
			"conversion_factor": item.conversion_factor or 1.0,
			"transfer_qty": flt(item.qty) * (flt(item.conversion_factor) or 1.0),
			"serial_no": item.serial_no,
			"batch_no": item.batch_no,
			"cost_center": item.cost_center,
			"expense_account": item.expense_account,
		},
	)
	se = se.save().submit()

	# Sell 10 units at 800.
	so = make_sales_order(
		customer=self.customer,
		company=self.company,
		warehouse=self.warehouse,
		item=self.item,
		rate=800,
		qty=10,
		do_not_save=False,
		do_not_submit=False,
	)

	# Deliver the full SO quantity across two delivery notes (4 + 6) ...
	dn1 = make_delivery_note(so.name)
	dn1.items[0].qty = 4
	dn1.items[0].rate = 800
	dn1.save().submit()

	dn2 = make_delivery_note(so.name)
	dn2.items[0].qty = 6
	dn2.items[0].rate = 800
	dn2.save().submit()

	# ... but invoice only 4 units.
	sinv = make_sales_invoice(so.name)
	sinv.items[0].qty = 4
	sinv.items[0].rate = 800
	sinv.save().submit()

	filters = frappe._dict(
		company=self.company, from_date=nowdate(), to_date=nowdate(), group_by="Invoice"
	)

	columns, data = execute(filters=filters)

	# Gross profit for the invoiced 4 units: 4 * (800 - 700) = 400 (12.5%).
	expected_entry = {
		"parent_invoice": sinv.name,
		"currency": "INR",
		"sales_invoice": self.item,
		"customer": self.customer,
		"posting_date": frappe.utils.datetime.date.fromisoformat(nowdate()),
		"item_code": self.item,
		"item_name": self.item,
		"warehouse": "Stores - _GP",
		"qty": 4.0,
		"avg._selling_rate": 800.0,
		"valuation_rate": 700.0,
		"selling_amount": 3200.0,
		"buying_amount": 2800.0,
		"gross_profit": 400.0,
		"gross_profit_%": 12.5,
	}
	gp_entry = [x for x in data if x.parent_invoice == sinv.name]
	# NOTE(review): assertDictContainsSubset is deprecated in unittest —
	# consider asserting with a dict-comprehension comparison instead.
	self.assertDictContainsSubset(expected_entry, gp_entry[0])

View File

@@ -24,3 +24,10 @@ frappe.query_reports["Profit and Loss Statement"]["filters"].push({
fieldtype: "Check",
default: 1,
});
frappe.query_reports["Profit and Loss Statement"]["filters"].push({
fieldname: "include_default_book_entries",
label: __("Include Default FB Entries"),
fieldtype: "Check",
default: 1,
});

View File

@@ -66,11 +66,11 @@ def execute(filters=None):
currency = filters.presentation_currency or frappe.get_cached_value(
"Company", filters.company, "default_currency"
)
report_summary = get_report_summary(
report_summary, primitive_summary = get_report_summary(
period_list, filters.periodicity, income, expense, net_profit_loss, currency, filters
)
return columns, data, None, chart, report_summary
return columns, data, None, chart, report_summary, primitive_summary
def get_report_summary(
@@ -123,7 +123,7 @@ def get_report_summary(
"datatype": "Currency",
"currency": currency,
},
]
], net_profit
def get_net_profit_loss(income, expense, period_list, company, currency=None, consolidated=False):

View File

@@ -242,9 +242,7 @@ def make_depreciation_entry(
debit_account,
accounting_dimensions,
)
frappe.db.commit()
except Exception as e:
frappe.db.rollback()
depreciation_posting_error = e
asset.set_status()
@@ -523,6 +521,7 @@ def depreciate_asset(asset_doc, date, notes):
make_depreciation_entry_for_all_asset_depr_schedules(asset_doc, date)
asset_doc.reload()
cancel_depreciation_entries(asset_doc, date)

View File

@@ -327,7 +327,7 @@ class AssetDepreciationSchedule(Document):
schedule_date = get_last_day(schedule_date)
# if asset is being sold or scrapped
if date_of_disposal:
if date_of_disposal and getdate(schedule_date) >= getdate(date_of_disposal):
from_date = add_months(
getdate(asset_doc.available_for_use_date),
(asset_doc.number_of_depreciations_booked * row.frequency_of_depreciation),

View File

@@ -380,6 +380,12 @@ class AccountsController(TransactionBase):
for bundle in bundles:
frappe.delete_doc("Serial and Batch Bundle", bundle.name)
batches = frappe.get_all(
"Batch", filters={"reference_doctype": self.doctype, "reference_name": self.name}
)
for row in batches:
frappe.delete_doc("Batch", row.name)
def validate_return_against_account(self):
if (
self.doctype in ["Sales Invoice", "Purchase Invoice"] and self.is_return and self.return_against

View File

@@ -364,16 +364,26 @@ def get_batch_no(doctype, txt, searchfield, start, page_len, filters):
filtered_batches = get_filterd_batches(batches)
if filters.get("is_inward"):
filtered_batches.extend(get_empty_batches(filters))
filtered_batches.extend(get_empty_batches(filters, start, page_len, filtered_batches, txt))
return filtered_batches
def get_empty_batches(filters):
def get_empty_batches(filters, start, page_len, filtered_batches=None, txt=None):
query_filter = {"item": filters.get("item_code")}
if txt:
query_filter["name"] = ("like", "%{0}%".format(txt))
exclude_batches = [batch[0] for batch in filtered_batches] if filtered_batches else []
if exclude_batches:
query_filter["name"] = ("not in", exclude_batches)
return frappe.get_all(
"Batch",
fields=["name", "batch_qty"],
filters={"item": filters.get("item_code"), "batch_qty": 0.0},
filters=query_filter,
limit_start=start,
limit_page_length=page_len,
as_list=1,
)

View File

@@ -48,7 +48,9 @@ class StockController(AccountsController):
super(StockController, self).validate()
if self.docstatus == 0:
self.validate_duplicate_serial_and_batch_bundle()
for table_name in ["items", "packed_items", "supplied_items"]:
self.validate_duplicate_serial_and_batch_bundle(table_name)
if not self.get("is_return"):
self.validate_inspection()
self.validate_serialized_batch()
@@ -58,12 +60,19 @@ class StockController(AccountsController):
self.validate_internal_transfer()
self.validate_putaway_capacity()
def validate_duplicate_serial_and_batch_bundle(self):
if sbb_list := [
item.get("serial_and_batch_bundle")
for item in self.items
if item.get("serial_and_batch_bundle")
]:
def validate_duplicate_serial_and_batch_bundle(self, table_name):
if not self.get(table_name):
return
sbb_list = []
for item in self.get(table_name):
if item.get("serial_and_batch_bundle"):
sbb_list.append(item.get("serial_and_batch_bundle"))
if item.get("rejected_serial_and_batch_bundle"):
sbb_list.append(item.get("rejected_serial_and_batch_bundle"))
if sbb_list:
SLE = frappe.qb.DocType("Stock Ledger Entry")
data = (
frappe.qb.from_(SLE)
@@ -188,7 +197,7 @@ class StockController(AccountsController):
not row.serial_and_batch_bundle and not row.get("rejected_serial_and_batch_bundle")
):
bundle_details = {
"item_code": row.item_code,
"item_code": row.get("rm_item_code") or row.item_code,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"voucher_type": self.doctype,
@@ -200,7 +209,7 @@ class StockController(AccountsController):
"do_not_submit": True,
}
if row.qty:
if row.get("qty") or row.get("consumed_qty"):
self.update_bundle_details(bundle_details, table_name, row)
self.create_serial_batch_bundle(bundle_details, row)
@@ -219,6 +228,12 @@ class StockController(AccountsController):
type_of_transaction = "Inward"
if not self.is_return:
type_of_transaction = "Outward"
elif table_name == "supplied_items":
qty = row.consumed_qty
warehouse = self.supplier_warehouse
type_of_transaction = "Outward"
if self.is_return:
type_of_transaction = "Inward"
else:
type_of_transaction = get_type_of_transaction(self, row)
@@ -550,13 +565,30 @@ class StockController(AccountsController):
)
def delete_auto_created_batches(self):
for row in self.items:
if row.serial_and_batch_bundle:
frappe.db.set_value(
"Serial and Batch Bundle", row.serial_and_batch_bundle, {"is_cancelled": 1}
)
for table_name in ["items", "packed_items", "supplied_items"]:
if not self.get(table_name):
continue
row.db_set("serial_and_batch_bundle", None)
for row in self.get(table_name):
update_values = {}
if row.get("batch_no"):
update_values["batch_no"] = None
if row.serial_and_batch_bundle:
update_values["serial_and_batch_bundle"] = None
frappe.db.set_value(
"Serial and Batch Bundle", row.serial_and_batch_bundle, {"is_cancelled": 1}
)
if update_values:
row.db_set(update_values)
if table_name == "items" and row.get("rejected_serial_and_batch_bundle"):
frappe.db.set_value(
"Serial and Batch Bundle", row.rejected_serial_and_batch_bundle, {"is_cancelled": 1}
)
row.db_set("rejected_serial_and_batch_bundle", None)
def set_serial_and_batch_bundle(self, table_name=None, ignore_validate=False):
if not table_name:

View File

@@ -379,10 +379,10 @@ class SubcontractingController(StockController):
if row.serial_no:
details.serial_no.extend(get_serial_nos(row.serial_no))
if row.batch_no:
elif row.batch_no:
details.batch_no[row.batch_no] += row.qty
if voucher_bundle_data:
elif voucher_bundle_data:
bundle_key = (row.rm_item_code, row.main_item_code, row.t_warehouse, row.voucher_no)
bundle_data = voucher_bundle_data.get(bundle_key, frappe._dict())
@@ -392,6 +392,9 @@ class SubcontractingController(StockController):
if bundle_data.batch_nos:
for batch_no, qty in bundle_data.batch_nos.items():
if qty < 0:
qty = abs(qty)
if qty > 0:
details.batch_no[batch_no] += qty
bundle_data.batch_nos[batch_no] -= qty
@@ -545,17 +548,24 @@ class SubcontractingController(StockController):
rm_obj.reference_name = item_row.name
use_serial_batch_fields = frappe.db.get_single_value("Stock Settings", "use_serial_batch_fields")
if self.doctype == self.subcontract_data.order_doctype:
rm_obj.required_qty = qty
rm_obj.amount = rm_obj.required_qty * rm_obj.rate
else:
rm_obj.consumed_qty = qty
rm_obj.required_qty = bom_item.required_qty or qty
rm_obj.serial_and_batch_bundle = None
setattr(
rm_obj, self.subcontract_data.order_field, item_row.get(self.subcontract_data.order_field)
)
if self.doctype == "Subcontracting Receipt":
if use_serial_batch_fields:
rm_obj.use_serial_batch_fields = 1
self.__set_batch_nos(bom_item, item_row, rm_obj, qty)
if self.doctype == "Subcontracting Receipt" and not use_serial_batch_fields:
args = frappe._dict(
{
"item_code": rm_obj.rm_item_code,
@@ -581,6 +591,68 @@ class SubcontractingController(StockController):
rm_obj.rate = get_incoming_rate(args)
def __set_batch_nos(self, bom_item, item_row, rm_obj, qty):
key = (rm_obj.rm_item_code, item_row.item_code, item_row.get(self.subcontract_data.order_field))
if self.available_materials.get(key) and self.available_materials[key]["batch_no"]:
new_rm_obj = None
for batch_no, batch_qty in self.available_materials[key]["batch_no"].items():
if batch_qty >= qty or (
rm_obj.consumed_qty == 0
and self.backflush_based_on == "BOM"
and len(self.available_materials[key]["batch_no"]) == 1
):
if rm_obj.consumed_qty == 0:
self.__set_consumed_qty(rm_obj, qty)
self.__set_batch_no_as_per_qty(item_row, rm_obj, batch_no, qty)
self.available_materials[key]["batch_no"][batch_no] -= qty
return
elif qty > 0 and batch_qty > 0:
qty -= batch_qty
new_rm_obj = self.append(self.raw_material_table, bom_item)
new_rm_obj.serial_and_batch_bundle = None
new_rm_obj.use_serial_batch_fields = 1
new_rm_obj.reference_name = item_row.name
self.__set_batch_no_as_per_qty(item_row, new_rm_obj, batch_no, batch_qty)
self.available_materials[key]["batch_no"][batch_no] = 0
if new_rm_obj:
self.remove(rm_obj)
elif abs(qty) > 0:
self.__set_consumed_qty(rm_obj, qty)
else:
self.__set_consumed_qty(rm_obj, qty, bom_item.required_qty or qty)
self.__set_serial_nos(item_row, rm_obj)
def __set_consumed_qty(self, rm_obj, consumed_qty, required_qty=0):
rm_obj.required_qty = required_qty
rm_obj.consumed_qty = consumed_qty
def __set_serial_nos(self, item_row, rm_obj):
key = (rm_obj.rm_item_code, item_row.item_code, item_row.get(self.subcontract_data.order_field))
if self.available_materials.get(key) and self.available_materials[key]["serial_no"]:
used_serial_nos = self.available_materials[key]["serial_no"][0 : cint(rm_obj.consumed_qty)]
rm_obj.serial_no = "\n".join(used_serial_nos)
# Removed the used serial nos from the list
for sn in used_serial_nos:
self.available_materials[key]["serial_no"].remove(sn)
def __set_batch_no_as_per_qty(self, item_row, rm_obj, batch_no, qty):
rm_obj.update(
{
"consumed_qty": qty,
"batch_no": batch_no,
"required_qty": qty,
self.subcontract_data.order_field: item_row.get(self.subcontract_data.order_field),
}
)
self.__set_serial_nos(item_row, rm_obj)
def __get_qty_based_on_material_transfer(self, item_row, transfer_item):
key = (item_row.item_code, item_row.get(self.subcontract_data.order_field))
@@ -1076,6 +1148,9 @@ def make_rm_stock_entry(
"serial_and_batch_bundle": rm_item.get("serial_and_batch_bundle"),
"main_item_code": fg_item_code,
"allow_alternative_item": item_wh.get(rm_item_code, {}).get("allow_alternative_item"),
"use_serial_batch_fields": rm_item.get("use_serial_batch_fields"),
"serial_no": rm_item.get("serial_no") if rm_item.get("use_serial_batch_fields") else None,
"batch_no": rm_item.get("batch_no") if rm_item.get("use_serial_batch_fields") else None,
}
}

View File

@@ -140,6 +140,7 @@ class TestSubcontractingController(FrappeTestCase):
- Create partial SCR against the SCO and check serial nos and batch no.
"""
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
set_backflush_based_on("Material Transferred for Subcontract")
service_items = [
{
@@ -202,6 +203,8 @@ class TestSubcontractingController(FrappeTestCase):
if value.get(field):
self.assertEqual(value.get(field), transferred_detais.get(field))
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
def test_subcontracting_with_same_components_different_fg(self):
"""
- Set backflush based on Material Transfer.
@@ -211,6 +214,7 @@ class TestSubcontractingController(FrappeTestCase):
- Create partial SCR against the SCO and check serial nos.
"""
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
set_backflush_based_on("Material Transferred for Subcontract")
service_items = [
{
@@ -278,6 +282,8 @@ class TestSubcontractingController(FrappeTestCase):
self.assertEqual(value.qty, 6)
self.assertEqual(sorted(value.serial_no), sorted(transferred_detais.get("serial_no")[6:12]))
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
def test_return_non_consumed_materials(self):
"""
- Set backflush based on Material Transfer.
@@ -288,6 +294,7 @@ class TestSubcontractingController(FrappeTestCase):
- After that return the non consumed material back to the store from supplier's warehouse.
"""
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
set_backflush_based_on("Material Transferred for Subcontract")
service_items = [
{
@@ -333,6 +340,7 @@ class TestSubcontractingController(FrappeTestCase):
get_serial_nos(doc.items[0].serial_no),
itemwise_details.get(doc.items[0].item_code)["serial_no"][5:6],
)
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
def test_item_with_batch_based_on_bom(self):
"""
@@ -578,6 +586,7 @@ class TestSubcontractingController(FrappeTestCase):
- Create SCR for remaining qty against the SCO and change the qty manually.
"""
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
set_backflush_based_on("Material Transferred for Subcontract")
service_items = [
{
@@ -643,6 +652,8 @@ class TestSubcontractingController(FrappeTestCase):
self.assertEqual(value.qty, details.qty)
self.assertEqual(sorted(value.serial_no), sorted(details.serial_no))
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
def test_incorrect_serial_no_components_based_on_material_transfer(self):
"""
- Set backflush based on Material Transferred for Subcontract.
@@ -652,6 +663,7 @@ class TestSubcontractingController(FrappeTestCase):
- System should throw the error and not allowed to save the SCR.
"""
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
serial_no = "ABC"
if not frappe.db.exists("Serial No", serial_no):
frappe.get_doc(
@@ -712,6 +724,7 @@ class TestSubcontractingController(FrappeTestCase):
scr1.save()
self.delete_bundle_from_scr(scr1)
scr1.delete()
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
@staticmethod
def delete_bundle_from_scr(scr):
@@ -844,6 +857,223 @@ class TestSubcontractingController(FrappeTestCase):
for item in sco.get("supplied_items"):
self.assertEqual(item.supplied_qty, 0.0)
def test_sco_with_material_transfer_with_use_serial_batch_fields(self):
	"""
	- Set backflush based on Material Transfer.
	- Create SCO for the item Subcontracted Item SA1 and Subcontracted Item SA5.
	- Transfer the components from Stores to Supplier warehouse with batch no and serial nos.
	- Transfer extra item Subcontracted SRM Item 4 for the subcontract item Subcontracted Item SA5.
	- Create partial SCR against the SCO and check serial nos and batch no.
	"""
	set_backflush_based_on("Material Transferred for Subcontract")
	service_items = [
		{
			"warehouse": "_Test Warehouse - _TC",
			"item_code": "Subcontracted Service Item 1",
			"qty": 5,
			"rate": 100,
			"fg_item": "Subcontracted Item SA1",
			"fg_item_qty": 5,
		},
		{
			"warehouse": "_Test Warehouse - _TC",
			"item_code": "Subcontracted Service Item 5",
			"qty": 6,
			"rate": 100,
			"fg_item": "Subcontracted Item SA5",
			"fg_item_qty": 6,
		},
	]
	sco = get_subcontracting_order(service_items=service_items)
	rm_items = get_rm_items(sco.supplied_items)
	# Extra component (not part of the BOM-derived supplied items) transferred
	# against the second finished good.
	rm_items.append(
		{
			"main_item_code": "Subcontracted Item SA5",
			"item_code": "Subcontracted SRM Item 4",
			"qty": 6,
		}
	)
	itemwise_details = make_stock_in_entry(rm_items=rm_items)

	# Map each raw-material row to its SCO item: qty-5 rows belong to the
	# first SCO item, everything else to the second.
	for item in rm_items:
		item["sco_rm_detail"] = sco.items[0].name if item.get("qty") == 5 else sco.items[1].name

	make_stock_transfer_entry(
		sco_no=sco.name,
		rm_items=rm_items,
		itemwise_details=copy.deepcopy(itemwise_details),
	)

	# Partial receipt: drop the second FG item, receive only the first.
	scr1 = make_subcontracting_receipt(sco.name)
	scr1.remove(scr1.items[1])
	scr1.save()
	scr1.submit()

	# Supplied items on the receipt must match what was transferred
	# (serial nos compared order-insensitively).
	for key, value in get_supplied_items(scr1).items():
		transferred_detais = itemwise_details.get(key)
		for field in ["qty", "serial_no", "batch_no"]:
			if value.get(field):
				data = value.get(field)
				if field == "serial_no":
					data = sorted(data)
				self.assertEqual(data, transferred_detais.get(field))

	# Second receipt picks up the remaining SCO item; verify the same way.
	scr2 = make_subcontracting_receipt(sco.name)
	scr2.save()
	scr2.submit()

	for key, value in get_supplied_items(scr2).items():
		transferred_detais = itemwise_details.get(key)
		for field in ["qty", "serial_no", "batch_no"]:
			if value.get(field):
				data = value.get(field)
				if field == "serial_no":
					data = sorted(data)
				self.assertEqual(data, transferred_detais.get(field))
def test_subcontracting_with_same_components_different_fg_with_serial_batch_fields(self):
	"""
	- Set backflush based on Material Transfer.
	- Create SCO for the item Subcontracted Item SA2 and Subcontracted Item SA3.
	- Transfer the components from Stores to Supplier warehouse with serial nos.
	- Transfer extra qty of components for the item Subcontracted Item SA2.
	- Create partial SCR against the SCO and check serial nos.
	"""
	set_backflush_based_on("Material Transferred for Subcontract")
	service_items = [
		{
			"warehouse": "_Test Warehouse - _TC",
			"item_code": "Subcontracted Service Item 2",
			"qty": 5,
			"rate": 100,
			"fg_item": "Subcontracted Item SA2",
			"fg_item_qty": 5,
		},
		{
			"warehouse": "_Test Warehouse - _TC",
			"item_code": "Subcontracted Service Item 3",
			"qty": 6,
			"rate": 100,
			"fg_item": "Subcontracted Item SA3",
			"fg_item_qty": 6,
		},
	]
	sco = get_subcontracting_order(service_items=service_items)
	rm_items = get_rm_items(sco.supplied_items)
	# Transfer one extra unit of the first component.
	rm_items[0]["qty"] += 1
	itemwise_details = make_stock_in_entry(rm_items=rm_items)

	# NOTE(review): after the += 1 above, the first row's qty is no longer 5,
	# so this conditional maps it to sco.items[1] — confirm this is intended.
	for item in rm_items:
		item["sco_rm_detail"] = sco.items[0].name if item.get("qty") == 5 else sco.items[1].name
		item["use_serial_batch_fields"] = 1

	make_stock_transfer_entry(
		sco_no=sco.name,
		rm_items=rm_items,
		itemwise_details=copy.deepcopy(itemwise_details),
	)

	# Partial receipt of 3 units of the first FG item.
	scr1 = make_subcontracting_receipt(sco.name)
	scr1.items[0].qty = 3
	scr1.remove(scr1.items[1])
	scr1.save()
	scr1.submit()

	# 3 FG units plus the 1 extra transferred component → 4 serial nos consumed.
	for key, value in get_supplied_items(scr1).items():
		transferred_detais = itemwise_details.get(key)
		self.assertEqual(value.qty, 4)
		self.assertEqual(sorted(value.serial_no), sorted(transferred_detais.get("serial_no")[0:4]))

	# Receive the remaining 2 units of the first FG item.
	scr2 = make_subcontracting_receipt(sco.name)
	scr2.items[0].qty = 2
	scr2.remove(scr2.items[1])
	scr2.save()
	scr2.submit()

	for key, value in get_supplied_items(scr2).items():
		transferred_detais = itemwise_details.get(key)
		self.assertEqual(value.qty, 2)
		self.assertEqual(sorted(value.serial_no), sorted(transferred_detais.get("serial_no")[4:6]))

	# Final receipt covers the second FG item (6 units, serial nos 6..11).
	scr3 = make_subcontracting_receipt(sco.name)
	scr3.save()
	scr3.submit()

	for key, value in get_supplied_items(scr3).items():
		transferred_detais = itemwise_details.get(key)
		self.assertEqual(value.qty, 6)
		self.assertEqual(sorted(value.serial_no), sorted(transferred_detais.get("serial_no")[6:12]))
def test_return_non_consumed_materials_with_serial_batch_fields(self):
	"""
	- Set backflush based on Material Transfer.
	- Create SCO for item Subcontracted Item SA2.
	- Transfer the components from Stores to Supplier warehouse with serial nos.
	- Transfer extra qty of component for the subcontracted item Subcontracted Item SA2.
	- Create SCR for full qty against the SCO and change the qty of raw material.
	- After that return the non consumed material back to the store from supplier's warehouse.
	"""
	set_backflush_based_on("Material Transferred for Subcontract")
	service_items = [
		{
			"warehouse": "_Test Warehouse - _TC",
			"item_code": "Subcontracted Service Item 2",
			"qty": 5,
			"rate": 100,
			"fg_item": "Subcontracted Item SA2",
			"fg_item_qty": 5,
		},
	]
	sco = get_subcontracting_order(service_items=service_items)
	rm_items = get_rm_items(sco.supplied_items)
	# Transfer one unit more than the SCO requires; that surplus is what
	# gets returned at the end of the test.
	rm_items[0]["qty"] += 1
	itemwise_details = make_stock_in_entry(rm_items=rm_items)
	for item in rm_items:
		item["use_serial_batch_fields"] = 1
		item["sco_rm_detail"] = sco.items[0].name

	make_stock_transfer_entry(
		sco_no=sco.name,
		rm_items=rm_items,
		itemwise_details=copy.deepcopy(itemwise_details),
	)

	scr1 = make_subcontracting_receipt(sco.name)
	scr1.save()
	# Manually cap consumption at 5 units (the first 5 transferred serial nos),
	# leaving the 6th unit unconsumed at the supplier.
	scr1.supplied_items[0].consumed_qty = 5
	scr1.supplied_items[0].serial_no = "\n".join(
		sorted(itemwise_details.get("Subcontracted SRM Item 2").get("serial_no")[0:5])
	)
	scr1.submit()

	for key, value in get_supplied_items(scr1).items():
		transferred_detais = itemwise_details.get(key)
		self.assertTrue(value.use_serial_batch_fields)
		self.assertEqual(value.qty, 5)
		self.assertEqual(sorted(value.serial_no), sorted(transferred_detais.get("serial_no")[0:5]))

	sco.load_from_db()
	self.assertEqual(sco.supplied_items[0].consumed_qty, 5)
	# Return the single unconsumed unit from the supplier's warehouse back to
	# stores; it must carry the 6th (unused) serial no.
	doc = get_materials_from_supplier(sco.name, [d.name for d in sco.supplied_items])
	self.assertEqual(doc.items[0].qty, 1)
	self.assertEqual(doc.items[0].s_warehouse, "_Test Warehouse 1 - _TC")
	self.assertEqual(doc.items[0].t_warehouse, "_Test Warehouse - _TC")
	self.assertEqual(
		get_serial_nos(doc.items[0].serial_no),
		itemwise_details.get(doc.items[0].item_code)["serial_no"][5:6],
	)
def add_second_row_in_scr(scr):
item_dict = {}
@@ -914,6 +1144,7 @@ def update_item_details(child_row, details):
else child_row.get("consumed_qty")
)
details.use_serial_batch_fields = child_row.get("use_serial_batch_fields")
if child_row.serial_and_batch_bundle:
doc = frappe.get_doc("Serial and Batch Bundle", child_row.serial_and_batch_bundle)
for row in doc.get("entries"):
@@ -945,6 +1176,7 @@ def make_stock_transfer_entry(**args):
"rate": row.rate or 100,
"stock_uom": row.stock_uom or "Nos",
"warehouse": row.warehouse or "_Test Warehouse - _TC",
"use_serial_batch_fields": row.get("use_serial_batch_fields"),
}
item_details = args.itemwise_details.get(row.item_code)
@@ -960,9 +1192,12 @@ def make_stock_transfer_entry(**args):
if batch_qty >= row.qty:
batches[batch_no] = row.qty
item_details.batch_no[batch_no] -= row.qty
if row.get("use_serial_batch_fields"):
item["batch_no"] = batch_no
break
if serial_nos or batches:
if not row.get("use_serial_batch_fields") and (serial_nos or batches):
item["serial_and_batch_bundle"] = make_serial_batch_bundle(
frappe._dict(
{
@@ -978,6 +1213,9 @@ def make_stock_transfer_entry(**args):
)
).name
if serial_nos and row.get("use_serial_batch_fields"):
item["serial_no"] = "\n".join(serial_nos)
items.append(item)
ste_dict = make_rm_stock_entry(args.sco_no, items)
@@ -1132,6 +1370,7 @@ def get_rm_items(supplied_items):
"rate": item.rate,
"stock_uom": item.stock_uom,
"warehouse": item.reserve_warehouse,
"use_serial_batch_fields": 0,
}
)

View File

@@ -312,7 +312,10 @@ period_closing_doctypes = [
doc_events = {
"*": {
"validate": "erpnext.support.doctype.service_level_agreement.service_level_agreement.apply",
"validate": [
"erpnext.support.doctype.service_level_agreement.service_level_agreement.apply",
"erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.check_for_running_deletion_job",
],
},
tuple(period_closing_doctypes): {
"validate": "erpnext.accounts.doctype.accounting_period.accounting_period.validate_accounting_period_on_doc_save",

View File

@@ -490,7 +490,7 @@ class JobCard(Document):
{
"to_time": get_datetime(args.get("complete_time")),
"operation": args.get("sub_operation"),
"completed_qty": args.get("completed_qty") or 0.0,
"completed_qty": (args.get("completed_qty") if last_row.idx == row.idx else 0.0),
}
)
elif args.get("start_time"):

View File

@@ -1207,6 +1207,51 @@ class TestWorkOrder(FrappeTestCase):
except frappe.MandatoryError:
self.fail("Batch generation causing failing in Work Order")
@change_settings("Manufacturing Settings", {"make_serial_no_batch_from_work_order": 1})
def test_auto_serial_no_batch_creation(self):
from erpnext.manufacturing.doctype.bom.test_bom import create_nested_bom
fg_item = frappe.generate_hash(length=20)
child_item = frappe.generate_hash(length=20)
bom_tree = {fg_item: {child_item: {}}}
create_nested_bom(bom_tree, prefix="")
item = frappe.get_doc("Item", fg_item)
item.update(
{
"has_serial_no": 1,
"has_batch_no": 1,
"serial_no_series": f"SN-TEST-{item.name}.#####",
"create_new_batch": 1,
"batch_number_series": f"BATCH-TEST-{item.name}.#####",
}
)
item.save()
try:
wo_order = make_wo_order_test_record(item=fg_item, batch_size=5, qty=10, skip_transfer=True)
serial_nos = self.get_serial_nos_for_fg(wo_order.name)
stock_entry = frappe.get_doc(make_stock_entry(wo_order.name, "Manufacture", 10))
stock_entry.set_work_order_details()
stock_entry.set_serial_no_batch_for_finished_good()
for row in stock_entry.items:
if row.item_code == fg_item:
self.assertTrue(row.serial_and_batch_bundle)
self.assertEqual(
sorted(get_serial_nos_from_bundle(row.serial_and_batch_bundle)), sorted(serial_nos)
)
sn_doc = frappe.get_doc("Serial and Batch Bundle", row.serial_and_batch_bundle)
for row in sn_doc.entries:
self.assertTrue(row.serial_no)
self.assertTrue(row.batch_no)
except frappe.MandatoryError:
self.fail("Batch generation causing failing in Work Order")
def get_serial_nos_for_fg(self, work_order):
serial_nos = []
for row in frappe.get_all("Serial No", filters={"work_order": work_order}):
@@ -2269,6 +2314,7 @@ def make_wo_order_test_record(**args):
wo_order.planned_start_date = args.planned_start_date or now()
wo_order.transfer_material_against = args.transfer_material_against or "Work Order"
wo_order.from_wip_warehouse = args.from_wip_warehouse or 0
wo_order.batch_size = args.batch_size or 0
if args.source_warehouse:
for item in wo_order.get("required_items"):

View File

@@ -536,6 +536,12 @@ class WorkOrder(Document):
"Item", self.production_item, ["serial_no_series", "item_name", "description"], as_dict=1
)
batches = []
if self.has_batch_no:
batches = frappe.get_all(
"Batch", filters={"reference_name": self.name}, order_by="creation", pluck="name"
)
serial_nos = []
if item_details.serial_no_series:
serial_nos = get_available_serial_nos(item_details.serial_no_series, self.qty)
@@ -556,10 +562,20 @@ class WorkOrder(Document):
"description",
"status",
"work_order",
"batch_no",
]
serial_nos_details = []
index = 0
for serial_no in serial_nos:
index += 1
batch_no = None
if batches and self.batch_size:
batch_no = batches[0]
if index % self.batch_size == 0:
batches.remove(batch_no)
serial_nos_details.append(
(
serial_no,
@@ -574,6 +590,7 @@ class WorkOrder(Document):
item_details.description,
"Inactive",
self.name,
batch_no,
)
)

View File

@@ -107,7 +107,7 @@ class BOMConfigurator {
this.frm?.doc.docstatus === 0
? [
{
label: __(frappe.utils.icon("edit", "sm") + " Qty"),
label: `${frappe.utils.icon("edit", "sm")} ${__("Qty")}`,
click: function (node) {
let view = frappe.views.trees["BOM Configurator"];
view.events.edit_qty(node, view);
@@ -115,7 +115,7 @@ class BOMConfigurator {
btnClass: "hidden-xs",
},
{
label: __(frappe.utils.icon("add", "sm") + " Raw Material"),
label: `${frappe.utils.icon("add", "sm")} ${__("Raw Material")}`,
click: function (node) {
let view = frappe.views.trees["BOM Configurator"];
view.events.add_item(node, view);
@@ -126,7 +126,7 @@ class BOMConfigurator {
btnClass: "hidden-xs",
},
{
label: __(frappe.utils.icon("add", "sm") + " Sub Assembly"),
label: `${frappe.utils.icon("add", "sm")} ${__("Sub Assembly")}`,
click: function (node) {
let view = frappe.views.trees["BOM Configurator"];
view.events.add_sub_assembly(node, view);
@@ -156,7 +156,7 @@ class BOMConfigurator {
btnClass: "hidden-xs expand-all-btn",
},
{
label: __(frappe.utils.icon("move", "sm") + " Sub Assembly"),
label: `${frappe.utils.icon("move", "sm")} ${__("Sub Assembly")}`,
click: function (node) {
let view = frappe.views.trees["BOM Configurator"];
view.events.convert_to_sub_assembly(node, view);
@@ -167,7 +167,7 @@ class BOMConfigurator {
btnClass: "hidden-xs",
},
{
label: __(frappe.utils.icon("delete", "sm") + __(" Item")),
label: `${frappe.utils.icon("delete", "sm")} ${__("Item")}`,
click: function (node) {
let view = frappe.views.trees["BOM Configurator"];
view.events.delete_node(node, view);

View File

@@ -1266,8 +1266,11 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
calculate_stock_uom_rate(doc, cdt, cdn) {
let item = frappe.get_doc(cdt, cdn);
item.stock_uom_rate = flt(item.rate)/flt(item.conversion_factor);
refresh_field("stock_uom_rate", item.name, item.parentfield);
if (item?.rate) {
item.stock_uom_rate = flt(item.rate) / flt(item.conversion_factor);
refresh_field("stock_uom_rate", item.name, item.parentfield);
}
}
service_stop_date(frm, cdt, cdn) {
var child = locals[cdt][cdn];

View File

@@ -834,7 +834,8 @@
"label": "Purchase Order",
"options": "Purchase Order",
"print_hide": 1,
"read_only": 1
"read_only": 1,
"search_index": 1
},
{
"fieldname": "column_break_89",
@@ -909,7 +910,7 @@
"idx": 1,
"istable": 1,
"links": [],
"modified": "2024-01-25 14:24:00.330219",
"modified": "2024-03-21 18:15:56.625005",
"modified_by": "Administrator",
"module": "Selling",
"name": "Sales Order Item",

View File

@@ -181,8 +181,10 @@ def get_random_date(start_date, start_range, end_range):
def create_transaction_deletion_record(company):
transaction_deletion_record = frappe.new_doc("Transaction Deletion Record")
transaction_deletion_record.company = company
transaction_deletion_record.process_in_single_transaction = True
transaction_deletion_record.save(ignore_permissions=True)
transaction_deletion_record.submit()
transaction_deletion_record.start_deletion_tasks()
def clear_masters():

View File

@@ -168,7 +168,7 @@ frappe.ui.form.on("Company", {
delete_company_transactions: function (frm) {
frappe.call({
method: "erpnext.setup.doctype.company.company.is_deletion_job_running",
method: "erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.is_deletion_doc_running",
args: {
company: frm.doc.name,
},

View File

@@ -12,7 +12,6 @@ from frappe.contacts.address_and_contact import load_address_and_contact
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
from frappe.desk.page.setup_wizard.setup_wizard import make_records
from frappe.utils import cint, formatdate, get_link_to_form, get_timestamp, today
from frappe.utils.background_jobs import get_job, is_job_enqueued
from frappe.utils.nestedset import NestedSet, rebuild_tree
from erpnext.accounts.doctype.account.account import get_account_currency
@@ -904,37 +903,21 @@ def get_default_company_address(name, sort_key="is_primary_address", existing_ad
return None
def generate_id_for_deletion_job(company):
return "delete_company_transactions_" + company
@frappe.whitelist()
def is_deletion_job_running(company):
job_id = generate_id_for_deletion_job(company)
if is_job_enqueued(job_id):
job_name = get_job(job_id).get_id() # job name will have site prefix
frappe.throw(
_("A Transaction Deletion Job: {0} is already running for {1}").format(
frappe.bold(get_link_to_form("RQ Job", job_name)), frappe.bold(company)
)
)
@frappe.whitelist()
def create_transaction_deletion_request(company):
is_deletion_job_running(company)
job_id = generate_id_for_deletion_job(company)
from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (
is_deletion_doc_running,
)
is_deletion_doc_running(company)
tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company})
tdr.insert()
tdr.submit()
tdr.start_deletion_tasks()
frappe.enqueue(
"frappe.utils.background_jobs.run_doc_method",
doctype=tdr.doctype,
name=tdr.name,
doc_method="submit",
job_id=job_id,
queue="long",
enqueue_after_commit=True,
frappe.msgprint(
_("A Transaction Deletion Document: {0} is triggered for {0}").format(
get_link_to_form("Transaction Deletion Record", tdr.name)
),
frappe.bold(company),
)
frappe.msgprint(_("A Transaction Deletion Job is triggered for {0}").format(frappe.bold(company)))

View File

@@ -29,6 +29,7 @@ class TestTransactionDeletionRecord(FrappeTestCase):
for i in range(5):
create_task("Dunder Mifflin Paper Co")
tdr = create_transaction_deletion_doc("Dunder Mifflin Paper Co")
tdr.reload()
for doctype in tdr.doctypes:
if doctype.doctype_name == "Task":
self.assertEqual(doctype.no_of_docs, 5)
@@ -60,7 +61,9 @@ def create_company(company_name):
def create_transaction_deletion_doc(company):
tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company})
tdr.insert()
tdr.process_in_single_transaction = True
tdr.submit()
tdr.start_deletion_tasks()
return tdr

View File

@@ -10,20 +10,24 @@ frappe.ui.form.on("Transaction Deletion Record", {
callback: function (r) {
doctypes_to_be_ignored_array = r.message;
populate_doctypes_to_be_ignored(doctypes_to_be_ignored_array, frm);
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false);
frm.refresh_field("doctypes_to_be_ignored");
},
});
}
frm.get_field("doctypes_to_be_ignored").grid.cannot_add_rows = true;
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false);
frm.refresh_field("doctypes_to_be_ignored");
},
refresh: function (frm) {
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false);
frm.refresh_field("doctypes_to_be_ignored");
if (frm.doc.docstatus == 1 && ["Queued", "Failed"].find((x) => x == frm.doc.status)) {
let execute_btn = frm.doc.status == "Queued" ? __("Start Deletion") : __("Retry");
frm.add_custom_button(execute_btn, () => {
// Entry point for chain of events
frm.call({
method: "start_deletion_tasks",
doc: frm.doc,
});
});
}
},
});

View File

@@ -7,10 +7,21 @@
"engine": "InnoDB",
"field_order": [
"company",
"section_break_qpwb",
"status",
"error_log",
"tasks_section",
"delete_bin_data",
"delete_leads_and_addresses",
"reset_company_default_values",
"clear_notifications",
"initialize_doctypes_table",
"delete_transactions",
"section_break_tbej",
"doctypes",
"doctypes_to_be_ignored",
"amended_from",
"status"
"process_in_single_transaction"
],
"fields": [
{
@@ -25,14 +36,16 @@
"fieldname": "doctypes",
"fieldtype": "Table",
"label": "Summary",
"options": "Transaction Deletion Record Item",
"no_copy": 1,
"options": "Transaction Deletion Record Details",
"read_only": 1
},
{
"fieldname": "doctypes_to_be_ignored",
"fieldtype": "Table",
"label": "Excluded DocTypes",
"options": "Transaction Deletion Record Item"
"options": "Transaction Deletion Record Item",
"read_only": 1
},
{
"fieldname": "amended_from",
@@ -46,18 +59,96 @@
{
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Draft\nCompleted"
"no_copy": 1,
"options": "Queued\nRunning\nFailed\nCompleted\nCancelled",
"read_only": 1
},
{
"fieldname": "section_break_tbej",
"fieldtype": "Section Break"
},
{
"fieldname": "tasks_section",
"fieldtype": "Section Break",
"label": "Tasks"
},
{
"default": "0",
"fieldname": "delete_bin_data",
"fieldtype": "Check",
"label": "Delete Bins",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "delete_leads_and_addresses",
"fieldtype": "Check",
"label": "Delete Leads and Addresses",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "clear_notifications",
"fieldtype": "Check",
"label": "Clear Notifications",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "reset_company_default_values",
"fieldtype": "Check",
"label": "Reset Company Default Values",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "delete_transactions",
"fieldtype": "Check",
"label": "Delete Transactions",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "initialize_doctypes_table",
"fieldtype": "Check",
"label": "Initialize Summary Table",
"no_copy": 1,
"read_only": 1
},
{
"depends_on": "eval: doc.error_log",
"fieldname": "error_log",
"fieldtype": "Long Text",
"label": "Error Log"
},
{
"fieldname": "section_break_qpwb",
"fieldtype": "Section Break"
},
{
"default": "0",
"fieldname": "process_in_single_transaction",
"fieldtype": "Check",
"hidden": 1,
"label": "Process in Single Transaction",
"no_copy": 1,
"read_only": 1
}
],
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
"modified": "2021-08-04 20:15:59.071493",
"modified": "2024-03-21 10:29:19.456413",
"modified_by": "Administrator",
"module": "Setup",
"name": "Transaction Deletion Record",
"naming_rule": "Expression (old style)",
"owner": "Administrator",
"permissions": [
{
@@ -76,5 +167,6 @@
],
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View File

@@ -1,12 +1,14 @@
# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from collections import OrderedDict
import frappe
from frappe import _, qb
from frappe.desk.notifications import clear_notifications
from frappe.model.document import Document
from frappe.utils import cint, create_batch
from frappe.utils import cint, comma_and, create_batch, get_link_to_form
from frappe.utils.background_jobs import get_job, is_job_enqueued
class TransactionDeletionRecord(Document):
@@ -18,20 +20,42 @@ class TransactionDeletionRecord(Document):
if TYPE_CHECKING:
from frappe.types import DF
from erpnext.accounts.doctype.transaction_deletion_record_details.transaction_deletion_record_details import (
TransactionDeletionRecordDetails,
)
from erpnext.setup.doctype.transaction_deletion_record_item.transaction_deletion_record_item import (
TransactionDeletionRecordItem,
)
amended_from: DF.Link | None
clear_notifications: DF.Check
company: DF.Link
doctypes: DF.Table[TransactionDeletionRecordItem]
delete_bin_data: DF.Check
delete_leads_and_addresses: DF.Check
delete_transactions: DF.Check
doctypes: DF.Table[TransactionDeletionRecordDetails]
doctypes_to_be_ignored: DF.Table[TransactionDeletionRecordItem]
status: DF.Literal["Draft", "Completed"]
error_log: DF.LongText | None
initialize_doctypes_table: DF.Check
process_in_single_transaction: DF.Check
reset_company_default_values: DF.Check
status: DF.Literal["Queued", "Running", "Failed", "Completed", "Cancelled"]
# end: auto-generated types
def __init__(self, *args, **kwargs):
super(TransactionDeletionRecord, self).__init__(*args, **kwargs)
self.batch_size = 5000
# Tasks are listed by their execution order
self.task_to_internal_method_map = OrderedDict(
{
"Delete Bins": "delete_bins",
"Delete Leads and Addresses": "delete_lead_addresses",
"Reset Company Values": "reset_company_values",
"Clear Notifications": "delete_notifications",
"Initialize Summary Table": "initialize_doctypes_to_be_deleted_table",
"Delete Transactions": "delete_company_transactions",
}
)
def validate(self):
frappe.only_for("System Manager")
@@ -48,104 +72,266 @@ class TransactionDeletionRecord(Document):
title=_("Not Allowed"),
)
def generate_job_name_for_task(self, task=None):
method = self.task_to_internal_method_map[task]
return f"{self.name}_{method}"
def generate_job_name_for_next_tasks(self, task=None):
job_names = []
current_task_idx = list(self.task_to_internal_method_map).index(task)
for idx, task in enumerate(self.task_to_internal_method_map.keys(), 0):
# generate job_name for next tasks
if idx > current_task_idx:
job_names.append(self.generate_job_name_for_task(task))
return job_names
def generate_job_name_for_all_tasks(self):
job_names = []
for task in self.task_to_internal_method_map.keys():
job_names.append(self.generate_job_name_for_task(task))
return job_names
def before_submit(self):
if queued_docs := frappe.db.get_all(
"Transaction Deletion Record",
filters={"company": self.company, "status": ("in", ["Running", "Queued"]), "docstatus": 1},
pluck="name",
):
frappe.throw(
_(
"Cannot enqueue multi docs for one company. {0} is already queued/running for company: {1}"
).format(
comma_and([get_link_to_form("Transaction Deletion Record", x) for x in queued_docs]),
frappe.bold(self.company),
)
)
if not self.doctypes_to_be_ignored:
self.populate_doctypes_to_be_ignored_table()
self.delete_bins()
self.delete_lead_addresses()
self.reset_company_values()
clear_notifications()
self.delete_company_transactions()
def reset_task_flags(self):
self.clear_notifications = 0
self.delete_bin_data = 0
self.delete_leads_and_addresses = 0
self.delete_transactions = 0
self.initialize_doctypes_table = 0
self.reset_company_default_values = 0
def before_save(self):
self.status = ""
self.doctypes.clear()
self.reset_task_flags()
def on_submit(self):
self.db_set("status", "Queued")
def on_cancel(self):
self.db_set("status", "Cancelled")
def enqueue_task(self, task: str | None = None):
if task and task in self.task_to_internal_method_map:
# make sure that none of next tasks are already running
job_names = self.generate_job_name_for_next_tasks(task=task)
self.validate_running_task_for_doc(job_names=job_names)
# Generate Job Id to uniquely identify each task for this document
job_id = self.generate_job_name_for_task(task)
if self.process_in_single_transaction:
self.execute_task(task_to_execute=task)
else:
frappe.enqueue(
"frappe.utils.background_jobs.run_doc_method",
doctype=self.doctype,
name=self.name,
doc_method="execute_task",
job_id=job_id,
queue="long",
enqueue_after_commit=True,
task_to_execute=task,
)
def execute_task(self, task_to_execute: str | None = None):
if task_to_execute:
method = self.task_to_internal_method_map[task_to_execute]
if task := getattr(self, method, None):
try:
task()
except Exception as err:
frappe.db.rollback()
traceback = frappe.get_traceback(with_context=True)
if traceback:
message = "Traceback: <br>" + traceback
frappe.db.set_value(self.doctype, self.name, "error_log", message)
frappe.db.set_value(self.doctype, self.name, "status", "Failed")
def delete_notifications(self):
self.validate_doc_status()
if not self.clear_notifications:
clear_notifications()
self.db_set("clear_notifications", 1)
self.enqueue_task(task="Initialize Summary Table")
def populate_doctypes_to_be_ignored_table(self):
doctypes_to_be_ignored_list = get_doctypes_to_be_ignored()
for doctype in doctypes_to_be_ignored_list:
self.append("doctypes_to_be_ignored", {"doctype_name": doctype})
def delete_bins(self):
frappe.db.sql(
"""delete from `tabBin` where warehouse in
(select name from tabWarehouse where company=%s)""",
self.company,
)
def validate_running_task_for_doc(self, job_names: list = None):
# at most only one task should be runnning
running_tasks = []
for x in job_names:
if is_job_enqueued(x):
running_tasks.append(get_job(x).get_id())
def delete_lead_addresses(self):
"""Delete addresses to which leads are linked"""
leads = frappe.get_all("Lead", filters={"company": self.company})
leads = ["'%s'" % row.get("name") for row in leads]
addresses = []
if leads:
addresses = frappe.db.sql_list(
"""select parent from `tabDynamic Link` where link_name
in ({leads})""".format(
leads=",".join(leads)
if running_tasks:
frappe.throw(
_("{0} is already running for {1}").format(
comma_and([get_link_to_form("RQ Job", x) for x in running_tasks]), self.name
)
)
if addresses:
addresses = ["%s" % frappe.db.escape(addr) for addr in addresses]
frappe.db.sql(
"""delete from `tabAddress` where name in ({addresses}) and
name not in (select distinct dl1.parent from `tabDynamic Link` dl1
inner join `tabDynamic Link` dl2 on dl1.parent=dl2.parent
and dl1.link_doctype<>dl2.link_doctype)""".format(
addresses=",".join(addresses)
)
def validate_doc_status(self):
if self.status != "Running":
frappe.throw(
_("{0} is not running. Cannot trigger events for this Document").format(
get_link_to_form("Transaction Deletion Record", self.name)
)
)
frappe.db.sql(
"""delete from `tabDynamic Link` where link_doctype='Lead'
and parenttype='Address' and link_name in ({leads})""".format(
@frappe.whitelist()
def start_deletion_tasks(self):
# This method is the entry point for the chain of events that follow
self.db_set("status", "Running")
self.enqueue_task(task="Delete Bins")
def delete_bins(self):
self.validate_doc_status()
if not self.delete_bin_data:
frappe.db.sql(
"""delete from `tabBin` where warehouse in
(select name from tabWarehouse where company=%s)""",
self.company,
)
self.db_set("delete_bin_data", 1)
self.enqueue_task(task="Delete Leads and Addresses")
def delete_lead_addresses(self):
"""Delete addresses to which leads are linked"""
self.validate_doc_status()
if not self.delete_leads_and_addresses:
leads = frappe.get_all("Lead", filters={"company": self.company})
leads = ["'%s'" % row.get("name") for row in leads]
addresses = []
if leads:
addresses = frappe.db.sql_list(
"""select parent from `tabDynamic Link` where link_name
in ({leads})""".format(
leads=",".join(leads)
)
)
frappe.db.sql(
"""update `tabCustomer` set lead_name=NULL where lead_name in ({leads})""".format(
leads=",".join(leads)
if addresses:
addresses = ["%s" % frappe.db.escape(addr) for addr in addresses]
frappe.db.sql(
"""delete from `tabAddress` where name in ({addresses}) and
name not in (select distinct dl1.parent from `tabDynamic Link` dl1
inner join `tabDynamic Link` dl2 on dl1.parent=dl2.parent
and dl1.link_doctype<>dl2.link_doctype)""".format(
addresses=",".join(addresses)
)
)
frappe.db.sql(
"""delete from `tabDynamic Link` where link_doctype='Lead'
and parenttype='Address' and link_name in ({leads})""".format(
leads=",".join(leads)
)
)
frappe.db.sql(
"""update `tabCustomer` set lead_name=NULL where lead_name in ({leads})""".format(
leads=",".join(leads)
)
)
)
self.db_set("delete_leads_and_addresses", 1)
self.enqueue_task(task="Reset Company Values")
def reset_company_values(self):
company_obj = frappe.get_doc("Company", self.company)
company_obj.total_monthly_sales = 0
company_obj.sales_monthly_history = None
company_obj.save()
self.validate_doc_status()
if not self.reset_company_default_values:
company_obj = frappe.get_doc("Company", self.company)
company_obj.total_monthly_sales = 0
company_obj.sales_monthly_history = None
company_obj.save()
self.db_set("reset_company_default_values", 1)
self.enqueue_task(task="Clear Notifications")
def initialize_doctypes_to_be_deleted_table(self):
self.validate_doc_status()
if not self.initialize_doctypes_table:
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
tables = self.get_all_child_doctypes()
for docfield in docfields:
if docfield["parent"] != self.doctype:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield["parent"], docfield["fieldname"]
)
if no_of_docs > 0:
# Initialize
self.populate_doctypes_table(tables, docfield["parent"], docfield["fieldname"], 0)
self.db_set("initialize_doctypes_table", 1)
self.enqueue_task(task="Delete Transactions")
def delete_company_transactions(self):
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
self.validate_doc_status()
if not self.delete_transactions:
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
tables = self.get_all_child_doctypes()
for docfield in docfields:
if docfield["parent"] != self.doctype:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield["parent"], docfield["fieldname"]
)
if no_of_docs > 0:
self.delete_version_log(docfield["parent"], docfield["fieldname"])
reference_docs = frappe.get_all(
docfield["parent"], filters={docfield["fieldname"]: self.company}
tables = self.get_all_child_doctypes()
for docfield in self.doctypes:
if docfield.doctype_name != self.doctype and not docfield.done:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield.doctype_name, docfield.docfield_name
)
reference_doc_names = [r.name for r in reference_docs]
if no_of_docs > 0:
reference_docs = frappe.get_all(
docfield.doctype_name, filters={docfield.docfield_name: self.company}, limit=self.batch_size
)
reference_doc_names = [r.name for r in reference_docs]
self.delete_communications(docfield["parent"], reference_doc_names)
self.delete_comments(docfield["parent"], reference_doc_names)
self.unlink_attachments(docfield["parent"], reference_doc_names)
self.delete_version_log(docfield.doctype_name, reference_doc_names)
self.delete_communications(docfield.doctype_name, reference_doc_names)
self.delete_comments(docfield.doctype_name, reference_doc_names)
self.unlink_attachments(docfield.doctype_name, reference_doc_names)
self.delete_child_tables(docfield.doctype_name, reference_doc_names)
self.delete_docs_linked_with_specified_company(docfield.doctype_name, reference_doc_names)
processed = int(docfield.no_of_docs) + len(reference_doc_names)
frappe.db.set_value(docfield.doctype, docfield.name, "no_of_docs", processed)
else:
# reset naming series
naming_series = frappe.db.get_value("DocType", docfield.doctype_name, "autoname")
if naming_series:
if "#" in naming_series:
self.update_naming_series(naming_series, docfield.doctype_name)
frappe.db.set_value(docfield.doctype, docfield.name, "done", 1)
self.populate_doctypes_table(tables, docfield["parent"], no_of_docs)
self.delete_child_tables(docfield["parent"], docfield["fieldname"])
self.delete_docs_linked_with_specified_company(docfield["parent"], docfield["fieldname"])
naming_series = frappe.db.get_value("DocType", docfield["parent"], "autoname")
if naming_series:
if "#" in naming_series:
self.update_naming_series(naming_series, docfield["parent"])
pending_doctypes = frappe.db.get_all(
"Transaction Deletion Record Details",
filters={"parent": self.name, "done": 0},
pluck="doctype_name",
)
if pending_doctypes:
# as method is enqueued after commit, calling itself will not make validate_doc_status to throw
# recursively call this task to delete all transactions
self.enqueue_task(task="Delete Transactions")
else:
self.db_set("status", "Completed")
self.db_set("delete_transactions", 1)
self.db_set("error_log", None)
def get_doctypes_to_be_ignored_list(self):
singles = frappe.get_all("DocType", filters={"issingle": 1}, pluck="name")
@@ -174,25 +360,24 @@ class TransactionDeletionRecord(Document):
def get_number_of_docs_linked_with_specified_company(self, doctype, company_fieldname):
return frappe.db.count(doctype, {company_fieldname: self.company})
def populate_doctypes_table(self, tables, doctype, no_of_docs):
def populate_doctypes_table(self, tables, doctype, fieldname, no_of_docs):
self.flags.ignore_validate_update_after_submit = True
if doctype not in tables:
self.append("doctypes", {"doctype_name": doctype, "no_of_docs": no_of_docs})
def delete_child_tables(self, doctype, company_fieldname):
parent_docs_to_be_deleted = frappe.get_all(
doctype, {company_fieldname: self.company}, pluck="name"
)
self.append(
"doctypes", {"doctype_name": doctype, "docfield_name": fieldname, "no_of_docs": no_of_docs}
)
self.save(ignore_permissions=True)
def delete_child_tables(self, doctype, reference_doc_names):
child_tables = frappe.get_all(
"DocField", filters={"fieldtype": "Table", "parent": doctype}, pluck="options"
)
for batch in create_batch(parent_docs_to_be_deleted, self.batch_size):
for table in child_tables:
frappe.db.delete(table, {"parent": ["in", batch]})
for table in child_tables:
frappe.db.delete(table, {"parent": ["in", reference_doc_names]})
def delete_docs_linked_with_specified_company(self, doctype, company_fieldname):
frappe.db.delete(doctype, {company_fieldname: self.company})
def delete_docs_linked_with_specified_company(self, doctype, reference_doc_names):
frappe.db.delete(doctype, {"name": ("in", reference_doc_names)})
def update_naming_series(self, naming_series, doctype_name):
if "." in naming_series:
@@ -213,17 +398,11 @@ class TransactionDeletionRecord(Document):
frappe.db.sql("""update `tabSeries` set current = %s where name=%s""", (last, prefix))
def delete_version_log(self, doctype, company_fieldname):
dt = qb.DocType(doctype)
names = qb.from_(dt).select(dt.name).where(dt[company_fieldname] == self.company).run(as_list=1)
names = [x[0] for x in names]
if names:
versions = qb.DocType("Version")
for batch in create_batch(names, self.batch_size):
qb.from_(versions).delete().where(
(versions.ref_doctype == doctype) & (versions.docname.isin(batch))
).run()
def delete_version_log(self, doctype, docnames):
versions = qb.DocType("Version")
qb.from_(versions).delete().where(
(versions.ref_doctype == doctype) & (versions.docname.isin(docnames))
).run()
def delete_communications(self, doctype, reference_doc_names):
communications = frappe.get_all(
@@ -295,3 +474,34 @@ def get_doctypes_to_be_ignored():
doctypes_to_be_ignored.extend(frappe.get_hooks("company_data_to_be_ignored") or [])
return doctypes_to_be_ignored
@frappe.whitelist()
def is_deletion_doc_running(company: str | None = None, err_msg: str | None = None):
if company:
if running_deletion_jobs := frappe.db.get_all(
"Transaction Deletion Record",
filters={"docstatus": 1, "company": company, "status": "Running"},
):
if not err_msg:
err_msg = ""
frappe.throw(
title=_("Deletion in Progress!"),
msg=_("Transaction Deletion Document: {0} is running for this Company. {1}").format(
get_link_to_form("Transaction Deletion Record", running_deletion_jobs[0].name), err_msg
),
)
def check_for_running_deletion_job(doc, method=None):
# Check if DocType has 'company' field
df = qb.DocType("DocField")
if (
not_allowed := qb.from_(df)
.select(df.parent)
.where((df.fieldname == "company") & (df.parent == doc.doctype))
.run()
):
is_deletion_doc_running(
doc.company, _("Cannot make any transactions until the deletion job is completed")
)

View File

@@ -2,11 +2,15 @@
// License: GNU General Public License v3. See license.txt
frappe.listview_settings["Transaction Deletion Record"] = {
add_fields: ["status"],
get_indicator: function (doc) {
if (doc.docstatus == 0) {
return [__("Draft"), "red"];
} else {
return [__("Completed"), "green"];
}
let colors = {
Queued: "orange",
Completed: "green",
Running: "blue",
Failed: "red",
};
let status = doc.status;
return [__(status), colors[status], "status,=," + status];
},
};

View File

@@ -5,8 +5,7 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"doctype_name",
"no_of_docs"
"doctype_name"
],
"fields": [
{
@@ -16,18 +15,12 @@
"label": "DocType",
"options": "DocType",
"reqd": 1
},
{
"fieldname": "no_of_docs",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Number of Docs"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2021-05-08 23:10:46.166744",
"modified": "2024-02-04 10:56:27.413691",
"modified_by": "Administrator",
"module": "Setup",
"name": "Transaction Deletion Record Item",
@@ -35,5 +28,6 @@
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View File

@@ -16,7 +16,6 @@ class TransactionDeletionRecordItem(Document):
from frappe.types import DF
doctype_name: DF.Link
no_of_docs: DF.Data | None
parent: DF.Data
parentfield: DF.Data
parenttype: DF.Data

View File

@@ -246,6 +246,8 @@ class DeprecatedBatchNoValuation:
if self.sle.serial_and_batch_bundle:
query = query.where(bundle.name != self.sle.serial_and_batch_bundle)
query = query.where(bundle.voucher_type != "Pick List")
for d in query.run(as_dict=True):
self.non_batchwise_balance_value += flt(d.batch_value)
self.non_batchwise_balance_qty += flt(d.batch_qty)

View File

@@ -796,7 +796,8 @@
"label": "Purchase Order",
"options": "Purchase Order",
"print_hide": 1,
"read_only": 1
"read_only": 1,
"search_index": 1
},
{
"fieldname": "column_break_82",
@@ -912,7 +913,7 @@
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2024-02-04 14:10:31.750340",
"modified": "2024-03-21 18:15:07.603672",
"modified_by": "Administrator",
"module": "Stock",
"name": "Delivery Note Item",

View File

@@ -7,7 +7,6 @@ from frappe import _, bold
from frappe.model.document import Document
from frappe.query_builder import Criterion
from frappe.query_builder.functions import Cast_
from frappe.utils import getdate
class ItemPriceDuplicateItem(frappe.ValidationError):
@@ -46,7 +45,7 @@ class ItemPrice(Document):
def validate(self):
self.validate_item()
self.validate_dates()
self.validate_from_to_dates("valid_from", "valid_upto")
self.update_price_list_details()
self.update_item_details()
self.check_duplicates()
@@ -56,11 +55,6 @@ class ItemPrice(Document):
if not frappe.db.exists("Item", self.item_code):
frappe.throw(_("Item {0} not found.").format(self.item_code))
def validate_dates(self):
if self.valid_from and self.valid_upto:
if getdate(self.valid_from) > getdate(self.valid_upto):
frappe.throw(_("Valid From Date must be lesser than Valid Upto Date."))
def update_price_list_details(self):
if self.price_list:
price_list_details = frappe.db.get_value(

View File

@@ -778,7 +778,7 @@ def get_available_item_locations(
if picked_item_details:
for location in list(locations):
if location["qty"] < 1:
if location["qty"] < 0:
locations.remove(location)
total_qty_available = sum(location.get("qty") for location in locations)

View File

@@ -408,12 +408,26 @@ frappe.ui.form.on("Stock Entry", {
erpnext.accounts.dimensions.update_dimension(frm, frm.doctype);
}
frm.events.set_route_options_for_new_doc(frm);
},
set_route_options_for_new_doc(frm) {
let batch_no_field = frm.get_docfield("items", "batch_no");
if (batch_no_field) {
batch_no_field.get_route_options_for_new_doc = function (row) {
return {
item: row.doc.item_code,
};
};
}
let sbb_field = frm.get_docfield("items", "serial_and_batch_bundle");
if (sbb_field) {
sbb_field.get_route_options_for_new_doc = (row) => {
return {
item_code: row.doc.item_code,
voucher_type: frm.doc.doctype,
warehouse: row.doc.s_warehouse || row.doc.t_warehouse,
};
};
}
@@ -1081,7 +1095,9 @@ erpnext.stock.StockEntry = class StockEntry extends erpnext.stock.StockControlle
cint(frappe.user_defaults?.use_serial_batch_fields) === 1
) {
this.frm.doc.items.forEach((item) => {
frappe.model.set_value(item.doctype, item.name, "use_serial_batch_fields", 1);
if (!item.serial_and_batch_bundle) {
frappe.model.set_value(item.doctype, item.name, "use_serial_batch_fields", 1);
}
});
}
}

View File

@@ -2536,6 +2536,7 @@ class StockEntry(StockController):
)
d.serial_and_batch_bundle = id
d.use_serial_batch_fields = 0
def get_available_serial_nos(self) -> List[str]:
serial_nos = []
@@ -2635,7 +2636,9 @@ def make_stock_in_entry(source_name, target_doc=None):
def set_missing_values(source, target):
target.stock_entry_type = "Material Transfer"
target.set_missing_values()
target.make_serial_and_batch_bundle_for_transfer()
if not frappe.db.get_single_value("Stock Settings", "use_serial_batch_fields"):
target.make_serial_and_batch_bundle_for_transfer()
def update_item(source_doc, target_doc, source_parent):
target_doc.t_warehouse = ""

View File

@@ -888,11 +888,11 @@ class SerialBatchCreation:
return doc
def validate_qty(self, doc):
if doc.type_of_transaction == "Outward":
if doc.type_of_transaction == "Outward" and self.actual_qty and doc.total_qty:
precision = doc.precision("total_qty")
total_qty = abs(flt(doc.total_qty, precision))
required_qty = abs(flt(self.actual_qty, precision))
total_qty = flt(abs(doc.total_qty), precision)
required_qty = flt(abs(self.actual_qty), precision)
if required_qty - total_qty > 0:
msg = f"For the item {bold(doc.item_code)}, the Avaliable qty {bold(total_qty)} is less than the Required Qty {bold(required_qty)} in the warehouse {bold(doc.warehouse)}. Please add sufficient qty in the warehouse."

View File

@@ -149,7 +149,9 @@ class SubcontractingReceipt(SubcontractingController):
self.update_prevdoc_status()
self.set_subcontracting_order_status()
self.set_consumed_qty_in_subcontract_order()
self.make_bundle_using_old_serial_batch_fields()
for table_name in ["items", "supplied_items"]:
self.make_bundle_using_old_serial_batch_fields(table_name)
self.update_stock_ledger()
self.make_gl_entries()
self.repost_future_sle_and_gle()

View File

@@ -292,6 +292,7 @@ class TestSubcontractingReceipt(FrappeTestCase):
self.assertRaises(OverAllowanceError, make_return_subcontracting_receipt, **args)
def test_subcontracting_receipt_no_gl_entry(self):
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
sco = get_subcontracting_order()
rm_items = get_rm_items(sco.supplied_items)
itemwise_details = make_stock_in_entry(rm_items=rm_items)
@@ -327,8 +328,10 @@ class TestSubcontractingReceipt(FrappeTestCase):
# Service Cost(100 * 10) + Raw Materials Cost(100 * 10) + Additional Costs(10 * 10) = 2100
self.assertEqual(stock_value_difference, 2100)
self.assertFalse(get_gl_entries("Subcontracting Receipt", scr.name))
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
def test_subcontracting_receipt_gl_entry(self):
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
sco = get_subcontracting_order(
company="_Test Company with perpetual inventory",
warehouse="Stores - TCP1",
@@ -387,6 +390,7 @@ class TestSubcontractingReceipt(FrappeTestCase):
scr.reload()
scr.cancel()
self.assertTrue(get_gl_entries("Subcontracting Receipt", scr.name))
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
def test_supplied_items_consumed_qty(self):
# Set Backflush Based On as "Material Transferred for Subcontracting" to transfer RM's more than the required qty
@@ -664,6 +668,7 @@ class TestSubcontractingReceipt(FrappeTestCase):
)
def test_subcontracting_receipt_valuation_for_fg_with_auto_created_serial_batch_bundle(self):
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
set_backflush_based_on("BOM")
fg_item = make_item(
@@ -760,9 +765,11 @@ class TestSubcontractingReceipt(FrappeTestCase):
frappe.db.set_single_value(
"Stock Settings", "auto_create_serial_and_batch_bundle_for_outward", 0
)
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
def test_subcontracting_receipt_raw_material_rate(self):
# Step - 1: Set Backflush Based On as "BOM"
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
set_backflush_based_on("BOM")
# Step - 2: Create FG and RM Items
@@ -820,6 +827,8 @@ class TestSubcontractingReceipt(FrappeTestCase):
self.assertEqual(rm_item.rate, 100)
self.assertEqual(rm_item.amount, rm_item.consumed_qty * rm_item.rate)
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 1)
def test_quality_inspection_for_subcontracting_receipt(self):
from erpnext.stock.doctype.quality_inspection.test_quality_inspection import (
create_quality_inspection,

File diff suppressed because it is too large Load Diff

View File

@@ -164,7 +164,7 @@ def create_log(doc_name, e, from_doctype, to_doctype, status, log_date=None, res
transaction_log.from_doctype = from_doctype
transaction_log.to_doctype = to_doctype
transaction_log.retried = restarted
transaction_log.save()
transaction_log.save(ignore_permissions=True)
def show_job_status(fail_count, deserialized_data_count, to_doctype):