Merge pull request #40677 from frappe/version-14-hotfix

chore: release v14
This commit is contained in:
Deepesh Garg
2024-03-27 08:56:10 +05:30
committed by GitHub
53 changed files with 1981 additions and 308 deletions

View File

@@ -0,0 +1,100 @@
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
frappe.ui.form.on("Bisect Accounting Statements", {
	onload(frm) {
		frm.trigger("render_heatmap");
	},

	refresh(frm) {
		// Toolbar actions mirror the whitelisted server-side methods.
		frm.add_custom_button(__("Bisect Left"), () => frm.trigger("bisect_left"));
		frm.add_custom_button(__("Bisect Right"), () => frm.trigger("bisect_right"));
		frm.add_custom_button(__("Up"), () => frm.trigger("move_up"));
		frm.add_custom_button(__("Build Tree"), () => frm.trigger("build_tree"));
	},

	render_heatmap(frm) {
		const wrapper = frm.get_field("bisect_heatmap").$wrapper;
		wrapper.addClass("bisect_heatmap_location");

		// milliseconds in a day
		const DAY_IN_MS = 24 * 60 * 60 * 1000;
		const range_start = new Date(frm.doc.from_date).getTime();
		const range_end = new Date(frm.doc.to_date).getTime();
		const window_start = new Date(frm.doc.current_from_date).getTime();
		const window_end = new Date(frm.doc.current_to_date).getTime();

		// Mark each day of the full range: 1.0 when inside the currently
		// bisected window, 0.0 otherwise. Keys are epoch seconds.
		const datapoints = {};
		for (let ts = range_start; ts <= range_end; ts += DAY_IN_MS) {
			const in_window = window_start <= ts && ts <= window_end;
			datapoints[ts / 1000] = in_window ? 1.0 : 0.0;
		}

		new frappe.Chart(".bisect_heatmap_location", {
			type: "heatmap",
			data: {
				dataPoints: datapoints,
				start: new Date(frm.doc.from_date),
				end: new Date(frm.doc.to_date),
			},
			countLabel: "Bisecting",
			discreteDomains: 1,
		});
	},

	bisect_left(frm) {
		frm.call({
			doc: frm.doc,
			method: "bisect_left",
			freeze: true,
			freeze_message: __("Bisecting Left ..."),
			callback: () => frm.trigger("render_heatmap"),
		});
	},

	bisect_right(frm) {
		frm.call({
			doc: frm.doc,
			method: "bisect_right",
			freeze: true,
			freeze_message: __("Bisecting Right ..."),
			callback: () => frm.trigger("render_heatmap"),
		});
	},

	move_up(frm) {
		frm.call({
			doc: frm.doc,
			method: "move_up",
			freeze: true,
			freeze_message: __("Moving up in tree ..."),
			callback: () => frm.trigger("render_heatmap"),
		});
	},

	build_tree(frm) {
		frm.call({
			doc: frm.doc,
			method: "build_tree",
			freeze: true,
			freeze_message: __("Rebuilding BTree for period ..."),
			callback: () => frm.trigger("render_heatmap"),
		});
	},
});

View File

@@ -0,0 +1,194 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2023-09-15 21:28:28.054773",
"default_view": "List",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"section_break_cvfg",
"company",
"column_break_hcam",
"from_date",
"column_break_qxbi",
"to_date",
"column_break_iwny",
"algorithm",
"section_break_8ph9",
"current_node",
"section_break_ngid",
"bisect_heatmap",
"section_break_hmsy",
"bisecting_from",
"current_from_date",
"column_break_uqyd",
"bisecting_to",
"current_to_date",
"section_break_hbyo",
"heading_cppb",
"p_l_summary",
"column_break_aivo",
"balance_sheet_summary",
"b_s_summary",
"column_break_gvwx",
"difference_heading",
"difference"
],
"fields": [
{
"fieldname": "column_break_qxbi",
"fieldtype": "Column Break"
},
{
"fieldname": "from_date",
"fieldtype": "Datetime",
"label": "From Date"
},
{
"fieldname": "to_date",
"fieldtype": "Datetime",
"label": "To Date"
},
{
"default": "BFS",
"fieldname": "algorithm",
"fieldtype": "Select",
"label": "Algorithm",
"options": "BFS\nDFS"
},
{
"fieldname": "column_break_iwny",
"fieldtype": "Column Break"
},
{
"fieldname": "current_node",
"fieldtype": "Link",
"label": "Current Node",
"options": "Bisect Nodes"
},
{
"fieldname": "section_break_hmsy",
"fieldtype": "Section Break"
},
{
"fieldname": "current_from_date",
"fieldtype": "Datetime",
"read_only": 1
},
{
"fieldname": "current_to_date",
"fieldtype": "Datetime",
"read_only": 1
},
{
"fieldname": "column_break_uqyd",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_hbyo",
"fieldtype": "Section Break"
},
{
"fieldname": "p_l_summary",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "b_s_summary",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "difference",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "column_break_aivo",
"fieldtype": "Column Break"
},
{
"fieldname": "column_break_gvwx",
"fieldtype": "Column Break"
},
{
"fieldname": "company",
"fieldtype": "Link",
"label": "Company",
"options": "Company"
},
{
"fieldname": "column_break_hcam",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_ngid",
"fieldtype": "Section Break"
},
{
"fieldname": "section_break_8ph9",
"fieldtype": "Section Break",
"hidden": 1
},
{
"fieldname": "bisect_heatmap",
"fieldtype": "HTML",
"label": "Heatmap"
},
{
"fieldname": "heading_cppb",
"fieldtype": "Heading",
"label": "Profit and Loss Summary"
},
{
"fieldname": "balance_sheet_summary",
"fieldtype": "Heading",
"label": "Balance Sheet Summary"
},
{
"fieldname": "difference_heading",
"fieldtype": "Heading",
"label": "Difference"
},
{
"fieldname": "bisecting_from",
"fieldtype": "Heading",
"label": "Bisecting From"
},
{
"fieldname": "bisecting_to",
"fieldtype": "Heading",
"label": "Bisecting To"
},
{
"fieldname": "section_break_cvfg",
"fieldtype": "Section Break"
}
],
"hide_toolbar": 1,
"index_web_pages_for_search": 1,
"issingle": 1,
"links": [],
"modified": "2023-12-01 16:49:54.073890",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Bisect Accounting Statements",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "Administrator",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,226 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import datetime
from collections import deque
from math import floor
import frappe
from dateutil.relativedelta import relativedelta
from frappe import _
from frappe.model.document import Document
from frappe.utils import getdate
from frappe.utils.data import guess_date_format
class BisectAccountingStatements(Document):
	"""Single doctype that bisects an accounting period to locate the date
	range where the Profit & Loss and Balance Sheet summaries diverge.

	``build_tree`` materialises a binary tree of "Bisect Nodes" over
	[from_date, to_date]; ``bisect_left`` / ``bisect_right`` / ``move_up``
	navigate it, computing (or re-using cached) report summaries for the
	current node's period.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		algorithm: DF.Literal["BFS", "DFS"]
		b_s_summary: DF.Float
		company: DF.Link | None
		current_from_date: DF.Datetime | None
		current_node: DF.Link | None
		current_to_date: DF.Datetime | None
		difference: DF.Float
		from_date: DF.Datetime | None
		p_l_summary: DF.Float
		to_date: DF.Datetime | None
	# end: auto-generated types

	def validate(self):
		self.validate_dates()

	def validate_dates(self):
		"""Raise if the bisection window is not chronological."""
		if getdate(self.from_date) > getdate(self.to_date):
			frappe.throw(
				_("From Date: {0} cannot be greater than To date: {1}").format(
					frappe.bold(self.from_date), frappe.bold(self.to_date)
				)
			)

	def _make_root_node(self, from_date: datetime.datetime, to_date: datetime.datetime):
		"""Create and persist the root 'Bisect Nodes' record covering the whole period."""
		node = frappe.new_doc("Bisect Nodes")
		node.root = None
		node.period_from_date = from_date
		node.period_to_date = to_date
		node.insert()
		return node

	def _bisect_node(self, cur_node):
		"""Split cur_node's period roughly in half into two persisted children.

		Returns (left_node, right_node), or None when the period spans a
		single day (leaf node — nothing left to bisect). Shared by both the
		BFS and DFS builders, which previously duplicated this logic verbatim.
		"""
		delta = cur_node.period_to_date - cur_node.period_from_date
		if delta.days == 0:
			return None

		cur_floor = floor(delta.days / 2)

		left_node = frappe.new_doc("Bisect Nodes")
		left_node.period_from_date = cur_node.period_from_date
		left_node.period_to_date = cur_node.period_from_date + relativedelta(days=+cur_floor)
		left_node.root = cur_node.name
		left_node.generated = False
		left_node.insert()
		cur_node.left_child = left_node.name

		# Right half starts the day after the left half ends, so the two
		# children partition the parent's period without overlap.
		right_node = frappe.new_doc("Bisect Nodes")
		right_node.period_from_date = cur_node.period_from_date + relativedelta(days=+(cur_floor + 1))
		right_node.period_to_date = cur_node.period_to_date
		right_node.root = cur_node.name
		right_node.generated = False
		right_node.insert()
		cur_node.right_child = right_node.name

		cur_node.save()
		return left_node, right_node

	def bfs(self, from_date: datetime.datetime, to_date: datetime.datetime):
		"""Build the bisection tree breadth-first."""
		period_queue = deque([self._make_root_node(from_date, to_date)])
		while period_queue:
			children = self._bisect_node(period_queue.popleft())
			if children:
				period_queue.extend(children)

	def dfs(self, from_date: datetime.datetime, to_date: datetime.datetime):
		"""Build the bisection tree depth-first."""
		period_stack = [self._make_root_node(from_date, to_date)]
		while period_stack:
			children = self._bisect_node(period_stack.pop())
			if children:
				period_stack.extend(children)

	@frappe.whitelist()
	def build_tree(self):
		"""Discard any previous tree, rebuild it, and position on the root."""
		frappe.db.delete("Bisect Nodes")

		# Convert str to datetime format
		dt_format = guess_date_format(self.from_date)
		from_date = datetime.datetime.strptime(self.from_date, dt_format)
		to_date = datetime.datetime.strptime(self.to_date, dt_format)

		if self.algorithm == "BFS":
			self.bfs(from_date, to_date)
		if self.algorithm == "DFS":
			self.dfs(from_date, to_date)

		# set root as current node
		root = frappe.db.get_all("Bisect Nodes", filters={"root": ["is", "not set"]})[0]
		self.get_report_summary()
		self.current_node = root.name
		self.current_from_date = self.from_date
		self.current_to_date = self.to_date
		self.save()

	def get_report_summary(self):
		"""Run both financial reports for the current period and store their
		primitive summaries plus the absolute difference between them."""
		filters = {
			"company": self.company,
			"filter_based_on": "Date Range",
			"period_start_date": self.current_from_date,
			"period_end_date": self.current_to_date,
			"periodicity": "Yearly",
		}
		# Index 5 of the report result tuple is the primitive summary value.
		pl_summary = frappe.get_doc("Report", "Profit and Loss Statement")
		self.p_l_summary = pl_summary.execute_script_report(filters=filters)[5]

		bs_summary = frappe.get_doc("Report", "Balance Sheet")
		self.b_s_summary = bs_summary.execute_script_report(filters=filters)[5]
		self.difference = abs(self.p_l_summary - self.b_s_summary)

	def update_node(self):
		"""Cache the freshly computed summaries on the current node."""
		current_node = frappe.get_doc("Bisect Nodes", self.current_node)
		current_node.balance_sheet_summary = self.b_s_summary
		current_node.profit_loss_summary = self.p_l_summary
		current_node.difference = self.difference
		current_node.generated = True
		current_node.save()

	def current_node_has_summary_info(self):
		"""Assertion method: True when summaries were already generated for the current node."""
		return frappe.db.get_value("Bisect Nodes", self.current_node, "generated")

	def fetch_summary_info_from_current_node(self):
		"""Load cached summaries from the current node.

		Bug fix: the fields were previously cross-assigned (P&L read from
		``balance_sheet_summary`` and vice versa); each summary is now read
		from its matching source field.
		"""
		current_node = frappe.get_doc("Bisect Nodes", self.current_node)
		self.p_l_summary = current_node.profit_loss_summary
		self.b_s_summary = current_node.balance_sheet_summary
		self.difference = abs(self.p_l_summary - self.b_s_summary)

	def fetch_or_calculate(self):
		"""Reuse cached node summaries if present, otherwise compute and cache them."""
		if self.current_node_has_summary_info():
			self.fetch_summary_info_from_current_node()
		else:
			self.get_report_summary()
			self.update_node()

	def _move_to_node(self, node_name):
		"""Make node_name current, sync the visible period fields, and persist."""
		node = frappe.get_doc("Bisect Nodes", node_name)
		self.current_node = node_name
		self.current_from_date = node.period_from_date
		self.current_to_date = node.period_to_date
		self.fetch_or_calculate()
		self.save()

	@frappe.whitelist()
	def bisect_left(self):
		if self.current_node is None:
			return
		cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
		if cur_node.left_child is not None:
			self._move_to_node(cur_node.left_child)
		else:
			frappe.msgprint(_("No more children on Left"))

	@frappe.whitelist()
	def bisect_right(self):
		if self.current_node is None:
			return
		cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
		if cur_node.right_child is not None:
			self._move_to_node(cur_node.right_child)
		else:
			frappe.msgprint(_("No more children on Right"))

	@frappe.whitelist()
	def move_up(self):
		if self.current_node is None:
			return
		cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
		if cur_node.root is not None:
			self._move_to_node(cur_node.root)
		else:
			frappe.msgprint(_("Reached Root"))

View File

@@ -0,0 +1,9 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests.utils import FrappeTestCase
class TestBisectAccountingStatements(FrappeTestCase):
	# Placeholder: no unit tests yet for the Bisect Accounting Statements doctype.
	pass

View File

@@ -0,0 +1,8 @@
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// frappe.ui.form.on("Bisect Nodes", {
// refresh(frm) {
// },
// });

View File

@@ -0,0 +1,97 @@
{
"actions": [],
"autoname": "autoincrement",
"creation": "2023-09-27 14:56:38.112462",
"default_view": "List",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"root",
"left_child",
"right_child",
"period_from_date",
"period_to_date",
"difference",
"balance_sheet_summary",
"profit_loss_summary",
"generated"
],
"fields": [
{
"fieldname": "root",
"fieldtype": "Link",
"label": "Root",
"options": "Bisect Nodes"
},
{
"fieldname": "left_child",
"fieldtype": "Link",
"label": "Left Child",
"options": "Bisect Nodes"
},
{
"fieldname": "right_child",
"fieldtype": "Link",
"label": "Right Child",
"options": "Bisect Nodes"
},
{
"fieldname": "period_from_date",
"fieldtype": "Datetime",
"label": "Period From Date"
},
{
"fieldname": "period_to_date",
"fieldtype": "Datetime",
"label": "Period To Date"
},
{
"fieldname": "difference",
"fieldtype": "Float",
"label": "Difference"
},
{
"fieldname": "balance_sheet_summary",
"fieldtype": "Float",
"label": "Balance Sheet Summary"
},
{
"fieldname": "profit_loss_summary",
"fieldtype": "Float",
"label": "Profit and Loss Summary"
},
{
"default": "0",
"fieldname": "generated",
"fieldtype": "Check",
"label": "Generated"
}
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2023-12-01 17:46:12.437996",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Bisect Nodes",
"naming_rule": "Autoincrement",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,29 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class BisectNodes(Document):
	"""A node of the bisection tree built by 'Bisect Accounting Statements'.

	Pure data container (period boundaries, cached report summaries, and
	links to parent/child nodes); traversal and summary computation live on
	the Bisect Accounting Statements controller.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		balance_sheet_summary: DF.Float
		difference: DF.Float
		generated: DF.Check
		left_child: DF.Link | None
		name: DF.Int | None
		period_from_date: DF.Datetime | None
		period_to_date: DF.Datetime | None
		profit_loss_summary: DF.Float
		right_child: DF.Link | None
		root: DF.Link | None
	# end: auto-generated types

	pass

View File

@@ -0,0 +1,9 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests.utils import FrappeTestCase
class TestBisectNodes(FrappeTestCase):
	# Placeholder: no unit tests yet for the Bisect Nodes doctype.
	pass

View File

@@ -121,7 +121,8 @@ class PeriodClosingVoucher(AccountsController):
previous_fiscal_year = get_fiscal_year(last_year_closing, company=self.company, boolean=True)
if previous_fiscal_year and not frappe.db.exists(
"GL Entry", {"posting_date": ("<=", last_year_closing), "company": self.company}
"GL Entry",
{"posting_date": ("<=", last_year_closing), "company": self.company, "is_cancelled": 0},
):
return

View File

@@ -0,0 +1,58 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2024-02-04 10:53:32.307930",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"doctype_name",
"docfield_name",
"no_of_docs",
"done"
],
"fields": [
{
"fieldname": "doctype_name",
"fieldtype": "Link",
"in_list_view": 1,
"label": "DocType",
"options": "DocType",
"read_only": 1,
"reqd": 1
},
{
"fieldname": "docfield_name",
"fieldtype": "Data",
"label": "DocField",
"read_only": 1
},
{
"fieldname": "no_of_docs",
"fieldtype": "Int",
"in_list_view": 1,
"label": "No of Docs",
"read_only": 1
},
{
"default": "0",
"fieldname": "done",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Done",
"read_only": 1
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2024-02-05 17:35:09.556054",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Transaction Deletion Record Details",
"owner": "Administrator",
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,26 @@
# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TransactionDeletionRecordDetails(Document):
	"""Child-table row recording, per DocType, how many documents a
	Transaction Deletion Record has to delete and whether that step is done.

	Pure data container; no controller logic.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		docfield_name: DF.Data | None
		doctype_name: DF.Link
		done: DF.Check
		no_of_docs: DF.Int
		parent: DF.Data
		parentfield: DF.Data
		parenttype: DF.Data
	# end: auto-generated types

	pass

View File

@@ -97,11 +97,11 @@ def execute(filters=None):
chart = get_chart_data(filters, columns, asset, liability, equity)
report_summary = get_report_summary(
report_summary, primitive_summary = get_report_summary(
period_list, asset, liability, equity, provisional_profit_loss, currency, filters
)
return columns, data, message, chart, report_summary
return columns, data, message, chart, report_summary, primitive_summary
def get_provisional_profit_loss(
@@ -217,7 +217,7 @@ def get_report_summary(
"datatype": "Currency",
"currency": currency,
},
]
], (net_asset - net_liability + net_equity)
def get_chart_data(filters, columns, asset, liability, equity):

View File

@@ -669,20 +669,20 @@ class GrossProfitGenerator(object):
elif row.sales_order and row.so_detail:
incoming_amount = self.get_buying_amount_from_so_dn(row.sales_order, row.so_detail, item_code)
if incoming_amount:
return incoming_amount
return flt(row.qty) * incoming_amount
else:
return flt(row.qty) * self.get_average_buying_rate(row, item_code)
return flt(row.qty) * self.get_average_buying_rate(row, item_code)
def get_buying_amount_from_so_dn(self, sales_order, so_detail, item_code):
from frappe.query_builder.functions import Sum
from frappe.query_builder.functions import Avg
delivery_note_item = frappe.qb.DocType("Delivery Note Item")
query = (
frappe.qb.from_(delivery_note_item)
.select(Sum(delivery_note_item.incoming_rate * delivery_note_item.stock_qty))
.select(Avg(delivery_note_item.incoming_rate))
.where(delivery_note_item.docstatus == 1)
.where(delivery_note_item.item_code == item_code)
.where(delivery_note_item.against_sales_order == sales_order)
@@ -965,7 +965,7 @@ class GrossProfitGenerator(object):
& (sle.is_cancelled == 0)
)
.orderby(sle.item_code)
.orderby(sle.warehouse, sle.posting_date, sle.posting_time, sle.creation, order=Order.desc)
.orderby(sle.warehouse, sle.posting_datetime, sle.creation, order=Order.desc)
.run(as_dict=True)
)

View File

@@ -460,3 +460,95 @@ class TestGrossProfit(FrappeTestCase):
}
gp_entry = [x for x in data if x.parent_invoice == sinv.name]
self.assertDictContainsSubset(expected_entry, gp_entry[0])
def test_different_rates_in_si_and_dn(self):
	"""
	Test gp calculation when invoice and delivery note differ in qty and aren't connected

	SO -- INV
	|
	DN
	"""
	# Fix: the docstring above previously appeared *after* the import below,
	# which made it a dead string expression rather than the method docstring.
	from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order

	# Stock up the warehouse at a valuation rate of 700 (3 + 10 units).
	se = make_stock_entry(
		company=self.company,
		item_code=self.item,
		target=self.warehouse,
		qty=3,
		basic_rate=700,
		do_not_submit=True,
	)
	item = se.items[0]
	se.append(
		"items",
		{
			"item_code": item.item_code,
			"s_warehouse": item.s_warehouse,
			"t_warehouse": item.t_warehouse,
			"qty": 10,
			"basic_rate": 700,
			"conversion_factor": item.conversion_factor or 1.0,
			# NOTE(review): derived from the first row's qty (3), not this
			# row's qty (10) — presumably recalculated on save; confirm.
			"transfer_qty": flt(item.qty) * (flt(item.conversion_factor) or 1.0),
			"serial_no": item.serial_no,
			"batch_no": item.batch_no,
			"cost_center": item.cost_center,
			"expense_account": item.expense_account,
		},
	)
	se = se.save().submit()

	so = make_sales_order(
		customer=self.customer,
		company=self.company,
		warehouse=self.warehouse,
		item=self.item,
		rate=800,
		qty=10,
		do_not_save=False,
		do_not_submit=False,
	)

	from erpnext.selling.doctype.sales_order.sales_order import (
		make_delivery_note,
		make_sales_invoice,
	)

	# Deliver the full SO qty across two delivery notes (4 + 6) ...
	dn1 = make_delivery_note(so.name)
	dn1.items[0].qty = 4
	dn1.items[0].rate = 800
	dn1.save().submit()

	dn2 = make_delivery_note(so.name)
	dn2.items[0].qty = 6
	dn2.items[0].rate = 800
	dn2.save().submit()

	# ... but invoice only 4 units, so invoice and DN quantities differ.
	sinv = make_sales_invoice(so.name)
	sinv.items[0].qty = 4
	sinv.items[0].rate = 800
	sinv.save().submit()

	filters = frappe._dict(
		company=self.company, from_date=nowdate(), to_date=nowdate(), group_by="Invoice"
	)
	columns, data = execute(filters=filters)

	# Gross profit must be based on the invoiced qty (4) at valuation 700:
	# selling 4 * 800 = 3200, buying 4 * 700 = 2800, GP = 400 (12.5%).
	expected_entry = {
		"parent_invoice": sinv.name,
		"currency": "INR",
		"sales_invoice": self.item,
		"customer": self.customer,
		"posting_date": frappe.utils.datetime.date.fromisoformat(nowdate()),
		"item_code": self.item,
		"item_name": self.item,
		"warehouse": "Stores - _GP",
		"qty": 4.0,
		"avg._selling_rate": 800.0,
		"valuation_rate": 700.0,
		"selling_amount": 3200.0,
		"buying_amount": 2800.0,
		"gross_profit": 400.0,
		"gross_profit_%": 12.5,
	}
	gp_entry = [x for x in data if x.parent_invoice == sinv.name]
	self.assertDictContainsSubset(expected_entry, gp_entry[0])

View File

@@ -26,3 +26,10 @@ frappe.require("assets/erpnext/js/financial_statements.js", function () {
default: 1,
});
});
// Add an "Include Default FB Entries" checkbox (checked by default) to the
// Profit and Loss Statement report filters.
frappe.query_reports["Profit and Loss Statement"]["filters"].push({
	fieldname: "include_default_book_entries",
	label: __("Include Default FB Entries"),
	fieldtype: "Check",
	default: 1,
});

View File

@@ -66,11 +66,11 @@ def execute(filters=None):
currency = filters.presentation_currency or frappe.get_cached_value(
"Company", filters.company, "default_currency"
)
report_summary = get_report_summary(
report_summary, primitive_summary = get_report_summary(
period_list, filters.periodicity, income, expense, net_profit_loss, currency, filters
)
return columns, data, None, chart, report_summary
return columns, data, None, chart, report_summary, primitive_summary
def get_report_summary(
@@ -112,7 +112,7 @@ def get_report_summary(
"datatype": "Currency",
"currency": currency,
},
]
], net_profit
def get_net_profit_loss(income, expense, period_list, company, currency=None, consolidated=False):

View File

@@ -1372,8 +1372,7 @@ def sort_stock_vouchers_by_posting_date(
.select(sle.voucher_type, sle.voucher_no, sle.posting_date, sle.posting_time, sle.creation)
.where((sle.is_cancelled == 0) & (sle.voucher_no.isin(voucher_nos)))
.groupby(sle.voucher_type, sle.voucher_no)
.orderby(sle.posting_date)
.orderby(sle.posting_time)
.orderby(sle.posting_datetime)
.orderby(sle.creation)
).run(as_dict=True)
sorted_vouchers = [(sle.voucher_type, sle.voucher_no) for sle in sles]

View File

@@ -299,7 +299,10 @@ period_closing_doctypes = [
doc_events = {
"*": {
"validate": "erpnext.support.doctype.service_level_agreement.service_level_agreement.apply",
"validate": [
"erpnext.support.doctype.service_level_agreement.service_level_agreement.apply",
"erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.check_for_running_deletion_job",
],
},
tuple(period_closing_doctypes): {
"validate": "erpnext.accounts.doctype.accounting_period.accounting_period.validate_accounting_period_on_doc_save",

View File

@@ -978,8 +978,7 @@ def get_valuation_rate(data):
frappe.qb.from_(sle)
.select(sle.valuation_rate)
.where((sle.item_code == item_code) & (sle.valuation_rate > 0) & (sle.is_cancelled == 0))
.orderby(sle.posting_date, order=frappe.qb.desc)
.orderby(sle.posting_time, order=frappe.qb.desc)
.orderby(sle.posting_datetime, order=frappe.qb.desc)
.orderby(sle.creation, order=frappe.qb.desc)
.limit(1)
).run(as_dict=True)

View File

@@ -375,7 +375,7 @@ class JobCard(Document):
{
"to_time": get_datetime(args.get("complete_time")),
"operation": args.get("sub_operation"),
"completed_qty": args.get("completed_qty") or 0.0,
"completed_qty": (args.get("completed_qty") if last_row.idx == row.idx else 0.0),
}
)
elif args.get("start_time"):

View File

@@ -58,7 +58,7 @@ def get_data(filters):
query_filters["creation"] = ("between", [filters.get("from_date"), filters.get("to_date")])
data = frappe.get_all(
"Work Order", fields=fields, filters=query_filters, order_by="planned_start_date asc", debug=1
"Work Order", fields=fields, filters=query_filters, order_by="planned_start_date asc"
)
res = []

View File

@@ -274,6 +274,7 @@ erpnext.patches.v14_0.clear_reconciliation_values_from_singles
[post_model_sync]
execute:frappe.delete_doc_if_exists('Workspace', 'ERPNext Integrations Settings')
erpnext.patches.v14_0.update_posting_datetime_and_dropped_indexes #22-02-2024
erpnext.patches.v14_0.rename_ongoing_status_in_sla_documents
erpnext.patches.v14_0.delete_shopify_doctypes
erpnext.patches.v14_0.delete_healthcare_doctypes
@@ -361,4 +362,4 @@ erpnext.stock.doctype.delivery_note.patches.drop_unused_return_against_index # 2
erpnext.patches.v14_0.set_maintain_stock_for_bom_item
execute:frappe.db.set_single_value('E Commerce Settings', 'show_actual_qty', 1)
erpnext.patches.v14_0.delete_orphaned_asset_movement_item_records
erpnext.patches.v14_0.remove_cancelled_asset_capitalization_from_asset
erpnext.patches.v14_0.remove_cancelled_asset_capitalization_from_asset

View File

@@ -0,0 +1,19 @@
import frappe
def execute():
	# Backfill the new `posting_datetime` column on Stock Ledger Entry from
	# the legacy posting_date + posting_time pair in one bulk UPDATE.
	frappe.db.sql(
		"""
		UPDATE `tabStock Ledger Entry`
		SET posting_datetime = DATE_FORMAT(timestamp(posting_date, posting_time), '%Y-%m-%d %H:%i:%s')
	"""
	)
	# The old sort index over the separate columns is no longer needed.
	drop_indexes()
def drop_indexes():
	# Drop the legacy `posting_sort_index` from Stock Ledger Entry if it
	# still exists; no-op on sites where it was never created.
	if not frappe.db.has_index("tabStock Ledger Entry", "posting_sort_index"):
		return

	frappe.db.sql_ddl("ALTER TABLE `tabStock Ledger Entry` DROP INDEX `posting_sort_index`")

View File

@@ -832,7 +832,8 @@
"label": "Purchase Order",
"options": "Purchase Order",
"print_hide": 1,
"read_only": 1
"read_only": 1,
"search_index": 1
},
{
"fieldname": "column_break_89",
@@ -875,7 +876,7 @@
"idx": 1,
"istable": 1,
"links": [],
"modified": "2023-11-24 19:07:17.715231",
"modified": "2024-03-21 18:15:56.625005",
"modified_by": "Administrator",
"module": "Selling",
"name": "Sales Order Item",

View File

@@ -259,6 +259,7 @@ erpnext.PointOfSale.PastOrderSummary = class {
subject: __(frm.meta.name) + ": " + doc.name,
doctype: doc.doctype,
name: doc.name,
content: "",
send_email: 1,
print_format,
sender_full_name: frappe.user.full_name(),

View File

@@ -197,6 +197,8 @@ def prepare_data(
):
details[p_key] += r.get(qty_or_amount_field, 0)
details[variance_key] = details.get(p_key) - details.get(target_key)
else:
details[variance_key] = details.get(p_key) - details.get(target_key)
details["total_achieved"] += details.get(p_key)
details["total_variance"] = details.get("total_achieved") - details.get("total_target")
@@ -209,31 +211,32 @@ def get_actual_data(filters, sales_users_or_territory_data, date_field, sales_fi
parent_doc = frappe.qb.DocType(filters.get("doctype"))
child_doc = frappe.qb.DocType(filters.get("doctype") + " Item")
sales_team = frappe.qb.DocType("Sales Team")
query = (
frappe.qb.from_(parent_doc)
.inner_join(child_doc)
.on(child_doc.parent == parent_doc.name)
.inner_join(sales_team)
.on(sales_team.parent == parent_doc.name)
.select(
child_doc.item_group,
(child_doc.stock_qty * sales_team.allocated_percentage / 100).as_("stock_qty"),
(child_doc.base_net_amount * sales_team.allocated_percentage / 100).as_("base_net_amount"),
sales_team.sales_person,
parent_doc[date_field],
)
.where(
(parent_doc.docstatus == 1)
& (parent_doc[date_field].between(fiscal_year.year_start_date, fiscal_year.year_end_date))
)
)
query = frappe.qb.from_(parent_doc).inner_join(child_doc).on(child_doc.parent == parent_doc.name)
if sales_field == "sales_person":
query = query.where(sales_team.sales_person.isin(sales_users_or_territory_data))
sales_team = frappe.qb.DocType("Sales Team")
stock_qty = child_doc.stock_qty * sales_team.allocated_percentage / 100
net_amount = child_doc.base_net_amount * sales_team.allocated_percentage / 100
sales_field_col = sales_team[sales_field]
query = query.inner_join(sales_team).on(sales_team.parent == parent_doc.name)
else:
query = query.where(parent_doc[sales_field].isin(sales_users_or_territory_data))
stock_qty = child_doc.stock_qty
net_amount = child_doc.base_net_amount
sales_field_col = parent_doc[sales_field]
query = query.select(
child_doc.item_group,
parent_doc[date_field],
(stock_qty).as_("stock_qty"),
(net_amount).as_("base_net_amount"),
sales_field_col,
).where(
(parent_doc.docstatus == 1)
& (parent_doc[date_field].between(fiscal_year.year_start_date, fiscal_year.year_end_date))
& (sales_field_col.isin(sales_users_or_territory_data))
)
return query.run(as_dict=True)

View File

@@ -0,0 +1,57 @@
import frappe
from frappe.tests.utils import FrappeTestCase
from frappe.utils import flt, nowdate
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_sales_invoice
from erpnext.accounts.utils import get_fiscal_year
from erpnext.selling.report.sales_partner_target_variance_based_on_item_group.sales_partner_target_variance_based_on_item_group import (
execute,
)
from erpnext.selling.report.sales_person_target_variance_based_on_item_group.test_sales_person_target_variance_based_on_item_group import (
create_sales_target_doc,
create_target_distribution,
)
class TestSalesPartnerTargetVarianceBasedOnItemGroup(FrappeTestCase):
	"""Report test: achieved target and variance for a Sales Partner."""

	def setUp(self):
		# Fiscal year name covering today's date.
		self.fiscal_year = get_fiscal_year(nowdate())[0]

	def tearDown(self):
		frappe.db.rollback()

	def test_achieved_target_and_variance_for_partner(self):
		# Create a Target Distribution
		distribution = create_target_distribution(self.fiscal_year)

		# Create Sales Partner with targets for the current fiscal year
		sales_partner = create_sales_target_doc(
			"Sales Partner", "partner_name", "Sales Partner 1", self.fiscal_year, distribution.name
		)

		# Create a Sales Invoice for the Partner
		si = create_sales_invoice(
			rate=1000,
			qty=20,
			do_not_submit=True,
		)
		si.sales_partner = sales_partner
		si.commission_rate = 5
		si.submit()

		# Check Achieved Target and Variance for the Sales Partner
		result = execute(
			frappe._dict(
				{
					"fiscal_year": self.fiscal_year,
					"doctype": "Sales Invoice",
					"period": "Yearly",
					"target_on": "Quantity",
				}
			)
		)[1]
		row = frappe._dict(result[0])
		# Expected values come from the target set up in create_sales_target_doc
		# (total target 50, achieved 20 from the invoice, variance -30) —
		# TODO confirm against that helper's target qty.
		self.assertSequenceEqual(
			[flt(value, 2) for value in (row.total_target, row.total_achieved, row.total_variance)],
			[50, 20, -30],
		)

View File

@@ -18,17 +18,17 @@ class TestSalesPersonTargetVarianceBasedOnItemGroup(FrappeTestCase):
def test_achieved_target_and_variance(self):
# Create a Target Distribution
distribution = frappe.new_doc("Monthly Distribution")
distribution.distribution_id = "Target Report Distribution"
distribution.fiscal_year = self.fiscal_year
distribution.get_months()
distribution.insert()
distribution = create_target_distribution(self.fiscal_year)
# Create sales people with targets
person_1 = create_sales_person_with_target("Sales Person 1", self.fiscal_year, distribution.name)
person_2 = create_sales_person_with_target("Sales Person 2", self.fiscal_year, distribution.name)
# Create sales people with targets for the current fiscal year
person_1 = create_sales_target_doc(
"Sales Person", "sales_person_name", "Sales Person 1", self.fiscal_year, distribution.name
)
person_2 = create_sales_target_doc(
"Sales Person", "sales_person_name", "Sales Person 2", self.fiscal_year, distribution.name
)
# Create a Sales Order with 50-50 contribution
# Create a Sales Order with 50-50 contribution between both Sales people
so = make_sales_order(
rate=1000,
qty=20,
@@ -69,10 +69,20 @@ class TestSalesPersonTargetVarianceBasedOnItemGroup(FrappeTestCase):
)
def create_sales_person_with_target(sales_person_name, fiscal_year, distribution_id):
sales_person = frappe.new_doc("Sales Person")
sales_person.sales_person_name = sales_person_name
sales_person.append(
def create_target_distribution(fiscal_year):
	"""Create and insert a Monthly Distribution for the given fiscal year."""
	distribution = frappe.new_doc("Monthly Distribution")
	distribution.distribution_id = "Target Report Distribution"
	distribution.fiscal_year = fiscal_year
	distribution.get_months()  # populate the month-wise percentage rows
	return distribution.insert()
def create_sales_target_doc(
sales_field_dt, sales_field_name, sales_field_value, fiscal_year, distribution_id
):
sales_target_doc = frappe.new_doc(sales_field_dt)
sales_target_doc.set(sales_field_name, sales_field_value)
sales_target_doc.append(
"targets",
{
"fiscal_year": fiscal_year,
@@ -81,4 +91,6 @@ def create_sales_person_with_target(sales_person_name, fiscal_year, distribution
"distribution_id": distribution_id,
},
)
return sales_person.insert()
if sales_field_dt == "Sales Partner":
sales_target_doc.commission_rate = 5
return sales_target_doc.insert()

228
erpnext/setup/demo.py Normal file
View File

@@ -0,0 +1,228 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import json
import os
from random import randint
import frappe
from frappe import _
from frappe.utils import add_days, getdate
from erpnext.accounts.doctype.payment_entry.payment_entry import get_payment_entry
from erpnext.accounts.utils import get_fiscal_year
from erpnext.buying.doctype.purchase_order.purchase_order import make_purchase_invoice
from erpnext.selling.doctype.sales_order.sales_order import make_sales_invoice
from erpnext.setup.setup_wizard.operations.install_fixtures import create_bank_account
def setup_demo_data():
	"""Create the demo company, its master records and sample transactions.

	Telemetry events are captured at start, on failure and on completion.
	Any exception is logged and re-raised after capturing the failure event.
	"""
	from frappe.utils.telemetry import capture

	capture("demo_data_creation_started", "erpnext")
	try:
		company = create_demo_company()
		process_masters()
		make_transactions(company)
		frappe.cache.delete_keys("bootinfo")  # invalidate cached bootinfo so the UI picks up demo data
		frappe.publish_realtime("demo_data_complete")
	except Exception:
		frappe.log_error("Failed to create demo data")
		capture("demo_data_creation_failed", "erpnext", properties={"exception": frappe.get_traceback()})
		raise
	capture("demo_data_creation_completed", "erpnext")
@frappe.whitelist()
def clear_demo_data():
	"""Erase all demo data: transactions, master records and the demo company.

	Restricted to System Managers. On failure the DB transaction is rolled
	back and a user-facing error asks for manual cleanup of the demo company.
	"""
	from frappe.utils.telemetry import capture

	frappe.only_for("System Manager")
	capture("demo_data_erased", "erpnext")
	try:
		company = frappe.db.get_single_value("Global Defaults", "demo_company")
		create_transaction_deletion_record(company)
		clear_masters()
		delete_company(company)
		# restore the pre-demo default company
		default_company = frappe.db.get_single_value("Global Defaults", "default_company")
		frappe.db.set_default("company", default_company)
	except Exception:
		frappe.db.rollback()
		frappe.log_error("Failed to erase demo data")
		frappe.throw(
			_("Failed to erase demo data, please delete the demo company manually."),
			title=_("Could Not Delete Demo Data"),
		)
def create_demo_company():
	"""Clone the first existing company as "<name> (Demo)" and make it the default.

	Records the new company as the demo company in Global Defaults, creates a
	default bank account for it, and returns the new company's name.
	"""
	company = frappe.db.get_all("Company")[0].name
	company_doc = frappe.get_doc("Company", company)

	# Make a dummy company based on the first real company's settings
	new_company = frappe.new_doc("Company")
	new_company.company_name = company_doc.company_name + " (Demo)"
	new_company.abbr = company_doc.abbr + "D"
	new_company.enable_perpetual_inventory = 1
	new_company.default_currency = company_doc.default_currency
	new_company.country = company_doc.country
	new_company.chart_of_accounts_based_on = "Standard Template"
	new_company.chart_of_accounts = company_doc.chart_of_accounts
	new_company.insert()

	# Set the Demo Company as the default company
	frappe.db.set_single_value("Global Defaults", "demo_company", new_company.name)
	frappe.db.set_default("company", new_company.name)

	bank_account = create_bank_account({"company_name": new_company.name})
	frappe.db.set_value("Company", new_company.name, "default_bank_account", bank_account.name)

	return new_company.name
def process_masters():
	"""Insert every master record listed by the demo_master_doctypes hooks."""
	for master_doctype in frappe.get_hooks("demo_master_doctypes"):
		raw = read_data_file_using_hooks(master_doctype)
		if not raw:
			continue
		for record in json.loads(raw):
			create_demo_record(record)
def create_demo_record(doctype):
	"""Insert one demo document; `doctype` is a document dict parsed from the demo JSON fixtures."""
	frappe.get_doc(doctype).insert(ignore_permissions=True)
def make_transactions(company):
	"""Create demo transactions for `company`, dated within an active fiscal year.

	Negative stock is temporarily allowed so documents can be submitted in an
	arbitrary order, and is switched back off at the end.
	"""
	frappe.db.set_single_value("Stock Settings", "allow_negative_stock", 1)

	from erpnext.accounts.utils import FiscalYearError

	try:
		start_date = get_fiscal_year(date=getdate())[1]
	except FiscalYearError:
		# User might have setup fiscal year for previous or upcoming years
		active_fiscal_years = frappe.db.get_all("Fiscal Year", filters={"disabled": 0}, as_list=1)
		if active_fiscal_years:
			start_date = frappe.db.get_value("Fiscal Year", active_fiscal_years[0][0], "year_start_date")
		else:
			frappe.throw(_("There are no active Fiscal Years for which Demo Data can be generated."))

	for doctype in frappe.get_hooks("demo_transaction_doctypes"):
		data = read_data_file_using_hooks(doctype)
		if data:
			for item in json.loads(data):
				create_transaction(item, company, start_date)

	convert_order_to_invoices()
	frappe.db.set_single_value("Stock Settings", "allow_negative_stock", 0)
def create_transaction(doctype, company, start_date):
	"""Save and submit one demo transaction described by the dict `doctype`.

	Purchase Orders are dated early (1-25 days after start_date) while other
	documents fall later in the year (31-350 days) — presumably so purchases
	precede the sales documents that consume the stock (TODO confirm).
	"""
	document_type = doctype.get("doctype")
	warehouse = get_warehouse(company)
	if document_type == "Purchase Order":
		posting_date = get_random_date(start_date, 1, 25)
	else:
		posting_date = get_random_date(start_date, 31, 350)

	doctype.update(
		{
			"company": company,
			"set_posting_time": 1,
			"transaction_date": posting_date,
			"schedule_date": posting_date,
			"delivery_date": posting_date,
			"set_warehouse": warehouse,
		}
	)
	doc = frappe.get_doc(doctype)
	doc.save(ignore_permissions=True)
	doc.submit()
def convert_order_to_invoices():
	"""Bill up to six submitted Purchase/Sales Orders each, paying every other invoice.

	Some orders are deliberately left unbilled/unpaid so the demo ledgers show
	outstanding amounts.
	"""
	for document in ["Purchase Order", "Sales Order"]:
		# Keep some orders intentionally unbilled/unpaid
		for i, order in enumerate(
			frappe.db.get_all(
				document, filters={"docstatus": 1}, fields=["name", "transaction_date"], limit=6
			)
		):
			if document == "Purchase Order":
				invoice = make_purchase_invoice(order.name)
			elif document == "Sales Order":
				invoice = make_sales_invoice(order.name)

			# back-date the invoice to match its originating order
			invoice.set_posting_time = 1
			invoice.posting_date = order.transaction_date
			invoice.due_date = order.transaction_date
			invoice.bill_date = order.transaction_date

			if invoice.get("payment_schedule"):
				invoice.payment_schedule[0].due_date = order.transaction_date

			invoice.update_stock = 1
			invoice.submit()

			# pay every second invoice so both paid and outstanding states exist
			if i % 2 != 0:
				payment = get_payment_entry(invoice.doctype, invoice.name)
				payment.posting_date = order.transaction_date
				payment.reference_no = invoice.name
				payment.submit()
def get_random_date(start_date, start_range, end_range):
	"""Return start_date shifted forward by a random day count in [start_range, end_range]."""
	offset_days = randint(start_range, end_range)
	return add_days(start_date, offset_days)
def create_transaction_deletion_record(company):
	"""Delete all of `company`'s transactions synchronously via a Transaction Deletion Record."""
	transaction_deletion_record = frappe.new_doc("Transaction Deletion Record")
	transaction_deletion_record.company = company
	# run all deletion tasks in-process instead of enqueueing background jobs
	transaction_deletion_record.process_in_single_transaction = True
	transaction_deletion_record.save(ignore_permissions=True)
	transaction_deletion_record.submit()
	transaction_deletion_record.start_deletion_tasks()
def clear_masters():
	"""Delete demo master records, iterating hooks in reverse creation order."""
	for master_doctype in frappe.get_hooks("demo_master_doctypes")[::-1]:
		raw = read_data_file_using_hooks(master_doctype)
		if not raw:
			continue
		for record in json.loads(raw):
			clear_demo_record(record)
def clear_demo_record(document):
	"""Delete one demo document located by the field values in `document`.

	NOTE: mutates `document` in place — removes "doctype" and any key that is
	not a valid column, then uses the remaining keys as lookup filters.
	"""
	document_type = document.get("doctype")
	del document["doctype"]
	valid_columns = frappe.get_meta(document_type).get_valid_columns()

	filters = document
	for key in list(filters):
		if key not in valid_columns:
			filters.pop(key, None)

	doc = frappe.get_doc(document_type, filters)
	doc.delete(ignore_permissions=True)
def delete_company(company):
	"""Delete the demo company and clear its reference from Global Defaults."""
	frappe.db.set_single_value("Global Defaults", "demo_company", "")
	frappe.delete_doc("Company", company, ignore_permissions=True)
def read_data_file_using_hooks(doctype):
	"""Return the raw JSON text of the demo fixture for `doctype`.

	Fixtures live in the demo_data directory next to this module, one
	"<doctype>.json" file each. Raises FileNotFoundError if absent.
	"""
	path = os.path.join(os.path.dirname(__file__), "demo_data")
	# explicit encoding so behaviour does not depend on the host locale
	with open(os.path.join(path, doctype + ".json"), encoding="utf-8") as f:
		data = f.read()
	return data
def get_warehouse(company):
	"""Pick a random non-group warehouse belonging to `company`.

	Raises a user-facing error when the company has no non-group warehouses.
	"""
	warehouses = frappe.db.get_all("Warehouse", {"company": company, "is_group": 0})
	if not warehouses:
		frappe.throw(_("No warehouses found for company {0}").format(company))
	# index by the actual list length instead of assuming at least 4 warehouses
	return warehouses[randint(0, len(warehouses) - 1)].name

View File

@@ -169,43 +169,49 @@ frappe.ui.form.on("Company", {
},
delete_company_transactions: function (frm) {
frappe.verify_password(function () {
var d = frappe.prompt(
{
fieldtype: "Data",
fieldname: "company_name",
label: __("Please enter the company name to confirm"),
reqd: 1,
description: __(
"Please make sure you really want to delete all the transactions for this company. Your master data will remain as it is. This action cannot be undone."
),
},
function (data) {
if (data.company_name !== frm.doc.name) {
frappe.msgprint(__("Company name not same"));
return;
}
frappe.call({
method: "erpnext.setup.doctype.company.company.create_transaction_deletion_request",
args: {
company: data.company_name,
},
freeze: true,
callback: function (r, rt) {
if (!r.exc)
frappe.msgprint(
__("Successfully deleted all transactions related to this company!")
);
},
onerror: function () {
frappe.msgprint(__("Wrong Password"));
},
frappe.call({
method: "erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.is_deletion_doc_running",
args: {
company: frm.doc.name,
},
freeze: true,
callback: function (r) {
if (!r.exc) {
frappe.verify_password(function () {
var d = frappe.prompt(
{
fieldtype: "Data",
fieldname: "company_name",
label: __("Please enter the company name to confirm"),
reqd: 1,
description: __(
"Please make sure you really want to delete all the transactions for this company. Your master data will remain as it is. This action cannot be undone."
),
},
function (data) {
if (data.company_name !== frm.doc.name) {
frappe.msgprint(__("Company name not same"));
return;
}
frappe.call({
method: "erpnext.setup.doctype.company.company.create_transaction_deletion_request",
args: {
company: data.company_name,
},
freeze: true,
callback: function (r, rt) {},
onerror: function () {
frappe.msgprint(__("Wrong Password"));
},
});
},
__("Delete all the Transactions for this Company"),
__("Delete")
);
d.get_primary_btn().addClass("btn-danger");
});
},
__("Delete all the Transactions for this Company"),
__("Delete")
);
d.get_primary_btn().addClass("btn-danger");
}
},
});
},
});

View File

@@ -11,7 +11,7 @@ from frappe.cache_manager import clear_defaults_cache
from frappe.contacts.address_and_contact import load_address_and_contact
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
from frappe.desk.page.setup_wizard.setup_wizard import make_records
from frappe.utils import cint, formatdate, get_timestamp, today
from frappe.utils import cint, formatdate, get_link_to_form, get_timestamp, today
from frappe.utils.nestedset import NestedSet, rebuild_tree
from erpnext.accounts.doctype.account.account import get_account_currency
@@ -812,6 +812,19 @@ def get_default_company_address(name, sort_key="is_primary_address", existing_ad
@frappe.whitelist()
def create_transaction_deletion_request(company):
	"""Create, submit and start a Transaction Deletion Record for `company`.

	Throws first if a deletion document is already running for the company.
	Shows a message linking to the triggered deletion document.
	"""
	from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (
		is_deletion_doc_running,
	)

	is_deletion_doc_running(company)

	tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company})
	tdr.insert()
	tdr.submit()
	tdr.start_deletion_tasks()
	# Fix: second placeholder was "{0}" and the company was passed to msgprint
	# (as its title argument) instead of to str.format
	frappe.msgprint(
		_("A Transaction Deletion Document: {0} is triggered for {1}").format(
			get_link_to_form("Transaction Deletion Record", tdr.name),
			frappe.bold(company),
		)
	)

View File

@@ -28,6 +28,7 @@ class TestTransactionDeletionRecord(unittest.TestCase):
for i in range(5):
create_task("Dunder Mifflin Paper Co")
tdr = create_transaction_deletion_request("Dunder Mifflin Paper Co")
tdr.reload()
for doctype in tdr.doctypes:
if doctype.doctype_name == "Task":
self.assertEqual(doctype.no_of_docs, 5)
@@ -49,7 +50,9 @@ def create_company(company_name):
def create_transaction_deletion_request(company):
tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company})
tdr.insert()
tdr.process_in_single_transaction = True
tdr.submit()
tdr.start_deletion_tasks()
return tdr

View File

@@ -10,20 +10,24 @@ frappe.ui.form.on("Transaction Deletion Record", {
callback: function (r) {
doctypes_to_be_ignored_array = r.message;
populate_doctypes_to_be_ignored(doctypes_to_be_ignored_array, frm);
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false);
frm.refresh_field("doctypes_to_be_ignored");
},
});
}
frm.get_field("doctypes_to_be_ignored").grid.cannot_add_rows = true;
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false);
frm.refresh_field("doctypes_to_be_ignored");
},
refresh: function (frm) {
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false);
frm.refresh_field("doctypes_to_be_ignored");
if (frm.doc.docstatus == 1 && ["Queued", "Failed"].find((x) => x == frm.doc.status)) {
let execute_btn = frm.doc.status == "Queued" ? __("Start Deletion") : __("Retry");
frm.add_custom_button(execute_btn, () => {
// Entry point for chain of events
frm.call({
method: "start_deletion_tasks",
doc: frm.doc,
});
});
}
},
});

View File

@@ -7,10 +7,21 @@
"engine": "InnoDB",
"field_order": [
"company",
"section_break_qpwb",
"status",
"error_log",
"tasks_section",
"delete_bin_data",
"delete_leads_and_addresses",
"reset_company_default_values",
"clear_notifications",
"initialize_doctypes_table",
"delete_transactions",
"section_break_tbej",
"doctypes",
"doctypes_to_be_ignored",
"amended_from",
"status"
"process_in_single_transaction"
],
"fields": [
{
@@ -25,14 +36,16 @@
"fieldname": "doctypes",
"fieldtype": "Table",
"label": "Summary",
"options": "Transaction Deletion Record Item",
"no_copy": 1,
"options": "Transaction Deletion Record Details",
"read_only": 1
},
{
"fieldname": "doctypes_to_be_ignored",
"fieldtype": "Table",
"label": "Excluded DocTypes",
"options": "Transaction Deletion Record Item"
"options": "Transaction Deletion Record Item",
"read_only": 1
},
{
"fieldname": "amended_from",
@@ -46,18 +59,96 @@
{
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Draft\nCompleted"
"no_copy": 1,
"options": "Queued\nRunning\nFailed\nCompleted\nCancelled",
"read_only": 1
},
{
"fieldname": "section_break_tbej",
"fieldtype": "Section Break"
},
{
"fieldname": "tasks_section",
"fieldtype": "Section Break",
"label": "Tasks"
},
{
"default": "0",
"fieldname": "delete_bin_data",
"fieldtype": "Check",
"label": "Delete Bins",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "delete_leads_and_addresses",
"fieldtype": "Check",
"label": "Delete Leads and Addresses",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "clear_notifications",
"fieldtype": "Check",
"label": "Clear Notifications",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "reset_company_default_values",
"fieldtype": "Check",
"label": "Reset Company Default Values",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "delete_transactions",
"fieldtype": "Check",
"label": "Delete Transactions",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "initialize_doctypes_table",
"fieldtype": "Check",
"label": "Initialize Summary Table",
"no_copy": 1,
"read_only": 1
},
{
"depends_on": "eval: doc.error_log",
"fieldname": "error_log",
"fieldtype": "Long Text",
"label": "Error Log"
},
{
"fieldname": "section_break_qpwb",
"fieldtype": "Section Break"
},
{
"default": "0",
"fieldname": "process_in_single_transaction",
"fieldtype": "Check",
"hidden": 1,
"label": "Process in Single Transaction",
"no_copy": 1,
"read_only": 1
}
],
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
"modified": "2021-08-04 20:15:59.071493",
"modified": "2024-03-21 10:29:19.456413",
"modified_by": "Administrator",
"module": "Setup",
"name": "Transaction Deletion Record",
"naming_rule": "Expression (old style)",
"owner": "Administrator",
"permissions": [
{
@@ -76,5 +167,6 @@
],
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View File

@@ -1,18 +1,31 @@
# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from collections import OrderedDict
import frappe
from frappe import _, qb
from frappe.desk.notifications import clear_notifications
from frappe.model.document import Document
from frappe.utils import cint, create_batch
from frappe.utils import cint, comma_and, create_batch, get_link_to_form
from frappe.utils.background_jobs import create_job_id, is_job_enqueued
class TransactionDeletionRecord(Document):
def __init__(self, *args, **kwargs):
super(TransactionDeletionRecord, self).__init__(*args, **kwargs)
self.batch_size = 5000
# Tasks are listed by their execution order
self.task_to_internal_method_map = OrderedDict(
{
"Delete Bins": "delete_bins",
"Delete Leads and Addresses": "delete_lead_addresses",
"Reset Company Values": "reset_company_values",
"Clear Notifications": "delete_notifications",
"Initialize Summary Table": "initialize_doctypes_to_be_deleted_table",
"Delete Transactions": "delete_company_transactions",
}
)
def validate(self):
frappe.only_for("System Manager")
@@ -29,104 +42,266 @@ class TransactionDeletionRecord(Document):
title=_("Not Allowed"),
)
	def generate_job_name_for_task(self, task=None):
		"""Return the unique background-job id for `task` on this document."""
		method = self.task_to_internal_method_map[task]
		return f"{self.name}_{method}"
	def generate_job_name_for_next_tasks(self, task=None):
		"""Return job ids for every task that follows `task` in execution order."""
		job_names = []
		current_task_idx = list(self.task_to_internal_method_map).index(task)
		for idx, task in enumerate(self.task_to_internal_method_map.keys(), 0):
			# generate job_name for next tasks
			if idx > current_task_idx:
				job_names.append(self.generate_job_name_for_task(task))
		return job_names
	def generate_job_name_for_all_tasks(self):
		"""Return job ids for every task in the execution-order map."""
		job_names = []
		for task in self.task_to_internal_method_map.keys():
			job_names.append(self.generate_job_name_for_task(task))
		return job_names
def before_submit(self):
if queued_docs := frappe.db.get_all(
"Transaction Deletion Record",
filters={"company": self.company, "status": ("in", ["Running", "Queued"]), "docstatus": 1},
pluck="name",
):
frappe.throw(
_(
"Cannot enqueue multi docs for one company. {0} is already queued/running for company: {1}"
).format(
comma_and([get_link_to_form("Transaction Deletion Record", x) for x in queued_docs]),
frappe.bold(self.company),
)
)
if not self.doctypes_to_be_ignored:
self.populate_doctypes_to_be_ignored_table()
self.delete_bins()
self.delete_lead_addresses()
self.reset_company_values()
clear_notifications()
self.delete_company_transactions()
def reset_task_flags(self):
self.clear_notifications = 0
self.delete_bin_data = 0
self.delete_leads_and_addresses = 0
self.delete_transactions = 0
self.initialize_doctypes_table = 0
self.reset_company_default_values = 0
def before_save(self):
self.status = ""
self.doctypes.clear()
self.reset_task_flags()
def on_submit(self):
self.db_set("status", "Queued")
def on_cancel(self):
self.db_set("status", "Cancelled")
	def enqueue_task(self, task: str | None = None):
		"""Run `task` inline or enqueue it as a background job.

		First verifies none of the tasks that follow `task` in the chain are
		already enqueued (guards against overlapping runs). When
		process_in_single_transaction is set, the task executes synchronously.
		"""
		if task and task in self.task_to_internal_method_map:
			# make sure that none of next tasks are already running
			job_names = self.generate_job_name_for_next_tasks(task=task)
			self.validate_running_task_for_doc(job_names=job_names)

			# Generate Job Id to uniquely identify each task for this document
			job_id = self.generate_job_name_for_task(task)
			if self.process_in_single_transaction:
				self.execute_task(task_to_execute=task)
			else:
				frappe.enqueue(
					"frappe.utils.background_jobs.run_doc_method",
					doctype=self.doctype,
					name=self.name,
					doc_method="execute_task",
					job_id=job_id,
					queue="long",
					enqueue_after_commit=True,  # only fire after the current transaction commits
					task_to_execute=task,
				)
	def execute_task(self, task_to_execute: str | None = None):
		"""Run the internal method mapped to `task_to_execute`.

		On failure the DB transaction is rolled back, the traceback is stored
		in error_log and status is set to "Failed"; the exception is not
		re-raised, so the task chain simply stops.
		"""
		if task_to_execute:
			method = self.task_to_internal_method_map[task_to_execute]
			if task := getattr(self, method, None):
				try:
					task()
				except Exception as err:
					frappe.db.rollback()
					traceback = frappe.get_traceback(with_context=True)
					if traceback:
						message = "Traceback: <br>" + traceback
						frappe.db.set_value(self.doctype, self.name, "error_log", message)
					frappe.db.set_value(self.doctype, self.name, "status", "Failed")
def delete_notifications(self):
self.validate_doc_status()
if not self.clear_notifications:
clear_notifications()
self.db_set("clear_notifications", 1)
self.enqueue_task(task="Initialize Summary Table")
def populate_doctypes_to_be_ignored_table(self):
doctypes_to_be_ignored_list = get_doctypes_to_be_ignored()
for doctype in doctypes_to_be_ignored_list:
self.append("doctypes_to_be_ignored", {"doctype_name": doctype})
def delete_bins(self):
frappe.db.sql(
"""delete from `tabBin` where warehouse in
(select name from tabWarehouse where company=%s)""",
self.company,
)
def validate_running_task_for_doc(self, job_names: list = None):
# at most only one task should be runnning
running_tasks = []
for x in job_names:
if is_job_enqueued(x):
running_tasks.append(create_job_id(x))
def delete_lead_addresses(self):
"""Delete addresses to which leads are linked"""
leads = frappe.get_all("Lead", filters={"company": self.company})
leads = ["'%s'" % row.get("name") for row in leads]
addresses = []
if leads:
addresses = frappe.db.sql_list(
"""select parent from `tabDynamic Link` where link_name
in ({leads})""".format(
leads=",".join(leads)
if running_tasks:
frappe.throw(
_("{0} is already running for {1}").format(
comma_and([get_link_to_form("RQ Job", x) for x in running_tasks]), self.name
)
)
if addresses:
addresses = ["%s" % frappe.db.escape(addr) for addr in addresses]
frappe.db.sql(
"""delete from `tabAddress` where name in ({addresses}) and
name not in (select distinct dl1.parent from `tabDynamic Link` dl1
inner join `tabDynamic Link` dl2 on dl1.parent=dl2.parent
and dl1.link_doctype<>dl2.link_doctype)""".format(
addresses=",".join(addresses)
)
	def validate_doc_status(self):
		"""Throw unless status is "Running" — tasks may only fire mid-run."""
		if self.status != "Running":
			frappe.throw(
				_("{0} is not running. Cannot trigger events for this Document").format(
					get_link_to_form("Transaction Deletion Record", self.name)
				)
			)
frappe.db.sql(
"""delete from `tabDynamic Link` where link_doctype='Lead'
and parenttype='Address' and link_name in ({leads})""".format(
	@frappe.whitelist()
	def start_deletion_tasks(self):
		"""Mark the document Running and kick off the first task of the chain."""
		# This method is the entry point for the chain of events that follow
		self.db_set("status", "Running")
		self.enqueue_task(task="Delete Bins")
def delete_bins(self):
self.validate_doc_status()
if not self.delete_bin_data:
frappe.db.sql(
"""delete from `tabBin` where warehouse in
(select name from tabWarehouse where company=%s)""",
self.company,
)
self.db_set("delete_bin_data", 1)
self.enqueue_task(task="Delete Leads and Addresses")
def delete_lead_addresses(self):
"""Delete addresses to which leads are linked"""
self.validate_doc_status()
if not self.delete_leads_and_addresses:
leads = frappe.get_all("Lead", filters={"company": self.company})
leads = ["'%s'" % row.get("name") for row in leads]
addresses = []
if leads:
addresses = frappe.db.sql_list(
"""select parent from `tabDynamic Link` where link_name
in ({leads})""".format(
leads=",".join(leads)
)
)
frappe.db.sql(
"""update `tabCustomer` set lead_name=NULL where lead_name in ({leads})""".format(
leads=",".join(leads)
if addresses:
addresses = ["%s" % frappe.db.escape(addr) for addr in addresses]
frappe.db.sql(
"""delete from `tabAddress` where name in ({addresses}) and
name not in (select distinct dl1.parent from `tabDynamic Link` dl1
inner join `tabDynamic Link` dl2 on dl1.parent=dl2.parent
and dl1.link_doctype<>dl2.link_doctype)""".format(
addresses=",".join(addresses)
)
)
frappe.db.sql(
"""delete from `tabDynamic Link` where link_doctype='Lead'
and parenttype='Address' and link_name in ({leads})""".format(
leads=",".join(leads)
)
)
frappe.db.sql(
"""update `tabCustomer` set lead_name=NULL where lead_name in ({leads})""".format(
leads=",".join(leads)
)
)
)
self.db_set("delete_leads_and_addresses", 1)
self.enqueue_task(task="Reset Company Values")
def reset_company_values(self):
company_obj = frappe.get_doc("Company", self.company)
company_obj.total_monthly_sales = 0
company_obj.sales_monthly_history = None
company_obj.save()
self.validate_doc_status()
if not self.reset_company_default_values:
company_obj = frappe.get_doc("Company", self.company)
company_obj.total_monthly_sales = 0
company_obj.sales_monthly_history = None
company_obj.save()
self.db_set("reset_company_default_values", 1)
self.enqueue_task(task="Clear Notifications")
def initialize_doctypes_to_be_deleted_table(self):
self.validate_doc_status()
if not self.initialize_doctypes_table:
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
tables = self.get_all_child_doctypes()
for docfield in docfields:
if docfield["parent"] != self.doctype:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield["parent"], docfield["fieldname"]
)
if no_of_docs > 0:
# Initialize
self.populate_doctypes_table(tables, docfield["parent"], docfield["fieldname"], 0)
self.db_set("initialize_doctypes_table", 1)
self.enqueue_task(task="Delete Transactions")
def delete_company_transactions(self):
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
self.validate_doc_status()
if not self.delete_transactions:
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
tables = self.get_all_child_doctypes()
for docfield in docfields:
if docfield["parent"] != self.doctype:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield["parent"], docfield["fieldname"]
)
if no_of_docs > 0:
self.delete_version_log(docfield["parent"], docfield["fieldname"])
reference_docs = frappe.get_all(
docfield["parent"], filters={docfield["fieldname"]: self.company}
tables = self.get_all_child_doctypes()
for docfield in self.doctypes:
if docfield.doctype_name != self.doctype and not docfield.done:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield.doctype_name, docfield.docfield_name
)
reference_doc_names = [r.name for r in reference_docs]
if no_of_docs > 0:
reference_docs = frappe.get_all(
docfield.doctype_name, filters={docfield.docfield_name: self.company}, limit=self.batch_size
)
reference_doc_names = [r.name for r in reference_docs]
self.delete_communications(docfield["parent"], reference_doc_names)
self.delete_comments(docfield["parent"], reference_doc_names)
self.unlink_attachments(docfield["parent"], reference_doc_names)
self.delete_version_log(docfield.doctype_name, reference_doc_names)
self.delete_communications(docfield.doctype_name, reference_doc_names)
self.delete_comments(docfield.doctype_name, reference_doc_names)
self.unlink_attachments(docfield.doctype_name, reference_doc_names)
self.delete_child_tables(docfield.doctype_name, reference_doc_names)
self.delete_docs_linked_with_specified_company(docfield.doctype_name, reference_doc_names)
processed = int(docfield.no_of_docs) + len(reference_doc_names)
frappe.db.set_value(docfield.doctype, docfield.name, "no_of_docs", processed)
else:
# reset naming series
naming_series = frappe.db.get_value("DocType", docfield.doctype_name, "autoname")
if naming_series:
if "#" in naming_series:
self.update_naming_series(naming_series, docfield.doctype_name)
frappe.db.set_value(docfield.doctype, docfield.name, "done", 1)
self.populate_doctypes_table(tables, docfield["parent"], no_of_docs)
self.delete_child_tables(docfield["parent"], docfield["fieldname"])
self.delete_docs_linked_with_specified_company(docfield["parent"], docfield["fieldname"])
naming_series = frappe.db.get_value("DocType", docfield["parent"], "autoname")
if naming_series:
if "#" in naming_series:
self.update_naming_series(naming_series, docfield["parent"])
pending_doctypes = frappe.db.get_all(
"Transaction Deletion Record Details",
filters={"parent": self.name, "done": 0},
pluck="doctype_name",
)
if pending_doctypes:
# as method is enqueued after commit, calling itself will not make validate_doc_status to throw
# recursively call this task to delete all transactions
self.enqueue_task(task="Delete Transactions")
else:
self.db_set("status", "Completed")
self.db_set("delete_transactions", 1)
self.db_set("error_log", None)
def get_doctypes_to_be_ignored_list(self):
singles = frappe.get_all("DocType", filters={"issingle": 1}, pluck="name")
@@ -155,25 +330,24 @@ class TransactionDeletionRecord(Document):
def get_number_of_docs_linked_with_specified_company(self, doctype, company_fieldname):
return frappe.db.count(doctype, {company_fieldname: self.company})
def populate_doctypes_table(self, tables, doctype, no_of_docs):
def populate_doctypes_table(self, tables, doctype, fieldname, no_of_docs):
self.flags.ignore_validate_update_after_submit = True
if doctype not in tables:
self.append("doctypes", {"doctype_name": doctype, "no_of_docs": no_of_docs})
def delete_child_tables(self, doctype, company_fieldname):
parent_docs_to_be_deleted = frappe.get_all(
doctype, {company_fieldname: self.company}, pluck="name"
)
self.append(
"doctypes", {"doctype_name": doctype, "docfield_name": fieldname, "no_of_docs": no_of_docs}
)
self.save(ignore_permissions=True)
def delete_child_tables(self, doctype, reference_doc_names):
child_tables = frappe.get_all(
"DocField", filters={"fieldtype": "Table", "parent": doctype}, pluck="options"
)
for batch in create_batch(parent_docs_to_be_deleted, self.batch_size):
for table in child_tables:
frappe.db.delete(table, {"parent": ["in", batch]})
for table in child_tables:
frappe.db.delete(table, {"parent": ["in", reference_doc_names]})
def delete_docs_linked_with_specified_company(self, doctype, company_fieldname):
frappe.db.delete(doctype, {company_fieldname: self.company})
def delete_docs_linked_with_specified_company(self, doctype, reference_doc_names):
frappe.db.delete(doctype, {"name": ("in", reference_doc_names)})
def update_naming_series(self, naming_series, doctype_name):
if "." in naming_series:
@@ -194,17 +368,11 @@ class TransactionDeletionRecord(Document):
frappe.db.sql("""update `tabSeries` set current = %s where name=%s""", (last, prefix))
def delete_version_log(self, doctype, company_fieldname):
dt = qb.DocType(doctype)
names = qb.from_(dt).select(dt.name).where(dt[company_fieldname] == self.company).run(as_list=1)
names = [x[0] for x in names]
if names:
versions = qb.DocType("Version")
for batch in create_batch(names, self.batch_size):
qb.from_(versions).delete().where(
(versions.ref_doctype == doctype) & (versions.docname.isin(batch))
).run()
def delete_version_log(self, doctype, docnames):
versions = qb.DocType("Version")
qb.from_(versions).delete().where(
(versions.ref_doctype == doctype) & (versions.docname.isin(docnames))
).run()
def delete_communications(self, doctype, reference_doc_names):
communications = frappe.get_all(
@@ -276,3 +444,34 @@ def get_doctypes_to_be_ignored():
doctypes_to_be_ignored.extend(frappe.get_hooks("company_data_to_be_ignored") or [])
return doctypes_to_be_ignored
@frappe.whitelist()
def is_deletion_doc_running(company: str | None = None, err_msg: str | None = None):
	"""Throw if a submitted Transaction Deletion Record is Running for `company`.

	`err_msg` is an optional extra sentence appended to the error message.
	No-op when `company` is falsy or no deletion document is running.
	"""
	if company:
		if running_deletion_jobs := frappe.db.get_all(
			"Transaction Deletion Record",
			filters={"docstatus": 1, "company": company, "status": "Running"},
		):
			if not err_msg:
				err_msg = ""
			frappe.throw(
				title=_("Deletion in Progress!"),
				msg=_("Transaction Deletion Document: {0} is running for this Company. {1}").format(
					get_link_to_form("Transaction Deletion Record", running_deletion_jobs[0].name), err_msg
				),
			)
def check_for_running_deletion_job(doc, method=None):
	"""Doc-event hook: block new transactions for a company while its deletion runs.

	Only applies when `doc`'s DocType has a "company" field.
	"""
	# Check if DocType has 'company' field
	# NOTE(review): the walrus binding `not_allowed` is never used; only the
	# truthiness of the query result matters here.
	df = qb.DocType("DocField")
	if (
		not_allowed := qb.from_(df)
		.select(df.parent)
		.where((df.fieldname == "company") & (df.parent == doc.doctype))
		.run()
	):
		is_deletion_doc_running(
			doc.company, _("Cannot make any transactions until the deletion job is completed")
		)

View File

@@ -2,11 +2,15 @@
// License: GNU General Public License v3. See license.txt
frappe.listview_settings["Transaction Deletion Record"] = {
add_fields: ["status"],
get_indicator: function (doc) {
if (doc.docstatus == 0) {
return [__("Draft"), "red"];
} else {
return [__("Completed"), "green"];
}
let colors = {
Queued: "orange",
Completed: "green",
Running: "blue",
Failed: "red",
};
let status = doc.status;
return [__(status), colors[status], "status,=," + status];
},
};

View File

@@ -5,8 +5,7 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"doctype_name",
"no_of_docs"
"doctype_name"
],
"fields": [
{
@@ -16,18 +15,12 @@
"label": "DocType",
"options": "DocType",
"reqd": 1
},
{
"fieldname": "no_of_docs",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Number of Docs"
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2021-05-08 23:10:46.166744",
"modified": "2024-02-04 10:56:27.413691",
"modified_by": "Administrator",
"module": "Setup",
"name": "Transaction Deletion Record Item",
@@ -35,5 +28,6 @@
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View File

@@ -809,7 +809,8 @@
"label": "Purchase Order",
"options": "Purchase Order",
"print_hide": 1,
"read_only": 1
"read_only": 1,
"search_index": 1
},
{
"fieldname": "column_break_82",
@@ -870,7 +871,7 @@
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2023-11-14 18:37:38.638144",
"modified": "2024-03-21 18:15:07.603672",
"modified_by": "Administrator",
"module": "Stock",
"name": "Delivery Note Item",

View File

@@ -3,7 +3,7 @@
import frappe
from frappe.tests.utils import FrappeTestCase, change_settings
from frappe.utils import add_days, cint, cstr, flt, today
from frappe.utils import add_days, cint, cstr, flt, nowtime, today
from pypika import functions as fn
import erpnext
@@ -2224,6 +2224,95 @@ class TestPurchaseReceipt(FrappeTestCase):
pr.reload()
self.assertEqual(pr.per_billed, 100)
def test_sle_qty_after_transaction(self):
item = make_item(
"_Test Item Qty After Transaction",
properties={"is_stock_item": 1, "valuation_method": "FIFO"},
).name
posting_date = today()
posting_time = nowtime()
# Step 1: Create Purchase Receipt
pr = make_purchase_receipt(
item_code=item,
qty=1,
rate=100,
posting_date=posting_date,
posting_time=posting_time,
do_not_save=1,
)
for i in range(9):
pr.append(
"items",
{
"item_code": item,
"qty": 1,
"rate": 100,
"warehouse": pr.items[0].warehouse,
"cost_center": pr.items[0].cost_center,
"expense_account": pr.items[0].expense_account,
"uom": pr.items[0].uom,
"stock_uom": pr.items[0].stock_uom,
"conversion_factor": pr.items[0].conversion_factor,
},
)
self.assertEqual(len(pr.items), 10)
pr.save()
pr.submit()
data = frappe.get_all(
"Stock Ledger Entry",
fields=["qty_after_transaction", "creation", "posting_datetime"],
filters={"voucher_no": pr.name, "is_cancelled": 0},
order_by="creation",
)
for index, d in enumerate(data):
self.assertEqual(d.qty_after_transaction, 1 + index)
# Step 2: Create Purchase Receipt
pr = make_purchase_receipt(
item_code=item,
qty=1,
rate=100,
posting_date=posting_date,
posting_time=posting_time,
do_not_save=1,
)
for i in range(9):
pr.append(
"items",
{
"item_code": item,
"qty": 1,
"rate": 100,
"warehouse": pr.items[0].warehouse,
"cost_center": pr.items[0].cost_center,
"expense_account": pr.items[0].expense_account,
"uom": pr.items[0].uom,
"stock_uom": pr.items[0].stock_uom,
"conversion_factor": pr.items[0].conversion_factor,
},
)
self.assertEqual(len(pr.items), 10)
pr.save()
pr.submit()
data = frappe.get_all(
"Stock Ledger Entry",
fields=["qty_after_transaction", "creation", "posting_datetime"],
filters={"voucher_no": pr.name, "is_cancelled": 0},
order_by="creation",
)
for index, d in enumerate(data):
self.assertEqual(d.qty_after_transaction, 11 + index)
def prepare_data_for_internal_transfer():
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_internal_supplier

View File

@@ -1671,24 +1671,22 @@ class TestStockEntry(FrappeTestCase):
item_code = "Test Negative Item - 001"
item_doc = create_item(item_code=item_code, is_stock_item=1, valuation_rate=10)
make_stock_entry(
se1 = make_stock_entry(
item_code=item_code,
posting_date=add_days(today(), -3),
posting_time="00:00:00",
purpose="Material Receipt",
target="_Test Warehouse - _TC",
qty=10,
to_warehouse="_Test Warehouse - _TC",
do_not_save=True,
)
make_stock_entry(
se2 = make_stock_entry(
item_code=item_code,
posting_date=today(),
posting_time="00:00:00",
purpose="Material Receipt",
source="_Test Warehouse - _TC",
qty=8,
from_warehouse="_Test Warehouse - _TC",
do_not_save=True,
)
sr_doc = create_stock_reconciliation(

View File

@@ -11,6 +11,7 @@
"warehouse",
"posting_date",
"posting_time",
"posting_datetime",
"is_adjustment_entry",
"column_break_6",
"voucher_type",
@@ -96,7 +97,6 @@
"oldfieldtype": "Date",
"print_width": "100px",
"read_only": 1,
"search_index": 1,
"width": "100px"
},
{
@@ -249,7 +249,6 @@
"options": "Company",
"print_width": "150px",
"read_only": 1,
"search_index": 1,
"width": "150px"
},
{
@@ -316,6 +315,11 @@
"fieldname": "is_adjustment_entry",
"fieldtype": "Check",
"label": "Is Adjustment Entry"
},
{
"fieldname": "posting_datetime",
"fieldtype": "Datetime",
"label": "Posting Datetime"
}
],
"hide_toolbar": 1,
@@ -324,7 +328,7 @@
"in_create": 1,
"index_web_pages_for_search": 1,
"links": [],
"modified": "2024-03-13 09:56:13.021696",
"modified": "2024-02-07 09:18:13.999231",
"modified_by": "Administrator",
"module": "Stock",
"name": "Stock Ledger Entry",

View File

@@ -52,6 +52,12 @@ class StockLedgerEntry(Document):
self.validate_with_last_transaction_posting_time()
self.validate_inventory_dimension_negative_stock()
def set_posting_datetime(self):
from erpnext.stock.utils import get_combine_datetime
self.posting_datetime = get_combine_datetime(self.posting_date, self.posting_time)
self.db_set("posting_datetime", self.posting_datetime)
def validate_inventory_dimension_negative_stock(self):
if self.is_cancelled:
return
@@ -122,6 +128,7 @@ class StockLedgerEntry(Document):
return inv_dimension_dict
def on_submit(self):
self.set_posting_datetime()
self.check_stock_frozen_date()
self.calculate_batch_qty()
@@ -293,9 +300,7 @@ class StockLedgerEntry(Document):
def on_doctype_update():
frappe.db.add_index(
"Stock Ledger Entry", fields=["posting_date", "posting_time"], index_name="posting_sort_index"
)
frappe.db.add_index("Stock Ledger Entry", ["voucher_no", "voucher_type"])
frappe.db.add_index("Stock Ledger Entry", ["batch_no", "item_code", "warehouse"])
frappe.db.add_index("Stock Ledger Entry", ["warehouse", "item_code"], "item_warehouse")
frappe.db.add_index("Stock Ledger Entry", ["posting_datetime", "creation"])

View File

@@ -2,6 +2,7 @@
# See license.txt
import json
import time
from uuid import uuid4
import frappe
@@ -1066,7 +1067,7 @@ class TestStockLedgerEntry(FrappeTestCase, StockTestMixin):
frappe.qb.from_(sle)
.select("qty_after_transaction")
.where((sle.item_code == item) & (sle.warehouse == warehouse) & (sle.is_cancelled == 0))
.orderby(CombineDatetime(sle.posting_date, sle.posting_time))
.orderby(sle.posting_datetime)
.orderby(sle.creation)
).run(pluck=True)
@@ -1143,6 +1144,89 @@ class TestStockLedgerEntry(FrappeTestCase, StockTestMixin):
except Exception as e:
self.fail("Double processing of qty for clashing timestamp.")
def test_previous_sle_with_clashed_timestamp(self):
item = make_item().name
warehouse = "_Test Warehouse - _TC"
reciept1 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=100,
rate=10,
posting_date="2021-01-01",
posting_time="02:00:00",
)
time.sleep(3)
reciept2 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=5,
posting_date="2021-01-01",
rate=10,
posting_time="02:00:00.1234",
)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept1.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 100)
self.assertEqual(sle[0].actual_qty, 100)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept2.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 105)
self.assertEqual(sle[0].actual_qty, 5)
def test_backdated_sle_with_same_timestamp(self):
item = make_item().name
warehouse = "_Test Warehouse - _TC"
reciept1 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=5,
posting_date="2021-01-01",
rate=10,
posting_time="02:00:00.1234",
)
time.sleep(3)
# backdated entry with same timestamp but different ms part
reciept2 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=100,
rate=10,
posting_date="2021-01-01",
posting_time="02:00:00",
)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept1.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 5)
self.assertEqual(sle[0].actual_qty, 5)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept2.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 105)
self.assertEqual(sle[0].actual_qty, 100)
@change_settings("System Settings", {"float_precision": 3, "currency_precision": 2})
def test_transfer_invariants(self):
"""Extact stock value should be transferred."""

View File

@@ -5,7 +5,7 @@
import frappe
from frappe import _
from frappe.query_builder import Field
from frappe.query_builder.functions import CombineDatetime, Min
from frappe.query_builder.functions import Min
from frappe.utils import add_days, getdate, today
import erpnext
@@ -75,7 +75,7 @@ def get_data(report_filters):
& (sle.company == report_filters.company)
& (sle.is_cancelled == 0)
)
.orderby(CombineDatetime(sle.posting_date, sle.posting_time), sle.creation)
.orderby(sle.posting_datetime, sle.creation)
).run(as_dict=True)
for d in data:

View File

@@ -213,13 +213,11 @@ def get_stock_ledger_entries(filters, items):
query = (
frappe.qb.from_(sle)
.force_index("posting_sort_index")
.left_join(sle2)
.on(
(sle.item_code == sle2.item_code)
& (sle.warehouse == sle2.warehouse)
& (sle.posting_date < sle2.posting_date)
& (sle.posting_time < sle2.posting_time)
& (sle.posting_datetime < sle2.posting_datetime)
& (sle.name < sle2.name)
)
.select(sle.item_code, sle.warehouse, sle.qty_after_transaction, sle.company)

View File

@@ -8,7 +8,7 @@ from typing import Any, Dict, List, Optional, TypedDict
import frappe
from frappe import _
from frappe.query_builder import Order
from frappe.query_builder.functions import Coalesce, CombineDatetime
from frappe.query_builder.functions import Coalesce
from frappe.utils import add_days, cint, date_diff, flt, getdate
from frappe.utils.nestedset import get_descendants_of
@@ -283,7 +283,7 @@ class StockBalanceReport(object):
item_table.item_name,
)
.where((sle.docstatus < 2) & (sle.is_cancelled == 0))
.orderby(CombineDatetime(sle.posting_date, sle.posting_time))
.orderby(sle.posting_datetime)
.orderby(sle.creation)
.orderby(sle.actual_qty)
)

View File

@@ -276,7 +276,7 @@ def get_stock_ledger_entries(filters, items):
frappe.qb.from_(sle)
.select(
sle.item_code,
CombineDatetime(sle.posting_date, sle.posting_time).as_("date"),
sle.posting_datetime.as_("date"),
sle.warehouse,
sle.posting_date,
sle.posting_time,

View File

@@ -7,13 +7,14 @@ from typing import Optional, Set, Tuple
import frappe
from frappe import _
from frappe.model.meta import get_field_precision
from frappe.query_builder.functions import CombineDatetime, Sum
from frappe.query_builder.functions import Sum
from frappe.utils import cint, cstr, flt, get_link_to_form, getdate, now, nowdate
import erpnext
from erpnext.stock.doctype.bin.bin import update_qty as update_bin_qty
from erpnext.stock.doctype.inventory_dimension.inventory_dimension import get_inventory_dimensions
from erpnext.stock.utils import (
get_combine_datetime,
get_incoming_outgoing_rate_for_cancel,
get_incoming_rate,
get_or_make_bin,
@@ -69,6 +70,7 @@ def make_sl_entries(sl_entries, allow_negative_stock=False, via_landed_cost_vouc
args = sle_doc.as_dict()
args["allow_zero_valuation_rate"] = sle.get("allow_zero_valuation_rate") or False
args["posting_datetime"] = get_combine_datetime(args.posting_date, args.posting_time)
if sle.get("voucher_type") == "Stock Reconciliation":
# preserve previous_qty_after_transaction for qty reposting
@@ -431,12 +433,14 @@ class update_entries_after(object):
self.process_sle(sle)
def get_sle_against_current_voucher(self):
self.args["time_format"] = "%H:%i:%s"
self.args["posting_datetime"] = get_combine_datetime(
self.args.posting_date, self.args.posting_time
)
return frappe.db.sql(
"""
select
*, timestamp(posting_date, posting_time) as "timestamp"
*, posting_datetime as "timestamp"
from
`tabStock Ledger Entry`
where
@@ -444,8 +448,7 @@ class update_entries_after(object):
and warehouse = %(warehouse)s
and is_cancelled = 0
and (
posting_date = %(posting_date)s and
time_format(posting_time, %(time_format)s) = time_format(%(posting_time)s, %(time_format)s)
posting_datetime = %(posting_datetime)s
)
order by
creation ASC
@@ -1186,11 +1189,11 @@ class update_entries_after(object):
def get_previous_sle_of_current_voucher(args, operator="<", exclude_current_voucher=False):
"""get stock ledger entries filtered by specific posting datetime conditions"""
args["time_format"] = "%H:%i:%s"
if not args.get("posting_date"):
args["posting_date"] = "1900-01-01"
if not args.get("posting_time"):
args["posting_time"] = "00:00"
args["posting_datetime"] = "1900-01-01 00:00:00"
if not args.get("posting_datetime"):
args["posting_datetime"] = get_combine_datetime(args["posting_date"], args["posting_time"])
voucher_condition = ""
if exclude_current_voucher:
@@ -1199,23 +1202,20 @@ def get_previous_sle_of_current_voucher(args, operator="<", exclude_current_vouc
sle = frappe.db.sql(
"""
select *, timestamp(posting_date, posting_time) as "timestamp"
select *, posting_datetime as "timestamp"
from `tabStock Ledger Entry`
where item_code = %(item_code)s
and warehouse = %(warehouse)s
and is_cancelled = 0
{voucher_condition}
and (
posting_date < %(posting_date)s or
(
posting_date = %(posting_date)s and
time_format(posting_time, %(time_format)s) {operator} time_format(%(posting_time)s, %(time_format)s)
)
posting_datetime {operator} %(posting_datetime)s
)
order by timestamp(posting_date, posting_time) desc, creation desc
order by posting_datetime desc, creation desc
limit 1
for update""".format(
operator=operator, voucher_condition=voucher_condition
operator=operator,
voucher_condition=voucher_condition,
),
args,
as_dict=1,
@@ -1256,9 +1256,7 @@ def get_stock_ledger_entries(
extra_cond=None,
):
"""get stock ledger entries filtered by specific posting datetime conditions"""
conditions = " and timestamp(posting_date, posting_time) {0} timestamp(%(posting_date)s, %(posting_time)s)".format(
operator
)
conditions = " and posting_datetime {0} %(posting_datetime)s".format(operator)
if previous_sle.get("warehouse"):
conditions += " and warehouse = %(warehouse)s"
elif previous_sle.get("warehouse_condition"):
@@ -1284,9 +1282,11 @@ def get_stock_ledger_entries(
)
if not previous_sle.get("posting_date"):
previous_sle["posting_date"] = "1900-01-01"
if not previous_sle.get("posting_time"):
previous_sle["posting_time"] = "00:00"
previous_sle["posting_datetime"] = "1900-01-01 00:00:00"
else:
previous_sle["posting_datetime"] = get_combine_datetime(
previous_sle["posting_date"], previous_sle["posting_time"]
)
if operator in (">", "<=") and previous_sle.get("name"):
conditions += " and name!=%(name)s"
@@ -1299,12 +1299,12 @@ def get_stock_ledger_entries(
return frappe.db.sql(
"""
select *, timestamp(posting_date, posting_time) as "timestamp"
select *, posting_datetime as "timestamp"
from `tabStock Ledger Entry`
where item_code = %%(item_code)s
and is_cancelled = 0
%(conditions)s
order by timestamp(posting_date, posting_time) %(order)s, creation %(order)s
order by posting_datetime %(order)s, creation %(order)s
%(limit)s %(for_update)s"""
% {
"conditions": conditions,
@@ -1330,7 +1330,7 @@ def get_sle_by_voucher_detail_no(voucher_detail_no, excluded_sle=None):
"posting_date",
"posting_time",
"voucher_detail_no",
"timestamp(posting_date, posting_time) as timestamp",
"posting_datetime as timestamp",
],
as_dict=1,
)
@@ -1340,15 +1340,18 @@ def get_batch_incoming_rate(
item_code, warehouse, batch_no, posting_date, posting_time, creation=None
):
import datetime
sle = frappe.qb.DocType("Stock Ledger Entry")
timestamp_condition = CombineDatetime(sle.posting_date, sle.posting_time) < CombineDatetime(
posting_date, posting_time
)
posting_datetime = get_combine_datetime(posting_date, posting_time)
if not creation:
posting_datetime = posting_datetime + datetime.timedelta(milliseconds=1)
timestamp_condition = sle.posting_datetime < posting_datetime
if creation:
timestamp_condition |= (
CombineDatetime(sle.posting_date, sle.posting_time)
== CombineDatetime(posting_date, posting_time)
sle.posting_datetime == get_combine_datetime(posting_date, posting_time)
) & (sle.creation < creation)
batch_details = (
@@ -1411,7 +1414,7 @@ def get_valuation_rate(
AND valuation_rate >= 0
AND is_cancelled = 0
AND NOT (voucher_no = %s AND voucher_type = %s)
order by posting_date desc, posting_time desc, name desc limit 1""",
order by posting_datetime desc, name desc limit 1""",
(item_code, warehouse, voucher_no, voucher_type),
)
@@ -1472,7 +1475,7 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
datetime_limit_condition = ""
qty_shift = args.actual_qty
args["time_format"] = "%H:%i:%s"
args["posting_datetime"] = get_combine_datetime(args["posting_date"], args["posting_time"])
# find difference/shift in qty caused by stock reconciliation
if args.voucher_type == "Stock Reconciliation":
@@ -1482,8 +1485,6 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
next_stock_reco_detail = get_next_stock_reco(args)
if next_stock_reco_detail:
detail = next_stock_reco_detail[0]
# add condition to update SLEs before this date & time
datetime_limit_condition = get_datetime_limit_condition(detail)
frappe.db.sql(
@@ -1496,13 +1497,9 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
and voucher_no != %(voucher_no)s
and is_cancelled = 0
and (
posting_date > %(posting_date)s or
(
posting_date = %(posting_date)s and
time_format(posting_time, %(time_format)s) > time_format(%(posting_time)s, %(time_format)s)
)
posting_datetime > %(posting_datetime)s
)
{datetime_limit_condition}
{datetime_limit_condition}
""",
args,
)
@@ -1557,20 +1554,11 @@ def get_next_stock_reco(kwargs):
& (sle.voucher_no != kwargs.get("voucher_no"))
& (sle.is_cancelled == 0)
& (
(
CombineDatetime(sle.posting_date, sle.posting_time)
> CombineDatetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
)
| (
(
CombineDatetime(sle.posting_date, sle.posting_time)
== CombineDatetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
)
& (sle.creation > kwargs.get("creation"))
)
sle.posting_datetime
>= get_combine_datetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
)
)
.orderby(CombineDatetime(sle.posting_date, sle.posting_time))
.orderby(sle.posting_datetime)
.orderby(sle.creation)
.limit(1)
)
@@ -1582,11 +1570,13 @@ def get_next_stock_reco(kwargs):
def get_datetime_limit_condition(detail):
posting_datetime = get_combine_datetime(detail.posting_date, detail.posting_time)
return f"""
and
(timestamp(posting_date, posting_time) < timestamp('{detail.posting_date}', '{detail.posting_time}')
(posting_datetime < '{posting_datetime}'
or (
timestamp(posting_date, posting_time) = timestamp('{detail.posting_date}', '{detail.posting_time}')
posting_datetime = '{posting_datetime}'
and creation < '{detail.creation}'
)
)"""
@@ -1659,14 +1649,11 @@ def get_future_sle_with_negative_qty(sle):
(SLE.item_code == sle.item_code)
& (SLE.warehouse == sle.warehouse)
& (SLE.voucher_no != sle.voucher_no)
& (
CombineDatetime(SLE.posting_date, SLE.posting_time)
>= CombineDatetime(sle.posting_date, sle.posting_time)
)
& (SLE.posting_datetime >= get_combine_datetime(sle.posting_date, sle.posting_time))
& (SLE.is_cancelled == 0)
& (SLE.qty_after_transaction < 0)
)
.orderby(CombineDatetime(SLE.posting_date, SLE.posting_time))
.orderby(SLE.posting_datetime)
.limit(1)
)
@@ -1681,20 +1668,20 @@ def get_future_sle_with_negative_batch_qty(args):
"""
with batch_ledger as (
select
posting_date, posting_time, voucher_type, voucher_no,
sum(actual_qty) over (order by posting_date, posting_time, creation) as cumulative_total
posting_date, posting_time, posting_datetime, voucher_type, voucher_no,
sum(actual_qty) over (order by posting_datetime, creation) as cumulative_total
from `tabStock Ledger Entry`
where
item_code = %(item_code)s
and warehouse = %(warehouse)s
and batch_no=%(batch_no)s
and is_cancelled = 0
order by posting_date, posting_time, creation
order by posting_datetime, creation
)
select * from batch_ledger
where
cumulative_total < 0.0
and timestamp(posting_date, posting_time) >= timestamp(%(posting_date)s, %(posting_time)s)
and posting_datetime >= %(posting_datetime)s
limit 1
""",
args,
@@ -1746,6 +1733,7 @@ def is_internal_transfer(sle):
def get_stock_value_difference(item_code, warehouse, posting_date, posting_time, voucher_no=None):
table = frappe.qb.DocType("Stock Ledger Entry")
posting_datetime = get_combine_datetime(posting_date, posting_time)
query = (
frappe.qb.from_(table)
@@ -1754,10 +1742,7 @@ def get_stock_value_difference(item_code, warehouse, posting_date, posting_time,
(table.is_cancelled == 0)
& (table.item_code == item_code)
& (table.warehouse == warehouse)
& (
(table.posting_date < posting_date)
| ((table.posting_date == posting_date) & (table.posting_time <= posting_time))
)
& (table.posting_datetime <= posting_datetime)
)
)

View File

@@ -8,7 +8,7 @@ from typing import Dict, Optional
import frappe
from frappe import _
from frappe.query_builder.functions import CombineDatetime, IfNull, Sum
from frappe.utils import cstr, flt, get_link_to_form, nowdate, nowtime
from frappe.utils import cstr, flt, get_link_to_form, get_time, getdate, nowdate, nowtime
import erpnext
from erpnext.stock.doctype.warehouse.warehouse import get_child_warehouses
@@ -619,3 +619,18 @@ def _update_item_info(scan_result: Dict[str, Optional[str]]) -> Dict[str, Option
):
scan_result.update(item_info)
return scan_result
def get_combine_datetime(posting_date, posting_time):
import datetime
if isinstance(posting_date, str):
posting_date = getdate(posting_date)
if isinstance(posting_time, str):
posting_time = get_time(posting_time)
if isinstance(posting_time, datetime.timedelta):
posting_time = (datetime.datetime.min + posting_time).time()
return datetime.datetime.combine(posting_date, posting_time).replace(microsecond=0)

View File

@@ -162,7 +162,7 @@ def create_log(doc_name, e, from_doctype, to_doctype, status, log_date=None, res
transaction_log.from_doctype = from_doctype
transaction_log.to_doctype = to_doctype
transaction_log.retried = restarted
transaction_log.save()
transaction_log.save(ignore_permissions=True)
def show_job_status(fail_count, deserialized_data_count, to_doctype):