Merge pull request #40677 from frappe/version-14-hotfix

chore: release v14
This commit is contained in:
Deepesh Garg
2024-03-27 08:56:10 +05:30
committed by GitHub
53 changed files with 1981 additions and 308 deletions

View File

@@ -0,0 +1,100 @@
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// Invoke a whitelisted method on the Bisect Accounting Statements document
// and re-render the bisection heatmap once the server call completes.
// DRYs up the four custom-button handlers that previously repeated this
// frm.call block verbatim.
function run_bisect_action(frm, method, freeze_message) {
	frm.call({
		doc: frm.doc,
		method,
		freeze: true,
		freeze_message,
		callback: () => frm.trigger("render_heatmap"),
	});
}

frappe.ui.form.on("Bisect Accounting Statements", {
	onload(frm) {
		// Draw the heatmap as soon as the form loads.
		frm.trigger("render_heatmap");
	},
	refresh(frm) {
		// Navigation buttons for walking the bisection tree.
		frm.add_custom_button(__("Bisect Left"), () => frm.trigger("bisect_left"));
		frm.add_custom_button(__("Bisect Right"), () => frm.trigger("bisect_right"));
		frm.add_custom_button(__("Up"), () => frm.trigger("move_up"));
		frm.add_custom_button(__("Build Tree"), () => frm.trigger("build_tree"));
	},
	render_heatmap(frm) {
		// Render a day-level heatmap of the full period; days inside the
		// currently bisected window get weight 1.0, all others 0.0.
		const bisect_heatmap = frm.get_field("bisect_heatmap").$wrapper;
		bisect_heatmap.addClass("bisect_heatmap_location");

		// milliseconds in a day
		const msiad = 24 * 60 * 60 * 1000;
		const datapoints = {};
		const fr_dt = new Date(frm.doc.from_date).getTime();
		const to_dt = new Date(frm.doc.to_date).getTime();
		const bisect_start = new Date(frm.doc.current_from_date).getTime();
		const bisect_end = new Date(frm.doc.current_to_date).getTime();

		for (let x = fr_dt; x <= to_dt; x += msiad) {
			// frappe.Chart heatmaps key datapoints by epoch seconds.
			const epoch_in_seconds = x / 1000;
			datapoints[epoch_in_seconds] = bisect_start <= x && x <= bisect_end ? 1.0 : 0.0;
		}

		new frappe.Chart(".bisect_heatmap_location", {
			type: "heatmap",
			data: {
				dataPoints: datapoints,
				start: new Date(frm.doc.from_date),
				end: new Date(frm.doc.to_date),
			},
			countLabel: "Bisecting",
			discreteDomains: 1,
		});
	},
	bisect_left(frm) {
		run_bisect_action(frm, "bisect_left", __("Bisecting Left ..."));
	},
	bisect_right(frm) {
		run_bisect_action(frm, "bisect_right", __("Bisecting Right ..."));
	},
	move_up(frm) {
		run_bisect_action(frm, "move_up", __("Moving up in tree ..."));
	},
	build_tree(frm) {
		run_bisect_action(frm, "build_tree", __("Rebuilding BTree for period ..."));
	},
});

View File

@@ -0,0 +1,194 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2023-09-15 21:28:28.054773",
"default_view": "List",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"section_break_cvfg",
"company",
"column_break_hcam",
"from_date",
"column_break_qxbi",
"to_date",
"column_break_iwny",
"algorithm",
"section_break_8ph9",
"current_node",
"section_break_ngid",
"bisect_heatmap",
"section_break_hmsy",
"bisecting_from",
"current_from_date",
"column_break_uqyd",
"bisecting_to",
"current_to_date",
"section_break_hbyo",
"heading_cppb",
"p_l_summary",
"column_break_aivo",
"balance_sheet_summary",
"b_s_summary",
"column_break_gvwx",
"difference_heading",
"difference"
],
"fields": [
{
"fieldname": "column_break_qxbi",
"fieldtype": "Column Break"
},
{
"fieldname": "from_date",
"fieldtype": "Datetime",
"label": "From Date"
},
{
"fieldname": "to_date",
"fieldtype": "Datetime",
"label": "To Date"
},
{
"default": "BFS",
"fieldname": "algorithm",
"fieldtype": "Select",
"label": "Algorithm",
"options": "BFS\nDFS"
},
{
"fieldname": "column_break_iwny",
"fieldtype": "Column Break"
},
{
"fieldname": "current_node",
"fieldtype": "Link",
"label": "Current Node",
"options": "Bisect Nodes"
},
{
"fieldname": "section_break_hmsy",
"fieldtype": "Section Break"
},
{
"fieldname": "current_from_date",
"fieldtype": "Datetime",
"read_only": 1
},
{
"fieldname": "current_to_date",
"fieldtype": "Datetime",
"read_only": 1
},
{
"fieldname": "column_break_uqyd",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_hbyo",
"fieldtype": "Section Break"
},
{
"fieldname": "p_l_summary",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "b_s_summary",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "difference",
"fieldtype": "Float",
"read_only": 1
},
{
"fieldname": "column_break_aivo",
"fieldtype": "Column Break"
},
{
"fieldname": "column_break_gvwx",
"fieldtype": "Column Break"
},
{
"fieldname": "company",
"fieldtype": "Link",
"label": "Company",
"options": "Company"
},
{
"fieldname": "column_break_hcam",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_ngid",
"fieldtype": "Section Break"
},
{
"fieldname": "section_break_8ph9",
"fieldtype": "Section Break",
"hidden": 1
},
{
"fieldname": "bisect_heatmap",
"fieldtype": "HTML",
"label": "Heatmap"
},
{
"fieldname": "heading_cppb",
"fieldtype": "Heading",
"label": "Profit and Loss Summary"
},
{
"fieldname": "balance_sheet_summary",
"fieldtype": "Heading",
"label": "Balance Sheet Summary"
},
{
"fieldname": "difference_heading",
"fieldtype": "Heading",
"label": "Difference"
},
{
"fieldname": "bisecting_from",
"fieldtype": "Heading",
"label": "Bisecting From"
},
{
"fieldname": "bisecting_to",
"fieldtype": "Heading",
"label": "Bisecting To"
},
{
"fieldname": "section_break_cvfg",
"fieldtype": "Section Break"
}
],
"hide_toolbar": 1,
"index_web_pages_for_search": 1,
"issingle": 1,
"links": [],
"modified": "2023-12-01 16:49:54.073890",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Bisect Accounting Statements",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "Administrator",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,226 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import datetime
from collections import deque
from math import floor
import frappe
from dateutil.relativedelta import relativedelta
from frappe import _
from frappe.model.document import Document
from frappe.utils import getdate
from frappe.utils.data import guess_date_format
class BisectAccountingStatements(Document):
	"""Single DocType that bisects an accounting period to locate the date
	range where the Profit & Loss and Balance Sheet reports diverge.

	A binary tree of date-range nodes ("Bisect Nodes") is built over
	[from_date, to_date]; the user then walks left/right/up through the
	tree, recomputing (or re-using cached) report summaries per node.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		algorithm: DF.Literal["BFS", "DFS"]
		b_s_summary: DF.Float
		company: DF.Link | None
		current_from_date: DF.Datetime | None
		current_node: DF.Link | None
		current_to_date: DF.Datetime | None
		difference: DF.Float
		from_date: DF.Datetime | None
		p_l_summary: DF.Float
		to_date: DF.Datetime | None
	# end: auto-generated types

	def validate(self):
		self.validate_dates()

	def validate_dates(self):
		"""Reject an inverted period (from_date after to_date)."""
		if getdate(self.from_date) > getdate(self.to_date):
			frappe.throw(
				_("From Date: {0} cannot be greater than To date: {1}").format(
					frappe.bold(self.from_date), frappe.bold(self.to_date)
				)
			)

	def _new_node(self, from_date: datetime, to_date: datetime, root_name=None):
		"""Create, insert and return a Bisect Nodes doc for the given period.

		root_name is None for the tree root; child nodes additionally get
		generated=False to mark that no summary has been computed yet.
		"""
		node = frappe.new_doc("Bisect Nodes")
		node.root = root_name
		node.period_from_date = from_date
		node.period_to_date = to_date
		if root_name is not None:
			node.generated = False
		node.insert()
		return node

	def _build(self, from_date: datetime, to_date: datetime, breadth_first: bool):
		"""Build the bisection tree.

		Shared implementation for bfs() and dfs(): the only difference
		between the two originals was popping from the left (queue/BFS)
		versus the right (stack/DFS) of the work list, which is preserved
		here via `breadth_first`.
		"""
		root = self._new_node(from_date, to_date)
		work_list = deque([root])
		while work_list:
			cur_node = work_list.popleft() if breadth_first else work_list.pop()
			delta = cur_node.period_to_date - cur_node.period_from_date
			if delta.days == 0:
				# Single-day period: cannot be split further.
				continue
			cur_floor = floor(delta.days / 2)

			# Left child covers the first half of the period.
			next_to_date = cur_node.period_from_date + relativedelta(days=+cur_floor)
			left_node = self._new_node(cur_node.period_from_date, next_to_date, cur_node.name)
			cur_node.left_child = left_node.name
			work_list.append(left_node)

			# Right child covers the remainder (starts the day after the split).
			next_from_date = cur_node.period_from_date + relativedelta(days=+(cur_floor + 1))
			right_node = self._new_node(next_from_date, cur_node.period_to_date, cur_node.name)
			cur_node.right_child = right_node.name
			work_list.append(right_node)

			cur_node.save()

	def bfs(self, from_date: datetime, to_date: datetime):
		"""Build the tree breadth-first (level by level)."""
		self._build(from_date, to_date, breadth_first=True)

	def dfs(self, from_date: datetime, to_date: datetime):
		"""Build the tree depth-first (right branch explored first)."""
		self._build(from_date, to_date, breadth_first=False)

	@frappe.whitelist()
	def build_tree(self):
		"""Drop any previous tree, rebuild it with the chosen algorithm and
		point the cursor at the root of the full period."""
		frappe.db.delete("Bisect Nodes")

		# Convert str to datetime format
		dt_format = guess_date_format(self.from_date)
		from_date = datetime.datetime.strptime(self.from_date, dt_format)
		to_date = datetime.datetime.strptime(self.to_date, dt_format)

		if self.algorithm == "BFS":
			self.bfs(from_date, to_date)
		if self.algorithm == "DFS":
			self.dfs(from_date, to_date)

		# set root as current node
		root = frappe.db.get_all("Bisect Nodes", filters={"root": ["is", "not set"]})[0]
		self.get_report_summary()
		self.current_node = root.name
		self.current_from_date = self.from_date
		self.current_to_date = self.to_date
		self.save()

	def get_report_summary(self):
		"""Run the P&L and Balance Sheet script reports for the current
		window and store their primitive summaries plus the difference."""
		filters = {
			"company": self.company,
			"filter_based_on": "Date Range",
			"period_start_date": self.current_from_date,
			"period_end_date": self.current_to_date,
			"periodicity": "Yearly",
		}
		# Index 5 of execute_script_report() is the primitive summary value
		# these reports were extended to return in this changeset.
		pl_summary = frappe.get_doc("Report", "Profit and Loss Statement")
		self.p_l_summary = pl_summary.execute_script_report(filters=filters)[5]

		bs_summary = frappe.get_doc("Report", "Balance Sheet")
		self.b_s_summary = bs_summary.execute_script_report(filters=filters)[5]
		self.difference = abs(self.p_l_summary - self.b_s_summary)

	def update_node(self):
		"""Cache the freshly computed summaries on the current node."""
		current_node = frappe.get_doc("Bisect Nodes", self.current_node)
		current_node.balance_sheet_summary = self.b_s_summary
		current_node.profit_loss_summary = self.p_l_summary
		current_node.difference = self.difference
		current_node.generated = True
		current_node.save()

	def current_node_has_summary_info(self):
		"Assertion method"
		return frappe.db.get_value("Bisect Nodes", self.current_node, "generated")

	def fetch_summary_info_from_current_node(self):
		"""Load cached report figures from the current node.

		Bug fix: the original assigned balance_sheet_summary to
		p_l_summary and profit_loss_summary to b_s_summary (swapped).
		"""
		current_node = frappe.get_doc("Bisect Nodes", self.current_node)
		self.p_l_summary = current_node.profit_loss_summary
		self.b_s_summary = current_node.balance_sheet_summary
		self.difference = abs(self.p_l_summary - self.b_s_summary)

	def fetch_or_calculate(self):
		"""Prefer cached node figures; otherwise compute and cache them."""
		if self.current_node_has_summary_info():
			self.fetch_summary_info_from_current_node()
		else:
			self.get_report_summary()
			self.update_node()

	@frappe.whitelist()
	def bisect_left(self):
		"""Move the cursor to the left child (first half of the period)."""
		if self.current_node is not None:
			cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
			if cur_node.left_child is not None:
				lft_node = frappe.get_doc("Bisect Nodes", cur_node.left_child)
				self.current_node = cur_node.left_child
				self.current_from_date = lft_node.period_from_date
				self.current_to_date = lft_node.period_to_date
				self.fetch_or_calculate()
				self.save()
			else:
				frappe.msgprint(_("No more children on Left"))

	@frappe.whitelist()
	def bisect_right(self):
		"""Move the cursor to the right child (second half of the period)."""
		if self.current_node is not None:
			cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
			if cur_node.right_child is not None:
				rgt_node = frappe.get_doc("Bisect Nodes", cur_node.right_child)
				self.current_node = cur_node.right_child
				self.current_from_date = rgt_node.period_from_date
				self.current_to_date = rgt_node.period_to_date
				self.fetch_or_calculate()
				self.save()
			else:
				frappe.msgprint(_("No more children on Right"))

	@frappe.whitelist()
	def move_up(self):
		"""Move the cursor back to the parent node."""
		if self.current_node is not None:
			cur_node = frappe.get_doc("Bisect Nodes", self.current_node)
			if cur_node.root is not None:
				root = frappe.get_doc("Bisect Nodes", cur_node.root)
				self.current_node = cur_node.root
				self.current_from_date = root.period_from_date
				self.current_to_date = root.period_to_date
				self.fetch_or_calculate()
				self.save()
			else:
				frappe.msgprint(_("Reached Root"))

View File

@@ -0,0 +1,9 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests.utils import FrappeTestCase
class TestBisectAccountingStatements(FrappeTestCase):
	# Placeholder test case for the Bisect Accounting Statements DocType;
	# no unit tests have been implemented yet.
	pass

View File

@@ -0,0 +1,8 @@
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// frappe.ui.form.on("Bisect Nodes", {
// refresh(frm) {
// },
// });

View File

@@ -0,0 +1,97 @@
{
"actions": [],
"autoname": "autoincrement",
"creation": "2023-09-27 14:56:38.112462",
"default_view": "List",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"root",
"left_child",
"right_child",
"period_from_date",
"period_to_date",
"difference",
"balance_sheet_summary",
"profit_loss_summary",
"generated"
],
"fields": [
{
"fieldname": "root",
"fieldtype": "Link",
"label": "Root",
"options": "Bisect Nodes"
},
{
"fieldname": "left_child",
"fieldtype": "Link",
"label": "Left Child",
"options": "Bisect Nodes"
},
{
"fieldname": "right_child",
"fieldtype": "Link",
"label": "Right Child",
"options": "Bisect Nodes"
},
{
"fieldname": "period_from_date",
"fieldtype": "Datetime",
"label": "Period From Date"
},
{
"fieldname": "period_to_date",
"fieldtype": "Datetime",
"label": "Period To Date"
},
{
"fieldname": "difference",
"fieldtype": "Float",
"label": "Difference"
},
{
"fieldname": "balance_sheet_summary",
"fieldtype": "Float",
"label": "Balance Sheet Summary"
},
{
"fieldname": "profit_loss_summary",
"fieldtype": "Float",
"label": "Profit and Loss Summary"
},
{
"default": "0",
"fieldname": "generated",
"fieldtype": "Check",
"label": "Generated"
}
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2023-12-01 17:46:12.437996",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Bisect Nodes",
"naming_rule": "Autoincrement",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,29 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class BisectNodes(Document):
	"""One node of the bisection tree built by Bisect Accounting Statements:
	a date range plus cached report summaries and links to parent/children."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		balance_sheet_summary: DF.Float
		difference: DF.Float
		generated: DF.Check
		left_child: DF.Link | None
		name: DF.Int | None
		period_from_date: DF.Datetime | None
		period_to_date: DF.Datetime | None
		profit_loss_summary: DF.Float
		right_child: DF.Link | None
		root: DF.Link | None
	# end: auto-generated types

	# No controller behavior needed; all logic lives in
	# Bisect Accounting Statements.
	pass

View File

@@ -0,0 +1,9 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
from frappe.tests.utils import FrappeTestCase
class TestBisectNodes(FrappeTestCase):
	# Placeholder test case for the Bisect Nodes DocType; no unit tests yet.
	pass

View File

@@ -121,7 +121,8 @@ class PeriodClosingVoucher(AccountsController):
previous_fiscal_year = get_fiscal_year(last_year_closing, company=self.company, boolean=True) previous_fiscal_year = get_fiscal_year(last_year_closing, company=self.company, boolean=True)
if previous_fiscal_year and not frappe.db.exists( if previous_fiscal_year and not frappe.db.exists(
"GL Entry", {"posting_date": ("<=", last_year_closing), "company": self.company} "GL Entry",
{"posting_date": ("<=", last_year_closing), "company": self.company, "is_cancelled": 0},
): ):
return return

View File

@@ -0,0 +1,58 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2024-02-04 10:53:32.307930",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"doctype_name",
"docfield_name",
"no_of_docs",
"done"
],
"fields": [
{
"fieldname": "doctype_name",
"fieldtype": "Link",
"in_list_view": 1,
"label": "DocType",
"options": "DocType",
"read_only": 1,
"reqd": 1
},
{
"fieldname": "docfield_name",
"fieldtype": "Data",
"label": "DocField",
"read_only": 1
},
{
"fieldname": "no_of_docs",
"fieldtype": "Int",
"in_list_view": 1,
"label": "No of Docs",
"read_only": 1
},
{
"default": "0",
"fieldname": "done",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Done",
"read_only": 1
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2024-02-05 17:35:09.556054",
"modified_by": "Administrator",
"module": "Accounts",
"name": "Transaction Deletion Record Details",
"owner": "Administrator",
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View File

@@ -0,0 +1,26 @@
# Copyright (c) 2024, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class TransactionDeletionRecordDetails(Document):
	"""Child-table row of Transaction Deletion Record tracking, per DocType,
	how many documents are queued for deletion and whether it is done."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		docfield_name: DF.Data | None
		doctype_name: DF.Link
		done: DF.Check
		no_of_docs: DF.Int
		parent: DF.Data
		parentfield: DF.Data
		parenttype: DF.Data
	# end: auto-generated types

	# Pure data row; no controller behavior.
	pass

View File

@@ -97,11 +97,11 @@ def execute(filters=None):
chart = get_chart_data(filters, columns, asset, liability, equity) chart = get_chart_data(filters, columns, asset, liability, equity)
report_summary = get_report_summary( report_summary, primitive_summary = get_report_summary(
period_list, asset, liability, equity, provisional_profit_loss, currency, filters period_list, asset, liability, equity, provisional_profit_loss, currency, filters
) )
return columns, data, message, chart, report_summary return columns, data, message, chart, report_summary, primitive_summary
def get_provisional_profit_loss( def get_provisional_profit_loss(
@@ -217,7 +217,7 @@ def get_report_summary(
"datatype": "Currency", "datatype": "Currency",
"currency": currency, "currency": currency,
}, },
] ], (net_asset - net_liability + net_equity)
def get_chart_data(filters, columns, asset, liability, equity): def get_chart_data(filters, columns, asset, liability, equity):

View File

@@ -669,20 +669,20 @@ class GrossProfitGenerator(object):
elif row.sales_order and row.so_detail: elif row.sales_order and row.so_detail:
incoming_amount = self.get_buying_amount_from_so_dn(row.sales_order, row.so_detail, item_code) incoming_amount = self.get_buying_amount_from_so_dn(row.sales_order, row.so_detail, item_code)
if incoming_amount: if incoming_amount:
return incoming_amount return flt(row.qty) * incoming_amount
else: else:
return flt(row.qty) * self.get_average_buying_rate(row, item_code) return flt(row.qty) * self.get_average_buying_rate(row, item_code)
return flt(row.qty) * self.get_average_buying_rate(row, item_code) return flt(row.qty) * self.get_average_buying_rate(row, item_code)
def get_buying_amount_from_so_dn(self, sales_order, so_detail, item_code): def get_buying_amount_from_so_dn(self, sales_order, so_detail, item_code):
from frappe.query_builder.functions import Sum from frappe.query_builder.functions import Avg
delivery_note_item = frappe.qb.DocType("Delivery Note Item") delivery_note_item = frappe.qb.DocType("Delivery Note Item")
query = ( query = (
frappe.qb.from_(delivery_note_item) frappe.qb.from_(delivery_note_item)
.select(Sum(delivery_note_item.incoming_rate * delivery_note_item.stock_qty)) .select(Avg(delivery_note_item.incoming_rate))
.where(delivery_note_item.docstatus == 1) .where(delivery_note_item.docstatus == 1)
.where(delivery_note_item.item_code == item_code) .where(delivery_note_item.item_code == item_code)
.where(delivery_note_item.against_sales_order == sales_order) .where(delivery_note_item.against_sales_order == sales_order)
@@ -965,7 +965,7 @@ class GrossProfitGenerator(object):
& (sle.is_cancelled == 0) & (sle.is_cancelled == 0)
) )
.orderby(sle.item_code) .orderby(sle.item_code)
.orderby(sle.warehouse, sle.posting_date, sle.posting_time, sle.creation, order=Order.desc) .orderby(sle.warehouse, sle.posting_datetime, sle.creation, order=Order.desc)
.run(as_dict=True) .run(as_dict=True)
) )

View File

@@ -460,3 +460,95 @@ class TestGrossProfit(FrappeTestCase):
} }
gp_entry = [x for x in data if x.parent_invoice == sinv.name] gp_entry = [x for x in data if x.parent_invoice == sinv.name]
self.assertDictContainsSubset(expected_entry, gp_entry[0]) self.assertDictContainsSubset(expected_entry, gp_entry[0])
def test_different_rates_in_si_and_dn(self):
	from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order

	"""
	Test gp calculation when invoice and delivery note differ in qty and aren't connected
	SO -- INV
	|
	DN
	"""
	# Stock in 3 + 10 units of the test item at a valuation rate of 700.
	se = make_stock_entry(
		company=self.company,
		item_code=self.item,
		target=self.warehouse,
		qty=3,
		basic_rate=700,
		do_not_submit=True,
	)
	item = se.items[0]
	se.append(
		"items",
		{
			"item_code": item.item_code,
			"s_warehouse": item.s_warehouse,
			"t_warehouse": item.t_warehouse,
			"qty": 10,
			"basic_rate": 700,
			"conversion_factor": item.conversion_factor or 1.0,
			"transfer_qty": flt(item.qty) * (flt(item.conversion_factor) or 1.0),
			"serial_no": item.serial_no,
			"batch_no": item.batch_no,
			"cost_center": item.cost_center,
			"expense_account": item.expense_account,
		},
	)
	se = se.save().submit()

	# Sell 10 units at 800 via a Sales Order.
	so = make_sales_order(
		customer=self.customer,
		company=self.company,
		warehouse=self.warehouse,
		item=self.item,
		rate=800,
		qty=10,
		do_not_save=False,
		do_not_submit=False,
	)

	from erpnext.selling.doctype.sales_order.sales_order import (
		make_delivery_note,
		make_sales_invoice,
	)

	# Deliver in two parts (4 + 6 units) against the same SO.
	dn1 = make_delivery_note(so.name)
	dn1.items[0].qty = 4
	dn1.items[0].rate = 800
	dn1.save().submit()

	dn2 = make_delivery_note(so.name)
	dn2.items[0].qty = 6
	dn2.items[0].rate = 800
	dn2.save().submit()

	# Invoice only 4 units — intentionally not linked to a specific DN.
	sinv = make_sales_invoice(so.name)
	sinv.items[0].qty = 4
	sinv.items[0].rate = 800
	sinv.save().submit()

	filters = frappe._dict(
		company=self.company, from_date=nowdate(), to_date=nowdate(), group_by="Invoice"
	)
	columns, data = execute(filters=filters)

	# Buying amount should be qty * avg incoming rate (4 * 700 = 2800),
	# giving gross profit 3200 - 2800 = 400 (12.5%).
	expected_entry = {
		"parent_invoice": sinv.name,
		"currency": "INR",
		"sales_invoice": self.item,
		"customer": self.customer,
		"posting_date": frappe.utils.datetime.date.fromisoformat(nowdate()),
		"item_code": self.item,
		"item_name": self.item,
		"warehouse": "Stores - _GP",
		"qty": 4.0,
		"avg._selling_rate": 800.0,
		"valuation_rate": 700.0,
		"selling_amount": 3200.0,
		"buying_amount": 2800.0,
		"gross_profit": 400.0,
		"gross_profit_%": 12.5,
	}
	gp_entry = [x for x in data if x.parent_invoice == sinv.name]
	self.assertDictContainsSubset(expected_entry, gp_entry[0])

View File

@@ -26,3 +26,10 @@ frappe.require("assets/erpnext/js/financial_statements.js", function () {
default: 1, default: 1,
}); });
}); });
// Append the "Include Default FB Entries" toggle to the Profit and Loss
// Statement report filters (defaults to checked).
const pnl_filters = frappe.query_reports["Profit and Loss Statement"]["filters"];
pnl_filters.push({
	fieldname: "include_default_book_entries",
	label: __("Include Default FB Entries"),
	fieldtype: "Check",
	default: 1,
});

View File

@@ -66,11 +66,11 @@ def execute(filters=None):
currency = filters.presentation_currency or frappe.get_cached_value( currency = filters.presentation_currency or frappe.get_cached_value(
"Company", filters.company, "default_currency" "Company", filters.company, "default_currency"
) )
report_summary = get_report_summary( report_summary, primitive_summary = get_report_summary(
period_list, filters.periodicity, income, expense, net_profit_loss, currency, filters period_list, filters.periodicity, income, expense, net_profit_loss, currency, filters
) )
return columns, data, None, chart, report_summary return columns, data, None, chart, report_summary, primitive_summary
def get_report_summary( def get_report_summary(
@@ -112,7 +112,7 @@ def get_report_summary(
"datatype": "Currency", "datatype": "Currency",
"currency": currency, "currency": currency,
}, },
] ], net_profit
def get_net_profit_loss(income, expense, period_list, company, currency=None, consolidated=False): def get_net_profit_loss(income, expense, period_list, company, currency=None, consolidated=False):

View File

@@ -1372,8 +1372,7 @@ def sort_stock_vouchers_by_posting_date(
.select(sle.voucher_type, sle.voucher_no, sle.posting_date, sle.posting_time, sle.creation) .select(sle.voucher_type, sle.voucher_no, sle.posting_date, sle.posting_time, sle.creation)
.where((sle.is_cancelled == 0) & (sle.voucher_no.isin(voucher_nos))) .where((sle.is_cancelled == 0) & (sle.voucher_no.isin(voucher_nos)))
.groupby(sle.voucher_type, sle.voucher_no) .groupby(sle.voucher_type, sle.voucher_no)
.orderby(sle.posting_date) .orderby(sle.posting_datetime)
.orderby(sle.posting_time)
.orderby(sle.creation) .orderby(sle.creation)
).run(as_dict=True) ).run(as_dict=True)
sorted_vouchers = [(sle.voucher_type, sle.voucher_no) for sle in sles] sorted_vouchers = [(sle.voucher_type, sle.voucher_no) for sle in sles]

View File

@@ -299,7 +299,10 @@ period_closing_doctypes = [
doc_events = { doc_events = {
"*": { "*": {
"validate": "erpnext.support.doctype.service_level_agreement.service_level_agreement.apply", "validate": [
"erpnext.support.doctype.service_level_agreement.service_level_agreement.apply",
"erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.check_for_running_deletion_job",
],
}, },
tuple(period_closing_doctypes): { tuple(period_closing_doctypes): {
"validate": "erpnext.accounts.doctype.accounting_period.accounting_period.validate_accounting_period_on_doc_save", "validate": "erpnext.accounts.doctype.accounting_period.accounting_period.validate_accounting_period_on_doc_save",

View File

@@ -978,8 +978,7 @@ def get_valuation_rate(data):
frappe.qb.from_(sle) frappe.qb.from_(sle)
.select(sle.valuation_rate) .select(sle.valuation_rate)
.where((sle.item_code == item_code) & (sle.valuation_rate > 0) & (sle.is_cancelled == 0)) .where((sle.item_code == item_code) & (sle.valuation_rate > 0) & (sle.is_cancelled == 0))
.orderby(sle.posting_date, order=frappe.qb.desc) .orderby(sle.posting_datetime, order=frappe.qb.desc)
.orderby(sle.posting_time, order=frappe.qb.desc)
.orderby(sle.creation, order=frappe.qb.desc) .orderby(sle.creation, order=frappe.qb.desc)
.limit(1) .limit(1)
).run(as_dict=True) ).run(as_dict=True)

View File

@@ -375,7 +375,7 @@ class JobCard(Document):
{ {
"to_time": get_datetime(args.get("complete_time")), "to_time": get_datetime(args.get("complete_time")),
"operation": args.get("sub_operation"), "operation": args.get("sub_operation"),
"completed_qty": args.get("completed_qty") or 0.0, "completed_qty": (args.get("completed_qty") if last_row.idx == row.idx else 0.0),
} }
) )
elif args.get("start_time"): elif args.get("start_time"):

View File

@@ -58,7 +58,7 @@ def get_data(filters):
query_filters["creation"] = ("between", [filters.get("from_date"), filters.get("to_date")]) query_filters["creation"] = ("between", [filters.get("from_date"), filters.get("to_date")])
data = frappe.get_all( data = frappe.get_all(
"Work Order", fields=fields, filters=query_filters, order_by="planned_start_date asc", debug=1 "Work Order", fields=fields, filters=query_filters, order_by="planned_start_date asc"
) )
res = [] res = []

View File

@@ -274,6 +274,7 @@ erpnext.patches.v14_0.clear_reconciliation_values_from_singles
[post_model_sync] [post_model_sync]
execute:frappe.delete_doc_if_exists('Workspace', 'ERPNext Integrations Settings') execute:frappe.delete_doc_if_exists('Workspace', 'ERPNext Integrations Settings')
erpnext.patches.v14_0.update_posting_datetime_and_dropped_indexes #22-02-2024
erpnext.patches.v14_0.rename_ongoing_status_in_sla_documents erpnext.patches.v14_0.rename_ongoing_status_in_sla_documents
erpnext.patches.v14_0.delete_shopify_doctypes erpnext.patches.v14_0.delete_shopify_doctypes
erpnext.patches.v14_0.delete_healthcare_doctypes erpnext.patches.v14_0.delete_healthcare_doctypes
@@ -361,4 +362,4 @@ erpnext.stock.doctype.delivery_note.patches.drop_unused_return_against_index # 2
erpnext.patches.v14_0.set_maintain_stock_for_bom_item erpnext.patches.v14_0.set_maintain_stock_for_bom_item
execute:frappe.db.set_single_value('E Commerce Settings', 'show_actual_qty', 1) execute:frappe.db.set_single_value('E Commerce Settings', 'show_actual_qty', 1)
erpnext.patches.v14_0.delete_orphaned_asset_movement_item_records erpnext.patches.v14_0.delete_orphaned_asset_movement_item_records
erpnext.patches.v14_0.remove_cancelled_asset_capitalization_from_asset erpnext.patches.v14_0.remove_cancelled_asset_capitalization_from_asset

View File

@@ -0,0 +1,19 @@
import frappe
def execute():
	# Patch: backfill the new `posting_datetime` column on Stock Ledger Entry
	# from the legacy posting_date + posting_time pair, then drop the index
	# that the combined column replaces.
	frappe.db.sql(
		"""
		UPDATE `tabStock Ledger Entry`
		SET posting_datetime = DATE_FORMAT(timestamp(posting_date, posting_time), '%Y-%m-%d %H:%i:%s')
		"""
	)
	drop_indexes()
def drop_indexes():
	# Drop the (posting_date, posting_time) sort index, now superseded by
	# posting_datetime. No-op if the index was never created on this site.
	if not frappe.db.has_index("tabStock Ledger Entry", "posting_sort_index"):
		return

	frappe.db.sql_ddl("ALTER TABLE `tabStock Ledger Entry` DROP INDEX `posting_sort_index`")

View File

@@ -832,7 +832,8 @@
"label": "Purchase Order", "label": "Purchase Order",
"options": "Purchase Order", "options": "Purchase Order",
"print_hide": 1, "print_hide": 1,
"read_only": 1 "read_only": 1,
"search_index": 1
}, },
{ {
"fieldname": "column_break_89", "fieldname": "column_break_89",
@@ -875,7 +876,7 @@
"idx": 1, "idx": 1,
"istable": 1, "istable": 1,
"links": [], "links": [],
"modified": "2023-11-24 19:07:17.715231", "modified": "2024-03-21 18:15:56.625005",
"modified_by": "Administrator", "modified_by": "Administrator",
"module": "Selling", "module": "Selling",
"name": "Sales Order Item", "name": "Sales Order Item",

View File

@@ -259,6 +259,7 @@ erpnext.PointOfSale.PastOrderSummary = class {
subject: __(frm.meta.name) + ": " + doc.name, subject: __(frm.meta.name) + ": " + doc.name,
doctype: doc.doctype, doctype: doc.doctype,
name: doc.name, name: doc.name,
content: "",
send_email: 1, send_email: 1,
print_format, print_format,
sender_full_name: frappe.user.full_name(), sender_full_name: frappe.user.full_name(),

View File

@@ -197,6 +197,8 @@ def prepare_data(
): ):
details[p_key] += r.get(qty_or_amount_field, 0) details[p_key] += r.get(qty_or_amount_field, 0)
details[variance_key] = details.get(p_key) - details.get(target_key) details[variance_key] = details.get(p_key) - details.get(target_key)
else:
details[variance_key] = details.get(p_key) - details.get(target_key)
details["total_achieved"] += details.get(p_key) details["total_achieved"] += details.get(p_key)
details["total_variance"] = details.get("total_achieved") - details.get("total_target") details["total_variance"] = details.get("total_achieved") - details.get("total_target")
@@ -209,31 +211,32 @@ def get_actual_data(filters, sales_users_or_territory_data, date_field, sales_fi
parent_doc = frappe.qb.DocType(filters.get("doctype")) parent_doc = frappe.qb.DocType(filters.get("doctype"))
child_doc = frappe.qb.DocType(filters.get("doctype") + " Item") child_doc = frappe.qb.DocType(filters.get("doctype") + " Item")
sales_team = frappe.qb.DocType("Sales Team")
query = ( query = frappe.qb.from_(parent_doc).inner_join(child_doc).on(child_doc.parent == parent_doc.name)
frappe.qb.from_(parent_doc)
.inner_join(child_doc)
.on(child_doc.parent == parent_doc.name)
.inner_join(sales_team)
.on(sales_team.parent == parent_doc.name)
.select(
child_doc.item_group,
(child_doc.stock_qty * sales_team.allocated_percentage / 100).as_("stock_qty"),
(child_doc.base_net_amount * sales_team.allocated_percentage / 100).as_("base_net_amount"),
sales_team.sales_person,
parent_doc[date_field],
)
.where(
(parent_doc.docstatus == 1)
& (parent_doc[date_field].between(fiscal_year.year_start_date, fiscal_year.year_end_date))
)
)
if sales_field == "sales_person": if sales_field == "sales_person":
query = query.where(sales_team.sales_person.isin(sales_users_or_territory_data)) sales_team = frappe.qb.DocType("Sales Team")
stock_qty = child_doc.stock_qty * sales_team.allocated_percentage / 100
net_amount = child_doc.base_net_amount * sales_team.allocated_percentage / 100
sales_field_col = sales_team[sales_field]
query = query.inner_join(sales_team).on(sales_team.parent == parent_doc.name)
else: else:
query = query.where(parent_doc[sales_field].isin(sales_users_or_territory_data)) stock_qty = child_doc.stock_qty
net_amount = child_doc.base_net_amount
sales_field_col = parent_doc[sales_field]
query = query.select(
child_doc.item_group,
parent_doc[date_field],
(stock_qty).as_("stock_qty"),
(net_amount).as_("base_net_amount"),
sales_field_col,
).where(
(parent_doc.docstatus == 1)
& (parent_doc[date_field].between(fiscal_year.year_start_date, fiscal_year.year_end_date))
& (sales_field_col.isin(sales_users_or_territory_data))
)
return query.run(as_dict=True) return query.run(as_dict=True)

View File

@@ -0,0 +1,57 @@
import frappe
from frappe.tests.utils import FrappeTestCase
from frappe.utils import flt, nowdate
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_sales_invoice
from erpnext.accounts.utils import get_fiscal_year
from erpnext.selling.report.sales_partner_target_variance_based_on_item_group.sales_partner_target_variance_based_on_item_group import (
execute,
)
from erpnext.selling.report.sales_person_target_variance_based_on_item_group.test_sales_person_target_variance_based_on_item_group import (
create_sales_target_doc,
create_target_distribution,
)
class TestSalesPartnerTargetVarianceBasedOnItemGroup(FrappeTestCase):
	"""Report test: achieved target and variance for a Sales Partner.

	Reuses fixture helpers from the Sales Person variant of this report to
	build a Monthly Distribution and a Sales Partner with yearly targets.
	"""

	def setUp(self):
		# Fiscal year name for "today" — both the target and the invoice live in it.
		self.fiscal_year = get_fiscal_year(nowdate())[0]

	def tearDown(self):
		# Undo all fixtures created by the test.
		frappe.db.rollback()

	def test_achieved_target_and_variance_for_partner(self):
		# Create a Target Distribution
		distribution = create_target_distribution(self.fiscal_year)

		# Create Sales Partner with targets for the current fiscal year
		sales_partner = create_sales_target_doc(
			"Sales Partner", "partner_name", "Sales Partner 1", self.fiscal_year, distribution.name
		)

		# Create a Sales Invoice for the Partner (qty 20 counts toward the target)
		si = create_sales_invoice(
			rate=1000,
			qty=20,
			do_not_submit=True,
		)
		si.sales_partner = sales_partner
		si.commission_rate = 5
		si.submit()

		# Check Achieved Target and Variance for the Sales Partner;
		# execute() returns (columns, data, ...) — index [1] is the data rows.
		result = execute(
			frappe._dict(
				{
					"fiscal_year": self.fiscal_year,
					"doctype": "Sales Invoice",
					"period": "Yearly",
					"target_on": "Quantity",
				}
			)
		)[1]
		row = frappe._dict(result[0])
		# Total target 50 (from the shared fixture), achieved 20, variance -30.
		self.assertSequenceEqual(
			[flt(value, 2) for value in (row.total_target, row.total_achieved, row.total_variance)],
			[50, 20, -30],
		)

View File

@@ -18,17 +18,17 @@ class TestSalesPersonTargetVarianceBasedOnItemGroup(FrappeTestCase):
def test_achieved_target_and_variance(self): def test_achieved_target_and_variance(self):
# Create a Target Distribution # Create a Target Distribution
distribution = frappe.new_doc("Monthly Distribution") distribution = create_target_distribution(self.fiscal_year)
distribution.distribution_id = "Target Report Distribution"
distribution.fiscal_year = self.fiscal_year
distribution.get_months()
distribution.insert()
# Create sales people with targets # Create sales people with targets for the current fiscal year
person_1 = create_sales_person_with_target("Sales Person 1", self.fiscal_year, distribution.name) person_1 = create_sales_target_doc(
person_2 = create_sales_person_with_target("Sales Person 2", self.fiscal_year, distribution.name) "Sales Person", "sales_person_name", "Sales Person 1", self.fiscal_year, distribution.name
)
person_2 = create_sales_target_doc(
"Sales Person", "sales_person_name", "Sales Person 2", self.fiscal_year, distribution.name
)
# Create a Sales Order with 50-50 contribution # Create a Sales Order with 50-50 contribution between both Sales people
so = make_sales_order( so = make_sales_order(
rate=1000, rate=1000,
qty=20, qty=20,
@@ -69,10 +69,20 @@ class TestSalesPersonTargetVarianceBasedOnItemGroup(FrappeTestCase):
) )
def create_sales_person_with_target(sales_person_name, fiscal_year, distribution_id): def create_target_distribution(fiscal_year):
sales_person = frappe.new_doc("Sales Person") distribution = frappe.new_doc("Monthly Distribution")
sales_person.sales_person_name = sales_person_name distribution.distribution_id = "Target Report Distribution"
sales_person.append( distribution.fiscal_year = fiscal_year
distribution.get_months()
return distribution.insert()
def create_sales_target_doc(
sales_field_dt, sales_field_name, sales_field_value, fiscal_year, distribution_id
):
sales_target_doc = frappe.new_doc(sales_field_dt)
sales_target_doc.set(sales_field_name, sales_field_value)
sales_target_doc.append(
"targets", "targets",
{ {
"fiscal_year": fiscal_year, "fiscal_year": fiscal_year,
@@ -81,4 +91,6 @@ def create_sales_person_with_target(sales_person_name, fiscal_year, distribution
"distribution_id": distribution_id, "distribution_id": distribution_id,
}, },
) )
return sales_person.insert() if sales_field_dt == "Sales Partner":
sales_target_doc.commission_rate = 5
return sales_target_doc.insert()

228
erpnext/setup/demo.py Normal file
View File

@@ -0,0 +1,228 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import json
import os
from random import randint
import frappe
from frappe import _
from frappe.utils import add_days, getdate
from erpnext.accounts.doctype.payment_entry.payment_entry import get_payment_entry
from erpnext.accounts.utils import get_fiscal_year
from erpnext.buying.doctype.purchase_order.purchase_order import make_purchase_invoice
from erpnext.selling.doctype.sales_order.sales_order import make_sales_invoice
from erpnext.setup.setup_wizard.operations.install_fixtures import create_bank_account
def setup_demo_data():
	"""Create the demo company, its master data and demo transactions.

	Emits telemetry events at start, completion and failure; on failure the
	error is logged and the exception is re-raised.
	"""
	from frappe.utils.telemetry import capture

	capture("demo_data_creation_started", "erpnext")
	try:
		company = create_demo_company()
		process_masters()
		make_transactions(company)
		# bootinfo is cached; cleared here — presumably stale after the
		# default-company switch (TODO confirm).
		frappe.cache.delete_keys("bootinfo")
		frappe.publish_realtime("demo_data_complete")
	except Exception:
		frappe.log_error("Failed to create demo data")
		capture("demo_data_creation_failed", "erpnext", properties={"exception": frappe.get_traceback()})
		raise
	capture("demo_data_creation_completed", "erpnext")
@frappe.whitelist()
def clear_demo_data():
	"""Erase all demo data: transactions, masters, then the demo company itself.

	Whitelisted for the UI; restricted to System Managers. On any failure the
	transaction is rolled back and the user is asked to delete manually.
	"""
	from frappe.utils.telemetry import capture

	frappe.only_for("System Manager")
	capture("demo_data_erased", "erpnext")
	try:
		company = frappe.db.get_single_value("Global Defaults", "demo_company")
		create_transaction_deletion_record(company)
		clear_masters()
		delete_company(company)
		# Restore the original default company once the demo one is gone.
		default_company = frappe.db.get_single_value("Global Defaults", "default_company")
		frappe.db.set_default("company", default_company)
	except Exception:
		frappe.db.rollback()
		frappe.log_error("Failed to erase demo data")
		frappe.throw(
			_("Failed to erase demo data, please delete the demo company manually."),
			title=_("Could Not Delete Demo Data"),
		)
def create_demo_company():
	"""Clone key settings of the first existing Company into a new "(Demo)" company.

	Marks the new company as the demo company in Global Defaults, makes it the
	session default, gives it a default bank account, and returns its name.
	"""
	company = frappe.db.get_all("Company")[0].name
	company_doc = frappe.get_doc("Company", company)

	# Make a dummy company modelled on the existing one
	new_company = frappe.new_doc("Company")
	new_company.company_name = company_doc.company_name + " (Demo)"
	new_company.abbr = company_doc.abbr + "D"
	new_company.enable_perpetual_inventory = 1
	new_company.default_currency = company_doc.default_currency
	new_company.country = company_doc.country
	new_company.chart_of_accounts_based_on = "Standard Template"
	new_company.chart_of_accounts = company_doc.chart_of_accounts
	new_company.insert()

	# Set the Demo Company as the demo flag and as the session default company
	frappe.db.set_single_value("Global Defaults", "demo_company", new_company.name)
	frappe.db.set_default("company", new_company.name)
	bank_account = create_bank_account({"company_name": new_company.name})
	frappe.db.set_value("Company", new_company.name, "default_bank_account", bank_account.name)

	return new_company.name
def process_masters():
	"""Insert every master record listed by the app's ``demo_master_doctypes`` hooks."""
	for doctype in frappe.get_hooks("demo_master_doctypes"):
		raw = read_data_file_using_hooks(doctype)
		if not raw:
			continue
		for record in json.loads(raw):
			create_demo_record(record)
def create_demo_record(doctype):
	"""Insert one demo document; ``doctype`` is a dict that includes its "doctype" key."""
	doc = frappe.get_doc(doctype)
	doc.insert(ignore_permissions=True)
def make_transactions(company):
	"""Create demo transactions for ``company`` from the ``demo_transaction_doctypes`` hooks.

	Negative stock is allowed while the documents are submitted and switched
	back off afterwards; a subset of orders is then converted to invoices.
	"""
	frappe.db.set_single_value("Stock Settings", "allow_negative_stock", 1)

	from erpnext.accounts.utils import FiscalYearError

	try:
		# Start of the current fiscal year anchors the random posting dates.
		start_date = get_fiscal_year(date=getdate())[1]
	except FiscalYearError:
		# User might have setup fiscal year for previous or upcoming years
		active_fiscal_years = frappe.db.get_all("Fiscal Year", filters={"disabled": 0}, as_list=1)
		if active_fiscal_years:
			start_date = frappe.db.get_value("Fiscal Year", active_fiscal_years[0][0], "year_start_date")
		else:
			frappe.throw(_("There are no active Fiscal Years for which Demo Data can be generated."))

	for doctype in frappe.get_hooks("demo_transaction_doctypes"):
		data = read_data_file_using_hooks(doctype)
		if data:
			for item in json.loads(data):
				create_transaction(item, company, start_date)

	convert_order_to_invoices()
	frappe.db.set_single_value("Stock Settings", "allow_negative_stock", 0)
def create_transaction(doctype, company, start_date):
	"""Create and submit one demo transaction.

	``doctype`` is a dict fixture (including its "doctype" key) onto which the
	company, dates and warehouse are stamped before insertion.
	NOTE(review): this mutates the caller's dict in place via ``update``.
	"""
	document_type = doctype.get("doctype")
	warehouse = get_warehouse(company)

	# Purchase Orders land in the first ~25 days of the fiscal year; everything
	# else between day 31 and 350 — presumably so purchases precede the
	# sales-side documents (TODO confirm).
	if document_type == "Purchase Order":
		posting_date = get_random_date(start_date, 1, 25)
	else:
		posting_date = get_random_date(start_date, 31, 350)

	doctype.update(
		{
			"company": company,
			"set_posting_time": 1,
			"transaction_date": posting_date,
			"schedule_date": posting_date,
			"delivery_date": posting_date,
			"set_warehouse": warehouse,
		}
	)

	doc = frappe.get_doc(doctype)
	doc.save(ignore_permissions=True)
	doc.submit()
def convert_order_to_invoices():
	"""Bill (and partially pay) a handful of submitted demo orders.

	Only the first 6 submitted orders of each type are invoiced, and only every
	other invoice gets a Payment Entry, so some documents remain outstanding.
	"""
	for document in ["Purchase Order", "Sales Order"]:
		# Keep some orders intentionally unbilled/unpaid
		for i, order in enumerate(
			frappe.db.get_all(
				document, filters={"docstatus": 1}, fields=["name", "transaction_date"], limit=6
			)
		):
			if document == "Purchase Order":
				invoice = make_purchase_invoice(order.name)
			elif document == "Sales Order":
				invoice = make_sales_invoice(order.name)

			# Backdate the invoice to the order's own transaction date.
			invoice.set_posting_time = 1
			invoice.posting_date = order.transaction_date
			invoice.due_date = order.transaction_date
			invoice.bill_date = order.transaction_date

			if invoice.get("payment_schedule"):
				invoice.payment_schedule[0].due_date = order.transaction_date

			invoice.update_stock = 1
			invoice.submit()

			# Pay every second invoice (odd enumerate index).
			if i % 2 != 0:
				payment = get_payment_entry(invoice.doctype, invoice.name)
				payment.posting_date = order.transaction_date
				payment.reference_no = invoice.name
				payment.submit()
def get_random_date(start_date, start_range, end_range):
	"""Return a date a random number of days (inclusive bounds) after ``start_date``."""
	offset = randint(start_range, end_range)
	return add_days(start_date, offset)
def create_transaction_deletion_record(company):
	"""Create, submit and immediately run a Transaction Deletion Record for ``company``."""
	tdr = frappe.new_doc("Transaction Deletion Record")
	tdr.company = company
	# Run all deletion tasks synchronously rather than via background jobs.
	tdr.process_in_single_transaction = True
	tdr.save(ignore_permissions=True)
	tdr.submit()
	tdr.start_deletion_tasks()
def clear_masters():
	"""Delete demo master records in reverse creation order (respects link order)."""
	for doctype in reversed(frappe.get_hooks("demo_master_doctypes")):
		raw = read_data_file_using_hooks(doctype)
		if not raw:
			continue
		for record in json.loads(raw):
			clear_demo_record(record)
def clear_demo_record(document):
	"""Delete the document described by ``document`` (a dict with a "doctype" key).

	Fields that are not real columns of the doctype are discarded before the
	remaining key/value pairs are used as filters to locate the document.
	"""
	document_type = document.get("doctype")
	valid_columns = frappe.get_meta(document_type).get_valid_columns()

	# Build the filters as a new dict instead of deleting/popping keys from the
	# caller's dict in place (the original mutated ``document``).
	filters = {
		key: value
		for key, value in document.items()
		if key != "doctype" and key in valid_columns
	}

	doc = frappe.get_doc(document_type, filters)
	doc.delete(ignore_permissions=True)
def delete_company(company):
	"""Clear the demo-company flag from Global Defaults and delete the Company doc."""
	frappe.db.set_single_value("Global Defaults", "demo_company", "")
	frappe.delete_doc("Company", company, ignore_permissions=True)
def read_data_file_using_hooks(doctype):
	"""Return the raw contents of ``demo_data/<doctype>.json`` next to this module.

	Raises FileNotFoundError if no fixture file exists for ``doctype``.
	"""
	path = os.path.join(os.path.dirname(__file__), "demo_data")
	# Read the JSON fixture with an explicit encoding instead of relying on
	# the platform default (which may not be UTF-8).
	with open(os.path.join(path, doctype + ".json"), encoding="utf-8") as f:
		data = f.read()
	return data
def get_warehouse(company):
	"""Return the name of a randomly chosen non-group warehouse of ``company``."""
	warehouses = frappe.db.get_all("Warehouse", {"company": company, "is_group": 0})
	# Pick uniformly from whatever exists: the original randint(0, 3) assumed
	# at least four warehouses and raised IndexError on smaller setups.
	return warehouses[randint(0, len(warehouses) - 1)].name

View File

@@ -169,43 +169,49 @@ frappe.ui.form.on("Company", {
}, },
delete_company_transactions: function (frm) { delete_company_transactions: function (frm) {
frappe.verify_password(function () { frappe.call({
var d = frappe.prompt( method: "erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record.is_deletion_doc_running",
{ args: {
fieldtype: "Data", company: frm.doc.name,
fieldname: "company_name", },
label: __("Please enter the company name to confirm"), freeze: true,
reqd: 1, callback: function (r) {
description: __( if (!r.exc) {
"Please make sure you really want to delete all the transactions for this company. Your master data will remain as it is. This action cannot be undone." frappe.verify_password(function () {
), var d = frappe.prompt(
}, {
function (data) { fieldtype: "Data",
if (data.company_name !== frm.doc.name) { fieldname: "company_name",
frappe.msgprint(__("Company name not same")); label: __("Please enter the company name to confirm"),
return; reqd: 1,
} description: __(
frappe.call({ "Please make sure you really want to delete all the transactions for this company. Your master data will remain as it is. This action cannot be undone."
method: "erpnext.setup.doctype.company.company.create_transaction_deletion_request", ),
args: { },
company: data.company_name, function (data) {
}, if (data.company_name !== frm.doc.name) {
freeze: true, frappe.msgprint(__("Company name not same"));
callback: function (r, rt) { return;
if (!r.exc) }
frappe.msgprint( frappe.call({
__("Successfully deleted all transactions related to this company!") method: "erpnext.setup.doctype.company.company.create_transaction_deletion_request",
); args: {
}, company: data.company_name,
onerror: function () { },
frappe.msgprint(__("Wrong Password")); freeze: true,
}, callback: function (r, rt) {},
onerror: function () {
frappe.msgprint(__("Wrong Password"));
},
});
},
__("Delete all the Transactions for this Company"),
__("Delete")
);
d.get_primary_btn().addClass("btn-danger");
}); });
}, }
__("Delete all the Transactions for this Company"), },
__("Delete")
);
d.get_primary_btn().addClass("btn-danger");
}); });
}, },
}); });

View File

@@ -11,7 +11,7 @@ from frappe.cache_manager import clear_defaults_cache
from frappe.contacts.address_and_contact import load_address_and_contact from frappe.contacts.address_and_contact import load_address_and_contact
from frappe.custom.doctype.property_setter.property_setter import make_property_setter from frappe.custom.doctype.property_setter.property_setter import make_property_setter
from frappe.desk.page.setup_wizard.setup_wizard import make_records from frappe.desk.page.setup_wizard.setup_wizard import make_records
from frappe.utils import cint, formatdate, get_timestamp, today from frappe.utils import cint, formatdate, get_link_to_form, get_timestamp, today
from frappe.utils.nestedset import NestedSet, rebuild_tree from frappe.utils.nestedset import NestedSet, rebuild_tree
from erpnext.accounts.doctype.account.account import get_account_currency from erpnext.accounts.doctype.account.account import get_account_currency
@@ -812,6 +812,19 @@ def get_default_company_address(name, sort_key="is_primary_address", existing_ad
@frappe.whitelist() @frappe.whitelist()
def create_transaction_deletion_request(company): def create_transaction_deletion_request(company):
from erpnext.setup.doctype.transaction_deletion_record.transaction_deletion_record import (
is_deletion_doc_running,
)
is_deletion_doc_running(company)
tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company}) tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company})
tdr.insert()
tdr.submit() tdr.submit()
tdr.start_deletion_tasks()
frappe.msgprint(
_("A Transaction Deletion Document: {0} is triggered for {0}").format(
get_link_to_form("Transaction Deletion Record", tdr.name)
),
frappe.bold(company),
)

View File

@@ -28,6 +28,7 @@ class TestTransactionDeletionRecord(unittest.TestCase):
for i in range(5): for i in range(5):
create_task("Dunder Mifflin Paper Co") create_task("Dunder Mifflin Paper Co")
tdr = create_transaction_deletion_request("Dunder Mifflin Paper Co") tdr = create_transaction_deletion_request("Dunder Mifflin Paper Co")
tdr.reload()
for doctype in tdr.doctypes: for doctype in tdr.doctypes:
if doctype.doctype_name == "Task": if doctype.doctype_name == "Task":
self.assertEqual(doctype.no_of_docs, 5) self.assertEqual(doctype.no_of_docs, 5)
@@ -49,7 +50,9 @@ def create_company(company_name):
def create_transaction_deletion_request(company): def create_transaction_deletion_request(company):
tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company}) tdr = frappe.get_doc({"doctype": "Transaction Deletion Record", "company": company})
tdr.insert() tdr.insert()
tdr.process_in_single_transaction = True
tdr.submit() tdr.submit()
tdr.start_deletion_tasks()
return tdr return tdr

View File

@@ -10,20 +10,24 @@ frappe.ui.form.on("Transaction Deletion Record", {
callback: function (r) { callback: function (r) {
doctypes_to_be_ignored_array = r.message; doctypes_to_be_ignored_array = r.message;
populate_doctypes_to_be_ignored(doctypes_to_be_ignored_array, frm); populate_doctypes_to_be_ignored(doctypes_to_be_ignored_array, frm);
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false);
frm.refresh_field("doctypes_to_be_ignored"); frm.refresh_field("doctypes_to_be_ignored");
}, },
}); });
} }
frm.get_field("doctypes_to_be_ignored").grid.cannot_add_rows = true;
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false);
frm.refresh_field("doctypes_to_be_ignored");
}, },
refresh: function (frm) { refresh: function (frm) {
frm.fields_dict["doctypes_to_be_ignored"].grid.set_column_disp("no_of_docs", false); if (frm.doc.docstatus == 1 && ["Queued", "Failed"].find((x) => x == frm.doc.status)) {
frm.refresh_field("doctypes_to_be_ignored"); let execute_btn = frm.doc.status == "Queued" ? __("Start Deletion") : __("Retry");
frm.add_custom_button(execute_btn, () => {
// Entry point for chain of events
frm.call({
method: "start_deletion_tasks",
doc: frm.doc,
});
});
}
}, },
}); });

View File

@@ -7,10 +7,21 @@
"engine": "InnoDB", "engine": "InnoDB",
"field_order": [ "field_order": [
"company", "company",
"section_break_qpwb",
"status",
"error_log",
"tasks_section",
"delete_bin_data",
"delete_leads_and_addresses",
"reset_company_default_values",
"clear_notifications",
"initialize_doctypes_table",
"delete_transactions",
"section_break_tbej",
"doctypes", "doctypes",
"doctypes_to_be_ignored", "doctypes_to_be_ignored",
"amended_from", "amended_from",
"status" "process_in_single_transaction"
], ],
"fields": [ "fields": [
{ {
@@ -25,14 +36,16 @@
"fieldname": "doctypes", "fieldname": "doctypes",
"fieldtype": "Table", "fieldtype": "Table",
"label": "Summary", "label": "Summary",
"options": "Transaction Deletion Record Item", "no_copy": 1,
"options": "Transaction Deletion Record Details",
"read_only": 1 "read_only": 1
}, },
{ {
"fieldname": "doctypes_to_be_ignored", "fieldname": "doctypes_to_be_ignored",
"fieldtype": "Table", "fieldtype": "Table",
"label": "Excluded DocTypes", "label": "Excluded DocTypes",
"options": "Transaction Deletion Record Item" "options": "Transaction Deletion Record Item",
"read_only": 1
}, },
{ {
"fieldname": "amended_from", "fieldname": "amended_from",
@@ -46,18 +59,96 @@
{ {
"fieldname": "status", "fieldname": "status",
"fieldtype": "Select", "fieldtype": "Select",
"hidden": 1,
"label": "Status", "label": "Status",
"options": "Draft\nCompleted" "no_copy": 1,
"options": "Queued\nRunning\nFailed\nCompleted\nCancelled",
"read_only": 1
},
{
"fieldname": "section_break_tbej",
"fieldtype": "Section Break"
},
{
"fieldname": "tasks_section",
"fieldtype": "Section Break",
"label": "Tasks"
},
{
"default": "0",
"fieldname": "delete_bin_data",
"fieldtype": "Check",
"label": "Delete Bins",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "delete_leads_and_addresses",
"fieldtype": "Check",
"label": "Delete Leads and Addresses",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "clear_notifications",
"fieldtype": "Check",
"label": "Clear Notifications",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "reset_company_default_values",
"fieldtype": "Check",
"label": "Reset Company Default Values",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "delete_transactions",
"fieldtype": "Check",
"label": "Delete Transactions",
"no_copy": 1,
"read_only": 1
},
{
"default": "0",
"fieldname": "initialize_doctypes_table",
"fieldtype": "Check",
"label": "Initialize Summary Table",
"no_copy": 1,
"read_only": 1
},
{
"depends_on": "eval: doc.error_log",
"fieldname": "error_log",
"fieldtype": "Long Text",
"label": "Error Log"
},
{
"fieldname": "section_break_qpwb",
"fieldtype": "Section Break"
},
{
"default": "0",
"fieldname": "process_in_single_transaction",
"fieldtype": "Check",
"hidden": 1,
"label": "Process in Single Transaction",
"no_copy": 1,
"read_only": 1
} }
], ],
"index_web_pages_for_search": 1, "index_web_pages_for_search": 1,
"is_submittable": 1, "is_submittable": 1,
"links": [], "links": [],
"modified": "2021-08-04 20:15:59.071493", "modified": "2024-03-21 10:29:19.456413",
"modified_by": "Administrator", "modified_by": "Administrator",
"module": "Setup", "module": "Setup",
"name": "Transaction Deletion Record", "name": "Transaction Deletion Record",
"naming_rule": "Expression (old style)",
"owner": "Administrator", "owner": "Administrator",
"permissions": [ "permissions": [
{ {
@@ -76,5 +167,6 @@
], ],
"sort_field": "modified", "sort_field": "modified",
"sort_order": "DESC", "sort_order": "DESC",
"states": [],
"track_changes": 1 "track_changes": 1
} }

View File

@@ -1,18 +1,31 @@
# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and contributors # Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt # For license information, please see license.txt
from collections import OrderedDict
import frappe import frappe
from frappe import _, qb from frappe import _, qb
from frappe.desk.notifications import clear_notifications from frappe.desk.notifications import clear_notifications
from frappe.model.document import Document from frappe.model.document import Document
from frappe.utils import cint, create_batch from frappe.utils import cint, comma_and, create_batch, get_link_to_form
from frappe.utils.background_jobs import create_job_id, is_job_enqueued
class TransactionDeletionRecord(Document): class TransactionDeletionRecord(Document):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(TransactionDeletionRecord, self).__init__(*args, **kwargs) super(TransactionDeletionRecord, self).__init__(*args, **kwargs)
self.batch_size = 5000 self.batch_size = 5000
# Tasks are listed by their execution order
self.task_to_internal_method_map = OrderedDict(
{
"Delete Bins": "delete_bins",
"Delete Leads and Addresses": "delete_lead_addresses",
"Reset Company Values": "reset_company_values",
"Clear Notifications": "delete_notifications",
"Initialize Summary Table": "initialize_doctypes_to_be_deleted_table",
"Delete Transactions": "delete_company_transactions",
}
)
def validate(self): def validate(self):
frappe.only_for("System Manager") frappe.only_for("System Manager")
@@ -29,104 +42,266 @@ class TransactionDeletionRecord(Document):
title=_("Not Allowed"), title=_("Not Allowed"),
) )
def generate_job_name_for_task(self, task=None):
method = self.task_to_internal_method_map[task]
return f"{self.name}_{method}"
def generate_job_name_for_next_tasks(self, task=None):
job_names = []
current_task_idx = list(self.task_to_internal_method_map).index(task)
for idx, task in enumerate(self.task_to_internal_method_map.keys(), 0):
# generate job_name for next tasks
if idx > current_task_idx:
job_names.append(self.generate_job_name_for_task(task))
return job_names
def generate_job_name_for_all_tasks(self):
job_names = []
for task in self.task_to_internal_method_map.keys():
job_names.append(self.generate_job_name_for_task(task))
return job_names
def before_submit(self): def before_submit(self):
if queued_docs := frappe.db.get_all(
"Transaction Deletion Record",
filters={"company": self.company, "status": ("in", ["Running", "Queued"]), "docstatus": 1},
pluck="name",
):
frappe.throw(
_(
"Cannot enqueue multi docs for one company. {0} is already queued/running for company: {1}"
).format(
comma_and([get_link_to_form("Transaction Deletion Record", x) for x in queued_docs]),
frappe.bold(self.company),
)
)
if not self.doctypes_to_be_ignored: if not self.doctypes_to_be_ignored:
self.populate_doctypes_to_be_ignored_table() self.populate_doctypes_to_be_ignored_table()
self.delete_bins() def reset_task_flags(self):
self.delete_lead_addresses() self.clear_notifications = 0
self.reset_company_values() self.delete_bin_data = 0
clear_notifications() self.delete_leads_and_addresses = 0
self.delete_company_transactions() self.delete_transactions = 0
self.initialize_doctypes_table = 0
self.reset_company_default_values = 0
def before_save(self):
self.status = ""
self.doctypes.clear()
self.reset_task_flags()
def on_submit(self):
self.db_set("status", "Queued")
def on_cancel(self):
self.db_set("status", "Cancelled")
def enqueue_task(self, task: str | None = None):
if task and task in self.task_to_internal_method_map:
# make sure that none of next tasks are already running
job_names = self.generate_job_name_for_next_tasks(task=task)
self.validate_running_task_for_doc(job_names=job_names)
# Generate Job Id to uniquely identify each task for this document
job_id = self.generate_job_name_for_task(task)
if self.process_in_single_transaction:
self.execute_task(task_to_execute=task)
else:
frappe.enqueue(
"frappe.utils.background_jobs.run_doc_method",
doctype=self.doctype,
name=self.name,
doc_method="execute_task",
job_id=job_id,
queue="long",
enqueue_after_commit=True,
task_to_execute=task,
)
def execute_task(self, task_to_execute: str | None = None):
if task_to_execute:
method = self.task_to_internal_method_map[task_to_execute]
if task := getattr(self, method, None):
try:
task()
except Exception as err:
frappe.db.rollback()
traceback = frappe.get_traceback(with_context=True)
if traceback:
message = "Traceback: <br>" + traceback
frappe.db.set_value(self.doctype, self.name, "error_log", message)
frappe.db.set_value(self.doctype, self.name, "status", "Failed")
def delete_notifications(self):
self.validate_doc_status()
if not self.clear_notifications:
clear_notifications()
self.db_set("clear_notifications", 1)
self.enqueue_task(task="Initialize Summary Table")
def populate_doctypes_to_be_ignored_table(self): def populate_doctypes_to_be_ignored_table(self):
doctypes_to_be_ignored_list = get_doctypes_to_be_ignored() doctypes_to_be_ignored_list = get_doctypes_to_be_ignored()
for doctype in doctypes_to_be_ignored_list: for doctype in doctypes_to_be_ignored_list:
self.append("doctypes_to_be_ignored", {"doctype_name": doctype}) self.append("doctypes_to_be_ignored", {"doctype_name": doctype})
def delete_bins(self): def validate_running_task_for_doc(self, job_names: list = None):
frappe.db.sql( # at most only one task should be runnning
"""delete from `tabBin` where warehouse in running_tasks = []
(select name from tabWarehouse where company=%s)""", for x in job_names:
self.company, if is_job_enqueued(x):
) running_tasks.append(create_job_id(x))
def delete_lead_addresses(self): if running_tasks:
"""Delete addresses to which leads are linked""" frappe.throw(
leads = frappe.get_all("Lead", filters={"company": self.company}) _("{0} is already running for {1}").format(
leads = ["'%s'" % row.get("name") for row in leads] comma_and([get_link_to_form("RQ Job", x) for x in running_tasks]), self.name
addresses = []
if leads:
addresses = frappe.db.sql_list(
"""select parent from `tabDynamic Link` where link_name
in ({leads})""".format(
leads=",".join(leads)
) )
) )
if addresses: def validate_doc_status(self):
addresses = ["%s" % frappe.db.escape(addr) for addr in addresses] if self.status != "Running":
frappe.throw(
frappe.db.sql( _("{0} is not running. Cannot trigger events for this Document").format(
"""delete from `tabAddress` where name in ({addresses}) and get_link_to_form("Transaction Deletion Record", self.name)
name not in (select distinct dl1.parent from `tabDynamic Link` dl1
inner join `tabDynamic Link` dl2 on dl1.parent=dl2.parent
and dl1.link_doctype<>dl2.link_doctype)""".format(
addresses=",".join(addresses)
)
) )
)
frappe.db.sql( @frappe.whitelist()
"""delete from `tabDynamic Link` where link_doctype='Lead' def start_deletion_tasks(self):
and parenttype='Address' and link_name in ({leads})""".format( # This method is the entry point for the chain of events that follow
self.db_set("status", "Running")
self.enqueue_task(task="Delete Bins")
def delete_bins(self):
self.validate_doc_status()
if not self.delete_bin_data:
frappe.db.sql(
"""delete from `tabBin` where warehouse in
(select name from tabWarehouse where company=%s)""",
self.company,
)
self.db_set("delete_bin_data", 1)
self.enqueue_task(task="Delete Leads and Addresses")
def delete_lead_addresses(self):
"""Delete addresses to which leads are linked"""
self.validate_doc_status()
if not self.delete_leads_and_addresses:
leads = frappe.get_all("Lead", filters={"company": self.company})
leads = ["'%s'" % row.get("name") for row in leads]
addresses = []
if leads:
addresses = frappe.db.sql_list(
"""select parent from `tabDynamic Link` where link_name
in ({leads})""".format(
leads=",".join(leads) leads=",".join(leads)
) )
) )
frappe.db.sql( if addresses:
"""update `tabCustomer` set lead_name=NULL where lead_name in ({leads})""".format( addresses = ["%s" % frappe.db.escape(addr) for addr in addresses]
leads=",".join(leads)
frappe.db.sql(
"""delete from `tabAddress` where name in ({addresses}) and
name not in (select distinct dl1.parent from `tabDynamic Link` dl1
inner join `tabDynamic Link` dl2 on dl1.parent=dl2.parent
and dl1.link_doctype<>dl2.link_doctype)""".format(
addresses=",".join(addresses)
)
)
frappe.db.sql(
"""delete from `tabDynamic Link` where link_doctype='Lead'
and parenttype='Address' and link_name in ({leads})""".format(
leads=",".join(leads)
)
)
frappe.db.sql(
"""update `tabCustomer` set lead_name=NULL where lead_name in ({leads})""".format(
leads=",".join(leads)
)
) )
) self.db_set("delete_leads_and_addresses", 1)
self.enqueue_task(task="Reset Company Values")
def reset_company_values(self): def reset_company_values(self):
company_obj = frappe.get_doc("Company", self.company) self.validate_doc_status()
company_obj.total_monthly_sales = 0 if not self.reset_company_default_values:
company_obj.sales_monthly_history = None company_obj = frappe.get_doc("Company", self.company)
company_obj.save() company_obj.total_monthly_sales = 0
company_obj.sales_monthly_history = None
company_obj.save()
self.db_set("reset_company_default_values", 1)
self.enqueue_task(task="Clear Notifications")
def initialize_doctypes_to_be_deleted_table(self):
self.validate_doc_status()
if not self.initialize_doctypes_table:
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
tables = self.get_all_child_doctypes()
for docfield in docfields:
if docfield["parent"] != self.doctype:
no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield["parent"], docfield["fieldname"]
)
if no_of_docs > 0:
# Initialize
self.populate_doctypes_table(tables, docfield["parent"], docfield["fieldname"], 0)
self.db_set("initialize_doctypes_table", 1)
self.enqueue_task(task="Delete Transactions")
def delete_company_transactions(self): def delete_company_transactions(self):
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list() self.validate_doc_status()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list) if not self.delete_transactions:
doctypes_to_be_ignored_list = self.get_doctypes_to_be_ignored_list()
docfields = self.get_doctypes_with_company_field(doctypes_to_be_ignored_list)
tables = self.get_all_child_doctypes() tables = self.get_all_child_doctypes()
for docfield in docfields: for docfield in self.doctypes:
if docfield["parent"] != self.doctype: if docfield.doctype_name != self.doctype and not docfield.done:
no_of_docs = self.get_number_of_docs_linked_with_specified_company( no_of_docs = self.get_number_of_docs_linked_with_specified_company(
docfield["parent"], docfield["fieldname"] docfield.doctype_name, docfield.docfield_name
)
if no_of_docs > 0:
self.delete_version_log(docfield["parent"], docfield["fieldname"])
reference_docs = frappe.get_all(
docfield["parent"], filters={docfield["fieldname"]: self.company}
) )
reference_doc_names = [r.name for r in reference_docs] if no_of_docs > 0:
reference_docs = frappe.get_all(
docfield.doctype_name, filters={docfield.docfield_name: self.company}, limit=self.batch_size
)
reference_doc_names = [r.name for r in reference_docs]
self.delete_communications(docfield["parent"], reference_doc_names) self.delete_version_log(docfield.doctype_name, reference_doc_names)
self.delete_comments(docfield["parent"], reference_doc_names) self.delete_communications(docfield.doctype_name, reference_doc_names)
self.unlink_attachments(docfield["parent"], reference_doc_names) self.delete_comments(docfield.doctype_name, reference_doc_names)
self.unlink_attachments(docfield.doctype_name, reference_doc_names)
self.delete_child_tables(docfield.doctype_name, reference_doc_names)
self.delete_docs_linked_with_specified_company(docfield.doctype_name, reference_doc_names)
processed = int(docfield.no_of_docs) + len(reference_doc_names)
frappe.db.set_value(docfield.doctype, docfield.name, "no_of_docs", processed)
else:
# reset naming series
naming_series = frappe.db.get_value("DocType", docfield.doctype_name, "autoname")
if naming_series:
if "#" in naming_series:
self.update_naming_series(naming_series, docfield.doctype_name)
frappe.db.set_value(docfield.doctype, docfield.name, "done", 1)
self.populate_doctypes_table(tables, docfield["parent"], no_of_docs) pending_doctypes = frappe.db.get_all(
"Transaction Deletion Record Details",
self.delete_child_tables(docfield["parent"], docfield["fieldname"]) filters={"parent": self.name, "done": 0},
self.delete_docs_linked_with_specified_company(docfield["parent"], docfield["fieldname"]) pluck="doctype_name",
)
naming_series = frappe.db.get_value("DocType", docfield["parent"], "autoname") if pending_doctypes:
if naming_series: # as method is enqueued after commit, calling itself will not make validate_doc_status to throw
if "#" in naming_series: # recursively call this task to delete all transactions
self.update_naming_series(naming_series, docfield["parent"]) self.enqueue_task(task="Delete Transactions")
else:
self.db_set("status", "Completed")
self.db_set("delete_transactions", 1)
self.db_set("error_log", None)
def get_doctypes_to_be_ignored_list(self): def get_doctypes_to_be_ignored_list(self):
singles = frappe.get_all("DocType", filters={"issingle": 1}, pluck="name") singles = frappe.get_all("DocType", filters={"issingle": 1}, pluck="name")
@@ -155,25 +330,24 @@ class TransactionDeletionRecord(Document):
def get_number_of_docs_linked_with_specified_company(self, doctype, company_fieldname): def get_number_of_docs_linked_with_specified_company(self, doctype, company_fieldname):
return frappe.db.count(doctype, {company_fieldname: self.company}) return frappe.db.count(doctype, {company_fieldname: self.company})
def populate_doctypes_table(self, tables, doctype, no_of_docs): def populate_doctypes_table(self, tables, doctype, fieldname, no_of_docs):
self.flags.ignore_validate_update_after_submit = True
if doctype not in tables: if doctype not in tables:
self.append("doctypes", {"doctype_name": doctype, "no_of_docs": no_of_docs}) self.append(
"doctypes", {"doctype_name": doctype, "docfield_name": fieldname, "no_of_docs": no_of_docs}
def delete_child_tables(self, doctype, company_fieldname): )
parent_docs_to_be_deleted = frappe.get_all( self.save(ignore_permissions=True)
doctype, {company_fieldname: self.company}, pluck="name"
)
def delete_child_tables(self, doctype, reference_doc_names):
child_tables = frappe.get_all( child_tables = frappe.get_all(
"DocField", filters={"fieldtype": "Table", "parent": doctype}, pluck="options" "DocField", filters={"fieldtype": "Table", "parent": doctype}, pluck="options"
) )
for batch in create_batch(parent_docs_to_be_deleted, self.batch_size): for table in child_tables:
for table in child_tables: frappe.db.delete(table, {"parent": ["in", reference_doc_names]})
frappe.db.delete(table, {"parent": ["in", batch]})
def delete_docs_linked_with_specified_company(self, doctype, company_fieldname): def delete_docs_linked_with_specified_company(self, doctype, reference_doc_names):
frappe.db.delete(doctype, {company_fieldname: self.company}) frappe.db.delete(doctype, {"name": ("in", reference_doc_names)})
def update_naming_series(self, naming_series, doctype_name): def update_naming_series(self, naming_series, doctype_name):
if "." in naming_series: if "." in naming_series:
@@ -194,17 +368,11 @@ class TransactionDeletionRecord(Document):
frappe.db.sql("""update `tabSeries` set current = %s where name=%s""", (last, prefix)) frappe.db.sql("""update `tabSeries` set current = %s where name=%s""", (last, prefix))
def delete_version_log(self, doctype, company_fieldname): def delete_version_log(self, doctype, docnames):
dt = qb.DocType(doctype) versions = qb.DocType("Version")
names = qb.from_(dt).select(dt.name).where(dt[company_fieldname] == self.company).run(as_list=1) qb.from_(versions).delete().where(
names = [x[0] for x in names] (versions.ref_doctype == doctype) & (versions.docname.isin(docnames))
).run()
if names:
versions = qb.DocType("Version")
for batch in create_batch(names, self.batch_size):
qb.from_(versions).delete().where(
(versions.ref_doctype == doctype) & (versions.docname.isin(batch))
).run()
def delete_communications(self, doctype, reference_doc_names): def delete_communications(self, doctype, reference_doc_names):
communications = frappe.get_all( communications = frappe.get_all(
@@ -276,3 +444,34 @@ def get_doctypes_to_be_ignored():
doctypes_to_be_ignored.extend(frappe.get_hooks("company_data_to_be_ignored") or []) doctypes_to_be_ignored.extend(frappe.get_hooks("company_data_to_be_ignored") or [])
return doctypes_to_be_ignored return doctypes_to_be_ignored
@frappe.whitelist()
def is_deletion_doc_running(company: str | None = None, err_msg: str | None = None):
if company:
if running_deletion_jobs := frappe.db.get_all(
"Transaction Deletion Record",
filters={"docstatus": 1, "company": company, "status": "Running"},
):
if not err_msg:
err_msg = ""
frappe.throw(
title=_("Deletion in Progress!"),
msg=_("Transaction Deletion Document: {0} is running for this Company. {1}").format(
get_link_to_form("Transaction Deletion Record", running_deletion_jobs[0].name), err_msg
),
)
def check_for_running_deletion_job(doc, method=None):
# Check if DocType has 'company' field
df = qb.DocType("DocField")
if (
not_allowed := qb.from_(df)
.select(df.parent)
.where((df.fieldname == "company") & (df.parent == doc.doctype))
.run()
):
is_deletion_doc_running(
doc.company, _("Cannot make any transactions until the deletion job is completed")
)

View File

@@ -2,11 +2,15 @@
// License: GNU General Public License v3. See license.txt // License: GNU General Public License v3. See license.txt
frappe.listview_settings["Transaction Deletion Record"] = { frappe.listview_settings["Transaction Deletion Record"] = {
add_fields: ["status"],
get_indicator: function (doc) { get_indicator: function (doc) {
if (doc.docstatus == 0) { let colors = {
return [__("Draft"), "red"]; Queued: "orange",
} else { Completed: "green",
return [__("Completed"), "green"]; Running: "blue",
} Failed: "red",
};
let status = doc.status;
return [__(status), colors[status], "status,=," + status];
}, },
}; };

View File

@@ -5,8 +5,7 @@
"editable_grid": 1, "editable_grid": 1,
"engine": "InnoDB", "engine": "InnoDB",
"field_order": [ "field_order": [
"doctype_name", "doctype_name"
"no_of_docs"
], ],
"fields": [ "fields": [
{ {
@@ -16,18 +15,12 @@
"label": "DocType", "label": "DocType",
"options": "DocType", "options": "DocType",
"reqd": 1 "reqd": 1
},
{
"fieldname": "no_of_docs",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Number of Docs"
} }
], ],
"index_web_pages_for_search": 1, "index_web_pages_for_search": 1,
"istable": 1, "istable": 1,
"links": [], "links": [],
"modified": "2021-05-08 23:10:46.166744", "modified": "2024-02-04 10:56:27.413691",
"modified_by": "Administrator", "modified_by": "Administrator",
"module": "Setup", "module": "Setup",
"name": "Transaction Deletion Record Item", "name": "Transaction Deletion Record Item",
@@ -35,5 +28,6 @@
"permissions": [], "permissions": [],
"sort_field": "modified", "sort_field": "modified",
"sort_order": "DESC", "sort_order": "DESC",
"states": [],
"track_changes": 1 "track_changes": 1
} }

View File

@@ -809,7 +809,8 @@
"label": "Purchase Order", "label": "Purchase Order",
"options": "Purchase Order", "options": "Purchase Order",
"print_hide": 1, "print_hide": 1,
"read_only": 1 "read_only": 1,
"search_index": 1
}, },
{ {
"fieldname": "column_break_82", "fieldname": "column_break_82",
@@ -870,7 +871,7 @@
"index_web_pages_for_search": 1, "index_web_pages_for_search": 1,
"istable": 1, "istable": 1,
"links": [], "links": [],
"modified": "2023-11-14 18:37:38.638144", "modified": "2024-03-21 18:15:07.603672",
"modified_by": "Administrator", "modified_by": "Administrator",
"module": "Stock", "module": "Stock",
"name": "Delivery Note Item", "name": "Delivery Note Item",

View File

@@ -3,7 +3,7 @@
import frappe import frappe
from frappe.tests.utils import FrappeTestCase, change_settings from frappe.tests.utils import FrappeTestCase, change_settings
from frappe.utils import add_days, cint, cstr, flt, today from frappe.utils import add_days, cint, cstr, flt, nowtime, today
from pypika import functions as fn from pypika import functions as fn
import erpnext import erpnext
@@ -2224,6 +2224,95 @@ class TestPurchaseReceipt(FrappeTestCase):
pr.reload() pr.reload()
self.assertEqual(pr.per_billed, 100) self.assertEqual(pr.per_billed, 100)
def test_sle_qty_after_transaction(self):
item = make_item(
"_Test Item Qty After Transaction",
properties={"is_stock_item": 1, "valuation_method": "FIFO"},
).name
posting_date = today()
posting_time = nowtime()
# Step 1: Create Purchase Receipt
pr = make_purchase_receipt(
item_code=item,
qty=1,
rate=100,
posting_date=posting_date,
posting_time=posting_time,
do_not_save=1,
)
for i in range(9):
pr.append(
"items",
{
"item_code": item,
"qty": 1,
"rate": 100,
"warehouse": pr.items[0].warehouse,
"cost_center": pr.items[0].cost_center,
"expense_account": pr.items[0].expense_account,
"uom": pr.items[0].uom,
"stock_uom": pr.items[0].stock_uom,
"conversion_factor": pr.items[0].conversion_factor,
},
)
self.assertEqual(len(pr.items), 10)
pr.save()
pr.submit()
data = frappe.get_all(
"Stock Ledger Entry",
fields=["qty_after_transaction", "creation", "posting_datetime"],
filters={"voucher_no": pr.name, "is_cancelled": 0},
order_by="creation",
)
for index, d in enumerate(data):
self.assertEqual(d.qty_after_transaction, 1 + index)
# Step 2: Create Purchase Receipt
pr = make_purchase_receipt(
item_code=item,
qty=1,
rate=100,
posting_date=posting_date,
posting_time=posting_time,
do_not_save=1,
)
for i in range(9):
pr.append(
"items",
{
"item_code": item,
"qty": 1,
"rate": 100,
"warehouse": pr.items[0].warehouse,
"cost_center": pr.items[0].cost_center,
"expense_account": pr.items[0].expense_account,
"uom": pr.items[0].uom,
"stock_uom": pr.items[0].stock_uom,
"conversion_factor": pr.items[0].conversion_factor,
},
)
self.assertEqual(len(pr.items), 10)
pr.save()
pr.submit()
data = frappe.get_all(
"Stock Ledger Entry",
fields=["qty_after_transaction", "creation", "posting_datetime"],
filters={"voucher_no": pr.name, "is_cancelled": 0},
order_by="creation",
)
for index, d in enumerate(data):
self.assertEqual(d.qty_after_transaction, 11 + index)
def prepare_data_for_internal_transfer(): def prepare_data_for_internal_transfer():
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_internal_supplier from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_internal_supplier

View File

@@ -1671,24 +1671,22 @@ class TestStockEntry(FrappeTestCase):
item_code = "Test Negative Item - 001" item_code = "Test Negative Item - 001"
item_doc = create_item(item_code=item_code, is_stock_item=1, valuation_rate=10) item_doc = create_item(item_code=item_code, is_stock_item=1, valuation_rate=10)
make_stock_entry( se1 = make_stock_entry(
item_code=item_code, item_code=item_code,
posting_date=add_days(today(), -3), posting_date=add_days(today(), -3),
posting_time="00:00:00", posting_time="00:00:00",
purpose="Material Receipt", target="_Test Warehouse - _TC",
qty=10, qty=10,
to_warehouse="_Test Warehouse - _TC", to_warehouse="_Test Warehouse - _TC",
do_not_save=True,
) )
make_stock_entry( se2 = make_stock_entry(
item_code=item_code, item_code=item_code,
posting_date=today(), posting_date=today(),
posting_time="00:00:00", posting_time="00:00:00",
purpose="Material Receipt", source="_Test Warehouse - _TC",
qty=8, qty=8,
from_warehouse="_Test Warehouse - _TC", from_warehouse="_Test Warehouse - _TC",
do_not_save=True,
) )
sr_doc = create_stock_reconciliation( sr_doc = create_stock_reconciliation(

View File

@@ -11,6 +11,7 @@
"warehouse", "warehouse",
"posting_date", "posting_date",
"posting_time", "posting_time",
"posting_datetime",
"is_adjustment_entry", "is_adjustment_entry",
"column_break_6", "column_break_6",
"voucher_type", "voucher_type",
@@ -96,7 +97,6 @@
"oldfieldtype": "Date", "oldfieldtype": "Date",
"print_width": "100px", "print_width": "100px",
"read_only": 1, "read_only": 1,
"search_index": 1,
"width": "100px" "width": "100px"
}, },
{ {
@@ -249,7 +249,6 @@
"options": "Company", "options": "Company",
"print_width": "150px", "print_width": "150px",
"read_only": 1, "read_only": 1,
"search_index": 1,
"width": "150px" "width": "150px"
}, },
{ {
@@ -316,6 +315,11 @@
"fieldname": "is_adjustment_entry", "fieldname": "is_adjustment_entry",
"fieldtype": "Check", "fieldtype": "Check",
"label": "Is Adjustment Entry" "label": "Is Adjustment Entry"
},
{
"fieldname": "posting_datetime",
"fieldtype": "Datetime",
"label": "Posting Datetime"
} }
], ],
"hide_toolbar": 1, "hide_toolbar": 1,
@@ -324,7 +328,7 @@
"in_create": 1, "in_create": 1,
"index_web_pages_for_search": 1, "index_web_pages_for_search": 1,
"links": [], "links": [],
"modified": "2024-03-13 09:56:13.021696", "modified": "2024-02-07 09:18:13.999231",
"modified_by": "Administrator", "modified_by": "Administrator",
"module": "Stock", "module": "Stock",
"name": "Stock Ledger Entry", "name": "Stock Ledger Entry",

View File

@@ -52,6 +52,12 @@ class StockLedgerEntry(Document):
self.validate_with_last_transaction_posting_time() self.validate_with_last_transaction_posting_time()
self.validate_inventory_dimension_negative_stock() self.validate_inventory_dimension_negative_stock()
def set_posting_datetime(self):
from erpnext.stock.utils import get_combine_datetime
self.posting_datetime = get_combine_datetime(self.posting_date, self.posting_time)
self.db_set("posting_datetime", self.posting_datetime)
def validate_inventory_dimension_negative_stock(self): def validate_inventory_dimension_negative_stock(self):
if self.is_cancelled: if self.is_cancelled:
return return
@@ -122,6 +128,7 @@ class StockLedgerEntry(Document):
return inv_dimension_dict return inv_dimension_dict
def on_submit(self): def on_submit(self):
self.set_posting_datetime()
self.check_stock_frozen_date() self.check_stock_frozen_date()
self.calculate_batch_qty() self.calculate_batch_qty()
@@ -293,9 +300,7 @@ class StockLedgerEntry(Document):
def on_doctype_update(): def on_doctype_update():
frappe.db.add_index(
"Stock Ledger Entry", fields=["posting_date", "posting_time"], index_name="posting_sort_index"
)
frappe.db.add_index("Stock Ledger Entry", ["voucher_no", "voucher_type"]) frappe.db.add_index("Stock Ledger Entry", ["voucher_no", "voucher_type"])
frappe.db.add_index("Stock Ledger Entry", ["batch_no", "item_code", "warehouse"]) frappe.db.add_index("Stock Ledger Entry", ["batch_no", "item_code", "warehouse"])
frappe.db.add_index("Stock Ledger Entry", ["warehouse", "item_code"], "item_warehouse") frappe.db.add_index("Stock Ledger Entry", ["warehouse", "item_code"], "item_warehouse")
frappe.db.add_index("Stock Ledger Entry", ["posting_datetime", "creation"])

View File

@@ -2,6 +2,7 @@
# See license.txt # See license.txt
import json import json
import time
from uuid import uuid4 from uuid import uuid4
import frappe import frappe
@@ -1066,7 +1067,7 @@ class TestStockLedgerEntry(FrappeTestCase, StockTestMixin):
frappe.qb.from_(sle) frappe.qb.from_(sle)
.select("qty_after_transaction") .select("qty_after_transaction")
.where((sle.item_code == item) & (sle.warehouse == warehouse) & (sle.is_cancelled == 0)) .where((sle.item_code == item) & (sle.warehouse == warehouse) & (sle.is_cancelled == 0))
.orderby(CombineDatetime(sle.posting_date, sle.posting_time)) .orderby(sle.posting_datetime)
.orderby(sle.creation) .orderby(sle.creation)
).run(pluck=True) ).run(pluck=True)
@@ -1143,6 +1144,89 @@ class TestStockLedgerEntry(FrappeTestCase, StockTestMixin):
except Exception as e: except Exception as e:
self.fail("Double processing of qty for clashing timestamp.") self.fail("Double processing of qty for clashing timestamp.")
def test_previous_sle_with_clashed_timestamp(self):
item = make_item().name
warehouse = "_Test Warehouse - _TC"
reciept1 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=100,
rate=10,
posting_date="2021-01-01",
posting_time="02:00:00",
)
time.sleep(3)
reciept2 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=5,
posting_date="2021-01-01",
rate=10,
posting_time="02:00:00.1234",
)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept1.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 100)
self.assertEqual(sle[0].actual_qty, 100)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept2.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 105)
self.assertEqual(sle[0].actual_qty, 5)
def test_backdated_sle_with_same_timestamp(self):
item = make_item().name
warehouse = "_Test Warehouse - _TC"
reciept1 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=5,
posting_date="2021-01-01",
rate=10,
posting_time="02:00:00.1234",
)
time.sleep(3)
# backdated entry with same timestamp but different ms part
reciept2 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=100,
rate=10,
posting_date="2021-01-01",
posting_time="02:00:00",
)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept1.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 5)
self.assertEqual(sle[0].actual_qty, 5)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept2.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 105)
self.assertEqual(sle[0].actual_qty, 100)
@change_settings("System Settings", {"float_precision": 3, "currency_precision": 2}) @change_settings("System Settings", {"float_precision": 3, "currency_precision": 2})
def test_transfer_invariants(self): def test_transfer_invariants(self):
"""Extact stock value should be transferred.""" """Extact stock value should be transferred."""

View File

@@ -5,7 +5,7 @@
import frappe import frappe
from frappe import _ from frappe import _
from frappe.query_builder import Field from frappe.query_builder import Field
from frappe.query_builder.functions import CombineDatetime, Min from frappe.query_builder.functions import Min
from frappe.utils import add_days, getdate, today from frappe.utils import add_days, getdate, today
import erpnext import erpnext
@@ -75,7 +75,7 @@ def get_data(report_filters):
& (sle.company == report_filters.company) & (sle.company == report_filters.company)
& (sle.is_cancelled == 0) & (sle.is_cancelled == 0)
) )
.orderby(CombineDatetime(sle.posting_date, sle.posting_time), sle.creation) .orderby(sle.posting_datetime, sle.creation)
).run(as_dict=True) ).run(as_dict=True)
for d in data: for d in data:

View File

@@ -213,13 +213,11 @@ def get_stock_ledger_entries(filters, items):
query = ( query = (
frappe.qb.from_(sle) frappe.qb.from_(sle)
.force_index("posting_sort_index")
.left_join(sle2) .left_join(sle2)
.on( .on(
(sle.item_code == sle2.item_code) (sle.item_code == sle2.item_code)
& (sle.warehouse == sle2.warehouse) & (sle.warehouse == sle2.warehouse)
& (sle.posting_date < sle2.posting_date) & (sle.posting_datetime < sle2.posting_datetime)
& (sle.posting_time < sle2.posting_time)
& (sle.name < sle2.name) & (sle.name < sle2.name)
) )
.select(sle.item_code, sle.warehouse, sle.qty_after_transaction, sle.company) .select(sle.item_code, sle.warehouse, sle.qty_after_transaction, sle.company)

View File

@@ -8,7 +8,7 @@ from typing import Any, Dict, List, Optional, TypedDict
import frappe import frappe
from frappe import _ from frappe import _
from frappe.query_builder import Order from frappe.query_builder import Order
from frappe.query_builder.functions import Coalesce, CombineDatetime from frappe.query_builder.functions import Coalesce
from frappe.utils import add_days, cint, date_diff, flt, getdate from frappe.utils import add_days, cint, date_diff, flt, getdate
from frappe.utils.nestedset import get_descendants_of from frappe.utils.nestedset import get_descendants_of
@@ -283,7 +283,7 @@ class StockBalanceReport(object):
item_table.item_name, item_table.item_name,
) )
.where((sle.docstatus < 2) & (sle.is_cancelled == 0)) .where((sle.docstatus < 2) & (sle.is_cancelled == 0))
.orderby(CombineDatetime(sle.posting_date, sle.posting_time)) .orderby(sle.posting_datetime)
.orderby(sle.creation) .orderby(sle.creation)
.orderby(sle.actual_qty) .orderby(sle.actual_qty)
) )

View File

@@ -276,7 +276,7 @@ def get_stock_ledger_entries(filters, items):
frappe.qb.from_(sle) frappe.qb.from_(sle)
.select( .select(
sle.item_code, sle.item_code,
CombineDatetime(sle.posting_date, sle.posting_time).as_("date"), sle.posting_datetime.as_("date"),
sle.warehouse, sle.warehouse,
sle.posting_date, sle.posting_date,
sle.posting_time, sle.posting_time,

View File

@@ -7,13 +7,14 @@ from typing import Optional, Set, Tuple
import frappe import frappe
from frappe import _ from frappe import _
from frappe.model.meta import get_field_precision from frappe.model.meta import get_field_precision
from frappe.query_builder.functions import CombineDatetime, Sum from frappe.query_builder.functions import Sum
from frappe.utils import cint, cstr, flt, get_link_to_form, getdate, now, nowdate from frappe.utils import cint, cstr, flt, get_link_to_form, getdate, now, nowdate
import erpnext import erpnext
from erpnext.stock.doctype.bin.bin import update_qty as update_bin_qty from erpnext.stock.doctype.bin.bin import update_qty as update_bin_qty
from erpnext.stock.doctype.inventory_dimension.inventory_dimension import get_inventory_dimensions from erpnext.stock.doctype.inventory_dimension.inventory_dimension import get_inventory_dimensions
from erpnext.stock.utils import ( from erpnext.stock.utils import (
get_combine_datetime,
get_incoming_outgoing_rate_for_cancel, get_incoming_outgoing_rate_for_cancel,
get_incoming_rate, get_incoming_rate,
get_or_make_bin, get_or_make_bin,
@@ -69,6 +70,7 @@ def make_sl_entries(sl_entries, allow_negative_stock=False, via_landed_cost_vouc
args = sle_doc.as_dict() args = sle_doc.as_dict()
args["allow_zero_valuation_rate"] = sle.get("allow_zero_valuation_rate") or False args["allow_zero_valuation_rate"] = sle.get("allow_zero_valuation_rate") or False
args["posting_datetime"] = get_combine_datetime(args.posting_date, args.posting_time)
if sle.get("voucher_type") == "Stock Reconciliation": if sle.get("voucher_type") == "Stock Reconciliation":
# preserve previous_qty_after_transaction for qty reposting # preserve previous_qty_after_transaction for qty reposting
@@ -431,12 +433,14 @@ class update_entries_after(object):
self.process_sle(sle) self.process_sle(sle)
def get_sle_against_current_voucher(self): def get_sle_against_current_voucher(self):
self.args["time_format"] = "%H:%i:%s" self.args["posting_datetime"] = get_combine_datetime(
self.args.posting_date, self.args.posting_time
)
return frappe.db.sql( return frappe.db.sql(
""" """
select select
*, timestamp(posting_date, posting_time) as "timestamp" *, posting_datetime as "timestamp"
from from
`tabStock Ledger Entry` `tabStock Ledger Entry`
where where
@@ -444,8 +448,7 @@ class update_entries_after(object):
and warehouse = %(warehouse)s and warehouse = %(warehouse)s
and is_cancelled = 0 and is_cancelled = 0
and ( and (
posting_date = %(posting_date)s and posting_datetime = %(posting_datetime)s
time_format(posting_time, %(time_format)s) = time_format(%(posting_time)s, %(time_format)s)
) )
order by order by
creation ASC creation ASC
@@ -1186,11 +1189,11 @@ class update_entries_after(object):
def get_previous_sle_of_current_voucher(args, operator="<", exclude_current_voucher=False): def get_previous_sle_of_current_voucher(args, operator="<", exclude_current_voucher=False):
"""get stock ledger entries filtered by specific posting datetime conditions""" """get stock ledger entries filtered by specific posting datetime conditions"""
args["time_format"] = "%H:%i:%s"
if not args.get("posting_date"): if not args.get("posting_date"):
args["posting_date"] = "1900-01-01" args["posting_datetime"] = "1900-01-01 00:00:00"
if not args.get("posting_time"):
args["posting_time"] = "00:00" if not args.get("posting_datetime"):
args["posting_datetime"] = get_combine_datetime(args["posting_date"], args["posting_time"])
voucher_condition = "" voucher_condition = ""
if exclude_current_voucher: if exclude_current_voucher:
@@ -1199,23 +1202,20 @@ def get_previous_sle_of_current_voucher(args, operator="<", exclude_current_vouc
sle = frappe.db.sql( sle = frappe.db.sql(
""" """
select *, timestamp(posting_date, posting_time) as "timestamp" select *, posting_datetime as "timestamp"
from `tabStock Ledger Entry` from `tabStock Ledger Entry`
where item_code = %(item_code)s where item_code = %(item_code)s
and warehouse = %(warehouse)s and warehouse = %(warehouse)s
and is_cancelled = 0 and is_cancelled = 0
{voucher_condition} {voucher_condition}
and ( and (
posting_date < %(posting_date)s or posting_datetime {operator} %(posting_datetime)s
(
posting_date = %(posting_date)s and
time_format(posting_time, %(time_format)s) {operator} time_format(%(posting_time)s, %(time_format)s)
)
) )
order by timestamp(posting_date, posting_time) desc, creation desc order by posting_datetime desc, creation desc
limit 1 limit 1
for update""".format( for update""".format(
operator=operator, voucher_condition=voucher_condition operator=operator,
voucher_condition=voucher_condition,
), ),
args, args,
as_dict=1, as_dict=1,
@@ -1256,9 +1256,7 @@ def get_stock_ledger_entries(
extra_cond=None, extra_cond=None,
): ):
"""get stock ledger entries filtered by specific posting datetime conditions""" """get stock ledger entries filtered by specific posting datetime conditions"""
conditions = " and timestamp(posting_date, posting_time) {0} timestamp(%(posting_date)s, %(posting_time)s)".format( conditions = " and posting_datetime {0} %(posting_datetime)s".format(operator)
operator
)
if previous_sle.get("warehouse"): if previous_sle.get("warehouse"):
conditions += " and warehouse = %(warehouse)s" conditions += " and warehouse = %(warehouse)s"
elif previous_sle.get("warehouse_condition"): elif previous_sle.get("warehouse_condition"):
@@ -1284,9 +1282,11 @@ def get_stock_ledger_entries(
) )
if not previous_sle.get("posting_date"): if not previous_sle.get("posting_date"):
previous_sle["posting_date"] = "1900-01-01" previous_sle["posting_datetime"] = "1900-01-01 00:00:00"
if not previous_sle.get("posting_time"): else:
previous_sle["posting_time"] = "00:00" previous_sle["posting_datetime"] = get_combine_datetime(
previous_sle["posting_date"], previous_sle["posting_time"]
)
if operator in (">", "<=") and previous_sle.get("name"): if operator in (">", "<=") and previous_sle.get("name"):
conditions += " and name!=%(name)s" conditions += " and name!=%(name)s"
@@ -1299,12 +1299,12 @@ def get_stock_ledger_entries(
return frappe.db.sql( return frappe.db.sql(
""" """
select *, timestamp(posting_date, posting_time) as "timestamp" select *, posting_datetime as "timestamp"
from `tabStock Ledger Entry` from `tabStock Ledger Entry`
where item_code = %%(item_code)s where item_code = %%(item_code)s
and is_cancelled = 0 and is_cancelled = 0
%(conditions)s %(conditions)s
order by timestamp(posting_date, posting_time) %(order)s, creation %(order)s order by posting_datetime %(order)s, creation %(order)s
%(limit)s %(for_update)s""" %(limit)s %(for_update)s"""
% { % {
"conditions": conditions, "conditions": conditions,
@@ -1330,7 +1330,7 @@ def get_sle_by_voucher_detail_no(voucher_detail_no, excluded_sle=None):
"posting_date", "posting_date",
"posting_time", "posting_time",
"voucher_detail_no", "voucher_detail_no",
"timestamp(posting_date, posting_time) as timestamp", "posting_datetime as timestamp",
], ],
as_dict=1, as_dict=1,
) )
@@ -1340,15 +1340,18 @@ def get_batch_incoming_rate(
item_code, warehouse, batch_no, posting_date, posting_time, creation=None item_code, warehouse, batch_no, posting_date, posting_time, creation=None
): ):
import datetime
sle = frappe.qb.DocType("Stock Ledger Entry") sle = frappe.qb.DocType("Stock Ledger Entry")
timestamp_condition = CombineDatetime(sle.posting_date, sle.posting_time) < CombineDatetime( posting_datetime = get_combine_datetime(posting_date, posting_time)
posting_date, posting_time if not creation:
) posting_datetime = posting_datetime + datetime.timedelta(milliseconds=1)
timestamp_condition = sle.posting_datetime < posting_datetime
if creation: if creation:
timestamp_condition |= ( timestamp_condition |= (
CombineDatetime(sle.posting_date, sle.posting_time) sle.posting_datetime == get_combine_datetime(posting_date, posting_time)
== CombineDatetime(posting_date, posting_time)
) & (sle.creation < creation) ) & (sle.creation < creation)
batch_details = ( batch_details = (
@@ -1411,7 +1414,7 @@ def get_valuation_rate(
AND valuation_rate >= 0 AND valuation_rate >= 0
AND is_cancelled = 0 AND is_cancelled = 0
AND NOT (voucher_no = %s AND voucher_type = %s) AND NOT (voucher_no = %s AND voucher_type = %s)
order by posting_date desc, posting_time desc, name desc limit 1""", order by posting_datetime desc, name desc limit 1""",
(item_code, warehouse, voucher_no, voucher_type), (item_code, warehouse, voucher_no, voucher_type),
) )
@@ -1472,7 +1475,7 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
datetime_limit_condition = "" datetime_limit_condition = ""
qty_shift = args.actual_qty qty_shift = args.actual_qty
args["time_format"] = "%H:%i:%s" args["posting_datetime"] = get_combine_datetime(args["posting_date"], args["posting_time"])
# find difference/shift in qty caused by stock reconciliation # find difference/shift in qty caused by stock reconciliation
if args.voucher_type == "Stock Reconciliation": if args.voucher_type == "Stock Reconciliation":
@@ -1482,8 +1485,6 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
next_stock_reco_detail = get_next_stock_reco(args) next_stock_reco_detail = get_next_stock_reco(args)
if next_stock_reco_detail: if next_stock_reco_detail:
detail = next_stock_reco_detail[0] detail = next_stock_reco_detail[0]
# add condition to update SLEs before this date & time
datetime_limit_condition = get_datetime_limit_condition(detail) datetime_limit_condition = get_datetime_limit_condition(detail)
frappe.db.sql( frappe.db.sql(
@@ -1496,13 +1497,9 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
and voucher_no != %(voucher_no)s and voucher_no != %(voucher_no)s
and is_cancelled = 0 and is_cancelled = 0
and ( and (
posting_date > %(posting_date)s or posting_datetime > %(posting_datetime)s
(
posting_date = %(posting_date)s and
time_format(posting_time, %(time_format)s) > time_format(%(posting_time)s, %(time_format)s)
)
) )
{datetime_limit_condition} {datetime_limit_condition}
""", """,
args, args,
) )
@@ -1557,20 +1554,11 @@ def get_next_stock_reco(kwargs):
& (sle.voucher_no != kwargs.get("voucher_no")) & (sle.voucher_no != kwargs.get("voucher_no"))
& (sle.is_cancelled == 0) & (sle.is_cancelled == 0)
& ( & (
( sle.posting_datetime
CombineDatetime(sle.posting_date, sle.posting_time) >= get_combine_datetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
> CombineDatetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
)
| (
(
CombineDatetime(sle.posting_date, sle.posting_time)
== CombineDatetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
)
& (sle.creation > kwargs.get("creation"))
)
) )
) )
.orderby(CombineDatetime(sle.posting_date, sle.posting_time)) .orderby(sle.posting_datetime)
.orderby(sle.creation) .orderby(sle.creation)
.limit(1) .limit(1)
) )
@@ -1582,11 +1570,13 @@ def get_next_stock_reco(kwargs):
def get_datetime_limit_condition(detail): def get_datetime_limit_condition(detail):
posting_datetime = get_combine_datetime(detail.posting_date, detail.posting_time)
return f""" return f"""
and and
(timestamp(posting_date, posting_time) < timestamp('{detail.posting_date}', '{detail.posting_time}') (posting_datetime < '{posting_datetime}'
or ( or (
timestamp(posting_date, posting_time) = timestamp('{detail.posting_date}', '{detail.posting_time}') posting_datetime = '{posting_datetime}'
and creation < '{detail.creation}' and creation < '{detail.creation}'
) )
)""" )"""
@@ -1659,14 +1649,11 @@ def get_future_sle_with_negative_qty(sle):
(SLE.item_code == sle.item_code) (SLE.item_code == sle.item_code)
& (SLE.warehouse == sle.warehouse) & (SLE.warehouse == sle.warehouse)
& (SLE.voucher_no != sle.voucher_no) & (SLE.voucher_no != sle.voucher_no)
& ( & (SLE.posting_datetime >= get_combine_datetime(sle.posting_date, sle.posting_time))
CombineDatetime(SLE.posting_date, SLE.posting_time)
>= CombineDatetime(sle.posting_date, sle.posting_time)
)
& (SLE.is_cancelled == 0) & (SLE.is_cancelled == 0)
& (SLE.qty_after_transaction < 0) & (SLE.qty_after_transaction < 0)
) )
.orderby(CombineDatetime(SLE.posting_date, SLE.posting_time)) .orderby(SLE.posting_datetime)
.limit(1) .limit(1)
) )
@@ -1681,20 +1668,20 @@ def get_future_sle_with_negative_batch_qty(args):
""" """
with batch_ledger as ( with batch_ledger as (
select select
posting_date, posting_time, voucher_type, voucher_no, posting_date, posting_time, posting_datetime, voucher_type, voucher_no,
sum(actual_qty) over (order by posting_date, posting_time, creation) as cumulative_total sum(actual_qty) over (order by posting_datetime, creation) as cumulative_total
from `tabStock Ledger Entry` from `tabStock Ledger Entry`
where where
item_code = %(item_code)s item_code = %(item_code)s
and warehouse = %(warehouse)s and warehouse = %(warehouse)s
and batch_no=%(batch_no)s and batch_no=%(batch_no)s
and is_cancelled = 0 and is_cancelled = 0
order by posting_date, posting_time, creation order by posting_datetime, creation
) )
select * from batch_ledger select * from batch_ledger
where where
cumulative_total < 0.0 cumulative_total < 0.0
and timestamp(posting_date, posting_time) >= timestamp(%(posting_date)s, %(posting_time)s) and posting_datetime >= %(posting_datetime)s
limit 1 limit 1
""", """,
args, args,
@@ -1746,6 +1733,7 @@ def is_internal_transfer(sle):
def get_stock_value_difference(item_code, warehouse, posting_date, posting_time, voucher_no=None): def get_stock_value_difference(item_code, warehouse, posting_date, posting_time, voucher_no=None):
table = frappe.qb.DocType("Stock Ledger Entry") table = frappe.qb.DocType("Stock Ledger Entry")
posting_datetime = get_combine_datetime(posting_date, posting_time)
query = ( query = (
frappe.qb.from_(table) frappe.qb.from_(table)
@@ -1754,10 +1742,7 @@ def get_stock_value_difference(item_code, warehouse, posting_date, posting_time,
(table.is_cancelled == 0) (table.is_cancelled == 0)
& (table.item_code == item_code) & (table.item_code == item_code)
& (table.warehouse == warehouse) & (table.warehouse == warehouse)
& ( & (table.posting_datetime <= posting_datetime)
(table.posting_date < posting_date)
| ((table.posting_date == posting_date) & (table.posting_time <= posting_time))
)
) )
) )

View File

@@ -8,7 +8,7 @@ from typing import Dict, Optional
import frappe import frappe
from frappe import _ from frappe import _
from frappe.query_builder.functions import CombineDatetime, IfNull, Sum from frappe.query_builder.functions import CombineDatetime, IfNull, Sum
from frappe.utils import cstr, flt, get_link_to_form, nowdate, nowtime from frappe.utils import cstr, flt, get_link_to_form, get_time, getdate, nowdate, nowtime
import erpnext import erpnext
from erpnext.stock.doctype.warehouse.warehouse import get_child_warehouses from erpnext.stock.doctype.warehouse.warehouse import get_child_warehouses
@@ -619,3 +619,18 @@ def _update_item_info(scan_result: Dict[str, Optional[str]]) -> Dict[str, Option
): ):
scan_result.update(item_info) scan_result.update(item_info)
return scan_result return scan_result
def get_combine_datetime(posting_date, posting_time):
import datetime
if isinstance(posting_date, str):
posting_date = getdate(posting_date)
if isinstance(posting_time, str):
posting_time = get_time(posting_time)
if isinstance(posting_time, datetime.timedelta):
posting_time = (datetime.datetime.min + posting_time).time()
return datetime.datetime.combine(posting_date, posting_time).replace(microsecond=0)

View File

@@ -162,7 +162,7 @@ def create_log(doc_name, e, from_doctype, to_doctype, status, log_date=None, res
transaction_log.from_doctype = from_doctype transaction_log.from_doctype = from_doctype
transaction_log.to_doctype = to_doctype transaction_log.to_doctype = to_doctype
transaction_log.retried = restarted transaction_log.retried = restarted
transaction_log.save() transaction_log.save(ignore_permissions=True)
def show_job_status(fail_count, deserialized_data_count, to_doctype): def show_job_status(fail_count, deserialized_data_count, to_doctype):