Initial commit
This commit is contained in:
77
Fusion Accounting/models/__init__.py
Normal file
77
Fusion Accounting/models/__init__.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# Model loading order matters in Odoo: base models first, then reports,
# then feature add-ons that extend them.  Keep one import per line.
from . import account_account
from . import account_bank_statement
from . import account_chart_template
from . import account_fiscal_year
from . import account_journal_dashboard
from . import account_move
from . import account_payment
from . import account_reconcile_model
from . import account_reconcile_model_line
from . import account_tax
from . import digest
from . import res_config_settings
from . import res_company
from . import bank_rec_widget
from . import bank_rec_widget_line
from . import ir_ui_menu
from . import res_currency
from . import res_partner
from . import account_report
from . import account_analytic_report
from . import bank_reconciliation_report
from . import account_general_ledger
from . import account_generic_tax_report
from . import account_journal_report
from . import account_cash_flow_report
from . import account_deferred_reports
from . import account_multicurrency_revaluation_report
from . import account_move_line
from . import account_trial_balance_report
from . import account_aged_partner_balance
from . import account_partner_ledger
from . import mail_activity
from . import mail_activity_type
from . import chart_template
from . import ir_actions
from . import account_sales_report
from . import executive_summary_report
from . import budget
from . import balance_sheet
from . import account_fiscal_position
# Split from a single combined line to keep the file's one-import-per-line style.
from . import account_asset
from . import account_journal
from . import account_journal_csv
from . import bank_statement_import_ofx
from . import bank_statement_import_qif
from . import bank_statement_import_camt
from . import batch_payment
from . import check_printing
from . import sepa_credit_transfer
from . import sepa_direct_debit
from . import payment_qr_code
from . import followup
from . import res_partner_followup
from . import loan
from . import loan_line
from . import edi_document
from . import edi_format
from . import ubl_generator
from . import cii_generator
from . import account_move_edi
from . import external_tax_provider
from . import avatax_provider
from . import tax_python
from . import account_move_external_tax
from . import fiscal_category
from . import saft_export
from . import saft_import
from . import intrastat_report
from . import document_extraction
from . import invoice_extraction
from . import inter_company_rules
from . import three_way_match
from . import account_transfer
from . import debit_note
from . import cash_basis_report
# integration_bridges is loaded conditionally - requires fleet/hr_expense/helpdesk
# Uncomment when those modules are installed:
# from . import integration_bridges
|
||||
BIN
Fusion Accounting/models/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
Fusion Accounting/models/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
Fusion Accounting/models/__pycache__/account_tax.cpython-310.pyc
Normal file
BIN
Fusion Accounting/models/__pycache__/account_tax.cpython-310.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
Fusion Accounting/models/__pycache__/budget.cpython-310.pyc
Normal file
BIN
Fusion Accounting/models/__pycache__/budget.cpython-310.pyc
Normal file
Binary file not shown.
Binary file not shown.
BIN
Fusion Accounting/models/__pycache__/digest.cpython-310.pyc
Normal file
BIN
Fusion Accounting/models/__pycache__/digest.cpython-310.pyc
Normal file
Binary file not shown.
Binary file not shown.
BIN
Fusion Accounting/models/__pycache__/ir_actions.cpython-310.pyc
Normal file
BIN
Fusion Accounting/models/__pycache__/ir_actions.cpython-310.pyc
Normal file
Binary file not shown.
BIN
Fusion Accounting/models/__pycache__/ir_ui_menu.cpython-310.pyc
Normal file
BIN
Fusion Accounting/models/__pycache__/ir_ui_menu.cpython-310.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
Fusion Accounting/models/__pycache__/res_company.cpython-310.pyc
Normal file
BIN
Fusion Accounting/models/__pycache__/res_company.cpython-310.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
Fusion Accounting/models/__pycache__/res_partner.cpython-310.pyc
Normal file
BIN
Fusion Accounting/models/__pycache__/res_partner.cpython-310.pyc
Normal file
Binary file not shown.
86
Fusion Accounting/models/account_account.py
Normal file
86
Fusion Accounting/models/account_account.py
Normal file
@@ -0,0 +1,86 @@
|
||||
# Fusion Accounting - Account extensions
|
||||
# Provides reconciliation actions, asset automation, and budget linkage
|
||||
|
||||
import ast
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
|
||||
|
||||
class FusionAccountAccount(models.Model):
    """Augments the standard chart of accounts with asset management
    and reconciliation capabilities for Fusion Accounting."""

    _inherit = "account.account"

    # ---- Relational Fields ----
    exclude_provision_currency_ids = fields.Many2many(
        comodel_name='res.currency',
        relation='account_account_exclude_res_currency_provision',
        help="Currencies excluded from foreign exchange provision calculations.",
    )
    # Budget lines referencing this account (inverse of account.report.budget.item).
    budget_item_ids = fields.One2many(
        comodel_name='account.report.budget.item',
        inverse_name='account_id',
    )
    # Asset *models* (state == 'model') used as templates for auto-created assets.
    asset_model_ids = fields.Many2many(
        comodel_name='account.asset',
        domain=[('state', '=', 'model')],
        help="When this account appears on a vendor bill or credit note, "
             "an asset record is generated per linked model.",
        tracking=True,
    )

    # ---- Selection & Computed ----
    create_asset = fields.Selection(
        selection=[
            ('no', 'No'),
            ('draft', 'Create in draft'),
            ('validate', 'Create and validate'),
        ],
        required=True,
        default='no',
        tracking=True,
    )
    can_create_asset = fields.Boolean(
        compute="_compute_asset_creation_eligible",
    )
    form_view_ref = fields.Char(
        compute='_compute_asset_creation_eligible',
    )
    multiple_assets_per_line = fields.Boolean(
        string='Multiple Assets per Line',
        default=False,
        tracking=True,
        help="Generate individual asset records per unit quantity on "
             "the bill line rather than a single consolidated asset.",
    )

    # ---- Compute Methods ----
    @api.depends('account_type')
    def _compute_asset_creation_eligible(self):
        """Determine whether the account type supports automatic
        asset creation and set the appropriate form view reference."""
        eligible_types = ('asset_fixed', 'asset_non_current')
        for acct in self:
            acct.can_create_asset = acct.account_type in eligible_types
            # Bug fix: external ids are "<module>.<xml_id>".  The previous
            # value 'fusion_accountingview_account_asset_form' was missing
            # the dot separator and could never resolve to a view.
            acct.form_view_ref = 'fusion_accounting.view_account_asset_form'

    # ---- Onchange ----
    @api.onchange('create_asset')
    def _onchange_reset_multiple_assets(self):
        """Disable per-line asset splitting when asset creation is turned off."""
        for acct in self:
            if acct.create_asset == 'no':
                acct.multiple_assets_per_line = False

    # ---- Actions ----
    def action_open_reconcile(self):
        """Navigate to unreconciled journal items filtered by this account.

        Returns the act_window dict with this account appended to the
        action's stored domain (which is persisted as a string literal,
        hence the ``ast.literal_eval``).
        """
        self.ensure_one()
        act_data = self.env['ir.actions.act_window']._for_xml_id(
            'fusion_accounting.action_move_line_posted_unreconciled'
        )
        parsed_domain = ast.literal_eval(act_data.get('domain', '[]'))
        parsed_domain.append(('account_id', '=', self.id))
        act_data['domain'] = parsed_domain
        return act_data
|
||||
550
Fusion Accounting/models/account_aged_partner_balance.py
Normal file
550
Fusion Accounting/models/account_aged_partner_balance.py
Normal file
@@ -0,0 +1,550 @@
|
||||
# Fusion Accounting - Aged Partner Balance Report Handler
|
||||
|
||||
import datetime
|
||||
|
||||
from odoo import models, fields, _
|
||||
from odoo.tools import SQL
|
||||
from odoo.tools.misc import format_date
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from itertools import chain
|
||||
|
||||
|
||||
class AgedPartnerBalanceCustomHandler(models.AbstractModel):
|
||||
"""Base handler for aged receivable / payable reports.
|
||||
|
||||
Groups outstanding amounts into configurable aging buckets so the user
|
||||
can visualise how long balances have been open.
|
||||
"""
|
||||
|
||||
_name = 'account.aged.partner.balance.report.handler'
|
||||
_inherit = 'account.report.custom.handler'
|
||||
_description = 'Aged Partner Balance Custom Handler'
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Display & options
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_custom_display_config(self):
|
||||
return {
|
||||
'css_custom_class': 'aged_partner_balance',
|
||||
'templates': {
|
||||
'AccountReportLineName': 'fusion_accounting.AgedPartnerBalanceLineName',
|
||||
},
|
||||
'components': {
|
||||
'AccountReportFilters': 'fusion_accounting.AgedPartnerBalanceFilters',
|
||||
},
|
||||
}
|
||||
|
||||
def _custom_options_initializer(self, report, options, previous_options):
|
||||
"""Configure multi-currency, aging interval, and column labels."""
|
||||
super()._custom_options_initializer(report, options, previous_options=previous_options)
|
||||
|
||||
cols_to_hide = set()
|
||||
|
||||
options['multi_currency'] = report.env.user.has_group('base.group_multi_currency')
|
||||
options['show_currency'] = (
|
||||
options['multi_currency']
|
||||
and (previous_options or {}).get('show_currency', False)
|
||||
)
|
||||
if not options['show_currency']:
|
||||
cols_to_hide.update(['amount_currency', 'currency'])
|
||||
|
||||
options['show_account'] = (previous_options or {}).get('show_account', False)
|
||||
if not options['show_account']:
|
||||
cols_to_hide.add('account_name')
|
||||
|
||||
options['columns'] = [
|
||||
c for c in options['columns']
|
||||
if c['expression_label'] not in cols_to_hide
|
||||
]
|
||||
|
||||
options['order_column'] = previous_options.get('order_column') or {
|
||||
'expression_label': 'invoice_date',
|
||||
'direction': 'ASC',
|
||||
}
|
||||
options['aging_based_on'] = previous_options.get('aging_based_on') or 'base_on_maturity_date'
|
||||
options['aging_interval'] = previous_options.get('aging_interval') or 30
|
||||
|
||||
# Relabel period columns to reflect the chosen interval
|
||||
bucket_size = options['aging_interval']
|
||||
for col in options['columns']:
|
||||
label = col['expression_label']
|
||||
if label.startswith('period'):
|
||||
bucket_idx = int(label.replace('period', '')) - 1
|
||||
if 0 <= bucket_idx < 4:
|
||||
lo = bucket_size * bucket_idx + 1
|
||||
hi = bucket_size * (bucket_idx + 1)
|
||||
col['name'] = f'{lo}-{hi}'
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Post-processing
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _custom_line_postprocessor(self, report, options, lines):
|
||||
"""Inject the partner trust level into each partner line."""
|
||||
partner_line_map = {}
|
||||
for ln in lines:
|
||||
mdl, mid = report._get_model_info_from_id(ln['id'])
|
||||
if mdl == 'res.partner':
|
||||
partner_line_map[mid] = ln
|
||||
|
||||
if partner_line_map:
|
||||
partners = self.env['res.partner'].browse(partner_line_map)
|
||||
for partner, ln_dict in zip(partners, partner_line_map.values()):
|
||||
ln_dict['trust'] = partner.with_company(
|
||||
partner.company_id or self.env.company
|
||||
).trust
|
||||
|
||||
return lines
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Report engines
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _report_custom_engine_aged_receivable(
|
||||
self, expressions, options, date_scope,
|
||||
current_groupby, next_groupby,
|
||||
offset=0, limit=None, warnings=None,
|
||||
):
|
||||
return self._aged_partner_report_custom_engine_common(
|
||||
options, 'asset_receivable', current_groupby, next_groupby,
|
||||
offset=offset, limit=limit,
|
||||
)
|
||||
|
||||
def _report_custom_engine_aged_payable(
|
||||
self, expressions, options, date_scope,
|
||||
current_groupby, next_groupby,
|
||||
offset=0, limit=None, warnings=None,
|
||||
):
|
||||
return self._aged_partner_report_custom_engine_common(
|
||||
options, 'liability_payable', current_groupby, next_groupby,
|
||||
offset=offset, limit=limit,
|
||||
)
|
||||
|
||||
    def _aged_partner_report_custom_engine_common(
        self, options, account_type, current_groupby, next_groupby,
        offset=0, limit=None,
    ):
        """Core query and aggregation logic shared by receivable and payable.

        Builds aging periods dynamically from the chosen interval, runs a
        single SQL query that joins against a period table, and returns
        either a flat result or a list of ``(grouping_key, result)`` pairs.

        :param options: report options dict; must contain ``report_id``,
            ``aging_based_on``, ``aging_interval``, ``date`` and ``columns``.
        :param account_type: 'asset_receivable' or 'liability_payable'.
        :param current_groupby: field name to group by, or falsy for totals.
        :param next_groupby: comma-separated further groupby fields (only
            validated here, not applied).
        """
        report = self.env['account.report'].browse(options['report_id'])
        report._check_groupby_fields(
            (next_groupby.split(',') if next_groupby else [])
            + ([current_groupby] if current_groupby else [])
        )

        def _subtract_days(dt, n):
            # Date string n days before dt (used for bucket boundaries).
            return fields.Date.to_string(dt - relativedelta(days=n))

        # Determine the aging date field
        if options['aging_based_on'] == 'base_on_invoice_date':
            age_field = SQL.identifier('invoice_date')
        else:
            age_field = SQL.identifier('date_maturity')

        report_end = fields.Date.from_string(options['date']['date_to'])
        interval = options['aging_interval']

        # Build period boundaries: [(start_or_None, stop_or_None), ...]
        # period0 is "not due yet" (everything up to the report date);
        # the last bucket is open-ended (stop is False).
        period_list = [(False, fields.Date.to_string(report_end))]
        period_col_count = len([
            c for c in options['columns'] if c['expression_label'].startswith('period')
        ]) - 1

        for p in range(period_col_count):
            p_start = _subtract_days(report_end, interval * p + 1)
            p_stop = _subtract_days(report_end, interval * (p + 1)) if p < period_col_count - 1 else False
            period_list.append((p_start, p_stop))

        # Helper: aggregate query rows into the result dictionary.
        # NOTE(review): the `rpt` parameter is unused; rows come from the
        # dictfetchall below.  ARRAY_AGG columns arrive as Python lists, so
        # "len(...) == 1" means the value was unambiguous across the group.
        def _aggregate_rows(rpt, rows):
            agg = {f'period{k}': 0 for k in range(len(period_list))}
            for r in rows:
                for k in range(len(period_list)):
                    agg[f'period{k}'] += r[f'period{k}']

            if current_groupby == 'id':
                # Grouping by move line id: each group is a single line,
                # so expose its detail columns directly.
                single = rows[0]
                cur_obj = (
                    self.env['res.currency'].browse(single['currency_id'][0])
                    if len(single['currency_id']) == 1 else None
                )
                agg.update({
                    'invoice_date': single['invoice_date'][0] if len(single['invoice_date']) == 1 else None,
                    'due_date': single['due_date'][0] if len(single['due_date']) == 1 else None,
                    'amount_currency': single['amount_currency'],
                    'currency_id': single['currency_id'][0] if len(single['currency_id']) == 1 else None,
                    'currency': cur_obj.display_name if cur_obj else None,
                    'account_name': single['account_name'][0] if len(single['account_name']) == 1 else None,
                    'total': None,
                    'has_sublines': single['aml_count'] > 0,
                    'partner_id': single['partner_id'][0] if single['partner_id'] else None,
                })
            else:
                # Aggregated rows: detail columns are meaningless, only the
                # period totals and their grand total are reported.
                agg.update({
                    'invoice_date': None,
                    'due_date': None,
                    'amount_currency': None,
                    'currency_id': None,
                    'currency': None,
                    'account_name': None,
                    'total': sum(agg[f'period{k}'] for k in range(len(period_list))),
                    'has_sublines': False,
                })
            return agg

        # Build the VALUES clause for the period table
        period_vals_fmt = '(VALUES %s)' % ','.join('(%s, %s, %s)' for _ in period_list)
        flat_params = list(chain.from_iterable(
            (p[0] or None, p[1] or None, idx)
            for idx, p in enumerate(period_list)
        ))
        period_tbl = SQL(period_vals_fmt, *flat_params)

        # Build the main report query
        base_qry = report._get_report_query(
            options, 'strict_range',
            domain=[('account_id.account_type', '=', account_type)],
        )
        acct_alias = base_qry.left_join(
            lhs_alias='account_move_line', lhs_column='account_id',
            rhs_table='account_account', rhs_column='id', link='account_id',
        )
        acct_code_sql = self.env['account.account']._field_to_sql(acct_alias, 'code', base_qry)

        fixed_groupby = SQL("period_table.period_index")
        if current_groupby:
            select_grp = SQL("%s AS grouping_key,", SQL.identifier("account_move_line", current_groupby))
            full_groupby = SQL("%s, %s", SQL.identifier("account_move_line", current_groupby), fixed_groupby)
        else:
            select_grp = SQL()
            full_groupby = fixed_groupby

        # Payables are credit-natured: flip the sign so amounts show positive.
        sign = -1 if account_type == 'liability_payable' else 1

        # One CASE column per aging bucket; the residual balance is the line
        # balance net of its debit/credit partial reconciliations.
        period_case_sql = SQL(',').join(
            SQL("""
                CASE WHEN period_table.period_index = %(idx)s
                THEN %(sign)s * SUM(%(bal)s)
                ELSE 0 END AS %(col_id)s
            """,
            idx=n,
            sign=sign,
            col_id=SQL.identifier(f"period{n}"),
            bal=report._currency_table_apply_rate(SQL(
                "account_move_line.balance"
                " - COALESCE(part_debit.amount, 0)"
                " + COALESCE(part_credit.amount, 0)"
            )),
            )
            for n in range(len(period_list))
        )

        tail_sql = report._get_engine_query_tail(offset, limit)

        full_sql = SQL(
            """
            WITH period_table(date_start, date_stop, period_index) AS (%(period_tbl)s)

            SELECT
                %(select_grp)s
                %(sign)s * (
                    SUM(account_move_line.amount_currency)
                    - COALESCE(SUM(part_debit.debit_amount_currency), 0)
                    + COALESCE(SUM(part_credit.credit_amount_currency), 0)
                ) AS amount_currency,
                ARRAY_AGG(DISTINCT account_move_line.partner_id) AS partner_id,
                ARRAY_AGG(account_move_line.payment_id) AS payment_id,
                ARRAY_AGG(DISTINCT move.invoice_date) AS invoice_date,
                ARRAY_AGG(DISTINCT COALESCE(account_move_line.%(age_field)s, account_move_line.date)) AS report_date,
                ARRAY_AGG(DISTINCT %(acct_code)s) AS account_name,
                ARRAY_AGG(DISTINCT COALESCE(account_move_line.%(age_field)s, account_move_line.date)) AS due_date,
                ARRAY_AGG(DISTINCT account_move_line.currency_id) AS currency_id,
                COUNT(account_move_line.id) AS aml_count,
                ARRAY_AGG(%(acct_code)s) AS account_code,
                %(period_case_sql)s

            FROM %(tbl_refs)s
            JOIN account_journal jnl ON jnl.id = account_move_line.journal_id
            JOIN account_move move ON move.id = account_move_line.move_id
            %(fx_join)s

            LEFT JOIN LATERAL (
                SELECT
                    SUM(pr.amount) AS amount,
                    SUM(pr.debit_amount_currency) AS debit_amount_currency,
                    pr.debit_move_id
                FROM account_partial_reconcile pr
                WHERE pr.max_date <= %(cutoff)s AND pr.debit_move_id = account_move_line.id
                GROUP BY pr.debit_move_id
            ) part_debit ON TRUE

            LEFT JOIN LATERAL (
                SELECT
                    SUM(pr.amount) AS amount,
                    SUM(pr.credit_amount_currency) AS credit_amount_currency,
                    pr.credit_move_id
                FROM account_partial_reconcile pr
                WHERE pr.max_date <= %(cutoff)s AND pr.credit_move_id = account_move_line.id
                GROUP BY pr.credit_move_id
            ) part_credit ON TRUE

            JOIN period_table ON
                (
                    period_table.date_start IS NULL
                    OR COALESCE(account_move_line.%(age_field)s, account_move_line.date) <= DATE(period_table.date_start)
                )
                AND
                (
                    period_table.date_stop IS NULL
                    OR COALESCE(account_move_line.%(age_field)s, account_move_line.date) >= DATE(period_table.date_stop)
                )

            WHERE %(where_cond)s

            GROUP BY %(full_groupby)s

            HAVING
                ROUND(SUM(%(having_dr)s), %(precision)s) != 0
                OR ROUND(SUM(%(having_cr)s), %(precision)s) != 0

            ORDER BY %(full_groupby)s

            %(tail)s
            """,
            acct_code=acct_code_sql,
            period_tbl=period_tbl,
            select_grp=select_grp,
            period_case_sql=period_case_sql,
            sign=sign,
            age_field=age_field,
            tbl_refs=base_qry.from_clause,
            fx_join=report._currency_table_aml_join(options),
            cutoff=report_end,
            where_cond=base_qry.where_clause,
            full_groupby=full_groupby,
            # A line is kept only if it still has an unreconciled residual on
            # either its debit or credit side at the cutoff date.
            having_dr=report._currency_table_apply_rate(SQL(
                "CASE WHEN account_move_line.balance > 0 THEN account_move_line.balance ELSE 0 END"
                " - COALESCE(part_debit.amount, 0)"
            )),
            having_cr=report._currency_table_apply_rate(SQL(
                "CASE WHEN account_move_line.balance < 0 THEN -account_move_line.balance ELSE 0 END"
                " - COALESCE(part_credit.amount, 0)"
            )),
            precision=self.env.company.currency_id.decimal_places,
            tail=tail_sql,
        )

        self.env.cr.execute(full_sql)
        fetched_rows = self.env.cr.dictfetchall()

        if not current_groupby:
            return _aggregate_rows(report, fetched_rows)

        # Group rows by their grouping key
        grouped = {}
        for row in fetched_rows:
            gk = row['grouping_key']
            grouped.setdefault(gk, []).append(row)

        return [(gk, _aggregate_rows(report, rows)) for gk, rows in grouped.items()]
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Actions
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def open_journal_items(self, options, params):
|
||||
params['view_ref'] = 'account.view_move_line_tree_grouped_partner'
|
||||
audit_opts = {**options, 'date': {**options['date'], 'date_from': None}}
|
||||
report = self.env['account.report'].browse(options['report_id'])
|
||||
action = report.open_journal_items(options=audit_opts, params=params)
|
||||
action.get('context', {}).update({
|
||||
'search_default_group_by_account': 0,
|
||||
'search_default_group_by_partner': 1,
|
||||
})
|
||||
return action
|
||||
|
||||
def open_partner_ledger(self, options, params):
|
||||
report = self.env['account.report'].browse(options['report_id'])
|
||||
rec_model, rec_id = report._get_model_info_from_id(params.get('line_id'))
|
||||
return self.env[rec_model].browse(rec_id).open_partner_ledger()
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Batch unfold
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
    def _common_custom_unfold_all_batch_data_generator(
        self, account_type, report, options, lines_to_expand_by_function,
    ):
        """Pre-load all data needed to unfold every partner in one pass.

        Runs the common engine once per column group at move-line
        granularity, then buckets the results first by partner (for the
        partner rows) and again by move line (for the sub-rows), producing
        the ``{expansion_key: {column_group: {expression: {'value': [...]}}}}``
        structure the generic report expansion consumes.

        :param account_type: 'asset_receivable' or 'liability_payable'.
        :param lines_to_expand_by_function: mapping of expand-function name
            to the report lines requesting expansion.
        """
        output = {}
        # NOTE(review): assumes exactly 6 aging columns (period0..period5);
        # confirm this matches the report's column definition.
        num_periods = 6

        for fn_name, expand_lines in lines_to_expand_by_function.items():
            for target_line in expand_lines:
                # Only the generic groupby expansion is batched here.
                if fn_name != '_report_expand_unfoldable_line_with_groupby':
                    continue

                report_line_id = report._get_res_id_from_line_id(target_line['id'], 'account.report.line')
                custom_exprs = report.line_ids.expression_ids.filtered(
                    lambda x: x.report_line_id.id == report_line_id and x.engine == 'custom'
                )
                if not custom_exprs:
                    continue

                for cg_key, cg_opts in report._split_options_per_column_group(options).items():
                    # Fetch every move line once, bucketed by partner.
                    by_partner = {}
                    for aml_id, aml_vals in self._aged_partner_report_custom_engine_common(
                        cg_opts, account_type, 'id', None,
                    ):
                        aml_vals['aml_id'] = aml_id
                        by_partner.setdefault(aml_vals['partner_id'], []).append(aml_vals)

                    # Accumulator for the partner-level rows of this line.
                    partner_expr_totals = (
                        output
                        .setdefault(f"[{report_line_id}]=>partner_id", {})
                        .setdefault(cg_key, {expr: {'value': []} for expr in custom_exprs})
                    )

                    for pid, aml_list in by_partner.items():
                        # Fresh per-partner totals, one slot per period bucket.
                        pv = self._prepare_partner_values()
                        for k in range(num_periods):
                            pv[f'period{k}'] = 0

                        # Accumulator for this partner's move-line sub-rows.
                        aml_expr_totals = (
                            output
                            .setdefault(f"[{report_line_id}]partner_id:{pid}=>id", {})
                            .setdefault(cg_key, {expr: {'value': []} for expr in custom_exprs})
                        )

                        for aml_data in aml_list:
                            for k in range(num_periods):
                                period_val = aml_data[f'period{k}']
                                pv[f'period{k}'] += period_val
                                pv['total'] += period_val

                            for expr in custom_exprs:
                                # expr.subformula names the value key to expose.
                                aml_expr_totals[expr]['value'].append(
                                    (aml_data['aml_id'], aml_data[expr.subformula])
                                )

                        for expr in custom_exprs:
                            partner_expr_totals[expr]['value'].append(
                                (pid, pv[expr.subformula])
                            )

        return output
|
||||
|
||||
def _prepare_partner_values(self):
|
||||
return {
|
||||
'invoice_date': None,
|
||||
'due_date': None,
|
||||
'amount_currency': None,
|
||||
'currency_id': None,
|
||||
'currency': None,
|
||||
'account_name': None,
|
||||
'total': 0,
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Audit action
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def aged_partner_balance_audit(self, options, params, journal_type):
|
||||
"""Open a filtered list of invoices / bills for the clicked cell."""
|
||||
report = self.env['account.report'].browse(options['report_id'])
|
||||
action = self.env['ir.actions.actions']._for_xml_id('account.action_amounts_to_settle')
|
||||
|
||||
excluded_type = {'purchase': 'sale', 'sale': 'purchase'}
|
||||
if options:
|
||||
action['domain'] = [
|
||||
('account_id.reconcile', '=', True),
|
||||
('journal_id.type', '!=', excluded_type.get(journal_type)),
|
||||
*self._build_domain_from_period(options, params['expression_label']),
|
||||
*report._get_options_domain(options, 'from_beginning'),
|
||||
*report._get_audit_line_groupby_domain(params['calling_line_dict_id']),
|
||||
]
|
||||
return action
|
||||
|
||||
def _build_domain_from_period(self, options, period_label):
|
||||
"""Translate a period column label into a date-maturity domain."""
|
||||
if period_label == 'total' or not period_label[-1].isdigit():
|
||||
return []
|
||||
|
||||
bucket_num = int(period_label[-1])
|
||||
end_date = datetime.datetime.strptime(options['date']['date_to'], '%Y-%m-%d')
|
||||
|
||||
if bucket_num == 0:
|
||||
return [('date_maturity', '>=', options['date']['date_to'])]
|
||||
|
||||
upper_bound = end_date - datetime.timedelta(30 * (bucket_num - 1) + 1)
|
||||
lower_bound = end_date - datetime.timedelta(30 * bucket_num)
|
||||
|
||||
if bucket_num == 5:
|
||||
return [('date_maturity', '<=', lower_bound)]
|
||||
|
||||
return [
|
||||
('date_maturity', '>=', lower_bound),
|
||||
('date_maturity', '<=', upper_bound),
|
||||
]
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Payable sub-handler
|
||||
# ======================================================================
|
||||
|
||||
class AgedPayableCustomHandler(models.AbstractModel):
    """Specialised handler for aged payable balances."""

    _name = 'account.aged.payable.report.handler'
    _inherit = 'account.aged.partner.balance.report.handler'
    _description = 'Aged Payable Custom Handler'

    def open_journal_items(self, options, params):
        """Pre-select the Payable account-type filter, then delegate."""
        options.setdefault('account_type', []).append(
            {'id': 'trade_payable', 'name': _("Payable"), 'selected': True}
        )
        return super().open_journal_items(options, params)

    def _custom_unfold_all_batch_data_generator(self, report, options, lines_to_expand_by_function):
        """Batch-unfold only when the payable line groups by partner then id."""
        groupby_spec = self.env.ref('fusion_accounting.aged_payable_line').groupby
        if groupby_spec.replace(' ', '') != 'partner_id,id':
            return {}
        return self._common_custom_unfold_all_batch_data_generator(
            'liability_payable', report, options, lines_to_expand_by_function,
        )

    def action_audit_cell(self, options, params):
        """Audit a payable cell against purchase-side journals."""
        return super().aged_partner_balance_audit(options, params, 'purchase')
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Receivable sub-handler
|
||||
# ======================================================================
|
||||
|
||||
class AgedReceivableCustomHandler(models.AbstractModel):
    """Specialised handler for aged receivable balances."""

    _name = 'account.aged.receivable.report.handler'
    _inherit = 'account.aged.partner.balance.report.handler'
    _description = 'Aged Receivable Custom Handler'

    def open_journal_items(self, options, params):
        """Pre-select the Receivable account-type filter, then delegate."""
        options.setdefault('account_type', []).append(
            {'id': 'trade_receivable', 'name': _("Receivable"), 'selected': True}
        )
        return super().open_journal_items(options, params)

    def _custom_unfold_all_batch_data_generator(self, report, options, lines_to_expand_by_function):
        """Batch-unfold only when the receivable line groups by partner then id."""
        groupby_spec = self.env.ref('fusion_accounting.aged_receivable_line').groupby
        if groupby_spec.replace(' ', '') != 'partner_id,id':
            return {}
        return self._common_custom_unfold_all_batch_data_generator(
            'asset_receivable', report, options, lines_to_expand_by_function,
        )

    def action_audit_cell(self, options, params):
        """Audit a receivable cell against sale-side journals."""
        return super().aged_partner_balance_audit(options, params, 'sale')
|
||||
341
Fusion Accounting/models/account_analytic_report.py
Normal file
341
Fusion Accounting/models/account_analytic_report.py
Normal file
@@ -0,0 +1,341 @@
|
||||
# Fusion Accounting - Analytic Group By for Financial Reports
|
||||
# Enables analytic plan / account column grouping on account reports
|
||||
|
||||
from odoo import models, fields, api, osv
|
||||
from odoo.addons.web.controllers.utils import clean_action
|
||||
from odoo.tools import SQL, Query
|
||||
|
||||
|
||||
class FusionReportAnalyticGroupby(models.AbstractModel):
|
||||
"""Extends the accounting report engine to support grouping by
|
||||
analytic accounts or plans via shadow-table substitution."""
|
||||
|
||||
_inherit = 'account.report'
|
||||
|
||||
filter_analytic_groupby = fields.Boolean(
|
||||
string="Analytic Group By",
|
||||
compute=lambda self: self._compute_report_option_filter('filter_analytic_groupby'),
|
||||
readonly=False,
|
||||
store=True,
|
||||
depends=['root_report_id'],
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Initialization sequencing
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_options_initializers_forced_sequence_map(self):
    """Register the analytic-groupby initializer at sequence 995 —
    after column-header creation but before the final column build."""
    sequence_map = super()._get_options_initializers_forced_sequence_map()
    sequence_map.update({self._init_options_analytic_groupby: 995})
    return sequence_map
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Option initializer
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _init_options_analytic_groupby(self, options, previous_options):
    """Populate analytic groupby filters in *options* when the report
    advertises ``filter_analytic_groupby`` and the current user belongs
    to the analytic-accounting group; otherwise leave *options* alone.
    """
    if not self.filter_analytic_groupby:
        return
    if not self.env.user.has_group('analytic.group_analytic_accounting'):
        return

    options['display_analytic_groupby'] = True
    options['display_analytic_plan_groupby'] = True

    # Carry over the "analytic lines without journal items" toggle.
    options['include_analytic_without_aml'] = previous_options.get(
        'include_analytic_without_aml', False,
    )

    # Restore previously selected analytic accounts (archived included,
    # hence active_test=False).
    wanted_account_ids = [
        int(v) for v in previous_options.get('analytic_accounts_groupby', [])
    ]
    accounts = (
        self.env['account.analytic.account']
        .with_context(active_test=False)
        .search([('id', 'in', wanted_account_ids)])
    )
    options['analytic_accounts_groupby'] = accounts.ids
    options['selected_analytic_account_groupby_names'] = accounts.mapped('name')

    # Restore previously selected analytic plans.
    wanted_plan_ids = [
        int(v) for v in previous_options.get('analytic_plans_groupby', [])
    ]
    plans = self.env['account.analytic.plan'].search([('id', 'in', wanted_plan_ids)])
    options['analytic_plans_groupby'] = plans.ids
    options['selected_analytic_plan_groupby_names'] = plans.mapped('name')

    self._build_analytic_column_headers(options)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Readonly-query interaction
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _init_options_readonly_query(self, options, previous_options):
    """Disable the readonly-query shortcut for analytic columns.

    Analytic columns read from a temporary shadow table that must be
    created in the current transaction, which is incompatible with the
    readonly cursor optimisation.
    """
    super()._init_options_readonly_query(options, previous_options)
    uses_shadow_table = options.get('analytic_groupby_option')
    options['readonly_query'] = options['readonly_query'] and not uses_shadow_table
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Column header generation
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _build_analytic_column_headers(self, options):
    """Create extra column headers for every selected analytic plan or
    individual analytic account.

    Each header forces ``analytic_groupby_option`` plus the list of
    analytic-account ids its column must be restricted to.
    """
    Plan = self.env['account.analytic.plan']
    Account = self.env['account.analytic.account']
    analytic_headers = []

    # One column per selected plan, covering the accounts of the plan
    # and of its child plans.
    for plan in Plan.browse(options.get('analytic_plans_groupby')):
        plan_accounts = Account.search([
            ('plan_id', 'child_of', plan.id),
        ])
        analytic_headers.append({
            'name': plan.name,
            'forced_options': {
                'analytic_groupby_option': True,
                'analytic_accounts_list': tuple(plan_accounts.ids),
            },
        })

    # One column per individually selected analytic account.
    for account in Account.browse(options.get('analytic_accounts_groupby')):
        analytic_headers.append({
            'name': account.name,
            'forced_options': {
                'analytic_groupby_option': True,
                'analytic_accounts_list': (account.id,),
            },
        })

    if not analytic_headers:
        return

    budget_selected = any(
        budget for budget in options.get('budgets', []) if budget.get('selected')
    )
    if budget_selected:
        # Place analytic headers next to budget headers on the same level.
        options['column_headers'][-1] = analytic_headers + options['column_headers'][-1]
    else:
        # Append a new header tier for analytic columns + a blank for totals.
        options['column_headers'] = options['column_headers'] + [
            analytic_headers + [{'name': ''}],
        ]
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Shadow-table preparation
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@api.model
def _prepare_lines_for_analytic_groupby(self):
    """Build a temporary ``analytic_temp_account_move_line`` table that
    mirrors the *account_move_line* schema but is populated from
    *account_analytic_line*. Created once per SQL transaction.

    The shadow table lets the report engine run its usual AML queries
    against analytic lines transparently (see ``_where_calc`` override).
    It is dropped automatically at commit (``ON COMMIT DROP``).
    """
    # Fast exit: the temp table already exists in this transaction.
    self.env.cr.execute(
        "SELECT 1 FROM information_schema.tables "
        "WHERE table_name = 'analytic_temp_account_move_line'"
    )
    if self.env.cr.fetchone():
        return  # already prepared in this transaction

    root_plan, additional_plans = self.env['account.analytic.plan']._get_all_plans()
    all_plans = root_plan + additional_plans

    # One column reference per analytic plan on account_analytic_line;
    # UNNEST fans each analytic line out to one row per plan column so
    # the distribution can be stored as JSON.
    analytic_col_refs = SQL(", ").join(
        SQL('"account_analytic_line".%s', SQL.identifier(p._column_name()))
        for p in all_plans
    )
    distribution_expr = SQL(
        'to_jsonb(UNNEST(ARRAY[%s]))', analytic_col_refs,
    )

    # Explicit expressions for the fields whose value differs from the
    # linked journal item. Sign convention: analytic `amount` is the
    # negation of the accounting balance.
    field_mapping = {
        'id': SQL("account_analytic_line.id"),
        'balance': SQL("-amount"),
        'display_type': 'product',
        'parent_state': 'posted',
        'account_id': SQL.identifier("general_account_id"),
        'debit': SQL("CASE WHEN amount < 0 THEN -amount ELSE 0 END"),
        'credit': SQL("CASE WHEN amount > 0 THEN amount ELSE 0 END"),
        'analytic_distribution': distribution_expr,
    }

    # Fill in the remaining stored fields with values from the linked AML
    aml_fields_meta = self.env['account.move.line'].fields_get()
    persisted_fields = {
        fname
        for fname, meta in aml_fields_meta.items()
        if meta['type'] not in ('many2many', 'one2many') and meta.get('store')
    }
    for fname in persisted_fields:
        if fname not in field_mapping:
            field_mapping[fname] = SQL('"account_move_line".%s', SQL.identifier(fname))

    col_names_sql, val_exprs_sql = (
        self.env['account.move.line']._prepare_aml_shadowing_for_report(field_mapping)
    )

    # INHERITS copies the AML schema, then NO INHERIT detaches the table
    # so rows never leak into queries on the real account_move_line.
    # Constraints that analytic rows cannot satisfy are relaxed.
    shadow_sql = SQL("""
        -- Build temporary shadow table inheriting AML schema
        CREATE TEMPORARY TABLE IF NOT EXISTS analytic_temp_account_move_line ()
        INHERITS (account_move_line) ON COMMIT DROP;
        ALTER TABLE analytic_temp_account_move_line NO INHERIT account_move_line;
        ALTER TABLE analytic_temp_account_move_line
        DROP CONSTRAINT IF EXISTS account_move_line_check_amount_currency_balance_sign;
        ALTER TABLE analytic_temp_account_move_line ALTER COLUMN move_id DROP NOT NULL;
        ALTER TABLE analytic_temp_account_move_line ALTER COLUMN currency_id DROP NOT NULL;

        INSERT INTO analytic_temp_account_move_line (%(col_names)s)
        SELECT %(val_exprs)s
        FROM account_analytic_line
        LEFT JOIN account_move_line
        ON account_analytic_line.move_line_id = account_move_line.id
        WHERE account_analytic_line.general_account_id IS NOT NULL;

        CREATE INDEX IF NOT EXISTS analytic_temp_aml__composite_idx
        ON analytic_temp_account_move_line (analytic_distribution, journal_id, date, company_id);

        ANALYZE analytic_temp_account_move_line;
    """, col_names=col_names_sql, val_exprs=val_exprs_sql)

    self.env.cr.execute(shadow_sql)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Query overrides
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_report_query(self, options, date_scope, domain=None) -> Query:
    """When analytic-groupby columns are active, inject the context
    flag that causes `_where_calc` to swap the AML table.

    Also appends the analytic-account restriction when the generic
    'analytic_accounts' filter is active, with a different SQL shape
    depending on which table the query reads from.
    """
    ctx_self = self.with_context(
        account_report_analytic_groupby=options.get('analytic_groupby_option'),
    )
    query = super(FusionReportAnalyticGroupby, ctx_self)._get_report_query(options, date_scope, domain)

    if options.get('analytic_accounts'):
        if 'analytic_accounts_list' in options:
            # Shadow table stores bare integer ids in analytic_distribution
            acct_str_ids = tuple(str(aid) for aid in options['analytic_accounts'])
            query.add_where(SQL(
                "account_move_line.analytic_distribution IN %s",
                acct_str_ids,
            ))
        else:
            # Real AML table – JSON distribution with percentages
            # NOTE(review): the double-nested list is intentional so the
            # parameter adapts to a single text[] for the && overlap
            # operator — confirm against psycopg2 list adaptation.
            acct_id_list = [[str(aid) for aid in options['analytic_accounts']]]
            query.add_where(SQL(
                '%s && %s',
                acct_id_list,
                self.env['account.move.line']._query_analytic_accounts(),
            ))

    return query
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Audit action
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def action_audit_cell(self, options, params):
    """Redirect the audit action to analytic lines when the column
    being audited belongs to an analytic-groupby column group.

    Non-analytic columns keep the standard journal-item audit. For
    analytic columns, the AML audit domain is translated leaf-by-leaf
    into an account.analytic.line domain.
    """
    col_opts = self._get_column_group_options(options, params['column_group_key'])

    if not col_opts.get('analytic_groupby_option'):
        return super().action_audit_cell(options, params)

    # Translate AML domain → analytic line domain
    rpt_line = self.env['account.report.line'].browse(params['report_line_id'])
    expr = rpt_line.expression_ids.filtered(lambda e: e.label == params['expression_label'])
    raw_domain = self._get_audit_line_domain(col_opts, expr, params)

    AnalyticLine = self.env['account.analytic.line']
    converted_domain = []
    for leaf in raw_domain:
        # Logical operators ('&', '|', '!') pass through unchanged.
        if len(leaf) == 1:
            converted_domain.append(leaf)
            continue

        fld, op, val = leaf
        root_field = fld.split('.')[0]

        if root_field == 'account_id':
            # Financial account lives on general_account_id here.
            converted_domain.append((fld.replace('account_id', 'general_account_id'), op, val))
        elif fld == 'analytic_distribution':
            converted_domain.append(('auto_account_id', 'in', val))
        elif root_field not in AnalyticLine._fields:
            # Field only exists on the journal item: reach it through
            # the move_line_id relation.
            expr_leaf = [(f'move_line_id.{fld}', op, val)]
            # NOTE(review): this reads the toggle from `options`, while
            # the rest of the method uses `col_opts` — confirm intended.
            if options.get('include_analytic_without_aml'):
                expr_leaf = osv.expression.OR([
                    [('move_line_id', '=', False)],
                    expr_leaf,
                ])
            converted_domain.extend(expr_leaf)
        else:
            converted_domain.append(leaf)

    act = clean_action(
        self.env.ref('analytic.account_analytic_line_action_entries')._get_action_dict(),
        env=self.env,
    )
    act['domain'] = converted_domain
    return act
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Journal domain
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@api.model
def _get_options_journals_domain(self, options):
    """Allow journal-less lines when analytic lines without a parent
    move line are included in the report."""
    journal_domain = super()._get_options_journals_domain(options)
    if not options.get('include_analytic_without_aml'):
        return journal_domain
    return osv.expression.OR([
        journal_domain,
        [('journal_id', '=', False)],
    ])
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Options domain
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_options_domain(self, options, date_scope):
    """Restrict the domain to the analytic accounts of the current
    column group, when the column carries an ``analytic_accounts_list``
    (set through its forced options)."""
    self.ensure_one()
    domain = super()._get_options_domain(options, date_scope)

    analytic_account_ids = options.get('analytic_accounts_list')
    if not analytic_account_ids:
        return domain

    return osv.expression.AND([
        domain,
        [('analytic_distribution', 'in', list(analytic_account_ids))],
    ])
|
||||
|
||||
|
||||
class FusionAMLAnalyticShadow(models.Model):
    """Hooks into `_where_calc` to swap the AML table for the analytic
    shadow table when the report context flag is set."""

    _inherit = "account.move.line"

    def _where_calc(self, domain, active_test=True):
        """Replace the base ``account_move_line`` table reference with the
        ``analytic_temp_account_move_line`` shadow table whenever the
        ``account_report_analytic_groupby`` context key is truthy, unless
        a cash-basis report is active (which already replaces the table)."""
        qry = super()._where_calc(domain, active_test)
        ctx = self.env.context
        if ctx.get('account_report_analytic_groupby') and not ctx.get('account_report_cash_basis'):
            # Ensure the shadow table exists in this transaction before
            # any query aliasing it is executed.
            self.env['account.report']._prepare_lines_for_analytic_groupby()
            # NOTE(review): pokes Query's private `_tables` mapping —
            # fragile across ORM versions; confirm on upgrade.
            qry._tables['account_move_line'] = SQL.identifier('analytic_temp_account_move_line')
        return qry
|
||||
2191
Fusion Accounting/models/account_asset.py
Normal file
2191
Fusion Accounting/models/account_asset.py
Normal file
File diff suppressed because it is too large
Load Diff
330
Fusion Accounting/models/account_bank_statement.py
Normal file
330
Fusion Accounting/models/account_bank_statement.py
Normal file
@@ -0,0 +1,330 @@
|
||||
# Fusion Accounting - Bank Statement & Statement Line Extensions
|
||||
# Reconciliation widget support, auto-reconciliation CRON, partner matching
|
||||
|
||||
import logging
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from itertools import product
|
||||
from markupsafe import Markup
|
||||
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.addons.base.models.res_bank import sanitize_account_number
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.osv import expression
|
||||
from odoo.tools import html2plaintext
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionBankStatement(models.Model):
    """Extends bank statements with reconciliation widget integration
    and PDF attachment generation."""

    _name = "account.bank.statement"
    _inherit = ['mail.thread.main.attachment', 'account.bank.statement']

    # ---- Actions ----
    def action_open_bank_reconcile_widget(self):
        """Launch the bank reconciliation widget scoped to this statement.

        :return: an ir.actions.act_window dict filtered on this
            statement's lines, with statement/journal search defaults.
        """
        self.ensure_one()
        return self.env['account.bank.statement.line']._action_open_bank_reconciliation_widget(
            name=self.name,
            default_context={
                'search_default_statement_id': self.id,
                'search_default_journal_id': self.journal_id.id,
            },
            extra_domain=[('statement_id', '=', self.id)],
        )

    def action_generate_attachment(self):
        """Render each statement as a PDF, attach it to the record, and
        return the standard report action for download.

        :return: the report action of ``account.action_report_account_statement``
            bound to the current recordset.
        """
        report_sudo = self.env['ir.actions.report'].sudo()
        stmt_report_action = self.env.ref('account.action_report_account_statement')
        for stmt in self:
            stmt_report = stmt_report_action.sudo()
            # Render with sudo: users allowed to see the statement may
            # lack direct report-rendering rights.
            pdf_bytes, _mime = report_sudo._render_qweb_pdf(
                stmt_report, res_ids=stmt.ids,
            )
            filename = (
                _("Bank Statement %s.pdf", stmt.name)
                if stmt.name
                else _("Bank Statement.pdf")
            )
            stmt.attachment_ids |= self.env['ir.attachment'].create({
                'name': filename,
                'type': 'binary',
                'mimetype': 'application/pdf',
                'raw': pdf_bytes,
                'res_model': stmt._name,
                'res_id': stmt.id,
            })
        return stmt_report_action.report_action(docids=self)
|
||||
|
||||
|
||||
class FusionBankStatementLine(models.Model):
    """Extends bank statement lines with reconciliation workflow,
    automated matching via CRON, and partner detection heuristics."""

    _inherit = 'account.bank.statement.line'

    # ---- Fields ----
    # Timestamp of the last auto-reconcile CRON pass over this line;
    # lines never checked (NULL) are processed first.
    cron_last_check = fields.Datetime()

    # Ensure each imported transaction is unique
    unique_import_id = fields.Char(
        string='Import ID',
        readonly=True,
        copy=False,
    )

    # DB-level uniqueness on the import identifier (NULLs are exempt,
    # so manually created lines are unaffected).
    _sql_constraints = [
        (
            'unique_import_id',
            'unique (unique_import_id)',
            'A bank account transaction can be imported only once!',
        ),
    ]
|
||||
|
||||
# ---- Quick Actions ----
|
||||
def action_save_close(self):
    """Close the current form after saving.

    :return: an ``ir.actions.act_window_close`` client action dict.
    """
    close_window = {'type': 'ir.actions.act_window_close'}
    return close_window
|
||||
|
||||
def action_save_new(self):
    """Save and immediately open a fresh statement line form,
    pre-filled with the journal of the current context."""
    fresh_form_action = self.env['ir.actions.act_window']._for_xml_id(
        'fusion_accounting.action_bank_statement_line_form_bank_rec_widget'
    )
    journal_id = self.env.context['default_journal_id']
    fresh_form_action['context'] = {'default_journal_id': journal_id}
    return fresh_form_action
|
||||
|
||||
# ---- Reconciliation Widget ----
|
||||
@api.model
def _action_open_bank_reconciliation_widget(
    self, extra_domain=None, default_context=None, name=None, kanban_first=True,
):
    """Return an action dict that opens the bank reconciliation widget.

    :param extra_domain: extra leaves ANDed to the base domain.
    :param default_context: context dict for the opened action.
    :param name: window title (defaults to "Bank Reconciliation").
    :param kanban_first: open the kanban-first variant of the action.
    """
    if kanban_first:
        action_ref = 'fusion_accounting.action_bank_statement_line_transactions_kanban'
    else:
        action_ref = 'fusion_accounting.action_bank_statement_line_transactions'
    widget_action = self.env['ir.actions.act_window']._for_xml_id(action_ref)

    widget_action['name'] = name or _("Bank Reconciliation")
    widget_action['context'] = default_context or {}
    widget_action['domain'] = [('state', '!=', 'cancel')] + (extra_domain or [])

    # Provide a helpful empty-state message listing supported import formats
    available_fmts = self.env['account.journal']._get_bank_statements_available_import_formats()
    widget_action['help'] = Markup(
        "<p class='o_view_nocontent_smiling_face'>{heading}</p>"
        "<p>{detail}<br/>{hint}</p>"
    ).format(
        heading=_('Nothing to do here!'),
        detail=_('No transactions matching your filters were found.'),
        hint=_('Click "New" or upload a %s.', ", ".join(available_fmts)),
    )
    return widget_action
|
||||
|
||||
def action_open_recon_st_line(self):
    """Open the reconciliation widget focused on a single statement line."""
    self.ensure_one()
    focused_context = {
        'default_statement_id': self.statement_id.id,
        'default_journal_id': self.journal_id.id,
        'default_st_line_id': self.id,
        'search_default_id': self.id,
    }
    return self.env['account.bank.statement.line']._action_open_bank_reconciliation_widget(
        name=self.name,
        default_context=focused_context,
    )
|
||||
|
||||
# ---- Auto-Reconciliation CRON ----
|
||||
def _cron_try_auto_reconcile_statement_lines(self, batch_size=None, limit_time=0):
    """Attempt to automatically reconcile statement lines using
    configured reconciliation models. Processes unreconciled lines
    prioritised by those never previously checked by the CRON.

    :param batch_size: max number of lines per run (None = no limit).
    :param limit_time: wall-clock budget in seconds (0 = no limit).
    """

    def _fetch_candidates(eligible_companies):
        """Return a batch of unreconciled lines and a marker for the next batch."""
        leftover_id = None
        # Fetch one extra record to detect whether a next batch exists.
        fetch_limit = (batch_size + 1) if batch_size else None
        search_domain = [
            ('is_reconciled', '=', False),
            # Only lines created in the last 3 months are considered.
            ('create_date', '>', run_start.date() - relativedelta(months=3)),
            ('company_id', 'in', eligible_companies.ids),
        ]
        candidates = self.search(
            search_domain,
            limit=fetch_limit,
            order="cron_last_check ASC NULLS FIRST, id",
        )
        if batch_size and len(candidates) > batch_size:
            leftover_id = candidates[batch_size].id
            candidates = candidates[:batch_size]
        return candidates, leftover_id

    run_start = fields.Datetime.now()

    # Identify companies that have auto-reconcile models configured.
    # NOTE: `child_cos` is deliberately aliased to the same recordset,
    # then walked down the company tree in the while loop below.
    recon_companies = child_cos = (
        self.env['account.reconcile.model']
        .search_fetch(
            [
                ('auto_reconcile', '=', True),
                ('rule_type', 'in', ('writeoff_suggestion', 'invoice_matching')),
            ],
            ['company_id'],
        )
        .company_id
    )
    if not recon_companies:
        return
    # Include every descendant company of the configured ones.
    while child_cos := child_cos.child_ids:
        recon_companies += child_cos

    # When called on specific records, process exactly those; otherwise
    # fetch the next batch of candidates.
    target_lines, next_line_id = (
        (self, None) if self else _fetch_candidates(recon_companies)
    )

    auto_matched_count = 0
    for idx, st_line in enumerate(target_lines):
        # Stop early when the time budget is exhausted; remember where
        # to resume and shrink target_lines so only processed lines are
        # stamped below.
        if limit_time and (fields.Datetime.now().timestamp() - run_start.timestamp()) > limit_time:
            next_line_id = st_line.id
            target_lines = target_lines[:idx]
            break

        # Run the matching rules through an in-memory widget record.
        rec_widget = self.env['bank.rec.widget'].with_context(
            default_st_line_id=st_line.id,
        ).new({})
        rec_widget._action_trigger_matching_rules()

        if rec_widget.state == 'valid' and rec_widget.matching_rules_allow_auto_reconcile:
            try:
                rec_widget._action_validate()
                if st_line.is_reconciled:
                    model_names = ', '.join(
                        st_line.move_id.line_ids.reconcile_model_id.mapped('name')
                    )
                    st_line.move_id.message_post(
                        body=_(
                            "This transaction was auto-reconciled using model '%s'.",
                            model_names,
                        ),
                    )
                    auto_matched_count += 1
            except UserError as exc:
                # Best effort: log and move on to the next line.
                _log.info(
                    "Auto-reconciliation of statement line %s failed: %s",
                    st_line.id, str(exc),
                )
                continue

    # Stamp every processed line so it sorts after unchecked ones next run.
    target_lines.write({'cron_last_check': run_start})

    if next_line_id:
        pending_line = self.env['account.bank.statement.line'].browse(next_line_id)
        # Re-trigger the CRON only if this run matched something or the
        # next pending line has never been checked.
        if auto_matched_count or not pending_line.cron_last_check:
            self.env.ref(
                'fusion_accounting.auto_reconcile_bank_statement_line'
            )._trigger()
|
||||
|
||||
# ---- Partner Detection ----
|
||||
def _retrieve_partner(self):
    """Heuristically determine the partner for this statement line
    by inspecting bank account numbers, partner names, and
    reconciliation model mappings.

    Heuristics are tried in order; the first unambiguous match wins.

    :return: a ``res.partner`` record (empty recordset when no match).
    """
    self.ensure_one()

    # 1. Already assigned
    if self.partner_id:
        return self.partner_id

    # 2. Match by bank account number
    if self.account_number:
        normalised_number = sanitize_account_number(self.account_number)
        if normalised_number:
            bank_domain = [('sanitized_acc_number', 'ilike', normalised_number)]
            # Company-specific bank accounts take precedence over shared ones.
            for company_filter in (
                [('company_id', 'parent_of', self.company_id.id)],
                [('company_id', '=', False)],
            ):
                matched_banks = self.env['res.partner.bank'].search(
                    company_filter + bank_domain
                )
                if len(matched_banks.partner_id) == 1:
                    return matched_banks.partner_id
                # Filter out archived partners when multiple matches
                active_banks = matched_banks.filtered(lambda b: b.partner_id.active)
                if len(active_banks) == 1:
                    return active_banks.partner_id

    # 3. Match by partner name
    if self.partner_name:
        # Exact (case-insensitive) match is tried before a substring
        # match, each crossed with company-specific then shared partners.
        name_match_strategies = product(
            [
                ('complete_name', '=ilike', self.partner_name),
                ('complete_name', 'ilike', self.partner_name),
            ],
            [
                ('company_id', 'parent_of', self.company_id.id),
                ('company_id', '=', False),
            ],
        )
        for combined_domain in name_match_strategies:
            # limit=2 is enough to detect ambiguity (more than one hit).
            found_partner = self.env['res.partner'].search(
                list(combined_domain) + [('parent_id', '=', False)],
                limit=2,
            )
            if len(found_partner) == 1:
                return found_partner

    # 4. Match through reconcile model partner mappings
    applicable_models = self.env['account.reconcile.model'].search([
        *self.env['account.reconcile.model']._check_company_domain(self.company_id),
        ('rule_type', '!=', 'writeoff_button'),
    ])
    for recon_model in applicable_models:
        mapped_partner = recon_model._get_partner_from_mapping(self)
        if mapped_partner and recon_model._is_applicable_for(self, mapped_partner):
            return mapped_partner

    # No heuristic matched: return an empty recordset.
    return self.env['res.partner']
|
||||
|
||||
# ---- Text Extraction for Matching ----
|
||||
def _get_st_line_strings_for_matching(self, allowed_fields=None):
    """Collect textual values from the statement line for use in
    matching algorithms (payment reference, plain-text narration, ref).

    :param allowed_fields: optional iterable restricting which fields
        are considered; falsy means all fields.
    """
    self.ensure_one()

    def _is_wanted(field_name):
        # No restriction given → every field is considered.
        return not allowed_fields or field_name in allowed_fields

    texts = []
    if _is_wanted('payment_ref') and self.payment_ref:
        texts.append(self.payment_ref)
    if _is_wanted('narration'):
        narration_text = html2plaintext(self.narration or "")
        if narration_text:
            texts.append(narration_text)
    if _is_wanted('ref') and self.ref:
        texts.append(self.ref)
    return texts
|
||||
|
||||
# ---- Domain Helpers ----
|
||||
def _get_default_amls_matching_domain(self):
    """Exclude stock valuation accounts from the default matching domain."""
    base_domain = super()._get_default_amls_matching_domain()

    valuated_categories = self.env['product.category'].search([
        '|',
        ('property_stock_account_input_categ_id', '!=', False),
        ('property_stock_account_output_categ_id', '!=', False),
    ])
    valuation_accounts = (
        valuated_categories.mapped('property_stock_account_input_categ_id')
        + valuated_categories.mapped('property_stock_account_output_categ_id')
    )
    if not valuation_accounts:
        return base_domain

    return expression.AND([
        base_domain,
        [('account_id', 'not in', tuple(set(valuation_accounts.ids)))],
    ])
|
||||
854
Fusion Accounting/models/account_cash_flow_report.py
Normal file
854
Fusion Accounting/models/account_cash_flow_report.py
Normal file
@@ -0,0 +1,854 @@
|
||||
# Fusion Accounting - Cash Flow Statement Report Handler
|
||||
|
||||
from odoo import models, _
|
||||
from odoo.tools import SQL, Query
|
||||
|
||||
|
||||
class CashFlowReportCustomHandler(models.AbstractModel):
    """Generates the cash flow statement using the direct method.

    Reference: https://www.investopedia.com/terms/d/direct_method.asp

    The handler fetches liquidity journal entries, splits them into
    operating / investing / financing buckets based on account tags,
    and renders both section totals and per-account detail rows.
    """

    # Technical handler model registered on the cash flow report.
    _name = 'account.cash.flow.report.handler'
    _inherit = 'account.report.custom.handler'
    _description = 'Cash Flow Report Custom Handler'
|
||||
# ------------------------------------------------------------------
|
||||
# Public entry points
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
    """Build every line of the cash flow statement.

    Returns a list of ``(sequence, line_dict)`` tuples ready for the
    report engine.
    """
    report_lines = []

    sections = self._build_section_structure()
    section_data = self._compute_report_data(report, options, sections)

    for section_key, section_meta in sections.items():
        # Section header line.
        report_lines.append(
            (0, self._render_section_line(report, options, section_key, section_meta, section_data))
        )

        # Detail rows grouped by account under this section, sorted by
        # account code with code-less entries pushed to the end.
        details = section_data.get(section_key, {}).get('aml_groupby_account')
        if details:
            entries = details.values()
            coded = sorted(
                (e for e in entries if e['account_code'] is not None),
                key=lambda e: e['account_code'],
            )
            uncoded = [e for e in entries if e['account_code'] is None]
            for detail in coded + uncoded:
                report_lines.append((0, self._render_detail_line(report, options, detail)))

    # Append an unexplained-difference line when the numbers don't tie.
    diff_line = self._render_unexplained_difference(report, options, section_data)
    if diff_line:
        report_lines.append((0, diff_line))

    return report_lines
|
||||
|
||||
def _custom_options_initializer(self, report, options, previous_options):
    """Restrict selectable journals to bank, cash, and general types."""
    super()._custom_options_initializer(report, options, previous_options=previous_options)
    liquidity_journal_types = ('bank', 'cash', 'general')
    report._init_options_journals(
        options,
        previous_options=previous_options,
        additional_journals_domain=[('type', 'in', liquidity_journal_types)],
    )
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Data computation
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _compute_report_data(self, report, options, section_structure):
    """Aggregate all cash-flow numbers into *report_data*.

    The returned dictionary maps section keys (from
    ``_build_section_structure``) to balance and per-account detail
    dictionaries. Empty when no liquidity account is involved.
    """
    report_data = {}

    liquidity_acct_ids = self._fetch_liquidity_account_ids(report, options)
    if not liquidity_acct_ids:
        return report_data

    # Beginning-of-period balances: feed both opening and closing, so
    # the closing balance starts from the opening one.
    for row in self._query_liquidity_balances(report, options, liquidity_acct_ids, 'to_beginning_of_period'):
        self._merge_into_report_data('opening_balance', row, section_structure, report_data)
        self._merge_into_report_data('closing_balance', row, section_structure, report_data)

    # Period movements
    for row in self._query_liquidity_balances(report, options, liquidity_acct_ids, 'strict_range'):
        self._merge_into_report_data('closing_balance', row, section_structure, report_data)

    tag_map = self._resolve_cashflow_tags()
    cf_tag_ids = self._list_cashflow_tag_ids()

    # Liquidity-side entries
    for grouped_rows in self._fetch_liquidity_side_entries(report, options, liquidity_acct_ids, cf_tag_ids):
        for row_data in grouped_rows.values():
            self._route_entry_to_section(tag_map, row_data, section_structure, report_data)

    # Reconciled counterpart entries
    for grouped_rows in self._fetch_reconciled_counterparts(report, options, liquidity_acct_ids, cf_tag_ids):
        for row_data in grouped_rows.values():
            self._route_entry_to_section(tag_map, row_data, section_structure, report_data)

    return report_data
|
||||
|
||||
def _merge_into_report_data(self, section_key, row, section_structure, report_data):
    """Insert or accumulate *row* into *report_data* under *section_key*.

    Also propagates the balance upward through parent sections so that
    all ancestor totals stay correct.

    The *report_data* dictionary uses two sub-keys per section:

    * ``balance`` – a ``{column_group_key: float}`` mapping
    * ``aml_groupby_account`` – per-account detail rows
    """

    def _propagate_to_parent(sec_key, col_grp, amount, structure, data):
        """Walk the parent chain and add *amount* to every ancestor."""
        parent_ref = structure[sec_key].get('parent_line_id')
        if parent_ref:
            # Ancestors created here only get a 'balance' sub-key; the
            # detail dict is added lazily when a row lands on them.
            data.setdefault(parent_ref, {'balance': {}})
            data[parent_ref]['balance'].setdefault(col_grp, 0.0)
            data[parent_ref]['balance'][col_grp] += amount
            _propagate_to_parent(parent_ref, col_grp, amount, structure, data)

    col_grp = row['column_group_key']
    acct_id = row['account_id']
    acct_code = row['account_code']
    acct_label = row['account_name']
    amt = row['balance']
    tag_ref = row.get('account_tag_id')

    # Zero amounts (per company-currency rounding) contribute nothing.
    if self.env.company.currency_id.is_zero(amt):
        return

    report_data.setdefault(section_key, {
        'balance': {},
        'aml_groupby_account': {},
    })

    report_data[section_key]['aml_groupby_account'].setdefault(acct_id, {
        'parent_line_id': section_key,
        'account_id': acct_id,
        'account_code': acct_code,
        'account_name': acct_label,
        'account_tag_id': tag_ref,
        # Detail rows render one level below their section header.
        'level': section_structure[section_key]['level'] + 1,
        'balance': {},
    })

    report_data[section_key]['balance'].setdefault(col_grp, 0.0)
    report_data[section_key]['balance'][col_grp] += amt

    acct_entry = report_data[section_key]['aml_groupby_account'][acct_id]
    acct_entry['balance'].setdefault(col_grp, 0.0)
    acct_entry['balance'][col_grp] += amt

    _propagate_to_parent(section_key, col_grp, amt, section_structure, report_data)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Tag helpers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _resolve_cashflow_tags(self):
    """Map each cash-flow activity type to its account.account.tag ID."""
    xmlids = {
        'operating': 'account.account_tag_operating',
        'investing': 'account.account_tag_investing',
        'financing': 'account.account_tag_financing',
    }
    return {activity: self.env.ref(xmlid).id for activity, xmlid in xmlids.items()}
|
||||
|
||||
def _list_cashflow_tag_ids(self):
    """Return an iterable containing every cash-flow-relevant tag ID."""
    tag_map = self._resolve_cashflow_tags()
    return tag_map.values()
|
||||
|
||||
def _route_entry_to_section(self, tag_map, entry, section_structure, report_data):
    """Determine the correct report section for a single entry and
    merge it into *report_data*.

    Receivable / payable lines go to advance-payment sections.
    Other lines are classified by tag + sign (cash in vs cash out).
    """
    acct_type = entry['account_account_type']
    amt = entry['balance']

    if acct_type == 'asset_receivable':
        target = 'advance_payments_customer'
    elif acct_type == 'liability_payable':
        target = 'advance_payments_suppliers'
    elif amt == 0:
        # Exactly-zero movements on non-receivable/payable accounts carry
        # no cash-flow information: drop them silently.
        return
    else:
        # Dispatch by activity tag, with the sign picking the in/out side.
        tag_id = entry.get('account_tag_id')
        if amt < 0:
            section_by_tag = {
                tag_map['operating']: 'paid_operating_activities',
                tag_map['investing']: 'investing_activities_cash_out',
                tag_map['financing']: 'financing_activities_cash_out',
            }
            fallback = 'unclassified_activities_cash_out'
        else:
            section_by_tag = {
                tag_map['operating']: 'received_operating_activities',
                tag_map['investing']: 'investing_activities_cash_in',
                tag_map['financing']: 'financing_activities_cash_in',
            }
            fallback = 'unclassified_activities_cash_in'
        target = section_by_tag.get(tag_id, fallback)

    self._merge_into_report_data(target, entry, section_structure, report_data)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# SQL queries
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _fetch_liquidity_account_ids(self, report, options):
    """Return a tuple of account IDs used by liquidity journals.

    Includes default accounts of bank/cash journals as well as any
    payment-method-specific accounts.
    """
    journal_ids = [journal['id'] for journal in report._get_options_journals(options)]

    # Restrict to the journals selected in the report options, or fall
    # back to every bank/cash/general journal when nothing is selected.
    if journal_ids:
        where_fragment = "aj.id IN %s"
        where_args = [tuple(journal_ids)]
    else:
        where_fragment = "aj.type IN ('bank', 'cash', 'general')"
        where_args = []

    # where_fragment is one of two fixed literals above, so interpolating
    # it into the query is injection-safe; user data stays in where_args.
    self.env.cr.execute(f'''
        SELECT
            array_remove(ARRAY_AGG(DISTINCT aa.id), NULL),
            array_remove(ARRAY_AGG(DISTINCT apml.payment_account_id), NULL)
        FROM account_journal aj
        JOIN res_company rc ON aj.company_id = rc.id
        LEFT JOIN account_payment_method_line apml
            ON aj.id = apml.journal_id
        LEFT JOIN account_account aa
            ON aj.default_account_id = aa.id
            AND aa.account_type IN ('asset_cash', 'liability_credit_card')
        WHERE {where_fragment}
    ''', where_args)

    default_account_ids, payment_account_ids = self.env.cr.fetchone()
    account_ids = set(default_account_ids or []) | set(payment_account_ids or [])
    return tuple(account_ids) if account_ids else ()
|
||||
|
||||
def _build_move_ids_subquery(self, report, liquidity_acct_ids, col_group_opts) -> SQL:
    """Build a sub-select that returns move IDs touching liquidity accounts."""
    domain = [('account_id', 'in', list(liquidity_acct_ids))]
    query = report._get_report_query(col_group_opts, 'strict_range', domain)
    return SQL(
        '''
        SELECT array_agg(DISTINCT account_move_line.move_id) AS move_id
        FROM %(tbl_refs)s
        WHERE %(conditions)s
        ''',
        tbl_refs=query.from_clause,
        conditions=query.where_clause,
    )
|
||||
|
||||
def _query_liquidity_balances(self, report, options, liquidity_acct_ids, scope):
    """Compute per-account balances for liquidity accounts.

    *scope* is either ``'to_beginning_of_period'`` (opening) or
    ``'strict_range'`` (period movement).

    :param report: the account.report record driving the query helpers
    :param options: full report options dict (one or more column groups)
    :param liquidity_acct_ids: account IDs returned by
                               ``_fetch_liquidity_account_ids``
    :param scope: date-scope keyword forwarded to ``_get_report_query``
    :return: list of dict rows with ``column_group_key``, ``account_id``,
             ``account_code``, ``account_name`` and ``balance``
    """
    sql_parts = []

    # One sub-query per column group; all are combined with UNION ALL and
    # executed in a single round-trip at the end.
    for col_key, col_opts in report._split_options_per_column_group(options).items():
        qry = report._get_report_query(col_opts, scope, domain=[('account_id', 'in', liquidity_acct_ids)])
        # Join account_account so code/name can be selected alongside sums.
        acct_alias = qry.join(
            lhs_alias='account_move_line', lhs_column='account_id',
            rhs_table='account_account', rhs_column='id', link='account_id',
        )
        code_sql = self.env['account.account']._field_to_sql(acct_alias, 'code', qry)
        name_sql = self.env['account.account']._field_to_sql(acct_alias, 'name')

        sql_parts.append(SQL(
            '''
            SELECT
                %(col_key)s AS column_group_key,
                account_move_line.account_id,
                %(code_sql)s AS account_code,
                %(name_sql)s AS account_name,
                SUM(%(bal_expr)s) AS balance
            FROM %(tbl_refs)s
            %(fx_join)s
            WHERE %(conditions)s
            GROUP BY account_move_line.account_id, account_code, account_name
            ''',
            col_key=col_key,
            code_sql=code_sql,
            name_sql=name_sql,
            tbl_refs=qry.from_clause,
            # Balances are converted through the currency table so that
            # multi-company amounts aggregate in one currency.
            bal_expr=report._currency_table_apply_rate(SQL("account_move_line.balance")),
            fx_join=report._currency_table_aml_join(col_opts),
            conditions=qry.where_clause,
        ))

    self.env.cr.execute(SQL(' UNION ALL ').join(sql_parts))
    return self.env.cr.dictfetchall()
|
||||
|
||||
def _fetch_liquidity_side_entries(self, report, options, liquidity_acct_ids, cf_tag_ids):
    """Retrieve the non-liquidity side of moves that touch liquidity accounts.

    Three sub-queries per column group capture:
    1. Credit-side partial reconciliation amounts
    2. Debit-side partial reconciliation amounts
    3. Full line balances (for unreconciled portions)

    Returns a list of dicts keyed by ``(account_id, column_group_key)``.

    :param report: account.report record supplying query/currency helpers
    :param options: full report options dict
    :param liquidity_acct_ids: tuple of liquidity account IDs (excluded
                               from the counterpart lines)
    :param cf_tag_ids: cash-flow tag IDs used to classify counterparts
    """
    aggregated = {}
    sql_parts = []

    for col_key, col_opts in report._split_options_per_column_group(options).items():
        # CTE listing the moves that touch a liquidity account in range.
        move_sub = self._build_move_ids_subquery(report, liquidity_acct_ids, col_opts)
        q = Query(self.env, 'account_move_line')
        # Join account_account for code / name / type columns.
        acct_alias = q.join(
            lhs_alias='account_move_line', lhs_column='account_id',
            rhs_table='account_account', rhs_column='id', link='account_id',
        )
        code_sql = self.env['account.account']._field_to_sql(acct_alias, 'code', q)
        name_sql = self.env['account.account']._field_to_sql(acct_alias, 'name')
        type_sql = SQL.identifier(acct_alias, 'account_type')

        sql_parts.append(SQL(
            '''
            (WITH liq_moves AS (%(move_sub)s)

            -- 1) Credit-side partial amounts
            SELECT
                %(col_key)s AS column_group_key,
                account_move_line.account_id,
                %(code_sql)s AS account_code,
                %(name_sql)s AS account_name,
                %(type_sql)s AS account_account_type,
                aat.account_account_tag_id AS account_tag_id,
                SUM(%(partial_bal)s) AS balance
            FROM %(from_cl)s
            %(fx_join)s
            LEFT JOIN account_partial_reconcile
                ON account_partial_reconcile.credit_move_id = account_move_line.id
            LEFT JOIN account_account_account_tag aat
                ON aat.account_account_id = account_move_line.account_id
                AND aat.account_account_tag_id IN %(cf_tags)s
            WHERE account_move_line.move_id IN (SELECT unnest(liq_moves.move_id) FROM liq_moves)
                AND account_move_line.account_id NOT IN %(liq_accts)s
                AND account_partial_reconcile.max_date BETWEEN %(dt_from)s AND %(dt_to)s
            GROUP BY account_move_line.company_id, account_move_line.account_id,
                account_code, account_name, account_account_type,
                aat.account_account_tag_id

            UNION ALL

            -- 2) Debit-side partial amounts (negated)
            SELECT
                %(col_key)s AS column_group_key,
                account_move_line.account_id,
                %(code_sql)s AS account_code,
                %(name_sql)s AS account_name,
                %(type_sql)s AS account_account_type,
                aat.account_account_tag_id AS account_tag_id,
                -SUM(%(partial_bal)s) AS balance
            FROM %(from_cl)s
            %(fx_join)s
            LEFT JOIN account_partial_reconcile
                ON account_partial_reconcile.debit_move_id = account_move_line.id
            LEFT JOIN account_account_account_tag aat
                ON aat.account_account_id = account_move_line.account_id
                AND aat.account_account_tag_id IN %(cf_tags)s
            WHERE account_move_line.move_id IN (SELECT unnest(liq_moves.move_id) FROM liq_moves)
                AND account_move_line.account_id NOT IN %(liq_accts)s
                AND account_partial_reconcile.max_date BETWEEN %(dt_from)s AND %(dt_to)s
            GROUP BY account_move_line.company_id, account_move_line.account_id,
                account_code, account_name, account_account_type,
                aat.account_account_tag_id

            UNION ALL

            -- 3) Full line balances
            SELECT
                %(col_key)s AS column_group_key,
                account_move_line.account_id,
                %(code_sql)s AS account_code,
                %(name_sql)s AS account_name,
                %(type_sql)s AS account_account_type,
                aat.account_account_tag_id AS account_tag_id,
                SUM(%(line_bal)s) AS balance
            FROM %(from_cl)s
            %(fx_join)s
            LEFT JOIN account_account_account_tag aat
                ON aat.account_account_id = account_move_line.account_id
                AND aat.account_account_tag_id IN %(cf_tags)s
            WHERE account_move_line.move_id IN (SELECT unnest(liq_moves.move_id) FROM liq_moves)
                AND account_move_line.account_id NOT IN %(liq_accts)s
            GROUP BY account_move_line.account_id, account_code, account_name,
                account_account_type, aat.account_account_tag_id)
            ''',
            col_key=col_key,
            move_sub=move_sub,
            code_sql=code_sql,
            name_sql=name_sql,
            type_sql=type_sql,
            from_cl=q.from_clause,
            fx_join=report._currency_table_aml_join(col_opts),
            partial_bal=report._currency_table_apply_rate(SQL("account_partial_reconcile.amount")),
            line_bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
            cf_tags=tuple(cf_tag_ids),
            liq_accts=liquidity_acct_ids,
            dt_from=col_opts['date']['date_from'],
            dt_to=col_opts['date']['date_to'],
        ))

    self.env.cr.execute(SQL(' UNION ALL ').join(sql_parts))

    # Collapse the three UNION branches into one row per
    # (account, column group); the sign is flipped because the report
    # presents the counterpart side of the liquidity movement.
    for rec in self.env.cr.dictfetchall():
        acct_id = rec['account_id']
        aggregated.setdefault(acct_id, {})
        aggregated[acct_id].setdefault(rec['column_group_key'], {
            'column_group_key': rec['column_group_key'],
            'account_id': acct_id,
            'account_code': rec['account_code'],
            'account_name': rec['account_name'],
            'account_account_type': rec['account_account_type'],
            'account_tag_id': rec['account_tag_id'],
            'balance': 0.0,
        })
        aggregated[acct_id][rec['column_group_key']]['balance'] -= rec['balance']

    return list(aggregated.values())
|
||||
|
||||
def _fetch_reconciled_counterparts(self, report, options, liquidity_acct_ids, cf_tag_ids):
    """Retrieve moves reconciled with liquidity moves but that are not
    themselves liquidity moves.

    Each amount is valued proportionally to what has actually been paid,
    so a partially-paid invoice appears at the paid percentage.

    Three steps per column group:
    1. Collect the reconciled (paid) amount per move / account.
    2. Collect the total balance per move / reconciled account.
    3. Fetch the full detail lines and scale them by reconciled/total.

    :return: list of dicts keyed by ``(account_id, column_group_key)``
    """
    reconciled_acct_ids_by_col = {cg: set() for cg in options['column_groups']}
    # pct_map[column_group][move_id][account_id] = [reconciled_amt, total_amt]
    pct_map = {cg: {} for cg in options['column_groups']}
    fx_table = report._get_currency_table(options)

    # Step 1 – gather reconciliation amounts per move / account
    step1_parts = []
    for col_key, col_opts in report._split_options_per_column_group(options).items():
        move_sub = self._build_move_ids_subquery(report, liquidity_acct_ids, col_opts)
        step1_parts.append(SQL(
            '''
            (WITH liq_moves AS (%(move_sub)s)

            SELECT
                %(col_key)s AS column_group_key,
                dr.move_id, dr.account_id,
                SUM(%(partial_amt)s) AS balance
            FROM account_move_line AS cr
            LEFT JOIN account_partial_reconcile
                ON account_partial_reconcile.credit_move_id = cr.id
            JOIN %(fx_tbl)s
                ON account_currency_table.company_id = account_partial_reconcile.company_id
                AND account_currency_table.rate_type = 'current'
            INNER JOIN account_move_line AS dr
                ON dr.id = account_partial_reconcile.debit_move_id
            WHERE cr.move_id IN (SELECT unnest(liq_moves.move_id) FROM liq_moves)
                AND cr.account_id NOT IN %(liq_accts)s
                AND cr.credit > 0.0
                AND dr.move_id NOT IN (SELECT unnest(liq_moves.move_id) FROM liq_moves)
                AND account_partial_reconcile.max_date BETWEEN %(dt_from)s AND %(dt_to)s
            GROUP BY dr.move_id, dr.account_id

            UNION ALL

            SELECT
                %(col_key)s AS column_group_key,
                cr2.move_id, cr2.account_id,
                -SUM(%(partial_amt)s) AS balance
            FROM account_move_line AS dr2
            LEFT JOIN account_partial_reconcile
                ON account_partial_reconcile.debit_move_id = dr2.id
            JOIN %(fx_tbl)s
                ON account_currency_table.company_id = account_partial_reconcile.company_id
                AND account_currency_table.rate_type = 'current'
            INNER JOIN account_move_line AS cr2
                ON cr2.id = account_partial_reconcile.credit_move_id
            WHERE dr2.move_id IN (SELECT unnest(liq_moves.move_id) FROM liq_moves)
                AND dr2.account_id NOT IN %(liq_accts)s
                AND dr2.debit > 0.0
                AND cr2.move_id NOT IN (SELECT unnest(liq_moves.move_id) FROM liq_moves)
                AND account_partial_reconcile.max_date BETWEEN %(dt_from)s AND %(dt_to)s
            GROUP BY cr2.move_id, cr2.account_id)
            ''',
            move_sub=move_sub,
            col_key=col_key,
            liq_accts=liquidity_acct_ids,
            dt_from=col_opts['date']['date_from'],
            dt_to=col_opts['date']['date_to'],
            fx_tbl=fx_table,
            partial_amt=report._currency_table_apply_rate(SQL("account_partial_reconcile.amount")),
        ))

    self.env.cr.execute(SQL(' UNION ALL ').join(step1_parts))

    # Accumulate the reconciled portion (slot [0]) per move / account.
    for rec in self.env.cr.dictfetchall():
        cg = rec['column_group_key']
        pct_map[cg].setdefault(rec['move_id'], {})
        pct_map[cg][rec['move_id']].setdefault(rec['account_id'], [0.0, 0.0])
        pct_map[cg][rec['move_id']][rec['account_id']][0] += rec['balance']
        reconciled_acct_ids_by_col[cg].add(rec['account_id'])

    # No reconciled counterparts anywhere: nothing to report.
    if not any(pct_map.values()):
        return []

    # Step 2 – total balance per move / reconciled account
    step2_parts = []
    for col in options['columns']:
        cg = col['column_group_key']
        # (None,) keeps the IN clause syntactically valid when empty.
        mv_ids = tuple(pct_map[cg].keys()) or (None,)
        ac_ids = tuple(reconciled_acct_ids_by_col[cg]) or (None,)
        step2_parts.append(SQL(
            '''
            SELECT
                %(col_key)s AS column_group_key,
                account_move_line.move_id,
                account_move_line.account_id,
                SUM(%(bal_expr)s) AS balance
            FROM account_move_line
            JOIN %(fx_tbl)s
                ON account_currency_table.company_id = account_move_line.company_id
                AND account_currency_table.rate_type = 'current'
            WHERE account_move_line.move_id IN %(mv_ids)s
                AND account_move_line.account_id IN %(ac_ids)s
            GROUP BY account_move_line.move_id, account_move_line.account_id
            ''',
            col_key=cg,
            fx_tbl=fx_table,
            bal_expr=report._currency_table_apply_rate(SQL("account_move_line.balance")),
            mv_ids=mv_ids,
            ac_ids=ac_ids,
        ))

    self.env.cr.execute(SQL(' UNION ALL ').join(step2_parts))
    # Accumulate the total portion (slot [1]) next to the reconciled one.
    for rec in self.env.cr.dictfetchall():
        cg = rec['column_group_key']
        mv = rec['move_id']
        ac = rec['account_id']
        if ac in pct_map[cg].get(mv, {}):
            pct_map[cg][mv][ac][1] += rec['balance']

    # Step 3 – fetch full detail with account type & tag, then apply pct
    result_map = {}

    detail_q = Query(self.env, 'account_move_line')
    acct_a = detail_q.join(
        lhs_alias='account_move_line', lhs_column='account_id',
        rhs_table='account_account', rhs_column='id', link='account_id',
    )
    code_fld = self.env['account.account']._field_to_sql(acct_a, 'code', detail_q)
    name_fld = self.env['account.account']._field_to_sql(acct_a, 'name')
    type_fld = SQL.identifier(acct_a, 'account_type')

    step3_parts = []
    for col in options['columns']:
        cg = col['column_group_key']
        step3_parts.append(SQL(
            '''
            SELECT
                %(col_key)s AS column_group_key,
                account_move_line.move_id,
                account_move_line.account_id,
                %(code_fld)s AS account_code,
                %(name_fld)s AS account_name,
                %(type_fld)s AS account_account_type,
                aat.account_account_tag_id AS account_tag_id,
                SUM(%(bal_expr)s) AS balance
            FROM %(from_cl)s
            %(fx_join)s
            LEFT JOIN account_account_account_tag aat
                ON aat.account_account_id = account_move_line.account_id
                AND aat.account_account_tag_id IN %(cf_tags)s
            WHERE account_move_line.move_id IN %(mv_ids)s
            GROUP BY account_move_line.move_id, account_move_line.account_id,
                account_code, account_name, account_account_type,
                aat.account_account_tag_id
            ''',
            col_key=cg,
            code_fld=code_fld,
            name_fld=name_fld,
            type_fld=type_fld,
            from_cl=detail_q.from_clause,
            # NOTE(review): this join receives the whole *options* dict,
            # while steps elsewhere pass the per-column-group options —
            # presumably fine for the single-column case; verify for
            # comparison columns.
            fx_join=report._currency_table_aml_join(options),
            bal_expr=report._currency_table_apply_rate(SQL("account_move_line.balance")),
            cf_tags=tuple(cf_tag_ids),
            mv_ids=tuple(pct_map[cg].keys()) or (None,),
        ))

    self.env.cr.execute(SQL(' UNION ALL ').join(step3_parts))

    for rec in self.env.cr.dictfetchall():
        cg = rec['column_group_key']
        mv = rec['move_id']
        ac = rec['account_id']
        line_bal = rec['balance']

        # Sum reconciled & total for the whole move
        sum_reconciled = 0.0
        sum_total = 0.0
        for r_amt, t_amt in pct_map[cg][mv].values():
            sum_reconciled += r_amt
            sum_total += t_amt

        # Compute the applicable portion
        if sum_total and ac not in pct_map[cg][mv]:
            # Counterpart line: scale it by the paid percentage.
            ratio = sum_reconciled / sum_total
            line_bal *= ratio
        elif not sum_total and ac in pct_map[cg][mv]:
            # No total recorded: fall back to the raw reconciled amount.
            line_bal = -pct_map[cg][mv][ac][0]
        else:
            # All other combinations are skipped on purpose (the line is
            # either the reconciled account itself or carries no total).
            continue

        result_map.setdefault(ac, {})
        result_map[ac].setdefault(cg, {
            'column_group_key': cg,
            'account_id': ac,
            'account_code': rec['account_code'],
            'account_name': rec['account_name'],
            'account_account_type': rec['account_account_type'],
            'account_tag_id': rec['account_tag_id'],
            'balance': 0.0,
        })
        # Sign flipped: the report shows the counterpart of the payment.
        result_map[ac][cg]['balance'] -= line_bal

    return list(result_map.values())
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Line rendering
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _build_section_structure(self):
    """Define the hierarchical layout of the cash flow statement.

    Returns an ordered dictionary whose keys identify each section and
    whose values carry the display name, nesting level, parent reference,
    and optional CSS class.

    The insertion order of the keys is the rendering order of the report
    lines, so entries must not be reordered.  ``parent_line_id`` links a
    child section to its parent for balance propagation; ``unfolded``
    marks sections expanded by default.
    """
    return {
        # Top-level anchors: opening balance, net change, closing balance.
        'opening_balance': {
            'name': _('Cash and cash equivalents, beginning of period'),
            'level': 0,
        },
        'net_increase': {
            'name': _('Net increase in cash and cash equivalents'),
            'level': 0,
            'unfolded': True,
        },
        # --- Operating activities ---
        'operating_activities': {
            'name': _('Cash flows from operating activities'),
            'level': 2,
            'parent_line_id': 'net_increase',
            'class': 'fw-bold',
            'unfolded': True,
        },
        'advance_payments_customer': {
            'name': _('Advance Payments received from customers'),
            'level': 4,
            'parent_line_id': 'operating_activities',
        },
        'received_operating_activities': {
            'name': _('Cash received from operating activities'),
            'level': 4,
            'parent_line_id': 'operating_activities',
        },
        'advance_payments_suppliers': {
            'name': _('Advance payments made to suppliers'),
            'level': 4,
            'parent_line_id': 'operating_activities',
        },
        'paid_operating_activities': {
            'name': _('Cash paid for operating activities'),
            'level': 4,
            'parent_line_id': 'operating_activities',
        },
        # --- Investing activities ---
        'investing_activities': {
            'name': _('Cash flows from investing & extraordinary activities'),
            'level': 2,
            'parent_line_id': 'net_increase',
            'class': 'fw-bold',
            'unfolded': True,
        },
        'investing_activities_cash_in': {
            'name': _('Cash in'),
            'level': 4,
            'parent_line_id': 'investing_activities',
        },
        'investing_activities_cash_out': {
            'name': _('Cash out'),
            'level': 4,
            'parent_line_id': 'investing_activities',
        },
        # --- Financing activities ---
        'financing_activities': {
            'name': _('Cash flows from financing activities'),
            'level': 2,
            'parent_line_id': 'net_increase',
            'class': 'fw-bold',
            'unfolded': True,
        },
        'financing_activities_cash_in': {
            'name': _('Cash in'),
            'level': 4,
            'parent_line_id': 'financing_activities',
        },
        'financing_activities_cash_out': {
            'name': _('Cash out'),
            'level': 4,
            'parent_line_id': 'financing_activities',
        },
        # --- Unclassified activities (no cash-flow tag matched) ---
        'unclassified_activities': {
            'name': _('Cash flows from unclassified activities'),
            'level': 2,
            'parent_line_id': 'net_increase',
            'class': 'fw-bold',
            'unfolded': True,
        },
        'unclassified_activities_cash_in': {
            'name': _('Cash in'),
            'level': 4,
            'parent_line_id': 'unclassified_activities',
        },
        'unclassified_activities_cash_out': {
            'name': _('Cash out'),
            'level': 4,
            'parent_line_id': 'unclassified_activities',
        },
        'closing_balance': {
            'name': _('Cash and cash equivalents, closing balance'),
            'level': 0,
        },
    }
|
||||
|
||||
def _render_section_line(self, report, options, section_key, section_meta, report_data):
    """Produce a single section / header line dictionary."""
    line_id = report._get_generic_line_id(None, None, markup=section_key)
    # A section is unfoldable only when it accumulated per-account detail.
    has_detail = 'aml_groupby_account' in report_data.get(section_key, {})

    col_vals = []
    for col in options['columns']:
        raw = 0.0
        if section_key in report_data:
            raw = report_data[section_key][col['expression_label']].get(
                col['column_group_key'], 0.0,
            )
        col_vals.append(report._build_column_dict(raw, col, options=options))

    return {
        'id': line_id,
        'name': section_meta['name'],
        'level': section_meta['level'],
        'class': section_meta.get('class', ''),
        'columns': col_vals,
        'unfoldable': has_detail,
        'unfolded': (
            line_id in options['unfolded_lines']
            or section_meta.get('unfolded')
            or (options.get('unfold_all') and has_detail)
        ),
    }
|
||||
|
||||
def _render_detail_line(self, report, options, detail):
    """Produce a per-account detail line under a section."""
    parent_id = report._get_generic_line_id(None, None, detail['parent_line_id'])
    line_id = report._get_generic_line_id(
        'account.account', detail['account_id'], parent_line_id=parent_id,
    )

    col_vals = [
        report._build_column_dict(
            detail[col['expression_label']].get(col['column_group_key'], 0.0),
            col,
            options=options,
        )
        for col in options['columns']
    ]

    # Prefix the account name with its code when one is set.
    display_name = detail['account_name']
    if detail['account_code']:
        display_name = f"{detail['account_code']} {display_name}"

    return {
        'id': line_id,
        'name': display_name,
        'caret_options': 'account.account',
        'level': detail['level'],
        'parent_id': parent_id,
        'columns': col_vals,
    }
|
||||
|
||||
def _render_unexplained_difference(self, report, options, report_data):
    """If closing != opening + net_increase, emit an extra line showing
    the gap so the user can investigate."""

    def _section_amount(section, expr, col_grp):
        # Missing sections contribute zero to the gap.
        if section in report_data:
            return report_data[section][expr].get(col_grp, 0.0)
        return 0.0

    found_gap = False
    col_vals = []

    for col in options['columns']:
        expr = col['expression_label']
        cg = col['column_group_key']

        closing = _section_amount('closing_balance', expr, cg)
        opening = _section_amount('opening_balance', expr, cg)
        net_chg = _section_amount('net_increase', expr, cg)
        gap = closing - opening - net_chg

        # A gap counts only when it is non-zero at currency precision.
        found_gap = found_gap or not self.env.company.currency_id.is_zero(gap)

        col_vals.append(report._build_column_dict(
            gap,
            {'figure_type': 'monetary', 'expression_label': 'balance'},
            options=options,
        ))

    if not found_gap:
        return None
    return {
        'id': report._get_generic_line_id(None, None, markup='unexplained_difference'),
        'name': _('Unexplained Difference'),
        'level': 1,
        'columns': col_vals,
    }
|
||||
100
Fusion Accounting/models/account_chart_template.py
Normal file
100
Fusion Accounting/models/account_chart_template.py
Normal file
@@ -0,0 +1,100 @@
|
||||
# Fusion Accounting - Chart Template Extensions
|
||||
# Populates deferred journal/account defaults when the module is installed
|
||||
|
||||
from odoo.addons.account.models.chart_template import template
|
||||
from odoo import models
|
||||
|
||||
|
||||
class FusionChartTemplate(models.AbstractModel):
    """Extends the chart-of-accounts template loader to supply default
    values for deferred-revenue and deferred-expense journals and
    accounts when Fusion Accounting is installed."""

    _inherit = 'account.chart.template'

    # The four deferred-setting fields managed by this extension.
    _DEFERRED_FIELDS = (
        'deferred_expense_journal_id',
        'deferred_revenue_journal_id',
        'deferred_expense_account_id',
        'deferred_revenue_account_id',
    )

    def _get_fusion_accounting_res_company(self, chart_template):
        """Return company-level defaults for deferred journals and
        accounts, falling back to the chart template data when the
        company does not yet have values configured.

        :param chart_template: identifier of the chart template to load
        :return: ``{company_id: {field_name: value}}`` for the current company
        """
        current_company = self.env.company
        template_data = self._get_chart_template_data(chart_template)
        co_defaults = template_data['res.company'].get(current_company.id, {})

        # Ensure prerequisite XML-IDs exist for journals & accounts before
        # the returned values reference them.
        prerequisite_models = {
            key: val
            for key, val in template_data.items()
            if key in ('account.journal', 'account.account')
        }
        self._pre_reload_data(
            current_company,
            template_data['template_data'],
            prerequisite_models,
        )

        # Company value wins; otherwise fall back to the template default.
        return {
            current_company.id: {
                fname: current_company[fname].id or co_defaults.get(fname)
                for fname in self._DEFERRED_FIELDS
            },
        }

    @staticmethod
    def _first_matching_xmlid(records, field_name, expected_value):
        """Return the XML-ID of the first record in *records* (a
        ``{xmlid: values}`` mapping) whose *field_name* equals
        *expected_value*, or ``None`` when nothing matches."""
        return next(
            (xid for xid, vals in records.items() if vals[field_name] == expected_value),
            None,
        )

    def _get_chart_template_data(self, chart_template):
        """Augment the chart template data by assigning sensible
        defaults for deferred journals and accounts when none are
        explicitly defined in the template.

        Defaults: deferred journals → first general journal; deferred
        expense account → first current asset; deferred revenue account
        → first current liability.
        """
        data = super()._get_chart_template_data(chart_template)

        # (field to default, model records to search, criterion field, value)
        default_rules = (
            ('deferred_expense_journal_id', 'account.journal', 'type', 'general'),
            ('deferred_revenue_journal_id', 'account.journal', 'type', 'general'),
            ('deferred_expense_account_id', 'account.account', 'account_type', 'asset_current'),
            ('deferred_revenue_account_id', 'account.account', 'account_type', 'liability_current'),
        )
        for co_vals in data['res.company'].values():
            for field, model, crit_field, crit_value in default_rules:
                co_vals[field] = (
                    co_vals.get(field)
                    or self._first_matching_xmlid(data[model], crit_field, crit_value)
                )

        return data
|
||||
621
Fusion Accounting/models/account_deferred_reports.py
Normal file
621
Fusion Accounting/models/account_deferred_reports.py
Normal file
@@ -0,0 +1,621 @@
|
||||
# Fusion Accounting - Deferred Revenue / Expense Report Handlers
|
||||
# Computes period-by-period deferral breakdowns, generates closing entries
|
||||
|
||||
import calendar
|
||||
from collections import defaultdict
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import models, fields, _, api, Command
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tools import groupby, SQL
|
||||
from odoo.addons.fusion_accounting.models.account_move import DEFERRED_DATE_MIN, DEFERRED_DATE_MAX
|
||||
|
||||
|
||||
class FusionDeferredReportHandler(models.AbstractModel):
    """Base handler for deferred expense / revenue reports. Provides
    shared domain construction, SQL queries, grouping logic, and
    deferral-entry generation. Concrete sub-handlers set the report
    type via ``_get_deferred_report_type``."""

    _name = 'account.deferred.report.handler'
    _inherit = 'account.report.custom.handler'
    _description = 'Deferred Expense Report Custom Handler'

    def _get_deferred_report_type(self):
        # Abstract hook: concrete handlers return 'expense' or 'revenue';
        # the value drives account-type filtering, sign conventions and
        # which company journal/account is used when generating entries.
        raise NotImplementedError(
            "Subclasses must return either 'expense' or 'revenue'."
        )

    # =====================================================================
    # DOMAIN & QUERY HELPERS
    # =====================================================================

    def _get_domain(self, report, options, filter_already_generated=False, filter_not_started=False):
        """Build the search domain for deferred journal items within
        the selected report period.

        :param report: the account.report record being rendered.
        :param options: report options dict (uses options['date']).
        :param filter_already_generated: when True, also exclude lines whose
            move already has a posted deferral entry dated at the period end.
        :param filter_not_started: when True, keep only lines whose deferral
            starts after the period (the "Not Started" column).
        :return: an Odoo search domain (prefix-notation list).
        """
        base_domain = report._get_options_domain(options, "from_beginning")
        # Account types depend on the report side (expense vs revenue).
        if self._get_deferred_report_type() == 'expense':
            acct_types = ('expense', 'expense_depreciation', 'expense_direct_cost')
        else:
            acct_types = ('income', 'income_other')

        # Only lines carrying a deferral date range that is still open at
        # the period start, on moves dated inside/before the period.
        base_domain += [
            ('account_id.account_type', 'in', acct_types),
            ('deferred_start_date', '!=', False),
            ('deferred_end_date', '!=', False),
            ('deferred_end_date', '>=', options['date']['date_from']),
            ('move_id.date', '<=', options['date']['date_to']),
        ]
        # Exclude lines that fall entirely within the period: prefix-notation
        # NOT over the conjunction of all six bounds (such lines need no
        # deferral — they are fully recognized inside the period).
        base_domain += [
            '!', '&', '&', '&', '&', '&',
            ('deferred_start_date', '>=', options['date']['date_from']),
            ('deferred_start_date', '<=', options['date']['date_to']),
            ('deferred_end_date', '>=', options['date']['date_from']),
            ('deferred_end_date', '<=', options['date']['date_to']),
            ('move_id.date', '>=', options['date']['date_from']),
            ('move_id.date', '<=', options['date']['date_to']),
        ]
        if filter_already_generated:
            # Skip moves that already have a posted deferral entry dated at
            # the period end (i.e. the entry was already generated).
            base_domain += [
                ('deferred_end_date', '>=', options['date']['date_from']),
                '!',
                '&',
                ('move_id.deferred_move_ids.date', '=', options['date']['date_to']),
                ('move_id.deferred_move_ids.state', '=', 'posted'),
            ]
        if filter_not_started:
            base_domain += [('deferred_start_date', '>', options['date']['date_to'])]
        return base_domain

    @api.model
    def _get_select(self):
        """Column expressions for the deferred-lines query.

        Returns a list of SQL() column fragments; aliases here define the
        dict keys produced by :meth:`_get_lines`.
        """
        # Translated account name must go through _field_to_sql (jsonb name).
        acct_name_expr = self.env['account.account']._field_to_sql(
            'account_move_line__account_id', 'name',
        )
        return [
            SQL("account_move_line.id AS line_id"),
            SQL("account_move_line.account_id AS account_id"),
            SQL("account_move_line.partner_id AS partner_id"),
            SQL("account_move_line.product_id AS product_id"),
            SQL("account_move_line__product_template_id.categ_id AS product_category_id"),
            SQL("account_move_line.name AS line_name"),
            SQL("account_move_line.deferred_start_date AS deferred_start_date"),
            SQL("account_move_line.deferred_end_date AS deferred_end_date"),
            SQL("account_move_line.deferred_end_date - account_move_line.deferred_start_date AS diff_days"),
            SQL("account_move_line.balance AS balance"),
            SQL("account_move_line.analytic_distribution AS analytic_distribution"),
            SQL("account_move_line__move_id.id as move_id"),
            SQL("account_move_line__move_id.name AS move_name"),
            SQL("%s AS account_name", acct_name_expr),
        ]

    def _get_lines(self, report, options, filter_already_generated=False):
        """Execute the deferred-lines query and return raw dicts.

        Joins product/product_template so grouping by product category is
        possible; ordering by start date keeps output deterministic.
        """
        search_domain = self._get_domain(report, options, filter_already_generated)
        qry = report._get_report_query(options, domain=search_domain, date_scope='from_beginning')
        cols = SQL(', ').join(self._get_select())

        full_query = SQL(
            """
            SELECT %(cols)s
            FROM %(from_clause)s
            LEFT JOIN product_product AS account_move_line__product_id
                ON account_move_line.product_id = account_move_line__product_id.id
            LEFT JOIN product_template AS account_move_line__product_template_id
                ON account_move_line__product_id.product_tmpl_id = account_move_line__product_template_id.id
            WHERE %(where_clause)s
            ORDER BY account_move_line.deferred_start_date, account_move_line.id
            """,
            cols=cols,
            from_clause=qry.from_clause,
            where_clause=qry.where_clause,
        )
        self.env.cr.execute(full_query)
        return self.env.cr.dictfetchall()

    # =====================================================================
    # GROUPING HELPERS
    # =====================================================================

    @api.model
    def _get_grouping_fields_deferred_lines(self, filter_already_generated=False, grouping_field='account_id'):
        # Key fields used to group raw lines for display / entry generation.
        return (grouping_field,)

    @api.model
    def _group_by_deferred_fields(self, line, filter_already_generated=False, grouping_field='account_id'):
        # Grouping key tuple for a single raw line dict.
        return tuple(
            line[k] for k in self._get_grouping_fields_deferred_lines(filter_already_generated, grouping_field)
        )

    @api.model
    def _get_grouping_fields_deferral_lines(self):
        # Empty tuple: by default all deferral-account lines collapse into
        # one group (a single deferral line per entry).
        return ()

    @api.model
    def _group_by_deferral_fields(self, line):
        return tuple(line[k] for k in self._get_grouping_fields_deferral_lines())

    @api.model
    def _group_deferred_amounts_by_grouping_field(
        self, deferred_amounts_by_line, periods, is_reverse,
        filter_already_generated=False, grouping_field='account_id',
    ):
        """Group deferred amounts per grouping field and compute period
        totals. Returns ``(per_key_totals, aggregate_totals)``.

        NOTE(review): ``groupby`` assumes ``deferred_amounts_by_line`` is
        already ordered by the grouping key — presumably guaranteed by the
        SQL ORDER BY / upstream grouping; confirm against the caller.
        """
        grouped_iter = groupby(
            deferred_amounts_by_line,
            key=lambda row: self._group_by_deferred_fields(row, filter_already_generated, grouping_field),
        )
        per_key = {}
        aggregate = {p: 0 for p in periods + ['totals_aggregated']}
        # Sign convention: expense reports pass is_reverse=True.
        multiplier = 1 if is_reverse else -1

        for key, key_lines in grouped_iter:
            key_lines = list(key_lines)
            key_totals = self._get_current_key_totals_dict(key_lines, multiplier)
            aggregate['totals_aggregated'] += key_totals['amount_total']
            for period in periods:
                period_val = multiplier * sum(ln[period] for ln in key_lines)
                key_totals[period] = period_val
                # Round per key so the aggregate matches the displayed rows.
                aggregate[period] += self.env.company.currency_id.round(period_val)
            per_key[key] = key_totals

        return per_key, aggregate

    @api.model
    def _get_current_key_totals_dict(self, key_lines, multiplier):
        # Seed totals for one group: representative ids come from the first
        # line (all lines in the group share the grouping field value).
        return {
            'account_id': key_lines[0]['account_id'],
            'product_id': key_lines[0]['product_id'],
            'product_category_id': key_lines[0]['product_category_id'],
            'amount_total': multiplier * sum(ln['balance'] for ln in key_lines),
            'move_ids': {ln['move_id'] for ln in key_lines},
        }

    # =====================================================================
    # REPORT DISPLAY
    # =====================================================================

    def _get_custom_display_config(self):
        # Swap in the deferred-report filter template.
        return {
            'templates': {
                'AccountReportFilters': 'fusion_accounting.DeferredFilters',
            },
        }

    def _custom_options_initializer(self, report, options, previous_options):
        """Shape the report columns: date-labelled period columns (oldest
        first) framed by Total / Not Started / Before / Later columns, and
        add the manual "Generate entry" button when applicable."""
        super()._custom_options_initializer(report, options, previous_options=previous_options)

        # Label each comparison column with its own period dates.
        per_col_group = report._split_options_per_column_group(options)
        for col_dict in options['columns']:
            col_opts = per_col_group[col_dict['column_group_key']]
            col_dict['name'] = col_opts['date']['string']
            col_dict['date_from'] = col_opts['date']['date_from']
            col_dict['date_to'] = col_opts['date']['date_to']

        # Comparison columns arrive newest-first; show oldest-first.
        options['columns'] = list(reversed(options['columns']))

        # Synthetic framing columns reuse the first column as a template.
        total_col = [{
            **options['columns'][0],
            'name': _('Total'),
            'expression_label': 'total',
            'date_from': DEFERRED_DATE_MIN,
            'date_to': DEFERRED_DATE_MAX,
        }]
        not_started_col = [{
            **options['columns'][0],
            'name': _('Not Started'),
            'expression_label': 'not_started',
            'date_from': options['columns'][-1]['date_to'],
            'date_to': DEFERRED_DATE_MAX,
        }]
        before_col = [{
            **options['columns'][0],
            'name': _('Before'),
            'expression_label': 'before',
            'date_from': DEFERRED_DATE_MIN,
            'date_to': options['columns'][0]['date_from'],
        }]
        later_col = [{
            **options['columns'][0],
            'name': _('Later'),
            'expression_label': 'later',
            'date_from': options['columns'][-1]['date_to'],
            'date_to': DEFERRED_DATE_MAX,
        }]

        options['columns'] = total_col + not_started_col + before_col + options['columns'] + later_col
        options['column_headers'] = []
        options['deferred_report_type'] = self._get_deferred_report_type()
        options['deferred_grouping_field'] = previous_options.get('deferred_grouping_field') or 'account_id'

        # Offer the "Generate entry" button only for manual generation mode.
        co = self.env.company
        report_type = self._get_deferred_report_type()
        is_manual = (
            (report_type == 'expense' and co.generate_deferred_expense_entries_method == 'manual')
            or (report_type == 'revenue' and co.generate_deferred_revenue_entries_method == 'manual')
        )
        if is_manual:
            options['buttons'].append({
                'name': _('Generate entry'),
                'action': 'action_generate_entry',
                'sequence': 80,
                'always_show': True,
            })

    def action_audit_cell(self, options, params):
        """Open a list of the invoices/bills and deferral entries
        that underlie the clicked cell in the deferred report.

        :param options: report options dict.
        :param params: dict with 'column_group_key', 'expression_label'
            and 'calling_line_dict_id' identifying the clicked cell.
        """
        report = self.env['account.report'].browse(options['report_id'])
        col_data = next(
            (c for c in options['columns']
             if c['column_group_key'] == params.get('column_group_key')
             and c['expression_label'] == params.get('expression_label')),
            None,
        )
        if not col_data:
            # Unknown cell: nothing to open (implicit None return).
            return

        col_from = fields.Date.to_date(col_data['date_from'])
        col_to = fields.Date.to_date(col_data['date_to'])
        rpt_from = fields.Date.to_date(options['date']['date_from'])
        rpt_to = fields.Date.to_date(options['date']['date_to'])

        # Synthetic columns use sentinel dates; clamp them to the report
        # period boundaries for the context filters below.
        if col_data['expression_label'] in ('not_started', 'later'):
            col_from = rpt_to + relativedelta(days=1)
        if col_data['expression_label'] == 'before':
            col_to = rpt_from - relativedelta(days=1)

        _grp_model, grp_record_id = report._get_model_info_from_id(
            params.get('calling_line_dict_id'),
        )

        source_domain = self._get_domain(
            report, options,
            filter_not_started=(col_data['expression_label'] == 'not_started'),
        )
        if grp_record_id:
            # Narrow to the clicked report row's group (account/product/...).
            source_domain.append(
                (options['deferred_grouping_field'], '=', grp_record_id)
            )

        source_moves = self.env['account.move.line'].search(source_domain).move_id
        visible_line_ids = source_moves.line_ids.ids
        if col_data['expression_label'] != 'total':
            # Period cells also show the generated deferral entries.
            visible_line_ids += source_moves.deferred_move_ids.line_ids.ids

        return {
            'type': 'ir.actions.act_window',
            'name': _('Deferred Entries'),
            'res_model': 'account.move.line',
            'domain': [('id', 'in', visible_line_ids)],
            'views': [(self.env.ref('fusion_accounting.view_deferred_entries_tree').id, 'list')],
            'context': {
                'search_default_pl_accounts': True,
                f'search_default_{options["deferred_grouping_field"]}': grp_record_id,
                'date_from': col_from,
                'date_to': col_to,
                'search_default_date_between': True,
                'expand': True,
            },
        }

    def _caret_options_initializer(self):
        # Single caret action on report rows: drill down to journal items.
        return {
            'deferred_caret': [
                {'name': _("Journal Items"), 'action': 'open_journal_items'},
            ],
        }

    def _customize_warnings(self, report, options, all_column_groups_expression_totals, warnings):
        """Warn when entries were already generated for this period while
        the company is in manual generation mode (re-generating would be
        redundant or conflicting)."""
        rpt_type = self._get_deferred_report_type()
        co = self.env.company
        is_manual_and_generated = (
            (rpt_type == 'expense' and co.generate_deferred_expense_entries_method == 'manual'
             or rpt_type == 'revenue' and co.generate_deferred_revenue_entries_method == 'manual')
            and self.env['account.move'].search_count(
                report._get_generated_deferral_entries_domain(options),
            )
        )
        if is_manual_and_generated:
            warnings['fusion_accounting.deferred_report_warning_already_posted'] = {
                'alert_type': 'warning',
            }

    def open_journal_items(self, options, params):
        """Caret action: open the journal items backing a report row,
        filtered by the row's grouping record when one is set."""
        report = self.env['account.report'].browse(options['report_id'])
        rec_model, rec_id = report._get_model_info_from_id(params.get('line_id'))
        item_domain = self._get_domain(report, options)
        if rec_model == 'account.account' and rec_id:
            item_domain += [('account_id', '=', rec_id)]
        elif rec_model == 'product.product' and rec_id:
            item_domain += [('product_id', '=', rec_id)]
        elif rec_model == 'product.category' and rec_id:
            item_domain += [('product_category_id', '=', rec_id)]
        return {
            'type': 'ir.actions.act_window',
            'name': _("Deferred Entries"),
            'res_model': 'account.move.line',
            'domain': item_domain,
            'views': [(self.env.ref('fusion_accounting.view_deferred_entries_tree').id, 'list')],
            'context': {
                'search_default_group_by_move': True,
                'expand': True,
            },
        }

    def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
        """Build the report lines by computing deferred amounts per
        period and grouping field.

        :return: list of ``(sequence, line_dict)`` tuples — one row per
            grouping record plus a trailing Total row when any exist.
        """

        def _format_columns(row_totals):
            # Map each report column to its value in row_totals, keyed by
            # (date_from, date_to, expression_label).
            return [
                {
                    **report._build_column_dict(
                        row_totals[(
                            fields.Date.to_date(col['date_from']),
                            fields.Date.to_date(col['date_to']),
                            col['expression_label'],
                        )],
                        col,
                        options=options,
                        currency=self.env.company.currency_id,
                    ),
                    'auditable': True,
                }
                for col in options['columns']
            ]

        raw_lines = self._get_lines(report, options)
        col_periods = [
            (
                fields.Date.from_string(c['date_from']),
                fields.Date.from_string(c['date_to']),
                c['expression_label'],
            )
            for c in options['columns']
        ]

        per_line_amounts = self.env['account.move']._get_deferred_amounts_by_line(
            raw_lines, col_periods, self._get_deferred_report_type(),
        )
        per_key, totals = self._group_deferred_amounts_by_grouping_field(
            deferred_amounts_by_line=per_line_amounts,
            periods=col_periods,
            is_reverse=(self._get_deferred_report_type() == 'expense'),
            filter_already_generated=False,
            grouping_field=options['deferred_grouping_field'],
        )

        output_lines = []
        # Comodel of the grouping field (account.account / product.* ...).
        grp_model_name = self.env['account.move.line'][options['deferred_grouping_field']]._name
        for key_totals in per_key.values():
            grp_record = self.env[grp_model_name].browse(
                key_totals[options['deferred_grouping_field']]
            )
            field_desc = self.env['account.move.line'][options['deferred_grouping_field']]._description
            if options['deferred_grouping_field'] == 'product_id':
                field_desc = _("Product")
            # Fallback label for lines without a grouping record set.
            display_label = grp_record.display_name or _("(No %s)", field_desc)
            output_lines.append((0, {
                'id': report._get_generic_line_id(grp_model_name, grp_record.id),
                'name': display_label,
                'caret_options': 'deferred_caret',
                'level': 1,
                'columns': _format_columns(key_totals),
            }))

        if per_key:
            output_lines.append((0, {
                'id': report._get_generic_line_id(None, None, markup='total'),
                'name': 'Total',
                'level': 1,
                'columns': _format_columns(totals),
            }))

        return output_lines

    # =====================================================================
    # ENTRY GENERATION
    # =====================================================================

    def action_generate_entry(self, options):
        """Button action: generate the deferral entry and its reversal,
        then open them in a list/form view."""
        new_moves = self._generate_deferral_entry(options)
        return {
            'name': _('Deferred Entries'),
            'type': 'ir.actions.act_window',
            'views': [(False, "list"), (False, "form")],
            'domain': [('id', 'in', new_moves.ids)],
            'res_model': 'account.move',
            'context': {
                'search_default_group_by_move': True,
                'expand': True,
            },
            'target': 'current',
        }

    def _generate_deferral_entry(self, options):
        """Create the deferral move and its reversal for the selected period.

        :raises UserError: when the deferred journal is unconfigured, the
            period does not end on a month end, the period is locked, or
            there is nothing to generate.
        :return: the created deferral move + its reversal (recordset).
        """
        rpt_type = self._get_deferred_report_type()
        co = self.env.company
        target_journal = (
            co.deferred_expense_journal_id if rpt_type == "expense"
            else co.deferred_revenue_journal_id
        )
        if not target_journal:
            raise UserError(_("Please configure the deferred journal in accounting settings."))

        period_start = fields.Date.to_date(DEFERRED_DATE_MIN)
        period_end = fields.Date.from_string(options['date']['date_to'])
        # Entries may only be generated at a month boundary.
        last_day = calendar.monthrange(period_end.year, period_end.month)[1]
        if period_end.day != last_day:
            raise UserError(
                _("Entries can only be generated for periods ending on the last day of a month.")
            )
        if co._get_violated_lock_dates(period_end, False, target_journal):
            raise UserError(_("Entries cannot be generated for a locked period."))

        # Only posted entries feed the generated deferral.
        options['all_entries'] = False
        report = self.env["account.report"].browse(options["report_id"])
        # Flush so the raw SQL in _get_lines sees pending ORM writes.
        self.env['account.move.line'].flush_model()

        raw_lines = self._get_lines(report, options, filter_already_generated=True)
        period_info = self.env['account.report']._get_dates_period(
            period_start, period_end, 'range', period_type='month',
        )
        entry_ref = _("Grouped Deferral Entry of %s", period_info['string'])
        reversal_ref = _("Reversal of Grouped Deferral Entry of %s", period_info['string'])

        deferral_account = (
            co.deferred_expense_account_id if rpt_type == 'expense'
            else co.deferred_revenue_account_id
        )
        move_cmds, orig_move_ids = self._get_deferred_lines(
            raw_lines, deferral_account,
            (period_start, period_end, 'current'),
            rpt_type == 'expense', entry_ref,
        )
        if not move_cmds:
            raise UserError(_("No entry to generate."))

        deferral_move = self.env['account.move'].with_context(
            skip_account_deprecation_check=True,
        ).create({
            'move_type': 'entry',
            'deferred_original_move_ids': [Command.set(orig_move_ids)],
            'journal_id': target_journal.id,
            'date': period_end,
            'auto_post': 'at_date',
            'ref': entry_ref,
        })
        deferral_move.write({'line_ids': move_cmds})

        # Reversal on the first day of the next period undoes the deferral.
        reversal = deferral_move._reverse_moves()
        reversal.write({
            'date': deferral_move.date + relativedelta(days=1),
            'ref': reversal_ref,
        })
        reversal.line_ids.name = reversal_ref

        combined = deferral_move + reversal
        # Link both generated moves back to every source move; raw SQL with
        # ON CONFLICT keeps re-runs idempotent on the m2m relation.
        self.env.cr.execute_values("""
            INSERT INTO account_move_deferred_rel(original_move_id, deferred_move_id)
                 VALUES %s
            ON CONFLICT DO NOTHING
        """, [
            (orig_id, dm.id)
            for orig_id in orig_move_ids
            for dm in combined
        ])
        combined._post(soft=True)
        return combined

    @api.model
    def _get_deferred_lines(self, raw_lines, deferral_account, period, is_reverse, label):
        """Compute the journal-item commands for a deferral entry and
        return ``(line_commands, original_move_ids)``.

        :param raw_lines: dicts from :meth:`_get_lines`.
        :param deferral_account: account.account for the deferral side.
        :param period: ``(date_from, date_to, label)`` tuple.
        :param is_reverse: True for the expense report (sign convention).
        :param label: name/ref written on every generated line.
        :raises UserError: when the deferral account is unconfigured.
        """
        if not deferral_account:
            raise UserError(_("Please configure the deferred accounts in accounting settings."))

        per_line_amounts = self.env['account.move']._get_deferred_amounts_by_line(
            raw_lines, [period], is_reverse,
        )
        per_key, agg_totals = self._group_deferred_amounts_by_grouping_field(
            per_line_amounts, [period], is_reverse, filter_already_generated=True,
        )
        # Everything already recognized in the period -> nothing to defer.
        if agg_totals['totals_aggregated'] == agg_totals[period]:
            return [], set()

        # Build per-key analytic distributions, weighting each source
        # line's distribution by its share of the key / grand totals.
        dist_per_key = defaultdict(lambda: defaultdict(float))
        deferral_dist = defaultdict(lambda: defaultdict(float))
        for ln in raw_lines:
            if not ln['analytic_distribution']:
                continue
            total_ratio = (
                (ln['balance'] / agg_totals['totals_aggregated'])
                if agg_totals['totals_aggregated'] else 0
            )
            key_data = per_key.get(self._group_by_deferred_fields(ln, True))
            key_ratio = (
                (ln['balance'] / key_data['amount_total'])
                if key_data and key_data['amount_total'] else 0
            )
            for analytic_id, pct in ln['analytic_distribution'].items():
                dist_per_key[self._group_by_deferred_fields(ln, True)][analytic_id] += pct * key_ratio
                deferral_dist[self._group_by_deferral_fields(ln)][analytic_id] += pct * total_ratio

        currency = self.env.company.currency_id
        # Running sum of rounded line balances; any residue is absorbed by
        # a final balancing line on the deferral account.
        balance_remainder = 0
        entry_lines = []
        source_move_ids = set()
        sign = 1 if is_reverse else -1

        # Per group: one line removing the full amount, one re-adding the
        # part recognized inside the period.
        for key, kv in per_key.items():
            for amt in (-kv['amount_total'], kv[period]):
                if amt != 0 and kv[period] != kv['amount_total']:
                    source_move_ids |= kv['move_ids']
                    adjusted_balance = currency.round(sign * amt)
                    entry_lines.append(Command.create(
                        self.env['account.move.line']._get_deferred_lines_values(
                            account_id=kv['account_id'],
                            balance=adjusted_balance,
                            ref=label,
                            analytic_distribution=dist_per_key[key] or False,
                            line=kv,
                        )
                    ))
                    balance_remainder += adjusted_balance

        # Group deferral-account lines (default: one single group).
        grouped_values = {
            k: list(v)
            for k, v in groupby(per_key.values(), key=self._group_by_deferral_fields)
        }
        deferral_lines = []
        for key, key_items in grouped_values.items():
            key_balance = 0
            for item in key_items:
                if item[period] != item['amount_total']:
                    key_balance += currency.round(
                        sign * (item['amount_total'] - item[period])
                    )
            deferral_lines.append(Command.create(
                self.env['account.move.line']._get_deferred_lines_values(
                    account_id=deferral_account.id,
                    balance=key_balance,
                    ref=label,
                    analytic_distribution=deferral_dist[key] or False,
                    line=key_items[0],
                )
            ))
            balance_remainder += key_balance

        # Rounding residue: counter-line so the move stays balanced.
        if not currency.is_zero(balance_remainder):
            deferral_lines.append(Command.create({
                'account_id': deferral_account.id,
                'balance': -balance_remainder,
                'name': label,
            }))

        return entry_lines + deferral_lines, source_move_ids
|
||||
|
||||
|
||||
class FusionDeferredExpenseHandler(models.AbstractModel):
    """Concrete deferred-report handler for the expense side.

    All shared behaviour (domains, SQL, grouping, entry generation)
    lives in ``account.deferred.report.handler``; this subclass only
    pins the report type.
    """

    _name = 'account.deferred.expense.report.handler'
    _description = 'Deferred Expense Custom Handler'
    _inherit = 'account.deferred.report.handler'

    def _get_deferred_report_type(self):
        # Selects expense account types and the expense-side deferred
        # journal/account in the base handler.
        return 'expense'
|
||||
|
||||
|
||||
class FusionDeferredRevenueHandler(models.AbstractModel):
    """Concrete deferred-report handler for the revenue side.

    All shared behaviour (domains, SQL, grouping, entry generation)
    lives in ``account.deferred.report.handler``; this subclass only
    pins the report type.
    """

    _name = 'account.deferred.revenue.report.handler'
    _description = 'Deferred Revenue Custom Handler'
    _inherit = 'account.deferred.report.handler'

    def _get_deferred_report_type(self):
        # Selects income account types and the revenue-side deferred
        # journal/account in the base handler.
        return 'revenue'
|
||||
35
Fusion Accounting/models/account_fiscal_position.py
Normal file
35
Fusion Accounting/models/account_fiscal_position.py
Normal file
@@ -0,0 +1,35 @@
|
||||
# Fusion Accounting - Fiscal Position Extensions
|
||||
# Automated draft tax closing moves for foreign VAT positions
|
||||
|
||||
from odoo import models
|
||||
|
||||
|
||||
class FusionFiscalPosition(models.Model):
    """Extends fiscal positions to generate draft tax-closing entries
    whenever a foreign VAT number is set or updated."""

    _inherit = 'account.fiscal.position'

    def _inverse_foreign_vat(self):
        """When the foreign_vat field is written, propagate draft
        closing moves for each affected fiscal position.

        Runs after the standard inverse so the value is stored before
        closing moves are created.
        """
        super()._inverse_foreign_vat()
        for fpos in self:
            # Only positions that actually carry a foreign VAT get
            # closing moves; clearing the field creates nothing.
            if fpos.foreign_vat:
                fpos._create_draft_closing_move_for_foreign_vat()

    def _create_draft_closing_move_for_foreign_vat(self):
        """For every existing draft tax-closing entry, ensure a
        corresponding closing move exists for this fiscal position.
        """
        self.ensure_one()
        # NOTE(review): this search is not filtered by company — it picks
        # up draft tax closings of all companies visible to the current
        # user; confirm that is intended in multi-company setups.
        draft_closings = self.env['account.move'].search([
            ('tax_closing_report_id', '!=', False),
            ('state', '=', 'draft'),
        ])
        # One pass per closing date so each period's closing move is
        # (re)computed with this fiscal position included.
        for closing_date, grouped_entries in draft_closings.grouped('date').items():
            for entry in grouped_entries:
                self.company_id._get_and_update_tax_closing_moves(
                    closing_date,
                    entry.tax_closing_report_id,
                    fiscal_positions=self,
                )
|
||||
71
Fusion Accounting/models/account_fiscal_year.py
Normal file
71
Fusion Accounting/models/account_fiscal_year.py
Normal file
@@ -0,0 +1,71 @@
|
||||
# Fusion Accounting - Fiscal Year Management
|
||||
# Defines company-specific fiscal year periods with overlap validation
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
|
||||
class FusionFiscalYear(models.Model):
    """Represents a fiscal year period for a company. Enforces
    non-overlapping date ranges and prevents child-company assignments."""

    _name = 'account.fiscal.year'
    _description = 'Fiscal Year'

    name = fields.Char(
        string='Name',
        required=True,
    )
    date_from = fields.Date(
        string='Start Date',
        required=True,
        help='First day of the fiscal year (inclusive).',
    )
    date_to = fields.Date(
        string='End Date',
        required=True,
        help='Last day of the fiscal year (inclusive).',
    )
    company_id = fields.Many2one(
        comodel_name='res.company',
        string='Company',
        required=True,
        default=lambda self: self.env.company,
    )

    @api.constrains('date_from', 'date_to', 'company_id')
    def _validate_fiscal_year_dates(self):
        """Ensure fiscal years do not overlap for the same company and
        that the date range is logically ordered. Fiscal years on child
        companies are disallowed.

        Two inclusive ranges [s1, e1] and [s2, e2] overlap iff
        ``s2 <= e1 and e2 >= s1``; this single test covers every
        scenario (partial overlap on either side, existing inside new,
        new inside existing).

        Bug fix: the previous domain OR-ed three containment clauses
        whose third clause tested "existing contains new" (already
        implied by the first clause) instead of "existing fully inside
        new", so a new fiscal year that entirely enclosed an existing
        one passed validation.

        :raises ValidationError: on inverted dates, child-company
            assignment, or overlap with another fiscal year of the
            same company.
        """
        for fiscal_year in self:
            if fiscal_year.date_to < fiscal_year.date_from:
                raise ValidationError(
                    _('The end date cannot be earlier than the start date.')
                )

            if fiscal_year.company_id.parent_id:
                raise ValidationError(
                    _('Fiscal years cannot be defined on subsidiary companies.')
                )

            # Canonical interval-overlap test against all other fiscal
            # years of the same company (AND is implicit in the domain).
            overlap_domain = [
                ('id', '!=', fiscal_year.id),
                ('company_id', '=', fiscal_year.company_id.id),
                ('date_from', '<=', fiscal_year.date_to),
                ('date_to', '>=', fiscal_year.date_from),
            ]

            if self.search_count(overlap_domain) > 0:
                raise ValidationError(
                    _('Fiscal years for the same company must not overlap. '
                      'Please adjust the start or end dates.')
                )
|
||||
753
Fusion Accounting/models/account_general_ledger.py
Normal file
753
Fusion Accounting/models/account_general_ledger.py
Normal file
@@ -0,0 +1,753 @@
|
||||
# Fusion Accounting - General Ledger Report Handler
|
||||
|
||||
import json
|
||||
|
||||
from odoo import models, fields, api, _
|
||||
from odoo.tools.misc import format_date
|
||||
from odoo.tools import get_lang, SQL
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
from datetime import timedelta
|
||||
from collections import defaultdict
|
||||
|
||||
|
||||
class GeneralLedgerCustomHandler(models.AbstractModel):
|
||||
"""Produces the General Ledger report.
|
||||
|
||||
Aggregates journal items by account and period, handles initial balances,
|
||||
unaffected-earnings allocation, and optional tax-declaration sections.
|
||||
"""
|
||||
|
||||
_name = 'account.general.ledger.report.handler'
|
||||
_inherit = 'account.report.custom.handler'
|
||||
_description = 'General Ledger Custom Handler'
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Display configuration
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_custom_display_config(self):
|
||||
return {
|
||||
'templates': {
|
||||
'AccountReportLineName': 'fusion_accounting.GeneralLedgerLineName',
|
||||
},
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Options
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _custom_options_initializer(self, report, options, previous_options):
|
||||
"""Strip the multi-currency column when the user lacks the group,
|
||||
and auto-unfold when printing."""
|
||||
super()._custom_options_initializer(report, options, previous_options=previous_options)
|
||||
|
||||
if self.env.user.has_group('base.group_multi_currency'):
|
||||
options['multi_currency'] = True
|
||||
else:
|
||||
options['columns'] = [
|
||||
c for c in options['columns']
|
||||
if c['expression_label'] != 'amount_currency'
|
||||
]
|
||||
|
||||
# When printing the whole report, unfold everything unless the user
|
||||
# explicitly selected specific lines.
|
||||
options['unfold_all'] = (
|
||||
(options['export_mode'] == 'print' and not options.get('unfolded_lines'))
|
||||
or options['unfold_all']
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Dynamic lines
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
    def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
        """Return ``[(seq, line_dict), ...]`` for every account row plus
        an optional tax-declaration block and a grand-total row."""
        result_lines = []
        period_start = fields.Date.from_string(options['date']['date_from'])
        comp_currency = self.env.company.currency_id

        # Per-column-group accumulator feeding the grand-total row.
        running_totals = defaultdict(lambda: {'debit': 0, 'credit': 0, 'balance': 0})

        for account_rec, col_grp_vals in self._aggregate_account_values(report, options):
            per_col = {}
            any_current = False  # True when the account has moves inside the period

            for col_key, bucket in col_grp_vals.items():
                main = bucket.get('sum', {})
                unaff = bucket.get('unaffected_earnings', {})

                # Merge the regular sums with the allocated unaffected earnings.
                dr = main.get('debit', 0.0) + unaff.get('debit', 0.0)
                cr = main.get('credit', 0.0) + unaff.get('credit', 0.0)
                bal = main.get('balance', 0.0) + unaff.get('balance', 0.0)

                per_col[col_key] = {
                    'amount_currency': main.get('amount_currency', 0.0) + unaff.get('amount_currency', 0.0),
                    'debit': dr,
                    'credit': cr,
                    'balance': bal,
                }

                # A move dated inside the period makes the line unfoldable.
                latest_date = main.get('max_date')
                if latest_date and latest_date >= period_start:
                    any_current = True

                running_totals[col_key]['debit'] += dr
                running_totals[col_key]['credit'] += cr
                running_totals[col_key]['balance'] += bal

            result_lines.append(
                self._build_account_header_line(report, options, account_rec, any_current, per_col)
            )

        # Round the accumulated balance
        for totals in running_totals.values():
            totals['balance'] = comp_currency.round(totals['balance'])

        # Tax-declaration section (single column group + single journal of sale/purchase type)
        active_journals = report._get_options_journals(options)
        if (
            len(options['column_groups']) == 1
            and len(active_journals) == 1
            and active_journals[0]['type'] in ('sale', 'purchase')
        ):
            result_lines += self._produce_tax_declaration_lines(
                report, options, active_journals[0]['type']
            )

        # Grand total
        result_lines.append(self._build_grand_total_line(report, options, running_totals))

        # Sequence 0 everywhere: ordering is already the query/search order.
        return [(0, ln) for ln in result_lines]
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Batch unfold helper
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
    def _custom_unfold_all_batch_data_generator(self, report, options, lines_to_expand_by_function):
        """Pre-load data for all accounts that need unfolding so the engine
        does not issue per-account queries."""
        target_acct_ids = []
        for line_info in lines_to_expand_by_function.get('_report_expand_unfoldable_line_general_ledger', []):
            mdl, mdl_id = report._get_model_info_from_id(line_info['id'])
            if mdl == 'account.account':
                target_acct_ids.append(mdl_id)

        # No pagination while exporting: the full detail must be rendered.
        page_size = report.load_more_limit if report.load_more_limit and not options.get('export_mode') else None
        overflow_flags = {}  # account_id -> True when its page was truncated

        full_aml_data = self._fetch_aml_data(report, options, target_acct_ids)[0]

        if page_size:
            # Keep only the first `page_size` move lines per account and
            # remember which accounts have more rows to load.
            trimmed_aml_data = {}
            for acct_id, acct_rows in full_aml_data.items():
                page = {}
                for key, val in acct_rows.items():
                    if len(page) >= page_size:
                        overflow_flags[acct_id] = True
                        break
                    page[key] = val
                trimmed_aml_data[acct_id] = page
        else:
            trimmed_aml_data = full_aml_data

        return {
            'initial_balances': self._fetch_opening_balances(report, target_acct_ids, options),
            'aml_results': trimmed_aml_data,
            'has_more': overflow_flags,
        }
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Tax declaration
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
    def _produce_tax_declaration_lines(self, report, options, tax_type):
        """Append a Tax Declaration section when viewing a single
        sale / purchase journal."""
        # The GL's debit/credit columns are reused to show base/tax amounts.
        header_labels = {
            'debit': _("Base Amount"),
            'credit': _("Tax Amount"),
        }

        output = [
            {
                'id': report._get_generic_line_id(None, None, markup='tax_decl_header_1'),
                'name': _('Tax Declaration'),
                'columns': [{} for _ in options['columns']],
                'level': 1,
                'unfoldable': False,
                'unfolded': False,
            },
            {
                'id': report._get_generic_line_id(None, None, markup='tax_decl_header_2'),
                'name': _('Name'),
                'columns': [
                    {'name': header_labels.get(c['expression_label'], '')}
                    for c in options['columns']
                ],
                'level': 3,
                'unfoldable': False,
                'unfolded': False,
            },
        ]

        # Run the generic tax report restricted to the journal's tax-use type.
        tax_report = self.env.ref('account.generic_tax_report')
        tax_opts = tax_report.get_options({
            **options,
            'selected_variant_id': tax_report.id,
            'forced_domain': [('tax_line_id.type_tax_use', '=', tax_type)],
        })
        tax_lines = tax_report._get_lines(tax_opts)
        parent_marker = tax_report._get_generic_line_id(None, None, markup=tax_type)

        for tl in tax_lines:
            # Keep only direct children of the sale/purchase section.
            if tl.get('parent_id') != parent_marker:
                continue
            src_cols = tl['columns']
            # Remap the tax report's two columns (base, tax) onto whichever
            # GL columns carry the debit/credit expression labels.
            mapped = {
                'debit': src_cols[0],
                'credit': src_cols[1],
            }
            tl['columns'] = [mapped.get(c['expression_label'], {}) for c in options['columns']]
            output.append(tl)

        return output
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Core queries
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _aggregate_account_values(self, report, options):
|
||||
"""Execute summary queries and assign unaffected-earnings.
|
||||
|
||||
Returns ``[(account_record, {col_group_key: {...}, ...}), ...]``
|
||||
"""
|
||||
combined_sql = self._build_summary_query(report, options)
|
||||
if not combined_sql:
|
||||
return []
|
||||
|
||||
by_account = {}
|
||||
by_company = {}
|
||||
|
||||
self.env.cr.execute(combined_sql)
|
||||
for row in self.env.cr.dictfetchall():
|
||||
if row['groupby'] is None:
|
||||
continue
|
||||
|
||||
cg = row['column_group_key']
|
||||
bucket = row['key']
|
||||
|
||||
if bucket == 'sum':
|
||||
by_account.setdefault(row['groupby'], {k: {} for k in options['column_groups']})
|
||||
by_account[row['groupby']][cg][bucket] = row
|
||||
elif bucket == 'initial_balance':
|
||||
by_account.setdefault(row['groupby'], {k: {} for k in options['column_groups']})
|
||||
by_account[row['groupby']][cg][bucket] = row
|
||||
elif bucket == 'unaffected_earnings':
|
||||
by_company.setdefault(row['groupby'], {k: {} for k in options['column_groups']})
|
||||
by_company[row['groupby']][cg] = row
|
||||
|
||||
# Assign unaffected earnings to the equity_unaffected account
|
||||
if by_company:
|
||||
candidate_accounts = self.env['account.account'].search([
|
||||
('display_name', 'ilike', options.get('filter_search_bar')),
|
||||
*self.env['account.account']._check_company_domain(list(by_company.keys())),
|
||||
('account_type', '=', 'equity_unaffected'),
|
||||
])
|
||||
for comp_id, comp_data in by_company.items():
|
||||
target_acct = candidate_accounts.filtered(
|
||||
lambda a: self.env['res.company'].browse(comp_id).root_id in a.company_ids
|
||||
)
|
||||
if not target_acct:
|
||||
continue
|
||||
|
||||
for cg in options['column_groups']:
|
||||
by_account.setdefault(
|
||||
target_acct.id,
|
||||
{k: {'unaffected_earnings': {}} for k in options['column_groups']},
|
||||
)
|
||||
unaff = comp_data.get(cg)
|
||||
if not unaff:
|
||||
continue
|
||||
existing = by_account[target_acct.id][cg].get('unaffected_earnings')
|
||||
if existing:
|
||||
for fld in ('amount_currency', 'debit', 'credit', 'balance'):
|
||||
existing[fld] = existing.get(fld, 0.0) + unaff[fld]
|
||||
else:
|
||||
by_account[target_acct.id][cg]['unaffected_earnings'] = unaff
|
||||
|
||||
if by_account:
|
||||
accounts = self.env['account.account'].search([('id', 'in', list(by_account.keys()))])
|
||||
else:
|
||||
accounts = self.env['account.account']
|
||||
|
||||
return [(acct, by_account[acct.id]) for acct in accounts]
|
||||
|
||||
    def _build_summary_query(self, report, options) -> SQL:
        """Construct the UNION ALL query that retrieves period sums and
        unaffected-earnings sums for every account."""
        per_col = report._split_options_per_column_group(options)
        parts = []

        for col_key, grp_opts in per_col.items():
            # Decide date scope
            scope = 'strict_range' if grp_opts.get('general_ledger_strict_range') else 'from_beginning'

            domain_extra = []
            if not grp_opts.get('general_ledger_strict_range'):
                # Outside strict range: moves dated before the fiscal year only
                # count for accounts that carry an initial balance.
                fy_start = fields.Date.from_string(grp_opts['date']['date_from'])
                fy_dates = self.env.company.compute_fiscalyear_dates(fy_start)
                domain_extra += [
                    '|',
                    ('date', '>=', fy_dates['date_from']),
                    ('account_id.include_initial_balance', '=', True),
                ]

            if grp_opts.get('export_mode') == 'print' and grp_opts.get('filter_search_bar'):
                domain_extra.append(('account_id', 'ilike', grp_opts['filter_search_bar']))

            if grp_opts.get('include_current_year_in_unaff_earnings'):
                domain_extra += [('account_id.include_initial_balance', '=', True)]

            qry = report._get_report_query(grp_opts, scope, domain=domain_extra)
            # Per-account sums; max_date lets callers know whether the account
            # has activity inside the displayed period.
            parts.append(SQL(
                """
                SELECT
                    account_move_line.account_id AS groupby,
                    'sum' AS key,
                    MAX(account_move_line.date) AS max_date,
                    %(col_key)s AS column_group_key,
                    COALESCE(SUM(account_move_line.amount_currency), 0.0) AS amount_currency,
                    SUM(%(dr)s) AS debit,
                    SUM(%(cr)s) AS credit,
                    SUM(%(bal)s) AS balance
                FROM %(tbl)s
                %(fx)s
                WHERE %(cond)s
                GROUP BY account_move_line.account_id
                """,
                col_key=col_key,
                tbl=qry.from_clause,
                dr=report._currency_table_apply_rate(SQL("account_move_line.debit")),
                cr=report._currency_table_apply_rate(SQL("account_move_line.credit")),
                bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
                fx=report._currency_table_aml_join(grp_opts),
                cond=qry.where_clause,
            ))

            # Unaffected earnings sub-query
            if not grp_opts.get('general_ledger_strict_range'):
                # Prior-year P&L results, grouped per company; they get
                # re-assigned to the equity_unaffected account later.
                unaff_opts = self._get_options_unaffected_earnings(grp_opts)
                unaff_domain = [('account_id.include_initial_balance', '=', False)]
                unaff_qry = report._get_report_query(unaff_opts, 'strict_range', domain=unaff_domain)
                parts.append(SQL(
                    """
                    SELECT
                        account_move_line.company_id AS groupby,
                        'unaffected_earnings' AS key,
                        NULL AS max_date,
                        %(col_key)s AS column_group_key,
                        COALESCE(SUM(account_move_line.amount_currency), 0.0) AS amount_currency,
                        SUM(%(dr)s) AS debit,
                        SUM(%(cr)s) AS credit,
                        SUM(%(bal)s) AS balance
                    FROM %(tbl)s
                    %(fx)s
                    WHERE %(cond)s
                    GROUP BY account_move_line.company_id
                    """,
                    col_key=col_key,
                    tbl=unaff_qry.from_clause,
                    dr=report._currency_table_apply_rate(SQL("account_move_line.debit")),
                    cr=report._currency_table_apply_rate(SQL("account_move_line.credit")),
                    bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
                    fx=report._currency_table_aml_join(grp_opts),
                    cond=unaff_qry.where_clause,
                ))

        return SQL(" UNION ALL ").join(parts)
|
||||
|
||||
def _get_options_unaffected_earnings(self, options):
|
||||
"""Return modified options for computing prior-year unaffected
|
||||
earnings (P&L accounts before the current fiscal year)."""
|
||||
modified = options.copy()
|
||||
modified.pop('filter_search_bar', None)
|
||||
|
||||
fy = self.env.company.compute_fiscalyear_dates(
|
||||
fields.Date.from_string(options['date']['date_from'])
|
||||
)
|
||||
cutoff = (
|
||||
fields.Date.from_string(modified['date']['date_to'])
|
||||
if options.get('include_current_year_in_unaff_earnings')
|
||||
else fy['date_from'] - timedelta(days=1)
|
||||
)
|
||||
modified['date'] = self.env['account.report']._get_dates_period(None, cutoff, 'single')
|
||||
return modified
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# AML detail queries
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
    def _fetch_aml_data(self, report, options, account_ids, offset=0, limit=None):
        """Load individual move lines for the given accounts.

        Returns ``({account_id: {(aml_id, date): {col_grp: row}}}, has_more)``

        NOTE(review): callers pass ``limit`` as page size + 1, so reaching the
        ``limit``-th row only flags overflow and the sentinel row is dropped.
        """
        container = {aid: {} for aid in account_ids}
        raw_sql = self._build_aml_query(report, options, account_ids, offset=offset, limit=limit)
        self.env.cr.execute(raw_sql)

        row_count = 0
        overflow = False
        for row in self.env.cr.dictfetchall():
            row_count += 1
            if row_count == limit:
                # Sentinel row past the page: remember there is more, discard it.
                overflow = True
                break

            # Build a display-friendly communication field
            if row['ref'] and row['account_type'] != 'asset_receivable':
                row['communication'] = f"{row['ref']} - {row['name']}"
            else:
                row['communication'] = row['name']

            composite_key = (row['id'], row['date'])
            acct_bucket = container[row['account_id']]

            if composite_key not in acct_bucket:
                acct_bucket[composite_key] = {cg: {} for cg in options['column_groups']}

            # The same line can surface once per column group; merge amounts
            # when it was already recorded for this group.
            prior = acct_bucket[composite_key][row['column_group_key']]
            if prior:
                prior['debit'] += row['debit']
                prior['credit'] += row['credit']
                prior['balance'] += row['balance']
                prior['amount_currency'] += row['amount_currency']
            else:
                acct_bucket[composite_key][row['column_group_key']] = row

        return container, overflow
|
||||
|
||||
    def _build_aml_query(self, report, options, account_ids, offset=0, limit=None) -> SQL:
        """SQL for individual move lines within the strict period range.

        NOTE(review): OFFSET/LIMIT apply to the UNION of all column groups,
        not per group — presumably fine because pagination is only used with
        a single column group; confirm against callers.
        """
        extra_domain = [('account_id', 'in', account_ids)] if account_ids is not None else None
        fragments = []
        journal_label = self.env['account.journal']._field_to_sql('journal', 'name')

        for col_key, grp_opts in report._split_options_per_column_group(options).items():
            qry = report._get_report_query(grp_opts, domain=extra_domain, date_scope='strict_range')
            # Join account_account to fetch code/name/type alongside each line.
            acct_a = qry.join(
                lhs_alias='account_move_line', lhs_column='account_id',
                rhs_table='account_account', rhs_column='id', link='account_id',
            )
            code_f = self.env['account.account']._field_to_sql(acct_a, 'code', qry)
            name_f = self.env['account.account']._field_to_sql(acct_a, 'name')
            type_f = self.env['account.account']._field_to_sql(acct_a, 'account_type')

            fragments.append(SQL(
                '''
                SELECT
                    account_move_line.id,
                    account_move_line.date,
                    account_move_line.date_maturity,
                    account_move_line.name,
                    account_move_line.ref,
                    account_move_line.company_id,
                    account_move_line.account_id,
                    account_move_line.payment_id,
                    account_move_line.partner_id,
                    account_move_line.currency_id,
                    account_move_line.amount_currency,
                    COALESCE(account_move_line.invoice_date, account_move_line.date) AS invoice_date,
                    account_move_line.date AS date,
                    %(dr)s AS debit,
                    %(cr)s AS credit,
                    %(bal)s AS balance,
                    mv.name AS move_name,
                    co.currency_id AS company_currency_id,
                    prt.name AS partner_name,
                    mv.move_type AS move_type,
                    %(code_f)s AS account_code,
                    %(name_f)s AS account_name,
                    %(type_f)s AS account_type,
                    journal.code AS journal_code,
                    %(journal_label)s AS journal_name,
                    fr.id AS full_rec_name,
                    %(col_key)s AS column_group_key
                FROM %(tbl)s
                JOIN account_move mv ON mv.id = account_move_line.move_id
                %(fx)s
                LEFT JOIN res_company co ON co.id = account_move_line.company_id
                LEFT JOIN res_partner prt ON prt.id = account_move_line.partner_id
                LEFT JOIN account_journal journal ON journal.id = account_move_line.journal_id
                LEFT JOIN account_full_reconcile fr ON fr.id = account_move_line.full_reconcile_id
                WHERE %(cond)s
                ORDER BY account_move_line.date, account_move_line.move_name, account_move_line.id
                ''',
                code_f=code_f,
                name_f=name_f,
                type_f=type_f,
                journal_label=journal_label,
                col_key=col_key,
                tbl=qry.from_clause,
                fx=report._currency_table_aml_join(grp_opts),
                dr=report._currency_table_apply_rate(SQL("account_move_line.debit")),
                cr=report._currency_table_apply_rate(SQL("account_move_line.credit")),
                bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
                cond=qry.where_clause,
            ))

        # Parenthesize each fragment so its ORDER BY survives the UNION ALL.
        combined = SQL(" UNION ALL ").join(SQL("(%s)", f) for f in fragments)

        if offset:
            combined = SQL('%s OFFSET %s ', combined, offset)
        if limit:
            combined = SQL('%s LIMIT %s ', combined, limit)

        return combined
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Initial balance
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
    def _fetch_opening_balances(self, report, account_ids, options):
        """Compute the opening balance per account at the start of the
        reporting period.

        Returns ``{account_id: (account_record, {col_group_key: row})}``.
        """
        parts = []
        for col_key, grp_opts in report._split_options_per_column_group(options).items():
            init_opts = self._get_options_initial_balance(grp_opts)
            domain = [('account_id', 'in', account_ids)]

            if not init_opts.get('general_ledger_strict_range'):
                # Pre-period moves only count for accounts that carry an
                # initial balance.
                domain += [
                    '|',
                    ('date', '>=', init_opts['date']['date_from']),
                    ('account_id.include_initial_balance', '=', True),
                ]
                if init_opts.get('include_current_year_in_unaff_earnings'):
                    domain += [('account_id.include_initial_balance', '=', True)]

            qry = report._get_report_query(init_opts, 'from_beginning', domain=domain)
            parts.append(SQL(
                """
                SELECT
                    account_move_line.account_id AS groupby,
                    'initial_balance' AS key,
                    NULL AS max_date,
                    %(col_key)s AS column_group_key,
                    COALESCE(SUM(account_move_line.amount_currency), 0.0) AS amount_currency,
                    SUM(%(dr)s) AS debit,
                    SUM(%(cr)s) AS credit,
                    SUM(%(bal)s) AS balance
                FROM %(tbl)s
                %(fx)s
                WHERE %(cond)s
                GROUP BY account_move_line.account_id
                """,
                col_key=col_key,
                tbl=qry.from_clause,
                dr=report._currency_table_apply_rate(SQL("account_move_line.debit")),
                cr=report._currency_table_apply_rate(SQL("account_move_line.credit")),
                bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
                fx=report._currency_table_aml_join(grp_opts),
                cond=qry.where_clause,
            ))

        self.env.cr.execute(SQL(" UNION ALL ").join(parts))

        # Pre-seed every requested account so accounts with no prior activity
        # still get an (empty) entry per column group.
        init_map = {
            aid: {cg: {} for cg in options['column_groups']}
            for aid in account_ids
        }
        for row in self.env.cr.dictfetchall():
            init_map[row['groupby']][row['column_group_key']] = row

        accts = self.env['account.account'].browse(account_ids)
        return {a.id: (a, init_map[a.id]) for a in accts}
|
||||
|
||||
def _get_options_initial_balance(self, options):
|
||||
"""Derive an options dict whose date range ends just before the
|
||||
report's ``date_from``, suitable for computing opening balances."""
|
||||
derived = options.copy()
|
||||
|
||||
# End date
|
||||
raw_to = (
|
||||
derived['comparison']['periods'][-1]['date_from']
|
||||
if derived.get('comparison', {}).get('periods')
|
||||
else derived['date']['date_from']
|
||||
)
|
||||
end_dt = fields.Date.from_string(raw_to) - timedelta(days=1)
|
||||
|
||||
# Start date: if date_from aligns with a fiscal-year boundary take the
|
||||
# previous FY; otherwise use the current FY start.
|
||||
start_dt = fields.Date.from_string(derived['date']['date_from'])
|
||||
fy = self.env.company.compute_fiscalyear_dates(start_dt)
|
||||
|
||||
if start_dt == fy['date_from']:
|
||||
prev_fy = self.env.company.compute_fiscalyear_dates(start_dt - timedelta(days=1))
|
||||
begin_dt = prev_fy['date_from']
|
||||
include_curr_yr = True
|
||||
else:
|
||||
begin_dt = fy['date_from']
|
||||
include_curr_yr = False
|
||||
|
||||
derived['date'] = self.env['account.report']._get_dates_period(begin_dt, end_dt, 'range')
|
||||
derived['include_current_year_in_unaff_earnings'] = include_curr_yr
|
||||
return derived
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Line builders
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _build_account_header_line(self, report, options, account, has_entries, col_data):
|
||||
"""Produce the foldable account-level line."""
|
||||
cols = []
|
||||
for col_def in options['columns']:
|
||||
expr = col_def['expression_label']
|
||||
raw = col_data.get(col_def['column_group_key'], {}).get(expr)
|
||||
|
||||
display_val = (
|
||||
None
|
||||
if raw is None or (expr == 'amount_currency' and not account.currency_id)
|
||||
else raw
|
||||
)
|
||||
cols.append(report._build_column_dict(
|
||||
display_val, col_def, options=options,
|
||||
currency=account.currency_id if expr == 'amount_currency' else None,
|
||||
))
|
||||
|
||||
lid = report._get_generic_line_id('account.account', account.id)
|
||||
is_unfolded = any(
|
||||
report._get_res_id_from_line_id(ul, 'account.account') == account.id
|
||||
for ul in options.get('unfolded_lines')
|
||||
)
|
||||
|
||||
return {
|
||||
'id': lid,
|
||||
'name': account.display_name,
|
||||
'columns': cols,
|
||||
'level': 1,
|
||||
'unfoldable': has_entries,
|
||||
'unfolded': has_entries and (is_unfolded or options.get('unfold_all')),
|
||||
'expand_function': '_report_expand_unfoldable_line_general_ledger',
|
||||
}
|
||||
|
||||
    def _get_aml_line(self, report, parent_line_id, options, col_dict, running_bal):
        """Build a single move-line row under a given account header.

        :param col_dict: ``{column_group_key: raw_sql_row}`` for this line.
        :param running_bal: ``{column_group_key: balance_so_far}`` used to
            turn the line's own balance into a cumulative one.
        """
        cols = []
        for col_def in options['columns']:
            expr = col_def['expression_label']
            raw = col_dict[col_def['column_group_key']].get(expr)
            cur = None

            if raw is not None:
                if expr == 'amount_currency':
                    cur = self.env['res.currency'].browse(col_dict[col_def['column_group_key']]['currency_id'])
                    # Hide the amount when it is already in company currency.
                    raw = None if cur == self.env.company.currency_id else raw
                elif expr == 'balance':
                    # Show a cumulative balance, not the line's own delta.
                    raw += (running_bal[col_def['column_group_key']] or 0)

            cols.append(report._build_column_dict(raw, col_def, options=options, currency=cur))

        # Pick display metadata from the first column group that has data.
        aml_id = None
        move_label = None
        caret = None
        row_date = None
        for grp_data in col_dict.values():
            aml_id = grp_data.get('id', '')
            if aml_id:
                caret = 'account.payment' if grp_data.get('payment_id') else 'account.move.line'
                move_label = grp_data['move_name']
                row_date = str(grp_data.get('date', ''))
                break

        return {
            # The date markup keeps line ids unique across pages.
            'id': report._get_generic_line_id(
                'account.move.line', aml_id,
                parent_line_id=parent_line_id, markup=row_date,
            ),
            'caret_options': caret,
            'parent_id': parent_line_id,
            'name': move_label,
            'columns': cols,
            'level': 3,
        }
|
||||
|
||||
@api.model
|
||||
def _build_grand_total_line(self, report, options, col_totals):
|
||||
"""Build the bottom total row."""
|
||||
cols = []
|
||||
for col_def in options['columns']:
|
||||
raw = col_totals[col_def['column_group_key']].get(col_def['expression_label'])
|
||||
cols.append(report._build_column_dict(raw if raw is not None else None, col_def, options=options))
|
||||
|
||||
return {
|
||||
'id': report._get_generic_line_id(None, None, markup='total'),
|
||||
'name': _('Total'),
|
||||
'level': 1,
|
||||
'columns': cols,
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Caret / expand handlers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def caret_option_audit_tax(self, options, params):
|
||||
return self.env['account.generic.tax.report.handler'].caret_option_audit_tax(options, params)
|
||||
|
||||
    def _report_expand_unfoldable_line_general_ledger(
        self, line_dict_id, groupby, options, progress, offset, unfold_all_batch_data=None,
    ):
        """Called when an account line is unfolded. Returns initial-balance,
        individual AML lines, and load-more metadata."""

        def _extract_running_balance(line_dict):
            # Map column_group_key -> numeric balance displayed on the line.
            return {
                c['column_group_key']: lc.get('no_format', 0)
                for c, lc in zip(options['columns'], line_dict['columns'])
                if c['expression_label'] == 'balance'
            }

        report = self.env.ref('fusion_accounting.general_ledger_report')
        mdl, mdl_id = report._get_model_info_from_id(line_dict_id)
        if mdl != 'account.account':
            raise UserError(_("Invalid line ID for general ledger expansion: %s", line_dict_id))

        lines = []

        # Opening balance (only on first page)
        if offset == 0:
            if unfold_all_batch_data:
                acct_rec, init_by_cg = unfold_all_batch_data['initial_balances'][mdl_id]
            else:
                acct_rec, init_by_cg = self._fetch_opening_balances(report, [mdl_id], options)[mdl_id]

            opening_line = report._get_partner_and_general_ledger_initial_balance_line(
                options, line_dict_id, init_by_cg, acct_rec.currency_id,
            )
            if opening_line:
                lines.append(opening_line)
                # Cumulative balances start from the opening balance.
                progress = _extract_running_balance(opening_line)

        # Move lines
        # Fetch one extra row so _fetch_aml_data can flag 'has_more'.
        page_size = report.load_more_limit + 1 if report.load_more_limit and options['export_mode'] != 'print' else None
        if unfold_all_batch_data:
            aml_rows = unfold_all_batch_data['aml_results'][mdl_id]
            has_more = unfold_all_batch_data['has_more'].get(mdl_id, False)
        else:
            aml_rows, has_more = self._fetch_aml_data(report, options, [mdl_id], offset=offset, limit=page_size)
            aml_rows = aml_rows[mdl_id]

        running = progress
        for entry in aml_rows.values():
            row_line = self._get_aml_line(report, line_dict_id, options, entry, running)
            lines.append(row_line)
            running = _extract_running_balance(row_line)

        return {
            'lines': lines,
            'offset_increment': report.load_more_limit,
            'has_more': has_more,
            'progress': running,
        }
|
||||
879
Fusion Accounting/models/account_generic_tax_report.py
Normal file
879
Fusion Accounting/models/account_generic_tax_report.py
Normal file
@@ -0,0 +1,879 @@
|
||||
# Fusion Accounting - Generic Tax Report Handlers
|
||||
# Base tax-report handler, generic handler, and account/tax grouping variants
|
||||
|
||||
import ast
|
||||
from collections import defaultdict
|
||||
|
||||
from odoo import models, api, fields, Command, _
|
||||
from odoo.addons.web.controllers.utils import clean_action
|
||||
from odoo.exceptions import UserError, RedirectWarning
|
||||
from odoo.osv import expression
|
||||
from odoo.tools import SQL
|
||||
|
||||
|
||||
class FusionTaxReportHandler(models.AbstractModel):
    """Base handler providing the Closing Entry button and tax-period
    configuration for all tax reports (generic and country-specific)."""

    # Wiring for the account.report custom-handler mechanism.
    _name = 'account.tax.report.handler'
    _inherit = 'account.report.custom.handler'
    _description = 'Account Report Handler for Tax Reports'
|
||||
|
||||
    def _custom_options_initializer(self, report, options, previous_options):
        """Add the Closing Entry button and expose the company's tax
        periodicity in the options."""
        # Standard periodicities that map onto the generic date filters.
        period_type_map = {'monthly': 'month', 'trimester': 'quarter', 'year': 'year'}

        options['buttons'].append({
            'name': _('Closing Entry'),
            'action': 'action_periodic_vat_entries',
            'sequence': 110,
            'always_show': True,
        })
        self._enable_export_buttons_for_common_vat_groups_in_branches(options)

        start_day, start_month = self.env.company._get_tax_closing_start_date_attributes(report)
        tax_period = self.env.company._get_tax_periodicity(report)
        options['tax_periodicity'] = {
            'periodicity': tax_period,
            'months_per_period': self.env.company._get_tax_periodicity_months_delay(report),
            'start_day': start_day,
            'start_month': start_month,
        }

        # Show the dedicated tax-period filter only for non-standard
        # periodicities or tax years not starting on January 1st.
        options['show_tax_period_filter'] = (
            tax_period not in period_type_map or start_day != 1 or start_month != 1
        )
        if not options['show_tax_period_filter']:
            # Fall back to the equivalent standard date filter.
            std_period = period_type_map[tax_period]
            options['date']['filter'] = options['date']['filter'].replace('tax_period', std_period)
            options['date']['period_type'] = options['date']['period_type'].replace('tax_period', std_period)
|
||||
|
||||
def _get_custom_display_config(self):
|
||||
cfg = defaultdict(dict)
|
||||
cfg['templates']['AccountReportFilters'] = 'fusion_accounting.GenericTaxReportFiltersCustomizable'
|
||||
return cfg
|
||||
|
||||
    def _customize_warnings(self, report, options, all_column_groups_expression_totals, warnings):
        """Drop the draft-entries warning when the only drafts are tax-closing
        moves, and add a warning when lines carry inactive tax tags."""
        if 'fusion_accounting.common_warning_draft_in_period' in warnings:
            # Closing moves are drafts by design; only warn about other drafts.
            has_non_closing_drafts = self.env['account.move'].search_count([
                ('state', '=', 'draft'),
                ('date', '<=', options['date']['date_to']),
                ('tax_closing_report_id', '=', False),
            ], limit=1)
            if not has_non_closing_drafts:
                warnings.pop('fusion_accounting.common_warning_draft_in_period')

        # Probe for any line in the period tagged with an archived tax tag.
        qry = report._get_report_query(options, 'strict_range')
        inactive_rows = self.env.execute_query(SQL("""
            SELECT 1
            FROM %s
            JOIN account_account_tag_account_move_line_rel aml_tag
                ON account_move_line.id = aml_tag.account_move_line_id
            JOIN account_account_tag tag
                ON aml_tag.account_account_tag_id = tag.id
            WHERE %s AND NOT tag.active
            LIMIT 1
        """, qry.from_clause, qry.where_clause))
        if inactive_rows:
            warnings['fusion_accounting.tax_report_warning_inactive_tags'] = {}
|
||||
|
||||
# ================================================================
|
||||
# TAX CLOSING
|
||||
# ================================================================
|
||||
|
||||
def _is_period_equal_to_options(self, report, options):
|
||||
opt_to = fields.Date.from_string(options['date']['date_to'])
|
||||
opt_from = fields.Date.from_string(options['date']['date_from'])
|
||||
boundary_from, boundary_to = self.env.company._get_tax_closing_period_boundaries(opt_to, report)
|
||||
return boundary_from == opt_from and boundary_to == opt_to
|
||||
|
||||
def action_periodic_vat_entries(self, options, from_post=False):
    """Generate (or fetch) the periodic VAT closing entries and return an
    action to display them.

    If the selected dates do not line up with a tax period (and the user
    has not already confirmed via ``override_tax_closing_warning`` in the
    context), a confirmation dialog action is returned first; confirming
    re-enters this method with the override flag set.

    :param options: report options dict (must contain ``report_id``).
    :param from_post: True when triggered from posting a move.
    :return: an ir.actions dict (client dialog, form view for a single
        move, or list view for several moves).
    """
    report = self.env['account.report'].browse(options['report_id'])
    if (
        options['date']['period_type'] != 'tax_period'
        and not self._is_period_equal_to_options(report, options)
        and not self.env.context.get('override_tax_closing_warning')
    ):
        # Pick the warning wording: multi-company without a usable tax
        # unit gets a per-company-periodicity message, otherwise a
        # period-mismatch message.
        if len(options['companies']) > 1 and (
            report.filter_multi_company != 'tax_units'
            or not (report.country_id and options['available_tax_units'])
        ):
            warning_msg = _(
                "You're about to generate closing entries for multiple companies. "
                "Each will follow its own tax periodicity."
            )
        else:
            warning_msg = _(
                "The selected dates don't match a tax period. The closing entry "
                "will target the closest matching period."
            )

        # NOTE(review): the confirmed re-entry below does not forward
        # ``from_post`` — confirm whether that is intentional.
        return {
            'type': 'ir.actions.client',
            'tag': 'fusion_accounting.redirect_action',
            'target': 'new',
            'params': {
                'depending_action': self.with_context(
                    override_tax_closing_warning=True,
                ).action_periodic_vat_entries(options),
                'message': warning_msg,
                'button_text': _("Proceed"),
            },
            'context': {'dialog_size': 'medium', 'override_tax_closing_warning': True},
        }

    generated_moves = self._get_periodic_vat_entries(options, from_post=from_post)
    action = self.env["ir.actions.actions"]._for_xml_id("account.action_move_journal_line")
    action = clean_action(action, env=self.env)
    # Drop the journal-entries default domain: we target specific moves.
    action.pop('domain', None)

    if len(generated_moves) == 1:
        # Single move: open it directly in form view.
        action['views'] = [(self.env.ref('account.view_move_form').id, 'form')]
        action['res_id'] = generated_moves.id
    else:
        action['domain'] = [('id', 'in', generated_moves.ids)]
    # The closing moves may still be drafts, so remove the posted filter.
    action['context'] = dict(ast.literal_eval(action['context']))
    action['context'].pop('search_default_posted', None)
    return action
|
||||
|
||||
def _get_periodic_vat_entries(self, options, from_post=False):
    """Return the tax closing moves for every company on the report.

    Companies that already have a closing entry for the period (draft or
    posted) keep it; a new closing is generated only for the remaining
    companies.

    :param options: report options dict (must contain ``report_id``).
    :param from_post: forwarded to ``_generate_tax_closing_entries``.
    :return: account.move recordset (existing + newly generated closings).
    """
    report = self.env['account.report'].browse(options['report_id'])
    if options.get('integer_rounding'):
        # Flag consumed downstream when computing the closing amounts.
        options['integer_rounding_enabled'] = True

    result_moves = self.env['account.move']
    company_set = self.env['res.company'].browse(report.get_report_company_ids(options))

    # Reuse any existing closing for the period, including drafts.
    existing = self._get_tax_closing_entries_for_closed_period(
        report, options, company_set, posted_only=False,
    )
    result_moves += existing
    # Generate closings only for companies without one yet.
    result_moves += self._generate_tax_closing_entries(
        report, options,
        companies=company_set - existing.company_id,
        from_post=from_post,
    )
    return result_moves
|
||||
|
||||
def _generate_tax_closing_entries(self, report, options, closing_moves=None, companies=None, from_post=False):
    """(Re)compute the tax closing moves for the given companies.

    For each company without a pre-supplied closing move, a closing move
    per fiscal position is fetched or created, then every draft closing
    move gets its lines rebuilt from the VAT balances of its period.

    :param report: the account.report being closed.
    :param options: report options dict.
    :param closing_moves: optional account.move recordset of closings to
        reuse; only their draft members are recomputed.
    :param companies: optional res.company recordset; defaults to the
        report's companies.
    :param from_post: when True, skip rewriting the current company's
        move (it is being posted and must not be modified here).
    :return: account.move recordset of all closing moves involved.
    """
    if companies is None:
        companies = self.env['res.company'].browse(report.get_report_company_ids(options))
    if closing_moves is None:
        closing_moves = self.env['account.move']

    period_end = fields.Date.from_string(options['date']['date_to'])
    moves_by_company = defaultdict(lambda: self.env['account.move'])

    # Companies not covered by the supplied closing moves need a
    # fetched/created move; supplied drafts are recomputed as-is.
    remaining_cos = companies.filtered(lambda c: c not in closing_moves.company_id)
    if closing_moves:
        for mv in closing_moves.filtered(lambda m: m.state == 'draft'):
            moves_by_company[mv.company_id] |= mv

    for co in remaining_cos:
        include_dom, fpos_set = self._get_fpos_info_for_tax_closing(co, report, options)
        co_moves = co._get_and_update_tax_closing_moves(
            period_end, report, fiscal_positions=fpos_set, include_domestic=include_dom,
        )
        moves_by_company[co] = co_moves
        closing_moves += co_moves

    for co, co_moves in moves_by_company.items():
        # Collect the countries whose tax groups must be configured:
        # the foreign-VAT country per fiscal position, else the
        # company's own fiscal country.
        countries = self.env['res.country']
        for mv in co_moves:
            if mv.fiscal_position_id.foreign_vat:
                countries |= mv.fiscal_position_id.country_id
            else:
                countries |= co.account_fiscal_country_id

        # Abort with a redirect if any tax group lacks its accounts.
        if self.env['account.tax.group']._check_misconfigured_tax_groups(co, countries):
            self._redirect_to_misconfigured_tax_groups(co, countries)

        for mv in co_moves:
            if from_post and mv == moves_by_company.get(self.env.company):
                # The move being posted must keep its current lines.
                continue

            mv_opts = {
                **options,
                'fiscal_position': mv.fiscal_position_id.id if mv.fiscal_position_id else 'domestic',
            }
            line_cmds, tg_subtotals = self._compute_vat_closing_entry(co, mv_opts)
            line_cmds += self._add_tax_group_closing_items(tg_subtotals, mv)

            # Replace any previously computed lines wholesale.
            if mv.line_ids:
                line_cmds += [Command.delete(aml.id) for aml in mv.line_ids]

            if line_cmds:
                mv.write({'line_ids': line_cmds})

    return closing_moves
|
||||
|
||||
def _get_tax_closing_entries_for_closed_period(self, report, options, companies, posted_only=True):
    """Find the existing tax closing move of each company for the period.

    For every company, the tax-period end date matching the options is
    computed and at most one closing move (for this report, on that
    date, with a matching fiscal position) is retrieved.

    :param posted_only: when True only posted closings qualify,
        otherwise any non-cancelled move does.
    :return: account.move recordset (at most one move per company).
    """
    found = self.env['account.move']
    for co in companies:
        # Only the period end is needed; the start is discarded.
        _s, p_end = co._get_tax_closing_period_boundaries(
            fields.Date.from_string(options['date']['date_to']), report,
        )
        inc_dom, fpos = self._get_fpos_info_for_tax_closing(co, report, options)
        # False stands for "no fiscal position" (the domestic closing).
        fpos_ids = fpos.ids + ([False] if inc_dom else [])
        state_cond = ('state', '=', 'posted') if posted_only else ('state', '!=', 'cancel')
        found += self.env['account.move'].search([
            ('company_id', '=', co.id),
            ('fiscal_position_id', 'in', fpos_ids),
            ('date', '=', p_end),
            ('tax_closing_report_id', '=', options['report_id']),
            state_cond,
        ], limit=1)
    return found
|
||||
|
||||
@api.model
def _compute_vat_closing_entry(self, company, options):
    """Compute the closing move lines reversing the period's VAT balances.

    Aggregates, per tax and account, the balance of every posted tax
    line whose repartition line participates in tax closing, over the
    company's tax period containing the options' end date. Each
    aggregate becomes a counter-posting line; per tax group, the signed
    total is accumulated for the payable/receivable wrap-up lines added
    later by ``_add_tax_group_closing_items``.

    :param company: res.company the closing is computed for.
    :param options: report options dict; ``fiscal_position`` must be set.
    :return: tuple ``(line_cmds, tg_subtotals)`` where ``line_cmds`` is a
        list of one2many commands and ``tg_subtotals`` maps
        ``(advance_account_id, receivable_account_id, payable_account_id)``
        to the group's signed total.
    """
    self = self.with_company(company)
    # Flush everything the raw SQL below reads, so the query sees the
    # current in-memory state.
    self.env['account.tax'].flush_model(['name', 'tax_group_id'])
    self.env['account.tax.repartition.line'].flush_model(['use_in_tax_closing'])
    self.env['account.move.line'].flush_model([
        'account_id', 'debit', 'credit', 'move_id', 'tax_line_id',
        'date', 'company_id', 'display_type', 'parent_state',
    ])
    self.env['account.move'].flush_model(['state'])

    # Rebuild the options over the full tax period (posted entries only),
    # regardless of the dates currently displayed on the report.
    adjusted_opts = {**options, 'all_entries': False, 'date': dict(options['date'])}
    report = self.env['account.report'].browse(options['report_id'])
    p_start, p_end = company._get_tax_closing_period_boundaries(
        fields.Date.from_string(options['date']['date_to']), report,
    )
    adjusted_opts['date']['date_from'] = fields.Date.to_string(p_start)
    adjusted_opts['date']['date_to'] = fields.Date.to_string(p_end)
    adjusted_opts['date']['period_type'] = 'custom'
    adjusted_opts['date']['filter'] = 'custom'
    adjusted_opts = report.with_context(
        allowed_company_ids=company.ids,
    ).get_options(previous_options=adjusted_opts)
    # get_options may reset the fiscal position; force the caller's one.
    adjusted_opts['fiscal_position'] = options['fiscal_position']

    qry = self.env.ref('account.generic_tax_report')._get_report_query(
        adjusted_opts, 'strict_range',
        domain=self._get_vat_closing_entry_additional_domain(),
    )
    tax_name_expr = self.env['account.tax']._field_to_sql('tax', 'name')
    # Sum the balances of tax lines whose repartition line takes part in
    # tax closing, grouped by tax group / tax / account.
    stmt = SQL("""
        SELECT "account_move_line".tax_line_id as tax_id,
               tax.tax_group_id as tax_group_id,
               %(tax_name)s as tax_name,
               "account_move_line".account_id,
               COALESCE(SUM("account_move_line".balance), 0) as amount
        FROM account_tax tax, account_tax_repartition_line repartition, %(tbl)s
        WHERE %(where)s
          AND tax.id = "account_move_line".tax_line_id
          AND repartition.id = "account_move_line".tax_repartition_line_id
          AND repartition.use_in_tax_closing
        GROUP BY tax.tax_group_id, "account_move_line".tax_line_id, tax.name, "account_move_line".account_id
    """, tax_name=tax_name_expr, tbl=qry.from_clause, where=qry.where_clause)
    self.env.cr.execute(stmt)
    raw_results = self.env.cr.dictfetchall()
    # Hook for localizations (e.g. rounding) to tweak the rows.
    raw_results = self._postprocess_vat_closing_entry_results(company, adjusted_opts, raw_results)

    # Regroup rows: tax group record -> tax id -> [(name, account, amount)].
    # browse() preserves order, so zip keeps each row with its group.
    tg_ids = [r['tax_group_id'] for r in raw_results]
    tax_groups = {}
    for tg, row in zip(self.env['account.tax.group'].browse(tg_ids), raw_results):
        tax_groups.setdefault(tg, {}).setdefault(row.get('tax_id'), []).append(
            (row.get('tax_name'), row.get('account_id'), row.get('amount'))
        )

    line_cmds = []
    tg_subtotals = {}
    cur = self.env.company.currency_id

    for tg, tax_entries in tax_groups.items():
        # Groups without both closing accounts cannot be closed; skip.
        if not tg.tax_receivable_account_id or not tg.tax_payable_account_id:
            continue
        tg_total = 0
        for _tid, vals_list in tax_entries.items():
            for t_name, acct_id, amt in vals_list:
                # Counter-posting: reverse the accumulated tax balance.
                line_cmds.append((0, 0, {
                    'name': t_name,
                    'debit': abs(amt) if amt < 0 else 0,
                    'credit': amt if amt > 0 else 0,
                    'account_id': acct_id,
                }))
                tg_total += amt

        if not cur.is_zero(tg_total):
            # Accumulate by the accounts triple so groups sharing
            # accounts are wrapped up together.
            key = (
                tg.advance_tax_payment_account_id.id or False,
                tg.tax_receivable_account_id.id,
                tg.tax_payable_account_id.id,
            )
            tg_subtotals[key] = tg_subtotals.get(key, 0) + tg_total

    if not line_cmds:
        # Nothing due this period: create a zero "adjustment" pair so the
        # closing move still materializes, using the default sale and
        # purchase tax accounts.
        rep_in = self.env['account.tax.repartition.line'].search([
            *self.env['account.tax.repartition.line']._check_company_domain(company),
            ('repartition_type', '=', 'tax'),
            ('document_type', '=', 'invoice'),
            ('tax_id.type_tax_use', '=', 'purchase'),
        ], limit=1)
        rep_out = self.env['account.tax.repartition.line'].search([
            *self.env['account.tax.repartition.line']._check_company_domain(company),
            ('repartition_type', '=', 'tax'),
            ('document_type', '=', 'invoice'),
            ('tax_id.type_tax_use', '=', 'sale'),
        ], limit=1)
        if rep_out.account_id and rep_in.account_id:
            line_cmds = [
                Command.create({'name': _('Tax Received Adjustment'), 'debit': 0, 'credit': 0.0, 'account_id': rep_out.account_id.id}),
                Command.create({'name': _('Tax Paid Adjustment'), 'debit': 0.0, 'credit': 0, 'account_id': rep_in.account_id.id}),
            ]

    return line_cmds, tg_subtotals
|
||||
|
||||
def _get_vat_closing_entry_additional_domain(self):
|
||||
return []
|
||||
|
||||
def _postprocess_vat_closing_entry_results(self, company, options, results):
|
||||
return results
|
||||
|
||||
def _vat_closing_entry_results_rounding(self, company, options, results, rounding_accounts, vat_results_summary):
    """Append a rounding-difference row to the VAT closing results.

    Compares the summed closing amounts against the amounts displayed on
    the report (per the ``vat_results_summary`` spec of
    ``(operation, report_line_id, column_label)`` triples) and, if a
    residual remains, adds a balancing row on the configured
    profit/loss rounding account.

    :param rounding_accounts: dict with 'profit' and 'loss' account
        records; if either is missing, results are returned unchanged.
    :return: the (possibly extended) results list.
    """
    if not rounding_accounts.get('profit') or not rounding_accounts.get('loss'):
        return results

    total_amt = sum(r['amount'] for r in results)
    # The rounding row reuses the last row's tax group (None if empty).
    last_tg_id = results[-1]['tax_group_id'] if results else None
    report = self.env['account.report'].browse(options['report_id'])

    for ln in report._get_lines(options):
        mdl, rec_id = report._get_model_info_from_id(ln['id'])
        if mdl != 'account.report.line':
            continue
        for (op_type, rpt_line_id, col_label) in vat_results_summary:
            for col in ln['columns']:
                if rec_id != rpt_line_id or col['expression_label'] != col_label:
                    continue
                # Due/total amounts add to the residual, deductible
                # amounts subtract from it.
                if op_type in {'due', 'total'}:
                    total_amt += col['no_format']
                elif op_type == 'deductible':
                    total_amt -= col['no_format']

    diff = company.currency_id.round(total_amt)
    if not company.currency_id.is_zero(diff):
        results.append({
            'tax_name': _('Difference from rounding taxes'),
            'amount': diff * -1,
            'tax_group_id': last_tg_id,
            'account_id': rounding_accounts['profit'].id if diff < 0 else rounding_accounts['loss'].id,
        })
    return results
|
||||
|
||||
@api.model
def _add_tax_group_closing_items(self, tg_subtotals, closing_move):
    """Build the wrap-up lines moving each tax group's total to its
    payable/receivable (and advance-payment) accounts.

    For every accounts triple in ``tg_subtotals``, each involved account
    is first zeroed out (its posted balance up to the closing date is
    countered, once per account across all groups), then a single
    payable or receivable line carries the resulting net amount.

    :param tg_subtotals: mapping from
        ``(advance_account_id_or_False, receivable_id, payable_id)`` to
        the group's signed total (see ``_compute_vat_closing_entry``).
    :param closing_move: the account.move the lines are destined for
        (provides company, currency and cut-off date).
    :return: list of one2many commands.
    """
    sql_balance = '''
        SELECT SUM(aml.balance) AS balance
        FROM account_move_line aml
        LEFT JOIN account_move move ON move.id = aml.move_id
        WHERE aml.account_id = %s AND aml.date <= %s AND move.state = 'posted' AND aml.company_id = %s
    '''
    cur = closing_move.company_id.currency_id
    cmds = []
    # Accounts already countered — each must only be zeroed once even if
    # several groups share it.
    balanced_accounts = []

    def _balance_account(acct_id, lbl):
        # Counter the account's posted balance up to the closing date and
        # return that balance so the caller can fold it into the total.
        self.env.cr.execute(sql_balance, (acct_id, closing_move.date, closing_move.company_id.id))
        row = self.env.cr.dictfetchone()
        bal = row.get('balance') or 0
        if not cur.is_zero(bal):
            cmds.append((0, 0, {
                'name': lbl,
                'debit': abs(bal) if bal < 0 else 0,
                'credit': abs(bal) if bal > 0 else 0,
                'account_id': acct_id,
            }))
        return bal

    for key, val in tg_subtotals.items():
        running = val
        # key = (advance account or False, receivable, payable).
        if key[0] and key[0] not in balanced_accounts:
            running += _balance_account(key[0], _('Balance tax advance payment account'))
            balanced_accounts.append(key[0])
        if key[1] and key[1] not in balanced_accounts:
            running += _balance_account(key[1], _('Balance tax current account (receivable)'))
            balanced_accounts.append(key[1])
        if key[2] and key[2] not in balanced_accounts:
            running += _balance_account(key[2], _('Balance tax current account (payable)'))
            balanced_accounts.append(key[2])
        if not cur.is_zero(running):
            # Net result: negative -> payable to the state, positive ->
            # receivable from the state.
            cmds.append(Command.create({
                'name': _('Payable tax amount') if running < 0 else _('Receivable tax amount'),
                'debit': running if running > 0 else 0,
                'credit': abs(running) if running < 0 else 0,
                'account_id': key[2] if running < 0 else key[1],
            }))
    return cmds
|
||||
|
||||
@api.model
def _redirect_to_misconfigured_tax_groups(self, company, countries):
    """Abort with a redirect to the tax groups missing closing accounts.

    Always raises: the user is sent to the tax-group list filtered on
    the given countries (plus country-less groups) so the required
    accounts can be configured.

    :raises RedirectWarning: unconditionally.
    """
    tax_group_action = {
        'type': 'ir.actions.act_window',
        'name': 'Tax groups',
        'res_model': 'account.tax.group',
        'view_mode': 'list',
        'views': [[False, 'list']],
        'domain': ['|', ('country_id', 'in', countries.ids), ('country_id', '=', False)],
    }
    raise RedirectWarning(
        _('Please specify the accounts necessary for the Tax Closing Entry.'),
        tax_group_action,
        _('Configure your TAX accounts - %s', company.display_name),
    )
|
||||
|
||||
def _get_fpos_info_for_tax_closing(self, company, report, options):
    """Resolve the fiscal positions targeted by the tax closing.

    Interprets ``options['fiscal_position']``, which is either
    'domestic', 'all', or a fiscal position id.

    :return: ``(include_domestic, fiscal_positions)`` where
        ``include_domestic`` tells whether a domestic (no fiscal
        position) closing is needed, and ``fiscal_positions`` is the
        account.fiscal.position recordset to close for.
    """
    FiscalPosition = self.env['account.fiscal.position']
    selected = options['fiscal_position']

    if selected == 'domestic':
        # No foreign-VAT positions: only the domestic closing.
        fiscal_positions = FiscalPosition
        include_domestic = True
    elif selected == 'all':
        fiscal_positions = FiscalPosition.search([
            *FiscalPosition._check_company_domain(company),
            ('foreign_vat', '!=', False),
        ])
        # Domestic closing is needed unless a foreign-VAT position
        # already covers the report's (or company's) own country.
        include_domestic = (
            not fiscal_positions
            or not report.country_id
            or company.account_fiscal_country_id == fiscal_positions[0].country_id
        )
    else:
        # A specific foreign-VAT fiscal position was selected.
        fiscal_positions = FiscalPosition.browse([selected])
        include_domestic = False

    return include_domestic, fiscal_positions
|
||||
|
||||
def _get_amls_with_archived_tags_domain(self, options):
|
||||
domain = [
|
||||
('tax_tag_ids.active', '=', False),
|
||||
('parent_state', '=', 'posted'),
|
||||
('date', '>=', options['date']['date_from']),
|
||||
]
|
||||
if options['date']['mode'] == 'single':
|
||||
domain.append(('date', '<=', options['date']['date_to']))
|
||||
return domain
|
||||
|
||||
def action_open_amls_with_archived_tags(self, options, params=None):
    """Open the list of journal items that carry archived tax tags.

    ``active_test`` is disabled in the context so the archived tags
    themselves remain visible in the list.
    """
    list_view_id = self.env.ref('fusion_accounting.view_archived_tag_move_tree').id
    action = {
        'name': _("Journal items with archived tax tags"),
        'type': 'ir.actions.act_window',
        'res_model': 'account.move.line',
        'domain': self._get_amls_with_archived_tags_domain(options),
        'context': {'active_test': False},
        'views': [(list_view_id, 'list')],
    }
    return action
|
||||
|
||||
|
||||
class FusionGenericTaxReportHandler(models.AbstractModel):
|
||||
"""Handler for the standard generic tax report (Tax -> Tax grouping)."""
|
||||
|
||||
_name = 'account.generic.tax.report.handler'
|
||||
_inherit = 'account.tax.report.handler'
|
||||
_description = 'Generic Tax Report Custom Handler'
|
||||
|
||||
def _get_custom_display_config(self):
    """Extend the base display config for the generic tax report:
    apply the report's CSS class and swap in the custom line-name
    template."""
    display_config = super()._get_custom_display_config()
    display_config['css_custom_class'] = 'generic_tax_report'
    templates = display_config['templates']
    templates['AccountReportLineName'] = 'fusion_accounting.TaxReportLineName'
    return display_config
|
||||
|
||||
def _custom_options_initializer(self, report, options, previous_options=None):
    """Initialize options for the generic tax report.

    When the report has no country, a single company is selected, and
    there are no (or only the one implied) foreign-VAT fiscal positions,
    the fiscal-position filter is pointless: force it to 'all' and drop
    the domestic choice.
    """
    super()._custom_options_initializer(report, options, previous_options=previous_options)

    # With allow_domestic, even one available fiscal position is a real
    # choice; without it, a single one is the only possibility anyway.
    fpos_threshold = 0 if options['allow_domestic'] else 1
    no_real_fpos_choice = len(options['available_vat_fiscal_positions']) <= fpos_threshold
    single_company = len(options['companies']) <= 1

    if not report.country_id and no_real_fpos_choice and single_company:
        options['allow_domestic'] = False
        options['fiscal_position'] = 'all'
|
||||
|
||||
def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
    """Generate the report lines using the default (tax-only) grouping."""
    grouping = 'default'
    return self._get_dynamic_lines(report, options, grouping, warnings)
|
||||
|
||||
def _caret_options_initializer(self):
    """Declare the caret (row dropdown) actions for tax report lines:
    a single 'Audit' entry opening the tax audit."""
    audit_entry = {'name': _("Audit"), 'action': 'caret_option_audit_tax'}
    return {'generic_tax_report': [audit_entry]}
|
||||
|
||||
def _get_dynamic_lines(self, report, options, grouping, warnings=None):
    """Build the report lines tree for the requested grouping.

    :param grouping: 'tax_account' (tax then account), 'account_tax'
        (account then tax), or anything else for the default tax-only
        grouping. The first level is always the tax-use type
        (sale/purchase).
    :return: list of ``(sequence, line_dict)`` tuples as expected by the
        report engine.
    """
    opts_per_cg = report._split_options_per_column_group(options)

    # gb_fields drives both the SQL grouping and the tree depth;
    # comodel_list gives the record model of each level (None = the
    # type_tax_use selection pseudo-level).
    if grouping == 'tax_account':
        gb_fields = [('src_tax', 'type_tax_use'), ('src_tax', 'id'), ('account', 'id')]
        comodel_list = [None, 'account.tax', 'account.account']
    elif grouping == 'account_tax':
        gb_fields = [('src_tax', 'type_tax_use'), ('account', 'id'), ('src_tax', 'id')]
        comodel_list = [None, 'account.account', 'account.tax']
    else:
        gb_fields = [('src_tax', 'type_tax_use'), ('src_tax', 'id')]
        comodel_list = [None, 'account.tax']

    # Account-level groupings need the tax-details query; the default
    # grouping uses the cheaper aggregate-only reading.
    if grouping in ('tax_account', 'account_tax'):
        amount_tree = self._read_generic_tax_report_amounts(report, opts_per_cg, gb_fields)
    else:
        amount_tree = self._read_generic_tax_report_amounts_no_tax_details(report, options, opts_per_cg)

    # Collect the distinct ids used at each depth of the tree.
    id_sets = [set() for _ in gb_fields]

    def _collect_ids(node, depth=0):
        for k, v in node.items():
            if k:
                id_sets[depth].add(k)
            if v.get('children'):
                _collect_ids(v['children'], depth + 1)

    _collect_ids(amount_tree)

    # Per depth, map id -> (record-or-selection-entry, display order).
    # active_test=False: archived taxes/accounts may still have amounts.
    sort_maps = []
    for i, cm in enumerate(comodel_list):
        if cm:
            recs = self.env[cm].with_context(active_test=False).search([('id', 'in', tuple(id_sets[i]))])
            sort_maps.append({r.id: (r, j) for j, r in enumerate(recs)})
        else:
            sel = self.env['account.tax']._fields['type_tax_use'].selection
            sort_maps.append({v[0]: (v, j) for j, v in enumerate(sel) if v[0] in id_sets[i]})

    output = []
    self._populate_lines_recursively(report, options, output, sort_maps, gb_fields, amount_tree, warnings=warnings)
    return output
|
||||
|
||||
# ================================================================
|
||||
# AMOUNT COMPUTATION
|
||||
# ================================================================
|
||||
|
||||
@api.model
def _read_generic_tax_report_amounts_no_tax_details(self, report, options, opts_per_cg):
    """Read base and tax amounts per tax, without the tax-details query.

    Runs two SQL passes per column group: one over base lines (lines a
    tax applies to) and one over tax lines (lines generated by a tax).
    Group taxes are flattened onto their children when those children
    have a meaningful type_tax_use ('to_expand'), otherwise amounts are
    attributed to the group itself.

    :return: nested dict
        ``{type_tax_use: {amount keys..., 'children': {tax_id: {...}}}}``
        where amount keys (base_amount, tax_amount, tax_non_deductible,
        tax_deductible, tax_due) map column-group keys to floats.
    """
    co_ids = report.get_report_company_ids(options)
    co_domain = self.env['account.tax']._check_company_domain(co_ids)
    co_where = self.env['account.tax'].with_context(active_test=False)._where_calc(co_domain)
    # Pre-load group-tax composition: which children each group has and
    # whether they carry their own type_tax_use.
    self.env.cr.execute(SQL('''
        SELECT account_tax.id, account_tax.type_tax_use,
               ARRAY_AGG(child_tax.id) AS child_tax_ids,
               ARRAY_AGG(DISTINCT child_tax.type_tax_use) AS child_types
        FROM account_tax_filiation_rel account_tax_rel
        JOIN account_tax ON account_tax.id = account_tax_rel.parent_tax
        JOIN account_tax child_tax ON child_tax.id = account_tax_rel.child_tax
        WHERE account_tax.amount_type = 'group' AND %s
        GROUP BY account_tax.id
    ''', co_where.where_clause or SQL("TRUE")))

    group_info = {}
    child_map = {}
    for row in self.env.cr.dictfetchall():
        # A group is expanded onto its children unless all children are
        # type 'none' (then amounts stay on the group).
        row['to_expand'] = row['child_types'] != ['none']
        group_info[row['id']] = row
        for cid in row['child_tax_ids']:
            child_map[cid] = row['id']

    # Tree skeleton: every amount key pre-zeroed per column group.
    results = defaultdict(lambda: {
        'base_amount': {cg: 0.0 for cg in options['column_groups']},
        'tax_amount': {cg: 0.0 for cg in options['column_groups']},
        'tax_non_deductible': {cg: 0.0 for cg in options['column_groups']},
        'tax_deductible': {cg: 0.0 for cg in options['column_groups']},
        'tax_due': {cg: 0.0 for cg in options['column_groups']},
        'children': defaultdict(lambda: {
            'base_amount': {cg: 0.0 for cg in options['column_groups']},
            'tax_amount': {cg: 0.0 for cg in options['column_groups']},
            'tax_non_deductible': {cg: 0.0 for cg in options['column_groups']},
            'tax_deductible': {cg: 0.0 for cg in options['column_groups']},
            'tax_due': {cg: 0.0 for cg in options['column_groups']},
        }),
    })

    for cg_key, cg_opts in opts_per_cg.items():
        qry = report._get_report_query(cg_opts, 'strict_range')

        # Base amounts
        self.env.cr.execute(SQL('''
            SELECT tax.id AS tax_id, tax.type_tax_use AS tax_type_tax_use,
                   src_group_tax.id AS src_group_tax_id, src_group_tax.type_tax_use AS src_group_tax_type_tax_use,
                   src_tax.id AS src_tax_id, src_tax.type_tax_use AS src_tax_type_tax_use,
                   SUM(account_move_line.balance) AS base_amount
            FROM %(tbl)s
            JOIN account_move_line_account_tax_rel tax_rel ON account_move_line.id = tax_rel.account_move_line_id
            JOIN account_tax tax ON tax.id = tax_rel.account_tax_id
            LEFT JOIN account_tax src_tax ON src_tax.id = account_move_line.tax_line_id
            LEFT JOIN account_tax src_group_tax ON src_group_tax.id = account_move_line.group_tax_id
            WHERE %(where)s
              AND (account_move_line__move_id.always_tax_exigible OR account_move_line__move_id.tax_cash_basis_rec_id IS NOT NULL OR tax.tax_exigibility != 'on_payment')
              AND (
                  (account_move_line.tax_line_id IS NOT NULL AND (src_tax.type_tax_use IN ('sale','purchase') OR src_group_tax.type_tax_use IN ('sale','purchase')))
                  OR (account_move_line.tax_line_id IS NULL AND tax.type_tax_use IN ('sale','purchase'))
              )
            GROUP BY tax.id, src_group_tax.id, src_tax.id
            ORDER BY src_group_tax.sequence, src_group_tax.id, src_tax.sequence, src_tax.id, tax.sequence, tax.id
        ''', tbl=qry.from_clause, where=qry.where_clause))

        # Guards against double-counting a group's base when several of
        # its children hit the same lines.
        groups_with_extra = set()
        for r in self.env.cr.dictfetchall():
            # A row with src_tax_id comes from a tax line (tax-on-tax
            # base); otherwise it is a plain base line.
            is_tax_ln = bool(r['src_tax_id'])
            if is_tax_ln:
                if r['src_group_tax_id'] and not group_info.get(r['src_group_tax_id'], {}).get('to_expand') and r['tax_id'] in group_info.get(r['src_group_tax_id'], {}).get('child_tax_ids', []):
                    # Base already counted on the (non-expanded) group
                    # via the base-line rows: skip.
                    pass
                elif r['tax_type_tax_use'] == 'none' and child_map.get(r['tax_id']):
                    # 'none' child of a group: attribute to the group,
                    # but only once.
                    gid = child_map[r['tax_id']]
                    if gid not in groups_with_extra:
                        gi = group_info[gid]
                        results[gi['type_tax_use']]['children'][gid]['base_amount'][cg_key] += r['base_amount']
                        groups_with_extra.add(gid)
                else:
                    ttu = r['src_group_tax_type_tax_use'] or r['src_tax_type_tax_use']
                    results[ttu]['children'][r['tax_id']]['base_amount'][cg_key] += r['base_amount']
            else:
                if r['tax_id'] in group_info and group_info[r['tax_id']]['to_expand']:
                    # Expanded group: replicate the base on each child.
                    gi = group_info[r['tax_id']]
                    for child_id in gi['child_tax_ids']:
                        results[gi['type_tax_use']]['children'][child_id]['base_amount'][cg_key] += r['base_amount']
                else:
                    results[r['tax_type_tax_use']]['children'][r['tax_id']]['base_amount'][cg_key] += r['base_amount']

        # Tax amounts
        # Optional deductibility breakdown columns (journal report).
        sel_ded = join_ded = gb_ded = SQL()
        if cg_opts.get('account_journal_report_tax_deductibility_columns'):
            sel_ded = SQL(", repartition.use_in_tax_closing AS trl_tax_closing, SIGN(repartition.factor_percent) AS trl_factor")
            join_ded = SQL("JOIN account_tax_repartition_line repartition ON account_move_line.tax_repartition_line_id = repartition.id")
            gb_ded = SQL(', repartition.use_in_tax_closing, SIGN(repartition.factor_percent)')

        self.env.cr.execute(SQL('''
            SELECT tax.id AS tax_id, tax.type_tax_use AS tax_type_tax_use,
                   group_tax.id AS group_tax_id, group_tax.type_tax_use AS group_tax_type_tax_use,
                   SUM(account_move_line.balance) AS tax_amount %(sel_ded)s
            FROM %(tbl)s
            JOIN account_tax tax ON tax.id = account_move_line.tax_line_id
            %(join_ded)s
            LEFT JOIN account_tax group_tax ON group_tax.id = account_move_line.group_tax_id
            WHERE %(where)s
              AND (account_move_line__move_id.always_tax_exigible OR account_move_line__move_id.tax_cash_basis_rec_id IS NOT NULL OR tax.tax_exigibility != 'on_payment')
              AND ((group_tax.id IS NULL AND tax.type_tax_use IN ('sale','purchase')) OR (group_tax.id IS NOT NULL AND group_tax.type_tax_use IN ('sale','purchase')))
            GROUP BY tax.id, group_tax.id %(gb_ded)s
        ''', sel_ded=sel_ded, tbl=qry.from_clause, join_ded=join_ded, where=qry.where_clause, gb_ded=gb_ded))

        for r in self.env.cr.dictfetchall():
            tid = r['tax_id']
            if r['group_tax_id']:
                ttu = r['group_tax_type_tax_use']
                # Non-expanded groups absorb their children's tax amount.
                if not group_info.get(r['group_tax_id'], {}).get('to_expand'):
                    tid = r['group_tax_id']
            else:
                ttu = r['group_tax_type_tax_use'] or r['tax_type_tax_use']

            results[ttu]['tax_amount'][cg_key] += r['tax_amount']
            results[ttu]['children'][tid]['tax_amount'][cg_key] += r['tax_amount']

            if cg_opts.get('account_journal_report_tax_deductibility_columns'):
                # Classify the amount: purchase positive-factor lines are
                # (non-)deductible depending on tax closing; negative
                # purchase / positive sale closing lines are tax due.
                detail_label = False
                if r['trl_factor'] > 0 and ttu == 'purchase':
                    detail_label = 'tax_deductible' if r['trl_tax_closing'] else 'tax_non_deductible'
                elif r['trl_tax_closing'] and (r['trl_factor'] > 0, ttu) in ((False, 'purchase'), (True, 'sale')):
                    detail_label = 'tax_due'
                if detail_label:
                    results[ttu][detail_label][cg_key] += r['tax_amount'] * r['trl_factor']
                    results[ttu]['children'][tid][detail_label][cg_key] += r['tax_amount'] * r['trl_factor']

    return results
|
||||
|
||||
def _read_generic_tax_report_amounts(self, report, opts_per_cg, gb_fields):
    """Read base/tax amounts using the tax-details query, grouped along
    ``gb_fields`` (used by the account-level groupings).

    :param opts_per_cg: options split per column group.
    :param gb_fields: list of ``(alias, field)`` pairs defining the
        grouping levels (e.g. ``('src_tax', 'id')``, ``('account', 'id')``).
    :return: nested tree ``{key: {'base_amount': {...}, 'tax_amount':
        {...}, 'children': {...}}}`` keyed per grouping level.
    """
    # When grouping on src_tax, group taxes whose children carry their
    # own type_tax_use must be expanded onto those children.
    needs_group = False
    select_parts, gb_parts = [], []
    for alias, fld in gb_fields:
        select_parts.append(SQL("%s AS %s", SQL.identifier(alias, fld), SQL.identifier(f'{alias}_{fld}')))
        gb_parts.append(SQL.identifier(alias, fld))
        if alias == 'src_tax':
            # Also select the child tax's own field, used when expanding.
            select_parts.append(SQL("%s AS %s", SQL.identifier('tax', fld), SQL.identifier(f'tax_{fld}')))
            gb_parts.append(SQL.identifier('tax', fld))
            needs_group = True

    expand_set = set()
    if needs_group:
        groups = self.env['account.tax'].with_context(active_test=False).search([('amount_type', '=', 'group')])
        for g in groups:
            if set(g.children_tax_ids.mapped('type_tax_use')) != {'none'}:
                expand_set.add(g.id)

    tree = {}
    for cg_key, cg_opts in opts_per_cg.items():
        qry = report._get_report_query(cg_opts, 'strict_range')
        td_qry = self.env['account.move.line']._get_query_tax_details(qry.from_clause, qry.where_clause)
        # Dedup: the same (refund?, level keys...) aggregate must only be
        # added once per column group.
        seen_keys = set()

        self.env.cr.execute(SQL('''
            SELECT %(sel)s, trl.document_type = 'refund' AS is_refund,
                   SUM(CASE WHEN tdr.display_type = 'rounding' THEN 0 ELSE tdr.base_amount END) AS base_amount,
                   SUM(tdr.tax_amount) AS tax_amount
            FROM (%(td)s) AS tdr
            JOIN account_tax_repartition_line trl ON trl.id = tdr.tax_repartition_line_id
            JOIN account_tax tax ON tax.id = tdr.tax_id
            JOIN account_tax src_tax ON src_tax.id = COALESCE(tdr.group_tax_id, tdr.tax_id) AND src_tax.type_tax_use IN ('sale','purchase')
            JOIN account_account account ON account.id = tdr.base_account_id
            WHERE tdr.tax_exigible
            GROUP BY tdr.tax_repartition_line_id, trl.document_type, %(gb)s
            ORDER BY src_tax.sequence, src_tax.id, tax.sequence, tax.id
        ''', sel=SQL(',').join(select_parts), td=td_qry, gb=SQL(',').join(gb_parts)))

        for row in self.env.cr.dictfetchall():
            # Walk/extend the tree one grouping level at a time,
            # accumulating a cumulative key for deduplication.
            node = tree
            cum_key = [row['is_refund']]
            for alias, fld in gb_fields:
                gk = f'{alias}_{fld}'
                if gk == 'src_tax_id' and row['src_tax_id'] in expand_set:
                    # Expandable group: insert the group id in the key
                    # and descend using the child tax id instead.
                    cum_key.append(row[gk])
                    gk = 'tax_id'
                rk = row[gk]
                cum_key.append(rk)
                ck_tuple = tuple(cum_key)
                node.setdefault(rk, {
                    'base_amount': {k: 0.0 for k in cg_opts['column_groups']},
                    'tax_amount': {k: 0.0 for k in cg_opts['column_groups']},
                    'children': {},
                })
                sub = node[rk]
                if ck_tuple not in seen_keys:
                    sub['base_amount'][cg_key] += row['base_amount']
                    sub['tax_amount'][cg_key] += row['tax_amount']
                node = sub['children']
                seen_keys.add(ck_tuple)
    return tree
|
||||
|
||||
def _populate_lines_recursively(
    self, report, options, lines, sort_maps, gb_fields, node,
    index=0, type_tax_use=None, parent_line_id=None, warnings=None,
):
    """Depth-first conversion of the amounts tree into report lines.

    Appends ``(0, line_dict)`` tuples to ``lines`` in display order
    (order taken from ``sort_maps``), one line per tree node, recursing
    into each node's children.

    :param index: current depth into ``gb_fields``.
    :param type_tax_use: 'sale'/'purchase' inherited from the top level;
        sale amounts are sign-flipped for display.
    """
    if index == len(gb_fields):
        return
    alias, fld = gb_fields[index]
    gk = f'{alias}_{fld}'
    smap = sort_maps[index]
    # Order keys by the record order captured in the sort map.
    sorted_keys = sorted(node.keys(), key=lambda k: smap[k][1])

    for key in sorted_keys:
        if gk == 'src_tax_type_tax_use':
            # Top level: the key itself is the tax-use type; propagate it.
            type_tax_use = key
        # Sales are stored as credits (negative balance): flip for display.
        sign = -1 if type_tax_use == 'sale' else 1

        amounts = node[key]
        cols = []
        for col in options['columns']:
            el = col.get('expression_label')
            # Net (base) only shown on leaf level; tax shown everywhere.
            # NOTE(review): if a column's label is neither 'net' nor
            # 'tax', `cv` is stale from the previous iteration (or
            # unbound on the first) — confirm the report only ever has
            # net/tax columns here.
            if el == 'net':
                cv = sign * amounts['base_amount'][col['column_group_key']] if index == len(gb_fields) - 1 else ''
            if el == 'tax':
                cv = sign * amounts['tax_amount'][col['column_group_key']]
            cols.append(report._build_column_dict(cv, col, options=options))

            if el == 'tax' and options.get('account_journal_report_tax_deductibility_columns'):
                # Extra deductibility breakdown columns after the tax one.
                for dt in ('tax_non_deductible', 'tax_deductible', 'tax_due'):
                    cols.append(report._build_column_dict(
                        col_value=sign * amounts[dt][col['column_group_key']],
                        col_data={'figure_type': 'monetary', 'column_group_key': col['column_group_key'], 'expression_label': dt},
                        options=options,
                    ))

        defaults = {'columns': cols, 'level': index if index == 0 else index + 1, 'unfoldable': False}
        rpt_line = self._build_report_line(report, options, defaults, gk, smap[key][0], parent_line_id, warnings)

        if gk == 'src_tax_id':
            # Tax lines get the Audit caret action.
            rpt_line['caret_options'] = 'generic_tax_report'

        lines.append((0, rpt_line))
        self._populate_lines_recursively(
            report, options, lines, sort_maps, gb_fields,
            amounts.get('children'), index=index + 1,
            type_tax_use=type_tax_use, parent_line_id=rpt_line['id'],
            warnings=warnings,
        )
|
||||
|
||||
def _build_report_line(self, report, options, defaults, gk, value, parent_id, warnings=None):
    """Build one report line dict for grouping key *gk*.

    *value* depends on the level: a ``(markup, label)`` pair for the
    tax-use level, a tax record for 'src_tax_id', or an account record
    for 'account_id'. *defaults* is copied, never mutated.

    :returns: the completed line dict (id, name, columns, ...)
    """
    ln = dict(defaults)
    if parent_id is not None:
        ln['parent_id'] = parent_id

    if gk == 'src_tax_type_tax_use':
        ln['id'] = report._get_generic_line_id(None, None, markup=value[0], parent_line_id=parent_id)
        ln['name'] = value[1]
    elif gk == 'src_tax_id':
        tax = value
        ln['id'] = report._get_generic_line_id(tax._name, tax.id, parent_line_id=parent_id)
        if tax.amount_type == 'percent':
            ln['name'] = f"{tax.name} ({tax.amount}%)"
            # NOTE(review): the amount-consistency check runs only for
            # percentage taxes — confirm fixed/group taxes are skipped
            # intentionally.
            if warnings is not None:
                self._check_line_consistency(report, options, ln, tax, warnings)
        elif tax.amount_type == 'fixed':
            ln['name'] = f"{tax.name} ({tax.amount})"
        else:
            ln['name'] = tax.name
        # Disambiguate identically-named taxes across companies.
        if options.get('multi-company'):
            ln['name'] = f"{ln['name']} - {tax.company_id.display_name}"
    elif gk == 'account_id':
        acct = value
        ln['id'] = report._get_generic_line_id(acct._name, acct.id, parent_line_id=parent_id)
        ln['name'] = f"{acct.display_name} - {acct.company_id.display_name}" if options.get('multi-company') else acct.display_name
    return ln
|
||||
|
||||
def _check_line_consistency(self, report, options, ln, tax, warnings=None):
    """Flag a tax line whose reported tax amount diverges from the
    amount implied by its rate.

    The effective rate is the tax's nominal rate scaled by the sum of
    the 'tax' repartition-line factors. For each column group, the
    expected tax (net * effective rate) is compared against the
    reported tax; a relative error above 0.1% marks the line with
    ``alert`` and records a danger-level report warning.

    :param ln: report line dict; mutated — may gain ``alert=True``
    :param tax: the account.tax record the line represents
    :param warnings: optional dict collecting report-level warnings;
        when None the line is still flagged but no warning is recorded
    """
    eff_rate = tax.amount * sum(
        tax.invoice_repartition_line_ids.filtered(
            lambda r: r.repartition_type == 'tax'
        ).mapped('factor')
    ) / 100
    # Loop-invariant: comparisons use the current company's currency rounding.
    cur = self.env.company.currency_id
    for cg_key in report._split_options_per_column_group(options):
        net = next((c['no_format'] for c in ln['columns'] if c['column_group_key'] == cg_key and c['expression_label'] == 'net'), 0)
        tax_val = next((c['no_format'] for c in ln['columns'] if c['column_group_key'] == cg_key and c['expression_label'] == 'tax'), 0)
        if net == '':
            # Non-leaf rows carry an empty net column; nothing to check.
            continue
        expected = float(net or 0) * eff_rate
        if cur.compare_amounts(expected, tax_val):
            # Relative error against the net base; guard against net == 0.
            err = abs(abs(tax_val) - abs(expected)) / float(net or 1)
            if err > 0.001:
                ln['alert'] = True
                # Bug fix: the previous code subscripted `warnings`
                # unconditionally, raising TypeError when called with the
                # default warnings=None.
                if warnings is not None:
                    warnings['fusion_accounting.tax_report_warning_lines_consistency'] = {'alert_type': 'danger'}
                return
|
||||
|
||||
# ================================================================
|
||||
# CARET / AUDIT
|
||||
# ================================================================
|
||||
|
||||
def caret_option_audit_tax(self, options, params):
    """Caret action: open the journal items that make up a tax line.

    :param options: report options (supply ``report_id`` and date scope)
    :param params: caret params; ``line_id`` encodes the audited model/id
    :returns: an act_window dict on account.move.line, grouped by account
    :raises UserError: when the clicked line is not an account.tax line
    """
    report = self.env['account.report'].browse(options['report_id'])
    mdl, tax_id = report._get_model_info_from_id(params['line_id'])
    if mdl != 'account.tax':
        raise UserError(_("Cannot audit tax from a non-tax model."))

    tax = self.env['account.tax'].browse(tax_id)
    # Tax-repartition lines the audited tax participates in; group taxes
    # match through their children instead of the group record itself.
    if tax.amount_type == 'group':
        affect_domain = [('tax_ids', 'in', tax.children_tax_ids.ids), ('tax_repartition_line_id', '!=', False)]
    else:
        affect_domain = [('tax_ids', '=', tax.id), ('tax_ids.type_tax_use', '=', tax.type_tax_use), ('tax_repartition_line_id', '!=', False)]

    # Union of base lines, tax lines, and repartition lines, restricted
    # to the report's strict date range.
    domain = report._get_options_domain(options, 'strict_range') + expression.OR((
        [('tax_ids', 'in', tax.ids), ('tax_ids.type_tax_use', '=', tax.type_tax_use), ('tax_repartition_line_id', '=', False)],
        [('group_tax_id', '=', tax.id) if tax.amount_type == 'group' else ('tax_line_id', '=', tax.id)],
        affect_domain,
    ))
    return {
        'type': 'ir.actions.act_window',
        'name': _('Journal Items for Tax Audit'),
        'res_model': 'account.move.line',
        'views': [[self.env.ref('account.view_move_line_tax_audit_tree').id, 'list']],
        'domain': domain,
        # search_default_group_by_account=2 pre-expands the account grouping.
        'context': {**self.env.context, 'search_default_group_by_account': 2, 'expand': 1},
    }
|
||||
|
||||
|
||||
class FusionGenericTaxReportHandlerAT(models.AbstractModel):
    """Generic tax report handler variant for the Account -> Tax layout."""

    _name = 'account.generic.tax.report.handler.account.tax'
    _inherit = 'account.generic.tax.report.handler'
    _description = 'Generic Tax Report Custom Handler (Account -> Tax)'

    def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
        """Produce the report lines via the shared generator, using the
        account->tax grouping mode."""
        grouping_mode = 'account_tax'
        return super()._get_dynamic_lines(report, options, grouping_mode, warnings)
|
||||
|
||||
|
||||
class FusionGenericTaxReportHandlerTA(models.AbstractModel):
    """Generic tax report handler variant for the Tax -> Account layout."""

    _name = 'account.generic.tax.report.handler.tax.account'
    _inherit = 'account.generic.tax.report.handler'
    _description = 'Generic Tax Report Custom Handler (Tax -> Account)'

    def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
        """Produce the report lines via the shared generator, using the
        tax->account grouping mode."""
        grouping_mode = 'tax_account'
        return super()._get_dynamic_lines(report, options, grouping_mode, warnings)
||||
332
Fusion Accounting/models/account_journal.py
Normal file
332
Fusion Accounting/models/account_journal.py
Normal file
@@ -0,0 +1,332 @@
|
||||
# Fusion Accounting - Journal Extensions for Bank Statement Import
|
||||
# File-based import pipeline: parse, validate, create, reconcile
|
||||
|
||||
from odoo import models, tools, _
|
||||
from odoo.addons.base.models.res_bank import sanitize_account_number
|
||||
from odoo.exceptions import UserError, RedirectWarning
|
||||
|
||||
|
||||
class FusionAccountJournal(models.Model):
    """Extends journals with a pluggable bank-statement file import
    pipeline. Sub-modules register parsers by overriding
    ``_parse_bank_statement_file``."""

    _inherit = "account.journal"

    # ---- Available Import Formats ----
    def _get_bank_statements_available_import_formats(self):
        """Return a list of supported file-format labels (e.g. 'OFX').
        Override in sub-modules to register additional formats."""
        return []

    def __get_bank_statements_available_sources(self):
        """Append file-import option to the bank-statement source selector
        when at least one import format is registered."""
        # NOTE(review): Python mangles this method name (and the super()
        # call below) under `FusionAccountJournal`, while the parent
        # model's private hook is mangled under its own class name. Odoo
        # add-ons overriding this hook normally reuse the parent's class
        # name so the mangled names coincide — verify this override is
        # actually reached and the super() call resolves at runtime.
        sources = super(FusionAccountJournal, self).__get_bank_statements_available_sources()
        known_formats = self._get_bank_statements_available_import_formats()
        if known_formats:
            known_formats.sort()
            fmt_label = ', '.join(known_formats)
            sources.append((
                "file_import",
                _("Manual (or import %(import_formats)s)", import_formats=fmt_label),
            ))
        return sources

    # ---- Document Upload Entry Point ----
    def create_document_from_attachment(self, attachment_ids=None):
        """Route attachment uploads to the bank-statement importer when
        the journal is of type bank, credit, or cash.

        :param attachment_ids: ids of the uploaded ir.attachment records
        :returns: the reconciliation-widget action for liquidity
            journals, otherwise whatever ``super()`` returns
        :raises UserError: when no attachment is provided
        """
        # Fall back to the context journal when called without records.
        target_journal = self or self.browse(self.env.context.get('default_journal_id'))
        if target_journal.type in ('bank', 'credit', 'cash'):
            uploaded_files = self.env['ir.attachment'].browse(attachment_ids)
            if not uploaded_files:
                raise UserError(_("No attachment was provided"))
            return target_journal._import_bank_statement(uploaded_files)
        return super().create_document_from_attachment(attachment_ids)

    # ---- Core Import Pipeline ----
    def _import_bank_statement(self, attachments):
        """Orchestrate the full import pipeline: parse -> validate ->
        find journal -> complete values -> create statements -> reconcile.

        Returns an action opening the reconciliation widget for the
        newly imported lines.

        :param attachments: ir.attachment recordset of uploaded files
        :raises UserError: empty file content, or every file failed
        :raises RedirectWarning: some files failed but others succeeded
        """
        if any(not att.raw for att in attachments):
            raise UserError(_("You uploaded an invalid or empty file."))

        created_statement_ids = []
        import_notifications = {}  # attachment name -> joined messages
        import_errors = {}  # attachment name -> error message

        # Each attachment is imported independently; failures are
        # collected instead of aborting the whole batch.
        for att in attachments:
            try:
                currency_code, acct_number, parsed_stmts = self._parse_bank_statement_file(att)
                self._check_parsed_data(parsed_stmts, acct_number)
                target_journal = self._find_additional_data(currency_code, acct_number)

                if not target_journal.default_account_id:
                    raise UserError(
                        _('You must set a Default Account for the journal: %s', target_journal.name)
                    )

                parsed_stmts = self._complete_bank_statement_vals(
                    parsed_stmts, target_journal, acct_number, att,
                )
                stmt_ids, _line_ids, notifs = self._create_bank_statements(parsed_stmts)
                created_statement_ids.extend(stmt_ids)

                # Auto-set the import source on the journal
                if target_journal.bank_statements_source != 'file_import':
                    target_journal.sudo().bank_statements_source = 'file_import'

                combined_msg = ""
                for n in notifs:
                    combined_msg += f"{n['message']}"
                if notifs:
                    import_notifications[att.name] = combined_msg

            except (UserError, RedirectWarning) as exc:
                import_errors[att.name] = exc.args[0]

        statements = self.env['account.bank.statement'].browse(created_statement_ids)
        lines_to_reconcile = statements.line_ids

        if lines_to_reconcile:
            # Cap auto-reconciliation at 180s (or the cron real-time
            # limit, when lower) so a large import cannot hang the request.
            cron_time_limit = tools.config['limit_time_real_cron'] or -1
            effective_limit = cron_time_limit if 0 < cron_time_limit < 180 else 180
            lines_to_reconcile._cron_try_auto_reconcile_statement_lines(
                limit_time=effective_limit,
            )

        widget_action = self.env['account.bank.statement.line']._action_open_bank_reconciliation_widget(
            extra_domain=[('statement_id', 'in', statements.ids)],
            default_context={
                'search_default_not_matched': True,
                'default_journal_id': statements[:1].journal_id.id,
                'notifications': import_notifications,
            },
        )

        if import_errors:
            err_summary = _("The following files could not be imported:\n")
            err_summary += "\n".join(
                f"- {fname}: {msg}" for fname, msg in import_errors.items()
            )
            if statements:
                # Commit so the successfully created statements survive the
                # transaction rollback triggered by the raise below.
                self.env.cr.commit()
                raise RedirectWarning(
                    err_summary, widget_action,
                    _('View successfully imported statements'),
                )
            else:
                raise UserError(err_summary)

        return widget_action

    # ---- Parsing (Chain of Responsibility) ----
    def _parse_bank_statement_file(self, attachment) -> tuple:
        """Parse *attachment* into structured statement data. Each module
        that adds format support must extend this method and return
        ``super()`` if the format is not recognised.

        :returns: ``(currency_code, account_number, statements_data)``
        :raises RedirectWarning: when no parser can handle the file.
        """
        # Base implementation: no format is handled; point the user to the
        # Apps list filtered on bank-statement-import modules.
        raise RedirectWarning(
            message=_("Could not interpret the uploaded file.\n"
                      "Do you have the appropriate import module installed?"),
            action=self.env.ref('base.open_module_tree').id,
            button_text=_("Go to Apps"),
            additional_context={
                'search_default_name': 'account_bank_statement_import',
                'search_default_extra': True,
            },
        )

    # ---- Validation ----
    def _check_parsed_data(self, stmts_vals, acct_number):
        """Verify that the parsed data contains at least one statement
        with at least one transaction.

        :raises UserError: no statement, or no transaction, for
            *acct_number*
        """
        if not stmts_vals:
            raise UserError(_(
                "This file contains no statement for account %s.\n"
                "If the file covers multiple accounts, import it on each one separately.",
                acct_number,
            ))
        has_transactions = any(
            sv.get('transactions') for sv in stmts_vals
        )
        if not has_transactions:
            raise UserError(_(
                "This file contains no transaction for account %s.\n"
                "If the file covers multiple accounts, import it on each one separately.",
                acct_number,
            ))

    # ---- Bank Account Matching ----
    def _statement_import_check_bank_account(self, acct_number):
        """Compare *acct_number* against the journal's bank account,
        accommodating special formats (CH, BNP France, LCL).

        :returns: True when both numbers designate the same account
        """
        # Only the first space-separated token of the sanitized number is
        # compared — presumably to handle multi-segment formats; confirm.
        sanitised = self.bank_account_id.sanitized_acc_number.split(" ")[0]
        # BNP France: 27-char IBAN vs 11-char local
        if len(sanitised) == 27 and len(acct_number) == 11 and sanitised[:2].upper() == "FR":
            return sanitised[14:-2] == acct_number
        # Credit Lyonnais (LCL): 27-char IBAN vs 7-char local
        if len(sanitised) == 27 and len(acct_number) == 7 and sanitised[:2].upper() == "FR":
            return sanitised[18:-2] == acct_number
        return sanitised == acct_number

    def _find_additional_data(self, currency_code, acct_number):
        """Locate the matching journal based on currency and account
        number, creating the bank account link if necessary.

        :param currency_code: currency name parsed from the file (or falsy)
        :param acct_number: account number parsed from the file (or falsy)
        :returns: the account.journal record to import into
        :raises UserError: unknown currency, currency mismatch, account
            mismatch, or no journal could be determined
        """
        co_currency = self.env.company.currency_id
        stmt_currency = None
        normalised_acct = sanitize_account_number(acct_number)

        if currency_code:
            stmt_currency = self.env['res.currency'].search(
                [('name', '=ilike', currency_code)], limit=1,
            )
            if not stmt_currency:
                raise UserError(_("No currency found matching '%s'.", currency_code))
            if stmt_currency == co_currency:
                # The company currency is represented as False downstream.
                stmt_currency = False

        target_journal = self
        if acct_number:
            if target_journal and not target_journal.bank_account_id:
                # Importing on a journal without a bank account: link it.
                target_journal.set_bank_account(acct_number)
            elif not target_journal:
                # Model-wide call: find the journal by account number —
                # exact match first, then an unambiguous partial match.
                target_journal = self.search([
                    ('bank_account_id.sanitized_acc_number', '=', normalised_acct),
                ])
                if not target_journal:
                    partial = self.search([
                        ('bank_account_id.sanitized_acc_number', 'ilike', normalised_acct),
                    ])
                    if len(partial) == 1:
                        target_journal = partial
            else:
                if not self._statement_import_check_bank_account(normalised_acct):
                    raise UserError(_(
                        'The statement account (%(account)s) does not match '
                        'the journal account (%(journal)s).',
                        account=acct_number,
                        journal=target_journal.bank_account_id.acc_number,
                    ))

        if target_journal:
            j_currency = target_journal.currency_id or target_journal.company_id.currency_id
            if stmt_currency is None:
                # The file carried no currency: assume the journal's.
                stmt_currency = j_currency
            if stmt_currency and stmt_currency != j_currency:
                raise UserError(_(
                    'Statement currency (%(code)s) differs from journal '
                    'currency (%(journal)s).',
                    code=(stmt_currency.name if stmt_currency else co_currency.name),
                    journal=(j_currency.name if j_currency else co_currency.name),
                ))

        if not target_journal:
            raise UserError(
                _('Unable to determine the target journal. Please select one manually.')
            )
        return target_journal

    # ---- Value Completion ----
    def _complete_bank_statement_vals(self, stmts_vals, journal, acct_number, attachment):
        """Enrich raw parsed values with journal references, unique import
        IDs, and partner-bank associations.

        :returns: the (mutated) *stmts_vals* list
        """
        for sv in stmts_vals:
            if not sv.get('reference'):
                sv['reference'] = attachment.name
            for txn in sv['transactions']:
                txn['journal_id'] = journal.id

                # Prefix the parser's id with account + journal so the same
                # transaction imported twice is detected as a duplicate.
                uid = txn.get('unique_import_id')
                if uid:
                    normalised = sanitize_account_number(acct_number)
                    prefix = f"{normalised}-" if normalised else ""
                    txn['unique_import_id'] = f"{prefix}{journal.id}-{uid}"

                # Resolve the counterpart bank account: scoped to the known
                # partner when available, otherwise company-wide.
                if not txn.get('partner_bank_id'):
                    ident_str = txn.get('account_number')
                    if ident_str:
                        if txn.get('partner_id'):
                            bank_match = self.env['res.partner.bank'].search([
                                ('acc_number', '=', ident_str),
                                ('partner_id', '=', txn['partner_id']),
                            ])
                        else:
                            bank_match = self.env['res.partner.bank'].search([
                                ('acc_number', '=', ident_str),
                                ('company_id', 'in', (False, journal.company_id.id)),
                            ])
                        # Only trust an unambiguous single match.
                        if bank_match and len(bank_match) == 1:
                            txn['partner_bank_id'] = bank_match.id
                            txn['partner_id'] = bank_match.partner_id.id
        return stmts_vals

    # ---- Statement Creation ----
    def _create_bank_statements(self, stmts_vals, raise_no_imported_file=True):
        """Create bank statements from the enriched values, skipping
        duplicate transactions and generating PDF attachments for
        complete statements.

        :returns: ``(statement_ids, line_ids, notifications)``
        """
        BankStmt = self.env['account.bank.statement']
        BankStmtLine = self.env['account.bank.statement.line']

        new_stmt_ids = []
        new_line_ids = []
        skipped_imports = []

        for sv in stmts_vals:
            accepted_txns = []
            for txn in sv['transactions']:
                uid = txn.get('unique_import_id')
                already_exists = (
                    uid
                    and BankStmtLine.sudo().search(
                        [('unique_import_id', '=', uid)], limit=1,
                    )
                )
                # Zero-amount and already-imported lines are skipped.
                if txn['amount'] != 0 and not already_exists:
                    accepted_txns.append(txn)
                else:
                    skipped_imports.append(txn)
                    # Fold the skipped amount into the opening balance —
                    # presumably to keep the ending balance consistent with
                    # the file; confirm against the statement model.
                    if sv.get('balance_start') is not None:
                        sv['balance_start'] += float(txn['amount'])

            if accepted_txns:
                sv.pop('transactions', None)
                sv['line_ids'] = [[0, False, line] for line in accepted_txns]
                new_stmt = BankStmt.with_context(
                    default_journal_id=self.id,
                ).create(sv)
                if not new_stmt.name:
                    new_stmt.name = sv['reference']
                new_stmt_ids.append(new_stmt.id)
                new_line_ids.extend(new_stmt.line_ids.ids)

                if new_stmt.is_complete and not self.env.context.get('skip_pdf_attachment_generation'):
                    new_stmt.action_generate_attachment()

        if not new_line_ids and raise_no_imported_file:
            raise UserError(_('You already have imported that file.'))

        user_notifications = []
        num_skipped = len(skipped_imports)
        if num_skipped:
            user_notifications.append({
                'type': 'warning',
                'message': (
                    _("%d transactions had already been imported and were ignored.", num_skipped)
                    if num_skipped > 1
                    else _("1 transaction had already been imported and was ignored.")
                ),
            })

        return new_stmt_ids, new_line_ids, user_notifications
|
||||
73
Fusion Accounting/models/account_journal_csv.py
Normal file
73
Fusion Accounting/models/account_journal_csv.py
Normal file
@@ -0,0 +1,73 @@
|
||||
# Fusion Accounting - CSV/XLS/XLSX Bank Statement Import
|
||||
# Registers spreadsheet formats and routes uploads to the base_import wizard
|
||||
|
||||
from odoo import _, models
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
|
||||
class FusionJournalCSVImport(models.Model):
    """CSV / XLS / XLSX support for the journal import pipeline.

    Spreadsheet uploads are handed off to the ``base_import`` wizard so
    the user can map columns interactively; every other file type keeps
    following the standard import chain.
    """

    _inherit = 'account.journal'

    # ---- Format Registration ----
    def _get_bank_statements_available_import_formats(self):
        """Add the spreadsheet formats to the importable-type list."""
        formats = super()._get_bank_statements_available_import_formats()
        formats += ['CSV', 'XLS', 'XLSX']
        return formats

    # ---- Helpers ----
    def _is_spreadsheet_file(self, filename):
        """Tell whether *filename* carries a CSV/XLS/XLSX extension."""
        if not filename:
            return False
        return filename.lower().strip().endswith(('.csv', '.xls', '.xlsx'))

    # ---- Import Override ----
    def _import_bank_statement(self, attachments):
        """Divert spreadsheet uploads to the column-mapping wizard.

        Non-spreadsheet files fall through to the standard importer.
        Mixing spreadsheet files with other formats, or uploading more
        than one spreadsheet at once, raises an error.
        """
        if len(attachments) > 1:
            flags = [bool(self._is_spreadsheet_file(att.name)) for att in attachments]
            if any(flags) and not all(flags):
                raise UserError(
                    _('Mixing CSV/XLS files with other file types is not allowed.')
                )
            if flags.count(True) > 1:
                raise UserError(_('Only one CSV/XLS file can be selected at a time.'))
            return super()._import_bank_statement(attachments)

        if not self._is_spreadsheet_file(attachments.name):
            return super()._import_bank_statement(attachments)

        # Single spreadsheet upload: hand it to the base_import wizard
        # and open the interactive mapper on the client side.
        import_wizard = self.env['base_import.import'].create({
            'res_model': 'account.bank.statement.line',
            'file': attachments.raw,
            'file_name': attachments.name,
            'file_type': attachments.mimetype,
        })
        client_ctx = dict(
            self.env.context,
            wizard_id=import_wizard.id,
            default_journal_id=self.id,
        )
        return {
            'type': 'ir.actions.client',
            'tag': 'import_bank_stmt',
            'params': {
                'model': 'account.bank.statement.line',
                'context': client_ctx,
                'filename': 'bank_statement_import.csv',
            },
        }
|
||||
202
Fusion Accounting/models/account_journal_dashboard.py
Normal file
202
Fusion Accounting/models/account_journal_dashboard.py
Normal file
@@ -0,0 +1,202 @@
|
||||
import ast
|
||||
|
||||
from odoo import models
|
||||
|
||||
# Journal types that represent liquidity accounts (bank, cash, credit card)
|
||||
LIQUIDITY_JOURNAL_TYPES = ('bank', 'cash', 'credit')
|
||||
|
||||
|
||||
class AccountJournal(models.Model):
    """Dashboard-side reconciliation shortcuts for ``account.journal``.

    Connects the Accounting dashboard journal cards to the bank
    reconciliation widget and to the unreconciled journal-items views,
    so users can jump straight into reconciliation work.
    """

    _inherit = 'account.journal'

    # -------------------------------------------------------------------------
    # Internal helper
    # -------------------------------------------------------------------------

    def _fusion_open_recon_widget(self, ctx, **widget_kwargs):
        """Open the statement-line reconciliation widget with *ctx* as
        default context; extra keyword args are forwarded verbatim."""
        return self.env['account.bank.statement.line']._action_open_bank_reconciliation_widget(
            default_context=ctx, **widget_kwargs,
        )

    # -------------------------------------------------------------------------
    # Reconciliation actions
    # -------------------------------------------------------------------------

    def action_open_reconcile(self):
        """Open the reconciliation view appropriate for this journal.

        Liquidity journals (bank / cash / credit) get the bank
        reconciliation widget filtered on their unmatched statement
        lines; every other journal type gets the list of posted but
        unreconciled journal items.

        Returns:
            dict: A window action descriptor.
        """
        self.ensure_one()
        if self.type not in LIQUIDITY_JOURNAL_TYPES:
            # Non-liquidity journals: show unreconciled move lines.
            return self.env['ir.actions.act_window']._for_xml_id(
                'fusion_accounting.action_move_line_posted_unreconciled'
            )
        return self._fusion_open_recon_widget({
            'default_journal_id': self.id,
            'search_default_journal_id': self.id,
            'search_default_not_matched': True,
        })

    def action_open_to_check(self):
        """Open bank reconciliation restricted to lines flagged for review.

        Enables the *to check* search filter so only statement lines
        flagged during import or matching are shown.

        Returns:
            dict: A window action descriptor.
        """
        self.ensure_one()
        return self._fusion_open_recon_widget({
            'default_journal_id': self.id,
            'search_default_journal_id': self.id,
            'search_default_to_check': True,
        })

    def action_open_bank_transactions(self):
        """Open a flat list of every bank transaction of this journal.

        Forces the list view first (instead of the default kanban) so
        the user gets a tabular overview.

        Returns:
            dict: A window action descriptor.
        """
        self.ensure_one()
        return self._fusion_open_recon_widget(
            {
                'default_journal_id': self.id,
                'search_default_journal_id': self.id,
            },
            kanban_first=False,
        )

    def action_open_reconcile_statement(self):
        """Open bank reconciliation scoped to a single statement.

        The target statement id is read from ``statement_id`` in the
        environment context, typically set by a statement form or
        dashboard button.

        Returns:
            dict: A window action descriptor.
        """
        return self._fusion_open_recon_widget({
            'search_default_statement_id': self.env.context.get('statement_id'),
        })

    # -------------------------------------------------------------------------
    # Dashboard open_action override
    # -------------------------------------------------------------------------

    def open_action(self):
        """Send dashboard clicks on liquidity journals to reconciliation.

        Without an explicit ``action_name`` in the context, a click on a
        bank / cash / credit journal card opens the reconciliation
        widget filtered on the journal's default account; every other
        case keeps the standard behaviour via ``super()``.

        Returns:
            dict: A window action descriptor.
        """
        if self.type in LIQUIDITY_JOURNAL_TYPES and not self.env.context.get('action_name'):
            self.ensure_one()
            return self._fusion_open_recon_widget(
                {
                    'default_journal_id': self.id,
                    'search_default_journal_id': self.id,
                },
                extra_domain=[('line_ids.account_id', '=', self.default_account_id.id)],
            )
        return super().open_action()

    # -------------------------------------------------------------------------
    # Dashboard data helpers
    # -------------------------------------------------------------------------

    def _fill_general_dashboard_data(self, dashboard_data):
        """Add the tax-periodicity flag to general-journal dashboard data.

        For journals of type *general*, sets the boolean key
        ``is_account_tax_periodicity_journal`` indicating whether the
        journal is the company's designated tax-closing journal.

        Args:
            dashboard_data (dict): journal id -> dashboard data dict,
                mutated in place.
        """
        super()._fill_general_dashboard_data(dashboard_data)
        for journal in self.filtered(lambda j: j.type == 'general'):
            dashboard_data[journal.id]['is_account_tax_periodicity_journal'] = (
                journal == journal.company_id._get_tax_closing_journal()
            )

    # -------------------------------------------------------------------------
    # General Ledger shortcut
    # -------------------------------------------------------------------------

    def action_open_bank_balance_in_gl(self):
        """Open the General Ledger pre-filtered on this journal's account.

        Injects the journal's default account code into the report
        action context so the ledger immediately shows the relevant
        lines.

        Returns:
            dict: A window action descriptor for the General Ledger report.
        """
        self.ensure_one()
        gl_action = self.env['ir.actions.actions']._for_xml_id(
            'fusion_accounting.action_account_report_general_ledger'
        )
        # Merge the account filter into the existing action context.
        merged_ctx = ast.literal_eval(gl_action.get('context', '{}'))
        merged_ctx['default_filter_accounts'] = self.default_account_id.code
        gl_action['context'] = merged_ctx
        return gl_action
|
||||
930
Fusion Accounting/models/account_journal_report.py
Normal file
930
Fusion Accounting/models/account_journal_report.py
Normal file
@@ -0,0 +1,930 @@
|
||||
# Fusion Accounting - Journal Report Handler
|
||||
# Full journal audit with tax summaries, PDF/XLSX export, bank journal support
|
||||
|
||||
import io
|
||||
import datetime
|
||||
|
||||
from PIL import ImageFont
|
||||
from markupsafe import Markup
|
||||
from collections import defaultdict
|
||||
|
||||
from odoo import models, _
|
||||
from odoo.tools import SQL
|
||||
from odoo.tools.misc import file_path
|
||||
try:
|
||||
from odoo.tools.misc import xlsxwriter
|
||||
except ImportError:
|
||||
import xlsxwriter
|
||||
|
||||
XLSX_GRAY_200 = '#EEEEEE'
|
||||
XLSX_BORDER_COLOR = '#B4B4B4'
|
||||
XLSX_FONT_SIZE_DEFAULT = 8
|
||||
XLSX_FONT_SIZE_HEADING = 11
|
||||
|
||||
|
||||
class FusionJournalReportHandler(models.AbstractModel):
|
||||
"""Custom handler for the Journal Audit report. Produces detailed
|
||||
per-journal line listings, tax summaries (per-journal and global),
|
||||
and supports PDF and XLSX export."""
|
||||
|
||||
_name = "account.journal.report.handler"
|
||||
_inherit = "account.report.custom.handler"
|
||||
_description = "Journal Report Custom Handler"
|
||||
|
||||
# ================================================================
|
||||
# OPTIONS
|
||||
# ================================================================
|
||||
|
||||
def _custom_options_initializer(self, report, options, previous_options):
    """Seed journal-report options: payment lines are shown by default
    (and the choice is kept across reloads), while totals below
    sections are suppressed."""
    show_payments = previous_options.get('show_payment_lines', True)
    options['show_payment_lines'] = show_payments
    options['ignore_totals_below_sections'] = True
||||
|
||||
def _get_custom_display_config(self):
    """Frontend wiring for the journal report: custom CSS classes, the
    owl line component, and the QWeb templates to render with."""
    line_components = {
        'AccountReportLine': 'fusion_accounting.JournalReportLine',
    }
    qweb_templates = {
        'AccountReportFilters': 'fusion_accounting.JournalReportFilters',
        'AccountReportLineName': 'fusion_accounting.JournalReportLineName',
    }
    return {
        'css_custom_class': 'journal_report',
        'pdf_css_custom_class': 'journal_report_pdf',
        'components': line_components,
        'templates': qweb_templates,
    }
|
||||
|
||||
# ================================================================
|
||||
# CUSTOM ENGINE
|
||||
# ================================================================
|
||||
|
||||
def _report_custom_engine_journal_report(
|
||||
self, expressions, options, date_scope, current_groupby,
|
||||
next_groupby, offset=0, limit=None, warnings=None,
|
||||
):
|
||||
def _assemble_result(groupby_key, row):
|
||||
if groupby_key == 'account_id':
|
||||
code = row['account_code'][0]
|
||||
elif groupby_key == 'journal_id':
|
||||
code = row['journal_code'][0]
|
||||
else:
|
||||
code = None
|
||||
return row['grouping_key'], {
|
||||
'code': code,
|
||||
'credit': row['credit'],
|
||||
'debit': row['debit'],
|
||||
'balance': row['balance'] if groupby_key == 'account_id' else None,
|
||||
}
|
||||
|
||||
report = self.env['account.report'].browse(options['report_id'])
|
||||
report._check_groupby_fields(
|
||||
(next_groupby.split(',') if next_groupby else [])
|
||||
+ ([current_groupby] if current_groupby else []),
|
||||
)
|
||||
|
||||
if not current_groupby:
|
||||
return {'code': None, 'debit': None, 'credit': None, 'balance': None}
|
||||
|
||||
qry = report._get_report_query(options, 'strict_range')
|
||||
acct_alias = qry.join(
|
||||
lhs_alias='account_move_line', lhs_column='account_id',
|
||||
rhs_table='account_account', rhs_column='id', link='account_id',
|
||||
)
|
||||
acct_code = self.env['account.account']._field_to_sql(acct_alias, 'code', qry)
|
||||
gb_col = SQL.identifier('account_move_line', current_groupby)
|
||||
sel_gb = SQL('%s AS grouping_key', gb_col)
|
||||
|
||||
stmt = SQL(
|
||||
"""
|
||||
SELECT %(sel_gb)s,
|
||||
ARRAY_AGG(DISTINCT %(acct_code)s) AS account_code,
|
||||
ARRAY_AGG(DISTINCT j.code) AS journal_code,
|
||||
SUM("account_move_line".debit) AS debit,
|
||||
SUM("account_move_line".credit) AS credit,
|
||||
SUM("account_move_line".balance) AS balance
|
||||
FROM %(tbl)s
|
||||
JOIN account_move am ON am.id = account_move_line.move_id
|
||||
JOIN account_journal j ON j.id = am.journal_id
|
||||
JOIN res_company cp ON cp.id = am.company_id
|
||||
WHERE %(pmt_filter)s AND %(where)s
|
||||
GROUP BY %(gb_col)s
|
||||
ORDER BY %(gb_col)s
|
||||
""",
|
||||
sel_gb=sel_gb,
|
||||
acct_code=acct_code,
|
||||
tbl=qry.from_clause,
|
||||
where=qry.where_clause,
|
||||
pmt_filter=self._get_payment_lines_filter_case_statement(options),
|
||||
gb_col=gb_col,
|
||||
)
|
||||
self.env.cr.execute(stmt)
|
||||
return [_assemble_result(current_groupby, r) for r in self.env.cr.dictfetchall()]
|
||||
|
||||
# ================================================================
|
||||
# LINE POST-PROCESSING
|
||||
# ================================================================
|
||||
|
||||
def _custom_line_postprocessor(self, report, options, lines):
|
||||
"""Inject tax summary sub-tables after journal account sections
|
||||
and append a global tax summary when applicable."""
|
||||
enriched = []
|
||||
for idx, ln in enumerate(lines):
|
||||
enriched.append(ln)
|
||||
line_model, res_id = report._get_model_info_from_id(ln['id'])
|
||||
|
||||
if line_model == 'account.journal':
|
||||
ln['journal_id'] = res_id
|
||||
elif line_model == 'account.account':
|
||||
id_map = report._get_res_ids_from_line_id(
|
||||
ln['id'], ['account.journal', 'account.account'],
|
||||
)
|
||||
ln['journal_id'] = id_map['account.journal']
|
||||
ln['account_id'] = id_map['account.account']
|
||||
ln['date'] = options['date']
|
||||
|
||||
jnl = self.env['account.journal'].browse(ln['journal_id'])
|
||||
is_last_acct = (
|
||||
idx + 1 == len(lines)
|
||||
or report._get_model_info_from_id(lines[idx + 1]['id'])[0] != 'account.account'
|
||||
)
|
||||
if is_last_acct and self._section_has_tax(options, jnl.id):
|
||||
enriched.append({
|
||||
'id': report._get_generic_line_id(
|
||||
False, False,
|
||||
parent_line_id=ln['parent_id'],
|
||||
markup='tax_report_section',
|
||||
),
|
||||
'name': '',
|
||||
'parent_id': ln['parent_id'],
|
||||
'journal_id': jnl.id,
|
||||
'is_tax_section_line': True,
|
||||
'columns': [],
|
||||
'colspan': len(options['columns']) + 1,
|
||||
'level': 4,
|
||||
**self._get_tax_summary_section(
|
||||
options, {'id': jnl.id, 'type': jnl.type},
|
||||
),
|
||||
})
|
||||
|
||||
if report._get_model_info_from_id(lines[0]['id'])[0] == 'account.report.line':
|
||||
if self._section_has_tax(options, False):
|
||||
enriched.append({
|
||||
'id': report._get_generic_line_id(False, False, markup='tax_report_section_heading'),
|
||||
'name': _('Global Tax Summary'),
|
||||
'level': 0,
|
||||
'columns': [],
|
||||
'unfoldable': False,
|
||||
'colspan': len(options['columns']) + 1,
|
||||
})
|
||||
enriched.append({
|
||||
'id': report._get_generic_line_id(False, False, markup='tax_report_section'),
|
||||
'name': '',
|
||||
'is_tax_section_line': True,
|
||||
'columns': [],
|
||||
'colspan': len(options['columns']) + 1,
|
||||
'level': 4,
|
||||
'class': 'o_account_reports_ja_subtable',
|
||||
**self._get_tax_summary_section(options),
|
||||
})
|
||||
|
||||
return enriched
|
||||
|
||||
# ================================================================
|
||||
# PDF EXPORT
|
||||
# ================================================================
|
||||
|
||||
    def export_to_pdf(self, options):
        """Render the journal report as a PDF via wkhtmltopdf.

        Builds print-mode options, renders the body and footer through
        QWeb, and returns the standard export dict
        (``file_name`` / ``file_content`` / ``file_type``).
        """
        report = self.env['account.report'].browse(options['report_id'])
        base_url = report.get_base_url()
        print_opts = {
            **report.get_options(previous_options={**options, 'export_mode': 'print'}),
            'css_custom_class': self._get_custom_display_config().get(
                'pdf_css_custom_class', 'journal_report_pdf',
            ),
        }
        ctx = {'mode': 'print', 'base_url': base_url, 'company': self.env.company}

        # Render the footer, then wrap it in the minimal layout with
        # subst=True so wkhtmltopdf can substitute page numbers.
        footer_html = self.env['ir.actions.report']._render_template(
            'fusion_accounting.internal_layout', values=ctx,
        )
        footer_html = self.env['ir.actions.report']._render_template(
            'web.minimal_layout',
            values=dict(ctx, subst=True, body=Markup(footer_html.decode())),
        )

        doc_data = self._generate_document_data_for_export(report, print_opts, 'pdf')
        body_html = self.env['ir.qweb']._render(
            'fusion_accounting.journal_report_pdf_export_main',
            {'report': report, 'options': print_opts, 'base_url': base_url, 'document_data': doc_data},
        )

        pdf_bytes = io.BytesIO(
            self.env['ir.actions.report']._run_wkhtmltopdf(
                [body_html],
                footer=footer_html.decode(),
                landscape=False,
                specific_paperformat_args={
                    'data-report-margin-top': 10,
                    'data-report-header-spacing': 10,
                    'data-report-margin-bottom': 15,
                },
            )
        )
        result = pdf_bytes.getvalue()
        pdf_bytes.close()

        return {
            'file_name': report.get_default_report_filename(print_opts, 'pdf'),
            'file_content': result,
            'file_type': 'pdf',
        }
||||
|
||||
# ================================================================
|
||||
# XLSX EXPORT
|
||||
# ================================================================
|
||||
|
||||
def export_to_xlsx(self, options, response=None):
|
||||
wb_buffer = io.BytesIO()
|
||||
wb = xlsxwriter.Workbook(wb_buffer, {'in_memory': True, 'strings_to_formulas': False})
|
||||
report = self.env['account.report'].search([('id', '=', options['report_id'])], limit=1)
|
||||
print_opts = report.get_options(previous_options={**options, 'export_mode': 'print'})
|
||||
doc_data = self._generate_document_data_for_export(report, print_opts, 'xlsx')
|
||||
|
||||
font_cache = {}
|
||||
for sz in (XLSX_FONT_SIZE_HEADING, XLSX_FONT_SIZE_DEFAULT):
|
||||
font_cache[sz] = defaultdict()
|
||||
for variant in ('Reg', 'Bol', 'RegIta', 'BolIta'):
|
||||
try:
|
||||
path = f'web/static/fonts/lato/Lato-{variant}-webfont.ttf'
|
||||
font_cache[sz][variant] = ImageFont.truetype(file_path(path), sz)
|
||||
except (OSError, FileNotFoundError):
|
||||
font_cache[sz][variant] = ImageFont.load_default()
|
||||
|
||||
for jv in doc_data['journals_vals']:
|
||||
cx, cy = 0, 0
|
||||
ws = wb.add_worksheet(jv['name'][:31])
|
||||
cols = jv['columns']
|
||||
|
||||
for col in cols:
|
||||
alignment = 'right' if 'o_right_alignment' in col.get('class', '') else 'left'
|
||||
self._write_cell(
|
||||
cx, cy, col['name'], 1, False, report, font_cache, wb, ws,
|
||||
XLSX_FONT_SIZE_HEADING, True, XLSX_GRAY_200, alignment, 2, 2,
|
||||
)
|
||||
cx += 1
|
||||
|
||||
cy += 1
|
||||
cx = 0
|
||||
for row in jv['lines'][:-1]:
|
||||
first_aml = False
|
||||
for col in cols:
|
||||
top_bdr = 1 if first_aml else 0
|
||||
alignment = 'right' if 'o_right_alignment' in col.get('class', '') else 'left'
|
||||
|
||||
if row.get(col['label'], {}).get('data'):
|
||||
val = row[col['label']]['data']
|
||||
is_dt = isinstance(val, datetime.date)
|
||||
is_bold = False
|
||||
|
||||
if row[col['label']].get('class') and 'o_bold' in row[col['label']]['class']:
|
||||
first_aml = True
|
||||
top_bdr = 1
|
||||
is_bold = True
|
||||
|
||||
self._write_cell(
|
||||
cx, cy, val, 1, is_dt, report, font_cache, wb, ws,
|
||||
XLSX_FONT_SIZE_DEFAULT, is_bold, 'white', alignment, 0, top_bdr, XLSX_BORDER_COLOR,
|
||||
)
|
||||
else:
|
||||
self._write_cell(
|
||||
cx, cy, '', 1, False, report, font_cache, wb, ws,
|
||||
XLSX_FONT_SIZE_DEFAULT, False, 'white', alignment, 0, top_bdr, XLSX_BORDER_COLOR,
|
||||
)
|
||||
cx += 1
|
||||
cx = 0
|
||||
cy += 1
|
||||
|
||||
# Total row
|
||||
total_row = jv['lines'][-1]
|
||||
for col in cols:
|
||||
val = total_row.get(col['label'], {}).get('data', '')
|
||||
alignment = 'right' if 'o_right_alignment' in col.get('class', '') else 'left'
|
||||
self._write_cell(
|
||||
cx, cy, val, 1, False, report, font_cache, wb, ws,
|
||||
XLSX_FONT_SIZE_DEFAULT, True, XLSX_GRAY_200, alignment, 2, 2,
|
||||
)
|
||||
cx += 1
|
||||
cx = 0
|
||||
|
||||
ws.set_default_row(20)
|
||||
ws.set_row(0, 30)
|
||||
|
||||
if jv.get('tax_summary'):
|
||||
self._write_tax_summaries_to_sheet(
|
||||
report, wb, ws, font_cache, len(cols) + 1, 1, jv['tax_summary'],
|
||||
)
|
||||
|
||||
if doc_data.get('global_tax_summary'):
|
||||
self._write_tax_summaries_to_sheet(
|
||||
report, wb, wb.add_worksheet(_('Global Tax Summary')[:31]),
|
||||
font_cache, 0, 0, doc_data['global_tax_summary'],
|
||||
)
|
||||
|
||||
wb.close()
|
||||
wb_buffer.seek(0)
|
||||
xlsx_bytes = wb_buffer.read()
|
||||
wb_buffer.close()
|
||||
|
||||
return {
|
||||
'file_name': report.get_default_report_filename(options, 'xlsx'),
|
||||
'file_content': xlsx_bytes,
|
||||
'file_type': 'xlsx',
|
||||
}
|
||||
|
||||
def _write_cell(
|
||||
self, x, y, value, colspan, is_datetime, report, fonts, workbook,
|
||||
sheet, font_size, bold=False, bg_color='white', align='left',
|
||||
border_bottom=0, border_top=0, border_color='0x000000',
|
||||
):
|
||||
"""Write a styled value to the specified worksheet cell."""
|
||||
fmt = workbook.add_format({
|
||||
'font_name': 'Arial', 'font_size': font_size, 'bold': bold,
|
||||
'bg_color': bg_color, 'align': align,
|
||||
'bottom': border_bottom, 'top': border_top, 'border_color': border_color,
|
||||
})
|
||||
if colspan == 1:
|
||||
if is_datetime:
|
||||
fmt.set_num_format('yyyy-mm-dd')
|
||||
sheet.write_datetime(y, x, value, fmt)
|
||||
else:
|
||||
if isinstance(value, str):
|
||||
value = value.replace('\n', ' ')
|
||||
report._set_xlsx_cell_sizes(sheet, fonts[font_size], x, y, value, fmt, colspan > 1)
|
||||
sheet.write(y, x, value, fmt)
|
||||
else:
|
||||
sheet.merge_range(y, x, y, x + colspan - 1, value, fmt)
|
||||
|
||||
    def _write_tax_summaries_to_sheet(self, report, workbook, sheet, fonts, start_x, start_y, tax_summary):
        """Write the 'Taxes Applied' and 'Impact On Grid' sub-tables to a
        worksheet, starting at cell (start_x, start_y).

        ``tax_summary`` is the dict built by _get_tax_summary_section;
        both its tax lines and grid lines are grouped by country name.
        """
        cx, cy = start_x, start_y

        taxes = tax_summary.get('tax_report_lines')
        if taxes:
            # ar_start: index of the first right-aligned (amount) column.
            ar_start = start_x + 1
            cols = []
            if len(taxes) > 1:
                # Several countries involved: prepend a Country column.
                ar_start += 1
                cols.append(_('Country'))
            cols += [_('Name'), _('Base Amount'), _('Tax Amount')]
            # Optional columns only appear when at least one line uses them.
            if tax_summary.get('tax_non_deductible_column'):
                cols.append(_('Non-Deductible'))
            if tax_summary.get('tax_deductible_column'):
                cols.append(_('Deductible'))
            if tax_summary.get('tax_due_column'):
                cols.append(_('Due'))

            # Section title merged over the whole table width, then headers.
            self._write_cell(cx, cy, _('Taxes Applied'), len(cols), False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_HEADING, True, 'white', 'left', 2)
            cy += 1
            for c in cols:
                a = 'right' if cx >= ar_start else 'left'
                self._write_cell(cx, cy, c, 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, True, XLSX_GRAY_200, a, 2)
                cx += 1
            cx = start_x
            cy += 1

            for country in taxes:
                first_country_line = True
                for tax in taxes[country]:
                    if len(taxes) > 1:
                        # Country name only on the first row of its group;
                        # every row still skips the country column.
                        if first_country_line:
                            first_country_line = False
                            self._write_cell(cx, cy, country, 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, True, 'white', 'left', 1, 0, XLSX_BORDER_COLOR)
                        cx += 1
                    self._write_cell(cx, cy, tax['name'], 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, True, 'white', 'left', 1, 0, XLSX_BORDER_COLOR)
                    self._write_cell(cx+1, cy, tax['base_amount'], 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, False, 'white', 'right', 1, 0, XLSX_BORDER_COLOR)
                    self._write_cell(cx+2, cy, tax['tax_amount'], 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, False, 'white', 'right', 1, 0, XLSX_BORDER_COLOR)
                    cx += 3
                    if tax_summary.get('tax_non_deductible_column'):
                        self._write_cell(cx, cy, tax['tax_non_deductible'], 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, False, 'white', 'right', 1, 0, XLSX_BORDER_COLOR)
                        cx += 1
                    if tax_summary.get('tax_deductible_column'):
                        self._write_cell(cx, cy, tax['tax_deductible'], 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, False, 'white', 'right', 1, 0, XLSX_BORDER_COLOR)
                        cx += 1
                    if tax_summary.get('tax_due_column'):
                        self._write_cell(cx, cy, tax['tax_due'], 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, False, 'white', 'right', 1, 0, XLSX_BORDER_COLOR)
                    cx = start_x
                    cy += 1

            # Blank row separating the two sub-tables.
            cx = start_x
            cy += 2

        grids = tax_summary.get('tax_grid_summary_lines')
        if grids:
            ar_start = start_x + 1
            gcols = []
            if len(grids) > 1:
                ar_start += 1
                gcols.append(_('Country'))
            gcols += [_('Grid'), _('+'), _('-'), _('Impact On Grid')]

            self._write_cell(cx, cy, _('Impact On Grid'), len(gcols), False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_HEADING, True, 'white', 'left', 2)
            cy += 1
            for c in gcols:
                a = 'right' if cx >= ar_start else 'left'
                self._write_cell(cx, cy, c, 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, True, XLSX_GRAY_200, a, 2)
                cx += 1
            cx = start_x
            cy += 1

            for country in grids:
                first_line = True
                for grid_name in grids[country]:
                    if len(grids) > 1:
                        if first_line:
                            first_line = False
                            self._write_cell(cx, cy, country, 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, True, 'white', 'left', 1, 0, XLSX_BORDER_COLOR)
                        cx += 1
                    self._write_cell(cx, cy, grid_name, 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, True, 'white', 'left', 1, 0, XLSX_BORDER_COLOR)
                    self._write_cell(cx+1, cy, grids[country][grid_name].get('+', 0), 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, False, 'white', 'right', 1, 0, XLSX_BORDER_COLOR)
                    self._write_cell(cx+2, cy, grids[country][grid_name].get('-', 0), 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, False, 'white', 'right', 1, 0, XLSX_BORDER_COLOR)
                    self._write_cell(cx+3, cy, grids[country][grid_name]['impact'], 1, False, report, fonts, workbook, sheet, XLSX_FONT_SIZE_DEFAULT, False, 'white', 'right', 1, 0, XLSX_BORDER_COLOR)
                    cx = start_x
                    cy += 1
||||
|
||||
# ================================================================
|
||||
# DOCUMENT DATA GENERATION
|
||||
# ================================================================
|
||||
|
||||
    def _generate_document_data_for_export(self, report, options, export_type='pdf'):
        """Produce all data needed for journal report export (PDF or XLSX).

        :return: dict with 'journals_vals' (one dict per journal holding its
            export lines, columns and optional tax summary) and
            'global_tax_summary' (False when no journal has taxes).
        """
        self.env.flush_all()  # make pending ORM writes visible to raw SQL
        qry = report._get_report_query(options, 'strict_range')
        acct_alias = qry.join(
            lhs_alias='account_move_line', lhs_column='account_id',
            rhs_table='account_account', rhs_column='id', link='account_id',
        )
        acct_code = self.env['account.account']._field_to_sql(acct_alias, 'code', qry)
        acct_name = self.env['account.account']._field_to_sql(acct_alias, 'name')

        # One row per move line; taxes (including children of group taxes)
        # and tax grids are aggregated per line. Ordering mirrors the
        # on-screen report: journal type, journal sequence, move, then
        # payable/receivable lines first and liquidity lines last.
        stmt = SQL(
            """
            SELECT
                account_move_line.id AS move_line_id,
                account_move_line.name,
                account_move_line.date,
                account_move_line.invoice_date,
                account_move_line.amount_currency,
                account_move_line.tax_base_amount,
                account_move_line.currency_id AS move_line_currency,
                am.id AS move_id,
                am.name AS move_name,
                am.journal_id,
                am.currency_id AS move_currency,
                am.amount_total_in_currency_signed AS amount_currency_total,
                am.currency_id != cp.currency_id AS is_multicurrency,
                p.name AS partner_name,
                %(acct_code)s AS account_code,
                %(acct_name)s AS account_name,
                %(acct_alias)s.account_type AS account_type,
                COALESCE(account_move_line.debit, 0) AS debit,
                COALESCE(account_move_line.credit, 0) AS credit,
                COALESCE(account_move_line.balance, 0) AS balance,
                %(j_name)s AS journal_name,
                j.code AS journal_code,
                j.type AS journal_type,
                cp.currency_id AS company_currency,
                CASE WHEN j.type = 'sale' THEN am.payment_reference
                     WHEN j.type = 'purchase' THEN am.ref END AS reference,
                array_remove(array_agg(DISTINCT %(tax_name)s), NULL) AS taxes,
                array_remove(array_agg(DISTINCT %(tag_name)s), NULL) AS tax_grids
            FROM %(tbl)s
            JOIN account_move am ON am.id = account_move_line.move_id
            LEFT JOIN res_partner p ON p.id = account_move_line.partner_id
            JOIN account_journal j ON j.id = am.journal_id
            JOIN res_company cp ON cp.id = am.company_id
            LEFT JOIN account_move_line_account_tax_rel aml_at_rel ON aml_at_rel.account_move_line_id = account_move_line.id
            LEFT JOIN account_tax parent_tax ON parent_tax.id = aml_at_rel.account_tax_id and parent_tax.amount_type = 'group'
            LEFT JOIN account_tax_filiation_rel tax_filiation_rel ON tax_filiation_rel.parent_tax = parent_tax.id
            LEFT JOIN account_tax tax ON (tax.id = aml_at_rel.account_tax_id and tax.amount_type != 'group') or tax.id = tax_filiation_rel.child_tax
            LEFT JOIN account_account_tag_account_move_line_rel tag_rel ON tag_rel.account_move_line_id = account_move_line.id
            LEFT JOIN account_account_tag tag ON tag_rel.account_account_tag_id = tag.id
            LEFT JOIN res_currency journal_curr ON journal_curr.id = j.currency_id
            WHERE %(pmt_filter)s AND %(where)s
            GROUP BY "account_move_line".id, am.id, p.id, %(acct_alias)s.id, j.id, cp.id, journal_curr.id, account_code, account_name
            ORDER BY
                CASE j.type WHEN 'sale' THEN 1 WHEN 'purchase' THEN 2 WHEN 'general' THEN 3 WHEN 'bank' THEN 4 ELSE 5 END,
                j.sequence,
                CASE WHEN am.name = '/' THEN 1 ELSE 0 END, am.date, am.name,
                CASE %(acct_alias)s.account_type
                    WHEN 'liability_payable' THEN 1 WHEN 'asset_receivable' THEN 1
                    WHEN 'liability_credit_card' THEN 5 WHEN 'asset_cash' THEN 5 ELSE 2 END,
                account_move_line.tax_line_id NULLS FIRST
            """,
            tbl=qry.from_clause,
            pmt_filter=self._get_payment_lines_filter_case_statement(options),
            where=qry.where_clause,
            acct_code=acct_code,
            acct_name=acct_name,
            acct_alias=SQL.identifier(acct_alias),
            j_name=self.env['account.journal']._field_to_sql('j', 'name'),
            tax_name=self.env['account.tax']._field_to_sql('tax', 'name'),
            tag_name=self.env['account.account.tag']._field_to_sql('tag', 'name'),
        )
        self.env.cr.execute(stmt)

        # Group rows: journal -> move -> ordered move lines.
        by_journal = {}
        for row in self.env.cr.dictfetchall():
            by_journal.setdefault(row['journal_id'], {}).setdefault(row['move_id'], []).append(row)

        journals_vals = []
        any_has_taxes = False
        for jnl_moves in by_journal.values():
            move_lists = list(jnl_moves.values())
            first = move_lists[0][0]
            jv = {
                'id': first['journal_id'],
                'name': first['journal_name'],
                'code': first['journal_code'],
                'type': first['journal_type'],
            }
            if self._section_has_tax(options, jv['id']):
                jv['tax_summary'] = self._get_tax_summary_section(options, jv)
                any_has_taxes = True
            # Lines must be computed before columns: bank journals may set
            # 'multicurrency_column' on jv while building their lines.
            jv['lines'] = self._get_export_lines_for_journal(report, options, export_type, jv, move_lists)
            jv['columns'] = self._get_columns_for_journal(jv, export_type)
            journals_vals.append(jv)

        return {
            'journals_vals': journals_vals,
            'global_tax_summary': self._get_tax_summary_section(options) if any_has_taxes else False,
        }
||||
|
||||
def _get_columns_for_journal(self, journal, export_type='pdf'):
|
||||
cols = [{'name': _('Document'), 'label': 'document'}]
|
||||
if export_type == 'pdf':
|
||||
cols.append({'name': _('Account'), 'label': 'account_label'})
|
||||
else:
|
||||
cols.extend([
|
||||
{'name': _('Account Code'), 'label': 'account_code'},
|
||||
{'name': _('Account Label'), 'label': 'account_label'},
|
||||
])
|
||||
cols.extend([
|
||||
{'name': _('Name'), 'label': 'name'},
|
||||
{'name': _('Debit'), 'label': 'debit', 'class': 'o_right_alignment '},
|
||||
{'name': _('Credit'), 'label': 'credit', 'class': 'o_right_alignment '},
|
||||
])
|
||||
if journal.get('tax_summary'):
|
||||
cols.append({'name': _('Taxes'), 'label': 'taxes'})
|
||||
if journal['tax_summary'].get('tax_grid_summary_lines'):
|
||||
cols.append({'name': _('Tax Grids'), 'label': 'tax_grids'})
|
||||
if journal['type'] == 'bank':
|
||||
cols.append({'name': _('Balance'), 'label': 'balance', 'class': 'o_right_alignment '})
|
||||
if journal.get('multicurrency_column'):
|
||||
cols.append({'name': _('Amount Currency'), 'label': 'amount_currency', 'class': 'o_right_alignment '})
|
||||
return cols
|
||||
|
||||
    def _get_export_lines_for_journal(self, report, options, export_type, journal_vals, move_lists):
        """Build export rows (dicts keyed by column label) for one journal.

        Bank journals delegate to a dedicated routine with running
        balances; other journals list each move's lines followed by a
        blank spacer and a grand total row.
        """
        if journal_vals['type'] == 'bank':
            return self._get_export_lines_for_bank_journal(report, options, export_type, journal_vals, move_lists)

        sum_cr, sum_dr = 0, 0
        rows = []
        for i, aml_list in enumerate(move_lists):
            for j, entry in enumerate(aml_list):
                # Document column: move name on the first line, date on the
                # second, empty afterwards.
                doc = False
                if j == 0:
                    doc = entry['move_name']
                elif j == 1:
                    doc = entry['date']
                row = self._get_base_line(report, options, export_type, doc, entry, j, i % 2 != 0, journal_vals.get('tax_summary'))
                sum_cr += entry['credit']
                sum_dr += entry['debit']
                rows.append(row)

            first_entry = aml_list[0]
            if first_entry['is_multicurrency']:
                mc_label = _('Amount in currency: %s', report._format_value(options, first_entry['amount_currency_total'], 'monetary', format_params={'currency_id': first_entry['move_currency']}))
                # Short moves (<= 2 lines) get an extra row for the currency
                # note; longer moves reuse the last line's empty doc cell.
                if len(aml_list) <= 2:
                    rows.append({'document': {'data': mc_label}, 'line_class': 'o_even ' if i % 2 == 0 else 'o_odd ', 'amount': {'data': first_entry['amount_currency_total']}, 'currency_id': {'data': first_entry['move_currency']}})
                else:
                    rows[-1]['document'] = {'data': mc_label}
                    rows[-1]['amount'] = {'data': first_entry['amount_currency_total']}
                    rows[-1]['currency_id'] = {'data': first_entry['move_currency']}

        rows.append({})  # blank spacer row before the total
        rows.append({
            'name': {'data': _('Total')},
            'debit': {'data': report._format_value(options, sum_dr, 'monetary')},
            'credit': {'data': report._format_value(options, sum_cr, 'monetary')},
        })
        return rows
||||
|
||||
    def _get_export_lines_for_bank_journal(self, report, options, export_type, journal_vals, move_lists):
        """Build export rows for a bank journal.

        Maintains a running balance starting from the period's opening
        balance, skips the liquidity (cash / credit-card) lines themselves,
        and flags the journal for an extra amount-in-currency column when
        multi-currency lines are encountered.
        """
        rows = []
        running_balance = self._query_bank_journal_initial_balance(options, journal_vals['id'])
        rows.append({'name': {'data': _('Starting Balance')}, 'balance': {'data': report._format_value(options, running_balance, 'monetary')}})

        sum_cr, sum_dr = 0, 0
        for i, aml_list in enumerate(move_lists):
            # A move without any cash/credit-card line is an unreconciled
            # payment: it must not alter the running balance.
            is_unreconciled = not any(ln for ln in aml_list if ln['account_type'] in ('liability_credit_card', 'asset_cash'))
            for j, entry in enumerate(aml_list):
                if entry['account_type'] not in ('liability_credit_card', 'asset_cash'):
                    doc = ''
                    if j == 0:
                        doc = f'{entry["move_name"]} ({entry["date"]})'
                    row = self._get_base_line(report, options, export_type, doc, entry, j, i % 2 != 0, journal_vals.get('tax_summary'))
                    sum_cr += entry['credit']
                    sum_dr += entry['debit']

                    if not is_unreconciled:
                        # Counterpart lines are negated to express the
                        # movement from the bank account's point of view.
                        line_bal = -entry['balance']
                        running_balance += line_bal
                        row['balance'] = {
                            'data': report._format_value(options, running_balance, 'monetary'),
                            'class': 'o_muted ' if self.env.company.currency_id.is_zero(line_bal) else '',
                        }

                    if self.env.user.has_group('base.group_multi_currency') and entry['move_line_currency'] != entry['company_currency']:
                        journal_vals['multicurrency_column'] = True
                        mc_amt = -entry['amount_currency'] if not is_unreconciled else entry['amount_currency']
                        mc_cur = self.env['res.currency'].browse(entry['move_line_currency'])
                        row['amount_currency'] = {
                            'data': report._format_value(options, mc_amt, 'monetary', format_params={'currency_id': mc_cur.id}),
                            'class': 'o_muted ' if mc_cur.is_zero(mc_amt) else '',
                        }
                    rows.append(row)

        rows.append({})  # blank spacer row before the total
        rows.append({'name': {'data': _('Total')}, 'balance': {'data': report._format_value(options, running_balance, 'monetary')}})
        return rows
||||
|
||||
    def _get_base_line(self, report, options, export_type, document, entry, line_idx, is_even, has_taxes):
        """Build one export row (dict keyed by column label) for a move line.

        :param document: value for the leading "Document" column (move name
            or date on the first lines, else False)
        :param entry: SQL row dict from _generate_document_data_for_export
        :param line_idx: index of the line within its move (0 = first line)
        :param is_even: alternating flag used for row-striping CSS classes
        :param has_taxes: the journal's tax summary; truthy adds tax columns
        """
        co_cur = self.env.company.currency_id
        label = entry['name'] or entry['reference']
        acct_label = entry['partner_name'] or entry['account_name']
        # Receivable/payable lines with a partner show "AR/AP <partner>";
        # every other line falls back to "G <account code>".
        if entry['partner_name'] and entry['account_type'] == 'asset_receivable':
            fmt_label = _('AR %s', acct_label)
        elif entry['partner_name'] and entry['account_type'] == 'liability_payable':
            fmt_label = _('AP %s', acct_label)
        else:
            acct_label = entry['account_name']
            fmt_label = _('G %s', entry["account_code"])

        row = {
            'line_class': 'o_even ' if is_even else 'o_odd ',
            'document': {'data': document, 'class': 'o_bold ' if line_idx == 0 else ''},
            'account_code': {'data': entry['account_code']},
            # PDF uses the compact formatted label; XLSX keeps raw labels
            # since it has a separate account-code column.
            'account_label': {'data': acct_label if export_type != 'pdf' else fmt_label},
            'name': {'data': label},
            'debit': {'data': report._format_value(options, entry['debit'], 'monetary'), 'class': 'o_muted ' if co_cur.is_zero(entry['debit']) else ''},
            'credit': {'data': report._format_value(options, entry['credit'], 'monetary'), 'class': 'o_muted ' if co_cur.is_zero(entry['credit']) else ''},
        }
        if has_taxes:
            # "T: <tax names>" for base lines, "B: <base amount>" for tax lines.
            tax_display = ''
            if entry['taxes']:
                tax_display = _('T: %s', ', '.join(entry['taxes']))
            elif entry['tax_base_amount'] is not None:
                tax_display = _('B: %s', report._format_value(options, entry['tax_base_amount'], 'monetary'))
            row['taxes'] = {'data': tax_display}
            row['tax_grids'] = {'data': ', '.join(entry['tax_grids'])}
        return row
||||
|
||||
# ================================================================
|
||||
# QUERY HELPERS
|
||||
# ================================================================
|
||||
|
||||
def _get_payment_lines_filter_case_statement(self, options):
|
||||
if not options.get('show_payment_lines'):
|
||||
return SQL("""
|
||||
(j.type != 'bank' OR EXISTS(
|
||||
SELECT 1
|
||||
FROM account_move_line
|
||||
JOIN account_account acc ON acc.id = account_move_line.account_id
|
||||
WHERE account_move_line.move_id = am.id
|
||||
AND acc.account_type IN ('liability_credit_card', 'asset_cash')
|
||||
))
|
||||
""")
|
||||
return SQL('TRUE')
|
||||
|
||||
def _query_bank_journal_initial_balance(self, options, journal_id):
|
||||
report = self.env.ref('fusion_accounting.journal_report')
|
||||
qry = report._get_report_query(options, 'to_beginning_of_period', domain=[('journal_id', '=', journal_id)])
|
||||
stmt = SQL("""
|
||||
SELECT COALESCE(SUM(account_move_line.balance), 0) AS balance
|
||||
FROM %(tbl)s
|
||||
JOIN account_journal journal ON journal.id = "account_move_line".journal_id
|
||||
AND account_move_line.account_id = journal.default_account_id
|
||||
WHERE %(where)s
|
||||
GROUP BY journal.id
|
||||
""", tbl=qry.from_clause, where=qry.where_clause)
|
||||
self.env.cr.execute(stmt)
|
||||
rows = self.env.cr.dictfetchall()
|
||||
return rows[0]['balance'] if rows else 0
|
||||
|
||||
# ================================================================
|
||||
# TAX SUMMARIES
|
||||
# ================================================================
|
||||
|
||||
def _section_has_tax(self, options, journal_id):
|
||||
report = self.env['account.report'].browse(options.get('report_id'))
|
||||
domain = [('tax_ids', '!=', False)]
|
||||
if journal_id:
|
||||
domain.append(('journal_id', '=', journal_id))
|
||||
domain += report._get_options_domain(options, 'strict_range')
|
||||
return bool(self.env['account.move.line'].search_count(domain, limit=1))
|
||||
|
||||
def _get_tax_summary_section(self, options, journal_vals=None):
|
||||
td = {
|
||||
'date_from': options.get('date', {}).get('date_from'),
|
||||
'date_to': options.get('date', {}).get('date_to'),
|
||||
}
|
||||
if journal_vals:
|
||||
td['journal_id'] = journal_vals['id']
|
||||
td['journal_type'] = journal_vals['type']
|
||||
|
||||
tax_lines = self._get_generic_tax_summary_for_sections(options, td)
|
||||
nd_col = any(ln.get('tax_non_deductible_no_format') for vals in tax_lines.values() for ln in vals)
|
||||
ded_col = any(ln.get('tax_deductible_no_format') for vals in tax_lines.values() for ln in vals)
|
||||
due_col = any(ln.get('tax_due_no_format') for vals in tax_lines.values() for ln in vals)
|
||||
|
||||
return {
|
||||
'tax_report_lines': tax_lines,
|
||||
'tax_non_deductible_column': nd_col,
|
||||
'tax_deductible_column': ded_col,
|
||||
'tax_due_column': due_col,
|
||||
'extra_columns': int(nd_col) + int(ded_col) + int(due_col),
|
||||
'tax_grid_summary_lines': self._get_tax_grids_summary(options, td),
|
||||
}
|
||||
|
||||
    def _get_generic_tax_report_options(self, options, data):
        """Build options for the generic tax report, constrained to this
        journal report's domain and (optionally) to a single journal.

        :param data: dict with 'date_from'/'date_to' and optionally
            'journal_id'/'journal_type' from _get_tax_summary_section
        """
        generic_rpt = self.env.ref('account.generic_tax_report')
        prev = options.copy()
        prev.update({
            'selected_variant_id': generic_rpt.id,
            'date_from': data.get('date_from'),
            'date_to': data.get('date_to'),
        })
        tax_opts = generic_rpt.get_options(prev)
        jnl_rpt = self.env['account.report'].browse(options['report_id'])
        # Restrict the tax report to the exact move lines of this report.
        tax_opts['forced_domain'] = tax_opts.get('forced_domain', []) + jnl_rpt._get_options_domain(options, 'strict_range')

        if data.get('journal_id') or data.get('journal_type'):
            tax_opts['journals'] = [{
                'id': data.get('journal_id'),
                'model': 'account.journal',
                'type': data.get('journal_type'),
                'selected': True,
            }]
        return tax_opts
||||
|
||||
def _get_tax_grids_summary(self, options, data):
    """Compute per-country, per-tax-grid balance totals for the report period.

    Runs one SQL query joining the move lines selected by the generic tax
    report options with their tax tags, then folds the rows into a nested
    dict: ``{country_name: {grid_name: {...amounts...}}}``.

    :param options: current journal report options dict.
    :param data: dict with the date range / journal restriction (see
        ``_get_generic_tax_report_options``).
    :return: nested dict keyed by country name then grid name; each grid
        entry holds formatted '+' and '-' amounts, their ``*_no_format``
        raw values, the contributing ``tag_ids`` and the net ``impact``.
    """
    report = self.env.ref('fusion_accounting.journal_report')
    tax_opts = self._get_generic_tax_report_options(options, data)
    qry = report._get_report_query(tax_opts, 'strict_range')
    # _field_to_sql yields translated-name SQL expressions for the aliases
    # used inside the query below.
    country_nm = self.env['res.country']._field_to_sql('country', 'name')
    tag_nm = self.env['account.account.tag']._field_to_sql('tag', 'name')
    stmt = SQL("""
        WITH tag_info (country_name, tag_id, tag_name, tag_sign, balance) AS (
            SELECT %(cn)s AS country_name, tag.id, %(tn)s AS name,
            CASE WHEN tag.tax_negate IS TRUE THEN '-' ELSE '+' END,
            SUM(COALESCE("account_move_line".balance, 0)
            * CASE WHEN "account_move_line".tax_tag_invert THEN -1 ELSE 1 END) AS balance
            FROM account_account_tag tag
            JOIN account_account_tag_account_move_line_rel rel ON tag.id = rel.account_account_tag_id
            JOIN res_country country ON country.id = tag.country_id
            , %(tbl)s
            WHERE %(where)s AND applicability = 'taxes' AND "account_move_line".id = rel.account_move_line_id
            GROUP BY country_name, tag.id
        )
        SELECT country_name, tag_id, REGEXP_REPLACE(tag_name, '^[+-]', '') AS name, balance, tag_sign AS sign
        FROM tag_info ORDER BY country_name, name
    """, cn=country_nm, tn=tag_nm, tbl=qry.from_clause, where=qry.where_clause)
    self.env.cr.execute(stmt)
    rows = self.env.cr.fetchall()

    result = {}
    # A grid can be hit with '+' and/or '-' tags; make sure both signs are
    # always present (zero-filled) so the template can render them.
    opp = {'+': '-', '-': '+'}
    for cname, _tid, gname, bal, sign in rows:
        result.setdefault(cname, {}).setdefault(gname, {})
        result[cname][gname].setdefault('tag_ids', []).append(_tid)
        result[cname][gname][sign] = report._format_value(options, bal, 'monetary')
        if opp[sign] not in result[cname][gname]:
            result[cname][gname][opp[sign]] = report._format_value(options, 0, 'monetary')
        result[cname][gname][sign + '_no_format'] = bal
        # Net impact of the grid = '+' total minus '-' total.
        result[cname][gname]['impact'] = report._format_value(options, result[cname][gname].get('+_no_format', 0) - result[cname][gname].get('-_no_format', 0), 'monetary')
    return result
def _get_generic_tax_summary_for_sections(self, options, data):
    """Collect per-tax amounts from the generic tax report, grouped by country.

    :param options: current journal report options dict.
    :param data: date range / journal restriction dict.
    :return: ``{country_name: [line_dict, ...]}`` sorted by country name;
        each line carries formatted and raw amounts for one tax.
    """
    report = self.env['account.report'].browse(options['report_id'])
    tax_options = self._get_generic_tax_report_options(options, data)
    # Ask the generic report to expose the deductibility columns as well.
    tax_options['account_journal_report_tax_deductibility_columns'] = True
    generic_report = self.env.ref('account.generic_tax_report')

    # Keep only the 'account.tax' lines; column order is fixed by the
    # deductibility option set above.
    amounts_by_tax_id = {}
    for line in generic_report._get_lines(tax_options):
        res_model, res_id = report._parse_line_id(line.get('id'))[-1][1:]
        if res_model != 'account.tax':
            continue
        cols = line['columns']
        amounts_by_tax_id[res_id] = {
            'base_amount': cols[0]['no_format'],
            'tax_amount': cols[1]['no_format'],
            'tax_non_deductible': cols[2]['no_format'],
            'tax_deductible': cols[3]['no_format'],
            'tax_due': cols[4]['no_format'],
        }

    def fmt(amount):
        return report._format_value(options, amount, 'monetary')

    grouped = {}
    for tax in self.env['account.tax'].browse(amounts_by_tax_id.keys()):
        amounts = amounts_by_tax_id[tax.id]
        grouped.setdefault(tax.country_id.name, []).append({
            'base_amount': fmt(amounts['base_amount']),
            'tax_amount': fmt(amounts['tax_amount']),
            'tax_non_deductible': fmt(amounts['tax_non_deductible']),
            'tax_non_deductible_no_format': amounts['tax_non_deductible'],
            'tax_deductible': fmt(amounts['tax_deductible']),
            'tax_deductible_no_format': amounts['tax_deductible'],
            'tax_due': fmt(amounts['tax_due']),
            'tax_due_no_format': amounts['tax_due'],
            'name': tax.name,
            'line_id': report._get_generic_line_id('account.tax', tax.id),
        })
    return dict(sorted(grouped.items()))
# ================================================================
|
||||
# ACTIONS
|
||||
# ================================================================
|
||||
|
||||
def journal_report_tax_tag_template_open_aml(self, options, params=None):
    """Open the journal items carrying the given tax tag, within the report range.

    :param options: current report options dict.
    :param params: dict expected to carry ``tag_ids`` (the tag to filter on).
    :return: an ``ir.actions.act_window`` on ``account.move.line``.
    """
    report = self.env['account.report'].browse(options['report_id'])
    domain = list(report._get_options_domain(options, 'strict_range'))
    domain.append(('tax_tag_ids', 'in', [params.get('tag_ids')]))
    # Restrict to tax-exigible lines only.
    domain += self.env['account.move.line']._get_tax_exigible_domain()
    return {
        'type': 'ir.actions.act_window',
        'name': _('Journal Items for Tax Audit'),
        'res_model': 'account.move.line',
        'views': [[self.env.ref('account.view_move_line_tax_audit_tree').id, 'list']],
        'domain': domain,
        'context': self.env.context,
    }
def journal_report_action_dropdown_audit_default_tax_report(self, options, params):
    """Delegate the tax-audit dropdown action to the generic tax report handler."""
    generic_handler = self.env['account.generic.tax.report.handler']
    return generic_handler.caret_option_audit_tax(options, params)
def journal_report_action_open_tax_journal_items(self, options, params):
    """Open journal items grouped for tax audit (by tag or by tax).

    :param options: current report options dict (may be falsy).
    :param params: optional dict with ``date_from``/``date_to``,
        ``journal_id``, ``tax_type`` ('tag' or 'tax') and ``name``.
    :return: an ``ir.actions.act_window`` on ``account.move.line``.
    """
    # BUG FIX: the original accessed ``params.get('journal_id')`` without a
    # None guard while every other access used ``params and params.get(...)``,
    # crashing with AttributeError when params is None. Normalize once.
    params = params or {}
    date_opts = options.get('date', {}) if options else {}
    ctx = {
        'search_default_posted': 0 if options.get('all_entries') else 1,
        'search_default_date_between': 1,
        'date_from': params.get('date_from') or date_opts.get('date_from'),
        'date_to': params.get('date_to') or date_opts.get('date_to'),
        'search_default_journal_id': params.get('journal_id'),
        'expand': 1,
    }
    tax_type = params.get('tax_type')
    if tax_type == 'tag':
        ctx.update({'search_default_group_by_tax_tags': 1, 'search_default_group_by_account': 2})
    elif tax_type == 'tax':
        ctx.update({'search_default_group_by_taxes': 1, 'search_default_group_by_account': 2})
    if 'journal_id' in params:
        # The search filter expects a list of ids.
        ctx['search_default_journal_id'] = [params['journal_id']]
    if options and options.get('journals') and not ctx.get('search_default_journal_id'):
        # Fall back to the single journal selected in the report options.
        selected = [j['id'] for j in options['journals'] if j.get('selected') and j['model'] == 'account.journal']
        if len(selected) == 1:
            ctx['search_default_journal_id'] = selected
    return {
        'name': params.get('name'),
        'view_mode': 'list,pivot,graph,kanban',
        'res_model': 'account.move.line',
        'views': [(self.env.ref('account.view_move_line_tree').id, 'list')],
        'type': 'ir.actions.act_window',
        'domain': [('display_type', 'not in', ('line_section', 'line_note'))],
        'context': ctx,
    }
def journal_report_action_open_account_move_lines_by_account(self, options, params):
    """Open the journal items of one account within one journal, for the period.

    :param options: current report options dict.
    :param params: dict with ``journal_id`` and ``account_id``.
    :return: an ``ir.actions.act_window`` on ``account.move.line``.
    """
    report = self.env['account.report'].browse(options['report_id'])
    journal = self.env['account.journal'].browse(params['journal_id'])
    account = self.env['account.account'].browse(params['account_id'])
    # FIX: plain many2one comparison instead of the redundant dotted-path
    # '('journal_id.id', '=', ...)', which forces a needless related-field
    # traversal for an identical result set.
    domain = [('journal_id', '=', journal.id), ('account_id', '=', account.id)]
    domain += report._get_options_domain(options, 'strict_range')
    return {
        'type': 'ir.actions.act_window',
        'name': _("%(journal)s - %(account)s", journal=journal.name, account=account.name),
        'res_model': 'account.move.line',
        'views': [[False, 'list']],
        'domain': domain,
    }
def journal_report_open_aml_by_move(self, options, params):
    """Open journal items grouped by move/partner, tailored to the journal type.

    :param options: current report options dict.
    :param params: dict with ``journal_id``; ``view_ref`` is injected here and
        read by ``open_journal_items`` (intentional in-place mutation).
    :return: the act_window action returned by ``open_journal_items``, with
        extra search-context defaults merged in.
    """
    report = self.env['account.report'].browse(options['report_id'])
    journal = self.env['account.journal'].browse(params['journal_id'])
    extra_context = {'search_default_group_by_account': 0, 'show_more_partner_info': 1}
    if journal.type in ('bank', 'credit'):
        params['view_ref'] = 'fusion_accounting.view_journal_report_audit_bank_move_line_tree'
        extra_context['search_default_exclude_bank_lines'] = 1
    else:
        params['view_ref'] = 'fusion_accounting.view_journal_report_audit_move_line_tree'
        extra_context.update({'search_default_group_by_partner': 1, 'search_default_group_by_move': 2})
        if journal.type in ('sale', 'purchase'):
            extra_context['search_default_invoices_lines'] = 1
    action = report.open_journal_items(options=options, params=params)
    # BUG FIX: ``action.get('context', {}).update(...)`` updated a throwaway
    # dict whenever the action had no 'context' key, silently dropping every
    # extra default. ``setdefault`` mutates the dict stored in the action.
    action.setdefault('context', {}).update(extra_context)
    return action
1842
Fusion Accounting/models/account_move.py
Normal file
1842
Fusion Accounting/models/account_move.py
Normal file
File diff suppressed because it is too large
Load Diff
352
Fusion Accounting/models/account_move_edi.py
Normal file
352
Fusion Accounting/models/account_move_edi.py
Normal file
@@ -0,0 +1,352 @@
|
||||
"""
|
||||
Fusion Accounting - Account Move EDI Extension
|
||||
|
||||
Extends the ``account.move`` model with fields and methods for
|
||||
Electronic Data Interchange (EDI) document management. Adds an EDI tab
|
||||
to the invoice form, buttons to generate/export electronic documents,
|
||||
and an import wizard that can parse UBL 2.1 or CII XML files into
|
||||
new invoice records.
|
||||
|
||||
Original implementation by Nexa Systems Inc.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import logging
|
||||
from lxml import etree
|
||||
|
||||
from odoo import api, fields, models, Command, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionAccountMoveEDI(models.Model):
    """
    Adds EDI lifecycle tracking and import/export capabilities to
    journal entries.
    """

    _inherit = "account.move"

    # ------------------------------------------------------------------
    # Fields
    # ------------------------------------------------------------------
    edi_document_ids = fields.One2many(
        comodel_name="fusion.edi.document",
        inverse_name="move_id",
        string="EDI Documents",
        copy=False,
        help="Electronic documents generated for this journal entry.",
    )
    # Count of linked EDI documents; drives the stat button in the form view.
    edi_document_count = fields.Integer(
        string="EDI Count",
        compute="_compute_edi_document_count",
    )
    edi_state = fields.Selection(
        selection=[
            ("to_send", "To Send"),
            ("sent", "Sent"),
            ("to_cancel", "To Cancel"),
            ("cancelled", "Cancelled"),
        ],
        string="EDI Status",
        compute="_compute_edi_state",
        store=True,
        help=(
            "Aggregate EDI state derived from linked EDI documents. "
            "Shows the most urgent state across all formats."
        ),
    )
    edi_error_message = fields.Text(
        string="EDI Error",
        compute="_compute_edi_error_message",
        help="Concatenated error messages from all EDI documents.",
    )
    # Highest severity among the documents' blocking levels; computed in the
    # same method as edi_error_message.
    edi_blocking_level = fields.Selection(
        selection=[
            ("info", "Info"),
            ("warning", "Warning"),
            ("error", "Error"),
        ],
        string="EDI Error Severity",
        compute="_compute_edi_error_message",
    )

    # ------------------------------------------------------------------
    # Computed fields
    # ------------------------------------------------------------------
    @api.depends("edi_document_ids")
    def _compute_edi_document_count(self):
        """Count the EDI documents linked to each move."""
        for move in self:
            move.edi_document_count = len(move.edi_document_ids)

    @api.depends(
        "edi_document_ids.state",
        "edi_document_ids.error_message",
    )
    def _compute_edi_state(self):
        """Derive an aggregate state from all linked EDI documents.

        Priority order (highest urgency first):
            to_send > to_cancel > sent > cancelled

        If there are no EDI documents the field is left empty.
        """
        # Lower number = more urgent; min() below therefore picks the most
        # urgent state present across the documents. Unknown states map to 99
        # so they never win over a known one.
        priority = {
            "to_send": 0,
            "to_cancel": 1,
            "sent": 2,
            "cancelled": 3,
        }
        for move in self:
            docs = move.edi_document_ids
            if not docs:
                move.edi_state = False
                continue
            move.edi_state = min(
                docs.mapped("state"),
                key=lambda s: priority.get(s, 99),
            )

    @api.depends("edi_document_ids.error_message", "edi_document_ids.blocking_level")
    def _compute_edi_error_message(self):
        """Concatenate document errors and derive the highest blocking level."""
        for move in self:
            errors = move.edi_document_ids.filtered("error_message")
            if errors:
                # One line per failing document, prefixed with its format name.
                move.edi_error_message = "\n".join(
                    f"[{doc.edi_format_id.name}] {doc.error_message}"
                    for doc in errors
                )
                # Take the highest severity
                levels = errors.mapped("blocking_level")
                if "error" in levels:
                    move.edi_blocking_level = "error"
                elif "warning" in levels:
                    move.edi_blocking_level = "warning"
                else:
                    move.edi_blocking_level = "info"
            else:
                move.edi_error_message = False
                move.edi_blocking_level = False

    # ------------------------------------------------------------------
    # Button Actions
    # ------------------------------------------------------------------
    def action_send_edi(self):
        """Create EDI documents for all active formats and send them.

        For each active ``fusion.edi.format`` that is applicable to this
        move, creates a ``fusion.edi.document`` in *to_send* state (if
        one does not already exist) and then triggers generation.

        :raises UserError: if the move is not posted.
        """
        self.ensure_one()
        if self.state != "posted":
            raise UserError(
                _("Only posted journal entries can generate EDI documents.")
            )

        formats = self.env["fusion.edi.format"].search([
            ("active", "=", True),
        ])

        for fmt in formats:
            # Check applicability; a UserError from the format means
            # "not applicable to this move" and is skipped silently.
            try:
                fmt._check_applicability(self)
            except UserError:
                continue

            # Skip formats that already have a live (non-cancelled) document.
            existing = self.edi_document_ids.filtered(
                lambda d: d.edi_format_id == fmt and d.state != "cancelled"
            )
            if existing:
                continue

            self.env["fusion.edi.document"].create({
                "move_id": self.id,
                "edi_format_id": fmt.id,
                "state": "to_send",
            })

        # Trigger generation on all pending documents
        pending = self.edi_document_ids.filtered(
            lambda d: d.state == "to_send"
        )
        if pending:
            pending.action_send()

    def action_export_edi_xml(self):
        """Export the first available EDI attachment for download.

        Opens a download action for the XML file so the user can save
        it locally.

        Returns:
            dict: An ``ir.actions.act_url`` action pointing to the
            attachment download URL.

        Raises:
            UserError: When no sent document with an attachment exists.
        """
        self.ensure_one()
        sent_docs = self.edi_document_ids.filtered(
            lambda d: d.state == "sent" and d.attachment_id
        )
        if not sent_docs:
            raise UserError(
                _("No sent EDI document with an attachment is available. "
                  "Please generate EDI documents first.")
            )

        attachment = sent_docs[0].attachment_id
        return {
            "type": "ir.actions.act_url",
            "url": f"/web/content/{attachment.id}?download=true",
            "target": "new",
        }

    def action_view_edi_documents(self):
        """Open the list of EDI documents for this journal entry.

        Returns:
            dict: A window action displaying related EDI documents.
        """
        self.ensure_one()
        return {
            "type": "ir.actions.act_window",
            "name": _("EDI Documents"),
            "res_model": "fusion.edi.document",
            "domain": [("move_id", "=", self.id)],
            "view_mode": "list,form",
            "context": {"default_move_id": self.id},
        }

    # ------------------------------------------------------------------
    # Import
    # ------------------------------------------------------------------
    def action_import_edi_xml(self):
        """Open a file upload wizard to import a UBL or CII XML invoice.

        Returns:
            dict: A window action for the import wizard.
        """
        return {
            "type": "ir.actions.act_window",
            "name": _("Import EDI Invoice"),
            "res_model": "fusion.edi.import.wizard",
            "view_mode": "form",
            "target": "new",
            "context": {
                # Propagate the move type of the current list/form so the
                # wizard creates the right kind of document by default.
                "default_move_type": self.env.context.get(
                    "default_move_type", "out_invoice"
                ),
            },
        }

    @api.model
    def create_invoice_from_xml(self, xml_bytes):
        """Parse an XML file (UBL or CII) and create an invoice.

        Auto-detects the XML format by inspecting the root element
        namespace.

        Args:
            xml_bytes (bytes): Raw XML content.

        Returns:
            account.move: The newly created invoice record.

        Raises:
            UserError: When the XML format is not recognised.
        """
        root = etree.fromstring(xml_bytes)
        ns = etree.QName(root).namespace

        # Detect format.  NOTE: the CII check must come first —
        # "CrossIndustryInvoice" also contains the substring "Invoice",
        # so reversing the branches would misdetect CII as UBL.
        if "CrossIndustryInvoice" in (ns or ""):
            fmt_code = "cii"
            parser = self.env["fusion.cii.generator"]
            values = parser.parse_cii_invoice(xml_bytes)
        elif "Invoice" in (ns or "") or "CreditNote" in (ns or ""):
            fmt_code = "ubl_21"
            parser = self.env["fusion.ubl.generator"]
            values = parser.parse_ubl_invoice(xml_bytes)
        else:
            raise UserError(
                _("Unrecognised XML format. Expected UBL 2.1 or CII.")
            )

        return self._create_move_from_parsed(values, fmt_code)

    @api.model
    def _create_move_from_parsed(self, values, fmt_code):
        """Transform parsed EDI values into an ``account.move`` record.

        Handles partner lookup/creation, currency resolution, and line
        item creation.

        Args:
            values (dict): Parsed invoice data from a generator's
                ``parse_*`` method.
            fmt_code (str): The EDI format code for logging.

        Returns:
            account.move: The newly created draft invoice.
        """
        Partner = self.env["res.partner"]
        Currency = self.env["res.currency"]

        # Resolve partner — start from an empty recordset (falsy until found).
        partner = Partner
        customer_vat = values.get("customer_vat")
        customer_name = values.get("customer_name")
        supplier_name = values.get("supplier_name")

        # For incoming invoices the "supplier" is our vendor
        if values.get("move_type") in ("in_invoice", "in_refund"):
            search_name = supplier_name
            search_vat = values.get("supplier_vat")
        else:
            search_name = customer_name
            search_vat = customer_vat

        # Prefer an exact VAT match; fall back to a fuzzy name match.
        if search_vat:
            partner = Partner.search([("vat", "=", search_vat)], limit=1)
        if not partner and search_name:
            partner = Partner.search(
                [("name", "ilike", search_name)], limit=1
            )

        # Resolve currency by ISO code (res.currency 'name' holds the code).
        currency = Currency
        currency_code = values.get("currency_id")
        if currency_code:
            currency = Currency.search(
                [("name", "=", currency_code)], limit=1
            )

        # Build line commands
        line_commands = []
        for line_vals in values.get("invoice_line_ids", []):
            line_commands.append(Command.create({
                "name": line_vals.get("name", ""),
                "quantity": line_vals.get("quantity", 1),
                "price_unit": line_vals.get("price_unit", 0),
            }))

        move_vals = {
            "move_type": values.get("move_type", "out_invoice"),
            "ref": values.get("ref"),
            "invoice_date": values.get("invoice_date"),
            "invoice_date_due": values.get("invoice_date_due"),
            "invoice_line_ids": line_commands,
        }
        # Empty recordsets are falsy: unresolved partner/currency are simply
        # left off so the move defaults apply.
        if partner:
            move_vals["partner_id"] = partner.id
        if currency:
            move_vals["currency_id"] = currency.id

        move = self.create(move_vals)
        _log.info(
            "Created invoice %s from %s XML import.",
            move.name or "(draft)",
            fmt_code,
        )
        return move
375
Fusion Accounting/models/account_move_external_tax.py
Normal file
375
Fusion Accounting/models/account_move_external_tax.py
Normal file
@@ -0,0 +1,375 @@
|
||||
"""
|
||||
Fusion Accounting - Invoice External Tax Integration
|
||||
=====================================================
|
||||
|
||||
Extends ``account.move`` to support external tax computation through the
|
||||
:class:`FusionExternalTaxProvider` framework. When enabled for an invoice,
|
||||
taxes are calculated by the configured external provider (e.g. AvaTax)
|
||||
instead of using Odoo's built-in tax engine.
|
||||
|
||||
Key behaviours:
|
||||
* Before posting, the external tax provider is called to compute line-level
|
||||
taxes. The resulting tax amounts are written to dedicated tax lines on the
|
||||
invoice.
|
||||
* When an invoice is reset to draft, any previously committed external
|
||||
transactions are voided so they do not appear in tax filings.
|
||||
* A status widget on the invoice form indicates whether taxes have been
|
||||
computed externally and the associated document code.
|
||||
|
||||
Copyright (c) Nexa Systems Inc. - All rights reserved.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, Command, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionMoveExternalTax(models.Model):
|
||||
"""Adds external tax provider support to journal entries / invoices."""
|
||||
|
||||
_inherit = "account.move"
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Fields
|
||||
# -------------------------------------------------------------------------
|
||||
# True once an external provider has computed the taxes on this invoice.
fusion_is_tax_computed_externally = fields.Boolean(
    string="Tax Computed Externally",
    default=False,
    copy=False,
    help="Indicates that the tax amounts on this invoice were calculated "
         "by an external tax provider rather than Odoo's built-in engine.",
)
fusion_tax_provider_id = fields.Many2one(
    comodel_name='fusion.external.tax.provider',
    string="External Tax Provider",
    copy=False,
    help="The external tax provider used to compute taxes on this invoice.",
)
# Provider-side transaction reference, required to void/adjust later.
fusion_external_doc_code = fields.Char(
    string="External Document Code",
    copy=False,
    readonly=True,
    help="Reference code returned by the external tax provider. "
         "Used to void or adjust the transaction.",
)
fusion_external_tax_amount = fields.Monetary(
    string="External Tax Amount",
    currency_field='currency_id',
    copy=False,
    readonly=True,
    help="Total tax amount as calculated by the external provider.",
)
fusion_external_tax_date = fields.Datetime(
    string="Tax Computation Date",
    copy=False,
    readonly=True,
    help="Timestamp of the most recent external tax computation.",
)
# Non-stored technical flag recomputed on the fly (store=False).
fusion_use_external_tax = fields.Boolean(
    string="Use External Tax Provider",
    compute='_compute_fusion_use_external_tax',
    store=False,
    help="Technical field: True when an external provider is active for "
         "this company and the move type supports external taxation.",
)
# -------------------------------------------------------------------------
|
||||
# Computed Fields
|
||||
# -------------------------------------------------------------------------
|
||||
@api.depends('company_id', 'move_type')
def _compute_fusion_use_external_tax(self):
    """Flag moves eligible for external tax computation.

    True when the move's company has an active external tax provider and
    the move is an invoice-like document.
    """
    Provider = self.env['fusion.external.tax.provider']
    invoice_types = ('out_invoice', 'out_refund', 'in_invoice', 'in_refund')
    for move in self:
        has_provider = bool(Provider.get_provider(company=move.company_id))
        move.fusion_use_external_tax = has_provider and move.move_type in invoice_types
# -------------------------------------------------------------------------
|
||||
# External Tax Computation
|
||||
# -------------------------------------------------------------------------
|
||||
def _compute_external_taxes(self):
    """Call the external tax provider and update invoice tax lines.

    For each invoice in the recordset:
        1. Identifies the active external provider.
        2. Sends the product lines to the provider's ``calculate_tax`` method.
        3. Updates or creates tax lines on the invoice to reflect the
           externally computed amounts.
        4. Stores the external document code for later void/adjustment.

    Raises:
        UserError: When the provider call fails; the original exception
            is chained as the cause.
    """
    provider_model = self.env['fusion.external.tax.provider']

    for move in self:
        if move.move_type not in ('out_invoice', 'out_refund', 'in_invoice', 'in_refund'):
            continue

        # Prefer the provider already linked to the move; fall back to the
        # company's active provider.
        provider = move.fusion_tax_provider_id or provider_model.get_provider(
            company=move.company_id,
        )
        if not provider:
            _logger.info(
                "No active external tax provider for company %s, skipping move %s.",
                move.company_id.name, move.name,
            )
            continue

        product_lines = move.invoice_line_ids.filtered(
            lambda l: l.display_type == 'product'
        )
        if not product_lines:
            continue

        _logger.info(
            "Computing external taxes for move %s via provider '%s'.",
            move.name or 'Draft', provider.name,
        )

        try:
            tax_result = provider.calculate_tax(product_lines)
        except UserError:
            # Provider-raised UserErrors are already user-friendly.
            raise
        except Exception as exc:
            # BUG FIX: chain the original exception (`from exc`) so the real
            # provider failure and its traceback are preserved for debugging
            # instead of being flattened into a bare string.
            raise UserError(_(
                "External tax computation failed for invoice %(ref)s:\n%(error)s",
                ref=move.name or 'Draft',
                error=str(exc),
            )) from exc

        # Apply results
        move._apply_external_tax_result(tax_result, provider)
def _apply_external_tax_result(self, tax_result, provider):
    """Write the external tax computation result onto the invoice.

    Creates or updates a dedicated tax line for the externally computed
    tax amount and records metadata about the computation.

    :param tax_result: ``dict`` returned by ``provider.calculate_tax()``.
    :param provider: ``fusion.external.tax.provider`` record.
    """
    self.ensure_one()
    doc_code = tax_result.get('doc_code', '')
    total_tax = tax_result.get('total_tax', 0.0)

    # Find or create a dedicated "External Tax" account.tax record
    external_tax = self._get_or_create_external_tax_record(provider)

    # Update per-line tax amounts from provider response
    # NOTE(review): only the *presence* of a per-line result is checked
    # below — the amounts inside ``lr`` are never written to the line.
    # Confirm whether per-line amounts are intentionally ignored.
    line_results = {lr['line_id']: lr for lr in tax_result.get('lines', []) if lr.get('line_id')}

    for line in self.invoice_line_ids.filtered(lambda l: l.display_type == 'product'):
        lr = line_results.get(line.id)
        if lr and external_tax:
            # Ensure the external tax is applied to the line
            if external_tax not in line.tax_ids:
                line.tax_ids = [Command.link(external_tax.id)]

    # Store external tax metadata
    self.write({
        'fusion_is_tax_computed_externally': True,
        'fusion_tax_provider_id': provider.id,
        'fusion_external_doc_code': doc_code,
        'fusion_external_tax_amount': total_tax,
        'fusion_external_tax_date': fields.Datetime.now(),
    })

    _logger.info(
        "External tax applied: move=%s doc_code=%s total_tax=%s",
        self.name, doc_code, total_tax,
    )
def _get_or_create_external_tax_record(self, provider):
    """Find or create a placeholder ``account.tax`` for external tax lines.

    The placeholder tax record allows the externally computed amount to be
    recorded in the standard tax line infrastructure without conflicting
    with manually configured taxes.

    :param provider: Active ``fusion.external.tax.provider`` record.
    :returns: ``account.tax`` record or ``False``.
    """
    company = self.company_id
    # The placeholder is registered under a deterministic XML ID so it is
    # created at most once per (provider, company) pair.
    tax_xmlid = f"fusion_accounting.external_tax_{provider.code}_{company.id}"
    existing = self.env.ref(tax_xmlid, raise_if_not_found=False)
    if existing:
        return existing

    # Find a suitable tax account (default tax payable)
    tax_account = (
        company.account_sale_tax_id.invoice_repartition_line_ids
        .filtered(lambda rl: rl.repartition_type == 'tax')[:1]
        .account_id
    )
    if not tax_account:
        # Fall back to searching for a tax payable account
        tax_account = self.env['account.account'].search([
            ('company_ids', 'in', company.id),
            ('account_type', '=', 'liability_current'),
        ], limit=1)

    if not tax_account:
        # Without any account we cannot build valid repartition lines;
        # return False and let the caller skip the tax linkage.
        _logger.warning(
            "No tax account found for external tax placeholder (company=%s). "
            "External tax lines may not be properly recorded.",
            company.name,
        )
        return False

    # Create the placeholder tax.  amount_type='fixed' with amount 0.0:
    # the real amount comes from the provider, not from this record.
    tax_vals = {
        'name': f"External Tax ({provider.name})",
        'type_tax_use': 'sale',
        'amount_type': 'fixed',
        'amount': 0.0,
        'company_id': company.id,
        'active': True,
        'invoice_repartition_line_ids': [
            Command.create({'repartition_type': 'base', 'factor_percent': 100.0}),
            Command.create({
                'repartition_type': 'tax',
                'factor_percent': 100.0,
                'account_id': tax_account.id,
            }),
        ],
        'refund_repartition_line_ids': [
            Command.create({'repartition_type': 'base', 'factor_percent': 100.0}),
            Command.create({
                'repartition_type': 'tax',
                'factor_percent': 100.0,
                'account_id': tax_account.id,
            }),
        ],
    }

    new_tax = self.env['account.tax'].create(tax_vals)

    # Register under an XML ID for future lookups (noupdate so module
    # upgrades never overwrite or delete the runtime-created record).
    self.env['ir.model.data'].create({
        'name': f"external_tax_{provider.code}_{company.id}",
        'module': 'fusion_accounting',
        'model': 'account.tax',
        'res_id': new_tax.id,
        'noupdate': True,
    })

    return new_tax
# -------------------------------------------------------------------------
|
||||
# Void External Taxes
|
||||
# -------------------------------------------------------------------------
|
||||
def _void_external_taxes(self):
    """Void previously committed external tax transactions.

    Called when an invoice is reset to draft, ensuring that the tax
    provider marks the corresponding transaction as voided.

    Best-effort: a provider-side failure is logged but does not abort;
    the local external-tax metadata is cleared in either case so the
    invoice can be recomputed later.
    """
    for move in self:
        # Nothing to void without a committed external transaction.
        if not move.fusion_is_tax_computed_externally or not move.fusion_external_doc_code:
            continue

        provider = move.fusion_tax_provider_id
        if not provider:
            _logger.warning(
                "Cannot void external taxes for move %s: no provider linked.",
                move.name,
            )
            continue

        # Map Odoo move types onto the provider's document type vocabulary.
        doc_type_map = {
            'out_invoice': 'SalesInvoice',
            'out_refund': 'ReturnInvoice',
            'in_invoice': 'PurchaseInvoice',
            'in_refund': 'ReturnInvoice',
        }
        doc_type = doc_type_map.get(move.move_type, 'SalesInvoice')

        try:
            provider.void_transaction(move.fusion_external_doc_code, doc_type=doc_type)
            _logger.info(
                "Voided external tax transaction: move=%s doc_code=%s",
                move.name, move.fusion_external_doc_code,
            )
        except UserError as exc:
            # Deliberately non-fatal: log and continue so the reset-to-draft
            # flow is never blocked by the provider being unavailable.
            _logger.warning(
                "Failed to void external tax for move %s: %s",
                move.name, exc,
            )

        # Clear local metadata regardless of the provider outcome.
        move.write({
            'fusion_is_tax_computed_externally': False,
            'fusion_external_doc_code': False,
            'fusion_external_tax_amount': 0.0,
            'fusion_external_tax_date': False,
        })
||||
# -------------------------------------------------------------------------
|
||||
# Post Override
|
||||
# -------------------------------------------------------------------------
|
||||
def _post(self, soft=True):
    """Compute external taxes before the standard posting workflow.

    Invoices with an active external tax provider whose taxes have not
    yet been computed get them calculated through the external service
    prior to validation and posting.

    :param soft: forwarded unchanged to the standard ``_post``.
    :returns: whatever the standard ``_post`` returns.
    """
    eligible_move_types = ('out_invoice', 'out_refund', 'in_invoice', 'in_refund')
    pending = self.filtered(
        lambda m: m.fusion_use_external_tax
        and not m.fusion_is_tax_computed_externally
        and m.move_type in eligible_move_types
    )
    for move in pending:
        move._compute_external_taxes()

    return super()._post(soft=soft)
|
||||
|
||||
def button_draft(self):
    """Void external tax transactions when resetting to draft.

    Only moves whose taxes were actually committed externally are voided;
    the standard reset-to-draft behaviour then proceeds unchanged.
    """
    to_void = self.filtered('fusion_is_tax_computed_externally')
    if to_void:
        to_void._void_external_taxes()
    return super().button_draft()
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Actions
|
||||
# -------------------------------------------------------------------------
|
||||
def action_compute_external_taxes(self):
    """Manual button action to (re-)compute external taxes on the invoice.

    :raises UserError: if any selected invoice is already posted (reported
        for the first posted invoice found).
    :returns: a client notification action confirming the computation.
    """
    posted = self.filtered(lambda m: m.state == 'posted')
    if posted:
        # Same message as before; posted[0] is the first posted move in
        # recordset order, matching the original per-record check.
        raise UserError(_(
            "Cannot recompute taxes on posted invoice %(ref)s. "
            "Reset to draft first.",
            ref=posted[0].name,
        ))

    self._compute_external_taxes()

    notification = {
        'title': _("External Tax Computation"),
        'message': _("Taxes have been computed successfully."),
        'type': 'success',
        'sticky': False,
    }
    return {
        'type': 'ir.actions.client',
        'tag': 'display_notification',
        'params': notification,
    }
|
||||
|
||||
def action_void_external_taxes(self):
    """Manual button action to void external taxes on the invoice.

    :returns: a client notification action confirming the void.
    """
    self._void_external_taxes()

    notification = {
        'title': _("External Tax Void"),
        'message': _("External tax transactions have been voided."),
        'type': 'info',
        'sticky': False,
    }
    return {
        'type': 'ir.actions.client',
        'tag': 'display_notification',
        'params': notification,
    }
|
||||
103
Fusion Accounting/models/account_move_line.py
Normal file
103
Fusion Accounting/models/account_move_line.py
Normal file
@@ -0,0 +1,103 @@
|
||||
# Fusion Accounting - Move Line Extensions
|
||||
# Bank-line exclusion flag, tax-closing safeguards, and report shadowing
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tools import SQL
|
||||
|
||||
|
||||
class FusionAccountMoveLine(models.Model):
    """Extends journal items with a computed bank-line exclusion flag,
    guards against tax manipulation on closing entries, and provides
    utilities for building temporary shadow tables used by analytic
    and budget reports."""

    _name = "account.move.line"
    _inherit = "account.move.line"

    # ---- Fields ----
    # True when the line's account differs from its journal's default
    # account, i.e. it is the counterpart side of a bank-journal entry.
    exclude_bank_lines = fields.Boolean(
        compute='_compute_exclude_bank_lines',
        store=True,
    )

    # ---- Computed ----
    # FIX: the original decorator only declared 'journal_id', but the
    # compute reads account_id and journal_id.default_account_id. Since
    # the field is stored, the value went stale when the line's account
    # (or the journal's default account) changed after creation. Both
    # real triggers are now declared.
    @api.depends('account_id', 'journal_id.default_account_id')
    def _compute_exclude_bank_lines(self):
        """Flag lines whose account differs from their journal's
        default account, used to filter non-bank entries in bank
        journal views."""
        for ml in self:
            ml.exclude_bank_lines = (
                ml.account_id != ml.journal_id.default_account_id
            )

    # ---- Constraints ----
    @api.constrains('tax_ids', 'tax_tag_ids')
    def _check_taxes_on_closing_entries(self):
        """Prevent taxes from being added to tax-closing move lines.

        :raises UserError: if a line of a tax-closing move carries taxes
            or tax tags.
        """
        for ml in self:
            if ml.move_id.tax_closing_report_id and (ml.tax_ids or ml.tax_tag_ids):
                raise UserError(
                    _("Tax lines are not permitted on tax-closing entries.")
                )

    # ---- Tax Computation Override ----
    @api.depends('product_id', 'product_uom_id', 'move_id.tax_closing_report_id')
    def _compute_tax_ids(self):
        """Skip automatic tax computation for lines on tax-closing
        moves, which might otherwise trigger the constraint above.

        Lines of closing moves get ``tax_ids = False``; all other lines
        follow the standard computation.
        """
        non_closing_lines = self.filtered(
            lambda ln: not ln.move_id.tax_closing_report_id
        )
        (self - non_closing_lines).tax_ids = False
        super(FusionAccountMoveLine, non_closing_lines)._compute_tax_ids()

    # ---- Report Shadow Table Utility ----
    @api.model
    def _prepare_aml_shadowing_for_report(self, change_equivalence_dict):
        """Build SQL fragments for creating a temporary table that
        mirrors ``account_move_line`` but substitutes selected columns
        with alternative expressions (e.g. analytic or budget data).

        Columns not listed in ``change_equivalence_dict`` are emitted as
        typed NULLs so the shadow table keeps the original schema.

        :param change_equivalence_dict:
            Mapping ``{field_name: sql_expression}`` where each value
            replaces the corresponding column in the shadow table.
        :returns:
            A tuple ``(insert_columns, select_expressions)`` of SQL
            objects suitable for ``INSERT INTO ... SELECT ...``.
        """
        field_metadata = self.env['account.move.line'].fields_get()
        # Intersect DB columns with ORM fields: only real, ORM-known
        # columns are mirrored.
        self.env.cr.execute(
            "SELECT column_name FROM information_schema.columns "
            "WHERE table_name='account_move_line'"
        )
        db_columns = {
            row[0] for row in self.env.cr.fetchall() if row[0] in field_metadata
        }

        select_parts = []
        for col_name in db_columns:
            if col_name in change_equivalence_dict:
                # Substituted column: alias the replacement expression.
                select_parts.append(SQL(
                    "%(src)s AS %(alias)s",
                    src=change_equivalence_dict[col_name],
                    alias=SQL('"account_move_line.%s"', SQL(col_name)),
                ))
            else:
                # Untouched column: emit a NULL cast to the column's
                # PostgreSQL type (translated fields are stored as jsonb).
                col_meta = field_metadata[col_name]
                if col_meta.get("translate"):
                    pg_type = SQL('jsonb')
                else:
                    pg_type = SQL(
                        self.env['account.move.line']._fields[col_name].column_type[0]
                    )
                select_parts.append(SQL(
                    "CAST(NULL AS %(pg_type)s) AS %(alias)s",
                    pg_type=pg_type,
                    alias=SQL('"account_move_line.%s"', SQL(col_name)),
                ))

        insert_cols = SQL(', ').join(SQL.identifier(c) for c in db_columns)
        select_clause = SQL(', ').join(select_parts)
        return insert_cols, select_clause
|
||||
@@ -0,0 +1,379 @@
|
||||
# Fusion Accounting - Multicurrency Revaluation Report Handler
|
||||
# Computes unrealised FX gains/losses and provides an adjustment wizard
|
||||
|
||||
from itertools import chain
|
||||
|
||||
from odoo import models, fields, api, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tools import float_is_zero, SQL
|
||||
|
||||
|
||||
class FusionMulticurrencyRevaluationHandler(models.AbstractModel):
    """Manages unrealised gains and losses arising from fluctuating
    exchange rates. Presents balances at both historical and current
    rates and offers an adjustment-entry wizard."""

    _name = 'account.multicurrency.revaluation.report.handler'
    _inherit = 'account.report.custom.handler'
    _description = 'Multicurrency Revaluation Report Custom Handler'

    # ---- Display Configuration ----
    def _get_custom_display_config(self):
        """Return the client-side components/templates rendering this report."""
        return {
            'components': {
                'AccountReportFilters': 'fusion_accounting.MulticurrencyRevaluationReportFilters',
            },
            'templates': {
                'AccountReportLineName': 'fusion_accounting.MulticurrencyRevaluationReportLineName',
            },
        }

    # ---- Options ----
    def _custom_options_initializer(self, report, options, previous_options):
        """Populate ``options`` with per-currency rates and report flags.

        Adds ``currency_rates`` (rates relative to the company currency,
        preserving user-overridden rates carried in ``previous_options``),
        ``company_currency``, ``custom_rate``, ``multi_currency`` and the
        adjustment-entry button.

        :raises UserError: if fewer than two currencies are active, or a
            zero rate is supplied.
        """
        super()._custom_options_initializer(report, options, previous_options=previous_options)

        active_currencies = self.env['res.currency'].search([('active', '=', True)])
        if len(active_currencies) < 2:
            raise UserError(_("At least two active currencies are required for this report."))

        fx_rates = active_currencies._get_rates(
            self.env.company, options.get('date', {}).get('date_to'),
        )
        # Normalise all rates so the company currency has rate 1.0.
        base_rate = fx_rates[self.env.company.currency_id.id]
        for cid in fx_rates:
            fx_rates[cid] /= base_rate

        options['currency_rates'] = {
            str(cur.id): {
                'currency_id': cur.id,
                'currency_name': cur.name,
                'currency_main': self.env.company.currency_id.name,
                # Prefer a user-entered rate from the previous options;
                # fall back to the rate at the report closing date.
                'rate': (
                    fx_rates[cur.id]
                    if not previous_options.get('currency_rates', {}).get(str(cur.id), {}).get('rate')
                    else float(previous_options['currency_rates'][str(cur.id)]['rate'])
                ),
            }
            for cur in active_currencies
        }

        for cr in options['currency_rates'].values():
            if cr['rate'] == 0:
                raise UserError(_("Currency rate cannot be zero."))

        # The company currency itself is never revalued; keep it aside.
        options['company_currency'] = options['currency_rates'].pop(
            str(self.env.company.currency_id.id),
        )
        # True when any displayed rate differs from the fetched rate,
        # i.e. the user typed a custom one (compared to 20 digits).
        options['custom_rate'] = any(
            not float_is_zero(cr['rate'] - fx_rates[cr['currency_id']], 20)
            for cr in options['currency_rates'].values()
        )
        options['multi_currency'] = True
        options['buttons'].append({
            'name': _('Adjustment Entry'),
            'sequence': 30,
            'action': 'action_multi_currency_revaluation_open_revaluation_wizard',
            'always_show': True,
        })

    # ---- Warnings ----
    def _customize_warnings(self, report, options, all_column_groups_expression_totals, warnings):
        """Attach banner warnings for multi-company mode and custom rates."""
        if len(self.env.companies) > 1:
            warnings['fusion_accounting.multi_currency_revaluation_report_warning_multicompany'] = {
                'alert_type': 'warning',
            }
        if options['custom_rate']:
            warnings['fusion_accounting.multi_currency_revaluation_report_warning_custom_rate'] = {
                'alert_type': 'warning',
            }

    # ---- Post-Processing ----
    def _custom_line_postprocessor(self, report, options, lines):
        """Drop childless section headers and decorate remaining lines.

        * Skips the "to adjust" header when immediately followed by the
          "excluded" header (it has no children), and a trailing,
          childless "excluded" header.
        * Appends the applied rate to currency line names.
        * Marks account lines belonging to the "to adjust" section.
        """
        adj_line_id = self.env.ref('fusion_accounting.multicurrency_revaluation_to_adjust').id
        excl_line_id = self.env.ref('fusion_accounting.multicurrency_revaluation_excluded').id

        processed = []
        for idx, ln in enumerate(lines):
            model_name, model_id = report._get_model_info_from_id(ln['id'])

            # NOTE(review): if the "to adjust" header happens to be the
            # very last line, ``lines[idx + 1]`` raises IndexError —
            # presumably the report always emits the "excluded" header
            # after it; confirm.
            if model_name == 'account.report.line' and (
                (model_id == adj_line_id
                    and report._get_model_info_from_id(lines[idx + 1]['id']) == ('account.report.line', excl_line_id))
                or (model_id == excl_line_id and idx == len(lines) - 1)
            ):
                continue

            elif model_name == 'res.currency':
                rate_val = float(options['currency_rates'][str(model_id)]['rate'])
                ln['name'] = '{fc} (1 {mc} = {r:.6} {fc})'.format(
                    fc=ln['name'],
                    mc=self.env.company.currency_id.display_name,
                    r=rate_val,
                )

            elif model_name == 'account.account':
                # NOTE(review): this compares an account.account res id
                # against an account.report.line id (adj_line_id); it
                # looks like it should inspect the parent report line
                # instead — verify 'is_included_line' consumers.
                ln['is_included_line'] = (
                    report._get_res_id_from_line_id(ln['id'], 'account.account') == adj_line_id
                )

            ln['cur_revaluation_line_model'] = model_name
            processed.append(ln)

        return processed

    def _custom_groupby_line_completer(self, report, options, line_dict):
        """Render currency group lines permanently unfolded."""
        info = report._get_model_info_from_id(line_dict['id'])
        if info[0] == 'res.currency':
            line_dict['unfolded'] = True
            line_dict['unfoldable'] = False

    # ---- Actions ----
    def action_multi_currency_revaluation_open_revaluation_wizard(self, options):
        """Open the adjustment-entry wizard with the report options in context."""
        wiz_view = self.env.ref(
            'fusion_accounting.view_account_multicurrency_revaluation_wizard', False,
        )
        return {
            'name': _("Make Adjustment Entry"),
            'type': 'ir.actions.act_window',
            'res_model': 'account.multicurrency.revaluation.wizard',
            'view_mode': 'form',
            'view_id': wiz_view.id,
            'views': [(wiz_view.id, 'form')],
            # NOTE(review): 'multi' carries the *string* 'True', not a
            # boolean — confirm downstream consumers expect that.
            'multi': 'True',
            'target': 'new',
            'context': {
                **self.env.context,
                'multicurrency_revaluation_report_options': options,
            },
        }

    def action_multi_currency_revaluation_open_general_ledger(self, options, params):
        """Open the general ledger pre-unfolded on the clicked account."""
        report = self.env['account.report'].browse(options['report_id'])
        acct_id = report._get_res_id_from_line_id(params['line_id'], 'account.account')
        acct_line_id = report._get_generic_line_id('account.account', acct_id)
        gl_options = self.env.ref('fusion_accounting.general_ledger_report').get_options(options)
        gl_options['unfolded_lines'] = [acct_line_id]

        gl_action = self.env['ir.actions.actions']._for_xml_id(
            'fusion_accounting.action_account_report_general_ledger',
        )
        gl_action['params'] = {
            'options': gl_options,
            'ignore_session': True,
        }
        return gl_action

    def action_multi_currency_revaluation_toggle_provision(self, options, params):
        """Toggle inclusion/exclusion of an account from the provision."""
        id_map = self.env['account.report']._get_res_ids_from_line_id(
            params['line_id'], ['res.currency', 'account.account'],
        )
        acct = self.env['account.account'].browse(id_map['account.account'])
        cur = self.env['res.currency'].browse(id_map['res.currency'])
        if cur in acct.exclude_provision_currency_ids:
            acct.exclude_provision_currency_ids -= cur
        else:
            acct.exclude_provision_currency_ids += cur
        # Full client reload so both report sections are rebuilt.
        return {'type': 'ir.actions.client', 'tag': 'reload'}

    def action_multi_currency_revaluation_open_currency_rates(self, options, params=None):
        """Open the rate list of the currency behind the clicked line."""
        cur_id = self.env['account.report']._get_res_id_from_line_id(
            params['line_id'], 'res.currency',
        )
        return {
            'type': 'ir.actions.act_window',
            'name': _('Currency Rates (%s)', self.env['res.currency'].browse(cur_id).display_name),
            'views': [(False, 'list')],
            'res_model': 'res.currency.rate',
            'context': {**self.env.context, 'default_currency_id': cur_id, 'active_id': cur_id},
            'domain': [('currency_id', '=', cur_id)],
        }

    # ---- Custom Engines ----
    def _report_custom_engine_multi_currency_revaluation_to_adjust(
        self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None,
    ):
        """Engine for the "to adjust" section (accounts not excluded)."""
        return self._revaluation_custom_lines(
            options, 'to_adjust', current_groupby, next_groupby, offset=offset, limit=limit,
        )

    def _report_custom_engine_multi_currency_revaluation_excluded(
        self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None,
    ):
        """Engine for the "excluded" section (accounts opted out)."""
        return self._revaluation_custom_lines(
            options, 'excluded', current_groupby, next_groupby, offset=offset, limit=limit,
        )

    def _revaluation_custom_lines(self, options, line_code, current_groupby, next_groupby, offset=0, limit=None):
        """Compute revaluation figures grouped by ``current_groupby``.

        The query has two UNIONed branches: partially reconciled lines
        (residual balances net of matched partials) and never-reconciled
        lines, both restricted to foreign-currency balance-sheet accounts
        and re-expressed at the rates in ``options['currency_rates']``.

        :param line_code: 'to_adjust' or 'excluded' — selects whether the
            account/currency pair must be absent from or present in the
            exclusion table.
        :returns: an all-None placeholder dict when there is no groupby,
            otherwise a list of ``(grouping_key, values)`` tuples.
        """
        def _build_result(report_obj, qr):
            # Currency-specific amounts are only meaningful when the
            # group aggregates a single currency.
            return {
                'balance_currency': qr['balance_currency'] if len(qr['currency_id']) == 1 else None,
                'currency_id': qr['currency_id'][0] if len(qr['currency_id']) == 1 else None,
                'balance_operation': qr['balance_operation'],
                'balance_current': qr['balance_current'],
                'adjustment': qr['adjustment'],
                'has_sublines': qr['aml_count'] > 0,
            }

        report = self.env['account.report'].browse(options['report_id'])
        report._check_groupby_fields(
            (next_groupby.split(',') if next_groupby else [])
            + ([current_groupby] if current_groupby else []),
        )

        if not current_groupby:
            return {
                'balance_currency': None, 'currency_id': None,
                'balance_operation': None, 'balance_current': None,
                'adjustment': None, 'has_sublines': False,
            }

        # Inline VALUES table of (currency_id, rate); parameters are bound
        # through the SQL wrapper, not string-formatted.
        rate_values_sql = "(VALUES {})".format(
            ', '.join("(%s, %s)" for _ in options['currency_rates']),
        )
        rate_params = list(chain.from_iterable(
            (cr['currency_id'], cr['rate']) for cr in options['currency_rates'].values()
        ))
        custom_rate_table = SQL(rate_values_sql, *rate_params)
        report_date = options['date']['date_to']

        # Exclude moves that ARE exchange-difference entries themselves.
        no_exchange_clause = SQL(
            """
            NOT EXISTS (
                SELECT 1
                FROM account_partial_reconcile pr
                WHERE pr.exchange_move_id = account_move_line.move_id
                AND pr.max_date <= %s
            )
            """,
            report_date,
        )

        qry = report._get_report_query(options, 'strict_range')
        tail = report._get_engine_query_tail(offset, limit)

        provision_test = 'NOT EXISTS' if line_code == 'to_adjust' else 'EXISTS'

        # current_groupby was validated by _check_groupby_fields above, so
        # interpolating it into the SQL text is safe here.
        groupby_col = f"account_move_line.{current_groupby}" if current_groupby else ''
        groupby_select = f"{groupby_col} AS grouping_key," if current_groupby else ''

        full_sql = SQL(
            """
            WITH custom_currency_table(currency_id, rate) AS (%(rate_table)s)
            SELECT
                subquery.grouping_key,
                ARRAY_AGG(DISTINCT(subquery.currency_id)) AS currency_id,
                SUM(subquery.balance_currency) AS balance_currency,
                SUM(subquery.balance_operation) AS balance_operation,
                SUM(subquery.balance_current) AS balance_current,
                SUM(subquery.adjustment) AS adjustment,
                COUNT(subquery.aml_id) AS aml_count
            FROM (
                SELECT
                    """ + groupby_select + """
                    ROUND(account_move_line.balance - SUM(ara.amount_debit) + SUM(ara.amount_credit), aml_comp_currency.decimal_places) AS balance_operation,
                    ROUND(account_move_line.amount_currency - SUM(ara.amount_debit_currency) + SUM(ara.amount_credit_currency), aml_currency.decimal_places) AS balance_currency,
                    ROUND(account_move_line.amount_currency - SUM(ara.amount_debit_currency) + SUM(ara.amount_credit_currency), aml_currency.decimal_places) / custom_currency_table.rate AS balance_current,
                    (
                        ROUND(account_move_line.amount_currency - SUM(ara.amount_debit_currency) + SUM(ara.amount_credit_currency), aml_currency.decimal_places) / custom_currency_table.rate
                        - ROUND(account_move_line.balance - SUM(ara.amount_debit) + SUM(ara.amount_credit), aml_comp_currency.decimal_places)
                    ) AS adjustment,
                    account_move_line.currency_id AS currency_id,
                    account_move_line.id AS aml_id
                FROM %(from_refs)s,
                    account_account AS account,
                    res_currency AS aml_currency,
                    res_currency AS aml_comp_currency,
                    custom_currency_table,
                    LATERAL (
                        SELECT COALESCE(SUM(part.amount), 0.0) AS amount_debit,
                               ROUND(SUM(part.debit_amount_currency), curr.decimal_places) AS amount_debit_currency,
                               0.0 AS amount_credit, 0.0 AS amount_credit_currency,
                               account_move_line.currency_id AS currency_id,
                               account_move_line.id AS aml_id
                        FROM account_partial_reconcile part
                        JOIN res_currency curr ON curr.id = part.debit_currency_id
                        WHERE account_move_line.id = part.debit_move_id AND part.max_date <= %(dt)s
                        GROUP BY aml_id, curr.decimal_places
                        UNION
                        SELECT 0.0 AS amount_debit, 0.0 AS amount_debit_currency,
                               COALESCE(SUM(part.amount), 0.0) AS amount_credit,
                               ROUND(SUM(part.credit_amount_currency), curr.decimal_places) AS amount_credit_currency,
                               account_move_line.currency_id AS currency_id,
                               account_move_line.id AS aml_id
                        FROM account_partial_reconcile part
                        JOIN res_currency curr ON curr.id = part.credit_currency_id
                        WHERE account_move_line.id = part.credit_move_id AND part.max_date <= %(dt)s
                        GROUP BY aml_id, curr.decimal_places
                    ) AS ara
                WHERE %(where)s
                    AND account_move_line.account_id = account.id
                    AND account_move_line.currency_id = aml_currency.id
                    AND account_move_line.company_currency_id = aml_comp_currency.id
                    AND account_move_line.currency_id = custom_currency_table.currency_id
                    AND account.account_type NOT IN ('income', 'income_other', 'expense', 'expense_depreciation', 'expense_direct_cost', 'off_balance')
                    AND (
                        account.currency_id != account_move_line.company_currency_id
                        OR (account.account_type IN ('asset_receivable', 'liability_payable')
                            AND account_move_line.currency_id != account_move_line.company_currency_id)
                    )
                    AND """ + provision_test + """ (
                        SELECT 1 FROM account_account_exclude_res_currency_provision
                        WHERE account_account_id = account_move_line.account_id
                        AND res_currency_id = account_move_line.currency_id
                    )
                    AND (%(no_exch)s)
                GROUP BY account_move_line.id, aml_comp_currency.decimal_places, aml_currency.decimal_places, custom_currency_table.rate
                HAVING ROUND(account_move_line.balance - SUM(ara.amount_debit) + SUM(ara.amount_credit), aml_comp_currency.decimal_places) != 0
                    OR ROUND(account_move_line.amount_currency - SUM(ara.amount_debit_currency) + SUM(ara.amount_credit_currency), aml_currency.decimal_places) != 0.0

                UNION

                SELECT
                    """ + groupby_select + """
                    account_move_line.balance AS balance_operation,
                    account_move_line.amount_currency AS balance_currency,
                    account_move_line.amount_currency / custom_currency_table.rate AS balance_current,
                    account_move_line.amount_currency / custom_currency_table.rate - account_move_line.balance AS adjustment,
                    account_move_line.currency_id AS currency_id,
                    account_move_line.id AS aml_id
                FROM %(from_refs)s
                JOIN account_account account ON account_move_line.account_id = account.id
                JOIN custom_currency_table ON custom_currency_table.currency_id = account_move_line.currency_id
                WHERE %(where)s
                    AND account.account_type NOT IN ('income', 'income_other', 'expense', 'expense_depreciation', 'expense_direct_cost', 'off_balance')
                    AND (
                        account.currency_id != account_move_line.company_currency_id
                        OR (account.account_type IN ('asset_receivable', 'liability_payable')
                            AND account_move_line.currency_id != account_move_line.company_currency_id)
                    )
                    AND """ + provision_test + """ (
                        SELECT 1 FROM account_account_exclude_res_currency_provision
                        WHERE account_account_id = account_id
                        AND res_currency_id = account_move_line.currency_id
                    )
                    AND (%(no_exch)s)
                    AND NOT EXISTS (
                        SELECT 1 FROM account_partial_reconcile part
                        WHERE (part.debit_move_id = account_move_line.id OR part.credit_move_id = account_move_line.id)
                        AND part.max_date <= %(dt)s
                    )
                    AND (account_move_line.balance != 0.0 OR account_move_line.amount_currency != 0.0)
            ) subquery
            GROUP BY grouping_key
            ORDER BY grouping_key
            %(tail)s
            """,
            rate_table=custom_rate_table,
            from_refs=qry.from_clause,
            dt=report_date,
            where=qry.where_clause,
            no_exch=no_exchange_clause,
            tail=tail,
        )
        self.env.cr.execute(full_sql)
        rows = self.env.cr.dictfetchall()

        # NOTE(review): unreachable — the groupby-less case already
        # returned above; ``_build_result(report, {})`` would also
        # KeyError on an empty dict. Kept for parity with the original.
        if not current_groupby:
            return _build_result(report, rows[0] if rows else {})
        return [(r['grouping_key'], _build_result(report, r)) for r in rows]
|
||||
799
Fusion Accounting/models/account_partner_ledger.py
Normal file
799
Fusion Accounting/models/account_partner_ledger.py
Normal file
@@ -0,0 +1,799 @@
|
||||
# Fusion Accounting - Partner Ledger Report Handler
|
||||
|
||||
from odoo import api, models, _, fields
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.osv import expression
|
||||
from odoo.tools import SQL
|
||||
|
||||
from datetime import timedelta
|
||||
from collections import defaultdict
|
||||
|
||||
|
||||
class PartnerLedgerCustomHandler(models.AbstractModel):
|
||||
"""Generates the Partner Ledger report.
|
||||
|
||||
Shows journal items grouped by partner, with initial balances and
|
||||
running totals. Also handles indirectly-linked entries (items
|
||||
without a partner that were reconciled with a partner's entry).
|
||||
"""
|
||||
|
||||
_name = 'account.partner.ledger.report.handler'
|
||||
_inherit = 'account.report.custom.handler'
|
||||
_description = 'Partner Ledger Custom Handler'
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Display
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_custom_display_config(self):
    """Return the client-side components/templates rendering this report."""
    components = {
        'AccountReportLineCell': 'fusion_accounting.PartnerLedgerLineCell',
    }
    templates = {
        'AccountReportFilters': 'fusion_accounting.PartnerLedgerFilters',
        'AccountReportLineName': 'fusion_accounting.PartnerLedgerLineName',
    }
    return {
        'css_custom_class': 'partner_ledger',
        'components': components,
        'templates': templates,
    }
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Dynamic lines
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
    """Build all partner lines and a final total line.

    :returns: a list of ``(sequence, line_dict)`` tuples — the regrouped
        partner lines followed by the grand-total line.
    """
    partner_lines, column_totals = self._assemble_partner_rows(report, options)

    regrouped = report._regroup_lines_by_name_prefix(
        options, partner_lines,
        '_report_expand_unfoldable_line_partner_ledger_prefix_group', 0,
    )
    result = [(0, line) for line in regrouped]
    result.append((0, self._build_total_line(options, column_totals)))
    return result
|
||||
|
||||
def _assemble_partner_rows(self, report, options, depth_shift=0):
    """Query partner sums and return ``(lines, totals_by_column_group)``.

    :param depth_shift: nesting level applied to each generated line
        (used when expanding prefix groups).
    """
    totals = {
        group_key: {'debit': 0.0, 'credit': 0.0, 'amount': 0.0, 'balance': 0.0}
        for group_key in options['column_groups']
    }
    lines = []

    search_text = options.get('filter_search_bar', '')
    unknown_matches_filter = search_text.lower() in self._unknown_partner_label().lower()

    for partner, sums_by_group in self._query_partner_sums(options):
        # In print mode a search filter hides the Unknown Partner row
        # unless the filter text matches its label.
        if (
            options['export_mode'] == 'print'
            and search_text
            and not partner
            and not unknown_matches_filter
        ):
            continue

        column_values = defaultdict(dict)
        for group_key in options['column_groups']:
            group_sums = sums_by_group.get(group_key, {})
            for field_name in ('debit', 'credit', 'amount', 'balance'):
                value = group_sums.get(field_name, 0.0)
                column_values[group_key][field_name] = value
                totals[group_key][field_name] += value

        lines.append(
            self._build_partner_line(options, partner, column_values, depth_shift=depth_shift)
        )

    return lines, totals
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Prefix-group expand
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _report_expand_unfoldable_line_partner_ledger_prefix_group(
    self, line_dict_id, groupby, options, progress, offset, unfold_all_batch_data=None,
):
    """Expand a prefix-group line into the matching partner sub-lines.

    :param line_dict_id: generic id of the prefix-group line to expand.
    :param groupby: unused; kept for the generic expand-function signature.
    :param options: report options dict.
    :param progress: carried-over progress value (unused).
    :param offset: pagination offset (unused; everything is returned).
    :param unfold_all_batch_data: optional prefetched batch data (unused).
    :returns: dict with the expanded ``lines``, the ``offset_increment``
        and ``has_more`` (always False — no pagination here).
    """
    report = self.env['account.report'].browse(options['report_id'])
    prefix = report._get_prefix_groups_matched_prefix_from_line_id(line_dict_id)

    # Partners whose name starts with the prefix; the "Unknown Partner"
    # pseudo-row matches on its (translated) label instead.
    prefix_filter = [('partner_id.name', '=ilike', f'{prefix}%')]
    if self._unknown_partner_label().upper().startswith(prefix):
        prefix_filter = expression.OR([prefix_filter, [('partner_id', '=', None)]])

    filtered_opts = {
        **options,
        'forced_domain': options.get('forced_domain', []) + prefix_filter,
    }
    nest_level = len(prefix) * 2
    # FIX: the discarded totals were previously bound to `_`, shadowing
    # the odoo translation function imported at module level within this
    # scope; use a throwaway name instead.
    child_lines, _unused_totals = self._assemble_partner_rows(
        report, filtered_opts, depth_shift=nest_level,
    )

    for child in child_lines:
        child['id'] = report._build_subline_id(line_dict_id, child['id'])
        child['parent_id'] = line_dict_id

    grouped_output = report._regroup_lines_by_name_prefix(
        options, child_lines,
        '_report_expand_unfoldable_line_partner_ledger_prefix_group',
        nest_level,
        matched_prefix=prefix,
        parent_line_dict_id=line_dict_id,
    )
    return {
        'lines': grouped_output,
        'offset_increment': len(grouped_output),
        'has_more': False,
    }
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Options
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _custom_options_initializer(self, report, options, previous_options):
    """Extend report options for the partner ledger.

    * Excludes pure exchange-difference lines from the companies'
      currency-exchange journals.
    * In print mode with a search filter, also matches partners linked
      indirectly through reconciliation.
    * Toggles column visibility per the ``hide_account`` /
      ``hide_debit_credit`` flags and adds the "Send" button.
    """
    super()._custom_options_initializer(report, options, previous_options=previous_options)

    extra_domain = []
    company_ids = report.get_report_company_ids(options)
    fx_journals = self.env['res.company'].browse(company_ids).mapped('currency_exchange_journal_id')
    if fx_journals:
        # NOT (credit == 0 AND debit == 0 AND amount_currency != 0 AND
        # journal in fx journals): filters out zero-balance FX lines.
        extra_domain += [
            '!', '&', '&', '&',
            ('credit', '=', 0.0),
            ('debit', '=', 0.0),
            ('amount_currency', '!=', 0.0),
            ('journal_id', 'in', fx_journals.ids),
        ]

    if options['export_mode'] == 'print' and options.get('filter_search_bar'):
        # Match the partner directly, or through either side of a
        # reconciliation (partnerless lines matched to a partner's line).
        extra_domain += [
            '|', ('matched_debit_ids.debit_move_id.partner_id.name', 'ilike', options['filter_search_bar']),
            '|', ('matched_credit_ids.credit_move_id.partner_id.name', 'ilike', options['filter_search_bar']),
            ('partner_id.name', 'ilike', options['filter_search_bar']),
        ]

    options['forced_domain'] = options.get('forced_domain', []) + extra_domain

    if self.env.user.has_group('base.group_multi_currency'):
        options['multi_currency'] = True

    # Column visibility: hiding debit/credit shows the single 'amount'
    # column instead, and vice versa.
    hidden_cols = []
    options['hide_account'] = (previous_options or {}).get('hide_account', False)
    if options['hide_account']:
        hidden_cols += ['journal_code', 'account_code', 'matching_number']

    options['hide_debit_credit'] = (previous_options or {}).get('hide_debit_credit', False)
    if options['hide_debit_credit']:
        hidden_cols += ['debit', 'credit']
    else:
        hidden_cols += ['amount']

    options['columns'] = [c for c in options['columns'] if c['expression_label'] not in hidden_cols]

    options['buttons'].append({
        'name': _('Send'),
        'action': 'action_send_statements',
        'sequence': 90,
        'always_show': True,
    })
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Batch unfold
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _custom_unfold_all_batch_data_generator(self, report, options, lines_to_expand_by_function):
    """Prefetch initial balances and move-line data for "unfold all".

    Collects every partner id about to be expanded — from direct partner
    lines (``None`` standing for the Unknown Partner pseudo-row) and from
    prefix-group lines — then fetches their data in one batch.

    :returns: dict with ``initial_balances`` and ``aml_values`` keyed for
        the expand functions; both empty when nothing is expanded.
    """
    partner_ids = []

    for ld in lines_to_expand_by_function.get('_report_expand_unfoldable_line_partner_ledger', []):
        # Last component of the parsed line id identifies the record.
        markup, mdl, mid = self.env['account.report']._parse_line_id(ld['id'])[-1]
        if mdl == 'res.partner':
            partner_ids.append(mid)
        elif markup == 'no_partner':
            # The "Unknown Partner" row is keyed by None.
            partner_ids.append(None)

    # Prefix-group expansion
    unknown_label_upper = self._unknown_partner_label().upper()
    prefix_domains = []
    for ld in lines_to_expand_by_function.get(
        '_report_expand_unfoldable_line_partner_ledger_prefix_group', [],
    ):
        pfx = report._get_prefix_groups_matched_prefix_from_line_id(ld['id'])
        prefix_domains.append([('name', '=ilike', f'{pfx}%')])
        if unknown_label_upper.startswith(pfx):
            partner_ids.append(None)

    if prefix_domains:
        # Include archived partners too: old entries may reference them.
        partner_ids += self.env['res.partner'].with_context(active_test=False).search(
            expression.OR(prefix_domains)
        ).ids

    return {
        'initial_balances': self._fetch_initial_balances(partner_ids, options) if partner_ids else {},
        'aml_values': self._fetch_aml_data(options, partner_ids) if partner_ids else {},
    }
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Actions
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_report_send_recipients(self, options):
    """Return the res.partner recordset that should receive the report.

    Partner ids preset in the options take precedence; when none are
    given, every partner appearing in the grouped ledger sums is used.
    """
    recipient_ids = options.get('partner_ids', [])
    if not recipient_ids:
        # Derive recipients from the per-partner sums query instead.
        self.env.cr.execute(self._build_partner_sums_sql(options))
        recipient_ids = [
            row['groupby']
            for row in self.env.cr.dictfetchall()
            if row['groupby']
        ]
    return self.env['res.partner'].browse(recipient_ids)
|
||||
|
||||
def action_send_statements(self, options):
    """Open the wizard that mails partner ledgers to their partners.

    :param options: current report options, forwarded to the wizard.
    :return: an ``ir.actions.act_window`` dict opening account.report.send.
    """
    # raise_if_not_found=False: the template is optional.
    template = self.env.ref('fusion_accounting.email_template_customer_statement', False)
    wizard_context = {
        'default_mail_template_id': template.id if template else False,
        'default_report_options': options,
    }
    return {
        'name': _("Send Partner Ledgers"),
        'type': 'ir.actions.act_window',
        'res_model': 'account.report.send',
        'views': [[False, 'form']],
        'target': 'new',
        'context': wizard_context,
    }
|
||||
|
||||
@api.model
def action_open_partner(self, options, params):
    """Open the form view of the partner behind a report line.

    :param options: report options (unused beyond the standard signature).
    :param params: dict carrying the clicked line's ``id``.
    :return: an ``ir.actions.act_window`` dict targeting res.partner.
    """
    _model, partner_id = self.env['account.report']._get_model_info_from_id(params['id'])
    action = {
        'type': 'ir.actions.act_window',
        'res_model': 'res.partner',
        'res_id': partner_id,
        'view_mode': 'form',
        'views': [[False, 'form']],
        'target': 'current',
    }
    return action
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# SQL helpers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _query_partner_sums(self, options):
    """Fetch sums grouped by partner and apply corrections for
    partnerless entries reconciled with partnered entries.

    :param options: report options dict.
    :return: list of ``(partner, {column_group_key: {field: value}})``
        tuples; ``partner`` is ``None`` for the Unknown Partner bucket.
    """
    comp_cur = self.env.company.currency_id

    def _assign_if_nonzero(row):
        # Only keep rows where at least one monetary field is non-zero in
        # company currency, so empty partners produce no report line.
        check_fields = ['balance', 'debit', 'credit', 'amount']
        if any(not comp_cur.is_zero(row[f]) for f in check_fields):
            by_partner.setdefault(row['groupby'], defaultdict(lambda: defaultdict(float)))
            for f in check_fields:
                by_partner[row['groupby']][row['column_group_key']][f] += row[f]

    by_partner = {}

    self.env.cr.execute(self._build_partner_sums_sql(options))
    for rec in self.env.cr.dictfetchall():
        _assign_if_nonzero(rec)

    # Correction: partnerless entries reconciled with a partner
    self.env.cr.execute(self._build_partnerless_correction_sql(options))
    correction_sums = {f: {cg: 0 for cg in options['column_groups']} for f in ('debit', 'credit', 'amount', 'balance')}

    for rec in self.env.cr.dictfetchall():
        # Accumulate the full correction per column group; it is applied
        # (mirrored) to the Unknown Partner bucket below.
        for f in ('debit', 'credit', 'amount', 'balance'):
            correction_sums[f][rec['column_group_key']] += rec[f]

        # Only credit partners that already have a line of their own.
        if rec['groupby'] in by_partner:
            _assign_if_nonzero(rec)

    # Adjust the Unknown Partner bucket
    if None in by_partner:
        for cg in options['column_groups']:
            # Debit/credit are swapped and balance negated: the amounts
            # moved onto real partners are removed from the unknown bucket.
            by_partner[None][cg]['debit'] += correction_sums['credit'][cg]
            by_partner[None][cg]['credit'] += correction_sums['debit'][cg]
            by_partner[None][cg]['amount'] += correction_sums['amount'][cg]
            by_partner[None][cg]['balance'] -= correction_sums['balance'][cg]

    if by_partner:
        # active_test=False: archived partners keep their ledger lines.
        partners = self.env['res.partner'].with_context(active_test=False).search_fetch(
            [('id', 'in', list(by_partner.keys()))],
            ["id", "name", "trust", "company_registry", "vat"],
        )
    else:
        partners = self.env['res.partner']

    if None in by_partner:
        # Append the Unknown Partner pseudo-record last.
        partners = list(partners) + [None]

    return [(p, by_partner[p.id if p else None]) for p in partners]
|
||||
|
||||
def _build_partner_sums_sql(self, options) -> SQL:
    """SQL that sums debit / credit / balance by partner.

    One sub-query is built per column group (each with its own filtered
    options) and the pieces are combined with UNION ALL.
    """
    parts = []
    report = self.env.ref('fusion_accounting.partner_ledger_report')

    for cg, cg_opts in report._split_options_per_column_group(options).items():
        # 'from_beginning': cumulative figures up to the period end.
        qry = report._get_report_query(cg_opts, 'from_beginning')
        parts.append(SQL(
            """
            SELECT
                account_move_line.partner_id AS groupby,
                %(cg)s AS column_group_key,
                SUM(%(dr)s) AS debit,
                SUM(%(cr)s) AS credit,
                SUM(%(bal)s) AS amount,
                SUM(%(bal)s) AS balance
            FROM %(tbl)s
            %(fx)s
            WHERE %(cond)s
            GROUP BY account_move_line.partner_id
            """,
            cg=cg,
            # Each monetary expression is converted via the currency table.
            dr=report._currency_table_apply_rate(SQL("account_move_line.debit")),
            cr=report._currency_table_apply_rate(SQL("account_move_line.credit")),
            bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
            tbl=qry.from_clause,
            fx=report._currency_table_aml_join(cg_opts),
            cond=qry.where_clause,
        ))

    return SQL(' UNION ALL ').join(parts)
|
||||
|
||||
def _fetch_initial_balances(self, partner_ids, options):
    """Compute opening balances for each partner before date_from.

    :param partner_ids: list of partner ids (may include ``None`` for the
        Unknown Partner bucket).
    :param options: report options dict.
    :return: ``{partner_id: {column_group_key: row_dict_or_empty}}``.
    """
    parts = []
    report = self.env.ref('fusion_accounting.partner_ledger_report')

    for cg, cg_opts in report._split_options_per_column_group(options).items():
        # Shift the date range so it ends the day before date_from.
        init_opts = self._derive_initial_balance_options(cg_opts)
        qry = report._get_report_query(
            init_opts, 'from_beginning', domain=[('partner_id', 'in', partner_ids)],
        )
        parts.append(SQL(
            """
            SELECT
                account_move_line.partner_id,
                %(cg)s AS column_group_key,
                SUM(%(dr)s) AS debit,
                SUM(%(cr)s) AS credit,
                SUM(%(bal)s) AS amount,
                SUM(%(bal)s) AS balance
            FROM %(tbl)s
            %(fx)s
            WHERE %(cond)s
            GROUP BY account_move_line.partner_id
            """,
            cg=cg,
            dr=report._currency_table_apply_rate(SQL("account_move_line.debit")),
            cr=report._currency_table_apply_rate(SQL("account_move_line.credit")),
            bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
            tbl=qry.from_clause,
            fx=report._currency_table_aml_join(cg_opts),
            cond=qry.where_clause,
        ))

    self.env.cr.execute(SQL(" UNION ALL ").join(parts))

    # Pre-seed every requested partner so callers can index safely even
    # when a partner has no opening movement.
    init_map = {
        pid: {cg: {} for cg in options['column_groups']}
        for pid in partner_ids
    }
    for row in self.env.cr.dictfetchall():
        init_map[row['partner_id']][row['column_group_key']] = row

    return init_map
|
||||
|
||||
def _derive_initial_balance_options(self, options):
    """Return a copy of ``options`` whose date range ends the day
    before the original ``date_from`` (used for opening balances)."""
    day_before = fields.Date.from_string(options['date']['date_from']) - timedelta(days=1)
    initial_date = {
        **options['date'],
        'date_from': False,
        'date_to': fields.Date.to_string(day_before),
    }
    return {**options, 'date': initial_date}
|
||||
|
||||
def _build_partnerless_correction_sql(self, options):
    """SQL for partnerless lines reconciled with a partner's line.

    For each partial reconciliation linking a line without a partner to a
    line that has one, the partial amount is re-attributed to the linked
    partner (signs derived from the linked line's balance).
    """
    parts = []
    report = self.env.ref('fusion_accounting.partner_ledger_report')

    for cg, cg_opts in report._split_options_per_column_group(options).items():
        qry = report._get_report_query(cg_opts, 'from_beginning')
        parts.append(SQL(
            """
            SELECT
                %(cg)s AS column_group_key,
                linked.partner_id AS groupby,
                SUM(%(dr)s) AS debit,
                SUM(%(cr)s) AS credit,
                SUM(%(bal)s) AS amount,
                SUM(%(bal)s) AS balance
            FROM %(tbl)s
            JOIN account_partial_reconcile pr
                ON account_move_line.id = pr.debit_move_id
                OR account_move_line.id = pr.credit_move_id
            JOIN account_move_line linked ON
                (linked.id = pr.debit_move_id OR linked.id = pr.credit_move_id)
                AND linked.partner_id IS NOT NULL
            %(fx)s
            WHERE pr.max_date <= %(dt_to)s AND %(cond)s
                AND account_move_line.partner_id IS NULL
            GROUP BY linked.partner_id
            """,
            cg=cg,
            # Sign conventions mirror the linked line's balance: a positive
            # linked balance means the partnerless side is a credit.
            dr=report._currency_table_apply_rate(SQL(
                "CASE WHEN linked.balance > 0 THEN 0 ELSE pr.amount END"
            )),
            cr=report._currency_table_apply_rate(SQL(
                "CASE WHEN linked.balance < 0 THEN 0 ELSE pr.amount END"
            )),
            bal=report._currency_table_apply_rate(SQL(
                "-SIGN(linked.balance) * pr.amount"
            )),
            tbl=qry.from_clause,
            # Currency join is applied to the linked (partnered) line.
            fx=report._currency_table_aml_join(cg_opts, aml_alias=SQL("linked")),
            dt_to=cg_opts['date']['date_to'],
            cond=qry.where_clause,
        ))

    return SQL(" UNION ALL ").join(parts)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# AML detail data
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_additional_column_aml_values(self):
    """Hook for other modules to inject extra SELECT fields into the
    partner-ledger AML query.

    :return: an SQL fragment spliced into the SELECT list before the
        invoice_date column; empty by default. A non-empty override must
        end with a trailing comma to keep the query valid.
    """
    return SQL()
|
||||
|
||||
def _fetch_aml_data(self, options, partner_ids, offset=0, limit=None):
    """Load move lines for the given partners.

    Returns ``{partner_id: [row, ...]}`` including both directly- and
    indirectly-linked entries.

    :param options: report options dict.
    :param partner_ids: partner ids to load (``None`` = Unknown Partner).
    :param offset: SQL OFFSET applied to the combined query (paging).
    :param limit: SQL LIMIT applied to the combined query (paging).
    """
    container = {pid: [] for pid in partner_ids}

    real_ids = [x for x in partner_ids if x]
    direct_clauses = []
    indirect_clause = SQL('linked_aml.partner_id IS NOT NULL')

    if None in partner_ids:
        direct_clauses.append(SQL('account_move_line.partner_id IS NULL'))
    if real_ids:
        direct_clauses.append(SQL('account_move_line.partner_id IN %s', tuple(real_ids)))
        indirect_clause = SQL('linked_aml.partner_id IN %s', tuple(real_ids))

    direct_filter = SQL('(%s)', SQL(' OR ').join(direct_clauses))

    fragments = []
    jnl_name = self.env['account.journal']._field_to_sql('journal', 'name')
    report = self.env.ref('fusion_accounting.partner_ledger_report')
    extra_cols = self._get_additional_column_aml_values()

    for cg, grp_opts in report._split_options_per_column_group(options).items():
        # 'strict_range': only lines inside the selected period.
        qry = report._get_report_query(grp_opts, 'strict_range')
        acct_a = qry.left_join(
            lhs_alias='account_move_line', lhs_column='account_id',
            rhs_table='account_account', rhs_column='id', link='account_id',
        )
        code_f = self.env['account.account']._field_to_sql(acct_a, 'code', qry)
        name_f = self.env['account.account']._field_to_sql(acct_a, 'name')

        # Direct entries
        fragments.append(SQL(
            '''
            SELECT
                account_move_line.id,
                account_move_line.date_maturity,
                account_move_line.name,
                account_move_line.ref,
                account_move_line.company_id,
                account_move_line.account_id,
                account_move_line.payment_id,
                account_move_line.partner_id,
                account_move_line.currency_id,
                account_move_line.amount_currency,
                account_move_line.matching_number,
                %(extra_cols)s
                COALESCE(account_move_line.invoice_date, account_move_line.date) AS invoice_date,
                %(dr)s AS debit,
                %(cr)s AS credit,
                %(bal)s AS amount,
                %(bal)s AS balance,
                mv.name AS move_name,
                mv.move_type AS move_type,
                %(code_f)s AS account_code,
                %(name_f)s AS account_name,
                journal.code AS journal_code,
                %(jnl_name)s AS journal_name,
                %(cg)s AS column_group_key,
                'directly_linked_aml' AS key,
                0 AS partial_id
            FROM %(tbl)s
            JOIN account_move mv ON mv.id = account_move_line.move_id
            %(fx)s
            LEFT JOIN res_company co ON co.id = account_move_line.company_id
            LEFT JOIN res_partner prt ON prt.id = account_move_line.partner_id
            LEFT JOIN account_journal journal ON journal.id = account_move_line.journal_id
            WHERE %(cond)s AND %(direct_filter)s
            ORDER BY account_move_line.date, account_move_line.id
            ''',
            extra_cols=extra_cols,
            dr=report._currency_table_apply_rate(SQL("account_move_line.debit")),
            cr=report._currency_table_apply_rate(SQL("account_move_line.credit")),
            bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
            code_f=code_f,
            name_f=name_f,
            jnl_name=jnl_name,
            cg=cg,
            tbl=qry.from_clause,
            fx=report._currency_table_aml_join(grp_opts),
            cond=qry.where_clause,
            direct_filter=direct_filter,
        ))

        # Indirect (reconciled with a partner but no partner on the line)
        fragments.append(SQL(
            '''
            SELECT
                account_move_line.id,
                account_move_line.date_maturity,
                account_move_line.name,
                account_move_line.ref,
                account_move_line.company_id,
                account_move_line.account_id,
                account_move_line.payment_id,
                linked_aml.partner_id,
                account_move_line.currency_id,
                account_move_line.amount_currency,
                account_move_line.matching_number,
                %(extra_cols)s
                COALESCE(account_move_line.invoice_date, account_move_line.date) AS invoice_date,
                %(dr)s AS debit,
                %(cr)s AS credit,
                %(bal)s AS amount,
                %(bal)s AS balance,
                mv.name AS move_name,
                mv.move_type AS move_type,
                %(code_f)s AS account_code,
                %(name_f)s AS account_name,
                journal.code AS journal_code,
                %(jnl_name)s AS journal_name,
                %(cg)s AS column_group_key,
                'indirectly_linked_aml' AS key,
                pr.id AS partial_id
            FROM %(tbl)s
            %(fx)s,
            account_partial_reconcile pr,
            account_move mv,
            account_move_line linked_aml,
            account_journal journal
            WHERE
                (account_move_line.id = pr.debit_move_id OR account_move_line.id = pr.credit_move_id)
                AND account_move_line.partner_id IS NULL
                AND mv.id = account_move_line.move_id
                AND (linked_aml.id = pr.debit_move_id OR linked_aml.id = pr.credit_move_id)
                AND %(indirect_clause)s
                AND journal.id = account_move_line.journal_id
                AND %(acct_alias)s.id = account_move_line.account_id
                AND %(cond)s
                AND pr.max_date BETWEEN %(dt_from)s AND %(dt_to)s
            ORDER BY account_move_line.date, account_move_line.id
            ''',
            extra_cols=extra_cols,
            # Same sign convention as _build_partnerless_correction_sql.
            dr=report._currency_table_apply_rate(SQL(
                "CASE WHEN linked_aml.balance > 0 THEN 0 ELSE pr.amount END"
            )),
            cr=report._currency_table_apply_rate(SQL(
                "CASE WHEN linked_aml.balance < 0 THEN 0 ELSE pr.amount END"
            )),
            bal=report._currency_table_apply_rate(SQL("-SIGN(linked_aml.balance) * pr.amount")),
            code_f=code_f,
            name_f=name_f,
            jnl_name=jnl_name,
            cg=cg,
            tbl=qry.from_clause,
            fx=report._currency_table_aml_join(grp_opts),
            indirect_clause=indirect_clause,
            acct_alias=SQL.identifier(acct_a),
            cond=qry.where_clause,
            dt_from=grp_opts['date']['date_from'],
            dt_to=grp_opts['date']['date_to'],
        ))

    # Each fragment is parenthesized so its ORDER BY stays legal in the
    # UNION ALL; paging is applied to the combined result.
    combined = SQL(" UNION ALL ").join(SQL("(%s)", f) for f in fragments)
    if offset:
        combined = SQL('%s OFFSET %s ', combined, offset)
    if limit:
        combined = SQL('%s LIMIT %s ', combined, limit)

    self.env.cr.execute(combined)
    for row in self.env.cr.dictfetchall():
        if row['key'] == 'indirectly_linked_aml':
            # An indirect row feeds the linked partner and, mirrored
            # (debit/credit swapped, balance negated), the Unknown bucket.
            if row['partner_id'] in container:
                container[row['partner_id']].append(row)
            if None in container:
                container[None].append({
                    **row,
                    'debit': row['credit'],
                    'credit': row['debit'],
                    'amount': row['credit'] - row['debit'],
                    'balance': -row['balance'],
                })
        else:
            container[row['partner_id']].append(row)

    return container
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Expand handler
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _report_expand_unfoldable_line_partner_ledger(
    self, line_dict_id, groupby, options, progress, offset, unfold_all_batch_data=None,
):
    """Expand one partner line into its opening-balance line plus AML rows.

    :param line_dict_id: generic id of the partner line being unfolded.
    :param groupby: unused here (standard expand signature).
    :param options: report options dict.
    :param progress: running balance per column group carried from the
        previous page.
    :param offset: number of AML rows already rendered (paging).
    :param unfold_all_batch_data: optional prefetched data from
        ``_custom_unfold_all_batch_data_generator``.
    :return: dict with 'lines', 'offset_increment', 'has_more', 'progress'.
    """
    def _running_balance(line_dict):
        # Extract the 'balance' column per column group so the cumulative
        # balance can be threaded into the next line / next page.
        return {
            c['column_group_key']: lc.get('no_format', 0)
            for c, lc in zip(options['columns'], line_dict['columns'])
            if c['expression_label'] == 'balance'
        }

    report = self.env.ref('fusion_accounting.partner_ledger_report')
    _, mdl, rec_id = report._parse_line_id(line_dict_id)[-1]

    if mdl != 'res.partner':
        raise UserError(_("Invalid line ID for partner ledger expansion: %s", line_dict_id))

    # Count prefix-group nesting levels
    nesting = sum(
        1 for mk, _, _ in report._parse_line_id(line_dict_id)
        if isinstance(mk, dict) and 'groupby_prefix_group' in mk
    )
    depth = nesting * 2
    lines = []

    # Opening balance (only on the first page)
    if offset == 0:
        if unfold_all_batch_data:
            init_by_cg = unfold_all_batch_data['initial_balances'][rec_id]
        else:
            init_by_cg = self._fetch_initial_balances([rec_id], options)[rec_id]

        opening_line = report._get_partner_and_general_ledger_initial_balance_line(
            options, line_dict_id, init_by_cg, level_shift=depth,
        )
        if opening_line:
            lines.append(opening_line)
            progress = _running_balance(opening_line)

    # Fetch one extra row so we can tell whether a "load more" page exists.
    page_size = report.load_more_limit + 1 if report.load_more_limit and options['export_mode'] != 'print' else None

    if unfold_all_batch_data:
        aml_rows = unfold_all_batch_data['aml_values'][rec_id]
    else:
        aml_rows = self._fetch_aml_data(options, [rec_id], offset=offset, limit=page_size)[rec_id]

    overflow = False
    count = 0
    running = progress
    for row in aml_rows:
        if options['export_mode'] != 'print' and report.load_more_limit and count >= report.load_more_limit:
            # The extra fetched row proves there is more data; stop here.
            overflow = True
            break
        new_line = self._build_aml_line(options, row, line_dict_id, running, depth_shift=depth)
        lines.append(new_line)
        running = _running_balance(new_line)
        count += 1

    return {
        'lines': lines,
        'offset_increment': count,
        'has_more': overflow,
        'progress': running,
    }
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Line builders
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _build_partner_line(self, options, partner, col_data, depth_shift=0):
    """Produce the foldable partner-level line.

    :param options: report options dict.
    :param partner: res.partner record, or ``None`` for Unknown Partner.
    :param col_data: ``{column_group_key: {field: value}}`` sums.
    :param depth_shift: extra indent levels from prefix-group nesting.
    :return: report line dict.
    """
    comp_cur = self.env.company.currency_id
    first_vals = next(iter(col_data.values()))
    # NOTE(review): `or` short-circuits — when debit is truthy, credit is
    # never inspected here; the column loop below widens the check anyway.
    can_unfold = not comp_cur.is_zero(first_vals.get('debit', 0) or first_vals.get('credit', 0))

    cols = []
    report = self.env['account.report'].browse(options['report_id'])
    for col_def in options['columns']:
        expr = col_def['expression_label']
        raw = col_data[col_def['column_group_key']].get(expr)
        # The line stays unfoldable if any monetary column is non-zero.
        can_unfold = can_unfold or (
            expr in ('debit', 'credit', 'amount') and not comp_cur.is_zero(raw)
        )
        cols.append(report._build_column_dict(raw, col_def, options=options))

    if partner:
        lid = report._get_generic_line_id('res.partner', partner.id)
    else:
        # Pseudo-line grouping move lines without a partner.
        lid = report._get_generic_line_id('res.partner', None, markup='no_partner')

    return {
        'id': lid,
        # Partner names are truncated to 128 chars for display.
        'name': (partner.name or '')[:128] if partner else self._unknown_partner_label(),
        'columns': cols,
        'level': 1 + depth_shift,
        'trust': partner.trust if partner else None,
        'unfoldable': can_unfold,
        'unfolded': lid in options['unfolded_lines'] or options['unfold_all'],
        'expand_function': '_report_expand_unfoldable_line_partner_ledger',
    }
|
||||
|
||||
def _unknown_partner_label(self):
    """Label shown for the bucket of move lines that have no partner."""
    return _('Unknown Partner')
|
||||
|
||||
@api.model
def _format_aml_name(self, line_name, move_ref, move_name=None):
    """Format the display name for a move line.

    Thin delegate to the standard account.move.line helper so report
    rows and journal items render identically.
    """
    return self.env['account.move.line']._format_aml_name(line_name, move_ref, move_name=move_name)
|
||||
|
||||
def _build_aml_line(self, options, row, parent_id, running_bal, depth_shift=0):
    """Build a single move-line row under its partner.

    :param options: report options dict.
    :param row: dict row produced by ``_fetch_aml_data``.
    :param parent_id: generic line id of the parent partner line.
    :param running_bal: cumulative balance per column group so far.
    :param depth_shift: extra indent levels from prefix-group nesting.
    :raises UserError: if a report column has no matching key in ``row``.
    :return: report line dict.
    """
    # Payments open the payment form from the caret; other lines open the AML.
    caret = 'account.payment' if row['payment_id'] else 'account.move.line'

    cols = []
    report = self.env['account.report'].browse(options['report_id'])
    for col_def in options['columns']:
        expr = col_def['expression_label']

        if expr not in row:
            raise UserError(_("Column '%s' is unavailable for this report.", expr))

        # A row only carries data for its own column group; other groups
        # render as empty cells.
        raw = row[expr] if col_def['column_group_key'] == row['column_group_key'] else None
        if raw is None:
            cols.append(report._build_column_dict(None, None))
            continue

        cur = False
        if expr == 'balance':
            # Accumulate onto the running balance for this column group.
            raw += running_bal[col_def['column_group_key']]
        if expr == 'amount_currency':
            cur = self.env['res.currency'].browse(row['currency_id'])
            if cur == self.env.company.currency_id:
                # Hide the foreign amount when it equals company currency.
                raw = ''
        cols.append(report._build_column_dict(raw, col_def, options=options, currency=cur))

    return {
        'id': report._get_generic_line_id(
            'account.move.line', row['id'],
            # partial_id disambiguates indirect rows sharing the same AML.
            parent_line_id=parent_id, markup=row['partial_id'],
        ),
        'parent_id': parent_id,
        'name': self._format_aml_name(row['name'], row['ref'], row['move_name']),
        'columns': cols,
        'caret_options': caret,
        'level': 3 + depth_shift,
    }
|
||||
|
||||
def _build_total_line(self, options, col_totals):
    """Build the report-wide 'Total' line from per-column-group sums.

    :param options: report options dict.
    :param col_totals: ``{column_group_key: {expression_label: value}}``.
    :return: report line dict for the total row.
    """
    report = self.env['account.report'].browse(options['report_id'])
    columns = [
        report._build_column_dict(
            col_totals[col_def['column_group_key']].get(col_def['expression_label']),
            col_def,
            options=options,
        )
        for col_def in options['columns']
    ]
    return {
        'id': report._get_generic_line_id(None, None, markup='total'),
        'name': _('Total'),
        'level': 1,
        'columns': columns,
    }
|
||||
|
||||
def open_journal_items(self, options, params):
    """Open the journal items list filtered for this report line.

    Delegates to the generic report action, forcing the partner-grouped
    tree view and disabling the default group-by-account filter.

    :param options: report options dict.
    :param params: action params forwarded to the generic handler.
    :return: the (possibly mutated) action dict.
    """
    params['view_ref'] = 'account.view_move_line_tree_grouped_partner'
    report = self.env['account.report'].browse(options['report_id'])
    action = report.open_journal_items(options=options, params=params)
    # Bug fix: `action.get('context', {}).update(...)` updated a throwaway
    # dict whenever the action carried no context, silently dropping the
    # search default. setdefault attaches the dict to the action first.
    action.setdefault('context', {}).update({'search_default_group_by_account': 0})
    return action
|
||||
50
Fusion Accounting/models/account_payment.py
Normal file
50
Fusion Accounting/models/account_payment.py
Normal file
@@ -0,0 +1,50 @@
|
||||
# Fusion Accounting - Payment Extensions
|
||||
# Manual reconciliation and statement-line navigation for payments
|
||||
|
||||
import ast
|
||||
|
||||
from odoo import models, _
|
||||
|
||||
|
||||
class FusionAccountPayment(models.Model):
    """Augments payments with manual reconciliation and the ability
    to navigate to matched bank statement lines."""

    _inherit = "account.payment"

    def action_open_manual_reconciliation_widget(self):
        """Open the manual reconciliation view, optionally filtered
        by the payment's partner and partner type.

        :return: An action dictionary for the reconciliation list.
        """
        self.ensure_one()
        act_vals = self.env['ir.actions.act_window']._for_xml_id(
            'fusion_accounting.action_move_line_posted_unreconciled'
        )
        if self.partner_id:
            # The stored action context is a string expression; evaluate it
            # before injecting the partner-specific search defaults.
            ctx = ast.literal_eval(act_vals.get('context', '{}'))
            ctx['search_default_partner_id'] = self.partner_id.id
            if self.partner_type == 'customer':
                ctx['search_default_trade_receivable'] = 1
            elif self.partner_type == 'supplier':
                ctx['search_default_trade_payable'] = 1
            act_vals['context'] = ctx
        return act_vals

    def button_open_statement_lines(self):
        """Navigate to the bank reconciliation widget showing only
        the statement lines that are reconciled with this payment.

        :return: An action dictionary opening the reconciliation widget.
        """
        self.ensure_one()
        matched_lines = self.reconciled_statement_line_ids
        return self.env['account.bank.statement.line']._action_open_bank_reconciliation_widget(
            extra_domain=[('id', 'in', matched_lines.ids)],
            default_context={
                'create': False,
                # Bug fix: the original read matched_lines.ids[-1]
                # unconditionally and raised IndexError when the payment
                # had no reconciled statement lines.
                'default_st_line_id': matched_lines.ids[-1] if matched_lines else False,
            },
            name=_("Matched Transactions"),
        )
|
||||
667
Fusion Accounting/models/account_reconcile_model.py
Normal file
667
Fusion Accounting/models/account_reconcile_model.py
Normal file
@@ -0,0 +1,667 @@
|
||||
# Fusion Accounting - Reconciliation Model Extensions
|
||||
# Extends the bank reconciliation rule engine with invoice matching,
|
||||
# write-off suggestion, and partner mapping capabilities.
|
||||
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import fields, models, tools
|
||||
from odoo.tools import SQL
|
||||
|
||||
|
||||
class AccountReconcileModel(models.Model):
|
||||
_inherit = 'account.reconcile.model'
|
||||
|
||||
# =====================================================================
|
||||
# Bank Widget Line Application
|
||||
# =====================================================================
|
||||
|
||||
def _apply_lines_for_bank_widget(self, residual_amount_currency, partner, st_line):
    """Generate journal item values by applying this model's lines
    to a bank statement line in the reconciliation widget.

    :param residual_amount_currency: Outstanding balance in statement currency.
    :param partner: Partner associated with the statement line.
    :param st_line: The bank statement line being processed.
    :return: List of dicts representing proposed journal items.
    """
    self.ensure_one()
    # Resolve the currency the widget works in, by priority.
    widget_currency = (
        st_line.foreign_currency_id
        or st_line.journal_id.currency_id
        or st_line.company_currency_id
    )

    open_balance = residual_amount_currency
    suggestions = []
    for rule_line in self.line_ids:
        vals = rule_line._apply_in_bank_widget(open_balance, partner, st_line)
        amount = vals['amount_currency']
        if widget_currency.is_zero(amount):
            # A zero-amount line adds nothing to the proposal; skip it.
            continue
        suggestions.append(vals)
        open_balance -= amount

    return suggestions
|
||||
|
||||
# =====================================================================
|
||||
# Rule Evaluation Engine
|
||||
# =====================================================================
|
||||
|
||||
def _apply_rules(self, st_line, partner):
    """Evaluate all non-button reconciliation models against a
    statement line and return the first matching result.

    :param st_line: Bank statement line to match.
    :param partner: Partner context for matching.
    :return: Dict with match result and model, or empty dict.
    """
    # 'writeoff_button' models are applied manually by the user,
    # never by the automatic rule engine.
    eligible_models = self.filtered(
        lambda m: m.rule_type != 'writeoff_button'
    ).sorted()

    for model in eligible_models:
        if not model._is_applicable_for(st_line, partner):
            continue

        if model.rule_type == 'invoice_matching':
            # Candidate finders are grouped by priority; lower value first.
            priority_map = model._get_invoice_matching_rules_map()
            for priority in sorted(priority_map.keys()):
                for matching_fn in priority_map[priority]:
                    candidates = matching_fn(st_line, partner)
                    if not candidates:
                        continue

                    if candidates.get('amls'):
                        # Candidate move lines found: validate and score them.
                        match_result = model._get_invoice_matching_amls_result(
                            st_line, partner, candidates,
                        )
                        if match_result:
                            return {**match_result, 'model': model}
                    else:
                        # Non-AML result (e.g. a direct status): return as-is.
                        return {**candidates, 'model': model}

        elif model.rule_type == 'writeoff_suggestion':
            # A write-off model matches unconditionally once applicable.
            return {
                'model': model,
                'status': 'write_off',
                'auto_reconcile': model.auto_reconcile,
            }

    return {}
|
||||
|
||||
# =====================================================================
|
||||
# Applicability Checks
|
||||
# =====================================================================
|
||||
|
||||
def _is_applicable_for(self, st_line, partner):
    """Determine whether this model's filters allow it to be
    used for the given statement line and partner combination.

    :param st_line: bank statement line under evaluation.
    :param partner: partner context (may be falsy).
    :return: True if the model's criteria are satisfied.
    """
    self.ensure_one()

    # --- Amount and journal filters ---
    if self.match_journal_ids and st_line.move_id.journal_id not in self.match_journal_ids:
        return False
    if self.match_nature == 'amount_received' and st_line.amount < 0:
        return False
    if self.match_nature == 'amount_paid' and st_line.amount > 0:
        return False

    abs_amount = abs(st_line.amount)
    if self.match_amount == 'lower' and abs_amount >= self.match_amount_max:
        return False
    if self.match_amount == 'greater' and abs_amount <= self.match_amount_min:
        return False
    if self.match_amount == 'between' and not (self.match_amount_min <= abs_amount <= self.match_amount_max):
        return False

    # --- Partner filters ---
    if self.match_partner:
        if not partner:
            return False
        if self.match_partner_ids and partner not in self.match_partner_ids:
            return False
        if (
            self.match_partner_category_ids
            and not (partner.category_id & self.match_partner_category_ids)
        ):
            return False

    # --- Text matching on label, note, and transaction type ---
    # Each tuple: (record holding the value, rule field suffix, record field).
    text_checks = [
        (st_line, 'label', 'payment_ref'),
        (st_line.move_id, 'note', 'narration'),
        (st_line, 'transaction_type', 'transaction_type'),
    ]
    for record, rule_suffix, record_field in text_checks:
        # Comparison is case-insensitive: both sides are lower-cased.
        configured_term = (self[f'match_{rule_suffix}_param'] or '').lower()
        actual_value = (record[record_field] or '').lower()
        match_mode = self[f'match_{rule_suffix}']

        if match_mode == 'contains' and configured_term not in actual_value:
            return False
        if match_mode == 'not_contains' and configured_term in actual_value:
            return False
        # NOTE(review): re.match anchors at the start of the value only;
        # confirm "match" (not "search") semantics are intended here.
        if match_mode == 'match_regex' and not re.match(configured_term, actual_value):
            return False

    return True
|
||||
|
||||
# =====================================================================
|
||||
# Invoice Matching Domain & Token Extraction
|
||||
# =====================================================================
|
||||
|
||||
def _get_invoice_matching_amls_domain(self, st_line, partner):
    """Build the search domain for candidate journal items
    when performing invoice matching."""
    domain = st_line._get_default_amls_matching_domain()

    # Candidates must carry the same sign as the statement line.
    if st_line.amount > 0.0:
        domain.append(('balance', '>', 0.0))
    else:
        domain.append(('balance', '<', 0.0))

    if self.match_same_currency:
        currency = st_line.foreign_currency_id or st_line.currency_id
        domain.append(('currency_id', '=', currency.id))

    if partner:
        domain.append(('partner_id', '=', partner.id))

    if self.past_months_limit:
        # Ignore entries older than the configured look-back window.
        oldest_date = fields.Date.context_today(self) - relativedelta(months=self.past_months_limit)
        domain.append(('date', '>=', fields.Date.to_string(oldest_date)))

    return domain
|
||||
|
||||
def _get_st_line_text_values_for_matching(self, st_line):
|
||||
"""Gather text fields from the statement line that are enabled
|
||||
for matching in this model's configuration.
|
||||
|
||||
:return: List of text values to search against.
|
||||
"""
|
||||
self.ensure_one()
|
||||
enabled_fields = []
|
||||
if self.match_text_location_label:
|
||||
enabled_fields.append('payment_ref')
|
||||
if self.match_text_location_note:
|
||||
enabled_fields.append('narration')
|
||||
if self.match_text_location_reference:
|
||||
enabled_fields.append('ref')
|
||||
return st_line._get_st_line_strings_for_matching(
|
||||
allowed_fields=enabled_fields,
|
||||
)
|
||||
|
||||
def _get_invoice_matching_st_line_tokens(self, st_line):
|
||||
"""Parse statement line text into tokens for matching.
|
||||
|
||||
:return: Tuple of (numerical_tokens, exact_tokens, text_tokens).
|
||||
"""
|
||||
raw_texts = self._get_st_line_text_values_for_matching(st_line)
|
||||
min_token_len = 4
|
||||
|
||||
numeric_tokens = []
|
||||
exact_token_set = set()
|
||||
text_tokens = []
|
||||
|
||||
for text_val in raw_texts:
|
||||
words = (text_val or '').split()
|
||||
exact_token_set.add(text_val)
|
||||
exact_token_set.update(
|
||||
w for w in words if len(w) >= min_token_len
|
||||
)
|
||||
|
||||
cleaned_words = [
|
||||
''.join(ch for ch in w if re.match(r'[0-9a-zA-Z\s]', ch))
|
||||
for w in words
|
||||
]
|
||||
|
||||
for cleaned in cleaned_words:
|
||||
if len(cleaned) < min_token_len:
|
||||
continue
|
||||
text_tokens.append(cleaned)
|
||||
|
||||
digits_only = ''.join(ch for ch in cleaned if ch.isdecimal())
|
||||
if len(digits_only) >= min_token_len:
|
||||
numeric_tokens.append(digits_only)
|
||||
|
||||
return numeric_tokens, list(exact_token_set), text_tokens
|
||||
|
||||
# =====================================================================
|
||||
# Candidate Discovery
|
||||
# =====================================================================
|
||||
|
||||
def _get_invoice_matching_amls_candidates(self, st_line, partner):
|
||||
"""Search for matching journal items using token-based and
|
||||
amount-based strategies.
|
||||
|
||||
:return: Dict with 'amls' recordset and 'allow_auto_reconcile' flag,
|
||||
or None if no candidates found.
|
||||
"""
|
||||
|
||||
def _build_sort_clause(tbl_prefix=SQL()):
|
||||
"""Build ORDER BY clause based on matching_order preference."""
|
||||
sort_dir = SQL(' DESC') if self.matching_order == 'new_first' else SQL(' ASC')
|
||||
return SQL(", ").join(
|
||||
SQL("%s%s%s", tbl_prefix, SQL(col), sort_dir)
|
||||
for col in ('date_maturity', 'date', 'id')
|
||||
)
|
||||
|
||||
assert self.rule_type == 'invoice_matching'
|
||||
self.env['account.move'].flush_model()
|
||||
self.env['account.move.line'].flush_model()
|
||||
|
||||
search_domain = self._get_invoice_matching_amls_domain(st_line, partner)
|
||||
query = self.env['account.move.line']._where_calc(search_domain)
|
||||
from_clause = query.from_clause
|
||||
where_clause = query.where_clause or SQL("TRUE")
|
||||
|
||||
# Prepare CTE and sub-queries for token matching
|
||||
cte_sql = SQL()
|
||||
token_queries: list[SQL] = []
|
||||
num_tokens, exact_tokens, _txt_tokens = (
|
||||
self._get_invoice_matching_st_line_tokens(st_line)
|
||||
)
|
||||
|
||||
if num_tokens or exact_tokens:
|
||||
cte_sql = SQL('''
|
||||
WITH candidate_lines AS (
|
||||
SELECT
|
||||
account_move_line.id AS aml_id,
|
||||
account_move_line.date AS aml_date,
|
||||
account_move_line.date_maturity AS aml_maturity,
|
||||
account_move_line.name AS aml_name,
|
||||
account_move_line__move_id.name AS move_name,
|
||||
account_move_line__move_id.ref AS move_ref
|
||||
FROM %s
|
||||
JOIN account_move account_move_line__move_id
|
||||
ON account_move_line__move_id.id = account_move_line.move_id
|
||||
WHERE %s
|
||||
)
|
||||
''', from_clause, where_clause)
|
||||
|
||||
# Build sub-queries for numerical token matching
|
||||
if num_tokens:
|
||||
for tbl_alias, col_name in [
|
||||
('account_move_line', 'name'),
|
||||
('account_move_line__move_id', 'name'),
|
||||
('account_move_line__move_id', 'ref'),
|
||||
]:
|
||||
col_ref = SQL("%s_%s", SQL(tbl_alias), SQL(col_name))
|
||||
token_queries.append(SQL(r'''
|
||||
SELECT
|
||||
aml_id AS id,
|
||||
aml_date AS date,
|
||||
aml_maturity AS date_maturity,
|
||||
UNNEST(
|
||||
REGEXP_SPLIT_TO_ARRAY(
|
||||
SUBSTRING(
|
||||
REGEXP_REPLACE(%(col)s, '[^0-9\s]', '', 'g'),
|
||||
'\S(?:.*\S)*'
|
||||
),
|
||||
'\s+'
|
||||
)
|
||||
) AS token
|
||||
FROM candidate_lines
|
||||
WHERE %(col)s IS NOT NULL
|
||||
''', col=col_ref))
|
||||
|
||||
# Build sub-queries for exact token matching
|
||||
if exact_tokens:
|
||||
for tbl_alias, col_name in [
|
||||
('account_move_line', 'name'),
|
||||
('account_move_line__move_id', 'name'),
|
||||
('account_move_line__move_id', 'ref'),
|
||||
]:
|
||||
col_ref = SQL("%s_%s", SQL(tbl_alias), SQL(col_name))
|
||||
token_queries.append(SQL('''
|
||||
SELECT
|
||||
aml_id AS id,
|
||||
aml_date AS date,
|
||||
aml_maturity AS date_maturity,
|
||||
%(col)s AS token
|
||||
FROM candidate_lines
|
||||
WHERE %(col)s != ''
|
||||
''', col=col_ref))
|
||||
|
||||
# Execute token-based search if queries exist
|
||||
if token_queries:
|
||||
sort_clause = _build_sort_clause(prefix=SQL('matched.'))
|
||||
all_tokens = tuple(num_tokens + exact_tokens)
|
||||
found_ids = [
|
||||
row[0] for row in self.env.execute_query(SQL(
|
||||
'''
|
||||
%s
|
||||
SELECT
|
||||
matched.id,
|
||||
COUNT(*) AS match_count
|
||||
FROM (%s) AS matched
|
||||
WHERE matched.token IN %s
|
||||
GROUP BY matched.date_maturity, matched.date, matched.id
|
||||
HAVING COUNT(*) > 0
|
||||
ORDER BY match_count DESC, %s
|
||||
''',
|
||||
cte_sql,
|
||||
SQL(" UNION ALL ").join(token_queries),
|
||||
all_tokens,
|
||||
sort_clause,
|
||||
))
|
||||
]
|
||||
if found_ids:
|
||||
return {
|
||||
'allow_auto_reconcile': True,
|
||||
'amls': self.env['account.move.line'].browse(found_ids),
|
||||
}
|
||||
elif (
|
||||
self.match_text_location_label
|
||||
or self.match_text_location_note
|
||||
or self.match_text_location_reference
|
||||
):
|
||||
# Text location matching was enabled but found nothing - don't fall through
|
||||
return
|
||||
|
||||
# Fallback: match by exact amount when no partner is set
|
||||
if not partner:
|
||||
line_currency = (
|
||||
st_line.foreign_currency_id
|
||||
or st_line.journal_id.currency_id
|
||||
or st_line.company_currency_id
|
||||
)
|
||||
if line_currency == self.company_id.currency_id:
|
||||
amt_col = SQL('amount_residual')
|
||||
else:
|
||||
amt_col = SQL('amount_residual_currency')
|
||||
|
||||
sort_clause = _build_sort_clause(prefix=SQL('account_move_line.'))
|
||||
amount_rows = self.env.execute_query(SQL(
|
||||
'''
|
||||
SELECT account_move_line.id
|
||||
FROM %s
|
||||
WHERE
|
||||
%s
|
||||
AND account_move_line.currency_id = %s
|
||||
AND ROUND(account_move_line.%s, %s) = ROUND(%s, %s)
|
||||
ORDER BY %s
|
||||
''',
|
||||
from_clause,
|
||||
where_clause,
|
||||
line_currency.id,
|
||||
amt_col,
|
||||
line_currency.decimal_places,
|
||||
-st_line.amount_residual,
|
||||
line_currency.decimal_places,
|
||||
sort_clause,
|
||||
))
|
||||
found_lines = self.env['account.move.line'].browse(
|
||||
[r[0] for r in amount_rows],
|
||||
)
|
||||
else:
|
||||
found_lines = self.env['account.move.line'].search(
|
||||
search_domain,
|
||||
order=_build_sort_clause().code,
|
||||
)
|
||||
|
||||
if found_lines:
|
||||
return {
|
||||
'allow_auto_reconcile': False,
|
||||
'amls': found_lines,
|
||||
}
|
||||
|
||||
def _get_invoice_matching_rules_map(self):
|
||||
"""Return the priority-ordered mapping of matching rule functions.
|
||||
Override this in other modules to inject additional matching logic.
|
||||
|
||||
:return: Dict mapping priority (int) to list of callables.
|
||||
"""
|
||||
priority_map = defaultdict(list)
|
||||
priority_map[10].append(self._get_invoice_matching_amls_candidates)
|
||||
return priority_map
|
||||
|
||||
# =====================================================================
|
||||
# Partner Mapping
|
||||
# =====================================================================
|
||||
|
||||
def _get_partner_from_mapping(self, st_line):
|
||||
"""Attempt to identify a partner using the model's regex mappings.
|
||||
|
||||
:param st_line: Bank statement line to analyze.
|
||||
:return: Matched partner recordset (may be empty).
|
||||
"""
|
||||
self.ensure_one()
|
||||
if self.rule_type not in ('invoice_matching', 'writeoff_suggestion'):
|
||||
return self.env['res.partner']
|
||||
|
||||
for mapping in self.partner_mapping_line_ids:
|
||||
# Check payment reference regex
|
||||
ref_ok = True
|
||||
if mapping.payment_ref_regex:
|
||||
ref_ok = bool(
|
||||
re.match(mapping.payment_ref_regex, st_line.payment_ref)
|
||||
if st_line.payment_ref else False
|
||||
)
|
||||
|
||||
# Check narration regex
|
||||
narration_ok = True
|
||||
if mapping.narration_regex:
|
||||
plain_narration = tools.html2plaintext(
|
||||
st_line.narration or '',
|
||||
).rstrip()
|
||||
narration_ok = bool(re.match(
|
||||
mapping.narration_regex,
|
||||
plain_narration,
|
||||
flags=re.DOTALL,
|
||||
))
|
||||
|
||||
if ref_ok and narration_ok:
|
||||
return mapping.partner_id
|
||||
|
||||
return self.env['res.partner']
|
||||
|
||||
# =====================================================================
|
||||
# Match Result Processing
|
||||
# =====================================================================
|
||||
|
||||
    def _get_invoice_matching_amls_result(self, st_line, partner, candidate_vals):
        """Process candidate journal items and determine whether they
        form a valid match for the statement line.

        Candidates are tried in three passes: a perfect early-payment-discount
        match, a standard same-currency batch match, and finally all
        candidates as-is; the surviving selection is then validated against
        the model's tolerance rules via ``_check_rule_propositions``.

        :param st_line: bank statement line being reconciled.
        :param partner: partner the candidates were searched for (unused here
            but kept for the rules-map calling convention).
        :param candidate_vals: dict produced by
            ``_get_invoice_matching_amls_candidates`` with 'amls' and
            'allow_auto_reconcile' keys.
        :return: Dict with matched amls and status flags, or None.
        """

        def _build_result(kept_values, match_status):
            """Construct the result dict from kept candidates and status."""
            # A 'rejected' verdict from the tolerance check discards everything.
            if 'rejected' in match_status:
                return None

            output = {'amls': self.env['account.move.line']}
            for val_entry in kept_values:
                output['amls'] |= val_entry['aml']

            # A write-off is only meaningful if the model defines counterpart lines.
            if 'allow_write_off' in match_status and self.line_ids:
                output['status'] = 'write_off'
            # Auto-reconcile requires agreement of the tolerance check, the
            # candidate search, and the model configuration.
            if (
                'allow_auto_reconcile' in match_status
                and candidate_vals['allow_auto_reconcile']
                and self.auto_reconcile
            ):
                output['auto_reconcile'] = True

            return output

        line_currency = st_line.foreign_currency_id or st_line.currency_id
        # Counterpart amount (signed) taken from the liquidity/counterpart vals.
        line_amount = st_line._prepare_move_line_default_vals()[1]['amount_currency']
        direction = 1 if line_amount > 0.0 else -1

        candidates = candidate_vals['amls']
        standard_values = []
        epd_values = []
        # NOTE: compares the union of candidate currencies against the line
        # currency; only true when ALL candidates share that single currency.
        same_cur = candidates.currency_id == line_currency

        for aml in candidates:
            base_vals = {
                'aml': aml,
                'amount_residual': aml.amount_residual,
                'amount_residual_currency': aml.amount_residual_currency,
            }
            standard_values.append(base_vals)

            # Handle early payment discount eligibility
            payment_term = aml.move_id.invoice_payment_term_id
            last_disc_date = (
                payment_term._get_last_discount_date(aml.move_id.date)
                if payment_term else False
            )
            # EPD applies only to untouched invoices/receipts paid within
            # the discount window, in the statement line's currency.
            if (
                same_cur
                and aml.move_id.move_type in (
                    'out_invoice', 'out_receipt', 'in_invoice', 'in_receipt',
                )
                and not aml.matched_debit_ids
                and not aml.matched_credit_ids
                and last_disc_date
                and st_line.date <= last_disc_date
            ):
                # Convert the discounted amount back to company currency
                # using the aml's own rate.
                rate_factor = (
                    abs(aml.amount_currency) / abs(aml.balance)
                    if aml.balance else 1.0
                )
                epd_values.append({
                    **base_vals,
                    'amount_residual': st_line.company_currency_id.round(
                        aml.discount_amount_currency / rate_factor,
                    ),
                    'amount_residual_currency': aml.discount_amount_currency,
                })
            else:
                epd_values.append(base_vals)

        def _try_batch_match(values_list):
            """Attempt to match items as a batch in same-currency mode."""
            if not same_cur:
                return None, []

            kept = []
            running_total = 0.0

            for vals in values_list:
                # A single aml exactly covering the line beats any batch.
                if line_currency.compare_amounts(
                    line_amount, -vals['amount_residual_currency'],
                ) == 0:
                    return 'perfect', [vals]

                # Keep accumulating while the line is not yet fully consumed.
                if line_currency.compare_amounts(
                    direction * (line_amount + running_total), 0.0,
                ) > 0:
                    kept.append(vals)
                    running_total += vals['amount_residual_currency']

            if line_currency.is_zero(direction * (line_amount + running_total)):
                return 'perfect', kept
            elif kept:
                return 'partial', kept
            return None, []

        # Priority 1: Try early payment discount match (only accept perfect)
        batch_type, kept_list = _try_batch_match(epd_values)
        if batch_type != 'perfect':
            kept_list = []

        # Priority 2: Try standard same-currency match
        if not kept_list:
            _batch_type, kept_list = _try_batch_match(standard_values)

        # Priority 3: Use all candidates as fallback
        if not kept_list:
            kept_list = standard_values

        # Validate the final selection against tolerance rules
        if kept_list:
            rule_status = self._check_rule_propositions(st_line, kept_list)
            output = _build_result(kept_list, rule_status)
            if output:
                return output
|
||||
|
||||
def _check_rule_propositions(self, st_line, amls_values_list):
|
||||
"""Validate the aggregate match against payment tolerance rules.
|
||||
|
||||
:return: Set of status strings indicating the verdict.
|
||||
"""
|
||||
self.ensure_one()
|
||||
|
||||
if not self.allow_payment_tolerance:
|
||||
return {'allow_write_off', 'allow_auto_reconcile'}
|
||||
|
||||
line_currency = st_line.foreign_currency_id or st_line.currency_id
|
||||
line_amt = st_line._prepare_move_line_default_vals()[1]['amount_currency']
|
||||
|
||||
total_candidate_amt = sum(
|
||||
st_line._prepare_counterpart_amounts_using_st_line_rate(
|
||||
v['aml'].currency_id,
|
||||
v['amount_residual'],
|
||||
v['amount_residual_currency'],
|
||||
)['amount_currency']
|
||||
for v in amls_values_list
|
||||
)
|
||||
|
||||
direction = 1 if line_amt > 0.0 else -1
|
||||
post_reco_balance = line_currency.round(
|
||||
direction * (total_candidate_amt + line_amt),
|
||||
)
|
||||
|
||||
# Exact zero balance - perfect match
|
||||
if line_currency.is_zero(post_reco_balance):
|
||||
return {'allow_auto_reconcile'}
|
||||
|
||||
# Payment exceeds invoices - always allow
|
||||
if post_reco_balance > 0.0:
|
||||
return {'allow_auto_reconcile'}
|
||||
|
||||
# Zero tolerance configured - reject
|
||||
if self.payment_tolerance_param == 0:
|
||||
return {'rejected'}
|
||||
|
||||
# Fixed amount tolerance check
|
||||
if (
|
||||
self.payment_tolerance_type == 'fixed_amount'
|
||||
and line_currency.compare_amounts(
|
||||
-post_reco_balance, self.payment_tolerance_param,
|
||||
) <= 0
|
||||
):
|
||||
return {'allow_write_off', 'allow_auto_reconcile'}
|
||||
|
||||
# Percentage tolerance check
|
||||
pct_remaining = abs(post_reco_balance / total_candidate_amt) * 100.0
|
||||
if (
|
||||
self.payment_tolerance_type == 'percentage'
|
||||
and line_currency.compare_amounts(
|
||||
pct_remaining, self.payment_tolerance_param,
|
||||
) <= 0
|
||||
):
|
||||
return {'allow_write_off', 'allow_auto_reconcile'}
|
||||
|
||||
return {'rejected'}
|
||||
|
||||
# =====================================================================
|
||||
# Auto-Reconciliation Cron
|
||||
# =====================================================================
|
||||
|
||||
def run_auto_reconciliation(self):
|
||||
"""Trigger automatic reconciliation for statement lines,
|
||||
with a time limit to prevent long-running operations."""
|
||||
cron_time_limit = tools.config['limit_time_real_cron'] or -1
|
||||
max_seconds = (
|
||||
cron_time_limit if 0 < cron_time_limit < 180 else 180
|
||||
)
|
||||
self.env['account.bank.statement.line']._cron_try_auto_reconcile_statement_lines(
|
||||
limit_time=max_seconds,
|
||||
)
|
||||
220
Fusion Accounting/models/account_reconcile_model_line.py
Normal file
220
Fusion Accounting/models/account_reconcile_model_line.py
Normal file
@@ -0,0 +1,220 @@
|
||||
import re
|
||||
from math import copysign
|
||||
|
||||
from odoo import _, models, Command
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
|
||||
class ReconcileModelLine(models.Model):
    """Extends reconciliation model lines with methods for computing
    journal item values across manual and bank reconciliation contexts."""

    _inherit = 'account.reconcile.model.line'

    # ------------------------------------------------------------------
    # Core helpers
    # ------------------------------------------------------------------

    def _resolve_taxes_for_partner(self, partner):
        """Return the tax recordset that should be applied, taking fiscal
        position mapping into account when a partner is provided.

        :param partner: optional ``res.partner`` used to resolve the
            fiscal position.
        :return: ``account.tax`` recordset (possibly remapped).
        """
        tax_records = self.tax_ids
        if not tax_records or not partner:
            return tax_records
        fpos = self.env['account.fiscal.position']._get_fiscal_position(partner)
        if fpos:
            tax_records = fpos.map_tax(tax_records)
        return tax_records

    def _prepare_aml_vals(self, partner):
        """Build a base dictionary of account.move.line values derived from
        this reconciliation model line.

        Fiscal-position tax remapping is applied automatically when the
        supplied *partner* record has a matching fiscal position.

        Args:
            partner: ``res.partner`` record to attach to the move line.

        Returns:
            ``dict`` suitable for later account.move.line creation.
        """
        self.ensure_one()

        mapped_taxes = self._resolve_taxes_for_partner(partner)

        result_values = {
            'name': self.label,
            'partner_id': partner.id,
            'analytic_distribution': self.analytic_distribution,
            'tax_ids': [Command.set(mapped_taxes.ids)],
            'reconcile_model_id': self.model_id.id,
        }

        # Only force the account when the line actually defines one.
        if self.account_id:
            result_values['account_id'] = self.account_id.id

        return result_values

    # ------------------------------------------------------------------
    # Manual reconciliation
    # ------------------------------------------------------------------

    def _compute_manual_amount(self, remaining_balance, currency):
        """Derive the line amount for manual reconciliation based on the
        configured amount type (percentage or fixed).

        Args:
            remaining_balance: open balance still to be reconciled.
            currency: ``res.currency`` used for rounding.

        Raises:
            UserError: for amount types that are only valid inside the
                bank reconciliation widget (e.g. regex, percentage_st_line).
        """
        if self.amount_type == 'percentage':
            return currency.round(remaining_balance * self.amount / 100.0)

        if self.amount_type == 'fixed':
            # Fixed amounts follow the sign of the open balance.
            direction = 1 if remaining_balance > 0.0 else -1
            return currency.round(self.amount * direction)

        raise UserError(
            _("This reconciliation model cannot be applied in the manual "
              "reconciliation widget because its amount type is not supported "
              "in that context.")
        )

    def _apply_in_manual_widget(self, residual_amount_currency, partner, currency):
        """Produce move-line values for the manual reconciliation widget.

        The ``journal_id`` field is deliberately included in the result even
        though it is a related (read-only) field on the move line. The manual
        reconciliation widget relies on its presence to group lines into a
        single journal entry per journal.

        Args:
            residual_amount_currency: Open balance in the account's currency.
            partner: ``res.partner`` record for the counterpart.
            currency: ``res.currency`` record used by the account.

        Returns:
            ``dict`` ready for account.move.line creation.
        """
        self.ensure_one()

        computed_amount = self._compute_manual_amount(residual_amount_currency, currency)

        line_data = self._prepare_aml_vals(partner)
        line_data.update({
            'currency_id': currency.id,
            'amount_currency': computed_amount,
            'journal_id': self.journal_id.id,
        })
        return line_data

    # ------------------------------------------------------------------
    # Bank reconciliation
    # ------------------------------------------------------------------

    def _extract_regex_amount(self, payment_ref, residual_balance):
        """Try to extract a numeric amount from *payment_ref* using the
        regex pattern stored on this line.

        Returns the parsed amount with the correct sign, or ``0.0`` when
        parsing fails or the pattern does not match.
        """
        # Bug fix: payment_ref can be False/None on statement lines without
        # a reference; re.search() on a non-string raised TypeError.
        pattern_match = re.search(self.amount_string, payment_ref or '')
        if not pattern_match:
            return 0.0

        separator = self.model_id.decimal_separator
        direction = 1 if residual_balance > 0.0 else -1
        try:
            raw_group = pattern_match.group(1)
            # re.escape keeps regex metacharacters in the configured
            # separator (e.g. '.') from corrupting the character class.
            digits_only = re.sub(r'[^\d' + re.escape(separator) + ']', '', raw_group)
            parsed_value = float(digits_only.replace(separator, '.'))
            return copysign(parsed_value * direction, residual_balance)
        except (ValueError, IndexError):
            return 0.0

    def _compute_percentage_st_line_amount(self, st_line, currency):
        """Calculate the move-line amount and currency when the amount type
        is ``percentage_st_line``.

        Depending on the model configuration the calculation uses either the
        raw transaction figures or the journal-currency figures.

        Returns a ``(computed_amount, target_currency, percentage_ratio)``
        tuple, where ``percentage_ratio`` is the applied ratio for invoice
        write-offs and ``None`` otherwise.
        """
        (
            txn_amount, txn_currency,
            jnl_amount, jnl_currency,
            _comp_amount, _comp_currency,
        ) = st_line._get_accounting_amounts_and_currencies()

        ratio = self.amount / 100.0
        is_invoice_writeoff = (
            self.model_id.rule_type == 'writeoff_button'
            and self.model_id.counterpart_type in ('sale', 'purchase')
        )

        if is_invoice_writeoff:
            # Invoice creation – use the original transaction currency.
            return currency.round(-txn_amount * ratio), txn_currency, ratio
        # Standard write-off – follow the journal currency.
        return currency.round(-jnl_amount * ratio), jnl_currency, None

    def _apply_in_bank_widget(self, residual_amount_currency, partner, st_line):
        """Produce move-line values for the bank reconciliation widget.

        Handles three amount-type strategies:

        * ``percentage_st_line`` – percentage of the statement line amount
        * ``regex`` – amount extracted from the payment reference
        * fallback – delegates to :meth:`_apply_in_manual_widget`

        Args:
            residual_amount_currency: Open balance in the statement line currency.
            partner: ``res.partner`` record for the counterpart.
            st_line: ``account.bank.statement.line`` being reconciled.

        Returns:
            ``dict`` ready for account.move.line creation.
        """
        self.ensure_one()

        line_currency = (
            st_line.foreign_currency_id
            or st_line.journal_id.currency_id
            or st_line.company_currency_id
        )

        # -- percentage of statement line ---------------------------------
        if self.amount_type == 'percentage_st_line':
            computed_amount, target_cur, pct_ratio = (
                self._compute_percentage_st_line_amount(st_line, line_currency)
            )
            entry_data = self._prepare_aml_vals(partner)
            entry_data['currency_id'] = target_cur.id
            entry_data['amount_currency'] = computed_amount
            if pct_ratio is not None:
                entry_data['percentage_st_line'] = pct_ratio
            if not entry_data.get('name'):
                entry_data['name'] = st_line.payment_ref
            return entry_data

        # -- regex extraction from payment reference ----------------------
        if self.amount_type == 'regex':
            extracted = self._extract_regex_amount(
                st_line.payment_ref, residual_amount_currency,
            )
            entry_data = self._prepare_aml_vals(partner)
            entry_data['currency_id'] = line_currency.id
            entry_data['amount_currency'] = extracted
            if not entry_data.get('name'):
                entry_data['name'] = st_line.payment_ref
            return entry_data

        # -- percentage / fixed – reuse manual widget logic ---------------
        entry_data = self._apply_in_manual_widget(
            residual_amount_currency, partner, line_currency,
        )
        if not entry_data.get('name'):
            entry_data['name'] = st_line.payment_ref
        return entry_data
|
||||
7261
Fusion Accounting/models/account_report.py
Normal file
7261
Fusion Accounting/models/account_report.py
Normal file
File diff suppressed because it is too large
Load Diff
393
Fusion Accounting/models/account_sales_report.py
Normal file
393
Fusion Accounting/models/account_sales_report.py
Normal file
@@ -0,0 +1,393 @@
|
||||
# Fusion Accounting - EC Sales / Tax Report Handler
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
from odoo import _, api, fields, models
|
||||
from odoo.tools import SQL
|
||||
|
||||
|
||||
class ECSalesReportCustomHandler(models.AbstractModel):
|
||||
"""Produces the EC Sales List report.
|
||||
|
||||
Lists intra-community transactions broken down by goods, triangular
|
||||
operations and services, with per-partner VAT details and optional
|
||||
country-specific operation codes.
|
||||
"""
|
||||
|
||||
_name = 'account.ec.sales.report.handler'
|
||||
_inherit = 'account.report.custom.handler'
|
||||
_description = 'EC Sales Report Custom Handler'
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Display
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_custom_display_config(self):
|
||||
return {
|
||||
'components': {
|
||||
'AccountReportFilters': 'fusion_accounting.SalesReportFilters',
|
||||
},
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Dynamic lines
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
|
||||
"""Generate one line per partner per operation category (vertical layout)."""
|
||||
output = []
|
||||
col_totals = {
|
||||
cg: {
|
||||
'balance': 0.0, 'goods': 0.0, 'triangular': 0.0,
|
||||
'services': 0.0, 'vat_number': '', 'country_code': '',
|
||||
'sales_type_code': '',
|
||||
}
|
||||
for cg in options['column_groups']
|
||||
}
|
||||
|
||||
category_config = options['sales_report_taxes'].get('operation_category', {})
|
||||
filter_map = {
|
||||
item['id']: item.get('selected')
|
||||
for item in options.get('ec_tax_filter_selection', [])
|
||||
}
|
||||
|
||||
for partner_rec, data in self._query_partner_amounts(report, options, warnings):
|
||||
for category in ('goods', 'triangular', 'services'):
|
||||
if not filter_map.get(category):
|
||||
continue
|
||||
|
||||
per_col = defaultdict(dict)
|
||||
override_code = category_config.get(category)
|
||||
found_entries = False
|
||||
|
||||
for cg in options['column_groups']:
|
||||
psum = data.get(cg, {})
|
||||
per_col[cg]['vat_number'] = psum.get('vat_number', 'UNKNOWN')
|
||||
per_col[cg]['country_code'] = psum.get('country_code', 'UNKNOWN')
|
||||
per_col[cg]['sales_type_code'] = []
|
||||
per_col[cg]['balance'] = psum.get(category, 0.0)
|
||||
col_totals[cg]['balance'] += psum.get(category, 0.0)
|
||||
|
||||
for idx, elem_id in enumerate(psum.get('tax_element_id', [])):
|
||||
if elem_id in options['sales_report_taxes'][category]:
|
||||
found_entries = True
|
||||
code_val = (
|
||||
override_code
|
||||
or (psum.get('sales_type_code') and psum['sales_type_code'][idx])
|
||||
or None
|
||||
)
|
||||
per_col[cg]['sales_type_code'].append(code_val)
|
||||
|
||||
per_col[cg]['sales_type_code'] = ', '.join(
|
||||
set(per_col[cg]['sales_type_code'])
|
||||
)
|
||||
|
||||
if found_entries:
|
||||
output.append((
|
||||
0,
|
||||
self._render_partner_line(report, options, partner_rec, per_col, markup=category),
|
||||
))
|
||||
|
||||
if output:
|
||||
output.append((0, self._render_total_line(report, options, col_totals)))
|
||||
|
||||
return output
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Caret
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _caret_options_initializer(self):
|
||||
return {
|
||||
'ec_sales': [
|
||||
{'name': _("View Partner"), 'action': 'caret_option_open_record_form'},
|
||||
],
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Options
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
    def _custom_options_initializer(self, report, options, previous_options):
        """Set up EC tax filter selections and partner-country domain.

        :param report: the ``account.report`` record being configured.
        :param options: options dict mutated in place.
        :param previous_options: options carried over from the last render.
        """
        super()._custom_options_initializer(report, options, previous_options=previous_options)
        self._setup_core_options(report, options, previous_options)

        # Populate tax identifiers for each category.
        # In the generic case (no country tax report), fall back to tax IDs.
        options['sales_report_taxes'] = {
            'goods': tuple(self.env['account.tax'].search([
                *self.env['account.tax']._check_company_domain(self.env.company),
                ('amount', '=', 0.0),
                ('amount_type', '=', 'percent'),
                ('type_tax_use', '=', 'sale'),
            ]).ids),
            'services': tuple(),
            'triangular': tuple(),
            'use_taxes_instead_of_tags': True,
        }

        # Restrict the report to EC countries other than the company's own
        # fiscal country (intra-community transactions only).
        ec_codes = self._ec_country_code_set(options)
        ec_country_ids = self.env['res.country'].search([
            ('code', 'in', tuple(ec_codes)),
        ]).ids
        foreign_ids = tuple(
            set(ec_country_ids) - {self.env.company.account_fiscal_country_id.id}
        )

        # Prefer the shipping address country; fall back to the partner's
        # own country when no shipping address is set.
        options.setdefault('forced_domain', []).extend([
            '|',
            ('move_id.partner_shipping_id.country_id', 'in', foreign_ids),
            '&',
            ('move_id.partner_shipping_id', '=', False),
            ('partner_id.country_id', 'in', foreign_ids),
        ])

        report._init_options_journals(options, previous_options=previous_options)
        self._enable_export_buttons_for_common_vat_groups_in_branches(options)
|
||||
|
||||
def _setup_core_options(self, report, options, previous_options):
|
||||
"""Initialise the EC tax category filter (goods / triangular / services)."""
|
||||
default_filters = [
|
||||
{'id': 'goods', 'name': _('Goods'), 'selected': True},
|
||||
{'id': 'triangular', 'name': _('Triangular'), 'selected': True},
|
||||
{'id': 'services', 'name': _('Services'), 'selected': True},
|
||||
]
|
||||
saved = previous_options.get('ec_tax_filter_selection', default_filters)
|
||||
if saved != default_filters:
|
||||
valid_ids = {f['id'] for f in default_filters}
|
||||
saved = [item for item in saved if item['id'] in valid_ids]
|
||||
options['ec_tax_filter_selection'] = saved
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Line renderers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _render_partner_line(self, report, options, partner, col_data, markup=''):
    """Format a single partner / category row."""
    columns = [
        report._build_column_dict(
            col_data[column['column_group_key']].get(column['expression_label']),
            column,
            options=options,
        )
        for column in options['columns']
    ]

    if partner:
        line_name = (partner.name or '')[:128]
        trust = partner.trust
    else:
        line_name = _('Unknown Partner')
        trust = None

    return {
        'id': report._get_generic_line_id('res.partner', partner.id, markup=markup),
        'name': line_name,
        'columns': columns,
        'level': 2,
        'trust': trust,
        'caret_options': 'ec_sales',
    }
|
||||
|
||||
def _render_total_line(self, report, options, col_totals):
    """Build the bottom 'Total' row; only monetary columns carry a value."""
    columns = []
    for column in options['columns']:
        value = col_totals[column['column_group_key']].get(column['expression_label'])
        if column['figure_type'] != 'monetary':
            value = ''
        columns.append(report._build_column_dict(value, column, options=options))

    return {
        'id': report._get_generic_line_id(None, None, markup='total'),
        'name': _('Total'),
        'class': 'total',
        'level': 1,
        'columns': columns,
    }
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# SQL
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _query_partner_amounts(self, report, options, warnings=None):
    """Execute the main query and return ``[(partner, values), ...]``.

    :param report: the ``account.report`` record being rendered.
    :param options: report options; ``options['sales_report_taxes']`` must
        already map 'goods' / 'triangular' / 'services' to tuples of
        tax-element ids (see the options initializer).
    :param warnings: optional dict mutated in place with warning keys for
        non-EC countries, missing VAT numbers and same-country sales.
    :returns: list of ``(partner, {column_group_key: bucket})`` pairs in
        partner display order; rows whose balance rounds to zero in the
        company currency are dropped.
    """
    by_partner = {}
    comp_cur = self.env.company.currency_id

    def _store_row(row):
        # Ignore rows that are zero in company currency.
        if comp_cur.is_zero(row['balance']):
            return

        # by_partner: partner id ('groupby') -> column group -> bucket.
        # The bucket is a defaultdict(float) so category sums start at 0.
        by_partner.setdefault(row['groupby'], defaultdict(lambda: defaultdict(float)))
        bucket = by_partner[row['groupby']][row['column_group_key']]

        # Route the amount to the matching EC category. An element id that
        # belongs to none of the three tuples adds no amount (only the
        # metadata below is recorded).
        elem_id = row['tax_element_id']
        if elem_id in options['sales_report_taxes']['goods']:
            bucket['goods'] += row['balance']
        elif elem_id in options['sales_report_taxes']['triangular']:
            bucket['triangular'] += row['balance']
        elif elem_id in options['sales_report_taxes']['services']:
            bucket['services'] += row['balance']

        # Keep every contributing element id / type code for this bucket.
        bucket.setdefault('tax_element_id', []).append(elem_id)
        bucket.setdefault('sales_type_code', []).append(row['sales_type_code'])

        # Split 'XX123...' VAT into country prefix + bare number; setdefault
        # means the first row seen for a bucket wins.
        vat_raw = row['vat_number'] or ''
        country_prefix = vat_raw[:2] if vat_raw[:2].isalpha() else None
        bucket.setdefault('vat_number', vat_raw if not country_prefix else vat_raw[2:])
        bucket.setdefault('full_vat_number', vat_raw)
        bucket.setdefault('country_code', country_prefix or row.get('country_code'))

        if warnings is not None:
            # NOTE(review): recomputed for every row although it only
            # depends on options — could be hoisted out of the loop.
            ec_codes = self._ec_country_code_set(options)
            if row['country_code'] not in ec_codes:
                warnings['fusion_accounting.sales_report_warning_non_ec_country'] = {'alert_type': 'warning'}
            elif not row.get('vat_number'):
                warnings['fusion_accounting.sales_report_warning_missing_vat'] = {'alert_type': 'warning'}
            if row.get('same_country') and row['country_code']:
                warnings['fusion_accounting.sales_report_warning_same_country'] = {'alert_type': 'warning'}

    sql = self._build_sums_sql(report, options)
    self.env.cr.execute(sql)
    for rec in self.env.cr.dictfetchall():
        _store_row(rec)

    # active_test=False so archived partners with amounts still show up.
    if by_partner:
        partners = self.env['res.partner'].with_context(active_test=False).browse(by_partner.keys())
    else:
        partners = self.env['res.partner']

    return [(p, by_partner[p.id]) for p in partners.sorted()]
|
||||
|
||||
def _build_sums_sql(self, report, options) -> SQL:
    """Build the main aggregation query, joining either tax tags or tax
    records depending on configuration.

    One SELECT is emitted per column group and the pieces are combined
    with UNION ALL. Amounts are negated (``-SUM``) so that sales, booked
    as credits, come out positive in the report.

    :returns: an :class:`SQL` object ready for ``cr.execute``.
    """
    parts = []
    allowed = self._filtered_element_ids(options)

    # 'use_taxes_instead_of_tags' switches the grouping element between
    # account.tax records and account.account.tag records; both share the
    # same query shape, only the table / m2m relation names differ.
    use_tax_fallback = options.get('sales_report_taxes', {}).get('use_taxes_instead_of_tags')
    if use_tax_fallback:
        elem_table = SQL('account_tax')
        elem_id_col = SQL('account_tax_id')
        rel_table = SQL('account_move_line_account_tax_rel')
        elem_name_sql = self.env['account.tax']._field_to_sql('account_tax', 'name')
    else:
        elem_table = SQL('account_account_tag')
        elem_id_col = SQL('account_account_tag_id')
        rel_table = SQL('account_account_tag_account_move_line_rel')
        elem_name_sql = self.env['account.account.tag']._field_to_sql('account_account_tag', 'name')

    for cg, cg_opts in report._split_options_per_column_group(options).items():
        qry = report._get_report_query(cg_opts, 'strict_range')
        # Restrict to the elements picked via the filter toggles; with an
        # empty selection the query is left unfiltered.
        if allowed:
            qry.add_where(SQL('%s.id IN %s', elem_table, tuple(allowed)))

        parts.append(SQL(
            """
            SELECT
                %(cg)s AS column_group_key,
                account_move_line.partner_id AS groupby,
                rp.vat AS vat_number,
                rc.code AS country_code,
                -SUM(%(bal)s) AS balance,
                %(elem_name)s AS sales_type_code,
                %(elem_tbl)s.id AS tax_element_id,
                (comp_p.country_id = rp.country_id) AS same_country
            FROM %(tbl)s
            %(fx)s
            JOIN %(rel)s ON %(rel)s.account_move_line_id = account_move_line.id
            JOIN %(elem_tbl)s ON %(rel)s.%(elem_id)s = %(elem_tbl)s.id
            JOIN res_partner rp ON account_move_line.partner_id = rp.id
            JOIN res_country rc ON rp.country_id = rc.id
            JOIN res_company rco ON rco.id = account_move_line.company_id
            JOIN res_partner comp_p ON comp_p.id = rco.partner_id
            WHERE %(cond)s
            GROUP BY %(elem_tbl)s.id, %(elem_tbl)s.name,
                     account_move_line.partner_id,
                     rp.vat, rc.code, comp_p.country_id, rp.country_id
            """,
            cg=cg,
            elem_name=elem_name_sql,
            elem_tbl=elem_table,
            tbl=qry.from_clause,
            bal=report._currency_table_apply_rate(SQL("account_move_line.balance")),
            fx=report._currency_table_aml_join(cg_opts),
            rel=rel_table,
            elem_id=elem_id_col,
            cond=qry.where_clause,
        ))

    return SQL(' UNION ALL ').join(parts)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@api.model
def _filtered_element_ids(self, options):
    """Collect the set of tax / tag IDs selected via the filter toggles."""
    return {
        element_id
        for toggle in options.get('ec_tax_filter_selection', [])
        if toggle.get('selected')
        for element_id in options['sales_report_taxes'][toggle['id']]
    }
|
||||
|
||||
@api.model
def _ec_country_code_set(self, options):
    """Return the set of EU member-state country codes applicable to the
    report period."""
    codes = set((
        'AT', 'BE', 'BG', 'HR', 'CY', 'CZ', 'DK', 'EE', 'FI', 'FR',
        'DE', 'GR', 'HU', 'IE', 'IT', 'LV', 'LT', 'LU', 'MT', 'NL',
        'PL', 'PT', 'RO', 'SK', 'SI', 'ES', 'SE', 'XI',
    ))
    # GB is only in scope for periods starting before Brexit took effect
    # on 2021-01-01.
    brexit_cutoff = fields.Date.from_string('2021-01-01')
    if fields.Date.from_string(options['date']['date_from']) < brexit_cutoff:
        codes.add('GB')
    # Monaco belongs to the French VAT area, so French companies do not
    # report it as a separate EC country.
    if self.env.company.account_fiscal_country_id.code != 'FR':
        codes.add('MC')
    return codes
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Warning action
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def get_warning_act_window(self, options, params):
    """Open a window showing the problematic entries for a given warning.

    :param options: report options (used for the date range, EC country
        set and selected tax elements).
    :param params: dict with 'type' ('no_vat' / 'non_ec_country' / other
        = same-country) and 'model' ('move' or anything else = partner).
    :returns: an ``ir.actions.act_window`` dict.
    """
    action = {'type': 'ir.actions.act_window', 'context': {}}

    # Build the move-line domain specific to the warning being inspected.
    warning_type = params['type']
    if warning_type == 'no_vat':
        # Partners in an EC country but without a VAT number.
        # NOTE(review): Odoo domains conventionally use False rather than
        # None for "not set" — confirm both behave the same here.
        aml_filter = [
            ('partner_id.vat', '=', None),
            ('partner_id.country_id.code', 'in', tuple(self._ec_country_code_set(options))),
        ]
        action.update({
            'name': _("Entries with partners missing VAT numbers"),
            'context': {'search_default_group_by_partner': 1, 'expand': 1},
        })
    elif warning_type == 'non_ec_country':
        # EC tax used although the partner is outside the EC.
        aml_filter = [
            ('partner_id.country_id.code', 'not in', tuple(self._ec_country_code_set(options))),
        ]
        action['name'] = _("EC tax applied to non-EC countries")
    else:
        # Same-country warning: partner in the country flagged by options.
        aml_filter = [
            ('partner_id.country_id.code', '=', options.get('same_country_warning')),
        ]
        action['name'] = _("EC tax applied to same country")

    # Match lines either through their taxes or their tax tags, mirroring
    # the grouping element used by the SQL in _build_sums_sql.
    use_tax_fallback = options.get('sales_report_taxes', {}).get('use_taxes_instead_of_tags')
    lookup_field = 'tax_ids.id' if use_tax_fallback else 'tax_tag_ids.id'

    matching_lines = self.env['account.move.line'].search([
        *aml_filter,
        *self.env['account.report']._get_options_date_domain(options, 'strict_range'),
        (lookup_field, 'in', tuple(self._filtered_element_ids(options))),
    ])

    if params['model'] == 'move':
        action.update({
            'views': [[self.env.ref('account.view_move_tree').id, 'list'], (False, 'form')],
            'res_model': 'account.move',
            'domain': [('id', 'in', matching_lines.move_id.ids)],
        })
    else:
        # NOTE(review): partners are collected via move_id.partner_id
        # (the move's partner) rather than the line's partner_id —
        # confirm this asymmetry with the filter above is intended.
        action.update({
            'views': [(False, 'list'), (False, 'form')],
            'res_model': 'res.partner',
            'domain': [('id', 'in', matching_lines.move_id.partner_id.ids)],
        })

    return action
|
||||
313
Fusion Accounting/models/account_tax.py
Normal file
313
Fusion Accounting/models/account_tax.py
Normal file
@@ -0,0 +1,313 @@
|
||||
# Fusion Accounting - Tax & Tax Unit Extensions
|
||||
# Deferred date propagation in tax computations and tax unit management
|
||||
|
||||
from odoo import api, fields, models, Command, _
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
|
||||
class FusionAccountTax(models.Model):
    """Extends the tax engine to carry deferred revenue/expense dates
    through the base-line and tax-line computation pipeline.

    The dates are injected into the prepared line dicts and mirrored into
    every grouping key so lines covering different deferral periods are
    never aggregated together.
    """

    _inherit = "account.tax"

    def _inject_deferred_dates(self, vals, record, kwargs):
        """Copy ``deferred_start_date`` / ``deferred_end_date`` from
        *record* into the prepared-line dict *vals* (``False`` when the
        record carries no value). Shared by the base-line and tax-line
        preparation hooks to avoid duplicating the extraction logic."""
        for date_field in ('deferred_start_date', 'deferred_end_date'):
            vals[date_field] = self._get_base_line_field_value_from_record(
                record, date_field, kwargs, False,
            )

    def _prepare_base_line_for_taxes_computation(self, record, **kwargs):
        """Inject deferred period dates into the base-line dictionary."""
        vals = super()._prepare_base_line_for_taxes_computation(record, **kwargs)
        self._inject_deferred_dates(vals, record, kwargs)
        return vals

    def _prepare_tax_line_for_taxes_computation(self, record, **kwargs):
        """Inject deferred period dates into the tax-line dictionary."""
        vals = super()._prepare_tax_line_for_taxes_computation(record, **kwargs)
        self._inject_deferred_dates(vals, record, kwargs)
        return vals

    def _prepare_base_line_grouping_key(self, base_line):
        """Include deferred dates in the grouping key so lines with
        different deferral periods are not merged."""
        grp_key = super()._prepare_base_line_grouping_key(base_line)
        grp_key['deferred_start_date'] = base_line['deferred_start_date']
        grp_key['deferred_end_date'] = base_line['deferred_end_date']
        return grp_key

    def _prepare_base_line_tax_repartition_grouping_key(
        self, base_line, base_line_grouping_key, tax_data, tax_rep_data,
    ):
        """Propagate deferred dates into the repartition grouping key
        only when the account is deferral-compatible and the tax
        repartition line does not participate in tax closing."""
        grp_key = super()._prepare_base_line_tax_repartition_grouping_key(
            base_line, base_line_grouping_key, tax_data, tax_rep_data,
        )
        source_record = base_line['record']
        # Deferral applies only to journal items on deferral-compatible
        # accounts, with both period bounds set, and never on repartition
        # lines feeding the tax closing entry.
        is_deferral_eligible = (
            isinstance(source_record, models.Model)
            and source_record._name == 'account.move.line'
            and source_record._has_deferred_compatible_account()
            and base_line['deferred_start_date']
            and base_line['deferred_end_date']
            and not tax_rep_data['tax_rep'].use_in_tax_closing
        )
        if is_deferral_eligible:
            grp_key['deferred_start_date'] = base_line['deferred_start_date']
            grp_key['deferred_end_date'] = base_line['deferred_end_date']
        else:
            grp_key['deferred_start_date'] = False
            grp_key['deferred_end_date'] = False
        return grp_key

    def _prepare_tax_line_repartition_grouping_key(self, tax_line):
        """Mirror deferred dates from the tax line into its repartition
        grouping key."""
        grp_key = super()._prepare_tax_line_repartition_grouping_key(tax_line)
        grp_key['deferred_start_date'] = tax_line['deferred_start_date']
        grp_key['deferred_end_date'] = tax_line['deferred_end_date']
        return grp_key
|
||||
|
||||
|
||||
class FusionTaxUnit(models.Model):
    """A tax unit groups multiple companies for consolidated tax
    reporting. Manages fiscal position synchronisation and
    horizontal-group linkage to generic tax reports."""

    _name = "account.tax.unit"
    _description = "Tax Unit"

    name = fields.Char(string="Name", required=True)
    country_id = fields.Many2one(
        comodel_name='res.country',
        string="Country",
        required=True,
        help="Jurisdiction under which this unit's consolidated tax returns are filed.",
    )
    vat = fields.Char(
        string="Tax ID",
        required=True,
        help="VAT identification number used when submitting the unit's declaration.",
    )
    company_ids = fields.Many2many(
        comodel_name='res.company',
        string="Companies",
        required=True,
        help="Member companies grouped under this unit.",
    )
    main_company_id = fields.Many2one(
        comodel_name='res.company',
        string="Main Company",
        required=True,
        help="The reporting entity responsible for filing and payment.",
    )
    fpos_synced = fields.Boolean(
        string="Fiscal Positions Synchronised",
        compute='_compute_fiscal_position_completion',
        help="Indicates whether fiscal positions exist for every member company.",
    )

    # ---- CRUD Overrides ----
    @api.model_create_multi
    def create(self, vals_list):
        """After creation, set up horizontal groups on the generic tax
        reports so this unit appears in multi-company views.

        Fix: the override consumes a *list* of value dicts, so it must be
        decorated with ``@api.model_create_multi``; without the decorator
        the ORM's batch-create contract is broken and ``vals_list`` may
        arrive as a single dict.
        """
        records = super().create(vals_list)

        # One horizontal group per unit; its rule matches companies that
        # reference the unit through account_tax_unit_ids.
        h_groups = self.env['account.report.horizontal.group'].create([
            {
                'name': unit.name,
                'rule_ids': [
                    Command.create({
                        'field_name': 'company_id',
                        'domain': f"[('account_tax_unit_ids', 'in', {unit.id})]",
                    }),
                ],
            }
            for unit in records
        ])

        # Link horizontal groups to all relevant tax reports
        report_refs = [
            'account.generic_tax_report',
            'account.generic_tax_report_account_tax',
            'account.generic_tax_report_tax_account',
            'fusion_accounting.generic_ec_sales_report',
        ]
        for ref_str in report_refs:
            tax_rpt = self.env.ref(ref_str)
            tax_rpt.horizontal_group_ids |= h_groups

        # Also attach to country-specific variants of the generic report
        base_generic = self.env.ref('account.generic_tax_report')
        for unit in records:
            matching_variants = base_generic.variant_report_ids.filtered(
                lambda v: v.country_id == unit.country_id
            )
            matching_variants.write({
                'horizontal_group_ids': [Command.link(hg.id) for hg in h_groups],
            })

        return records

    def unlink(self):
        """Clean up fiscal positions before deletion."""
        # Remove the unit's fiscal positions across every company, not
        # just the members, so no orphaned mapping survives.
        self._get_tax_unit_fiscal_positions(
            companies=self.env['res.company'].search([]),
        ).unlink()
        return super().unlink()

    # ---- Computed Fields ----
    @api.depends('company_ids')
    def _compute_fiscal_position_completion(self):
        """Check whether every member company has a synchronised fiscal
        position mapping all other members' taxes to no-tax."""
        for unit in self:
            is_synced = True
            for company in unit.company_ids:
                # Onchange may hand us NewId records; resolve to the
                # persisted company before searching.
                origin = company._origin if isinstance(company.id, models.NewId) else company
                fp = unit._get_tax_unit_fiscal_positions(companies=origin)
                # Partners of all companies that currently point at this
                # unit's fiscal position (seen from `origin`'s company).
                partners_with_fp = (
                    self.env['res.company']
                    .search([])
                    .with_company(origin)
                    .partner_id
                    .filtered(lambda p: p.property_account_position_id == fp)
                    if fp
                    else self.env['res.partner']
                )
                # Synced means exactly the other members' partners use it.
                is_synced = partners_with_fp == (unit.company_ids - origin).partner_id
                if not is_synced:
                    break
            unit.fpos_synced = is_synced

    # ---- Fiscal Position Management ----
    def _get_tax_unit_fiscal_positions(self, companies, create_or_refresh=False):
        """Retrieve (or create) fiscal positions for each company in the
        unit. When *create_or_refresh* is True, positions are upserted
        with mappings that zero-out all company taxes.

        :param companies: Companies to process.
        :param create_or_refresh: If True, create/update the fiscal positions.
        :returns: Recordset of fiscal positions.
        """
        fp_set = self.env['account.fiscal.position'].with_context(
            allowed_company_ids=self.env.user.company_ids.ids,
        )
        for unit in self:
            for comp in companies:
                # Deterministic XML id so the position can be found again
                # (and upserted) on later syncs.
                xml_ref = f'account.tax_unit_{unit.id}_fp_{comp.id}'
                existing = self.env.ref(xml_ref, raise_if_not_found=False)
                if create_or_refresh:
                    company_taxes = self.env['account.tax'].with_context(
                        allowed_company_ids=self.env.user.company_ids.ids,
                    ).search(self.env['account.tax']._check_company_domain(comp))
                    # Map every company tax to no destination tax
                    # (tax_dest_id left unset) so intra-unit operations
                    # are effectively untaxed.
                    fp_data = {
                        'xml_id': xml_ref,
                        'values': {
                            'name': unit.name,
                            'company_id': comp.id,
                            'tax_ids': [Command.clear()] + [
                                Command.create({'tax_src_id': t.id}) for t in company_taxes
                            ],
                        },
                    }
                    existing = fp_set._load_records([fp_data])
                if existing:
                    fp_set += existing
        return fp_set

    def action_sync_unit_fiscal_positions(self):
        """Remove existing unit fiscal positions and recreate them
        with up-to-date tax mappings for all member companies."""
        self._get_tax_unit_fiscal_positions(
            companies=self.env['res.company'].search([]),
        ).unlink()
        for unit in self:
            for comp in unit.company_ids:
                fp = unit._get_tax_unit_fiscal_positions(
                    companies=comp, create_or_refresh=True,
                )
                # Point the other members' partners (as seen from `comp`)
                # at the freshly created position.
                (unit.company_ids - comp).with_company(comp).partner_id.property_account_position_id = fp

    # ---- Constraints ----
    @api.constrains('country_id', 'company_ids')
    def _validate_companies_country(self):
        """All member companies must share the same currency and each
        company may only belong to one unit per country."""
        for unit in self:
            currencies_seen = set()
            for comp in unit.company_ids:
                currencies_seen.add(comp.currency_id)
                other_units_same_country = any(
                    u != unit and u.country_id == unit.country_id
                    for u in comp.account_tax_unit_ids
                )
                if other_units_same_country:
                    raise ValidationError(_(
                        "Company %(company)s already belongs to a tax unit in "
                        "%(country)s. Each company can only participate in one "
                        "tax unit per country.",
                        company=comp.name,
                        country=unit.country_id.name,
                    ))
            if len(currencies_seen) > 1:
                raise ValidationError(
                    _("All companies within a tax unit must share the same primary currency.")
                )

    @api.constrains('company_ids', 'main_company_id')
    def _validate_main_company(self):
        """The designated main company must be among the unit's members."""
        for unit in self:
            if unit.main_company_id not in unit.company_ids:
                raise ValidationError(
                    _("The main company must be a member of the tax unit.")
                )

    @api.constrains('company_ids')
    def _validate_companies(self):
        """A tax unit requires at least two member companies."""
        for unit in self:
            if len(unit.company_ids) < 2:
                raise ValidationError(
                    _("A tax unit requires a minimum of two companies. "
                      "Consider deleting the unit instead.")
                )

    @api.constrains('country_id', 'vat')
    def _validate_vat(self):
        """Validate the VAT number against the unit's country."""
        for unit in self:
            if not unit.vat:
                continue
            detected_code = self.env['res.partner']._run_vat_test(
                unit.vat, unit.country_id,
            )
            if detected_code and detected_code != unit.country_id.code.lower():
                raise ValidationError(
                    _("The country derived from the VAT number does not match "
                      "the country configured on this tax unit.")
                )
            if not detected_code:
                unit_label = _("tax unit [%s]", unit.name)
                err_msg = self.env['res.partner']._build_vat_error_message(
                    unit.country_id.code.lower(), unit.vat, unit_label,
                )
                raise ValidationError(err_msg)

    # ---- Onchange ----
    @api.onchange('company_ids')
    def _onchange_company_ids(self):
        """Auto-select the first company as main when the current main
        is removed from the member list."""
        if self.main_company_id not in self.company_ids and self.company_ids:
            self.main_company_id = self.company_ids[0]._origin
        elif not self.company_ids:
            self.main_company_id = False
|
||||
194
Fusion Accounting/models/account_transfer.py
Normal file
194
Fusion Accounting/models/account_transfer.py
Normal file
@@ -0,0 +1,194 @@
|
||||
# Fusion Accounting - Account Transfer Wizard
|
||||
# Copyright (C) 2026 Nexa Systems Inc. (https://nexasystems.ca)
|
||||
# Original implementation for the Fusion Accounting module.
|
||||
#
|
||||
# Provides a transient model that creates a journal entry to move
|
||||
# a balance from one account to another within the same company.
|
||||
|
||||
import logging
|
||||
from datetime import date
|
||||
|
||||
from odoo import api, fields, models, Command, _
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionAccountTransfer(models.TransientModel):
    """Wizard for transferring a balance between two accounts.

    Builds one balanced journal entry — a credit line on the source
    account and a debit line on the destination account — moving the
    requested amount from one account to the other.
    """

    _name = 'fusion.account.transfer'
    _description = 'Account Balance Transfer'

    # =====================================================================
    # Fields
    # =====================================================================

    company_id = fields.Many2one(
        'res.company',
        string="Company",
        required=True,
        readonly=True,
        default=lambda self: self.env.company,
    )
    currency_id = fields.Many2one(
        'res.currency',
        related='company_id.currency_id',
        string="Currency",
        readonly=True,
    )
    source_account_id = fields.Many2one(
        'account.account',
        string="Source Account",
        required=True,
        domain="[('company_ids', 'in', company_id)]",
        help="The account to transfer funds FROM (will be credited).",
    )
    destination_account_id = fields.Many2one(
        'account.account',
        string="Destination Account",
        required=True,
        domain="[('company_ids', 'in', company_id)]",
        help="The account to transfer funds TO (will be debited).",
    )
    amount = fields.Monetary(
        string="Amount",
        required=True,
        currency_field='currency_id',
        help="Amount to transfer between the accounts.",
    )
    journal_id = fields.Many2one(
        'account.journal',
        string="Journal",
        required=True,
        domain="[('type', '=', 'general'), ('company_id', '=', company_id)]",
        help="Miscellaneous journal to record the transfer entry.",
    )
    date = fields.Date(
        string="Date",
        required=True,
        default=fields.Date.context_today,
        help="Date of the transfer journal entry.",
    )
    memo = fields.Char(
        string="Memo",
        help="Optional description for the journal entry.",
    )
    partner_id = fields.Many2one(
        'res.partner',
        string="Partner",
        help="Optional partner for the journal entry lines.",
    )

    # =====================================================================
    # Constraints
    # =====================================================================

    @api.constrains('source_account_id', 'destination_account_id')
    def _check_different_accounts(self):
        """Refuse a transfer from an account onto itself."""
        for wizard in self:
            if wizard.source_account_id == wizard.destination_account_id:
                raise ValidationError(
                    _("Source and destination accounts must be different.")
                )

    @api.constrains('amount')
    def _check_positive_amount(self):
        """Only strictly positive amounts can be transferred."""
        for wizard in self:
            if wizard.amount <= 0:
                raise ValidationError(
                    _("Transfer amount must be greater than zero.")
                )

    # =====================================================================
    # Default Values
    # =====================================================================

    @api.model
    def default_get(self, fields_list):
        """Set default journal to the company's miscellaneous journal."""
        defaults = super().default_get(fields_list)
        if 'journal_id' not in defaults:
            journal_domain = [
                ('type', '=', 'general'),
                ('company_id', '=', self.env.company.id),
            ]
            fallback_journal = self.env['account.journal'].search(journal_domain, limit=1)
            if fallback_journal:
                defaults['journal_id'] = fallback_journal.id
        return defaults

    # =====================================================================
    # Action
    # =====================================================================

    def action_transfer(self):
        """Create a journal entry moving balance between accounts.

        Generates a balanced journal entry:
        - Credit line on the source account
        - Debit line on the destination account

        :returns: action dict pointing to the created journal entry
        :raises UserError: if required fields are missing or invalid
        """
        self.ensure_one()

        if not (self.source_account_id and self.destination_account_id):
            raise UserError(_("Both source and destination accounts are required."))
        if self.amount <= 0:
            raise UserError(_("The transfer amount must be positive."))

        ref = self.memo or _("Account Transfer: %s → %s",
                             self.source_account_id.display_name,
                             self.destination_account_id.display_name)

        line_partner_id = self.partner_id.id if self.partner_id else False
        # Credit the source account
        credit_line = Command.create({
            'account_id': self.source_account_id.id,
            'name': ref,
            'debit': 0.0,
            'credit': self.amount,
            'partner_id': line_partner_id,
        })
        # Debit the destination account
        debit_line = Command.create({
            'account_id': self.destination_account_id.id,
            'name': ref,
            'debit': self.amount,
            'credit': 0.0,
            'partner_id': line_partner_id,
        })

        move = self.env['account.move'].create({
            'journal_id': self.journal_id.id,
            'date': self.date,
            'ref': ref,
            'company_id': self.company_id.id,
            'move_type': 'entry',
            'line_ids': [credit_line, debit_line],
        })

        _logger.info(
            "Fusion Account Transfer: created journal entry %s (id=%s) "
            "for %.2f from %s to %s",
            move.name, move.id, self.amount,
            self.source_account_id.code,
            self.destination_account_id.code,
        )

        return {
            'type': 'ir.actions.act_window',
            'res_model': 'account.move',
            'res_id': move.id,
            'view_mode': 'form',
            'target': 'current',
            'name': _("Transfer Entry"),
        }
|
||||
268
Fusion Accounting/models/account_trial_balance_report.py
Normal file
268
Fusion Accounting/models/account_trial_balance_report.py
Normal file
@@ -0,0 +1,268 @@
|
||||
# Fusion Accounting - Trial Balance Report Handler
|
||||
|
||||
from odoo import api, models, _, fields
|
||||
from odoo.tools import float_compare
|
||||
from odoo.tools.misc import DEFAULT_SERVER_DATE_FORMAT
|
||||
|
||||
|
||||
# Sentinel key used for end-balance columns which are computed client-side
|
||||
# and never generate their own SQL column group.
|
||||
_END_COL_GROUP_SENTINEL = '_trial_balance_end_column_group'
|
||||
|
||||
|
||||
class TrialBalanceCustomHandler(models.AbstractModel):
|
||||
"""Wraps the General Ledger handler to produce a Trial Balance.
|
||||
|
||||
The trial balance adds initial-balance and end-balance column groups
|
||||
around the regular period columns and collapses each account's detail
|
||||
into a single non-foldable row.
|
||||
"""
|
||||
|
||||
_name = 'account.trial.balance.report.handler'
|
||||
_inherit = 'account.report.custom.handler'
|
||||
_description = 'Trial Balance Custom Handler'
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Dynamic lines
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
    """Delegate to the GL handler and then post-process rows to
    collapse debit/credit, compute end-balance columns, and remove
    expand functions.

    :param report: ``account.report`` record being rendered.
    :param options: report options dict; ``options['columns']`` is
        scanned to locate the initial/end balance column indices.
    :param all_column_groups_expression_totals: precomputed expression
        totals, forwarded unchanged to the general-ledger handler.
    :param warnings: optional warnings accumulator, forwarded as-is.
    :returns: list of ``(0, line_dict)`` tuples (sequence is always 0;
        ordering is taken as-is from the GL handler).
    """

    def _set_cell(row, idx, amount):
        # Write the raw amount and keep the zero flag consistent with
        # company-currency rounding.
        row['columns'][idx]['no_format'] = amount
        row['columns'][idx]['is_zero'] = self.env.company.currency_id.is_zero(amount)

    def _collapse_debit_credit(row, dr_idx, cr_idx, bal_idx=None):
        """Net debit and credit: whichever is larger keeps the difference;
        the other becomes zero. Optionally write balance too."""
        # A missing column index yields False, which behaves as 0.0 in
        # the arithmetic below.
        dr_val = row['columns'][dr_idx]['no_format'] if dr_idx is not None else False
        cr_val = row['columns'][cr_idx]['no_format'] if cr_idx is not None else False

        if dr_val and cr_val:
            cmp = self.env.company.currency_id.compare_amounts(dr_val, cr_val)
            if cmp == 1:
                # Debit wins: keep the net on the debit side.
                _set_cell(row, dr_idx, dr_val - cr_val)
                _set_cell(row, cr_idx, 0.0)
            else:
                # Credit wins (or equal): keep the net on the credit side.
                _set_cell(row, dr_idx, 0.0)
                _set_cell(row, cr_idx, (dr_val - cr_val) * -1)

        # NOTE(review): reconstructed as running even when no netting
        # occurred, so the balance is always written when requested —
        # confirm nesting against the original file.
        if bal_idx is not None:
            _set_cell(row, bal_idx, dr_val - cr_val)

    # Obtain raw GL lines (drop the sequence element of each tuple).
    gl_handler = self.env['account.general.ledger.report.handler']
    raw = [
        row[1]
        for row in gl_handler._dynamic_lines_generator(
            report, options, all_column_groups_expression_totals, warnings=warnings,
        )
    ]

    # Locate column indices for initial / end balance. The first
    # 'debit'/'credit' match belongs to the leftmost (Initial Balance)
    # group; end-balance columns carry the sentinel column_group_key.
    col_defs = options['columns']
    init_dr = next((i for i, c in enumerate(col_defs) if c.get('expression_label') == 'debit'), None)
    init_cr = next((i for i, c in enumerate(col_defs) if c.get('expression_label') == 'credit'), None)

    end_dr = next((i for i, c in enumerate(col_defs)
                   if c.get('expression_label') == 'debit'
                   and c.get('column_group_key') == _END_COL_GROUP_SENTINEL), None)
    end_cr = next((i for i, c in enumerate(col_defs)
                   if c.get('expression_label') == 'credit'
                   and c.get('column_group_key') == _END_COL_GROUP_SENTINEL), None)
    end_bal = next((i for i, c in enumerate(col_defs)
                    if c.get('expression_label') == 'balance'
                    and c.get('column_group_key') == _END_COL_GROUP_SENTINEL), None)

    cur = self.env.company.currency_id

    # Process every account row (all except the last = total line)
    for row in raw[:-1]:
        _collapse_debit_credit(row, init_dr, init_cr)

        # End balance = sum of all debit columns except the end one itself
        if end_dr is not None:
            dr_sum = sum(
                cur.round(cell['no_format'])
                for idx, cell in enumerate(row['columns'])
                if cell.get('expression_label') == 'debit'
                and idx != end_dr
                and cell['no_format'] is not None
            )
            _set_cell(row, end_dr, dr_sum)

        if end_cr is not None:
            cr_sum = sum(
                cur.round(cell['no_format'])
                for idx, cell in enumerate(row['columns'])
                if cell.get('expression_label') == 'credit'
                and idx != end_cr
                and cell['no_format'] is not None
            )
            _set_cell(row, end_cr, cr_sum)

        _collapse_debit_credit(row, end_dr, end_cr, end_bal)

        # Remove GL expand-related keys: every account collapses to a
        # single, non-foldable line in the trial balance.
        row.pop('expand_function', None)
        row.pop('groupby', None)
        row['unfoldable'] = False
        row['unfolded'] = False

        mdl = report._get_model_info_from_id(row['id'])[0]
        if mdl == 'account.account':
            # Attach the trial-balance caret (GL / journal-items links).
            row['caret_options'] = 'trial_balance'

    # Recompute totals on the total line, but only for the columns that
    # were rewritten above; middle-period totals are kept from the GL.
    if raw:
        total_row = raw[-1]
        for idx in (init_dr, init_cr, end_dr, end_cr):
            if idx is not None:
                total_row['columns'][idx]['no_format'] = sum(
                    cur.round(r['columns'][idx]['no_format'])
                    for r in raw[:-1]
                    if report._get_model_info_from_id(r['id'])[0] == 'account.account'
                )

    return [(0, row) for row in raw]
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Caret options
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _caret_options_initializer(self):
    """Register the caret actions available on trial balance rows."""
    caret_entries = [
        {'name': _("General Ledger"), 'action': 'caret_option_open_general_ledger'},
        {'name': _("Journal Items"), 'action': 'open_journal_items'},
    ]
    return {'trial_balance': caret_entries}
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Column group management
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _get_column_group_creation_data(self, report, options, previous_options=None):
    """Declare which extra column groups to add and on which side of
    the report they appear.

    :returns: iterable of ``(builder_method, side)`` pairs, where side
        is ``'left'`` or ``'right'``.
    """
    builders = (
        (self._build_initial_balance_col_group, 'left'),
        (self._build_end_balance_col_group, 'right'),
    )
    return builders
|
||||
|
||||
@api.model
def _create_and_append_column_group(
    self, report, options, header_label, forced_opts, side,
    group_vals, exclude_initial_balance=False, append_col_groups=True,
):
    """Helper: generate a new column group and append it to *side*.

    :param report: ``account.report`` record.
    :param options: current report options dict (read, not mutated here).
    :param header_label: display label of the new top-level header.
    :param forced_opts: forced-options dict propagated to the new group.
    :param side: accumulator dict with 'column_headers', 'column_groups'
        and 'columns' keys, mutated in place.
    :param group_vals: base vals fed to the column-group generator.
    :param exclude_initial_balance: when True, force strict-range GL mode
        on every generated group.
    :param append_col_groups: when False, the generated groups are NOT
        registered (used for the client-side-computed End Balance group).
    """
    header_elem = [{'name': header_label, 'forced_options': forced_opts}]
    # Replace only the first (top-level) header row; keep sub-headers.
    full_headers = [header_elem, *options['column_headers'][1:]]
    cg_vals = report._generate_columns_group_vals_recursively(full_headers, group_vals)

    if exclude_initial_balance:
        for cg in cg_vals:
            cg['forced_options']['general_ledger_strict_range'] = True

    cols, col_groups = report._build_columns_from_column_group_vals(forced_opts, cg_vals)

    # `+=` extends the list with the single header dict.
    side['column_headers'] += header_elem
    if append_col_groups:
        side['column_groups'] |= col_groups
    side['columns'] += cols
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Options
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _custom_options_initializer(self, report, options, previous_options):
    """Insert initial-balance and end-balance column groups around the
    standard period columns.

    Mutates ``options`` in place: extends headers, column groups and
    columns, then re-initializes column ordering.
    """
    default_gv = {'horizontal_groupby_element': {}, 'forced_options': {}}
    # Accumulators for the columns to splice in on each side.
    lhs = {'column_headers': [], 'column_groups': {}, 'columns': []}
    rhs = {'column_headers': [], 'column_groups': {}, 'columns': []}

    # Mid-period columns should use strict range: the carried-over
    # opening balance gets its own dedicated group instead.
    for cg in options['column_groups'].values():
        cg['forced_options']['general_ledger_strict_range'] = True

    if options.get('comparison') and not options['comparison'].get('periods'):
        # Oldest period first so initial balance reads naturally left.
        options['comparison']['period_order'] = 'ascending'

    for factory_fn, side_label in self._get_column_group_creation_data(report, options, previous_options):
        target = lhs if side_label == 'left' else rhs
        factory_fn(report, options, previous_options, default_gv, target)

    # Splice the generated groups around the existing period columns.
    options['column_headers'][0] = lhs['column_headers'] + options['column_headers'][0] + rhs['column_headers']
    options['column_groups'].update(lhs['column_groups'])
    options['column_groups'].update(rhs['column_groups'])
    options['columns'] = lhs['columns'] + options['columns'] + rhs['columns']
    options['ignore_totals_below_sections'] = True

    # Force a shared currency-table period for all middle columns
    shared_period_key = '_trial_balance_middle_periods'
    for cg in options['column_groups'].values():
        dt = cg['forced_options'].get('date')
        if dt:
            dt['currency_table_period_key'] = shared_period_key

    report._init_options_order_column(options, previous_options)
|
||||
|
||||
def _custom_line_postprocessor(self, report, options, lines):
|
||||
"""Add contrast styling to hierarchy group lines when hierarchy is
|
||||
enabled."""
|
||||
if options.get('hierarchy'):
|
||||
for ln in lines:
|
||||
mdl, _ = report._get_model_info_from_id(ln['id'])
|
||||
if mdl == 'account.group':
|
||||
ln['class'] = ln.get('class', '') + ' o_account_coa_column_contrast_hierarchy'
|
||||
return lines
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Column group builders
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _build_initial_balance_col_group(self, report, options, previous_options, default_gv, side):
    """Create the Initial Balance column group on the left."""
    init_opts = self.env['account.general.ledger.report.handler']._get_options_initial_balance(options)
    forced = {
        'date': init_opts['date'],
        'include_current_year_in_unaff_earnings': init_opts['include_current_year_in_unaff_earnings'],
        'no_impact_on_currency_table': True,
    }
    self._create_and_append_column_group(report, options, _("Initial Balance"), forced, side, default_gv)
|
||||
|
||||
def _build_end_balance_col_group(self, report, options, previous_options, default_gv, side):
    """Create the End Balance column group on the right.

    No actual SQL is run for this group; its values are computed by
    summing all other groups during line post-processing.
    """
    # Date range: from the oldest comparison period (if any) up to the
    # current period's end date.
    to_dt = options['date']['date_to']
    from_dt = (
        options['comparison']['periods'][-1]['date_from']
        if options.get('comparison', {}).get('periods')
        else options['date']['date_from']
    )
    forced = {
        'date': report._get_dates_period(
            fields.Date.from_string(from_dt),
            fields.Date.from_string(to_dt),
            'range',
        ),
    }
    # append_col_groups=False: no SQL column group is registered; the
    # columns exist purely for client-side display.
    self._create_and_append_column_group(
        report, options, _("End Balance"), forced, side, default_gv,
        append_col_groups=False,
    )

    # Mark end-balance columns with the sentinel key so the dynamic
    # lines generator can recognise and fill them. The last
    # len(report.column_ids) entries are the ones just appended.
    num_report_cols = len(report.column_ids)
    for col in side['columns'][-num_report_cols:]:
        col['column_group_key'] = _END_COL_GROUP_SENTINEL
|
||||
625
Fusion Accounting/models/avatax_provider.py
Normal file
625
Fusion Accounting/models/avatax_provider.py
Normal file
@@ -0,0 +1,625 @@
|
||||
"""
|
||||
Fusion Accounting - Avalara AvaTax Provider
|
||||
============================================
|
||||
|
||||
Concrete implementation of :class:`FusionExternalTaxProvider` that integrates
|
||||
with the **Avalara AvaTax REST API v2** for real-time tax calculation, address
|
||||
validation, and transaction management.
|
||||
|
||||
API Reference: https://developer.avalara.com/api-reference/avatax/rest/v2/
|
||||
|
||||
Supported operations
|
||||
--------------------
|
||||
* **CreateTransaction** - compute tax on sales/purchase documents.
|
||||
* **VoidTransaction** - cancel a previously committed transaction.
|
||||
* **ResolveAddress** - validate and normalise postal addresses.
|
||||
* **Ping** - connection health check.
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
Set the *AvaTax Environment* field to ``sandbox`` during development (uses
|
||||
``sandbox-rest.avatax.com``) and switch to ``production`` for live tax filings.
|
||||
|
||||
Copyright (c) Nexa Systems Inc. - All rights reserved.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
|
||||
import requests
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
# AvaTax REST API v2 base URLs, keyed by the `avatax_environment` selection.
AVATAX_API_URLS = {
    'sandbox': 'https://sandbox-rest.avatax.com/api/v2',
    'production': 'https://rest.avatax.com/api/v2',
}

# Default timeout for AvaTax API requests (seconds)
AVATAX_REQUEST_TIMEOUT = 30

# Mapping of Odoo tax types to AvaTax transaction document types.
# NOTE(review): both refund directions map to 'ReturnInvoice' — confirm
# that vendor refunds ('in_refund') should not use a purchase-side type.
AVATAX_DOC_TYPES = {
    'out_invoice': 'SalesInvoice',
    'out_refund': 'ReturnInvoice',
    'in_invoice': 'PurchaseInvoice',
    'in_refund': 'ReturnInvoice',
    'entry': 'SalesOrder',
}
|
||||
|
||||
|
||||
class FusionAvaTaxProvider(models.Model):
    """Avalara AvaTax integration for automated tax calculation.

    Extends :class:`fusion.external.tax.provider` with AvaTax-specific
    credentials, endpoint configuration, and full REST API v2 support.
    """

    _inherit = "fusion.external.tax.provider"

    # -------------------------------------------------------------------------
    # AvaTax-Specific Fields
    # -------------------------------------------------------------------------
    # Credentials for HTTP Basic auth (see _avatax_get_auth_header);
    # restricted to account managers via `groups`.
    avatax_account_number = fields.Char(
        string="AvaTax Account Number",
        groups="account.group_account_manager",
        help="Numeric account ID provided by Avalara upon registration.",
    )
    # NOTE(review): the help text says "Stored encrypted" but a plain Char
    # field is not encrypted by itself — confirm an encryption layer exists.
    avatax_license_key = fields.Char(
        string="AvaTax License Key",
        groups="account.group_account_manager",
        help="Secret license key issued by Avalara. Stored encrypted.",
    )
    # Falls back to the company name when unset (see payload builder /
    # void_transaction).
    avatax_company_code = fields.Char(
        string="AvaTax Company Code",
        help="Company code configured in the Avalara portal. This identifies "
             "your nexus and tax configuration within AvaTax.",
    )
    # Selects which base URL in AVATAX_API_URLS is used.
    avatax_environment = fields.Selection(
        selection=[
            ('sandbox', 'Sandbox (Testing)'),
            ('production', 'Production'),
        ],
        string="AvaTax Environment",
        default='sandbox',
        required=True,
        help="Use Sandbox for testing without real tax filings. Switch to "
             "Production for live transaction recording.",
    )
    # Read by _avatax_build_transaction_payload to decide the 'commit' flag.
    avatax_commit_on_post = fields.Boolean(
        string="Commit on Invoice Post",
        default=True,
        help="When enabled, transactions are committed (locked) in AvaTax "
             "the moment the invoice is posted. Otherwise, they remain "
             "uncommitted until explicitly committed.",
    )
    # Gate for validate_address(); when disabled that method returns {}.
    avatax_address_validation = fields.Boolean(
        string="Address Validation",
        default=True,
        help="When enabled, customer addresses are validated and normalised "
             "through the AvaTax address resolution service before tax "
             "calculation.",
    )
    # Fallback used by _avatax_get_product_tax_code.
    avatax_default_tax_code = fields.Char(
        string="Default Tax Code",
        default='P0000000',
        help="AvaTax tax code applied to products without a specific mapping. "
             "'P0000000' represents tangible personal property.",
    )
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Selection Extension
|
||||
# -------------------------------------------------------------------------
|
||||
@api.model
def _get_provider_type_selection(self):
    """Add 'avatax' to the provider type selection list."""
    base = [('generic', 'Generic')]
    return base + [('avatax', 'Avalara AvaTax')]
|
||||
|
||||
def _init_provider_type(self):
|
||||
"""Dynamically extend provider_type selection for AvaTax."""
|
||||
selection = self._fields['provider_type'].selection
|
||||
if isinstance(selection, list):
|
||||
avatax_entry = ('avatax', 'Avalara AvaTax')
|
||||
if avatax_entry not in selection:
|
||||
selection.append(avatax_entry)
|
||||
|
||||
@api.model_create_multi
def create(self, vals_list):
    """Set provider code and API URL for AvaTax records automatically."""
    for vals in vals_list:
        if vals.get('provider_type') != 'avatax':
            continue
        vals.setdefault('code', 'avatax')
        env_key = vals.get('avatax_environment', 'sandbox')
        vals.setdefault('api_url', AVATAX_API_URLS.get(env_key, AVATAX_API_URLS['sandbox']))
    return super().create(vals_list)
|
||||
|
||||
def write(self, vals):
    """Keep the API URL in sync when the environment changes."""
    if 'avatax_environment' in vals:
        new_env = vals['avatax_environment']
        vals['api_url'] = AVATAX_API_URLS.get(new_env, AVATAX_API_URLS['sandbox'])
    return super().write(vals)
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# AvaTax REST API Helpers
|
||||
# -------------------------------------------------------------------------
|
||||
def _avatax_get_api_url(self):
    """Return the base API URL for the configured environment.

    :returns: Base URL string without trailing slash.
    """
    self.ensure_one()
    fallback = AVATAX_API_URLS['sandbox']
    return AVATAX_API_URLS.get(self.avatax_environment, fallback)
|
||||
|
||||
def _avatax_get_auth_header(self):
|
||||
"""Build the HTTP Basic authentication header.
|
||||
|
||||
AvaTax authenticates via ``Authorization: Basic <base64(account:key)>``.
|
||||
|
||||
:returns: ``dict`` with the Authorization header.
|
||||
:raises UserError: When credentials are missing.
|
||||
"""
|
||||
self.ensure_one()
|
||||
if not self.avatax_account_number or not self.avatax_license_key:
|
||||
raise UserError(_(
|
||||
"AvaTax account number and license key are required. "
|
||||
"Please configure them on provider '%(name)s'.",
|
||||
name=self.name,
|
||||
))
|
||||
credentials = f"{self.avatax_account_number}:{self.avatax_license_key}"
|
||||
encoded = base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
|
||||
return {'Authorization': f'Basic {encoded}'}
|
||||
|
||||
def _avatax_request(self, method, endpoint, payload=None, params=None):
    """Execute an authenticated request against the AvaTax REST API v2.

    :param method: HTTP method (``'GET'``, ``'POST'``, ``'DELETE'``).
    :param endpoint: API path relative to the version root, e.g.
        ``'/transactions/create'``.
    :param payload: JSON-serialisable request body (for POST/PUT).
    :param params: URL query parameters dict.
    :returns: Parsed JSON response ``dict``.
    :raises UserError: On HTTP or API errors.
    """
    self.ensure_one()
    base_url = self._avatax_get_api_url()
    url = f"{base_url}{endpoint}"
    headers = {
        **self._avatax_get_auth_header(),
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        # Client identification header expected by Avalara.
        'X-Avalara-Client': 'FusionAccounting;19.0;OdooConnector;1.0',
    }

    # NOTE(review): the full payload is logged at DEBUG; confirm no
    # sensitive data can reach production logs this way.
    if self.log_requests:
        _logger.debug(
            "AvaTax %s %s | payload=%s | params=%s",
            method, url, json.dumps(payload or {}), params,
        )

    try:
        response = requests.request(
            method=method.upper(),
            url=url,
            json=payload,
            params=params,
            headers=headers,
            timeout=AVATAX_REQUEST_TIMEOUT,
        )
    except requests.exceptions.ConnectionError:
        raise UserError(_(
            "Unable to connect to AvaTax at %(url)s. "
            "Please verify your network connection and the configured environment.",
            url=url,
        ))
    except requests.exceptions.Timeout:
        raise UserError(_(
            "The request to AvaTax timed out after %(timeout)s seconds. "
            "Please try again or contact Avalara support.",
            timeout=AVATAX_REQUEST_TIMEOUT,
        ))
    except requests.exceptions.RequestException as exc:
        # Catch-all for any other requests-level failure.
        raise UserError(_(
            "AvaTax request failed: %(error)s",
            error=str(exc),
        ))

    if self.log_requests:
        # Truncate the body so huge responses don't flood the log.
        _logger.debug(
            "AvaTax response %s: %s",
            response.status_code, response.text[:2000],
        )

    if response.status_code in (200, 201):
        return response.json()

    # Handle structured AvaTax error responses. _avatax_handle_error
    # always raises, so nothing is returned past this point.
    # NOTE(review): other 2xx codes (e.g. 204 No Content) would also end
    # up here and raise — confirm that is intended.
    self._avatax_handle_error(response)
|
||||
|
||||
def _avatax_handle_error(self, response):
    """Parse and raise a descriptive error from an AvaTax API response.

    :param response: ``requests.Response`` with a non-2xx status code.
    :raises UserError: Always.
    """
    try:
        body = response.json()
    except (ValueError, KeyError):
        raise UserError(_(
            "AvaTax returned HTTP %(code)s with an unparseable body: %(body)s",
            code=response.status_code,
            body=response.text[:500],
        ))

    err = body.get('error', {})
    detail_lines = []
    for d in err.get('details', []):
        detail_lines.append(
            f" - [{d.get('severity', 'Error')}] {d.get('message', '')} "
            f"(ref: {d.get('refersTo', 'N/A')})"
        )
    raise UserError(_(
        "AvaTax API Error (HTTP %(code)s): %(message)s\n\nDetails:\n%(details)s",
        code=response.status_code,
        message=err.get('message', 'Unknown error'),
        details='\n'.join(detail_lines) or _("No additional details provided."),
    ))
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Tax Calculation
|
||||
# -------------------------------------------------------------------------
|
||||
def calculate_tax(self, order_lines):
    """Compute tax via AvaTax CreateTransaction API.

    Builds a ``CreateTransactionModel`` payload from the provided move
    lines, submits it, and returns the parsed per-line tax results.

    :param order_lines: ``account.move.line`` recordset with product,
        quantity, price, and associated partner address data.
    :returns: ``dict`` with keys ``doc_code``, ``total_tax``, and ``lines``
        (list of per-line tax detail dicts).
    :raises UserError: On API failure or missing configuration.
    """
    self.ensure_one()
    if not order_lines:
        # Nothing to compute: return an empty, well-formed result.
        return {'doc_code': False, 'total_tax': 0.0, 'lines': []}

    move = order_lines[0].move_id
    if not move.partner_id:
        raise UserError(_(
            "Cannot compute external taxes: the invoice has no partner set."
        ))

    payload = self._avatax_build_transaction_payload(move, order_lines)
    response = self._avatax_request('POST', '/transactions/create', payload=payload)
    return self._avatax_parse_transaction_result(response, order_lines)
|
||||
|
||||
def _avatax_build_transaction_payload(self, move, lines):
    """Construct the CreateTransactionModel JSON body.

    Maps invoice data to the AvaTax transaction schema described at:
    https://developer.avalara.com/api-reference/avatax/rest/v2/models/CreateTransactionModel/

    :param move: ``account.move`` record.
    :param lines: ``account.move.line`` recordset (product lines only).
    :returns: ``dict`` ready for JSON serialisation.
    :raises UserError: When no taxable product lines are present.
    """
    self.ensure_one()
    partner = move.partner_id
    company = move.company_id

    # Determine document type from move_type (fallback: SalesOrder, an
    # uncommitted estimate-style document).
    doc_type = AVATAX_DOC_TYPES.get(move.move_type, 'SalesOrder')

    # Build address objects: ship-from is the company, ship-to the customer.
    ship_to = self._avatax_build_address(partner)
    ship_from = self._avatax_build_address(company.partner_id)

    # Build line items from product display-type lines only (skips
    # notes/sections).
    avatax_lines = []
    for idx, line in enumerate(lines.filtered(lambda l: l.display_type == 'product')):
        tax_code = self._avatax_get_product_tax_code(line.product_id)
        avatax_lines.append({
            # AvaTax line numbers are 1-based strings.
            'number': str(idx + 1),
            # abs(): refund lines may carry negative sign; the document
            # type (ReturnInvoice) conveys direction instead.
            'quantity': abs(line.quantity),
            'amount': abs(line.price_subtotal),
            'taxCode': tax_code,
            'itemCode': line.product_id.default_code or line.product_id.name or '',
            # Truncated to AvaTax's description length limit.
            'description': (line.name or '')[:255],
            'discounted': bool(line.discount),
            # ref1 carries the Odoo line id so results map back to lines.
            'ref1': str(line.id),
        })

    if not avatax_lines:
        raise UserError(_(
            "No taxable product lines found on invoice %(ref)s.",
            ref=move.name or move.ref or 'New',
        ))

    # Commit (lock for filing) only when configured and already posted.
    commit = self.avatax_commit_on_post and move.state == 'posted'
    payload = {
        'type': doc_type,
        'companyCode': self.avatax_company_code or company.name,
        'date': fields.Date.to_string(move.invoice_date or move.date),
        'customerCode': partner.ref or partner.name or str(partner.id),
        'purchaseOrderNo': move.ref or '',
        'addresses': {
            'shipFrom': ship_from,
            'shipTo': ship_to,
        },
        'lines': avatax_lines,
        'commit': commit,
        'currencyCode': move.currency_id.name,
        'description': f"Odoo Invoice {move.name or 'Draft'}",
    }

    # Only include document code for posted invoices ('/' = draft name).
    if move.name and move.name != '/':
        payload['code'] = move.name

    return payload
|
||||
|
||||
def _avatax_build_address(self, partner):
|
||||
"""Convert a partner record to an AvaTax address dict.
|
||||
|
||||
:param partner: ``res.partner`` record.
|
||||
:returns: ``dict`` with AvaTax address fields.
|
||||
"""
|
||||
return {
|
||||
'line1': partner.street or '',
|
||||
'line2': partner.street2 or '',
|
||||
'city': partner.city or '',
|
||||
'region': partner.state_id.code or '',
|
||||
'country': partner.country_id.code or '',
|
||||
'postalCode': partner.zip or '',
|
||||
}
|
||||
|
||||
def _avatax_get_product_tax_code(self, product):
|
||||
"""Resolve the AvaTax tax code for a given product.
|
||||
|
||||
Checks (in order):
|
||||
1. A custom field ``avatax_tax_code`` on the product template.
|
||||
2. A category-level mapping via ``categ_id.avatax_tax_code``.
|
||||
3. The provider's default tax code.
|
||||
|
||||
:param product: ``product.product`` record.
|
||||
:returns: Tax code string.
|
||||
"""
|
||||
if product and hasattr(product, 'avatax_tax_code') and product.avatax_tax_code:
|
||||
return product.avatax_tax_code
|
||||
if (
|
||||
product
|
||||
and product.categ_id
|
||||
and hasattr(product.categ_id, 'avatax_tax_code')
|
||||
and product.categ_id.avatax_tax_code
|
||||
):
|
||||
return product.categ_id.avatax_tax_code
|
||||
return self.avatax_default_tax_code or 'P0000000'
|
||||
|
||||
def _avatax_parse_transaction_result(self, result, order_lines):
|
||||
"""Parse the AvaTax CreateTransaction response into a normalised format.
|
||||
|
||||
:param result: Parsed JSON response from AvaTax.
|
||||
:param order_lines: Original ``account.move.line`` recordset.
|
||||
:returns: ``dict`` with ``doc_code``, ``total_tax``, ``total_amount``,
|
||||
and ``lines`` list.
|
||||
"""
|
||||
doc_code = result.get('code', '')
|
||||
total_tax = result.get('totalTax', 0.0)
|
||||
total_amount = result.get('totalAmount', 0.0)
|
||||
|
||||
lines_result = []
|
||||
for avatax_line in result.get('lines', []):
|
||||
line_ref = avatax_line.get('ref1', '')
|
||||
tax_details = []
|
||||
for detail in avatax_line.get('details', []):
|
||||
tax_details.append({
|
||||
'tax_name': detail.get('taxName', ''),
|
||||
'tax_rate': detail.get('rate', 0.0),
|
||||
'tax_amount': detail.get('tax', 0.0),
|
||||
'taxable_amount': detail.get('taxableAmount', 0.0),
|
||||
'jurisdiction': detail.get('jurisName', ''),
|
||||
'jurisdiction_type': detail.get('jurisType', ''),
|
||||
'region': detail.get('region', ''),
|
||||
'country': detail.get('country', ''),
|
||||
})
|
||||
lines_result.append({
|
||||
'line_id': int(line_ref) if line_ref.isdigit() else False,
|
||||
'line_number': avatax_line.get('lineNumber', ''),
|
||||
'tax_amount': avatax_line.get('tax', 0.0),
|
||||
'taxable_amount': avatax_line.get('taxableAmount', 0.0),
|
||||
'exempt_amount': avatax_line.get('exemptAmount', 0.0),
|
||||
'tax_details': tax_details,
|
||||
})
|
||||
|
||||
return {
|
||||
'doc_code': doc_code,
|
||||
'total_tax': total_tax,
|
||||
'total_amount': total_amount,
|
||||
'lines': lines_result,
|
||||
}
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Transaction Void
|
||||
# -------------------------------------------------------------------------
|
||||
def void_transaction(self, doc_code, doc_type='SalesInvoice'):
    """Void a committed transaction in AvaTax.

    Uses the VoidTransaction API endpoint to mark a previously committed
    tax document as voided. This prevents it from appearing in tax filings.

    :param doc_code: Document code (typically the invoice number).
    :param doc_type: AvaTax document type (default ``'SalesInvoice'``).
        Sent as the ``documentType`` query parameter so the correct
        document is targeted when several share the same code.
    :returns: ``True`` on success.
    :raises UserError: When the API call fails.
    """
    self.ensure_one()
    if not doc_code:
        _logger.warning("void_transaction called with empty doc_code, skipping.")
        return True

    company_code = self.avatax_company_code or self.company_id.name
    endpoint = f"/companies/{company_code}/transactions/{doc_code}/void"
    payload = {'code': 'DocVoided'}

    # Bug fix: doc_type was previously accepted but never sent, so AvaTax
    # always resolved the code against its default document type and could
    # void the wrong document (e.g. the SalesInvoice instead of the
    # ReturnInvoice sharing the same code).
    self._avatax_request('POST', endpoint, payload=payload,
                         params={'documentType': doc_type})
    _logger.info(
        "AvaTax transaction voided: company=%s doc_code=%s",
        company_code, doc_code,
    )
    return True
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Address Validation
|
||||
# -------------------------------------------------------------------------
|
||||
def validate_address(self, partner):
    """Validate and normalise a partner address via the AvaTax address
    resolution service.

    Calls ``POST /addresses/resolve`` and returns the validated address
    components. If validation fails, an empty dict is returned and a
    warning is logged, so callers fall back to the original address.

    :param partner: ``res.partner`` record to validate.
    :returns: ``dict`` with normalised address fields, or ``{}``.
    """
    self.ensure_one()
    if not self.avatax_address_validation:
        return {}

    request_body = {
        'line1': partner.street or '',
        'line2': partner.street2 or '',
        'city': partner.city or '',
        'region': partner.state_id.code or '',
        'country': partner.country_id.code or '',
        'postalCode': partner.zip or '',
    }

    try:
        result = self._avatax_request('POST', '/addresses/resolve', payload=request_body)
    except UserError:
        _logger.warning(
            "AvaTax address validation failed for partner %s, using original address.",
            partner.display_name,
        )
        return {}

    candidates = result.get('validatedAddresses')
    validated = candidates[0] if candidates else {}

    return {
        'street': validated.get('line1', partner.street),
        'street2': validated.get('line2', partner.street2),
        'city': validated.get('city', partner.city),
        'zip': validated.get('postalCode', partner.zip),
        'state_code': validated.get('region', ''),
        'country_code': validated.get('country', ''),
        'latitude': validated.get('latitude', ''),
        'longitude': validated.get('longitude', ''),
        'messages': [
            {'severity': m.get('severity', 'info'), 'summary': m.get('summary', '')}
            for m in result.get('messages', [])
        ],
    }
|
||||
|
||||
def action_validate_partner_address(self):
    """Wizard action: validate the address of a selected partner.

    Reads the partner id from ``active_id`` in the context, runs
    :meth:`validate_address`, writes the normalised fields back on the
    partner, and returns a client notification summarising the result.

    :returns: ``ir.actions.client`` notification action dict.
    :raises UserError: When no partner id is in the context.
    """
    self.ensure_one()
    partner = self.env.context.get('active_id')
    if not partner:
        raise UserError(_("No partner selected for address validation."))

    partner_rec = self.env['res.partner'].browse(partner)
    result = self.validate_address(partner_rec)

    # Empty result: validation disabled, or the resolve call failed.
    if not result:
        return {
            'type': 'ir.actions.client',
            'tag': 'display_notification',
            'params': {
                'title': _("Address Validation"),
                'message': _("Address validation is disabled or returned no data."),
                'type': 'warning',
                'sticky': False,
            },
        }

    # Update the partner with validated address. Only non-empty values
    # are written, so a blank normalised field never wipes existing data.
    update_vals = {}
    if result.get('street'):
        update_vals['street'] = result['street']
    if result.get('street2'):
        update_vals['street2'] = result['street2']
    if result.get('city'):
        update_vals['city'] = result['city']
    if result.get('zip'):
        update_vals['zip'] = result['zip']
    if result.get('state_code'):
        # Match the state by code within the validated country.
        state = self.env['res.country.state'].search([
            ('code', '=', result['state_code']),
            ('country_id.code', '=', result.get('country_code', '')),
        ], limit=1)
        if state:
            update_vals['state_id'] = state.id
    if update_vals:
        partner_rec.write(update_vals)

    # AvaTax messages are surfaced as a sticky warning; a clean
    # validation shows a transient success toast instead.
    msg_parts = [m['summary'] for m in result.get('messages', []) if m.get('summary')]
    summary = '\n'.join(msg_parts) if msg_parts else _("Address validated successfully.")

    return {
        'type': 'ir.actions.client',
        'tag': 'display_notification',
        'params': {
            'title': _("Address Validation"),
            'message': summary,
            'type': 'success' if not msg_parts else 'warning',
            'sticky': bool(msg_parts),
        },
    }
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Connection Test
|
||||
# -------------------------------------------------------------------------
|
||||
def test_connection(self):
|
||||
"""Ping the AvaTax API to verify credentials and connectivity.
|
||||
|
||||
Calls ``GET /utilities/ping`` which returns authentication status.
|
||||
|
||||
:returns: ``True`` on successful ping.
|
||||
:raises UserError: When the ping fails.
|
||||
"""
|
||||
self.ensure_one()
|
||||
result = self._avatax_request('GET', '/utilities/ping')
|
||||
authenticated = result.get('authenticated', False)
|
||||
if not authenticated:
|
||||
raise UserError(_(
|
||||
"AvaTax ping succeeded but the credentials are not valid. "
|
||||
"Please check your account number and license key."
|
||||
))
|
||||
_logger.info(
|
||||
"AvaTax connection test passed: authenticated=%s version=%s",
|
||||
authenticated, result.get('version', 'unknown'),
|
||||
)
|
||||
return True
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Onchange
|
||||
# -------------------------------------------------------------------------
|
||||
@api.onchange('avatax_environment')
|
||||
def _onchange_avatax_environment(self):
|
||||
"""Update the API URL when the environment selection changes."""
|
||||
if self.avatax_environment:
|
||||
self.api_url = AVATAX_API_URLS.get(
|
||||
self.avatax_environment, AVATAX_API_URLS['sandbox']
|
||||
)
|
||||
22
Fusion Accounting/models/balance_sheet.py
Normal file
22
Fusion Accounting/models/balance_sheet.py
Normal file
@@ -0,0 +1,22 @@
|
||||
# Fusion Accounting - Balance Sheet Report Handler
|
||||
|
||||
from odoo import models
|
||||
|
||||
|
||||
class BalanceSheetCustomHandler(models.AbstractModel):
    """Custom handler adding balance-sheet-specific report behaviour."""

    _name = 'account.balance.sheet.report.handler'
    _inherit = 'account.report.custom.handler'
    _description = 'Balance Sheet Report Handler'

    def _customize_warnings(self, report, options, all_column_groups_expression_totals, warnings):
        """Inject a warning when the CTA currency table is in use.

        A 'cta' (currency translation adjustment) table can leave the report
        with unbalanced entries produced by the translation process, so the
        corresponding warning template is registered for the user.
        """
        if options.get('currency_table', {}).get('type') == 'cta':
            warnings['fusion_accounting.common_possibly_unbalanced_because_cta'] = {}
|
||||
2100
Fusion Accounting/models/bank_rec_widget.py
Normal file
2100
Fusion Accounting/models/bank_rec_widget.py
Normal file
File diff suppressed because it is too large
Load Diff
697
Fusion Accounting/models/bank_rec_widget_line.py
Normal file
697
Fusion Accounting/models/bank_rec_widget_line.py
Normal file
@@ -0,0 +1,697 @@
|
||||
# Fusion Accounting - Bank Reconciliation Widget Line
|
||||
# Original implementation for Fusion Accounting module
|
||||
|
||||
import uuid
|
||||
import markupsafe
|
||||
|
||||
from odoo import _, api, fields, models, Command
|
||||
from odoo.osv import expression
|
||||
from odoo.tools.misc import formatLang, frozendict
|
||||
|
||||
|
||||
# Flags whose account/currency derive directly from the source journal entry
_SOURCE_LINKED_FLAGS = frozenset({'aml', 'new_aml', 'liquidity', 'exchange_diff'})
# Flags whose date comes from the bank statement line
_STMT_DATE_FLAGS = frozenset({'liquidity', 'auto_balance', 'manual', 'early_payment', 'tax_line'})
# Flags whose partner is taken from the source journal entry
_PARTNER_FROM_SOURCE_FLAGS = frozenset({'aml', 'new_aml'})
# Flags whose partner is taken from the reconciliation widget itself
_PARTNER_FROM_WIDGET_FLAGS = frozenset({'liquidity', 'auto_balance', 'manual', 'early_payment', 'tax_line'})
# Flags whose currency is the statement transaction currency
_TRANSACTION_CURRENCY_FLAGS = frozenset({'auto_balance', 'manual', 'early_payment'})
|
||||
|
||||
|
||||
class FusionBankRecLine(models.Model):
    """Represents a single entry within the bank reconciliation widget.

    Each entry has a 'flag' indicating its role in the reconciliation process:
    - liquidity: The bank/cash journal item from the statement line
    - new_aml: A journal item being matched against the statement line
    - aml: An already-reconciled journal item (read-only display)
    - exchange_diff: Automatically generated foreign exchange adjustment
    - tax_line: Tax amount computed from manual entries
    - manual: A user-created write-off or adjustment entry
    - early_payment: Discount entry for early payment terms
    - auto_balance: System-generated balancing entry

    This model exists only in memory; no database table is created.
    """

    _name = "bank.rec.widget.line"
    _inherit = "analytic.mixin"
    _description = "Fusion bank reconciliation entry"

    # No backing table: _auto=False with a constant "0" table query keeps the
    # ORM happy while all records live purely in the transient widget state.
    _auto = False
    _table_query = "0"

    # --- Relationship to parent widget ---
    wizard_id = fields.Many2one(comodel_name='bank.rec.widget')
    # Opaque per-record identifier used by the JS client to track rows.
    index = fields.Char(compute='_compute_index')
    flag = fields.Selection(
        selection=[
            ('liquidity', 'liquidity'),
            ('new_aml', 'new_aml'),
            ('aml', 'aml'),
            ('exchange_diff', 'exchange_diff'),
            ('tax_line', 'tax_line'),
            ('manual', 'manual'),
            ('early_payment', 'early_payment'),
            ('auto_balance', 'auto_balance'),
        ],
    )

    # --- Core accounting fields ---
    journal_default_account_id = fields.Many2one(
        related='wizard_id.st_line_id.journal_id.default_account_id',
        depends=['wizard_id'],
    )
    # Editable computed fields (store=True, readonly=False) so both the
    # compute and manual user edits are kept.
    account_id = fields.Many2one(
        comodel_name='account.account',
        compute='_compute_account_id',
        store=True,
        readonly=False,
        check_company=True,
        domain="""[
            ('id', '!=', journal_default_account_id),
            ('account_type', 'not in', ('asset_cash', 'off_balance')),
        ]""",
    )
    date = fields.Date(
        compute='_compute_date',
        store=True,
        readonly=False,
    )
    name = fields.Char(
        compute='_compute_name',
        store=True,
        readonly=False,
    )
    partner_id = fields.Many2one(
        comodel_name='res.partner',
        compute='_compute_partner_id',
        store=True,
        readonly=False,
    )
    currency_id = fields.Many2one(
        comodel_name='res.currency',
        compute='_compute_currency_id',
        store=True,
        readonly=False,
    )
    company_id = fields.Many2one(related='wizard_id.company_id')
    country_code = fields.Char(related='company_id.country_id.code', depends=['company_id'])
    company_currency_id = fields.Many2one(related='wizard_id.company_currency_id')

    amount_currency = fields.Monetary(
        currency_field='currency_id',
        compute='_compute_amount_currency',
        store=True,
        readonly=False,
    )
    balance = fields.Monetary(
        currency_field='company_currency_id',
        compute='_compute_balance',
        store=True,
        readonly=False,
    )

    # --- Transaction currency fields (from statement line) ---
    transaction_currency_id = fields.Many2one(
        related='wizard_id.st_line_id.foreign_currency_id',
        depends=['wizard_id'],
    )
    amount_transaction_currency = fields.Monetary(
        currency_field='transaction_currency_id',
        related='wizard_id.st_line_id.amount_currency',
        depends=['wizard_id'],
    )

    # --- Debit/Credit split (derived, non-stored view of `balance`) ---
    debit = fields.Monetary(
        currency_field='company_currency_id',
        compute='_compute_from_balance',
    )
    credit = fields.Monetary(
        currency_field='company_currency_id',
        compute='_compute_from_balance',
    )

    # --- Tax handling ---
    force_price_included_taxes = fields.Boolean()
    tax_base_amount_currency = fields.Monetary(currency_field='currency_id')

    # --- Source journal entry reference ---
    source_aml_id = fields.Many2one(comodel_name='account.move.line')
    source_aml_move_id = fields.Many2one(
        comodel_name='account.move',
        compute='_compute_source_aml_fields',
        store=True,
        readonly=False,
    )
    source_aml_move_name = fields.Char(
        compute='_compute_source_aml_fields',
        store=True,
        readonly=False,
    )

    # --- Tax detail fields ---
    tax_repartition_line_id = fields.Many2one(
        comodel_name='account.tax.repartition.line',
        compute='_compute_tax_repartition_line_id',
        store=True,
        readonly=False,
    )
    tax_ids = fields.Many2many(
        comodel_name='account.tax',
        compute='_compute_tax_ids',
        store=True,
        readonly=False,
        check_company=True,
    )
    tax_tag_ids = fields.Many2many(
        comodel_name='account.account.tag',
        compute='_compute_tax_tag_ids',
        store=True,
        readonly=False,
    )
    group_tax_id = fields.Many2one(
        comodel_name='account.tax',
        compute='_compute_group_tax_id',
        store=True,
        readonly=False,
    )

    # --- Reconcile model tracking ---
    reconcile_model_id = fields.Many2one(comodel_name='account.reconcile.model')

    # --- Original (pre-partial) amounts for comparison ---
    source_amount_currency = fields.Monetary(currency_field='currency_id')
    source_balance = fields.Monetary(currency_field='company_currency_id')
    source_debit = fields.Monetary(
        currency_field='company_currency_id',
        compute='_compute_from_source_balance',
    )
    source_credit = fields.Monetary(
        currency_field='company_currency_id',
        compute='_compute_from_source_balance',
    )

    # --- Visual indicators for partial amounts ---
    display_stroked_amount_currency = fields.Boolean(compute='_compute_display_stroked_amount_currency')
    display_stroked_balance = fields.Boolean(compute='_compute_display_stroked_balance')

    # --- Partner account info for UI suggestions ---
    partner_currency_id = fields.Many2one(
        comodel_name='res.currency',
        compute='_compute_partner_info',
    )
    partner_receivable_account_id = fields.Many2one(
        comodel_name='account.account',
        compute='_compute_partner_info',
    )
    partner_payable_account_id = fields.Many2one(
        comodel_name='account.account',
        compute='_compute_partner_info',
    )
    partner_receivable_amount = fields.Monetary(
        currency_field='partner_currency_id',
        compute='_compute_partner_info',
    )
    partner_payable_amount = fields.Monetary(
        currency_field='partner_currency_id',
        compute='_compute_partner_info',
    )

    # --- Display fields ---
    bank_account = fields.Char(compute='_compute_bank_account')
    # sanitize=False: the HTML is built server-side from escaped Markup only.
    suggestion_html = fields.Html(
        compute='_compute_suggestion',
        sanitize=False,
    )
    suggestion_amount_currency = fields.Monetary(
        currency_field='currency_id',
        compute='_compute_suggestion',
    )
    suggestion_balance = fields.Monetary(
        currency_field='company_currency_id',
        compute='_compute_suggestion',
    )
    ref = fields.Char(
        compute='_compute_ref_narration',
        store=True,
        readonly=False,
    )
    narration = fields.Html(
        compute='_compute_ref_narration',
        store=True,
        readonly=False,
    )

    # Set when the user edits a line by hand, so recomputes can respect it.
    manually_modified = fields.Boolean()
|
||||
|
||||
# =========================================================================
|
||||
# COMPUTE METHODS
|
||||
# =========================================================================
|
||||
|
||||
def _compute_index(self):
|
||||
"""Assign a unique identifier to each entry for JS-side tracking."""
|
||||
for entry in self:
|
||||
entry.index = uuid.uuid4()
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_account_id(self):
|
||||
"""Derive the account from the source journal item for linked entries.
|
||||
|
||||
Entries tied to actual journal items (aml, new_aml, liquidity, exchange_diff)
|
||||
inherit the account directly. Other entry types retain their current account.
|
||||
"""
|
||||
for entry in self:
|
||||
if entry.flag in _SOURCE_LINKED_FLAGS:
|
||||
entry.account_id = entry.source_aml_id.account_id
|
||||
else:
|
||||
entry.account_id = entry.account_id
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_date(self):
|
||||
"""Set the date based on the entry type.
|
||||
|
||||
Source-linked entries (aml, new_aml, exchange_diff) use the original journal
|
||||
item date. Statement-based entries use the statement line date.
|
||||
"""
|
||||
for entry in self:
|
||||
if entry.flag in _STMT_DATE_FLAGS:
|
||||
entry.date = entry.wizard_id.st_line_id.date
|
||||
elif entry.flag in ('aml', 'new_aml', 'exchange_diff'):
|
||||
entry.date = entry.source_aml_id.date
|
||||
else:
|
||||
entry.date = entry.date
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_name(self):
|
||||
"""Set the description/label from the source journal item when applicable.
|
||||
|
||||
For entries derived from journal items, the label is taken from the
|
||||
original item. If the source has no name (e.g. credit notes), the
|
||||
move name is used as fallback.
|
||||
"""
|
||||
for entry in self:
|
||||
if entry.flag in ('aml', 'new_aml', 'liquidity'):
|
||||
entry.name = entry.source_aml_id.name or entry.source_aml_move_name
|
||||
else:
|
||||
entry.name = entry.name
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_partner_id(self):
|
||||
"""Determine the partner for each entry based on its type.
|
||||
|
||||
Matched journal items carry their own partner. Statement-derived
|
||||
entries use the partner set on the reconciliation widget.
|
||||
"""
|
||||
for entry in self:
|
||||
if entry.flag in _PARTNER_FROM_SOURCE_FLAGS:
|
||||
entry.partner_id = entry.source_aml_id.partner_id
|
||||
elif entry.flag in _PARTNER_FROM_WIDGET_FLAGS:
|
||||
entry.partner_id = entry.wizard_id.partner_id
|
||||
else:
|
||||
entry.partner_id = entry.partner_id
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_currency_id(self):
|
||||
"""Set the currency based on entry type.
|
||||
|
||||
Source-linked entries use the currency from the original journal item.
|
||||
Transaction-related entries (auto_balance, manual, early_payment) use
|
||||
the transaction currency from the bank statement.
|
||||
"""
|
||||
for entry in self:
|
||||
if entry.flag in _SOURCE_LINKED_FLAGS:
|
||||
entry.currency_id = entry.source_aml_id.currency_id
|
||||
elif entry.flag in _TRANSACTION_CURRENCY_FLAGS:
|
||||
entry.currency_id = entry.wizard_id.transaction_currency_id
|
||||
else:
|
||||
entry.currency_id = entry.currency_id
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_balance(self):
|
||||
"""Set the company-currency balance from the source when applicable.
|
||||
|
||||
Only 'aml' and 'liquidity' entries copy the balance directly from the
|
||||
source journal item. All other types preserve their computed/manual balance.
|
||||
"""
|
||||
for entry in self:
|
||||
if entry.flag in ('aml', 'liquidity'):
|
||||
entry.balance = entry.source_aml_id.balance
|
||||
else:
|
||||
entry.balance = entry.balance
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_amount_currency(self):
|
||||
"""Set the foreign currency amount from the source when applicable.
|
||||
|
||||
Only 'aml' and 'liquidity' entries copy directly from the source.
|
||||
"""
|
||||
for entry in self:
|
||||
if entry.flag in ('aml', 'liquidity'):
|
||||
entry.amount_currency = entry.source_aml_id.amount_currency
|
||||
else:
|
||||
entry.amount_currency = entry.amount_currency
|
||||
|
||||
@api.depends('balance')
|
||||
def _compute_from_balance(self):
|
||||
"""Split the balance into separate debit and credit components."""
|
||||
for entry in self:
|
||||
entry.debit = max(entry.balance, 0.0)
|
||||
entry.credit = max(-entry.balance, 0.0)
|
||||
|
||||
@api.depends('source_balance')
|
||||
def _compute_from_source_balance(self):
|
||||
"""Split the original source balance into debit and credit."""
|
||||
for entry in self:
|
||||
entry.source_debit = max(entry.source_balance, 0.0)
|
||||
entry.source_credit = max(-entry.source_balance, 0.0)
|
||||
|
||||
@api.depends('source_aml_id', 'account_id', 'partner_id')
|
||||
def _compute_analytic_distribution(self):
|
||||
"""Compute analytic distribution based on entry type.
|
||||
|
||||
Source-linked entries (liquidity, aml) inherit from the source item.
|
||||
Tax/early-payment entries keep their current distribution. Other entries
|
||||
look up the default distribution from analytic distribution models.
|
||||
"""
|
||||
distribution_cache = {}
|
||||
for entry in self:
|
||||
if entry.flag in ('liquidity', 'aml'):
|
||||
entry.analytic_distribution = entry.source_aml_id.analytic_distribution
|
||||
elif entry.flag in ('tax_line', 'early_payment'):
|
||||
entry.analytic_distribution = entry.analytic_distribution
|
||||
else:
|
||||
lookup_params = frozendict({
|
||||
"partner_id": entry.partner_id.id,
|
||||
"partner_category_id": entry.partner_id.category_id.ids,
|
||||
"account_prefix": entry.account_id.code,
|
||||
"company_id": entry.company_id.id,
|
||||
})
|
||||
if lookup_params not in distribution_cache:
|
||||
distribution_cache[lookup_params] = (
|
||||
self.env['account.analytic.distribution.model']._get_distribution(lookup_params)
|
||||
)
|
||||
entry.analytic_distribution = distribution_cache[lookup_params] or entry.analytic_distribution
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_tax_repartition_line_id(self):
|
||||
"""Inherit tax repartition line from the source for 'aml' entries only."""
|
||||
for entry in self:
|
||||
if entry.flag == 'aml':
|
||||
entry.tax_repartition_line_id = entry.source_aml_id.tax_repartition_line_id
|
||||
else:
|
||||
entry.tax_repartition_line_id = entry.tax_repartition_line_id
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_tax_ids(self):
|
||||
"""Copy applied tax references from the source for 'aml' entries."""
|
||||
for entry in self:
|
||||
if entry.flag == 'aml':
|
||||
entry.tax_ids = [Command.set(entry.source_aml_id.tax_ids.ids)]
|
||||
else:
|
||||
entry.tax_ids = entry.tax_ids
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_tax_tag_ids(self):
|
||||
"""Copy tax tags from the source for 'aml' entries."""
|
||||
for entry in self:
|
||||
if entry.flag == 'aml':
|
||||
entry.tax_tag_ids = [Command.set(entry.source_aml_id.tax_tag_ids.ids)]
|
||||
else:
|
||||
entry.tax_tag_ids = entry.tax_tag_ids
|
||||
|
||||
@api.depends('source_aml_id')
|
||||
def _compute_group_tax_id(self):
|
||||
"""Copy the group tax reference from the source for 'aml' entries."""
|
||||
for entry in self:
|
||||
if entry.flag == 'aml':
|
||||
entry.group_tax_id = entry.source_aml_id.group_tax_id
|
||||
else:
|
||||
entry.group_tax_id = entry.group_tax_id
|
||||
|
||||
@api.depends('currency_id', 'amount_currency', 'source_amount_currency')
|
||||
def _compute_display_stroked_amount_currency(self):
|
||||
"""Determine whether to show a strikethrough on the foreign currency amount.
|
||||
|
||||
This visual indicator appears when a 'new_aml' entry has been partially
|
||||
matched (its current amount differs from the original source amount).
|
||||
"""
|
||||
for entry in self:
|
||||
is_modified = entry.currency_id.compare_amounts(
|
||||
entry.amount_currency, entry.source_amount_currency
|
||||
) != 0
|
||||
entry.display_stroked_amount_currency = entry.flag == 'new_aml' and is_modified
|
||||
|
||||
@api.depends('currency_id', 'balance', 'source_balance')
|
||||
def _compute_display_stroked_balance(self):
|
||||
"""Determine whether to show a strikethrough on the balance.
|
||||
|
||||
Applies to 'new_aml' and 'exchange_diff' entries whose balance
|
||||
has been adjusted from the original source value.
|
||||
"""
|
||||
for entry in self:
|
||||
balance_changed = entry.currency_id.compare_amounts(
|
||||
entry.balance, entry.source_balance
|
||||
) != 0
|
||||
entry.display_stroked_balance = (
|
||||
entry.flag in ('new_aml', 'exchange_diff') and balance_changed
|
||||
)
|
||||
|
||||
    @api.depends('flag')
    def _compute_source_aml_fields(self):
        """Resolve the originating move for display and navigation purposes.

        For 'new_aml' and 'liquidity' entries, this is simply the move containing
        the source journal item. For 'aml' (already reconciled) entries, we trace
        through partial reconciliation records to find the counterpart document.
        """
        for entry in self:
            # Default both fields so every record gets an assignment.
            entry.source_aml_move_id = None
            entry.source_aml_move_name = None

            if entry.flag in ('new_aml', 'liquidity'):
                originating_move = entry.source_aml_id.move_id
                entry.source_aml_move_id = originating_move
                entry.source_aml_move_name = originating_move.name
            elif entry.flag == 'aml':
                # Trace through reconciliation partials to find the counterpart
                partial_records = (
                    entry.source_aml_id.matched_debit_ids
                    + entry.source_aml_id.matched_credit_ids
                )
                linked_items = partial_records.debit_move_id + partial_records.credit_move_id
                # Exclude the source itself and any exchange difference entries
                fx_move_items = partial_records.exchange_move_id.line_ids
                counterpart_items = linked_items - entry.source_aml_id - fx_move_items
                # The fields are only filled when exactly one candidate remains;
                # ambiguous multi-counterpart matches are left empty.
                if len(counterpart_items) == 1:
                    entry.source_aml_move_id = counterpart_items.move_id
                    entry.source_aml_move_name = counterpart_items.move_id.name
|
||||
|
||||
    @api.depends('wizard_id.form_index', 'partner_id')
    def _compute_partner_info(self):
        """Load receivable/payable account info for the selected partner.

        This data is used by the UI to offer account switching suggestions
        when a partner is set on a manual entry. Only computed for the
        entry currently being edited (matching the form_index).
        """
        for entry in self:
            # Set defaults
            entry.partner_receivable_amount = 0.0
            entry.partner_payable_amount = 0.0
            entry.partner_currency_id = None
            entry.partner_receivable_account_id = None
            entry.partner_payable_account_id = None

            # Only compute for the actively edited entry with a partner
            if not entry.partner_id or entry.index != entry.wizard_id.form_index:
                continue

            entry.partner_currency_id = entry.company_currency_id
            # Use the widget's company so property fields resolve per company.
            scoped_partner = entry.partner_id.with_company(entry.wizard_id.company_id)
            posted_filter = [('parent_state', '=', 'posted'), ('partner_id', '=', scoped_partner.id)]

            # Receivable info
            recv_account = scoped_partner.property_account_receivable_id
            entry.partner_receivable_account_id = recv_account
            if recv_account:
                recv_domain = expression.AND([posted_filter, [('account_id', '=', recv_account.id)]])
                recv_data = self.env['account.move.line']._read_group(
                    domain=recv_domain,
                    aggregates=['amount_residual:sum'],
                )
                # NOTE(review): the aggregated sum may be None when no lines
                # match the domain — confirm the Monetary field tolerates it.
                entry.partner_receivable_amount = recv_data[0][0]

            # Payable info
            pay_account = scoped_partner.property_account_payable_id
            entry.partner_payable_account_id = pay_account
            if pay_account:
                pay_domain = expression.AND([posted_filter, [('account_id', '=', pay_account.id)]])
                pay_data = self.env['account.move.line']._read_group(
                    domain=pay_domain,
                    aggregates=['amount_residual:sum'],
                )
                entry.partner_payable_amount = pay_data[0][0]
|
||||
|
||||
@api.depends('flag')
|
||||
def _compute_bank_account(self):
|
||||
"""Show the bank account number on the liquidity entry only."""
|
||||
for entry in self:
|
||||
if entry.flag == 'liquidity':
|
||||
stmt_line = entry.wizard_id.st_line_id
|
||||
displayed_account = stmt_line.partner_bank_id.display_name or stmt_line.account_number
|
||||
entry.bank_account = displayed_account or None
|
||||
else:
|
||||
entry.bank_account = None
|
||||
|
||||
    @api.depends('wizard_id.form_index', 'amount_currency', 'balance')
    def _compute_suggestion(self):
        """Build contextual suggestion text for matched journal items.

        When a 'new_aml' entry is being edited, this generates guidance text
        explaining the reconciliation impact and offering a quick action button
        for full or partial matching.
        """
        for entry in self:
            # Default to no suggestion; filled only for the edited 'new_aml'.
            entry.suggestion_html = None
            entry.suggestion_amount_currency = None
            entry.suggestion_balance = None

            # Only generate suggestions for actively edited matched entries
            if entry.flag != 'new_aml' or entry.index != entry.wizard_id.form_index:
                continue

            source_item = entry.source_aml_id
            parent_widget = entry.wizard_id
            original_residual = abs(source_item.amount_residual_currency)
            post_match_residual = abs(source_item.amount_residual_currency + entry.amount_currency)
            matched_portion = original_residual - post_match_residual
            fully_consumed = source_item.currency_id.is_zero(post_match_residual)
            belongs_to_invoice = source_item.move_id.is_invoice(include_receipts=True)

            # Build the clickable document reference
            # (Markup % interpolation HTML-escapes the plain-string doc name.)
            doc_link_html = markupsafe.Markup(
                '<button name="action_redirect_to_move"'
                ' class="btn btn-link p-0 align-baseline fst-italic">'
                '%(doc_name)s</button>'
            ) % {'doc_name': source_item.move_id.display_name}

            # Shared template parameters
            tpl_params = {
                'amount': formatLang(self.env, matched_portion, currency_obj=source_item.currency_id),
                'open_amount': formatLang(self.env, original_residual, currency_obj=source_item.currency_id),
                'display_name_html': doc_link_html,
                'btn_start': markupsafe.Markup(
                    '<button name="action_apply_line_suggestion"'
                    ' class="btn btn-link p-0 align-baseline fst-italic">'
                ),
                'btn_end': markupsafe.Markup('</button>'),
            }

            if fully_consumed:
                # Full match scenario
                if belongs_to_invoice:
                    status_msg = _(
                        "The invoice %(display_name_html)s with an open amount of"
                        " %(open_amount)s will be entirely paid by the transaction."
                    )
                else:
                    status_msg = _(
                        "%(display_name_html)s with an open amount of %(open_amount)s"
                        " will be fully reconciled by the transaction."
                    )
                suggestion_lines = [status_msg]

                # Check if a partial would be more appropriate
                partial_data = parent_widget._lines_check_partial_amount(entry)
                if partial_data:
                    if belongs_to_invoice:
                        partial_msg = _(
                            "You might want to record a"
                            " %(btn_start)spartial payment%(btn_end)s."
                        )
                    else:
                        partial_msg = _(
                            "You might want to make a"
                            " %(btn_start)spartial reconciliation%(btn_end)s instead."
                        )
                    suggestion_lines.append(partial_msg)
                    entry.suggestion_amount_currency = partial_data['amount_currency']
                    entry.suggestion_balance = partial_data['balance']
            else:
                # Partial match scenario - suggest full reconciliation
                if belongs_to_invoice:
                    suggestion_lines = [
                        _(
                            "The invoice %(display_name_html)s with an open amount of"
                            " %(open_amount)s will be reduced by %(amount)s."
                        ),
                        _(
                            "You might want to set the invoice as"
                            " %(btn_start)sfully paid%(btn_end)s."
                        ),
                    ]
                else:
                    suggestion_lines = [
                        _(
                            "%(display_name_html)s with an open amount of"
                            " %(open_amount)s will be reduced by %(amount)s."
                        ),
                        _(
                            "You might want to %(btn_start)sfully reconcile%(btn_end)s"
                            " the document."
                        ),
                    ]
                entry.suggestion_amount_currency = entry.source_amount_currency
                entry.suggestion_balance = entry.source_balance

            rendered_lines = markupsafe.Markup('<br/>').join(
                msg % tpl_params for msg in suggestion_lines
            )
            entry.suggestion_html = (
                markupsafe.Markup('<div class="text-muted">%s</div>') % rendered_lines
            )
|
||||
|
||||
@api.depends('flag')
|
||||
def _compute_ref_narration(self):
|
||||
"""Populate ref and narration from the statement line for liquidity entries."""
|
||||
for entry in self:
|
||||
if entry.flag == 'liquidity':
|
||||
entry.ref = entry.wizard_id.st_line_id.ref
|
||||
entry.narration = entry.wizard_id.st_line_id.narration
|
||||
else:
|
||||
entry.ref = None
|
||||
entry.narration = None
|
||||
|
||||
# =========================================================================
|
||||
# HELPERS
|
||||
# =========================================================================
|
||||
|
||||
def _get_aml_values(self, **kwargs):
|
||||
"""Convert this widget entry into values suitable for creating journal items.
|
||||
|
||||
Returns a dictionary of field values that can be passed to
|
||||
Command.create() for account.move.line records during validation.
|
||||
"""
|
||||
self.ensure_one()
|
||||
vals = {
|
||||
'name': self.name,
|
||||
'account_id': self.account_id.id,
|
||||
'currency_id': self.currency_id.id,
|
||||
'amount_currency': self.amount_currency,
|
||||
'balance': self.debit - self.credit,
|
||||
'reconcile_model_id': self.reconcile_model_id.id,
|
||||
'analytic_distribution': self.analytic_distribution,
|
||||
'tax_repartition_line_id': self.tax_repartition_line_id.id,
|
||||
'tax_ids': [Command.set(self.tax_ids.ids)],
|
||||
'tax_tag_ids': [Command.set(self.tax_tag_ids.ids)],
|
||||
'group_tax_id': self.group_tax_id.id,
|
||||
}
|
||||
vals.update(kwargs)
|
||||
if self.flag == 'early_payment':
|
||||
vals['display_type'] = 'epd'
|
||||
return vals
|
||||
453
Fusion Accounting/models/bank_reconciliation_report.py
Normal file
453
Fusion Accounting/models/bank_reconciliation_report.py
Normal file
@@ -0,0 +1,453 @@
|
||||
# Fusion Accounting - Bank Reconciliation Report Handler
|
||||
# Statement balance tracking, outstanding items, and miscellaneous ops
|
||||
|
||||
import logging
|
||||
from datetime import date
|
||||
|
||||
from odoo import models, fields, _
|
||||
from odoo.exceptions import UserError
|
||||
from odoo.tools import SQL
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionBankReconciliationHandler(models.AbstractModel):
    """Custom handler for the bank reconciliation report. Computes
    last-statement balances, unreconciled items, outstanding
    payments/receipts, and miscellaneous bank journal operations."""

    _name = 'account.bank.reconciliation.report.handler'
    _inherit = 'account.report.custom.handler'
    _description = 'Bank Reconciliation Report Custom Handler'

    # ================================================================
    # OPTIONS
    # ================================================================

    def _custom_options_initializer(self, report, options, previous_options):
        """Initialize report options: pick the bank journal to report on and
        hide the multi-currency columns when the user lacks the groups.

        Journal selection precedence: context (opened from a journal) >
        previously-selected journal > first journal of type 'bank'.
        """
        super()._custom_options_initializer(report, options, previous_options=previous_options)
        options['ignore_totals_below_sections'] = True

        if 'active_id' in self.env.context and self.env.context.get('active_model') == 'account.journal':
            options['bank_reconciliation_report_journal_id'] = self.env.context['active_id']
        elif 'bank_reconciliation_report_journal_id' in previous_options:
            options['bank_reconciliation_report_journal_id'] = previous_options['bank_reconciliation_report_journal_id']
        else:
            # Fallback: first bank journal found (search order is the model default).
            options['bank_reconciliation_report_journal_id'] = (
                self.env['account.journal'].search([('type', '=', 'bank')], limit=1).id
            )

        # Currency columns are only shown to multi-currency + developer-mode users.
        has_multicur = (
            self.env.user.has_group('base.group_multi_currency')
            and self.env.user.has_group('base.group_no_one')
        )
        if not has_multicur:
            options['columns'] = [
                c for c in options['columns']
                if c['expression_label'] not in ('amount_currency', 'currency')
            ]

    # ================================================================
    # GETTERS
    # ================================================================

    def _get_bank_journal_and_currencies(self, options):
        """Return ``(journal, journal_currency, company_currency)`` for the
        journal selected in *options*. The journal currency falls back to the
        company currency when the journal has none set."""
        jnl = self.env['account.journal'].browse(
            options.get('bank_reconciliation_report_journal_id'),
        )
        co_cur = jnl.company_id.currency_id
        jnl_cur = jnl.currency_id or co_cur
        return jnl, jnl_cur, co_cur

    # ================================================================
    # RESULT BUILDER
    # ================================================================

    def _build_custom_engine_result(
        self, date=None, label=None, amount_currency=None,
        amount_currency_currency_id=None, currency=None,
        amount=0, amount_currency_id=None, has_sublines=False,
    ):
        """Build the dict shape shared by every custom engine below; all
        keys are always present so expression labels resolve uniformly."""
        return {
            'date': date,
            'label': label,
            'amount_currency': amount_currency,
            'amount_currency_currency_id': amount_currency_currency_id,
            'currency': currency,
            'amount': amount,
            'amount_currency_id': amount_currency_id,
            'has_sublines': has_sublines,
        }

    # ================================================================
    # CUSTOM ENGINES
    # ================================================================

    def _report_custom_engine_forced_currency_amount(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Engine that only forces the journal currency on a line (no amount)."""
        _j, jcur, _c = self._get_bank_journal_and_currencies(options)
        return self._build_custom_engine_result(amount_currency_id=jcur.id)

    def _report_custom_engine_unreconciled_last_statement_receipts(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Unreconciled inflow lines belonging to the last bank statement."""
        return self._common_st_line_engine(options, 'receipts', current_groupby, True)

    def _report_custom_engine_unreconciled_last_statement_payments(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Unreconciled outflow lines belonging to the last bank statement."""
        return self._common_st_line_engine(options, 'payments', current_groupby, True)

    def _report_custom_engine_unreconciled_receipts(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Unreconciled inflow lines that are not attached to any statement."""
        return self._common_st_line_engine(options, 'receipts', current_groupby, False)

    def _report_custom_engine_unreconciled_payments(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Unreconciled outflow lines that are not attached to any statement."""
        return self._common_st_line_engine(options, 'payments', current_groupby, False)

    def _report_custom_engine_outstanding_receipts(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Receipts sitting on the journal's outstanding payment accounts."""
        return self._outstanding_engine(options, 'receipts', current_groupby)

    def _report_custom_engine_outstanding_payments(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Payments sitting on the journal's outstanding payment accounts."""
        return self._outstanding_engine(options, 'payments', current_groupby)

    def _report_custom_engine_misc_operations(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Sum of miscellaneous move lines hitting the bank account outside
        any statement line (see ``_get_bank_miscellaneous_move_lines_domain``)."""
        report = self.env['account.report'].browse(options['report_id'])
        report._check_groupby_fields([current_groupby] if current_groupby else [])
        jnl, jcur, _c = self._get_bank_journal_and_currencies(options)
        misc_domain = self._get_bank_miscellaneous_move_lines_domain(options, jnl)
        # NOTE(review): with a non-empty ``current_groupby``, ``_read_group``
        # returns (group_value, aggregate) rows, so ``[-1][0]`` would be the
        # last group's key rather than a balance — confirm this engine is only
        # ever called ungrouped.
        misc_total = self.env["account.move.line"]._read_group(
            domain=misc_domain or [],
            groupby=current_groupby or [],
            aggregates=['balance:sum'],
        )[-1][0]
        return self._build_custom_engine_result(amount=misc_total or 0, amount_currency_id=jcur.id)

    def _report_custom_engine_last_statement_balance_amount(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Ending balance (``balance_end_real``) of the last bank statement."""
        if current_groupby:
            raise UserError(_("Last-statement balance does not support groupby."))
        jnl, jcur, _c = self._get_bank_journal_and_currencies(options)
        last_stmt = self._get_last_bank_statement(jnl, options)
        return self._build_custom_engine_result(amount=last_stmt.balance_end_real, amount_currency_id=jcur.id)

    def _report_custom_engine_transaction_without_statement_amount(self, expressions, options, date_scope, current_groupby, next_groupby, offset=0, limit=None, warnings=None):
        """Total of statement lines (reconciled or not) without a statement."""
        return self._common_st_line_engine(options, 'all', current_groupby, False, unreconciled=False)

    # ================================================================
    # SHARED ENGINES
    # ================================================================

    def _common_st_line_engine(self, options, direction, current_groupby, from_last_stmt, unreconciled=True):
        """Query bank statement lines on the journal's default account.

        :param direction: 'receipts' (amount > 0), 'payments' (amount < 0) or
            anything else for both directions.
        :param from_last_stmt: restrict to the last statement's lines when
            True, otherwise to lines with no statement at all.
        :param unreconciled: when True, only unreconciled lines are kept and
            the residual amount is aggregated; otherwise the full amount is.
        :return: a result dict, or ``[(grouping_key, result_dict), ...]``
            when *current_groupby* is set.
        """
        jnl, jcur, _ccur = self._get_bank_journal_and_currencies(options)
        if not jnl:
            return self._build_custom_engine_result()

        report = self.env['account.report'].browse(options['report_id'])
        report._check_groupby_fields([current_groupby] if current_groupby else [])

        def _assemble(rows):
            # Builds one result dict from the fetched rows; with an 'id'
            # groupby each group holds exactly one statement line.
            if current_groupby == 'id':
                r = rows[0]
                fcur = self.env['res.currency'].browse(r['foreign_currency_id'])
                # Rate converting foreign-currency amounts back to the
                # journal currency; 0 guards division by a zero amount.
                rate = (r['amount'] / r['amount_currency']) if r['amount_currency'] else 0
                return self._build_custom_engine_result(
                    date=r['date'] or None,
                    label=r['payment_ref'] or r['ref'] or '/',
                    amount_currency=-r['amount_residual'] if r['foreign_currency_id'] else None,
                    amount_currency_currency_id=fcur.id if r['foreign_currency_id'] else None,
                    currency=fcur.display_name if r['foreign_currency_id'] else None,
                    amount=-r['amount_residual'] * rate if r['amount_residual'] else None,
                    amount_currency_id=jcur.id,
                )
            total = 0
            for r in rows:
                rate = (r['amount'] / r['amount_currency']) if r['foreign_currency_id'] and r['amount_currency'] else 1
                # Residual (sign flipped) for unreconciled mode, raw amount otherwise.
                total += -r.get('amount_residual', 0) * rate if unreconciled else r.get('amount', 0)
            return self._build_custom_engine_result(amount=total, amount_currency_id=jcur.id, has_sublines=bool(rows))

        qry = report._get_report_query(options, 'strict_range', domain=[
            ('journal_id', '=', jnl.id),
            ('account_id', '=', jnl.default_account_id.id),
        ])

        if from_last_stmt:
            last_stmt_id = self._get_last_bank_statement(jnl, options).id
            if last_stmt_id:
                stmt_cond = SQL("st_line.statement_id = %s", last_stmt_id)
            else:
                # No statement at all: short-circuit with an empty result.
                return self._compute_result([], current_groupby, _assemble)
        else:
            stmt_cond = SQL("st_line.statement_id IS NULL")

        if direction == 'receipts':
            amt_cond = SQL("AND st_line.amount > 0")
        elif direction == 'payments':
            amt_cond = SQL("AND st_line.amount < 0")
        else:
            amt_cond = SQL("")

        full_sql = SQL("""
            SELECT %(sel_gb)s,
                   st_line.id, move.name, move.ref, move.date,
                   st_line.payment_ref, st_line.amount, st_line.amount_residual,
                   st_line.amount_currency, st_line.foreign_currency_id
              FROM %(tbl)s
              JOIN account_bank_statement_line st_line ON st_line.move_id = account_move_line.move_id
              JOIN account_move move ON move.id = st_line.move_id
             WHERE %(where)s
                   %(unrec)s %(amt_cond)s
                   AND %(stmt_cond)s
          GROUP BY %(gb)s, st_line.id, move.id
        """,
            sel_gb=SQL("%s AS grouping_key", SQL.identifier('account_move_line', current_groupby)) if current_groupby else SQL('null'),
            tbl=qry.from_clause,
            where=qry.where_clause,
            unrec=SQL("AND NOT st_line.is_reconciled") if unreconciled else SQL(""),
            amt_cond=amt_cond,
            stmt_cond=stmt_cond,
            gb=SQL.identifier('account_move_line', current_groupby) if current_groupby else SQL('st_line.id'),
        )
        self.env.cr.execute(full_sql)
        return self._compute_result(self.env.cr.dictfetchall(), current_groupby, _assemble)

    def _outstanding_engine(self, options, direction, current_groupby):
        """Query move lines on the journal's outstanding payment accounts
        that are not fully reconciled, from the beginning of time up to the
        report date, split by *direction* ('receipts'/'payments').

        Amounts already expressed in the journal currency are used as-is;
        anything else is converted from company currency at the report date.
        """
        jnl, jcur, ccur = self._get_bank_journal_and_currencies(options)
        if not jnl:
            return self._build_custom_engine_result()

        report = self.env['account.report'].browse(options['report_id'])
        report._check_groupby_fields([current_groupby] if current_groupby else [])

        def _assemble(rows):
            if current_groupby == 'id':
                r = rows[0]
                # Conversion needed unless the line currency is the journal currency.
                convert = not (jcur and r['currency_id'] == jcur.id)
                # Reconcilable accounts track residuals; others use raw amounts.
                amt_cur = r['amount_residual_currency'] if r['is_account_reconcile'] else r['amount_currency']
                bal = r['amount_residual'] if r['is_account_reconcile'] else r['balance']
                fcur = self.env['res.currency'].browse(r['currency_id'])
                return self._build_custom_engine_result(
                    date=r['date'] or None,
                    label=r['ref'] or None,
                    amount_currency=amt_cur if convert else None,
                    amount_currency_currency_id=fcur.id if convert else None,
                    currency=fcur.display_name if convert else None,
                    amount=ccur._convert(bal, jcur, jnl.company_id, options['date']['date_to']) if convert else amt_cur,
                    amount_currency_id=jcur.id,
                )
            total = 0
            for r in rows:
                convert = not (jcur and r['currency_id'] == jcur.id)
                if convert:
                    bal = r['amount_residual'] if r['is_account_reconcile'] else r['balance']
                    total += ccur._convert(bal, jcur, jnl.company_id, options['date']['date_to'])
                else:
                    total += r['amount_residual_currency'] if r['is_account_reconcile'] else r['amount_currency']
            return self._build_custom_engine_result(amount=total, amount_currency_id=jcur.id, has_sublines=bool(rows))

        accts = jnl._get_journal_inbound_outstanding_payment_accounts() + jnl._get_journal_outbound_outstanding_payment_accounts()
        qry = report._get_report_query(options, 'from_beginning', domain=[
            ('journal_id', '=', jnl.id),
            ('account_id', 'in', accts.ids),
            ('full_reconcile_id', '=', False),
            ('amount_residual_currency', '!=', 0.0),
        ])

        full_sql = SQL("""
            SELECT %(sel_gb)s,
                   account_move_line.account_id, account_move_line.payment_id,
                   account_move_line.move_id, account_move_line.currency_id,
                   account_move_line.move_name AS name, account_move_line.ref,
                   account_move_line.date, account.reconcile AS is_account_reconcile,
                   SUM(account_move_line.amount_residual) AS amount_residual,
                   SUM(account_move_line.balance) AS balance,
                   SUM(account_move_line.amount_residual_currency) AS amount_residual_currency,
                   SUM(account_move_line.amount_currency) AS amount_currency
              FROM %(tbl)s
              JOIN account_account account ON account.id = account_move_line.account_id
             WHERE %(where)s AND %(dir_cond)s
          GROUP BY %(gb)s, account_move_line.account_id, account_move_line.payment_id,
                   account_move_line.move_id, account_move_line.currency_id,
                   account_move_line.move_name, account_move_line.ref,
                   account_move_line.date, account.reconcile
        """,
            sel_gb=SQL("%s AS grouping_key", SQL.identifier('account_move_line', current_groupby)) if current_groupby else SQL('null'),
            tbl=qry.from_clause,
            where=qry.where_clause,
            dir_cond=SQL("account_move_line.balance > 0") if direction == 'receipts' else SQL("account_move_line.balance < 0"),
            gb=SQL.identifier('account_move_line', current_groupby) if current_groupby else SQL('account_move_line.account_id'),
        )
        self.env.cr.execute(full_sql)
        return self._compute_result(self.env.cr.dictfetchall(), current_groupby, _assemble)

    def _compute_result(self, rows, current_groupby, builder):
        """Apply *builder* to the whole row set (no groupby) or to each group
        keyed by the SQL ``grouping_key`` column (groupby present)."""
        if not current_groupby:
            return builder(rows)
        grouped = {}
        for r in rows:
            grouped.setdefault(r['grouping_key'], []).append(r)
        return [(k, builder(v)) for k, v in grouped.items()]

    # ================================================================
    # POST-PROCESSING & WARNINGS
    # ================================================================

    def _custom_line_postprocessor(self, report, options, lines):
        """Relabel/restyle the generated report lines: rename the bank
        balance line after the journal's default account, bold the section
        total lines, and shorten move-line names to their first token."""
        lines = super()._custom_line_postprocessor(report, options, lines)
        jnl, _jc, _cc = self._get_bank_journal_and_currencies(options)
        if not jnl:
            return lines

        last_stmt = self._get_last_bank_statement(jnl, options)
        for ln in lines:
            line_id = report._get_res_id_from_line_id(ln['id'], 'account.report.line')
            code = self.env['account.report.line'].browse(line_id).code

            if code == "balance_bank":
                ln['name'] = _("Balance of '%s'", jnl.default_account_id.display_name)
            if code == "last_statement_balance":
                ln['class'] = 'o_bold_tr'
                if last_stmt:
                    # Show the statement's name in the second column and let
                    # the user audit it (opens the reconciliation widget).
                    ln['columns'][1].update({'name': last_stmt.display_name, 'auditable': True})
            if code in ("transaction_without_statement", "misc_operations"):
                ln['class'] = 'o_bold_tr'

            mdl, _mid = report._get_model_info_from_id(ln['id'])
            if mdl == "account.move.line":
                # Keep only the first whitespace-separated token of the name.
                ln['name'] = ln['name'].split()[0]

        return lines

    def _customize_warnings(self, report, options, all_column_groups_expression_totals, warnings):
        """Populate the report *warnings* dict with: an unexplained
        GL-vs-statement balance difference, inconsistent statements, and the
        presence of miscellaneous move lines on the bank account."""
        jnl, jcur, _cc = self._get_bank_journal_and_currencies(options)
        bad_stmts = self._get_inconsistent_statements(options, jnl).ids
        misc_domain = self._get_bank_miscellaneous_move_lines_domain(options, jnl)
        has_misc = misc_domain and bool(self.env['account.move.line'].search_count(misc_domain, limit=1))
        last_stmt, gl_bal, end_bal, diff, mismatch = self._compute_journal_balances(report, options, jnl, jcur)

        if warnings is not None:
            if last_stmt and mismatch:
                warnings['fusion_accounting.journal_balance'] = {
                    'alert_type': 'warning',
                    'general_ledger_amount': gl_bal,
                    'last_bank_statement_amount': end_bal,
                    'unexplained_difference': diff,
                }
            if bad_stmts:
                warnings['fusion_accounting.inconsistent_statement_warning'] = {'alert_type': 'warning', 'args': bad_stmts}
            if has_misc:
                warnings['fusion_accounting.has_bank_miscellaneous_move_lines'] = {
                    'alert_type': 'warning',
                    'args': jnl.default_account_id.display_name,
                }

    # ================================================================
    # BALANCE COMPUTATION
    # ================================================================

    def _compute_journal_balances(self, report, options, journal, jcur):
        """Return ``(last_statement, gl_balance, end_balance, difference,
        mismatch)`` with the three amounts pre-formatted as monetary strings
        in the journal currency."""
        domain = report._get_options_domain(options, 'from_beginning')
        gl_raw = journal._get_journal_bank_account_balance(domain=domain)[0]
        last_stmt, end_raw, diff_raw, mismatch = self._compute_balances(options, journal, gl_raw, jcur)
        fmt = lambda v: report.format_value(options, v, format_params={'currency_id': jcur.id}, figure_type='monetary')
        return last_stmt, fmt(gl_raw), fmt(end_raw), fmt(diff_raw), mismatch

    def _compute_balances(self, options, journal, gl_balance, report_currency):
        """Compare *gl_balance* against the last statement's balance as of
        the report date.

        The statement-side balance is recomputed as ``balance_start`` plus
        the sum of its lines dated on or before the report date, so a
        statement straddling the cutoff is handled correctly.
        """
        rpt_date = fields.Date.from_string(options['date']['date_to'])
        last_stmt = self._get_last_bank_statement(journal, options)
        end_bal = diff = 0
        mismatch = False
        if last_stmt:
            lines_in_range = last_stmt.line_ids.filtered(lambda l: l.date <= rpt_date)
            end_bal = last_stmt.balance_start + sum(lines_in_range.mapped('amount'))
            diff = gl_balance - end_bal
            # Use currency rounding to decide whether the difference matters.
            mismatch = not report_currency.is_zero(diff)
        return last_stmt, end_bal, diff, mismatch

    # ================================================================
    # STATEMENT HELPERS
    # ================================================================

    def _get_last_bank_statement(self, journal, options):
        """Statement owning the most recent statement line of *journal*
        dated on or before the report date (empty recordset when none)."""
        rpt_date = fields.Date.from_string(options['date']['date_to'])
        last_line = self.env['account.bank.statement.line'].search([
            ('journal_id', '=', journal.id),
            ('statement_id', '!=', False),
            ('date', '<=', rpt_date),
        ], order='date desc, id desc', limit=1)
        return last_line.statement_id

    def _get_inconsistent_statements(self, options, journal):
        """Statements of *journal* up to the report date flagged invalid
        (``is_valid`` False — presumably a balance-continuity check on the
        statement model; confirm against ``account.bank.statement``)."""
        return self.env['account.bank.statement'].search([
            ('journal_id', '=', journal.id),
            ('date', '<=', options['date']['date_to']),
            ('is_valid', '=', False),
        ])

    def _get_bank_miscellaneous_move_lines_domain(self, options, journal):
        """Domain matching move lines on the journal's default bank account
        that have no statement line, excluding locked periods and the
        company's opening move. Returns ``None`` when the journal has no
        default account (callers must handle that)."""
        if not journal.default_account_id:
            return None
        report = self.env['account.report'].browse(options['report_id'])
        domain = [
            ('account_id', '=', journal.default_account_id.id),
            ('statement_line_id', '=', False),
            *report._get_options_domain(options, 'from_beginning'),
        ]
        lock_date = journal.company_id._get_user_fiscal_lock_date(journal)
        if lock_date != date.min:
            domain.append(('date', '>', lock_date))
        if journal.company_id.account_opening_move_id:
            domain.append(('move_id', '!=', journal.company_id.account_opening_move_id.id))
        return domain

    # ================================================================
    # AUDIT ACTIONS
    # ================================================================

    def action_audit_cell(self, options, params):
        """Dispatch a cell-audit click to the relevant action based on the
        report line's code; fall back to the report's default audit."""
        rpt_line = self.env['account.report.line'].browse(params['report_line_id'])
        if rpt_line.code == "balance_bank":
            return self.action_redirect_to_general_ledger(options)
        elif rpt_line.code == "misc_operations":
            return self.open_bank_miscellaneous_move_lines(options)
        elif rpt_line.code == "last_statement_balance":
            return self.action_redirect_to_bank_statement_widget(options)
        return rpt_line.report_id.action_audit_cell(options, params)

    def action_redirect_to_general_ledger(self, options):
        """Open the general ledger report with the current options."""
        gl_action = self.env['ir.actions.actions']._for_xml_id(
            'fusion_accounting.action_account_report_general_ledger',
        )
        gl_action['params'] = {'options': options, 'ignore_session': True}
        return gl_action

    def action_redirect_to_bank_statement_widget(self, options):
        """Open the bank reconciliation widget filtered on the last statement."""
        jnl = self.env['account.journal'].browse(
            options.get('bank_reconciliation_report_journal_id'),
        )
        last_stmt = self._get_last_bank_statement(jnl, options)
        return self.env['account.bank.statement.line']._action_open_bank_reconciliation_widget(
            default_context={'create': False, 'search_default_statement_id': last_stmt.id},
            name=last_stmt.display_name,
        )

    def open_bank_miscellaneous_move_lines(self, options):
        """List-view action over the journal's miscellaneous move lines."""
        jnl = self.env['account.journal'].browse(
            options['bank_reconciliation_report_journal_id'],
        )
        return {
            'name': _('Journal Items'),
            'type': 'ir.actions.act_window',
            'res_model': 'account.move.line',
            'view_type': 'list',
            'view_mode': 'list',
            'target': 'current',
            'views': [(self.env.ref('account.view_move_line_tree').id, 'list')],
            'domain': self.env['account.bank.reconciliation.report.handler']._get_bank_miscellaneous_move_lines_domain(options, jnl),
        }

    def bank_reconciliation_report_open_inconsistent_statements(self, options, params=None):
        """Open the inconsistent statements reported in the warning: form
        view for a single statement, list view otherwise."""
        stmt_ids = params['args']
        action = {
            'name': _("Inconsistent Statements"),
            'type': 'ir.actions.act_window',
            'res_model': 'account.bank.statement',
        }
        if len(stmt_ids) == 1:
            action.update({'view_mode': 'form', 'res_id': stmt_ids[0], 'views': [(False, 'form')]})
        else:
            action.update({'view_mode': 'list', 'domain': [('id', 'in', stmt_ids)], 'views': [(False, 'list')]})
        return action
|
||||
540
Fusion Accounting/models/bank_statement_import_camt.py
Normal file
540
Fusion Accounting/models/bank_statement_import_camt.py
Normal file
@@ -0,0 +1,540 @@
|
||||
# Fusion Accounting - CAMT.053 Bank Statement Parser
|
||||
# Original implementation for ISO 20022 camt.053 bank-to-customer statement
|
||||
# Based on the published ISO 20022 message definitions
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from xml.etree import ElementTree
|
||||
|
||||
from odoo import _, models
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionCAMTParser:
|
||||
"""Standalone parser for ISO 20022 CAMT.053 XML bank statements.
|
||||
|
||||
CAMT.053 (Bank-to-Customer Statement) is the international standard
|
||||
for electronic bank statements. This parser supports the following
|
||||
schema versions:
|
||||
|
||||
* ``camt.053.001.02`` — original version
|
||||
* ``camt.053.001.03`` through ``camt.053.001.08`` — subsequent
|
||||
revisions (structurally compatible for the fields we consume)
|
||||
|
||||
The parser auto-detects the XML namespace from the document root.
|
||||
|
||||
This is an **original** implementation written from the published
|
||||
ISO 20022 message definitions — it is not derived from Odoo Enterprise.
|
||||
"""
|
||||
|
||||
# Namespace prefixes we recognise (base URI without version suffix)
|
||||
_CAMT_NS_BASE = 'urn:iso:std:iso:20022:tech:xsd:camt.053.001.'
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Public API
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
def parse_camt(self, data_file):
    """Parse a CAMT.053 XML document into a list of statement dicts
    for the Fusion Accounting import pipeline.

    Each statement dict carries:
        - ``name``            : statement identification (from ``<Id>``)
        - ``date``            : creation date
        - ``balance_start``   : opening booked balance
        - ``balance_end_real``: closing booked balance
        - ``currency_code``   : ISO 4217 currency
        - ``account_number``  : IBAN or other account identifier
        - ``transactions``    : list of transaction dicts

    Each transaction dict carries:
        - ``date``             : booking date
        - ``payment_ref``      : combined reference / remittance info
        - ``ref``              : end-to-end reference or instruction id
        - ``amount``           : signed float (negative for debits)
        - ``unique_import_id`` : generated unique key
        - ``partner_name``     : debtor or creditor name
        - ``account_number``   : debtor/creditor IBAN
    """
    document_root = self._parse_xml(self._to_bytes(data_file))
    namespace_map = self._detect_namespace(document_root)
    return self._extract_statements(document_root, namespace_map)
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Input handling
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _to_bytes(data_file):
|
||||
"""Ensure *data_file* is bytes for XML parsing."""
|
||||
if isinstance(data_file, str):
|
||||
return data_file.encode('utf-8')
|
||||
return data_file
|
||||
|
||||
@staticmethod
|
||||
def _parse_xml(raw_xml):
|
||||
"""Parse raw XML bytes and return the root Element."""
|
||||
try:
|
||||
return ElementTree.fromstring(raw_xml)
|
||||
except ElementTree.ParseError as exc:
|
||||
raise UserError(
|
||||
_("Failed to parse CAMT.053 XML: %s", str(exc))
|
||||
) from exc
|
||||
|
||||
def _detect_namespace(self, root):
|
||||
"""Auto-detect the CAMT.053 namespace from the document root.
|
||||
|
||||
Returns a dict ``{'ns': 'urn:...'}`` suitable for passing to
|
||||
``Element.find()`` / ``Element.findall()``."""
|
||||
tag = root.tag
|
||||
if '}' in tag:
|
||||
ns_uri = tag.split('}')[0].lstrip('{')
|
||||
else:
|
||||
ns_uri = ''
|
||||
|
||||
if ns_uri and not ns_uri.startswith(self._CAMT_NS_BASE):
|
||||
_log.warning(
|
||||
"Unexpected CAMT namespace: %s (expected %s*)",
|
||||
ns_uri, self._CAMT_NS_BASE,
|
||||
)
|
||||
|
||||
return {'ns': ns_uri} if ns_uri else {}
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Convenience helpers for namespaced tag access
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _tag(ns_map, local_name):
|
||||
"""Build a namespaced tag string for ElementTree lookups."""
|
||||
ns = ns_map.get('ns', '')
|
||||
if ns:
|
||||
return f'{{{ns}}}{local_name}'
|
||||
return local_name
|
||||
|
||||
def _find(self, parent, ns, path):
|
||||
"""Find the first child element matching a ``/``-separated
|
||||
*path* of local tag names."""
|
||||
current = parent
|
||||
for part in path.split('/'):
|
||||
if current is None:
|
||||
return None
|
||||
current = current.find(self._tag(ns, part))
|
||||
return current
|
||||
|
||||
def _find_text(self, parent, ns, path):
|
||||
"""Return the stripped text of the element at *path*, or ``None``."""
|
||||
el = self._find(parent, ns, path)
|
||||
if el is not None and el.text:
|
||||
return el.text.strip()
|
||||
return None
|
||||
|
||||
def _findall(self, parent, ns, local_name):
|
||||
"""Return all direct children matching *local_name*."""
|
||||
return parent.findall(self._tag(ns, local_name))
|
||||
|
||||
def _iter(self, parent, ns, local_name):
|
||||
"""Iterate over all descendant elements matching *local_name*."""
|
||||
return parent.iter(self._tag(ns, local_name))
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Statement-level extraction
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
def _extract_statements(self, root, ns):
    """Collect every ``<Stmt>`` element in the document as a statement
    dict, raising a ``UserError`` when the file contains none.

    CAMT.053 structure: Document > BkToCstmrStmt > Stmt (repeating).
    """
    candidates = (
        self._extract_single_statement(stmt_el, ns)
        for stmt_el in self._iter(root, ns, 'Stmt')
    )
    parsed = [stmt for stmt in candidates if stmt]

    if not parsed:
        raise UserError(
            _("No statements found in the CAMT.053 file.")
        )
    return parsed
|
||||
|
||||
def _extract_single_statement(self, stmt_el, ns):
    """Extract one ``<Stmt>`` element into a statement dict.

    Reads the statement id, creation date, account identification,
    OPBD/PRCD and CLBD/CLAV balances, and all ``<Ntry>`` transactions.
    Relies on ``_parse_camt_datetime`` / ``_parse_camt_date`` /
    ``_safe_float`` / ``_extract_entry`` defined elsewhere in this class.
    """
    # Statement ID
    stmt_id = self._find_text(stmt_el, ns, 'Id') or ''

    # Creation date/time
    creation_dt = self._find_text(stmt_el, ns, 'CreDtTm')
    stmt_date = self._parse_camt_datetime(creation_dt)

    # Account identification
    acct_el = self._find(stmt_el, ns, 'Acct')
    account_number = ''
    currency_code = None

    if acct_el is not None:
        # Try IBAN first, then generic Id/Othr/Id
        iban = self._find_text(acct_el, ns, 'Id/IBAN')
        if iban:
            account_number = iban
        else:
            account_number = self._find_text(acct_el, ns, 'Id/Othr/Id') or ''

        # Currency from Ccy element or attribute
        ccy_text = self._find_text(acct_el, ns, 'Ccy')
        if ccy_text:
            currency_code = ccy_text.upper()

    # Balances — look for OPBD (opening booked) and CLBD (closing booked)
    balance_start = 0.0
    balance_end = 0.0

    for bal_el in self._findall(stmt_el, ns, 'Bal'):
        bal_type_el = self._find(bal_el, ns, 'Tp/CdOrPrtry/Cd')
        bal_code = bal_type_el.text.strip().upper() if (bal_type_el is not None and bal_type_el.text) else ''

        amt_el = self._find(bal_el, ns, 'Amt')
        amt_val = 0.0
        if amt_el is not None and amt_el.text:
            amt_val = self._safe_float(amt_el.text)
            # Also capture currency from balance if not yet known
            if not currency_code:
                currency_code = (amt_el.get('Ccy') or '').upper() or None

        # Credit/Debit indicator: DBIT balances are negative
        cdi = self._find_text(bal_el, ns, 'CdtDbtInd')
        if cdi and cdi.upper() == 'DBIT':
            amt_val = -amt_val

        if bal_code in ('OPBD', 'PRCD'):
            # Opening booked / previous closing (used as opening)
            balance_start = amt_val
        elif bal_code in ('CLBD', 'CLAV'):
            # Closing booked / closing available
            balance_end = amt_val

        # Also capture statement date from closing balance if missing
        if bal_code in ('CLBD',) and not stmt_date:
            dt_text = self._find_text(bal_el, ns, 'Dt/Dt')
            if dt_text:
                stmt_date = self._parse_camt_date(dt_text)

    # Transactions — Ntry elements (one entry may yield several txns)
    transactions = []
    for ntry_el in self._findall(stmt_el, ns, 'Ntry'):
        txn_list = self._extract_entry(ntry_el, ns, stmt_id, account_number)
        transactions.extend(txn_list)

    # Human-readable statement name; date suffix added when known.
    stmt_name = stmt_id or f"CAMT {account_number}"
    if stmt_date:
        stmt_name += f" {stmt_date.strftime('%Y-%m-%d')}"

    return {
        'name': stmt_name,
        'date': stmt_date,
        'balance_start': balance_start,
        'balance_end_real': balance_end,
        'currency_code': currency_code,
        'account_number': account_number,
        'transactions': transactions,
    }
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Entry / transaction extraction
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
def _extract_entry(self, ntry_el, ns, stmt_id, acct_number):
|
||||
"""Extract transactions from a single ``<Ntry>`` element.
|
||||
|
||||
An entry may contain one or more ``<NtryDtls>/<TxDtls>`` detail
|
||||
blocks. If no detail blocks exist, we create a single transaction
|
||||
from the entry-level data.
|
||||
"""
|
||||
# Entry-level fields
|
||||
entry_amt = self._safe_float(
|
||||
self._find_text(ntry_el, ns, 'Amt') or '0'
|
||||
)
|
||||
entry_cdi = self._find_text(ntry_el, ns, 'CdtDbtInd') or ''
|
||||
if entry_cdi.upper() == 'DBIT':
|
||||
entry_amt = -abs(entry_amt)
|
||||
else:
|
||||
entry_amt = abs(entry_amt)
|
||||
|
||||
# Reversal indicator
|
||||
rvsl = self._find_text(ntry_el, ns, 'RvslInd')
|
||||
if rvsl and rvsl.upper() in ('TRUE', 'Y', '1'):
|
||||
entry_amt = -entry_amt
|
||||
|
||||
booking_date = self._parse_camt_date(
|
||||
self._find_text(ntry_el, ns, 'BookgDt/Dt')
|
||||
)
|
||||
if not booking_date:
|
||||
booking_date = self._parse_camt_datetime(
|
||||
self._find_text(ntry_el, ns, 'BookgDt/DtTm')
|
||||
)
|
||||
value_date = self._parse_camt_date(
|
||||
self._find_text(ntry_el, ns, 'ValDt/Dt')
|
||||
)
|
||||
|
||||
entry_ref = self._find_text(ntry_el, ns, 'NtryRef') or ''
|
||||
entry_addl_info = self._find_text(ntry_el, ns, 'AddtlNtryInf') or ''
|
||||
|
||||
# Check for detail-level transactions
|
||||
tx_details = []
|
||||
for ntry_dtls in self._findall(ntry_el, ns, 'NtryDtls'):
|
||||
for tx_dtls in self._findall(ntry_dtls, ns, 'TxDtls'):
|
||||
tx_details.append(tx_dtls)
|
||||
|
||||
if not tx_details:
|
||||
# No detail blocks — create transaction from entry-level data
|
||||
description = entry_addl_info or entry_ref or '/'
|
||||
unique_id = self._make_unique_id(
|
||||
stmt_id, acct_number, entry_ref,
|
||||
booking_date, entry_amt, description,
|
||||
)
|
||||
return [{
|
||||
'date': booking_date or value_date,
|
||||
'payment_ref': description,
|
||||
'ref': entry_ref,
|
||||
'amount': entry_amt,
|
||||
'unique_import_id': unique_id,
|
||||
}]
|
||||
|
||||
# Process each detail block
|
||||
transactions = []
|
||||
for idx, tx_dtls in enumerate(tx_details):
|
||||
txn = self._extract_tx_details(
|
||||
tx_dtls, ns, stmt_id, acct_number,
|
||||
entry_amt, entry_cdi, booking_date, value_date,
|
||||
entry_ref, entry_addl_info, idx,
|
||||
)
|
||||
if txn:
|
||||
transactions.append(txn)
|
||||
|
||||
return transactions
|
||||
|
||||
    def _extract_tx_details(
        self, tx_dtls, ns, stmt_id, acct_number,
        entry_amt, entry_cdi, booking_date, value_date,
        entry_ref, entry_addl_info, detail_idx,
    ):
        """Extract a single transaction dict from a ``<TxDtls>`` element.

        Entry-level values (``entry_amt``, ``entry_cdi``, dates, refs,
        additional info) serve as fallbacks when the detail block omits
        the corresponding field. ``detail_idx`` disambiguates the unique
        import ID when one entry carries several detail blocks.

        Returns a dict with ``date``, ``payment_ref``, ``ref``,
        ``amount``, ``unique_import_id`` and, when available,
        ``partner_name`` / ``account_number``.
        """
        # Amount — a detail-level <Amt> overrides the entry amount; its
        # sign is derived from the detail's (or, failing that, the
        # entry's) credit/debit indicator.
        detail_amt_text = self._find_text(tx_dtls, ns, 'Amt')
        if detail_amt_text:
            amount = self._safe_float(detail_amt_text)
            cdi = self._find_text(tx_dtls, ns, 'CdtDbtInd') or entry_cdi
            if cdi.upper() == 'DBIT':
                amount = -abs(amount)
            else:
                amount = abs(amount)
        else:
            amount = entry_amt

        # References from the <Refs> aggregate
        refs = self._find(tx_dtls, ns, 'Refs')
        end_to_end_id = ''
        instruction_id = ''
        msg_id = ''
        if refs is not None:
            end_to_end_id = self._find_text(refs, ns, 'EndToEndId') or ''
            instruction_id = self._find_text(refs, ns, 'InstrId') or ''
            msg_id = self._find_text(refs, ns, 'MsgId') or ''

        # Banks emit sentinel strings instead of omitting the element;
        # treat them as absent.
        if end_to_end_id.upper() in ('NOTPROVIDED', 'NOTAVAILABLE', 'NONE'):
            end_to_end_id = ''
        if instruction_id.upper() in ('NOTPROVIDED', 'NOTAVAILABLE', 'NONE'):
            instruction_id = ''

        # First non-empty reference wins, entry-level ref as last resort.
        ref = end_to_end_id or instruction_id or msg_id or entry_ref

        # Remittance information (unstructured <Ustrd> lines, joined)
        remittance_info = ''
        rmt_inf = self._find(tx_dtls, ns, 'RmtInf')
        if rmt_inf is not None:
            ustrd_parts = []
            for ustrd in self._findall(rmt_inf, ns, 'Ustrd'):
                if ustrd.text and ustrd.text.strip():
                    ustrd_parts.append(ustrd.text.strip())
            remittance_info = ' '.join(ustrd_parts)

        # Structured remittance: creditor reference, used only when no
        # unstructured text was found.
        if not remittance_info and rmt_inf is not None:
            cred_ref = self._find_text(rmt_inf, ns, 'Strd/CdtrRefInf/Ref')
            if cred_ref:
                remittance_info = cred_ref

        # Additional transaction info
        addl_tx_info = self._find_text(tx_dtls, ns, 'AddtlTxInf') or ''

        # Build the label from all available text fields; fall back to
        # the reference, then to the '/' placeholder.
        desc_parts = [p for p in [remittance_info, addl_tx_info, entry_addl_info] if p]
        description = ' | '.join(desc_parts) if desc_parts else ref or '/'

        # Debtor / Creditor information
        partner_name = ''
        partner_account = ''

        # NOTE(review): this takes the first party account found
        # (debtor before creditor) regardless of the transaction's
        # direction — presumably the counterparty for the common case;
        # confirm against sample files before relying on it.
        for party_tag in ('DbtrAcct', 'CdtrAcct'):
            iban = self._find_text(tx_dtls, ns, f'RltdPties/{party_tag}/Id/IBAN')
            if iban:
                partner_account = iban
                break
            # Non-IBAN account schemes fall under Othr/Id.
            other_id = self._find_text(tx_dtls, ns, f'RltdPties/{party_tag}/Id/Othr/Id')
            if other_id:
                partner_account = other_id
                break

        # Same first-found policy for the party name.
        for name_tag in ('Dbtr/Nm', 'Cdtr/Nm'):
            nm = self._find_text(tx_dtls, ns, f'RltdPties/{name_tag}')
            if nm:
                partner_name = nm
                break

        # Unique ID — detail_idx is folded in so sibling details of the
        # same entry never collide.
        unique_id = self._make_unique_id(
            stmt_id, acct_number, ref,
            booking_date, amount, f"{description}-{detail_idx}",
        )

        txn = {
            'date': booking_date or value_date,
            'payment_ref': description,
            'ref': ref,
            'amount': amount,
            'unique_import_id': unique_id,
        }
        # Optional keys are only set when present so downstream code can
        # use simple truthiness checks.
        if partner_name:
            txn['partner_name'] = partner_name
        if partner_account:
            txn['account_number'] = partner_account
        return txn
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Unique-ID generation
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _make_unique_id(stmt_id, acct_number, ref, date, amount, extra=''):
|
||||
"""Generate a deterministic unique import ID from available data."""
|
||||
parts = [
|
||||
'CAMT',
|
||||
stmt_id or '',
|
||||
acct_number or '',
|
||||
ref or '',
|
||||
date.isoformat() if date else '',
|
||||
str(amount),
|
||||
]
|
||||
if extra:
|
||||
parts.append(extra)
|
||||
return '-'.join(p for p in parts if p)
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Date helpers
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _parse_camt_date(date_str):
|
||||
"""Parse an ISO 8601 date (``YYYY-MM-DD``) to ``datetime.date``."""
|
||||
if not date_str:
|
||||
return None
|
||||
try:
|
||||
return datetime.strptime(date_str.strip()[:10], '%Y-%m-%d').date()
|
||||
except ValueError:
|
||||
_log.warning("Unparseable CAMT date: %s", date_str)
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _parse_camt_datetime(dt_str):
|
||||
"""Parse an ISO 8601 datetime to ``datetime.date``."""
|
||||
if not dt_str:
|
||||
return None
|
||||
# Strip timezone suffix for simple parsing
|
||||
cleaned = dt_str.strip()
|
||||
for fmt in ('%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M:%S.%f',
|
||||
'%Y-%m-%d', '%Y-%m-%dT%H:%M:%S%z'):
|
||||
try:
|
||||
return datetime.strptime(cleaned[:19], fmt[:len(fmt)]).date()
|
||||
except ValueError:
|
||||
continue
|
||||
_log.warning("Unparseable CAMT datetime: %s", dt_str)
|
||||
return None
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Numeric helper
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _safe_float(value):
|
||||
"""Convert *value* to float, returning 0.0 on failure."""
|
||||
if not value:
|
||||
return 0.0
|
||||
try:
|
||||
return float(value.strip().replace(',', '.'))
|
||||
except (ValueError, AttributeError):
|
||||
return 0.0
|
||||
|
||||
|
||||
class FusionJournalCAMTImport(models.Model):
    """Expose CAMT.053 as a bank-statement import format on
    ``account.journal`` and route matching attachments to
    ``FusionCAMTParser``."""

    _inherit = 'account.journal'

    # ---- Format Registration ----
    def _get_bank_statements_available_import_formats(self):
        """Advertise CAMT.053 alongside the formats supported upstream."""
        supported = super()._get_bank_statements_available_import_formats()
        supported.append('CAMT.053')
        return supported

    # ---- Parser Hook ----
    def _parse_bank_statement_file(self, attachment):
        """Parse *attachment* as CAMT.053 when it looks like one;
        otherwise defer to ``super()``."""
        content = attachment.raw
        if not self._is_camt_file(content):
            return super()._parse_bank_statement_file(attachment)

        try:
            statements = FusionCAMTParser().parse_camt(content)
        except UserError:
            # Parser-raised user errors are already meaningful.
            raise
        except Exception as exc:
            _log.exception("CAMT.053 parsing error")
            raise UserError(
                _("Could not parse the CAMT.053 file: %s", str(exc))
            ) from exc

        # The import pipeline expects (currency_code, account_number,
        # statements); both identifiers come from the first statement.
        currency_code = statements[0].get('currency_code') if statements else None
        account_number = statements[0].get('account_number') if statements else None
        return currency_code, account_number, statements

    # ---- Detection ----
    @staticmethod
    def _is_camt_file(raw_data):
        """Cheap sniff: does *raw_data* look like a CAMT.053 document?"""
        try:
            head = raw_data.decode('utf-8-sig', errors='ignore')[:4096]
        except (UnicodeDecodeError, AttributeError):
            head = str(raw_data)[:4096]
        # Match the namespace URI, or the root statement element in case
        # the namespace uses unexpected casing or a custom prefix.
        return 'camt.053' in head.lower() or 'BkToCstmrStmt' in head
|
||||
458
Fusion Accounting/models/bank_statement_import_ofx.py
Normal file
458
Fusion Accounting/models/bank_statement_import_ofx.py
Normal file
@@ -0,0 +1,458 @@
|
||||
# Fusion Accounting - OFX Bank Statement Parser
|
||||
# Original implementation for Open Financial Exchange v1 (SGML) and v2 (XML)
|
||||
# Based on the published OFX specification (https://www.ofx.net/spec)
|
||||
|
||||
import logging
|
||||
import re
|
||||
from datetime import datetime
|
||||
from xml.etree import ElementTree
|
||||
|
||||
from odoo import _, models
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionOFXParser:
    """Standalone parser for OFX (Open Financial Exchange) files.

    Supports both OFX v1 (SGML-like markup without closing tags) and
    OFX v2 (well-formed XML). The parser normalises either dialect into
    a common intermediate structure before extracting statement data.

    This is an **original** implementation written from the published
    OFX 1.6 / 2.2 specification — it is not derived from Odoo Enterprise.
    """

    # OFX date format: YYYYMMDDHHMMSS[.XXX[:TZ]] — timezone and fractional
    # seconds are optional; many banks only emit YYYYMMDD.
    _OFX_DATE_RE = re.compile(
        r'^(\d{4})(\d{2})(\d{2})'      # YYYYMMDD (required)
        r'(?:(\d{2})(\d{2})(\d{2}))?'  # HHMMSS (optional)
        r'(?:\.\d+)?'                  # .XXX (optional fractional)
        r'(?:\[.*\])?$'                # [:TZ] (optional timezone)
    )

    # SGML leaf (data) tags used in OFX v1: they carry a scalar value
    # directly after the tag and have no closing counterpart.
    _SGML_LEAF_TAGS = {
        'TRNTYPE', 'DTPOSTED', 'DTUSER', 'DTSTART', 'DTEND',
        'TRNAMT', 'FITID', 'CHECKNUM', 'REFNUM', 'NAME', 'MEMO',
        'PAYEEID', 'ACCTID', 'BANKID', 'BRANCHID', 'ACCTTYPE',
        'BALAMT', 'DTASOF', 'CURDEF', 'SEVERITY', 'CODE', 'MESSAGE',
        'SIC', 'CORRECTFITID', 'CORRECTACTION',
        'SRVRTID', 'CLRTID',
    }

    # -------------------------------------------------------------------
    # Public API
    # -------------------------------------------------------------------

    def parse_ofx(self, data_file):
        """Parse an OFX file (bytes or str) into a list of statement dicts.

        Each statement dict carries ``name``, ``date``, ``balance_start``,
        ``balance_end_real``, ``currency_code``, ``account_number`` and
        ``transactions``. Each transaction dict carries ``date``,
        ``payment_ref``, ``ref``, ``amount`` (signed float; negative =
        debit), ``unique_import_id`` and ``transaction_type``.

        :raises UserError: when the document cannot be parsed or holds
            no statements.
        """
        raw = self._to_text(data_file)

        # Determine the OFX dialect and obtain an ElementTree root.
        if self._is_ofx_v2(raw):
            root = self._parse_xml(raw)
        else:
            root = self._parse_sgml(raw)

        return self._extract_statements(root)

    # -------------------------------------------------------------------
    # Input normalisation
    # -------------------------------------------------------------------

    @staticmethod
    def _to_text(data_file):
        """Ensure *data_file* is a string, decoding bytes if necessary.

        Tries UTF-8 (with/without BOM) first and falls back to Latin-1,
        which is lossless for any byte sequence.
        """
        if isinstance(data_file, bytes):
            for encoding in ('utf-8-sig', 'utf-8', 'latin-1'):
                try:
                    return data_file.decode(encoding)
                except UnicodeDecodeError:
                    continue
        return data_file

    @staticmethod
    def _is_ofx_v2(text):
        """Return True when *text* looks like OFX v2 (XML) rather than
        SGML-based v1. OFX v2 begins with an XML processing instruction
        or a ``<?OFX …?>`` header."""
        stripped = text.lstrip()
        return stripped.startswith('<?xml') or stripped.startswith('<?OFX')

    # -------------------------------------------------------------------
    # OFX v2 (XML) parser
    # -------------------------------------------------------------------

    def _parse_xml(self, text):
        """Parse well-formed OFX v2 XML and return the root Element.

        :raises UserError: when the XML is malformed.
        """
        try:
            return ElementTree.fromstring(text.encode('utf-8'))
        except ElementTree.ParseError as exc:
            raise UserError(
                _("Failed to parse OFX XML file: %s", str(exc))
            ) from exc

    # -------------------------------------------------------------------
    # OFX v1 (SGML) parser — convert to XML then parse
    # -------------------------------------------------------------------

    def _parse_sgml(self, text):
        """Convert an OFX v1 SGML document into well-formed XML and
        return the root Element.

        The SGML dialect used by OFX v1 has two kinds of tags:

        * **Aggregate** tags like ``<STMTTRNRS>`` which contain child
          elements and always have a matching ``</STMTTRNRS>``.
        * **Leaf** (data) tags like ``<TRNAMT>-42.50`` which carry a
          scalar value and are never explicitly closed.

        The conversion strategy inserts explicit close tags for every
        leaf element so that the result is valid XML.

        :raises UserError: when no ``<OFX>`` root is found or the
            converted text still fails to parse.
        """
        # Strip the SGML headers (everything before the first ``<OFX>``).
        ofx_idx = text.upper().find('<OFX>')
        if ofx_idx == -1:
            raise UserError(_("The file does not contain a valid OFX document."))
        body = text[ofx_idx:]

        # Normalise whitespace: drop blank lines, strip each remaining one.
        lines = body.splitlines()
        xml_lines = []

        for line in lines:
            stripped = line.strip()
            if not stripped:
                continue
            xml_lines.append(stripped)

        joined = '\n'.join(xml_lines)

        # Insert closing tags for leaf elements.
        # A leaf tag looks like ``<TAGNAME>value`` (no ``</TAGNAME>`` follows).
        def _close_leaf_tags(sgml_text):
            """Insert ``</TAG>`` after each leaf tag's data value."""
            result = []
            tag_re = re.compile(r'<(/?)(\w+)>(.*)', re.DOTALL)
            for raw_line in sgml_text.split('\n'):
                raw_line = raw_line.strip()
                if not raw_line:
                    continue
                m = tag_re.match(raw_line)
                if m:
                    is_close = m.group(1) == '/'
                    tag_name = m.group(2).upper()
                    rest = m.group(3).strip()

                    if is_close:
                        result.append(f'</{tag_name}>')
                    elif tag_name in self._SGML_LEAF_TAGS:
                        # Leaf element: value sits between the open tag and
                        # the (missing) close tag; escape it for XML.
                        data_val = rest.split('<')[0].strip() if '<' in rest else rest
                        result.append(f'<{tag_name}>{self._xml_escape(data_val)}</{tag_name}>')
                        # If the rest of the line has another tag, recurse.
                        if '<' in rest:
                            leftover = rest[rest.index('<'):]
                            for extra in _close_leaf_tags(leftover).split('\n'):
                                if extra.strip():
                                    result.append(extra.strip())
                    else:
                        # Aggregate (container) tag — keep as-is.
                        result.append(f'<{tag_name}>')
                        if rest:
                            for extra in _close_leaf_tags(rest).split('\n'):
                                if extra.strip():
                                    result.append(extra.strip())
                else:
                    result.append(raw_line)
            return '\n'.join(result)

        xml_text = _close_leaf_tags(joined)

        try:
            return ElementTree.fromstring(xml_text.encode('utf-8'))
        except ElementTree.ParseError as exc:
            _log.debug("SGML→XML conversion result:\n%s", xml_text[:2000])
            raise UserError(
                _("Failed to parse OFX v1 (SGML) file. The file may be "
                  "corrupt or in an unsupported dialect: %s", str(exc))
            ) from exc

    @staticmethod
    def _xml_escape(text):
        """Escape the five XML-special characters in *text*.

        FIX: the previous version replaced each character with itself
        (an HTML-entity decoding artefact), producing invalid XML for
        any data value containing ``&``, ``<`` etc. ``&`` must be
        replaced first so already-produced entities are not re-escaped.
        """
        return (
            text.replace('&', '&amp;')
            .replace('<', '&lt;')
            .replace('>', '&gt;')
            .replace('"', '&quot;')
            .replace("'", '&apos;')
        )

    # -------------------------------------------------------------------
    # Data extraction
    # -------------------------------------------------------------------

    def _extract_statements(self, root):
        """Walk the parsed OFX element tree and collect statement data.

        Supports ``BANKMSGSRSV1`` (bank accounts, ``STMTRS``) and
        ``CCMSGSRSV1`` (credit-card accounts, ``CCSTMTRS``).

        FIX: the previous version searched for *all* STMTRS **and**
        CCSTMTRS containers once per (message-set, account-tag) pair,
        which emitted every statement twice. Each container kind is now
        paired with its own account tag and scanned exactly once.

        :raises UserError: when the document holds no statements.
        """
        statements = []

        for stmt_tag, acct_tag in [
            ('STMTRS', 'BANKACCTFROM'),
            ('CCSTMTRS', 'CCACCTFROM'),
        ]:
            for stmtrs in self._find_all(root, stmt_tag):
                stmt = self._extract_single_statement(stmtrs, acct_tag)
                if stmt:
                    statements.append(stmt)

        if not statements:
            raise UserError(
                _("No bank or credit-card statements found in the OFX file.")
            )
        return statements

    def _extract_single_statement(self, stmtrs, acct_tag):
        """Extract one statement from a ``<STMTRS>`` or ``<CCSTMTRS>``
        element.

        :param acct_tag: preferred account aggregate tag
            (``BANKACCTFROM`` or ``CCACCTFROM``); both are tried as
            fallbacks so mismatched files still import.
        """
        # Currency
        currency = self._find_text(stmtrs, 'CURDEF') or ''

        # Account number — preferred tag first, then either fallback.
        acct_elem = self._find_first(stmtrs, acct_tag)
        if acct_elem is None:
            acct_elem = self._find_first(stmtrs, 'BANKACCTFROM')
        if acct_elem is None:
            acct_elem = self._find_first(stmtrs, 'CCACCTFROM')

        acct_number = ''
        if acct_elem is not None:
            acct_number = self._find_text(acct_elem, 'ACCTID') or ''

        # Transaction list
        txn_list_el = self._find_first(stmtrs, 'BANKTRANLIST')
        if txn_list_el is None:
            txn_list_el = stmtrs  # CCSTMTRS may put transactions directly inside

        start_date = self._parse_ofx_date(self._find_text(txn_list_el, 'DTSTART'))
        end_date = self._parse_ofx_date(self._find_text(txn_list_el, 'DTEND'))

        transactions = []
        for stmttrn in self._find_all(txn_list_el, 'STMTTRN'):
            txn = self._extract_transaction(stmttrn)
            if txn:
                transactions.append(txn)

        # Balances — LEDGERBAL is authoritative; AVAILBAL only when the
        # ledger balance is absent.
        balance_end = 0.0

        ledger_bal = self._find_first(stmtrs, 'LEDGERBAL')
        if ledger_bal is not None:
            balance_end = self._safe_float(self._find_text(ledger_bal, 'BALAMT'))

        avail_bal = self._find_first(stmtrs, 'AVAILBAL')
        if avail_bal is not None and ledger_bal is None:
            balance_end = self._safe_float(self._find_text(avail_bal, 'BALAMT'))

        # Derive opening balance: opening = closing − sum(transactions)
        txn_total = sum(t['amount'] for t in transactions)
        balance_start = balance_end - txn_total

        stmt_date = end_date or start_date
        stmt_name = f"OFX {acct_number}" if acct_number else "OFX Import"
        if stmt_date:
            stmt_name += f" {stmt_date.strftime('%Y-%m-%d')}"

        return {
            'name': stmt_name,
            'date': stmt_date,
            'balance_start': balance_start,
            'balance_end_real': balance_end,
            'currency_code': currency.upper() if currency else None,
            'account_number': acct_number,
            'transactions': transactions,
        }

    def _extract_transaction(self, stmttrn):
        """Extract a single transaction from a ``<STMTTRN>`` element."""
        trntype = self._find_text(stmttrn, 'TRNTYPE') or ''
        dt_posted = self._parse_ofx_date(self._find_text(stmttrn, 'DTPOSTED'))
        dt_user = self._parse_ofx_date(self._find_text(stmttrn, 'DTUSER'))
        amount = self._safe_float(self._find_text(stmttrn, 'TRNAMT'))
        fitid = self._find_text(stmttrn, 'FITID') or ''
        checknum = self._find_text(stmttrn, 'CHECKNUM') or ''
        refnum = self._find_text(stmttrn, 'REFNUM') or ''
        name = self._find_text(stmttrn, 'NAME') or ''
        memo = self._find_text(stmttrn, 'MEMO') or ''

        # Build description: prefer NAME, append MEMO if different
        description = name
        if memo and memo != name:
            description = f"{name} - {memo}" if name else memo

        # FITID is the bank-issued unique ID; CHECKNUM or REFNUM serve
        # as the human-readable reference when present.
        ref = checknum or refnum or fitid
        unique_id = fitid

        return {
            'date': dt_user or dt_posted,
            'payment_ref': description or ref or '/',
            'ref': ref,
            'amount': amount,
            'unique_import_id': unique_id,
            'transaction_type': trntype,
        }

    # -------------------------------------------------------------------
    # Element-tree helpers (case-insensitive tag search)
    # -------------------------------------------------------------------

    @staticmethod
    def _find_all(parent, tag):
        """Find all descendant elements whose tag matches *tag*
        (case-insensitive)."""
        tag_upper = tag.upper()
        return [el for el in parent.iter() if el.tag.upper() == tag_upper]

    @staticmethod
    def _find_first(parent, tag):
        """Return the first descendant matching *tag* (case-insensitive)
        or ``None``."""
        tag_upper = tag.upper()
        for el in parent.iter():
            if el.tag.upper() == tag_upper:
                return el
        return None

    @classmethod
    def _find_text(cls, parent, tag):
        """Return stripped text content of the first descendant matching
        *tag*, or ``None``."""
        el = cls._find_first(parent, tag)
        if el is not None and el.text:
            return el.text.strip()
        return None

    # -------------------------------------------------------------------
    # Date / numeric helpers
    # -------------------------------------------------------------------

    @classmethod
    def _parse_ofx_date(cls, date_str):
        """Parse an OFX date string (``YYYYMMDD…``) into a Python date;
        return ``None`` for empty or invalid input."""
        if not date_str:
            return None
        m = cls._OFX_DATE_RE.match(date_str.strip())
        if not m:
            # Fallback: try basic YYYYMMDD
            try:
                return datetime.strptime(date_str.strip()[:8], '%Y%m%d').date()
            except (ValueError, IndexError):
                _log.warning("Unparseable OFX date: %s", date_str)
                return None
        year, month, day = int(m.group(1)), int(m.group(2)), int(m.group(3))
        try:
            return datetime(year, month, day).date()
        except ValueError:
            # Matched the pattern but e.g. month 13 / day 32.
            _log.warning("Invalid OFX date components: %s", date_str)
            return None

    @staticmethod
    def _safe_float(value):
        """Convert *value* to float, returning 0.0 for empty / invalid.
        Decimal commas are tolerated."""
        if not value:
            return 0.0
        try:
            return float(value.replace(',', '.'))
        except (ValueError, AttributeError):
            return 0.0
|
||||
|
||||
|
||||
class FusionJournalOFXImport(models.Model):
    """Expose OFX as a bank-statement import format on
    ``account.journal`` and route matching attachments to
    ``FusionOFXParser``."""

    _inherit = 'account.journal'

    # ---- Format Registration ----
    def _get_bank_statements_available_import_formats(self):
        """Advertise OFX alongside the formats supported upstream."""
        supported = super()._get_bank_statements_available_import_formats()
        supported.append('OFX')
        return supported

    # ---- Parser Hook ----
    def _parse_bank_statement_file(self, attachment):
        """Parse *attachment* as OFX when it looks like one; otherwise
        defer to ``super()``."""
        content = attachment.raw
        if not self._is_ofx_file(content):
            return super()._parse_bank_statement_file(attachment)

        try:
            statements = FusionOFXParser().parse_ofx(content)
        except UserError:
            # Parser-raised user errors are already meaningful.
            raise
        except Exception as exc:
            _log.exception("OFX parsing error")
            raise UserError(
                _("Could not parse the OFX file: %s", str(exc))
            ) from exc

        # The import pipeline expects (currency_code, account_number,
        # statements); both identifiers come from the first statement.
        currency_code = statements[0].get('currency_code') if statements else None
        account_number = statements[0].get('account_number') if statements else None
        return currency_code, account_number, statements

    # ---- Detection ----
    @staticmethod
    def _is_ofx_file(raw_data):
        """Cheap sniff: does *raw_data* look like an OFX file?"""
        try:
            head = raw_data.decode('utf-8-sig', errors='ignore')[:4096]
        except (UnicodeDecodeError, AttributeError):
            head = str(raw_data)[:4096]
        upper = head.upper()
        # v2 (XML) root / processing-instruction markers, or the v1
        # SGML header line.
        return '<?OFX' in upper or '<OFX>' in upper or 'OFXHEADER:' in upper
|
||||
378
Fusion Accounting/models/bank_statement_import_qif.py
Normal file
378
Fusion Accounting/models/bank_statement_import_qif.py
Normal file
@@ -0,0 +1,378 @@
|
||||
# Fusion Accounting - QIF Bank Statement Parser
|
||||
# Original implementation for Quicken Interchange Format files
|
||||
# Based on the published QIF specification
|
||||
|
||||
import logging
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
from odoo import _, models
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionQIFParser:
|
||||
"""Standalone parser for QIF (Quicken Interchange Format) files.
|
||||
|
||||
QIF is a plain-text format where each field occupies its own line,
|
||||
prefixed by a single-character code:
|
||||
|
||||
D Date of the transaction
|
||||
T Amount (net)
|
||||
U Amount (duplicate field, same meaning as T)
|
||||
P Payee name
|
||||
N Check number or reference
|
||||
M Memo / description
|
||||
L Category or transfer account
|
||||
A Address line (up to 6 lines)
|
||||
C Cleared status (*/c/X/R)
|
||||
^ End-of-record separator
|
||||
|
||||
Sections are introduced by a ``!Type:`` header line.
|
||||
|
||||
This is an **original** implementation written from the published
|
||||
QIF specification — it is not derived from Odoo Enterprise.
|
||||
"""
|
||||
|
||||
# Supported QIF date formats (US mm/dd/yyyy is most common, but
|
||||
# dd/mm/yyyy and yyyy-mm-dd also appear in the wild).
|
||||
_DATE_FORMATS = [
|
||||
'%m/%d/%Y', # 01/31/2025
|
||||
'%m/%d/%y', # 01/31/25
|
||||
'%m-%d-%Y', # 01-31-2025
|
||||
'%m-%d-%y', # 01-31-25
|
||||
'%d/%m/%Y', # 31/01/2025
|
||||
'%d/%m/%y', # 31/01/25
|
||||
'%d-%m-%Y', # 31-01-2025
|
||||
'%d-%m-%y', # 31-01-25
|
||||
'%Y-%m-%d', # 2025-01-31
|
||||
'%Y/%m/%d', # 2025/01/31
|
||||
"%m/%d'%Y", # 1/31'2025 (Quicken short-year)
|
||||
"%m/%d'%y", # 1/31'25
|
||||
]
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Public API
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
def parse_qif(self, data_file):
|
||||
"""Parse a QIF file and return a statement dict compatible with
|
||||
the Fusion Accounting import pipeline.
|
||||
|
||||
Returns a **single** dict (QIF files describe one account):
|
||||
- ``name`` : generated statement identifier
|
||||
- ``date`` : last transaction date
|
||||
- ``balance_start`` : 0.0 (QIF does not carry balances)
|
||||
- ``balance_end_real``: 0.0
|
||||
- ``transactions`` : list of transaction dicts
|
||||
|
||||
Transaction dicts contain:
|
||||
- ``date`` : transaction date (datetime.date)
|
||||
- ``payment_ref`` : payee / memo
|
||||
- ``ref`` : check number / reference
|
||||
- ``amount`` : signed float
|
||||
- ``unique_import_id`` : generated unique key
|
||||
"""
|
||||
text = self._to_text(data_file)
|
||||
lines = text.splitlines()
|
||||
|
||||
# Detect account type from the header (optional)
|
||||
account_type = self._detect_account_type(lines)
|
||||
|
||||
# Split the record stream at ``^`` separators
|
||||
records = self._split_records(lines)
|
||||
|
||||
if not records:
|
||||
raise UserError(
|
||||
_("The QIF file contains no transaction records.")
|
||||
)
|
||||
|
||||
transactions = []
|
||||
for idx, rec in enumerate(records):
|
||||
txn = self._parse_record(rec, idx)
|
||||
if txn:
|
||||
transactions.append(txn)
|
||||
|
||||
if not transactions:
|
||||
raise UserError(
|
||||
_("No valid transactions could be extracted from the QIF file.")
|
||||
)
|
||||
|
||||
# Build statement metadata
|
||||
dates = [t['date'] for t in transactions if t.get('date')]
|
||||
last_date = max(dates) if dates else None
|
||||
first_date = min(dates) if dates else None
|
||||
|
||||
stmt_name = "QIF Import"
|
||||
if last_date:
|
||||
stmt_name = f"QIF {last_date.strftime('%Y-%m-%d')}"
|
||||
|
||||
return {
|
||||
'name': stmt_name,
|
||||
'date': last_date,
|
||||
'balance_start': 0.0,
|
||||
'balance_end_real': 0.0,
|
||||
'account_type': account_type,
|
||||
'transactions': transactions,
|
||||
}
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Text handling
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _to_text(data_file):
|
||||
"""Ensure *data_file* is a string."""
|
||||
if isinstance(data_file, bytes):
|
||||
for encoding in ('utf-8-sig', 'utf-8', 'latin-1'):
|
||||
try:
|
||||
return data_file.decode(encoding)
|
||||
except UnicodeDecodeError:
|
||||
continue
|
||||
return data_file
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Account-type detection
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _detect_account_type(lines):
|
||||
"""Return the QIF account type from a ``!Type:`` header, or
|
||||
``'Bank'`` as the default."""
|
||||
for line in lines:
|
||||
stripped = line.strip()
|
||||
if stripped.upper().startswith('!TYPE:'):
|
||||
return stripped[6:].strip()
|
||||
return 'Bank'
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Record splitting
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _split_records(lines):
|
||||
"""Split *lines* into a list of record-lists, using ``^`` as the
|
||||
record separator. Header lines (``!``) are skipped."""
|
||||
records = []
|
||||
current = []
|
||||
for line in lines:
|
||||
stripped = line.strip()
|
||||
if not stripped:
|
||||
continue
|
||||
if stripped.startswith('!'):
|
||||
# Header / type declaration — skip
|
||||
continue
|
||||
if stripped == '^':
|
||||
if current:
|
||||
records.append(current)
|
||||
current = []
|
||||
else:
|
||||
current.append(stripped)
|
||||
# Trailing record without final ``^``
|
||||
if current:
|
||||
records.append(current)
|
||||
return records
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Single-record parsing
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
def _parse_record(self, field_lines, record_index):
    """Convert one QIF record (code-prefixed field lines) into a
    transaction dict.

    Returns ``None`` when the record carries no parseable amount;
    otherwise a dict with ``date``, ``payment_ref``, ``ref``,
    ``amount`` and ``unique_import_id`` keys.
    """
    data = {}
    address_parts = []

    # Map single-letter QIF field codes onto named keys.
    for raw in field_lines:
        if not raw:
            continue
        code, value = raw[0], raw[1:].strip()
        if code == 'D':
            data['date_str'] = value
        elif code == 'T':
            data['amount'] = value
        elif code == 'U' and 'amount' not in data:
            # 'U' duplicates 'T'; it only wins when 'T' is absent.
            data['amount'] = value
        elif code == 'P':
            data['payee'] = value
        elif code == 'N':
            data['number'] = value
        elif code == 'M':
            data['memo'] = value
        elif code == 'L':
            data['category'] = value
        elif code == 'C':
            data['cleared'] = value
        elif code == 'A':
            address_parts.append(value)
        # Split-transaction codes (S, E, $, %) are uncommon in bank
        # exports and deliberately ignored here.

    if address_parts:
        data['address'] = ', '.join(address_parts)

    # A record without a usable amount is dropped entirely.
    amount = self._parse_amount(data.get('amount', ''))
    if amount is None:
        return None

    txn_date = self._parse_qif_date(data.get('date_str', ''))
    payee = data.get('payee', '')
    memo = data.get('memo', '')
    number = data.get('number', '')

    # Combine payee and memo, avoiding duplicate text.
    if memo and memo != payee:
        description = f"{payee} - {memo}" if payee else memo
    else:
        description = payee

    # Derive a stable unique key from the record content; the record
    # index is the fallback when date/payee are missing.
    key_bits = [
        txn_date.isoformat() if txn_date else str(record_index),
        str(amount),
        payee or memo or str(record_index),
    ]
    if number:
        key_bits.append(number)

    return {
        'date': txn_date,
        'payment_ref': description or number or '/',
        'ref': number,
        'amount': amount,
        'unique_import_id': 'QIF-' + '-'.join(key_bits),
    }
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Date parsing
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@classmethod
def _parse_qif_date(cls, date_str):
    """Parse *date_str* against each known QIF date format in turn.

    Returns a ``datetime.date`` from the first successful parse, or
    ``None`` (after logging a warning) when nothing matches.
    """
    if not date_str:
        return None

    # Quicken writes years after an apostrophe: 1/31'2025 → 1/31/2025
    candidate = date_str.replace("'", "/")

    for pattern in cls._DATE_FORMATS:
        try:
            parsed = datetime.strptime(candidate, pattern)
        except ValueError:
            continue
        return parsed.date()

    _log.warning("Unparseable QIF date: %s", date_str)
    return None
|
||||
|
||||
# -------------------------------------------------------------------
|
||||
# Amount parsing
|
||||
# -------------------------------------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def _parse_amount(raw):
|
||||
"""Parse a QIF amount string. Handles commas as thousand
|
||||
separators or as decimal separators (European style)."""
|
||||
if not raw:
|
||||
return None
|
||||
# Remove currency symbols and whitespace
|
||||
cleaned = re.sub(r'[^\d.,\-+]', '', raw)
|
||||
if not cleaned:
|
||||
return None
|
||||
|
||||
# Determine decimal separator heuristic:
|
||||
# If both comma and period present, the last one is the decimal sep.
|
||||
if ',' in cleaned and '.' in cleaned:
|
||||
last_comma = cleaned.rfind(',')
|
||||
last_period = cleaned.rfind('.')
|
||||
if last_comma > last_period:
|
||||
# European: 1.234,56
|
||||
cleaned = cleaned.replace('.', '').replace(',', '.')
|
||||
else:
|
||||
# US: 1,234.56
|
||||
cleaned = cleaned.replace(',', '')
|
||||
elif ',' in cleaned:
|
||||
# Could be thousand separator (1,234) or decimal (1,23)
|
||||
parts = cleaned.split(',')
|
||||
if len(parts) == 2 and len(parts[1]) <= 2:
|
||||
# Likely decimal separator
|
||||
cleaned = cleaned.replace(',', '.')
|
||||
else:
|
||||
# Likely thousand separator
|
||||
cleaned = cleaned.replace(',', '')
|
||||
|
||||
try:
|
||||
return float(cleaned)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
class FusionJournalQIFImport(models.Model):
    """Extend ``account.journal`` with QIF import support.

    Registers QIF among the importable bank-statement formats and
    plugs the QIF parser into the statement-import pipeline.
    """

    _inherit = 'account.journal'

    # ---- Format Registration ----
    def _get_bank_statements_available_import_formats(self):
        """Advertise QIF alongside the formats provided upstream."""
        return super()._get_bank_statements_available_import_formats() + ['QIF']

    # ---- Parser Hook ----
    def _parse_bank_statement_file(self, attachment):
        """Parse *attachment* as QIF, or delegate to ``super()`` when
        the content does not look like QIF."""
        content = attachment.raw
        if not self._is_qif_file(content):
            return super()._parse_bank_statement_file(attachment)

        try:
            statement = FusionQIFParser().parse_qif(content)
        except UserError:
            # Already a user-facing message — propagate unchanged.
            raise
        except Exception as exc:
            _log.exception("QIF parsing error")
            raise UserError(
                _("Could not parse the QIF file: %s", str(exc))
            ) from exc

        # QIF carries neither a currency code nor an account number;
        # the import pipeline expects (currency, account, statements).
        return None, None, [statement]

    # ---- Detection ----
    @staticmethod
    def _is_qif_file(raw_data):
        """Heuristically decide whether *raw_data* is QIF content."""
        try:
            sample = raw_data.decode('utf-8-sig', errors='ignore')[:2048]
        except (UnicodeDecodeError, AttributeError):
            sample = str(raw_data)[:2048]

        # Typical QIF files open with a !Type: or !Account: header.
        upper = sample.upper().strip()
        if upper.startswith(('!TYPE:', '!ACCOUNT:')):
            return True

        # Fallback: a '^' record separator combined with D (date) and
        # T (amount) field codes at line starts.
        return (
            '^' in sample
            and re.search(r'^D\d', sample, re.MULTILINE) is not None
            and re.search(r'^T[\d\-+]', sample, re.MULTILINE) is not None
        )
|
||||
257
Fusion Accounting/models/batch_payment.py
Normal file
257
Fusion Accounting/models/batch_payment.py
Normal file
@@ -0,0 +1,257 @@
|
||||
"""
|
||||
Fusion Accounting - Batch Payment Processing
|
||||
|
||||
Provides the ``fusion.batch.payment`` model which allows grouping
|
||||
multiple vendor or customer payments into a single batch for
|
||||
streamlined bank submission and reconciliation.
|
||||
"""
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
|
||||
|
||||
class FusionBatchPayment(models.Model):
    """Groups individual payments into batches for bulk processing.

    A batch payment collects payments that share the same journal and
    payment method so they can be sent to the bank as a single file
    or printed on a single check run.
    """

    _name = 'fusion.batch.payment'
    _description = 'Batch Payment'
    # Newest batches first in list views.
    _order = 'date desc, id desc'
    _inherit = ['mail.thread', 'mail.activity.mixin']

    # ------------------------------------------------------------------
    # Fields
    # ------------------------------------------------------------------

    # Sequence-generated reference; '/' marks "not yet assigned" and is
    # replaced in create().
    name = fields.Char(
        string='Reference',
        required=True,
        copy=False,
        readonly=True,
        default='/',
        tracking=True,
        help="Unique reference for this batch payment.",
    )
    journal_id = fields.Many2one(
        comodel_name='account.journal',
        string='Bank Journal',
        required=True,
        domain="[('type', '=', 'bank')]",
        tracking=True,
        help="The bank journal used for all payments in this batch.",
    )
    payment_method_id = fields.Many2one(
        comodel_name='account.payment.method',
        string='Payment Method',
        required=True,
        tracking=True,
        help="Payment method shared by every payment in the batch.",
    )
    payment_ids = fields.Many2many(
        comodel_name='account.payment',
        relation='fusion_batch_payment_rel',
        column1='batch_id',
        column2='payment_id',
        string='Payments',
        copy=False,
        help="Individual payments included in this batch.",
    )
    state = fields.Selection(
        selection=[
            ('draft', 'Draft'),
            ('sent', 'Sent'),
            ('reconciled', 'Reconciled'),
        ],
        string='Status',
        default='draft',
        required=True,
        readonly=True,
        copy=False,
        tracking=True,
        help="Draft: batch is being assembled.\n"
             "Sent: batch has been transmitted to the bank.\n"
             "Reconciled: all payments in the batch are reconciled.",
    )
    date = fields.Date(
        string='Date',
        required=True,
        default=fields.Date.context_today,
        tracking=True,
        help="Effective date of the batch payment.",
    )
    amount_total = fields.Monetary(
        string='Total Amount',
        compute='_compute_amount_total',
        store=True,
        currency_field='currency_id',
        help="Sum of all payment amounts in this batch.",
    )
    currency_id = fields.Many2one(
        comodel_name='res.currency',
        string='Currency',
        related='journal_id.currency_id',
        readonly=True,
        store=True,
        help="Currency of the bank journal.",
    )
    company_id = fields.Many2one(
        comodel_name='res.company',
        string='Company',
        related='journal_id.company_id',
        store=True,
        readonly=True,
    )
    # Shares the @api.depends of amount_total — both are recomputed by
    # _compute_amount_total.
    payment_count = fields.Integer(
        string='Payment Count',
        compute='_compute_amount_total',
        store=True,
    )

    # ------------------------------------------------------------------
    # Computed fields
    # ------------------------------------------------------------------

    @api.depends('payment_ids', 'payment_ids.amount')
    def _compute_amount_total(self):
        """Compute the total batch amount and payment count."""
        for batch in self:
            batch.amount_total = sum(batch.payment_ids.mapped('amount'))
            batch.payment_count = len(batch.payment_ids)

    # ------------------------------------------------------------------
    # Constraints
    # ------------------------------------------------------------------

    @api.constrains('payment_ids')
    def _check_payments_journal(self):
        """Ensure every payment belongs to the same journal and uses the
        same payment method as the batch."""
        for batch in self:
            for payment in batch.payment_ids:
                if payment.journal_id != batch.journal_id:
                    raise ValidationError(_(
                        "Payment '%(payment)s' uses journal '%(pj)s' but "
                        "the batch requires journal '%(bj)s'.",
                        payment=payment.display_name,
                        pj=payment.journal_id.display_name,
                        bj=batch.journal_id.display_name,
                    ))
                if payment.payment_method_id != batch.payment_method_id:
                    raise ValidationError(_(
                        "Payment '%(payment)s' uses payment method '%(pm)s' "
                        "which differs from the batch method '%(bm)s'.",
                        payment=payment.display_name,
                        pm=payment.payment_method_id.display_name,
                        bm=batch.payment_method_id.display_name,
                    ))

    # ------------------------------------------------------------------
    # CRUD overrides
    # ------------------------------------------------------------------

    @api.model_create_multi
    def create(self, vals_list):
        """Assign a sequence number when creating a new batch."""
        for vals in vals_list:
            # Only replace the '/' placeholder; explicit names are kept.
            if vals.get('name', '/') == '/':
                vals['name'] = self.env['ir.sequence'].next_by_code(
                    'fusion.batch.payment'
                ) or _('New')
        return super().create(vals_list)

    # ------------------------------------------------------------------
    # Actions / Business Logic
    # ------------------------------------------------------------------

    def validate_batch(self):
        """Validate the batch and mark it as *Sent*.

        All payments in the batch must be in the *posted* state before
        the batch can be validated.

        :raises UserError: if the batch contains no payments or if any
            payment is not posted.
        """
        self.ensure_one()
        if self.state != 'draft':
            raise UserError(_("Only draft batches can be validated."))
        if not self.payment_ids:
            raise UserError(_(
                "Cannot validate an empty batch. Please add payments first."
            ))
        non_posted = self.payment_ids.filtered(lambda p: p.state != 'posted')
        if non_posted:
            raise UserError(_(
                "The following payments are not posted and must be confirmed "
                "before the batch can be validated:\n%(payments)s",
                payments=', '.join(non_posted.mapped('name')),
            ))
        self.write({'state': 'sent'})

    def action_draft(self):
        """Reset a sent batch back to draft state.

        :raises UserError: if the batch is not in the 'sent' state.
        """
        self.ensure_one()
        if self.state != 'sent':
            raise UserError(_("Only sent batches can be reset to draft."))
        self.write({'state': 'draft'})

    def action_reconcile(self):
        """Mark the batch as reconciled once bank confirms all payments.

        :raises UserError: if the batch is not in the 'sent' state.
        """
        self.ensure_one()
        if self.state != 'sent':
            raise UserError(_(
                "Only sent batches can be marked as reconciled."
            ))
        self.write({'state': 'reconciled'})

    def print_batch(self):
        """Generate a printable report for this batch payment.

        :return: Action dictionary triggering the report download.
        :rtype: dict
        """
        self.ensure_one()
        return self.env.ref(
            'fusion_accounting.action_report_batch_payment'
        ).report_action(self)

    @api.model
    def create_batch_from_payments(self, payment_ids):
        """Create a new batch payment from an existing set of payments.

        All supplied payments must share the same journal and payment
        method.

        :param payment_ids: recordset or list of ``account.payment`` ids
        :return: newly created ``fusion.batch.payment`` record
        :raises UserError: when payments do not share journal / method
        """
        # Accept either raw ids or an account.payment recordset.
        if isinstance(payment_ids, (list, tuple)):
            payments = self.env['account.payment'].browse(payment_ids)
        else:
            payments = payment_ids

        if not payments:
            raise UserError(_("No payments were provided."))

        journals = payments.mapped('journal_id')
        methods = payments.mapped('payment_method_id')
        if len(journals) > 1:
            raise UserError(_(
                "All payments must belong to the same bank journal to "
                "be batched together."
            ))
        if len(methods) > 1:
            raise UserError(_(
                "All payments must use the same payment method."
            ))

        # Single journal/method guaranteed above, so .id is safe here.
        return self.create({
            'journal_id': journals.id,
            'payment_method_id': methods.id,
            'payment_ids': [(6, 0, payments.ids)],
        })
|
||||
220
Fusion Accounting/models/budget.py
Normal file
220
Fusion Accounting/models/budget.py
Normal file
@@ -0,0 +1,220 @@
|
||||
# Part of Fusion Accounting. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, Command, fields, models, _
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.tools import date_utils, float_is_zero, float_round
|
||||
|
||||
|
||||
class FusionBudget(models.Model):
    """Represents a financial budget linked to accounting reports.

    A budget groups together individual budget line items, each targeting
    a specific account and month. Budgets are company-specific and appear
    as additional columns in accounting reports.
    """

    _name = 'account.report.budget'
    _description = "Fusion Report Budget"
    _order = 'sequence, id'

    name = fields.Char(
        string="Budget Name",
        required=True,
    )
    sequence = fields.Integer(
        string="Display Order",
        default=10,
    )
    company_id = fields.Many2one(
        comodel_name='res.company',
        string="Company",
        required=True,
        default=lambda self: self.env.company,
    )
    item_ids = fields.One2many(
        comodel_name='account.report.budget.item',
        inverse_name='budget_id',
        string="Budget Lines",
    )

    # --------------------------------------------------
    # CRUD
    # --------------------------------------------------

    @api.model_create_multi
    def create(self, vals_list):
        """Override create to sanitize the budget name by stripping whitespace."""
        for record_vals in vals_list:
            raw_name = record_vals.get('name')
            if raw_name:
                record_vals['name'] = raw_name.strip()
        return super().create(vals_list)

    # --------------------------------------------------
    # Constraints
    # --------------------------------------------------

    @api.constrains('name')
    def _check_budget_name_not_empty(self):
        """Ensure every budget record has a non-empty name."""
        for record in self:
            # Guards against both missing names and whitespace-only names
            # (e.g. set via write(), which bypasses the create() strip).
            if not record.name or not record.name.strip():
                raise ValidationError(
                    _("A budget must have a non-empty name.")
                )

    # --------------------------------------------------
    # Duplication helpers
    # --------------------------------------------------

    def copy_data(self, default=None):
        """Append '(copy)' suffix to duplicated budget names."""
        data_list = super().copy_data(default=default)
        result = []
        for budget, vals in zip(self, data_list):
            vals['name'] = _("%s (copy)", budget.name)
            result.append(vals)
        return result

    def copy(self, default=None):
        """Duplicate budgets together with their line items.

        NOTE(review): lines are duplicated manually here; if the
        ``item_ids`` One2many is also copied by the ORM's default copy
        behavior, lines would end up duplicated twice — confirm the
        field's ``copy`` flag against the ORM defaults.
        """
        duplicated_budgets = super().copy(default)
        for source_budget, target_budget in zip(self, duplicated_budgets):
            for line in source_budget.item_ids:
                line.copy({
                    'budget_id': target_budget.id,
                    'account_id': line.account_id.id,
                    'amount': line.amount,
                    'date': line.date,
                })
        return duplicated_budgets

    # --------------------------------------------------
    # Budget item management (called from report engine)
    # --------------------------------------------------

    def _create_or_update_budget_items(
        self, value_to_set, account_id, rounding, date_from, date_to
    ):
        """Distribute a target amount across monthly budget items.

        When the user edits a budget cell in the report view, this method
        calculates the difference between the desired total and the existing
        total for the given account/date range, then distributes that delta
        evenly across the months in the range.

        Existing items within the range are updated in place; new items are
        created for months that don't have one yet.

        Args:
            value_to_set: The desired total amount for the date range.
            account_id: The ``account.account`` record id.
            rounding: Number of decimal digits for monetary precision.
            date_from: Start date (inclusive) of the budget period.
            date_to: End date (inclusive) of the budget period.
        """
        self.ensure_one()

        # Accept both date objects and ISO strings.
        period_start = fields.Date.to_date(date_from)
        period_end = fields.Date.to_date(date_to)

        BudgetItem = self.env['account.report.budget.item']

        # Fetch all items that already cover (part of) the requested range
        matching_items = BudgetItem.search_fetch(
            [
                ('budget_id', '=', self.id),
                ('account_id', '=', account_id),
                ('date', '>=', period_start),
                ('date', '<=', period_end),
            ],
            ['id', 'amount'],
        )
        current_total = sum(matching_items.mapped('amount'))

        # Calculate the remaining amount to distribute
        remaining_delta = value_to_set - current_total
        if float_is_zero(remaining_delta, precision_digits=rounding):
            return

        # Build a list of first-of-month dates spanning the period.
        # NOTE(review): assumes date_utils.date_range yields one entry per
        # month of the period — confirm the default step of date_range.
        month_starts = [
            date_utils.start_of(d, 'month')
            for d in date_utils.date_range(period_start, period_end)
        ]
        month_count = len(month_starts)

        # Spread the delta equally across months (rounding down),
        # then assign any leftover cents to the final month.
        per_month = float_round(
            remaining_delta / month_count,
            precision_digits=rounding,
            rounding_method='DOWN',
        )
        monthly_portions = [per_month] * month_count
        distributed_sum = float_round(sum(monthly_portions), precision_digits=rounding)
        monthly_portions[-1] += float_round(
            remaining_delta - distributed_sum,
            precision_digits=rounding,
        )

        # Pair existing items with months and amounts; create or update as
        # needed.
        # NOTE(review): the pairing is positional (i-th existing item gets
        # the i-th month's portion) rather than matched by each item's own
        # date — verify this is the intended distribution semantics.
        write_commands = []
        idx = 0
        for month_date, portion in zip(month_starts, monthly_portions):
            if idx < len(matching_items):
                # Update an existing item
                existing = matching_items[idx]
                write_commands.append(
                    Command.update(existing.id, {
                        'amount': existing.amount + portion,
                    })
                )
            else:
                # No existing item for this slot – create a new one
                write_commands.append(
                    Command.create({
                        'account_id': account_id,
                        'amount': portion,
                        'date': month_date,
                    })
                )
            idx += 1

        if write_commands:
            self.item_ids = write_commands
            # Ensure the ORM flushes new records to the database so
            # subsequent queries within the same request see them.
            BudgetItem.flush_model()
|
||||
|
||||
|
||||
class FusionBudgetItem(models.Model):
    """One month's budgeted amount for a single account.

    The ``date`` field holds the first day of the month the amount
    applies to. Items belong to a parent ``account.report.budget``
    and are deleted together with it (cascade).
    """

    _name = 'account.report.budget.item'
    _description = "Fusion Report Budget Line"

    # Parent budget; cascade delete keeps orphan lines from surviving.
    budget_id = fields.Many2one(
        'account.report.budget',
        required=True,
        ondelete='cascade',
        string="Parent Budget",
    )
    # Target general-ledger account for this allocation.
    account_id = fields.Many2one(
        'account.account',
        required=True,
        string="Account",
    )
    # First day of the month this amount is budgeted for.
    date = fields.Date(
        required=True,
        string="Month",
    )
    # Allocated monetary amount (plain float; currency handled upstream).
    amount = fields.Float(
        default=0.0,
        string="Budgeted Amount",
    )
|
||||
233
Fusion Accounting/models/cash_basis_report.py
Normal file
233
Fusion Accounting/models/cash_basis_report.py
Normal file
@@ -0,0 +1,233 @@
|
||||
# Fusion Accounting - Cash Basis Reporting
|
||||
# Copyright (C) 2026 Nexa Systems Inc. (https://nexasystems.ca)
|
||||
# Original implementation for the Fusion Accounting module.
|
||||
#
|
||||
# Alternative report handler that uses payment dates instead of invoice
|
||||
# dates for recognizing revenue and expenses, supporting the cash basis
|
||||
# accounting method.
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.tools import SQL, Query, float_is_zero
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionCashBasisReport(models.AbstractModel):
    """Cash basis report custom handler.

    Unlike the standard accrual-based reporting, cash basis reports
    recognise revenue when payment is received and expenses when payment
    is made, regardless of when the invoice was issued.

    This handler:
    - Replaces the invoice/bill date with the payment reconciliation date
    - Filters transactions to only include those with matching payments
    - Provides a toggle in report options to switch between accrual and
      cash basis views
    """

    _name = 'account.cash.basis.report.handler'
    _inherit = 'account.report.custom.handler'
    _description = 'Cash Basis Report Custom Handler'

    # ------------------------------------------------------------------
    # Options Initializer
    # ------------------------------------------------------------------

    def _custom_options_initializer(self, report, options, previous_options):
        """Add cash-basis specific options to the report.

        Installs the ``fusion_cash_basis`` toggle (defaults to on) and
        restricts the journal selector to journal types that can carry
        cash-basis-relevant entries.
        """
        super()._custom_options_initializer(report, options, previous_options=previous_options)

        # Add the cash basis toggle, preserving the user's prior choice.
        options['fusion_cash_basis'] = previous_options.get('fusion_cash_basis', True)

        # Restrict to journals that support cash basis
        report._init_options_journals(
            options,
            previous_options=previous_options,
            additional_journals_domain=[('type', 'in', ('sale', 'purchase', 'bank', 'cash', 'general'))],
        )

    # ------------------------------------------------------------------
    # Dynamic Lines
    # ------------------------------------------------------------------

    def _dynamic_lines_generator(self, report, options, all_column_groups_expression_totals, warnings=None):
        """Generate report lines based on cash basis (payment date) accounting.

        Returns a list of (sequence, line_dict) tuples for the report engine:
        a "Cash Revenue" section, a "Cash Expenses" section (each followed by
        its detail lines sorted by cash date), and a "Net Cash Income" total.
        Returns an empty list when the cash-basis toggle is off.
        """
        output_lines = []

        if not options.get('fusion_cash_basis', True):
            # Fallback to standard accrual-based processing
            return output_lines

        cash_data = self._compute_cash_basis_data(report, options)

        # ---- Revenue Section ----
        revenue_total = sum(d['amount'] for d in cash_data.get('revenue', []))
        output_lines.append((0, self._build_section_line(
            report, options, 'revenue',
            _("Cash Revenue"), revenue_total,
        )))
        for entry in sorted(cash_data.get('revenue', []), key=lambda e: e.get('date', '')):
            output_lines.append((1, self._build_detail_line(report, options, entry)))

        # ---- Expense Section ----
        expense_total = sum(d['amount'] for d in cash_data.get('expense', []))
        output_lines.append((0, self._build_section_line(
            report, options, 'expense',
            _("Cash Expenses"), expense_total,
        )))
        for entry in sorted(cash_data.get('expense', []), key=lambda e: e.get('date', '')):
            output_lines.append((1, self._build_detail_line(report, options, entry)))

        # ---- Net Cash Income ----
        # NOTE(review): abs() assumes expense_total's sign convention —
        # expenses are computed as debit - credit, normally positive;
        # confirm negative expense totals (net credits) are intended to
        # reduce net income this way.
        net_total = revenue_total - abs(expense_total)
        output_lines.append((0, self._build_section_line(
            report, options, 'net_income',
            _("Net Cash Income"), net_total,
        )))

        return output_lines

    # ------------------------------------------------------------------
    # Data Computation
    # ------------------------------------------------------------------

    def _compute_cash_basis_data(self, report, options):
        """Compute cash basis amounts grouped by revenue/expense.

        Queries reconciled payments to find the actual cash dates for
        recognised amounts.

        :returns: dict with keys ``revenue`` and ``expense``, each
                  containing a list of entry dicts with amount, date,
                  account, and partner information.
        """
        result = {'revenue': [], 'expense': []}
        company_ids = [c['id'] for c in options.get('companies', [{'id': self.env.company.id}])]
        date_from = options.get('date', {}).get('date_from')
        date_to = options.get('date', {}).get('date_to')

        if not date_from or not date_to:
            return result

        # Query: find all payment reconciliation entries within the period.
        # aa.account_type is selected here so the classification below does
        # not need a per-row ORM read (avoids an N+1 query pattern).
        query = """
            SELECT
                aml.id AS line_id,
                aml.account_id,
                aa.name AS account_name,
                aa.code AS account_code,
                aa.account_type AS account_type,
                aml.partner_id,
                rp.name AS partner_name,
                apr.max_date AS cash_date,
                CASE
                    WHEN aa.account_type IN ('income', 'income_other')
                    THEN aml.credit - aml.debit
                    ELSE aml.debit - aml.credit
                END AS amount
            FROM account_move_line aml
            JOIN account_account aa ON aa.id = aml.account_id
            LEFT JOIN res_partner rp ON rp.id = aml.partner_id
            JOIN account_move am ON am.id = aml.move_id
            JOIN (
                SELECT
                    apr2.debit_move_id,
                    apr2.credit_move_id,
                    apr2.max_date
                FROM account_partial_reconcile apr2
                WHERE apr2.max_date >= %s
                  AND apr2.max_date <= %s
            ) apr ON (apr.debit_move_id = aml.id OR apr.credit_move_id = aml.id)
            WHERE am.state = 'posted'
              AND am.company_id IN %s
              AND aa.account_type IN (
                  'income', 'income_other',
                  'expense', 'expense_direct_cost', 'expense_depreciation'
              )
            ORDER BY apr.max_date, aa.code
        """

        self.env.cr.execute(query, (date_from, date_to, tuple(company_ids)))
        rows = self.env.cr.dictfetchall()

        seen_lines = set()
        for row in rows:
            # Avoid counting the same line twice if partially reconciled.
            # NOTE(review): deduplicating still books the line's FULL
            # amount on the first matching partial — partial payments are
            # not prorated; confirm this is the intended approximation.
            if row['line_id'] in seen_lines:
                continue
            seen_lines.add(row['line_id'])

            entry = {
                'line_id': row['line_id'],
                'account_id': row['account_id'],
                'account_name': row['account_name'],
                'account_code': row['account_code'] or '',
                'partner_id': row['partner_id'],
                'partner_name': row['partner_name'] or '',
                'date': str(row['cash_date']),
                'amount': row['amount'] or 0.0,
            }

            # Classify using the account_type already fetched by the query
            # (previously this did a browse() per row — an N+1 ORM read).
            if row['account_type'] in ('income', 'income_other'):
                result['revenue'].append(entry)
            else:
                result['expense'].append(entry)

        return result

    # ------------------------------------------------------------------
    # Line Builders
    # ------------------------------------------------------------------

    def _build_section_line(self, report, options, section_id, name, total):
        """Build a section header line for the report.

        :param section_id: unique identifier for the section
        :param name: display name of the section
        :param total: aggregated monetary total
        :returns: line dict compatible with the report engine
        """
        columns = report._build_column_dict(total, options, figure_type='monetary')
        return {
            'id': report._get_generic_line_id(None, None, markup=f'fusion_cb_{section_id}'),
            'name': name,
            'level': 1,
            'columns': [columns],
            'unfoldable': False,
            'unfolded': False,
        }

    def _build_detail_line(self, report, options, entry):
        """Build a detail line for a single cash-basis entry.

        :param entry: dict with amount, account_code, account_name, etc.
        :returns: line dict compatible with the report engine
        """
        name = f"{entry['account_code']} {entry['account_name']}"
        if entry.get('partner_name'):
            name += f" - {entry['partner_name']}"

        columns = report._build_column_dict(
            entry['amount'], options, figure_type='monetary',
        )

        return {
            'id': report._get_generic_line_id(
                'account.move.line', entry['line_id'],
                markup='fusion_cb_detail',
            ),
            'name': name,
            'level': 3,
            'columns': [columns],
            'caret_options': 'account.move.line',
            'unfoldable': False,
        }
|
||||
60
Fusion Accounting/models/chart_template.py
Normal file
60
Fusion Accounting/models/chart_template.py
Normal file
@@ -0,0 +1,60 @@
|
||||
# Fusion Accounting - Chart Template Post-Load Hook
|
||||
# Configures tax periodicity and generates initial tax-closing reminders
|
||||
|
||||
from odoo import fields, models, _
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
|
||||
class FusionChartTemplatePostLoad(models.AbstractModel):
    """Runs post-installation configuration after chart-of-accounts
    data is loaded: sets the tax periodicity journal, enables the
    totals-below-sections option, and schedules the first tax-closing
    reminder activity."""

    _inherit = 'account.chart.template'

    def _post_load_data(self, template_code, company, template_data):
        """Apply Fusion Accounting defaults after chart template data
        has been loaded for *company*.

        :param template_code: code of the chart template being installed
        :param company: company record the template was loaded for; may be
            falsy, in which case ``self.env.company`` is used instead
        :param template_data: template data dict forwarded to ``super()``
        :raises ValidationError: when the target company has no journal of
            type ``'general'`` to use for tax-periodicity entries
        """
        super()._post_load_data(template_code, company, template_data)

        target_company = company or self.env.company

        # Locate the default miscellaneous journal
        misc_journal = self.env['account.journal'].search([
            *self.env['account.journal']._check_company_domain(target_company),
            ('type', '=', 'general'),
        ], limit=1)

        if not misc_journal:
            raise ValidationError(
                _("No miscellaneous journal could be found for the active company.")
            )

        # NOTE(review): totals_below_sections is mirrored from the
        # anglo-saxon flag — presumably intentional coupling; confirm.
        target_company.update({
            'totals_below_sections': target_company.anglo_saxon_accounting,
            'account_tax_periodicity_journal_id': misc_journal,
            'account_tax_periodicity_reminder_day': 7,
        })
        misc_journal.show_on_dashboard = True

        # Determine the appropriate tax report (country-specific or generic):
        # prefer a country variant rooted at the generic tax report.
        generic_report = self.env.ref('account.generic_tax_report')
        country_report = self.env['account.report'].search([
            ('availability_condition', '=', 'country'),
            ('country_id', '=', target_company.country_id.id),
            ('root_report_id', '=', generic_report.id),
        ], limit=1)
        effective_report = country_report or generic_report

        # Schedule the initial tax-closing reminder activity
        # (only when one does not already exist for this period/report).
        _start, period_end = target_company._get_tax_closing_period_boundaries(
            fields.Date.today(), effective_report,
        )
        existing_activity = target_company._get_tax_closing_reminder_activity(
            effective_report.id, period_end,
        )
        if not existing_activity:
            target_company._generate_tax_closing_reminder_activity(
                effective_report, period_end,
            )
|
||||
261
Fusion Accounting/models/check_printing.py
Normal file
261
Fusion Accounting/models/check_printing.py
Normal file
@@ -0,0 +1,261 @@
|
||||
"""
|
||||
Fusion Accounting - Check Printing Support
|
||||
|
||||
Extends ``account.payment`` with fields and logic required for
|
||||
printing physical checks, including automatic check numbering and
|
||||
amount-to-words conversion.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import math
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
# ======================================================================
|
||||
# Amount-to-words conversion (English)
|
||||
# ======================================================================
|
||||
|
||||
_ONES = [
|
||||
'', 'One', 'Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight',
|
||||
'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen', 'Fourteen', 'Fifteen',
|
||||
'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen',
|
||||
]
|
||||
_TENS = [
|
||||
'', '', 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy',
|
||||
'Eighty', 'Ninety',
|
||||
]
|
||||
_SCALES = [
|
||||
(10 ** 12, 'Trillion'),
|
||||
(10 ** 9, 'Billion'),
|
||||
(10 ** 6, 'Million'),
|
||||
(10 ** 3, 'Thousand'),
|
||||
(10 ** 2, 'Hundred'),
|
||||
]
|
||||
|
||||
|
||||
def _int_to_words(number):
|
||||
"""Convert a non-negative integer to its English word representation.
|
||||
|
||||
:param int number: A non-negative integer (0 .. 999 999 999 999 999).
|
||||
:return: English words, e.g. ``'One Thousand Two Hundred Thirty-Four'``.
|
||||
:rtype: str
|
||||
"""
|
||||
if number == 0:
|
||||
return 'Zero'
|
||||
if number < 0:
|
||||
return 'Minus ' + _int_to_words(-number)
|
||||
|
||||
parts = []
|
||||
for scale_value, scale_name in _SCALES:
|
||||
count, number = divmod(number, scale_value)
|
||||
if count:
|
||||
if scale_value == 100:
|
||||
parts.append(f'{_int_to_words(count)} {scale_name}')
|
||||
else:
|
||||
parts.append(f'{_int_to_words(count)} {scale_name}')
|
||||
if 0 < number < 20:
|
||||
parts.append(_ONES[number])
|
||||
elif number >= 20:
|
||||
tens_idx, ones_idx = divmod(number, 10)
|
||||
word = _TENS[tens_idx]
|
||||
if ones_idx:
|
||||
word += '-' + _ONES[ones_idx]
|
||||
parts.append(word)
|
||||
|
||||
return ' '.join(parts)
|
||||
|
||||
|
||||
def amount_to_words(amount, currency_name='Dollars', cents_name='Cents'):
|
||||
"""Convert a monetary amount to an English sentence.
|
||||
|
||||
Example::
|
||||
|
||||
>>> amount_to_words(1234.56)
|
||||
'One Thousand Two Hundred Thirty-Four Dollars and Fifty-Six Cents'
|
||||
|
||||
:param float amount: The monetary amount.
|
||||
:param str currency_name: Name of the major currency unit.
|
||||
:param str cents_name: Name of the minor currency unit.
|
||||
:return: The amount expressed in English words.
|
||||
:rtype: str
|
||||
"""
|
||||
if amount < 0:
|
||||
return 'Minus ' + amount_to_words(-amount, currency_name, cents_name)
|
||||
|
||||
whole = int(amount)
|
||||
# Round to avoid floating-point artefacts (e.g. 1.005 -> 0 cents)
|
||||
cents = round((amount - whole) * 100)
|
||||
if cents >= 100:
|
||||
whole += 1
|
||||
cents = 0
|
||||
|
||||
result = f'{_int_to_words(whole)} {currency_name}'
|
||||
if cents:
|
||||
result += f' and {_int_to_words(cents)} {cents_name}'
|
||||
else:
|
||||
result += f' and Zero {cents_name}'
|
||||
return result
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Odoo model
|
||||
# ======================================================================
|
||||
|
||||
|
||||
class FusionCheckPrinting(models.Model):
    """Adds check-printing capabilities to ``account.payment``.

    Features
    --------
    * Manual or automatic check numbering per journal.
    * Human-readable amount-in-words field for check printing.
    * Validation to prevent duplicate check numbers within a journal.
    """

    _inherit = 'account.payment'

    # ------------------------------------------------------------------
    # Fields
    # ------------------------------------------------------------------

    check_number = fields.Char(
        string='Check Number',
        copy=False,
        tracking=True,
        help="The number printed on the physical check.",
    )
    # Mirrors the journal-level flag; read-only on the payment.
    check_manual_sequencing = fields.Boolean(
        string='Manual Numbering',
        related='journal_id.fusion_check_manual_sequencing',
        readonly=True,
        help="When enabled, check numbers are entered manually instead "
             "of being assigned automatically.",
    )
    # Writable related field so the counter can be adjusted from a payment.
    check_next_number = fields.Char(
        string='Next Check Number',
        related='journal_id.fusion_check_next_number',
        readonly=False,
        help="The next check number to be assigned automatically.",
    )
    check_amount_in_words = fields.Char(
        string='Amount in Words',
        compute='_compute_check_amount_in_words',
        store=True,
        help="Human-readable representation of the payment amount, "
             "suitable for printing on a check.",
    )

    # ------------------------------------------------------------------
    # Computed fields
    # ------------------------------------------------------------------

    @api.depends('amount', 'currency_id')
    def _compute_check_amount_in_words(self):
        """Compute the textual representation of the payment amount."""
        for payment in self:
            if payment.currency_id and payment.amount:
                currency_name = payment.currency_id.currency_unit_label or 'Units'
                cents_name = payment.currency_id.currency_subunit_label or 'Cents'
                payment.check_amount_in_words = amount_to_words(
                    payment.amount,
                    currency_name=currency_name,
                    cents_name=cents_name,
                )
            else:
                payment.check_amount_in_words = ''

    # ------------------------------------------------------------------
    # Constraints
    # ------------------------------------------------------------------

    _sql_constraints = [
        (
            'check_number_unique',
            'UNIQUE(check_number, journal_id)',
            'A check number must be unique per journal.',
        ),
    ]

    # ------------------------------------------------------------------
    # Business logic
    # ------------------------------------------------------------------

    def action_assign_check_number(self):
        """Assign the next available check number from the journal.

        If the journal is configured for manual sequencing the user
        must enter the number themselves; this method handles only the
        automatic case.

        :raises UserError: if a payment still needs a number but its
            journal uses manual sequencing.
        """
        for payment in self:
            # Fix: skip payments that already carry a number before the
            # manual-sequencing check — previously a manually-sequenced
            # payment raised even when its number was already set.
            if payment.check_number:
                continue  # already assigned
            if payment.check_manual_sequencing:
                raise UserError(_(
                    "Journal '%(journal)s' uses manual check numbering. "
                    "Please enter the check number manually.",
                    journal=payment.journal_id.display_name,
                ))

            next_number = payment.journal_id.fusion_check_next_number or '1'
            # Pad to six digits for a uniform printed appearance.
            payment.check_number = next_number.zfill(6)
            # Increment the journal's next-number counter
            try:
                payment.journal_id.fusion_check_next_number = str(
                    int(next_number) + 1
                )
            except ValueError:
                # Non-numeric counter: keep the assigned number, warn only.
                _log.warning(
                    "Could not auto-increment check number '%s' on "
                    "journal %s", next_number,
                    payment.journal_id.display_name,
                )

    def action_print_check(self):
        """Print the check report for the selected payments.

        Automatically assigns check numbers to any payment that does
        not already have one.

        :return: Report action dictionary.
        :rtype: dict
        :raises UserError: when some payments still lack a check number
            after automatic assignment (e.g. manual-sequencing journals).
        """
        payments_without_number = self.filtered(
            lambda p: not p.check_number and not p.check_manual_sequencing
        )
        payments_without_number.action_assign_check_number()

        missing = self.filtered(lambda p: not p.check_number)
        if missing:
            raise UserError(_(
                "The following payments still have no check number:\n"
                "%(payments)s\nPlease assign check numbers before printing.",
                payments=', '.join(missing.mapped('name')),
            ))

        return self.env.ref(
            'fusion_accounting.action_report_check'
        ).report_action(self)
|
||||
|
||||
|
||||
class FusionAccountJournalCheck(models.Model):
    """Adds check-numbering configuration to ``account.journal``."""

    _inherit = 'account.journal'

    # When set, users type check numbers themselves; the automatic
    # counter below is not consumed for this journal.
    fusion_check_manual_sequencing = fields.Boolean(
        string='Manual Check Numbering',
        help="Enable to enter check numbers manually instead of using "
             "automatic sequencing.",
    )
    # Stored as Char (not Integer) so values round-trip through the
    # zero-padding applied at assignment time.
    fusion_check_next_number = fields.Char(
        string='Next Check Number',
        default='1',
        help="The next check number that will be automatically assigned "
             "when printing checks from this journal.",
    )
|
||||
715
Fusion Accounting/models/cii_generator.py
Normal file
715
Fusion Accounting/models/cii_generator.py
Normal file
@@ -0,0 +1,715 @@
|
||||
"""
|
||||
Fusion Accounting - Cross-Industry Invoice (CII) / Factur-X Generator & Parser
|
||||
|
||||
Generates UN/CEFACT Cross-Industry Invoice (CII) compliant XML documents
|
||||
and supports embedding the XML inside a PDF/A-3 container to produce
|
||||
Factur-X / ZUGFeRD hybrid invoices.
|
||||
|
||||
References
|
||||
----------
|
||||
* UN/CEFACT XML Schemas (D16B)
|
||||
https://unece.org/trade/uncefact/xml-schemas
|
||||
* Factur-X / ZUGFeRD specification
|
||||
https://fnfe-mpe.org/factur-x/
|
||||
* EN 16931-1:2017 – European e-Invoicing semantic data model
|
||||
|
||||
Namespace URIs used below are taken directly from the published
|
||||
UN/CEFACT schemas.
|
||||
|
||||
Original implementation by Nexa Systems Inc.
|
||||
"""
|
||||
|
||||
import io
import logging
import re
from datetime import date

from lxml import etree

from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.tools import float_round
|
||||
|
||||
_log = logging.getLogger(__name__)

# ======================================================================
# XML Namespace constants (UN/CEFACT CII D16B)
# ======================================================================
# Each URI is split across two adjacent string literals purely for line
# length; the concatenated value is the schema URN used as-is.
NS_RSM = (
    "urn:un:unece:uncefact:data:standard:"
    "CrossIndustryInvoice:100"
)
NS_RAM = (
    "urn:un:unece:uncefact:data:standard:"
    "ReusableAggregateBusinessInformationEntity:100"
)
NS_QDT = (
    "urn:un:unece:uncefact:data:standard:"
    "QualifiedDataType:100"
)
NS_UDT = (
    "urn:un:unece:uncefact:data:standard:"
    "UnqualifiedDataType:100"
)

# Prefix -> namespace map handed to lxml when building the CII tree.
NSMAP_CII = {
    "rsm": NS_RSM,
    "ram": NS_RAM,
    "qdt": NS_QDT,
    "udt": NS_UDT,
}

# Factur-X profile URNs
# Keys are the profile names accepted by generate_cii_invoice(); values
# are the conformance URNs written into ExchangedDocumentContext.
FACTURX_PROFILES = {
    "minimum": (
        "urn:factur-x.eu:1p0:minimum"
    ),
    "basicwl": (
        "urn:factur-x.eu:1p0:basicwl"
    ),
    "basic": (
        "urn:factur-x.eu:1p0:basic"
    ),
    "en16931": (
        "urn:cen.eu:en16931:2017#compliant#"
        "urn:factur-x.eu:1p0:en16931"
    ),
    "extended": (
        "urn:factur-x.eu:1p0:extended"
    ),
}

# CII type code mapping (UNTDID 1001)
# Maps Odoo move types to the document type code emitted in ram:TypeCode.
CII_TYPE_CODE_MAP = {
    "out_invoice": "380",  # Commercial Invoice
    "out_refund": "381",  # Credit Note
    "in_invoice": "380",
    "in_refund": "381",
}
|
||||
|
||||
|
||||
class FusionCIIGenerator(models.AbstractModel):
|
||||
"""
|
||||
Generates and parses UN/CEFACT Cross-Industry Invoice documents and
|
||||
optionally embeds the XML within a PDF/A-3 container for Factur-X
|
||||
compliance.
|
||||
|
||||
Implemented as an Odoo abstract model for ORM registry access.
|
||||
"""
|
||||
|
||||
_name = "fusion.cii.generator"
|
||||
_description = "Fusion CII / Factur-X Generator"
|
||||
|
||||
# ==================================================================
|
||||
# Public API
|
||||
# ==================================================================
|
||||
def generate_cii_invoice(self, move, profile="en16931"):
|
||||
"""Build a CII XML document for a single ``account.move``.
|
||||
|
||||
Args:
|
||||
move: An ``account.move`` singleton.
|
||||
profile (str): Factur-X conformance profile. One of
|
||||
``'minimum'``, ``'basic'``, ``'en16931'`` (default),
|
||||
``'extended'``.
|
||||
|
||||
Returns:
|
||||
bytes: UTF-8 encoded CII XML.
|
||||
"""
|
||||
move.ensure_one()
|
||||
self._validate_move(move)
|
||||
|
||||
root = etree.Element(
|
||||
f"{{{NS_RSM}}}CrossIndustryInvoice", nsmap=NSMAP_CII
|
||||
)
|
||||
|
||||
self._add_exchange_context(root, profile)
|
||||
header = self._add_header_trade(root, move)
|
||||
agreement = self._add_agreement_trade(root, move)
|
||||
delivery = self._add_delivery_trade(root, move)
|
||||
settlement = self._add_settlement_trade(root, move)
|
||||
self._add_line_items(root, move)
|
||||
|
||||
return etree.tostring(
|
||||
root,
|
||||
xml_declaration=True,
|
||||
encoding="UTF-8",
|
||||
pretty_print=True,
|
||||
)
|
||||
|
||||
    def parse_cii_invoice(self, xml_bytes):
        """Parse a CII XML document into an invoice values dictionary.

        Args:
            xml_bytes (bytes): Raw CII XML content.

        Returns:
            dict: Invoice values suitable for ``account.move.create()``.
                NOTE(review): ``currency_id`` holds the ISO currency *code*
                string from the XML, not a database id — the caller must
                resolve it; confirm against callers.
        """
        root = etree.fromstring(xml_bytes)
        # Only the namespaces actually queried below are mapped here.
        ns = {
            "rsm": NS_RSM,
            "ram": NS_RAM,
            "udt": NS_UDT,
        }

        # Header
        header_path = (
            "rsm:SupplyChainTradeTransaction/"
            "ram:ApplicableHeaderTradeSettlement"
        )
        doc_path = (
            "rsm:ExchangedDocument"
        )

        # _xpath_text is defined elsewhere in this file; presumably it
        # returns the node's text or None when absent — TODO confirm.
        ref = self._xpath_text(root, f"{doc_path}/ram:ID", ns)
        type_code = self._xpath_text(root, f"{doc_path}/ram:TypeCode", ns)
        issue_date = self._xpath_text(
            root,
            f"{doc_path}/ram:IssueDateTime/udt:DateTimeString",
            ns,
        )
        currency = self._xpath_text(
            root, f"{header_path}/ram:InvoiceCurrencyCode", ns
        )
        due_date = self._xpath_text(
            root,
            f"{header_path}/ram:SpecifiedTradePaymentTerms/"
            "ram:DueDateDateTime/udt:DateTimeString",
            ns,
        )

        # Parties
        agreement_path = (
            "rsm:SupplyChainTradeTransaction/"
            "ram:ApplicableHeaderTradeAgreement"
        )
        supplier_name = self._xpath_text(
            root,
            f"{agreement_path}/ram:SellerTradeParty/ram:Name",
            ns,
        )
        supplier_vat = self._xpath_text(
            root,
            f"{agreement_path}/ram:SellerTradeParty/"
            "ram:SpecifiedTaxRegistration/ram:ID",
            ns,
        )
        customer_name = self._xpath_text(
            root,
            f"{agreement_path}/ram:BuyerTradeParty/ram:Name",
            ns,
        )
        customer_vat = self._xpath_text(
            root,
            f"{agreement_path}/ram:BuyerTradeParty/"
            "ram:SpecifiedTaxRegistration/ram:ID",
            ns,
        )

        # Lines — missing quantity defaults to "1", missing price to "0".
        line_path = (
            "rsm:SupplyChainTradeTransaction/"
            "ram:IncludedSupplyChainTradeLineItem"
        )
        line_nodes = root.findall(line_path, ns)
        lines = []
        for ln in line_nodes:
            name = self._xpath_text(
                ln,
                "ram:SpecifiedTradeProduct/ram:Name",
                ns,
            ) or ""
            qty = float(
                self._xpath_text(
                    ln,
                    "ram:SpecifiedLineTradeDelivery/"
                    "ram:BilledQuantity",
                    ns,
                ) or "1"
            )
            price = float(
                self._xpath_text(
                    ln,
                    "ram:SpecifiedLineTradeAgreement/"
                    "ram:NetPriceProductTradePrice/"
                    "ram:ChargeAmount",
                    ns,
                ) or "0"
            )
            lines.append({
                "name": name,
                "quantity": qty,
                "price_unit": price,
            })

        # UNTDID 1001: 381 = credit note; everything else is treated
        # as a customer invoice.
        is_credit_note = type_code == "381"
        move_type = "out_refund" if is_credit_note else "out_invoice"

        # Normalise dates from CII format (YYYYMMDD) to ISO
        if issue_date and len(issue_date) == 8:
            issue_date = f"{issue_date[:4]}-{issue_date[4:6]}-{issue_date[6:]}"
        if due_date and len(due_date) == 8:
            due_date = f"{due_date[:4]}-{due_date[4:6]}-{due_date[6:]}"

        return {
            "move_type": move_type,
            "ref": ref,
            "invoice_date": issue_date,
            "invoice_date_due": due_date,
            "currency_id": currency,
            "supplier_name": supplier_name,
            "supplier_vat": supplier_vat,
            "customer_name": customer_name,
            "customer_vat": customer_vat,
            "invoice_line_ids": lines,
        }
|
||||
|
||||
def embed_in_pdf(self, pdf_bytes, xml_bytes, profile="en16931"):
|
||||
"""Embed CII XML into a PDF to produce a Factur-X / ZUGFeRD file.
|
||||
|
||||
This creates a PDF/A-3 compliant document with the XML attached
|
||||
as an Associated File (AF) according to the Factur-X specification.
|
||||
|
||||
Args:
|
||||
pdf_bytes (bytes): The original invoice PDF content.
|
||||
xml_bytes (bytes): The CII XML to embed.
|
||||
profile (str): Factur-X profile name for metadata.
|
||||
|
||||
Returns:
|
||||
bytes: The resulting PDF/A-3 with embedded XML.
|
||||
|
||||
Note:
|
||||
This method requires the ``pypdf`` library. If it is not
|
||||
installed the original PDF is returned unchanged with a
|
||||
warning logged.
|
||||
"""
|
||||
try:
|
||||
from pypdf import PdfReader, PdfWriter
|
||||
except ImportError:
|
||||
_log.warning(
|
||||
"pypdf is not installed; returning PDF without embedded XML. "
|
||||
"Install pypdf to enable Factur-X PDF/A-3 embedding."
|
||||
)
|
||||
return pdf_bytes
|
||||
|
||||
reader = PdfReader(io.BytesIO(pdf_bytes))
|
||||
writer = PdfWriter()
|
||||
|
||||
# Copy all pages from the source PDF
|
||||
for page in reader.pages:
|
||||
writer.add_page(page)
|
||||
|
||||
# Copy metadata
|
||||
if reader.metadata:
|
||||
writer.add_metadata(reader.metadata)
|
||||
|
||||
# Attach the XML as an embedded file
|
||||
writer.add_attachment(
|
||||
fname="factur-x.xml",
|
||||
data=xml_bytes,
|
||||
)
|
||||
|
||||
# Update document info with Factur-X conformance level
|
||||
profile_label = profile.upper() if profile != "en16931" else "EN 16931"
|
||||
writer.add_metadata({
|
||||
"/Subject": f"Factur-X {profile_label}",
|
||||
})
|
||||
|
||||
output = io.BytesIO()
|
||||
writer.write(output)
|
||||
return output.getvalue()
|
||||
|
||||
# ==================================================================
|
||||
# Internal – XML construction helpers
|
||||
# ==================================================================
|
||||
def _validate_move(self, move):
|
||||
"""Ensure the move has the minimum data needed for CII export."""
|
||||
if not move.partner_id:
|
||||
raise UserError(
|
||||
_("Cannot generate CII: invoice '%s' has no partner.",
|
||||
move.name or _("Draft"))
|
||||
)
|
||||
|
||||
def _add_exchange_context(self, root, profile):
|
||||
"""Add ``ExchangedDocumentContext`` with the Factur-X profile."""
|
||||
ram = NS_RAM
|
||||
rsm = NS_RSM
|
||||
|
||||
ctx = self._sub(root, f"{{{rsm}}}ExchangedDocumentContext")
|
||||
guide = self._sub(ctx, f"{{{ram}}}GuidelineSpecifiedDocumentContextParameter")
|
||||
profile_urn = FACTURX_PROFILES.get(profile, FACTURX_PROFILES["en16931"])
|
||||
self._sub(guide, f"{{{ram}}}ID", profile_urn)
|
||||
|
||||
def _add_header_trade(self, root, move):
|
||||
"""Add ``ExchangedDocument`` with ID, type code, and issue date."""
|
||||
rsm = NS_RSM
|
||||
ram = NS_RAM
|
||||
udt = NS_UDT
|
||||
|
||||
doc = self._sub(root, f"{{{rsm}}}ExchangedDocument")
|
||||
self._sub(doc, f"{{{ram}}}ID", move.name or "DRAFT")
|
||||
|
||||
type_code = CII_TYPE_CODE_MAP.get(move.move_type, "380")
|
||||
self._sub(doc, f"{{{ram}}}TypeCode", type_code)
|
||||
|
||||
issue_dt = self._sub(doc, f"{{{ram}}}IssueDateTime")
|
||||
issue_date = move.invoice_date or fields.Date.context_today(move)
|
||||
date_str_el = self._sub(
|
||||
issue_dt, f"{{{udt}}}DateTimeString",
|
||||
issue_date.strftime("%Y%m%d"),
|
||||
)
|
||||
date_str_el.set("format", "102")
|
||||
|
||||
if move.narration:
|
||||
import re
|
||||
plain = re.sub(r"<[^>]+>", "", move.narration)
|
||||
note = self._sub(doc, f"{{{ram}}}IncludedNote")
|
||||
self._sub(note, f"{{{ram}}}Content", plain)
|
||||
|
||||
return doc
|
||||
|
||||
def _add_agreement_trade(self, root, move):
|
||||
"""Add ``ApplicableHeaderTradeAgreement`` with seller/buyer parties."""
|
||||
rsm = NS_RSM
|
||||
ram = NS_RAM
|
||||
|
||||
txn = root.find(f"{{{rsm}}}SupplyChainTradeTransaction")
|
||||
if txn is None:
|
||||
txn = self._sub(root, f"{{{rsm}}}SupplyChainTradeTransaction")
|
||||
|
||||
agreement = self._sub(txn, f"{{{ram}}}ApplicableHeaderTradeAgreement")
|
||||
|
||||
# Seller
|
||||
seller = self._sub(agreement, f"{{{ram}}}SellerTradeParty")
|
||||
self._add_trade_party(seller, move.company_id.partner_id, move.company_id)
|
||||
|
||||
# Buyer
|
||||
buyer = self._sub(agreement, f"{{{ram}}}BuyerTradeParty")
|
||||
self._add_trade_party(buyer, move.partner_id)
|
||||
|
||||
return agreement
|
||||
|
||||
def _add_delivery_trade(self, root, move):
|
||||
"""Add ``ApplicableHeaderTradeDelivery``."""
|
||||
rsm = NS_RSM
|
||||
ram = NS_RAM
|
||||
udt = NS_UDT
|
||||
|
||||
txn = root.find(f"{{{rsm}}}SupplyChainTradeTransaction")
|
||||
delivery = self._sub(txn, f"{{{ram}}}ApplicableHeaderTradeDelivery")
|
||||
|
||||
# Actual delivery date (use invoice date as fallback)
|
||||
event = self._sub(delivery, f"{{{ram}}}ActualDeliverySupplyChainEvent")
|
||||
occ = self._sub(event, f"{{{ram}}}OccurrenceDateTime")
|
||||
del_date = move.invoice_date or fields.Date.context_today(move)
|
||||
date_el = self._sub(
|
||||
occ, f"{{{udt}}}DateTimeString", del_date.strftime("%Y%m%d")
|
||||
)
|
||||
date_el.set("format", "102")
|
||||
|
||||
return delivery
|
||||
|
||||
    def _add_settlement_trade(self, root, move):
        """Add ``ApplicableHeaderTradeSettlement`` with tax, totals, and terms.

        Element order follows the CII schema sequence: currency, payment
        means, tax breakdown, payment terms, then the monetary summation.

        :param root: the ``rsm:CrossIndustryInvoice`` root element
        :param move: the ``account.move`` being exported
        :returns: the created settlement element
        """
        rsm = NS_RSM
        ram = NS_RAM
        udt = NS_UDT

        # Assumes the transaction wrapper already exists (created by
        # _add_agreement_trade, which runs first) — TODO confirm ordering.
        txn = root.find(f"{{{rsm}}}SupplyChainTradeTransaction")
        settlement = self._sub(
            txn, f"{{{ram}}}ApplicableHeaderTradeSettlement"
        )

        currency = move.currency_id.name or "USD"
        self._sub(settlement, f"{{{ram}}}InvoiceCurrencyCode", currency)

        # Payment means
        pm = self._sub(
            settlement, f"{{{ram}}}SpecifiedTradeSettlementPaymentMeans"
        )
        self._sub(pm, f"{{{ram}}}TypeCode", "30")  # Credit transfer

        if move.partner_bank_id:
            # acc_number is emitted as an IBAN; NOTE(review): non-IBAN
            # account numbers would land in IBANID too — confirm intent.
            account = self._sub(
                pm, f"{{{ram}}}PayeePartyCreditorFinancialAccount"
            )
            self._sub(
                account, f"{{{ram}}}IBANID",
                move.partner_bank_id.acc_number or "",
            )

        # Tax breakdown
        self._add_cii_tax(settlement, move, currency)

        # Payment terms
        if move.invoice_date_due:
            terms = self._sub(
                settlement, f"{{{ram}}}SpecifiedTradePaymentTerms"
            )
            due_dt = self._sub(terms, f"{{{ram}}}DueDateDateTime")
            due_el = self._sub(
                due_dt, f"{{{udt}}}DateTimeString",
                move.invoice_date_due.strftime("%Y%m%d"),
            )
            due_el.set("format", "102")

        # Monetary summation
        summation = self._sub(
            settlement,
            f"{{{ram}}}SpecifiedTradeSettlementHeaderMonetarySummation",
        )
        self._monetary_sub(
            summation, f"{{{ram}}}LineTotalAmount",
            move.amount_untaxed, currency,
        )
        self._monetary_sub(
            summation, f"{{{ram}}}TaxBasisTotalAmount",
            move.amount_untaxed, currency,
        )
        self._monetary_sub(
            summation, f"{{{ram}}}TaxTotalAmount",
            move.amount_tax, currency,
        )
        self._monetary_sub(
            summation, f"{{{ram}}}GrandTotalAmount",
            move.amount_total, currency,
        )
        self._monetary_sub(
            summation, f"{{{ram}}}DuePayableAmount",
            move.amount_residual, currency,
        )

        return settlement
|
||||
|
||||
def _add_cii_tax(self, settlement, move, currency):
|
||||
"""Add per-tax ``ApplicableTradeTax`` elements."""
|
||||
ram = NS_RAM
|
||||
|
||||
# Group tax lines
|
||||
tax_groups = {}
|
||||
for line in move.line_ids.filtered(
|
||||
lambda l: l.tax_line_id and l.tax_line_id.amount_type != "group"
|
||||
):
|
||||
tax = line.tax_line_id
|
||||
key = (tax.id, tax.name, tax.amount)
|
||||
if key not in tax_groups:
|
||||
tax_groups[key] = {
|
||||
"tax": tax,
|
||||
"tax_amount": 0.0,
|
||||
"base_amount": 0.0,
|
||||
}
|
||||
tax_groups[key]["tax_amount"] += abs(line.balance)
|
||||
|
||||
for inv_line in move.invoice_line_ids:
|
||||
for tax in inv_line.tax_ids:
|
||||
key = (tax.id, tax.name, tax.amount)
|
||||
if key in tax_groups:
|
||||
tax_groups[key]["base_amount"] += abs(inv_line.balance)
|
||||
|
||||
for _key, data in tax_groups.items():
|
||||
tax_el = self._sub(settlement, f"{{{ram}}}ApplicableTradeTax")
|
||||
self._monetary_sub(
|
||||
tax_el, f"{{{ram}}}CalculatedAmount",
|
||||
data["tax_amount"], currency,
|
||||
)
|
||||
self._sub(tax_el, f"{{{ram}}}TypeCode", "VAT")
|
||||
self._monetary_sub(
|
||||
tax_el, f"{{{ram}}}BasisAmount",
|
||||
data["base_amount"], currency,
|
||||
)
|
||||
self._sub(
|
||||
tax_el, f"{{{ram}}}CategoryCode",
|
||||
self._tax_category(data["tax"]),
|
||||
)
|
||||
self._sub(
|
||||
tax_el, f"{{{ram}}}RateApplicablePercent",
|
||||
self._fmt(abs(data["tax"].amount)),
|
||||
)
|
||||
|
||||
    def _add_line_items(self, root, move):
        """Append ``IncludedSupplyChainTradeLineItem`` elements.

        One line item is emitted per invoice line, numbered from 1;
        section and note lines are skipped. Each item carries, in schema
        order: line document, product, price, quantity, and settlement.

        :param root: the ``rsm:CrossIndustryInvoice`` root element
        :param move: the ``account.move`` being exported
        """
        rsm = NS_RSM
        ram = NS_RAM

        # Assumes the transaction wrapper already exists (created earlier
        # in generate_cii_invoice's section sequence) — TODO confirm.
        txn = root.find(f"{{{rsm}}}SupplyChainTradeTransaction")
        currency = move.currency_id.name or "USD"

        for idx, line in enumerate(move.invoice_line_ids, start=1):
            # Presentation-only rows carry no trade data.
            if line.display_type in ("line_section", "line_note"):
                continue

            item_el = self._sub(
                txn, f"{{{ram}}}IncludedSupplyChainTradeLineItem"
            )

            # Line document
            line_doc = self._sub(
                item_el,
                f"{{{ram}}}AssociatedDocumentLineDocument",
            )
            self._sub(line_doc, f"{{{ram}}}LineID", str(idx))

            # Product
            product = self._sub(
                item_el, f"{{{ram}}}SpecifiedTradeProduct"
            )
            if line.product_id and line.product_id.default_code:
                self._sub(
                    product, f"{{{ram}}}SellerAssignedID",
                    line.product_id.default_code,
                )
            self._sub(
                product, f"{{{ram}}}Name",
                line.name or line.product_id.name or _("(Unnamed)"),
            )

            # Line trade agreement (price)
            line_agreement = self._sub(
                item_el, f"{{{ram}}}SpecifiedLineTradeAgreement"
            )
            net_price = self._sub(
                line_agreement,
                f"{{{ram}}}NetPriceProductTradePrice",
            )
            self._monetary_sub(
                net_price, f"{{{ram}}}ChargeAmount",
                line.price_unit, currency,
            )

            # Line trade delivery (quantity)
            line_delivery = self._sub(
                item_el, f"{{{ram}}}SpecifiedLineTradeDelivery"
            )
            qty_el = self._sub(
                line_delivery, f"{{{ram}}}BilledQuantity",
                self._fmt(line.quantity),
            )
            # _uom_unece is defined elsewhere in this file; presumably it
            # maps the line's unit of measure to a UNECE code — confirm.
            qty_el.set("unitCode", self._uom_unece(line))

            # Line trade settlement (tax, total)
            line_settlement = self._sub(
                item_el, f"{{{ram}}}SpecifiedLineTradeSettlement"
            )

            for tax in line.tax_ids:
                trade_tax = self._sub(
                    line_settlement, f"{{{ram}}}ApplicableTradeTax"
                )
                self._sub(trade_tax, f"{{{ram}}}TypeCode", "VAT")
                self._sub(
                    trade_tax, f"{{{ram}}}CategoryCode",
                    self._tax_category(tax),
                )
                self._sub(
                    trade_tax, f"{{{ram}}}RateApplicablePercent",
                    self._fmt(abs(tax.amount)),
                )

            line_summation = self._sub(
                line_settlement,
                f"{{{ram}}}SpecifiedTradeSettlementLineMonetarySummation",
            )
            self._monetary_sub(
                line_summation, f"{{{ram}}}LineTotalAmount",
                line.price_subtotal, currency,
            )
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Trade party helper
|
||||
# ------------------------------------------------------------------
|
||||
def _add_trade_party(self, parent, partner, company=None):
    """Fill *parent* with the party's name, postal address, and VAT ID.

    When *company* is given, its name and VAT number take precedence
    over the partner's; address fields always come from *partner*.
    """
    ram = NS_RAM

    party_name = company.name if company else partner.name
    self._sub(parent, f"{{{ram}}}Name", party_name)

    # Postal address — child element order is fixed by the CII schema,
    # so the tuple below must stay in this sequence.
    address = self._sub(parent, f"{{{ram}}}PostalTradeAddress")
    for tag, value in (
        ("PostcodeCode", partner.zip),
        ("LineOne", partner.street),
        ("LineTwo", partner.street2),
        ("CityName", partner.city),
    ):
        if value:
            self._sub(address, f"{{{ram}}}{tag}", value)
    if partner.country_id:
        self._sub(
            address, f"{{{ram}}}CountryID", partner.country_id.code
        )

    # Tax registration — schemeID "VA" marks a VAT number (UNCL 1153).
    vat_number = company.vat if company else partner.vat
    if vat_number:
        registration = self._sub(
            parent, f"{{{ram}}}SpecifiedTaxRegistration"
        )
        id_el = self._sub(registration, f"{{{ram}}}ID", vat_number)
        id_el.set("schemeID", "VA")
|
||||
|
||||
# ==================================================================
|
||||
# Utility helpers
|
||||
# ==================================================================
|
||||
@staticmethod
def _sub(parent, tag, text=None):
    """Append a child element named *tag* under *parent* and return it.

    When *text* is not ``None`` it is stringified and stored as the
    element's text content.
    """
    child = etree.SubElement(parent, tag)
    if text is None:
        return child
    child.text = str(text)
    return child
|
||||
|
||||
@staticmethod
def _monetary_sub(parent, tag, value, currency):
    """Append a monetary element carrying a ``currencyID`` attribute.

    The amount is rounded to two decimals and rendered with exactly
    two decimal places, as required for CII monetary amounts.
    """
    amount = float_round(float(value or 0), precision_digits=2)
    element = etree.SubElement(parent, tag)
    element.text = f"{amount:.2f}"
    element.set("currencyID", currency)
    return element
|
||||
|
||||
@staticmethod
def _fmt(value, precision=2):
    """Render *value* as a decimal string with *precision* digits.

    ``None``/falsy values are treated as zero.
    """
    rounded = float_round(float(value or 0), precision_digits=precision)
    return f"{rounded:.{precision}f}"
|
||||
|
||||
@staticmethod
def _tax_category(tax):
    """Map an Odoo tax to a CII/UBL tax category code (UNCL 5305).

    Returns "Z" (zero-rated) for 0% taxes, "E" (exempt) or "AE"
    (reverse charge) based on the tax label, and "S" (standard)
    otherwise.
    """
    if abs(tax.amount) == 0:
        return "Z"
    label = (tax.name or "").lower()
    # Keyword heuristics on the tax label, checked in priority order.
    for keyword, code in (("exempt", "E"), ("reverse", "AE")):
        if keyword in label:
            return code
    return "S"
|
||||
|
||||
@staticmethod
def _uom_unece(line):
    """Return the UN/ECE Rec 20 unit code for the invoice line.

    Preference order: an explicit ``unece_code`` on the UoM record,
    then a name-based lookup table, then "C62" (unit/piece).
    """
    uom = line.product_uom_id
    if not uom:
        return "C62"

    explicit_code = getattr(uom, "unece_code", None)
    if explicit_code:
        return explicit_code

    # Fallback: match the (lower-cased) UoM name against common labels.
    by_name = {
        "unit": "C62", "units": "C62", "piece": "C62",
        "pieces": "C62", "pce": "C62",
        "kg": "KGM", "kilogram": "KGM",
        "g": "GRM", "gram": "GRM",
        "l": "LTR", "liter": "LTR", "litre": "LTR",
        "m": "MTR", "meter": "MTR", "metre": "MTR",
        "hour": "HUR", "hours": "HUR",
        "day": "DAY", "days": "DAY",
    }
    return by_name.get((uom.name or "").lower(), "C62")
|
||||
|
||||
@staticmethod
def _xpath_text(node, xpath, ns):
    """Return the text of the first element matching *xpath*, or ``None``."""
    match = node.find(xpath, ns)
    if match is None:
        return None
    return match.text
|
||||
192
Fusion Accounting/models/debit_note.py
Normal file
192
Fusion Accounting/models/debit_note.py
Normal file
@@ -0,0 +1,192 @@
|
||||
# Fusion Accounting - Debit Note Creation
|
||||
# Copyright (C) 2026 Nexa Systems Inc. (https://nexasystems.ca)
|
||||
# Original implementation for the Fusion Accounting module.
|
||||
#
|
||||
# Extends account.move with the ability to create debit notes from
|
||||
# existing invoices. A debit note copies the invoice lines with
|
||||
# reversed sign and links back to the original document.
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, Command, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionDebitNote(models.Model):
    """Extends account.move with debit note creation from invoices.

    A *debit note* is an additional charge issued to a customer or
    received from a vendor. Unlike a credit note (which reduces the
    amount owed), a debit note increases it.

    This implementation:
    - Copies all product lines from the source invoice
    - Creates a new invoice of the same type (not reversed)
    - Links the debit note back to the original document
    """

    _inherit = 'account.move'

    # =====================================================================
    # Fields
    # =====================================================================

    fusion_debit_note_origin_id = fields.Many2one(
        comodel_name='account.move',
        string="Debit Note Origin",
        copy=False,
        readonly=True,
        index=True,
        help="The original invoice from which this debit note was created.",
    )
    fusion_debit_note_ids = fields.One2many(
        comodel_name='account.move',
        inverse_name='fusion_debit_note_origin_id',
        string="Debit Notes",
        copy=False,
        readonly=True,
        help="Debit notes created from this invoice.",
    )
    fusion_debit_note_count = fields.Integer(
        string="Debit Note Count",
        compute='_compute_debit_note_count',
    )

    # =====================================================================
    # Computed Fields
    # =====================================================================

    @api.depends('fusion_debit_note_ids')
    def _compute_debit_note_count(self):
        """Count the debit notes linked to each invoice (stat button)."""
        for move in self:
            move.fusion_debit_note_count = len(move.fusion_debit_note_ids)

    # =====================================================================
    # Debit Note Creation
    # =====================================================================

    def action_create_debit_note(self):
        """Create a debit note from the current invoice.

        The debit note is a new invoice document with the same type as
        the original. All product lines are copied. The amounts remain
        positive (this is an additional charge, not a reversal).

        Supported source types:
        - Customer Invoice (out_invoice) → Customer Debit Note (out_invoice)
        - Vendor Bill (in_invoice) → Vendor Debit Note (in_invoice)

        :returns: action dict to open the newly created debit note
        :raises UserError: if the move type is unsupported, the invoice
            is not posted, or there are no product lines to copy
        """
        self.ensure_one()

        if self.move_type not in ('out_invoice', 'in_invoice'):
            raise UserError(_(
                "Debit notes can only be created from customer invoices "
                "or vendor bills."
            ))

        # Fix: only posted invoices may spawn a debit note. The previous
        # check (state == 'draft') let cancelled invoices through.
        if self.state != 'posted':
            raise UserError(_(
                "Please confirm the invoice before creating a debit note."
            ))

        # Build line values from original invoice
        line_vals = []
        for line in self.invoice_line_ids.filtered(
            lambda l: l.display_type == 'product'
        ):
            line_vals.append(Command.create({
                'name': _("Debit Note: %s", line.name or ''),
                'product_id': line.product_id.id if line.product_id else False,
                'product_uom_id': line.product_uom_id.id if line.product_uom_id else False,
                'quantity': line.quantity,
                'price_unit': line.price_unit,
                'discount': line.discount,
                'tax_ids': [Command.set(line.tax_ids.ids)],
                'analytic_distribution': line.analytic_distribution,
                'account_id': line.account_id.id,
            }))

        # Fix: validate *before* appending section/note lines, otherwise a
        # notes-only invoice would produce an empty (zero-amount) debit note.
        if not line_vals:
            raise UserError(_(
                "The invoice has no lines to copy for the debit note."
            ))

        # Copy section and note lines for context
        for line in self.invoice_line_ids.filtered(
            lambda l: l.display_type in ('line_section', 'line_note')
        ):
            line_vals.append(Command.create({
                'display_type': line.display_type,
                'name': line.name,
                'sequence': line.sequence,
            }))

        debit_note_vals = {
            'move_type': self.move_type,
            'partner_id': self.partner_id.id,
            'journal_id': self.journal_id.id,
            'currency_id': self.currency_id.id,
            'company_id': self.company_id.id,
            'invoice_date': fields.Date.context_today(self),
            'ref': _("DN: %s", self.name),
            'narration': _("Debit Note for %s", self.name),
            'fiscal_position_id': self.fiscal_position_id.id if self.fiscal_position_id else False,
            'invoice_payment_term_id': self.invoice_payment_term_id.id if self.invoice_payment_term_id else False,
            'fusion_debit_note_origin_id': self.id,
            'invoice_line_ids': line_vals,
        }

        debit_note = self.env['account.move'].create(debit_note_vals)

        _logger.info(
            "Fusion Debit Note: created %s (id=%s) from %s (id=%s)",
            debit_note.name, debit_note.id, self.name, self.id,
        )

        # Return action to view the debit note
        if self.move_type == 'out_invoice':
            action_ref = 'account.action_move_out_invoice_type'
        else:
            action_ref = 'account.action_move_in_invoice_type'

        return {
            'type': 'ir.actions.act_window',
            'res_model': 'account.move',
            'res_id': debit_note.id,
            'view_mode': 'form',
            'target': 'current',
            'name': _("Debit Note"),
        }

    # =====================================================================
    # View Related Debit Notes
    # =====================================================================

    def action_view_debit_notes(self):
        """Open the list of debit notes created from this invoice.

        Opens a form view directly when exactly one debit note exists,
        otherwise a list view filtered to the related debit notes.
        """
        self.ensure_one()
        debit_notes = self.fusion_debit_note_ids

        if len(debit_notes) == 1:
            return {
                'type': 'ir.actions.act_window',
                'res_model': 'account.move',
                'res_id': debit_notes.id,
                'view_mode': 'form',
                'target': 'current',
                'name': _("Debit Note"),
            }

        return {
            'type': 'ir.actions.act_window',
            'res_model': 'account.move',
            'domain': [('id', 'in', debit_notes.ids)],
            'view_mode': 'list,form',
            'target': 'current',
            'name': _("Debit Notes"),
        }
|
||||
51
Fusion Accounting/models/digest.py
Normal file
51
Fusion Accounting/models/digest.py
Normal file
@@ -0,0 +1,51 @@
|
||||
# Fusion Accounting - Digest KPI Extensions
|
||||
# Adds bank & cash movement KPIs to the periodic digest emails
|
||||
|
||||
from odoo import fields, models, _
|
||||
from odoo.exceptions import AccessError
|
||||
|
||||
|
||||
class FusionDigest(models.Model):
    """Extends the digest framework with an accounting KPI that
    summarises bank and cash journal movements."""

    _inherit = 'digest.digest'

    kpi_account_bank_cash = fields.Boolean(string='Bank & Cash Moves')
    kpi_account_bank_cash_value = fields.Monetary(
        compute='_compute_bank_cash_kpi_total',
    )

    def _compute_bank_cash_kpi_total(self):
        """Aggregate the total amount of moves posted in bank and cash
        journals during the digest period.

        :raises AccessError: when the current user lacks accounting rights
        """
        if not self.env.user.has_group('account.group_account_user'):
            raise AccessError(
                _("Insufficient permissions to compute accounting KPIs.")
            )

        period_start, period_end, target_companies = self._get_kpi_compute_parameters()
        aggregated = self.env['account.move']._read_group(
            domain=[
                ('date', '>=', period_start),
                ('date', '<', period_end),
                ('journal_id.type', 'in', ('cash', 'bank')),
                ('company_id', 'in', target_companies.ids),
            ],
            groupby=['company_id'],
            aggregates=['amount_total:sum'],
        )
        totals_by_company = dict(aggregated)

        for rec in self:
            co = rec.company_id or self.env.company
            # Fix: .get() returns None for companies with no bank/cash moves
            # in the period; a Monetary field must be assigned a number.
            rec.kpi_account_bank_cash_value = totals_by_company.get(co) or 0.0

    def _compute_kpis_actions(self, company, user):
        """Map the bank/cash KPI to the journal dashboard action."""
        actions = super()._compute_kpis_actions(company, user)
        finance_menu_id = self.env.ref('account.menu_finance').id
        actions['kpi_account_bank_cash'] = (
            f'account.open_account_journal_dashboard_kanban&menu_id={finance_menu_id}'
        )
        return actions
|
||||
481
Fusion Accounting/models/document_extraction.py
Normal file
481
Fusion Accounting/models/document_extraction.py
Normal file
@@ -0,0 +1,481 @@
|
||||
"""
|
||||
Fusion Accounting - Document AI / OCR Extraction Engine
|
||||
|
||||
Provides a pluggable OCR back-end that can extract text from scanned
|
||||
invoices, receipts, and other accounting documents. Three providers are
|
||||
supported out-of-the-box:
|
||||
|
||||
* **Tesseract** – runs locally via pytesseract (no cloud calls).
|
||||
* **Google Cloud Vision** – calls the Vision API v1 TEXT_DETECTION endpoint.
|
||||
* **Azure AI Document Intelligence** – calls the Azure prebuilt-invoice
|
||||
layout model.
|
||||
|
||||
Each company may configure one or more extractor records and switch
|
||||
between them freely.
|
||||
|
||||
Original implementation by Nexa Systems Inc.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
|
||||
import requests
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Optional imports – gracefully degrade when libs are absent so the module
|
||||
# can still be installed (the user simply won't be able to use Tesseract).
|
||||
# ---------------------------------------------------------------------------
|
||||
try:
|
||||
from PIL import Image # noqa: F401
|
||||
_PILLOW_AVAILABLE = True
|
||||
except ImportError:
|
||||
_PILLOW_AVAILABLE = False
|
||||
|
||||
try:
|
||||
import pytesseract # noqa: F401
|
||||
_TESSERACT_AVAILABLE = True
|
||||
except ImportError:
|
||||
_TESSERACT_AVAILABLE = False
|
||||
|
||||
|
||||
class FusionDocumentExtractor(models.Model):
    """
    Configurable OCR / AI extraction back-end.

    Each record represents a single provider configuration. The
    :meth:`extract_fields` entry-point dispatches to the appropriate
    private method based on the selected *provider*.
    """

    _name = "fusion.document.extractor"
    _description = "Document AI Extraction Provider"
    _order = "sequence, id"

    # ------------------------------------------------------------------
    # Fields
    # ------------------------------------------------------------------
    name = fields.Char(
        string="Name",
        required=True,
        help="A human-readable label for this extractor (e.g. 'Production Tesseract').",
    )
    sequence = fields.Integer(
        string="Sequence",
        default=10,
        help="Lower numbers appear first when multiple extractors exist.",
    )
    provider = fields.Selection(
        selection=[
            ("tesseract", "Tesseract (Local)"),
            ("google_vision", "Google Cloud Vision"),
            ("azure_ai", "Azure AI Document Intelligence"),
        ],
        string="Provider",
        required=True,
        default="tesseract",
        help=(
            "The OCR engine to use.\n\n"
            "• Tesseract – free, runs locally; requires pytesseract + Tesseract binary.\n"
            "• Google Cloud Vision – cloud API; requires a service-account JSON key.\n"
            "• Azure AI Document Intelligence – cloud API; requires endpoint + key."
        ),
    )
    api_key = fields.Char(
        string="API Key / Credentials",
        groups="base.group_system",
        help=(
            "For Google Vision: paste the full service-account JSON key.\n"
            "For Azure AI: paste the subscription key.\n"
            "Not used for Tesseract."
        ),
    )
    api_endpoint = fields.Char(
        string="API Endpoint",
        help=(
            "For Azure AI: the resource endpoint URL "
            "(e.g. https://<resource>.cognitiveservices.azure.com).\n"
            "Not used for Tesseract or Google Vision."
        ),
    )
    tesseract_lang = fields.Char(
        string="Tesseract Language",
        default="eng",
        help="Tesseract language code(s), e.g. 'eng', 'fra+eng'. Ignored for cloud providers.",
    )
    is_active = fields.Boolean(
        string="Active",
        default=True,
        help="Inactive extractors are hidden from selection lists.",
    )
    company_id = fields.Many2one(
        comodel_name="res.company",
        string="Company",
        default=lambda self: self.env.company,
        help="Restrict this extractor to a single company, or leave blank for all.",
    )

    # ------------------------------------------------------------------
    # Constraints
    # ------------------------------------------------------------------
    @api.constrains("provider", "api_key")
    def _check_api_key_for_cloud_providers(self):
        """Ensure cloud providers have credentials configured."""
        for rec in self:
            if rec.provider in ("google_vision", "azure_ai") and not rec.api_key:
                raise ValidationError(
                    _("An API key is required for the '%s' provider.", rec.get_provider_label())
                )

    @api.constrains("provider", "api_endpoint")
    def _check_endpoint_for_azure(self):
        """Azure AI requires an explicit endpoint URL."""
        for rec in self:
            if rec.provider == "azure_ai" and not rec.api_endpoint:
                raise ValidationError(
                    _("An API endpoint URL is required for Azure AI Document Intelligence.")
                )

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------
    def get_provider_label(self):
        """Return the human-readable label for the current provider selection."""
        self.ensure_one()
        return dict(self._fields["provider"].selection).get(self.provider, self.provider)

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------
    def extract_fields(self, image_bytes, document_type="invoice"):
        """Run OCR on *image_bytes* and return a dict of extracted fields.

        Args:
            image_bytes (bytes): Raw bytes of a PDF page or image file.
            document_type (str): Hint for the extraction engine
                (``'invoice'``, ``'receipt'``, ``'credit_note'``).

        Returns:
            dict: Extracted data with at least the key ``'raw_text'``
                (the full OCR output) and provider-specific structured
                fields when available.

        Raises:
            UserError: When the selected provider cannot be used (missing
                library, bad credentials, …).
        """
        self.ensure_one()
        _log.info(
            "Fusion OCR: extracting from %d bytes via '%s' (doc_type=%s)",
            len(image_bytes), self.provider, document_type,
        )

        dispatch = {
            "tesseract": self._extract_via_tesseract,
            "google_vision": self._extract_via_google_vision,
            "azure_ai": self._extract_via_azure_ai,
        }
        handler = dispatch.get(self.provider)
        if not handler:
            raise UserError(_("Unknown extraction provider: %s", self.provider))

        result = handler(image_bytes, document_type=document_type)

        # Guarantee a 'raw_text' key exists
        result.setdefault("raw_text", "")
        result["provider"] = self.provider
        return result

    # ------------------------------------------------------------------
    # Provider: Tesseract (local)
    # ------------------------------------------------------------------
    def _extract_via_tesseract(self, image_bytes, **kwargs):
        """Extract text locally using Tesseract OCR.

        Converts the input bytes to a PIL Image, then calls
        ``pytesseract.image_to_string``. PDF inputs are converted
        to images via Pillow first.

        Args:
            image_bytes (bytes): Raw image or PDF bytes.

        Returns:
            dict: ``{'raw_text': <str>}``
        """
        self.ensure_one()
        if not _PILLOW_AVAILABLE:
            raise UserError(
                _("The Pillow library is required for Tesseract OCR. "
                  "Install it with: pip install Pillow")
            )
        if not _TESSERACT_AVAILABLE:
            raise UserError(
                _("The pytesseract library is required for local OCR. "
                  "Install it with: pip install pytesseract")
            )

        try:
            image = Image.open(io.BytesIO(image_bytes))
        except Exception as exc:
            raise UserError(
                _("Could not open the attachment as an image: %s", str(exc))
            ) from exc

        lang = self.tesseract_lang or "eng"
        try:
            raw_text = pytesseract.image_to_string(image, lang=lang)
        except Exception as exc:
            _log.exception("Fusion OCR – Tesseract failed")
            raise UserError(
                _("Tesseract OCR failed: %s", str(exc))
            ) from exc

        return {"raw_text": raw_text}

    # ------------------------------------------------------------------
    # Provider: Google Cloud Vision
    # ------------------------------------------------------------------
    def _extract_via_google_vision(self, image_bytes, **kwargs):
        """Call Google Cloud Vision API TEXT_DETECTION.

        The *api_key* field is expected to contain either:
        * A plain API key (simple authentication), or
        * A full service-account JSON (used for OAuth – **not yet
          implemented**; for now we use the key-based endpoint).

        Args:
            image_bytes (bytes): Raw image bytes (PNG / JPEG / TIFF / PDF).

        Returns:
            dict: ``{'raw_text': <str>, 'annotations': <list>}``

        Raises:
            UserError: On transport failure or when the API reports a
                per-image error in its (HTTP 200) response body.
        """
        self.ensure_one()
        url = (
            "https://vision.googleapis.com/v1/images:annotate"
            f"?key={self.api_key}"
        )
        encoded = base64.b64encode(image_bytes).decode("ascii")
        payload = {
            "requests": [
                {
                    "image": {"content": encoded},
                    "features": [{"type": "TEXT_DETECTION"}],
                }
            ]
        }

        try:
            resp = requests.post(url, json=payload, timeout=60)
            resp.raise_for_status()
        except requests.RequestException as exc:
            _log.exception("Fusion OCR – Google Vision API request failed")
            raise UserError(
                _("Google Cloud Vision request failed: %s", str(exc))
            ) from exc

        data = resp.json()
        # Fix: guard against an empty 'responses' list (avoids IndexError).
        responses = data.get("responses") or [{}]
        first_response = responses[0]

        # Fix: Vision reports per-image failures inside an HTTP 200 body;
        # surface them instead of silently returning empty text.
        api_error = first_response.get("error")
        if api_error:
            raise UserError(
                _("Google Cloud Vision returned an error: %s",
                  api_error.get("message", "unknown"))
            )

        annotations = first_response.get("textAnnotations", [])
        raw_text = annotations[0].get("description", "") if annotations else ""

        return {
            "raw_text": raw_text,
            "annotations": annotations,
        }

    # ------------------------------------------------------------------
    # Provider: Azure AI Document Intelligence
    # ------------------------------------------------------------------
    def _extract_via_azure_ai(self, image_bytes, document_type="invoice", **kwargs):
        """Call Azure AI Document Intelligence (formerly Form Recognizer).

        Uses the **prebuilt-invoice** model for invoices and falls back
        to **prebuilt-read** for generic documents.

        Args:
            image_bytes (bytes): Raw document bytes.
            document_type (str): ``'invoice'`` selects the prebuilt-invoice
                model; anything else uses prebuilt-read.

        Returns:
            dict: ``{'raw_text': <str>, 'fields': <dict>, 'pages': <list>}``

        Raises:
            UserError: On missing configuration, transport failure,
                analysis failure, or poll timeout.
        """
        import time

        self.ensure_one()
        # Fix: a record created before the constraint existed (or via SQL)
        # may lack an endpoint; fail with a clear message, not AttributeError.
        if not self.api_endpoint:
            raise UserError(
                _("An API endpoint URL is required for Azure AI Document Intelligence.")
            )
        endpoint = self.api_endpoint.rstrip("/")
        model_id = "prebuilt-invoice" if document_type == "invoice" else "prebuilt-read"
        analyze_url = (
            f"{endpoint}/formrecognizer/documentModels/{model_id}:analyze"
            "?api-version=2023-07-31"
        )

        headers = {
            "Ocp-Apim-Subscription-Key": self.api_key,
            "Content-Type": "application/octet-stream",
        }

        # Step 1 – submit the document for analysis
        try:
            resp = requests.post(
                analyze_url, headers=headers, data=image_bytes, timeout=60,
            )
            resp.raise_for_status()
        except requests.RequestException as exc:
            _log.exception("Fusion OCR – Azure AI submit failed")
            raise UserError(
                _("Azure AI Document Intelligence request failed: %s", str(exc))
            ) from exc

        operation_url = resp.headers.get("Operation-Location")
        if not operation_url:
            raise UserError(
                _("Azure AI did not return an Operation-Location header.")
            )

        # Step 2 – poll for results (30 attempts x 2 s ≈ 60 s max)
        poll_headers = {"Ocp-Apim-Subscription-Key": self.api_key}
        result_data = {}
        for _attempt in range(30):
            time.sleep(2)
            try:
                poll_resp = requests.get(
                    operation_url, headers=poll_headers, timeout=30,
                )
                poll_resp.raise_for_status()
                result_data = poll_resp.json()
            except requests.RequestException as exc:
                # Transient poll failure: log and retry on the next attempt.
                _log.warning("Fusion OCR – Azure AI poll attempt failed: %s", exc)
                continue
            status = result_data.get("status", "")
            if status == "succeeded":
                break
            if status == "failed":
                error_detail = result_data.get("error", {}).get("message", "Unknown error")
                raise UserError(
                    _("Azure AI analysis failed: %s", error_detail)
                )
        else:
            # for/else: no break means the analysis never reached 'succeeded'.
            raise UserError(
                _("Azure AI analysis did not complete within the timeout window.")
            )

        # Step 3 – parse the result
        analyze_result = result_data.get("analyzeResult", {})
        raw_text = analyze_result.get("content", "")
        extracted_fields = {}
        pages = analyze_result.get("pages", [])

        # Parse structured invoice fields when available
        documents = analyze_result.get("documents", [])
        if documents:
            doc_fields = documents[0].get("fields", {})
            extracted_fields = self._parse_azure_invoice_fields(doc_fields)

        return {
            "raw_text": raw_text,
            "fields": extracted_fields,
            "pages": pages,
        }

    @api.model
    def _parse_azure_invoice_fields(self, doc_fields):
        """Convert Azure's structured field map into a flat dict.

        Args:
            doc_fields (dict): The ``documents[0].fields`` portion of
                an Azure analyzeResult response.

        Returns:
            dict: Normalized field names → values.
        """
        def _val(field_dict):
            """Extract the 'content' or 'valueString' from an Azure field."""
            if not field_dict:
                return None
            return (
                field_dict.get("valueString")
                or field_dict.get("valueDate")
                or field_dict.get("valueNumber")
                or field_dict.get("content")
            )

        mapping = {
            "vendor_name": "VendorName",
            "vendor_address": "VendorAddress",
            "invoice_number": "InvoiceId",
            "invoice_date": "InvoiceDate",
            "due_date": "DueDate",
            "total_amount": "InvoiceTotal",
            "subtotal": "SubTotal",
            "tax_amount": "TotalTax",
            "currency": "CurrencyCode",
            "purchase_order": "PurchaseOrder",
            "customer_name": "CustomerName",
        }

        result = {}
        for local_key, azure_key in mapping.items():
            result[local_key] = _val(doc_fields.get(azure_key))

        # Line items
        items_field = doc_fields.get("Items")
        if items_field and items_field.get("valueArray"):
            lines = []
            for item in items_field["valueArray"]:
                item_fields = item.get("valueObject", {})
                lines.append({
                    "description": _val(item_fields.get("Description")),
                    "quantity": _val(item_fields.get("Quantity")),
                    "unit_price": _val(item_fields.get("UnitPrice")),
                    "amount": _val(item_fields.get("Amount")),
                    "tax": _val(item_fields.get("Tax")),
                })
            result["line_items"] = lines

        return result

    # ------------------------------------------------------------------
    # Actions
    # ------------------------------------------------------------------
    def action_test_connection(self):
        """Quick connectivity / credential check for the configured provider.

        Creates a tiny white image, sends it through the extraction
        pipeline, and reports success or failure via a notification.
        """
        self.ensure_one()
        # Build a minimal 10×10 white PNG as test payload
        if not _PILLOW_AVAILABLE:
            raise UserError(_("Pillow is required to run a connection test."))

        img = Image.new("RGB", (10, 10), color=(255, 255, 255))
        buf = io.BytesIO()
        img.save(buf, format="PNG")
        test_bytes = buf.getvalue()

        try:
            result = self.extract_fields(test_bytes, document_type="test")
            _log.info("Fusion OCR – connection test succeeded: %s", result.get("provider"))
        except UserError:
            raise
        except Exception as exc:
            raise UserError(
                _("Connection test failed: %s", str(exc))
            ) from exc

        return {
            "type": "ir.actions.client",
            "tag": "display_notification",
            "params": {
                "title": _("Connection Successful"),
                "message": _("The '%s' provider responded correctly.", self.name),
                "type": "success",
                "sticky": False,
            },
        }
|
||||
235
Fusion Accounting/models/edi_document.py
Normal file
235
Fusion Accounting/models/edi_document.py
Normal file
@@ -0,0 +1,235 @@
|
||||
"""
|
||||
Fusion Accounting - EDI Document Framework
|
||||
|
||||
Manages the lifecycle of Electronic Data Interchange (EDI) documents
|
||||
associated with accounting journal entries. Each EDI document tracks a
|
||||
single rendition of an invoice in a specific electronic format (UBL, CII,
|
||||
etc.), from initial generation through transmission and eventual
|
||||
cancellation when required.
|
||||
|
||||
Original implementation by Nexa Systems Inc.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionEDIDocument(models.Model):
    """
    Represents one EDI rendition of a journal entry.

    A single ``account.move`` may have several EDI documents if the
    company is required to report in more than one format (e.g. UBL for
    Peppol and CII for Factur-X). Each record progresses through a
    linear state machine:

        to_send -> sent -> to_cancel -> cancelled

    Errors encountered during generation or transmission are captured in
    ``error_message`` and the document remains in its current state so
    the user can resolve the issue and retry.
    """

    _name = "fusion.edi.document"
    _description = "Fusion EDI Document"
    _order = "create_date desc"
    _rec_name = "display_name"

    # ------------------------------------------------------------------
    # Fields
    # ------------------------------------------------------------------
    move_id = fields.Many2one(
        comodel_name="account.move",
        string="Journal Entry",
        required=True,
        ondelete="cascade",  # EDI renditions are meaningless without their move
        index=True,
        help="The journal entry that this EDI document represents.",
    )
    edi_format_id = fields.Many2one(
        comodel_name="fusion.edi.format",
        string="EDI Format",
        required=True,
        ondelete="restrict",  # keep format records while documents reference them
        help="The electronic format used for this document.",
    )
    state = fields.Selection(
        selection=[
            ("to_send", "To Send"),
            ("sent", "Sent"),
            ("to_cancel", "To Cancel"),
            ("cancelled", "Cancelled"),
        ],
        string="Status",
        default="to_send",
        required=True,
        copy=False,
        # NOTE(review): tracking=True only takes effect if the model inherits
        # mail.thread, which is not visible here — confirm intended.
        tracking=True,
        help=(
            "Lifecycle state of the EDI document.\n"
            "- To Send: document needs to be generated and/or transmitted.\n"
            "- Sent: document has been successfully delivered.\n"
            "- To Cancel: a cancellation has been requested.\n"
            "- Cancelled: the document has been formally cancelled."
        ),
    )
    attachment_id = fields.Many2one(
        comodel_name="ir.attachment",
        string="Attachment",
        copy=False,
        ondelete="set null",
        help="The generated XML/PDF file for this EDI document.",
    )
    error_message = fields.Text(
        string="Error Message",
        copy=False,
        readonly=True,
        help="Details of the last error encountered during processing.",
    )
    blocking_level = fields.Selection(
        selection=[
            ("info", "Info"),
            ("warning", "Warning"),
            ("error", "Error"),
        ],
        string="Error Severity",
        copy=False,
        help="Severity of the last processing error.",
    )

    # Related / display helpers
    move_name = fields.Char(
        related="move_id.name",
        string="Invoice Number",
    )
    partner_id = fields.Many2one(
        related="move_id.partner_id",
        string="Partner",
    )
    company_id = fields.Many2one(
        related="move_id.company_id",
        string="Company",
        store=True,  # stored so record rules / grouping can filter by company
    )

    # ------------------------------------------------------------------
    # Computed display name
    # ------------------------------------------------------------------
    @api.depends("move_id.name", "edi_format_id.name")
    def _compute_display_name(self):
        # Combines the move reference and format name, e.g.
        # "INV/2026/00001 - UBL 2.1"; placeholders cover unnamed drafts.
        for doc in self:
            doc.display_name = (
                f"{doc.move_id.name or _('Draft')} - "
                f"{doc.edi_format_id.name or _('Unknown Format')}"
            )

    # ------------------------------------------------------------------
    # Actions
    # ------------------------------------------------------------------
    def action_send(self):
        """Generate the EDI file and advance the document to *sent*.

        Delegates the actual XML/PDF creation to the linked
        ``fusion.edi.format`` record. On success the resulting binary
        payload is stored as an ``ir.attachment`` and the state flips to
        ``sent``. Errors are captured rather than raised so that batch
        processing can continue for the remaining documents.

        :raises UserError: if any record in ``self`` is not in the
            ``to_send`` state (this pre-check aborts the whole batch).
        """
        for doc in self:
            # State pre-check raises immediately — unlike generation
            # errors below, a wrong state is a user mistake, not a
            # transient processing failure.
            if doc.state != "to_send":
                raise UserError(
                    _("Only documents in 'To Send' state can be sent. "
                      "Document '%s' is in state '%s'.",
                      doc.display_name, doc.state)
                )
            try:
                xml_bytes = doc.edi_format_id.generate_document(doc.move_id)
                if not xml_bytes:
                    # Empty payload: record the failure and move on to the
                    # next document instead of aborting the batch.
                    doc.write({
                        "error_message": _(
                            "The EDI format returned an empty document."
                        ),
                        "blocking_level": "error",
                    })
                    continue

                filename = doc._build_attachment_filename()
                attachment = self.env["ir.attachment"].create({
                    "name": filename,
                    "raw": xml_bytes,
                    "res_model": doc.move_id._name,
                    "res_id": doc.move_id.id,
                    "mimetype": "application/xml",
                    "type": "binary",
                })
                # Success: link the attachment and clear any stale error info.
                doc.write({
                    "attachment_id": attachment.id,
                    "state": "sent",
                    "error_message": False,
                    "blocking_level": False,
                })
                _log.info(
                    "EDI document %s generated successfully for %s.",
                    doc.edi_format_id.code,
                    doc.move_id.name,
                )
            except Exception as exc:
                # Broad catch is deliberate: a failure on one document must
                # not prevent the rest of the batch from being processed.
                # The document keeps its current state so it can be retried.
                _log.exception(
                    "Failed to generate EDI document for %s.", doc.move_id.name
                )
                doc.write({
                    "error_message": str(exc),
                    "blocking_level": "error",
                })

    def action_cancel(self):
        """Request cancellation of a previously sent EDI document.

        :raises UserError: if any record is not in ``sent`` or
            ``to_cancel`` state.
        """
        for doc in self:
            if doc.state not in ("sent", "to_cancel"):
                raise UserError(
                    _("Only sent documents can be cancelled. "
                      "Document '%s' is in state '%s'.",
                      doc.display_name, doc.state)
                )
            # NOTE(review): the class docstring describes a linear
            # to_send -> sent -> to_cancel -> cancelled flow, but this
            # action moves 'sent' straight to 'cancelled' — confirm the
            # intermediate 'to_cancel' step is intentionally skipped here.
            doc.write({
                "state": "cancelled",
                "error_message": False,
                "blocking_level": False,
            })
            _log.info(
                "EDI document %s cancelled for %s.",
                doc.edi_format_id.code,
                doc.move_id.name,
            )

    def action_retry(self):
        """Reset a failed document back to *to_send* so it can be re-processed.

        Clears the recorded error and detaches any stale attachment so the
        next send produces a fresh file.

        :raises UserError: if any record has no recorded error.
        """
        for doc in self:
            if not doc.error_message:
                raise UserError(
                    _("Document '%s' has no error to retry.", doc.display_name)
                )
            doc.write({
                "state": "to_send",
                "error_message": False,
                "blocking_level": False,
                "attachment_id": False,
            })

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------
    def _build_attachment_filename(self):
        """Construct a human-readable filename for the EDI attachment.

        Slashes in the move name are replaced so the name is safe as a
        filename (move names like ``INV/2026/00001`` contain ``/``).

        Returns:
            str: e.g. ``INV-2026-00001_ubl21.xml``
        """
        self.ensure_one()
        move_name = (self.move_id.name or "DRAFT").replace("/", "-")
        fmt_code = self.edi_format_id.code or "edi"
        return f"{move_name}_{fmt_code}.xml"
|
||||
205
Fusion Accounting/models/edi_format.py
Normal file
205
Fusion Accounting/models/edi_format.py
Normal file
@@ -0,0 +1,205 @@
|
||||
"""
|
||||
Fusion Accounting - EDI Format Registry
|
||||
|
||||
Provides a configuration model for registering electronic document
|
||||
interchange formats. Each format record carries a unique code and
|
||||
delegates the actual XML generation / parsing to dedicated generator
|
||||
classes (e.g. ``FusionUBLGenerator``, ``FusionCIIGenerator``).
|
||||
|
||||
Administrators may restrict a format to customer invoices, vendor bills,
|
||||
or allow it for both through the ``applicable_to`` field.
|
||||
|
||||
Original implementation by Nexa Systems Inc.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
_log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionEDIFormat(models.Model):
    """Registry entry for a supported EDI format.

    Acts as a strategy-pattern dispatcher: ``code`` selects which concrete
    generator/parser handles a given electronic document. Records are
    normally seeded from XML data files; the ``restrict`` ondelete on
    referencing documents keeps them from being deleted while in use.
    """

    _name = "fusion.edi.format"
    _description = "Fusion EDI Format"
    _order = "sequence, name"
    _rec_name = "name"

    # ------------------------------------------------------------------
    # Fields
    # ------------------------------------------------------------------
    name = fields.Char(
        required=True,
        string="Format Name",
        help="Human-readable name shown in selection lists.",
    )
    code = fields.Char(
        required=True,
        string="Code",
        help=(
            "Unique technical identifier used to dispatch generation / "
            "parsing logic. Examples: 'ubl_21', 'cii', 'facturx'."
        ),
    )
    description = fields.Text(
        string="Description",
        help="Optional notes about the format, its version, or usage.",
    )
    applicable_to = fields.Selection(
        required=True,
        default="both",
        string="Applicable To",
        selection=[
            ("invoices", "Customer Invoices / Credit Notes"),
            ("bills", "Vendor Bills"),
            ("both", "Both"),
        ],
        help="Restricts this format to customer-side, vendor-side, or both.",
    )
    active = fields.Boolean(
        default=True,
        string="Active",
        help="Inactive formats are hidden from selection lists.",
    )
    sequence = fields.Integer(
        default=10,
        string="Sequence",
        help="Controls display ordering in lists and dropdowns.",
    )

    # ------------------------------------------------------------------
    # Constraints
    # ------------------------------------------------------------------
    _sql_constraints = [
        (
            "code_unique",
            "UNIQUE(code)",
            "Each EDI format must have a unique code.",
        ),
    ]

    # ------------------------------------------------------------------
    # Generation / Parsing Dispatch
    # ------------------------------------------------------------------
    def generate_document(self, move):
        """Generate an electronic document for the given journal entry.

        Looks up the generator callable registered for ``self.code`` and
        invokes it on *move*.

        Args:
            move: An ``account.move`` recordset (single record).

        Returns:
            bytes: The XML payload of the generated document.

        Raises:
            UserError: When no generator is registered for this format
                code or the move type is incompatible.
        """
        self.ensure_one()
        move.ensure_one()
        self._check_applicability(move)

        builder = self._get_generator_map().get(self.code)
        if builder:
            return builder(move)
        raise UserError(
            _("No generator is registered for EDI format '%s'.", self.code)
        )

    def parse_document(self, xml_bytes):
        """Parse an incoming EDI XML document and return invoice data.

        Looks up the parser callable registered for ``self.code`` and
        invokes it on *xml_bytes*.

        Args:
            xml_bytes (bytes): Raw XML content to parse.

        Returns:
            dict: A dictionary of invoice field values ready for
                ``account.move.create()``.

        Raises:
            UserError: When no parser is registered for this format code.
        """
        self.ensure_one()
        reader = self._get_parser_map().get(self.code)
        if reader:
            return reader(xml_bytes)
        raise UserError(
            _("No parser is registered for EDI format '%s'.", self.code)
        )

    # ------------------------------------------------------------------
    # Internal dispatch helpers
    # ------------------------------------------------------------------
    def _get_generator_map(self):
        """Map format codes to generator callables.

        Each callable takes one ``account.move`` record and returns the
        XML payload as ``bytes``. Factur-X reuses the CII generator since
        it embeds a CII XML body.
        """
        ubl_engine = self.env["fusion.ubl.generator"]
        cii_engine = self.env["fusion.cii.generator"]
        dispatch = {"ubl_21": ubl_engine.generate_ubl_invoice}
        for cii_code in ("cii", "facturx"):
            dispatch[cii_code] = cii_engine.generate_cii_invoice
        return dispatch

    def _get_parser_map(self):
        """Map format codes to parser callables.

        Each callable takes raw XML ``bytes`` and returns a ``dict`` of
        invoice values. Factur-X shares the CII parser.
        """
        ubl_engine = self.env["fusion.ubl.generator"]
        cii_engine = self.env["fusion.cii.generator"]
        dispatch = {"ubl_21": ubl_engine.parse_ubl_invoice}
        for cii_code in ("cii", "facturx"):
            dispatch[cii_code] = cii_engine.parse_cii_invoice
        return dispatch

    def _check_applicability(self, move):
        """Verify that this format is applicable to the given move type.

        Raises:
            UserError: When the format/move combination is invalid.
        """
        self.ensure_one()
        vendor_side = move.move_type in ("in_invoice", "in_refund")
        customer_side = move.move_type in ("out_invoice", "out_refund")
        if self.applicable_to == "invoices" and vendor_side:
            raise UserError(
                _(
                    "EDI format '%s' is restricted to customer invoices / "
                    "credit notes and cannot be used for vendor bills.",
                    self.name,
                )
            )
        if self.applicable_to == "bills" and customer_side:
            raise UserError(
                _(
                    "EDI format '%s' is restricted to vendor bills and "
                    "cannot be used for customer invoices.",
                    self.name,
                )
            )
|
||||
33
Fusion Accounting/models/executive_summary_report.py
Normal file
33
Fusion Accounting/models/executive_summary_report.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# Fusion Accounting - Executive Summary Report
|
||||
|
||||
from odoo import fields, models
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
|
||||
class ExecutiveSummaryReport(models.Model):
    """Extends the accounting report to provide an executive summary metric
    that computes the number of days in the selected reporting period."""

    _inherit = 'account.report'

    def _report_custom_engine_executive_summary_ndays(
        self, expressions, options, date_scope,
        current_groupby, next_groupby,
        offset=0, limit=None, warnings=None,
    ):
        """Calculate the total number of calendar days within the report period.

        This engine expression is used by the executive summary layout to
        display the length of the chosen date window. Group-by is
        intentionally unsupported because the metric is inherently scalar.

        :param options: report options dict; ``options['date']`` must carry
            ``date_from`` and ``date_to`` as ISO date strings.
        :return: ``{'result': <int>}`` — the inclusive day count.
        :raises UserError: when a group-by is requested.
        """
        if current_groupby or next_groupby:
            raise UserError(
                "The executive summary day-count expression "
                "does not support grouping."
            )

        period_start = fields.Date.from_string(options['date']['date_from'])
        period_end = fields.Date.from_string(options['date']['date_to'])
        # Report period bounds are inclusive (date_from .. date_to), so a
        # period covering a single day must count as 1 day, not 0. The bare
        # timedelta difference is therefore off by one — hence the +1, which
        # also keeps downstream per-day averages from dividing by zero.
        return {'result': (period_end - period_start).days + 1}
|
||||
258
Fusion Accounting/models/external_tax_provider.py
Normal file
258
Fusion Accounting/models/external_tax_provider.py
Normal file
@@ -0,0 +1,258 @@
|
||||
"""
|
||||
Fusion Accounting - External Tax Provider (Abstract)
|
||||
=====================================================
|
||||
|
||||
Defines an abstract interface for external tax calculation services such as
|
||||
Avalara AvaTax, Vertex, TaxJar, or any custom tax API. Concrete providers
|
||||
inherit this model and implement the core calculation and voiding methods.
|
||||
|
||||
The provider model stores connection credentials and exposes a registry of
|
||||
active providers per company so that invoice and order workflows can delegate
|
||||
tax computation transparently.
|
||||
|
||||
Copyright (c) Nexa Systems Inc. - All rights reserved.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionExternalTaxProvider(models.Model):
    """Abstract base for external tax calculation providers.

    Each concrete provider (AvaTax, Vertex, etc.) inherits this model
    and implements :meth:`calculate_tax` and :meth:`void_transaction`.
    Only one provider may be active per company at any time.
    """

    _name = "fusion.external.tax.provider"
    _description = "Fusion External Tax Provider"
    _order = "sequence, name"

    # -------------------------------------------------------------------------
    # Fields
    # -------------------------------------------------------------------------
    name = fields.Char(
        string="Provider Name",
        required=True,
        help="Human-readable label for this tax provider configuration.",
    )
    code = fields.Char(
        string="Provider Code",
        required=True,
        help="Short technical identifier for the provider type (e.g. 'avatax', 'vertex').",
    )
    sequence = fields.Integer(
        string="Sequence",
        default=10,
        help="Ordering priority when multiple providers are defined.",
    )
    provider_type = fields.Selection(
        selection=[('generic', 'Generic')],
        string="Provider Type",
        default='generic',
        required=True,
        help="Discriminator used to load provider-specific configuration views.",
    )
    # NOTE(review): the help text claims the key is "stored encrypted", but no
    # encryption is visible in this model — the groups= attribute only limits
    # read access to accounting managers. Confirm where encryption happens.
    api_key = fields.Char(
        string="API Key",
        groups="account.group_account_manager",
        help="Authentication key for the external tax service. "
             "Stored encrypted; only visible to accounting managers.",
    )
    api_url = fields.Char(
        string="API URL",
        help="Base URL of the tax service endpoint.",
    )
    company_id = fields.Many2one(
        comodel_name='res.company',
        string="Company",
        required=True,
        default=lambda self: self.env.company,
        help="Company to which this provider configuration belongs.",
    )
    is_active = fields.Boolean(
        string="Active",
        default=False,
        help="Only one provider may be active per company. "
             "Activating this provider will deactivate others for the same company.",
    )
    state = fields.Selection(
        selection=[
            ('draft', 'Not Configured'),
            ('test', 'Test Passed'),
            ('error', 'Connection Error'),
        ],
        string="Connection State",
        default='draft',
        readonly=True,
        copy=False,
        help="Reflects the result of the most recent connection test.",
    )
    last_test_message = fields.Text(
        string="Last Test Result",
        readonly=True,
        copy=False,
        help="Diagnostic message from the most recent connection test.",
    )
    log_requests = fields.Boolean(
        string="Log API Requests",
        default=False,
        help="When enabled, all API requests and responses are written to the server log "
             "at DEBUG level. Useful for troubleshooting but may expose sensitive data.",
    )

    # -------------------------------------------------------------------------
    # SQL Constraints
    # -------------------------------------------------------------------------
    _sql_constraints = [
        (
            'unique_code_per_company',
            'UNIQUE(code, company_id)',
            'Only one provider configuration per code is allowed per company.',
        ),
    ]

    # -------------------------------------------------------------------------
    # Constraint: single active provider per company
    # -------------------------------------------------------------------------
    @api.constrains('is_active', 'company_id')
    def _check_single_active_provider(self):
        """Ensure at most one provider is active for each company.

        Raises:
            ValidationError: if another provider of the same company is
                already active.
        """
        # Only records being (de)activated in this write need checking;
        # inactive records can never conflict.
        for provider in self.filtered('is_active'):
            siblings = self.search([
                ('company_id', '=', provider.company_id.id),
                ('is_active', '=', True),
                ('id', '!=', provider.id),
            ])
            if siblings:
                raise ValidationError(_(
                    "Only one external tax provider may be active per company. "
                    "Provider '%(existing)s' is already active for %(company)s.",
                    existing=siblings[0].name,
                    company=provider.company_id.name,
                ))

    # -------------------------------------------------------------------------
    # Public API
    # -------------------------------------------------------------------------
    @api.model
    def get_provider(self, company=None):
        """Return the active external tax provider for the given company.

        :param company: ``res.company`` record or ``None`` for the current company.
        :returns: A single ``fusion.external.tax.provider`` record or an empty recordset.
        """
        target_company = company or self.env.company
        # limit=1 is safe: _check_single_active_provider keeps at most one
        # active provider per company.
        return self.search([
            ('company_id', '=', target_company.id),
            ('is_active', '=', True),
        ], limit=1)

    # -------------------------------------------------------------------------
    # Abstract Methods (to be implemented by concrete providers)
    # -------------------------------------------------------------------------
    def calculate_tax(self, order_lines):
        """Compute tax amounts for a collection of order/invoice lines.

        Concrete providers must override this method and return a list of
        dictionaries with at least the following keys per input line:

        * ``line_id`` - The ``id`` of the originating ``account.move.line``.
        * ``tax_amount`` - The computed tax amount in document currency.
        * ``tax_details`` - A list of dicts ``{tax_name, tax_rate, tax_amount, jurisdiction}``.
        * ``doc_code`` - An external document reference for later void/commit.

        :param order_lines: Recordset of ``account.move.line`` (or compatible)
            containing the products, quantities, and addresses.
        :returns: ``list[dict]`` as described above.
        :raises UserError: When the provider encounters a non-recoverable error.
        """
        # Base implementation always raises: this model is abstract by
        # convention and must be overridden by a concrete provider.
        raise UserError(_(
            "The external tax provider '%(name)s' does not implement tax calculation. "
            "Please configure a concrete provider such as AvaTax.",
            name=self.name,
        ))

    def void_transaction(self, doc_code, doc_type='SalesInvoice'):
        """Void (cancel) a previously committed tax transaction.

        :param doc_code: The external document code returned by :meth:`calculate_tax`.
        :param doc_type: The transaction type (default ``'SalesInvoice'``).
        :returns: ``True`` on success.
        :raises UserError: When the void operation fails.
        """
        # Abstract stub — see calculate_tax.
        raise UserError(_(
            "The external tax provider '%(name)s' does not implement transaction voiding.",
            name=self.name,
        ))

    def test_connection(self):
        """Verify connectivity and credentials with the external service.

        Concrete providers should override this to perform an actual API ping
        and update :attr:`state` and :attr:`last_test_message` accordingly.

        :returns: ``True`` if the test succeeds.
        :raises UserError: Always, in this abstract base implementation.
        """
        raise UserError(_(
            "The external tax provider '%(name)s' does not implement a connection test.",
            name=self.name,
        ))

    # -------------------------------------------------------------------------
    # Actions
    # -------------------------------------------------------------------------
    def action_test_connection(self):
        """Button action: run the connection test and display the result.

        Persists the outcome in ``state`` / ``last_test_message`` and returns
        a client notification action (green/transient on success, red/sticky
        on failure).
        """
        self.ensure_one()
        try:
            self.test_connection()
            self.write({
                'state': 'test',
                'last_test_message': _("Connection successful."),
            })
            return {
                'type': 'ir.actions.client',
                'tag': 'display_notification',
                'params': {
                    'title': _("Connection Test"),
                    'message': _("Connection to '%s' succeeded.", self.name),
                    'type': 'success',
                    'sticky': False,
                },
            }
        except Exception as exc:
            # Broad catch is deliberate for a UI button: any failure —
            # including the UserError raised by the abstract base — is
            # recorded on the record and surfaced as a notification
            # instead of an error dialog.
            self.write({
                'state': 'error',
                'last_test_message': str(exc),
            })
            return {
                'type': 'ir.actions.client',
                'tag': 'display_notification',
                'params': {
                    'title': _("Connection Test Failed"),
                    'message': str(exc),
                    'type': 'danger',
                    'sticky': True,
                },
            }

    def action_activate(self):
        """Activate this provider and deactivate all others for the same company.

        Deactivating siblings first keeps the single-active-provider
        constraint satisfied when ``is_active`` is then set on this record.
        """
        self.ensure_one()
        self.search([
            ('company_id', '=', self.company_id.id),
            ('is_active', '=', True),
            ('id', '!=', self.id),
        ]).write({'is_active': False})
        self.is_active = True

    def action_deactivate(self):
        """Deactivate this provider."""
        self.ensure_one()
        self.is_active = False
|
||||
162
Fusion Accounting/models/fiscal_category.py
Normal file
162
Fusion Accounting/models/fiscal_category.py
Normal file
@@ -0,0 +1,162 @@
|
||||
"""
|
||||
Fusion Accounting - Fiscal Categories
|
||||
|
||||
Provides a classification system for grouping general ledger accounts
|
||||
into fiscal reporting categories (income, expense, asset, liability).
|
||||
These categories drive structured fiscal reports and SAF-T exports,
|
||||
allowing companies to map their chart of accounts onto standardised
|
||||
government reporting taxonomies.
|
||||
|
||||
Original implementation by Nexa Systems Inc.
|
||||
"""
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
|
||||
class FusionFiscalCategory(models.Model):
    """
    A fiscal reporting category that groups one or more GL accounts.

    Each category carries a ``category_type`` that mirrors the four
    fundamental pillars of double-entry bookkeeping. When a SAF-T or
    Intrastat export is generated the accounts linked here determine
    which transactions are included.

    Uniqueness of ``code`` is enforced per company so that external
    auditors can refer to categories unambiguously.
    """

    _name = "fusion.fiscal.category"
    _description = "Fiscal Category"
    _order = "code, name"
    _rec_name = "display_name"

    # ------------------------------------------------------------------
    # Fields
    # ------------------------------------------------------------------
    name = fields.Char(
        string="Category Name",
        required=True,
        translate=True,
        help="Human-readable label shown in reports and menus.",
    )
    code = fields.Char(
        string="Code",
        required=True,
        help=(
            "Short alphanumeric identifier used in fiscal exports "
            "(e.g. SAF-T GroupingCode). Must be unique per company."
        ),
    )
    category_type = fields.Selection(
        selection=[
            ("income", "Income"),
            ("expense", "Expense"),
            ("asset", "Asset"),
            ("liability", "Liability"),
        ],
        string="Type",
        required=True,
        default="expense",
        help="Determines the section of the fiscal report this category appears in.",
    )
    description = fields.Text(
        string="Description",
        translate=True,
        help="Optional long description for internal documentation purposes.",
    )
    active = fields.Boolean(
        string="Active",
        default=True,
        help="Archived categories are excluded from new exports but remain on historical records.",
    )
    company_id = fields.Many2one(
        comodel_name="res.company",
        string="Company",
        required=True,
        default=lambda self: self.env.company,
        help="Company to which this fiscal category belongs.",
    )
    account_ids = fields.Many2many(
        comodel_name="account.account",
        relation="fusion_fiscal_category_account_rel",
        column1="category_id",
        column2="account_id",
        string="Accounts",
        help="General-ledger accounts assigned to this fiscal category.",
    )
    account_count = fields.Integer(
        string="# Accounts",
        compute="_compute_account_count",
        store=False,  # cheap to recompute; avoids invalidation bookkeeping
        help="Number of accounts linked to this category.",
    )
    parent_id = fields.Many2one(
        comodel_name="fusion.fiscal.category",
        string="Parent Category",
        index=True,
        ondelete="restrict",  # never orphan a sub-tree by deleting its parent
        help="Optional parent for hierarchical fiscal taxonomies.",
    )
    child_ids = fields.One2many(
        comodel_name="fusion.fiscal.category",
        inverse_name="parent_id",
        string="Sub-categories",
    )

    # ------------------------------------------------------------------
    # SQL constraints
    # ------------------------------------------------------------------
    _sql_constraints = [
        (
            "unique_code_per_company",
            "UNIQUE(code, company_id)",
            "The fiscal category code must be unique within each company.",
        ),
    ]

    # ------------------------------------------------------------------
    # Computed fields
    # ------------------------------------------------------------------
    @api.depends("account_ids")
    def _compute_account_count(self):
        """Count the number of accounts linked to each category."""
        for record in self:
            record.account_count = len(record.account_ids)

    @api.depends("name", "code")
    def _compute_display_name(self):
        """Build a display name combining code and name for clarity."""
        for record in self:
            if record.code:
                record.display_name = f"[{record.code}] {record.name}"
            else:
                # Fall back to the bare name; empty string for unnamed drafts.
                record.display_name = record.name or ""

    # ------------------------------------------------------------------
    # Constraints
    # ------------------------------------------------------------------
    @api.constrains("parent_id")
    def _check_parent_recursion(self):
        """Prevent circular parent-child references.

        Raises:
            ValidationError: if following ``parent_id`` from any record
                would loop back to itself.
        """
        # NOTE(review): relies on the ORM's built-in cycle detection over
        # parent_id (renamed to _has_cycle in recent Odoo versions) —
        # confirm against the target Odoo release.
        if not self._check_recursion():
            raise ValidationError(
                _("A fiscal category cannot be its own ancestor. "
                  "Please choose a different parent.")
            )

    @api.constrains("account_ids", "company_id")
    def _check_account_company(self):
        """Ensure all linked accounts belong to the same company.

        Raises:
            ValidationError: listing the codes of any accounts whose
                company differs from the category's company.
        """
        for record in self:
            foreign = record.account_ids.filtered(
                lambda a: a.company_id != record.company_id
            )
            if foreign:
                raise ValidationError(
                    _("All linked accounts must belong to company '%(company)s'. "
                      "The following accounts belong to a different company: %(accounts)s",
                      company=record.company_id.name,
                      accounts=", ".join(foreign.mapped("code")))
                )
|
||||
365
Fusion Accounting/models/followup.py
Normal file
365
Fusion Accounting/models/followup.py
Normal file
@@ -0,0 +1,365 @@
|
||||
# Part of Fusion Accounting. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from odoo import api, fields, models, _
|
||||
from odoo.exceptions import UserError
|
||||
|
||||
|
||||
class FusionFollowupLevel(models.Model):
    """Defines escalation levels for payment follow-up reminders.

    Each level represents a stage in the collection process, configured
    with a delay (days past the invoice due date) and communication
    channels (email, SMS, letter). Levels are ordered by sequence so
    the system can automatically escalate from gentle reminders to
    more urgent notices.
    """

    _name = 'fusion.followup.level'
    _description = "Fusion Payment Follow-up Level"
    # Escalation order is driven entirely by the sequence field.
    _order = 'sequence, id'

    # ---- Core Fields ----
    name = fields.Char(
        string="Follow-up Action",
        required=True,
        translate=True,
        help="Short label for this follow-up step (e.g. 'First Reminder').",
    )
    description = fields.Html(
        string="Message Body",
        translate=True,
        help="Default message included in the follow-up communication.",
    )
    sequence = fields.Integer(
        string="Sequence",
        default=10,
        help="Determines the escalation order. Lower values run first.",
    )
    delay = fields.Integer(
        string="Due Days",
        required=True,
        default=15,
        help="Number of days after the invoice due date before this level triggers.",
    )
    company_id = fields.Many2one(
        comodel_name='res.company',
        string="Company",
        required=True,
        default=lambda self: self.env.company,
    )
    active = fields.Boolean(
        string="Active",
        default=True,
    )

    # ---- Communication Channels ----
    send_email = fields.Boolean(
        string="Send Email",
        default=True,
        help="Automatically send an email when this follow-up level is executed.",
    )
    send_sms = fields.Boolean(
        string="Send SMS",
        default=False,
        help="Send an SMS notification when this follow-up level is executed.",
    )
    send_letter = fields.Boolean(
        string="Print Letter",
        default=False,
        help="Generate a printable letter when this follow-up level is executed.",
    )

    # ---- Templates ----
    email_template_id = fields.Many2one(
        comodel_name='mail.template',
        string="Email Template",
        domain="[('model', '=', 'res.partner')]",
        help="Email template to use. Leave empty to use the default follow-up template.",
    )
    sms_template_id = fields.Many2one(
        comodel_name='sms.template',
        string="SMS Template",
        domain="[('model', '=', 'res.partner')]",
        help="SMS template to use when the SMS channel is enabled.",
    )

    # ---- Options ----
    join_invoices = fields.Boolean(
        string="Attach Open Invoices",
        default=False,
        help="When enabled, PDF copies of open invoices are attached to the email.",
    )

    # --------------------------------------------------
    # Helpers
    # --------------------------------------------------

    def _get_next_level(self):
        """Return the follow-up level that comes after this one.

        Only levels of the *same company* with a strictly greater sequence
        are considered, so levels sharing a sequence value never chain into
        each other. Note the search is not limited to the current level's
        company hierarchy beyond this equality filter.

        :returns: A ``fusion.followup.level`` recordset (single or empty).
        """
        self.ensure_one()
        return self.search([
            ('company_id', '=', self.company_id.id),
            ('sequence', '>', self.sequence),
        ], order='sequence, id', limit=1)
|
||||
|
||||
|
||||
class FusionFollowupLine(models.Model):
    """Tracks the follow-up state for a specific partner.

    Each record links a partner to their current follow-up level and
    stores the date of the last action. Computed fields determine
    the next action date, overdue amounts, and whether action is needed.
    """

    _name = 'fusion.followup.line'
    _description = "Fusion Partner Follow-up Tracking"
    _order = 'next_followup_date asc, id'
    _rec_name = 'partner_id'

    # ---- Relational Fields ----
    partner_id = fields.Many2one(
        comodel_name='res.partner',
        string="Partner",
        required=True,
        ondelete='cascade',
        index=True,
    )
    company_id = fields.Many2one(
        comodel_name='res.company',
        string="Company",
        required=True,
        default=lambda self: self.env.company,
    )
    followup_level_id = fields.Many2one(
        comodel_name='fusion.followup.level',
        string="Current Level",
        domain="[('company_id', '=', company_id)]",
        help="The most recent follow-up level applied to this partner.",
    )

    # ---- Date Fields ----
    date = fields.Date(
        string="Last Follow-up Date",
        help="Date of the most recent follow-up action.",
    )
    next_followup_date = fields.Date(
        string="Next Action Date",
        compute='_compute_next_followup_date',
        store=True,
        help="Calculated date for the next follow-up step.",
    )

    # ---- Computed Amounts ----
    overdue_amount = fields.Monetary(
        string="Total Overdue",
        compute='_compute_overdue_values',
        currency_field='currency_id',
        store=True,
        help="Sum of all overdue receivable amounts for this partner.",
    )
    overdue_count = fields.Integer(
        string="Overdue Invoices",
        compute='_compute_overdue_values',
        store=True,
        help="Number of overdue invoices for this partner.",
    )
    currency_id = fields.Many2one(
        comodel_name='res.currency',
        string="Currency",
        related='company_id.currency_id',
        store=True,
        readonly=True,
    )

    # ---- Status ----
    followup_status = fields.Selection(
        selection=[
            ('in_need', 'In Need of Action'),
            ('with_overdue', 'With Overdue Invoices'),
            ('no_action_needed', 'No Action Needed'),
        ],
        string="Follow-up Status",
        compute='_compute_followup_status',
        store=True,
        help="Indicates whether this partner requires a follow-up action.",
    )

    # ---- SQL Constraint ----
    _sql_constraints = [
        (
            'partner_company_unique',
            'UNIQUE(partner_id, company_id)',
            'A partner can only have one follow-up tracking record per company.',
        ),
    ]

    # --------------------------------------------------
    # Computed Fields
    # --------------------------------------------------

    @api.depends('date', 'followup_level_id', 'followup_level_id.delay')
    def _compute_next_followup_date(self):
        """Calculate the next follow-up date based on the current level delay.

        If no level is assigned the next date equals the last follow-up
        date. When no date exists at all the field stays empty.
        """
        for line in self:
            if line.date and line.followup_level_id:
                next_level = line.followup_level_id._get_next_level()
                if next_level:
                    # Offset by the delta between the two levels' configured
                    # delays so the schedule follows the escalation pace.
                    line.next_followup_date = line.date + relativedelta(
                        days=next_level.delay - line.followup_level_id.delay,
                    )
                else:
                    # Already at the highest level; re-trigger after same delay
                    line.next_followup_date = line.date + relativedelta(
                        days=line.followup_level_id.delay,
                    )
            elif line.date:
                line.next_followup_date = line.date
            else:
                line.next_followup_date = False

    @api.depends('partner_id', 'company_id')
    def _compute_overdue_values(self):
        """Compute overdue totals from the partner's unpaid receivable move lines."""
        today = fields.Date.context_today(self)
        for line in self:
            overdue_lines = self.env['account.move.line'].search([
                ('partner_id', '=', line.partner_id.commercial_partner_id.id),
                ('company_id', '=', line.company_id.id),
                ('account_id.account_type', '=', 'asset_receivable'),
                ('parent_state', '=', 'posted'),
                ('reconciled', '=', False),
                ('date_maturity', '<', today),
            ])
            line.overdue_amount = sum(overdue_lines.mapped('amount_residual'))
            # Count distinct moves rather than lines: one invoice can carry
            # several receivable lines (e.g. installment payment terms).
            line.overdue_count = len(overdue_lines.mapped('move_id'))

    @api.depends('overdue_amount', 'next_followup_date')
    def _compute_followup_status(self):
        """Determine the follow-up status for each tracking record.

        * **in_need** – there are overdue invoices and the next
          follow-up date has been reached.
        * **with_overdue** – there are overdue invoices but the next
          action date is still in the future.
        * **no_action_needed** – nothing is overdue.
        """
        today = fields.Date.context_today(self)
        for line in self:
            if line.overdue_amount <= 0:
                line.followup_status = 'no_action_needed'
            elif line.next_followup_date and line.next_followup_date > today:
                line.followup_status = 'with_overdue'
            else:
                line.followup_status = 'in_need'

    # --------------------------------------------------
    # Business Logic
    # --------------------------------------------------

    def compute_followup_status(self):
        """Manually recompute overdue values and status.

        Useful for the UI refresh button and scheduled actions.
        """
        self._compute_overdue_values()
        self._compute_followup_status()
        return True

    def execute_followup(self):
        """Execute the follow-up action for the current level.

        Sends emails and/or SMS messages based on the channel settings
        of the current follow-up level, then advances the partner to
        the next level.

        :raises UserError: If no follow-up level is set.
        """
        self.ensure_one()
        if not self.followup_level_id:
            raise UserError(_(
                "No follow-up level is set for partner '%s'. "
                "Please configure follow-up levels first.",
                self.partner_id.display_name,
            ))

        level = self.followup_level_id
        partner = self.partner_id

        # ---- Send Email ----
        if level.send_email:
            template = level.email_template_id or self.env.ref(
                'fusion_accounting.email_template_fusion_followup_default',
                raise_if_not_found=False,
            )
            if template:
                attachment_ids = []
                if level.join_invoices:
                    attachment_ids = self._get_invoice_attachments(partner)
                template.send_mail(
                    partner.id,
                    force_send=True,
                    email_values={'attachment_ids': attachment_ids},
                )

        # ---- Send SMS ----
        if level.send_sms and level.sms_template_id:
            try:
                level.sms_template_id._send_sms(partner.id)
            except Exception:
                # Deliberate best-effort: SMS delivery failures should not
                # block the follow-up process.
                pass

        # ---- Advance to next level ----
        next_level = level._get_next_level()
        self.write({
            'date': fields.Date.context_today(self),
            # Stay on the current (highest) level when there is no next one.
            'followup_level_id': next_level.id if next_level else level.id,
        })

        return True

    def _get_invoice_attachments(self, partner):
        """Generate PDF attachments for the partner's open invoices.

        :param partner: A ``res.partner`` recordset.
        :returns: List of ``ir.attachment`` IDs.
        """
        overdue_invoices = self.env['account.move'].search([
            ('partner_id', '=', partner.commercial_partner_id.id),
            ('company_id', '=', self.company_id.id),
            # 'out_debit' is not a valid account.move type; customer
            # documents are out_invoice / out_refund / out_receipt.
            ('move_type', 'in', ('out_invoice', 'out_receipt')),
            ('payment_state', 'in', ('not_paid', 'partial')),
            ('state', '=', 'posted'),
        ])
        if not overdue_invoices:
            return []

        pdf_report = self.env.ref('account.account_invoices', raise_if_not_found=False)
        if not pdf_report:
            return []

        attachment_ids = []
        for invoice in overdue_invoices:
            content, _content_type = self.env['ir.actions.report']._render(
                pdf_report.report_name, invoice.ids,
            )
            attachment = self.env['ir.attachment'].create({
                'name': f"{invoice.name}.pdf",
                'type': 'binary',
                'raw': content,
                'mimetype': 'application/pdf',
                'res_model': 'account.move',
                'res_id': invoice.id,
            })
            attachment_ids.append(attachment.id)

        return attachment_ids
|
||||
428
Fusion Accounting/models/integration_bridges.py
Normal file
428
Fusion Accounting/models/integration_bridges.py
Normal file
@@ -0,0 +1,428 @@
|
||||
"""
|
||||
Fusion Accounting - Integration Bridge Modules
|
||||
===============================================
|
||||
|
||||
Provides optional glue code between Fusion Accounting and other Odoo
|
||||
applications. Each bridge class extends a core accounting model with
|
||||
fields and methods that only become meaningful when the target module
|
||||
(fleet, hr_expense, helpdesk) is installed.
|
||||
|
||||
All dependencies are **soft**: the bridges use ``try/except ImportError``
|
||||
guards so that Fusion Accounting installs and operates normally even
|
||||
when the partner modules are absent.
|
||||
|
||||
Bridges
|
||||
-------
|
||||
* **FusionFleetBridge** -- tags journal-item expenses to fleet vehicles.
|
||||
* **FusionExpenseBridge** -- creates journal entries from approved
|
||||
HR expense sheets.
|
||||
* **FusionHelpdeskBridge** -- generates credit notes linked to helpdesk
|
||||
tickets for rapid customer resolution.
|
||||
|
||||
Copyright (c) Nexa Systems Inc. - All rights reserved.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, Command, _
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Soft-dependency probes
|
||||
# ---------------------------------------------------------------------------
|
||||
# Each flag is True only when the corresponding Odoo module is importable.
|
||||
# The flags are evaluated at *module-load* time, so they reflect the state
|
||||
# of the Odoo installation rather than the database registry.
|
||||
|
||||
_fleet_available = False
|
||||
try:
|
||||
from odoo.addons.fleet import models as _fleet_models # noqa: F401
|
||||
_fleet_available = True
|
||||
except ImportError:
|
||||
_logger.debug("fleet module not available -- FusionFleetBridge will be inert.")
|
||||
|
||||
_hr_expense_available = False
|
||||
try:
|
||||
from odoo.addons.hr_expense import models as _hr_expense_models # noqa: F401
|
||||
_hr_expense_available = True
|
||||
except ImportError:
|
||||
_logger.debug("hr_expense module not available -- FusionExpenseBridge will be inert.")
|
||||
|
||||
_helpdesk_available = False
|
||||
try:
|
||||
from odoo.addons.helpdesk import models as _helpdesk_models # noqa: F401
|
||||
_helpdesk_available = True
|
||||
except ImportError:
|
||||
_logger.debug("helpdesk module not available -- FusionHelpdeskBridge will be inert.")
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# Fleet Bridge
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
class FusionFleetBridge(models.Model):
    """Extends journal items so each line can optionally reference a fleet
    vehicle, enabling per-vehicle cost tracking and reporting.

    When the *fleet* module is **not** installed the ``fusion_vehicle_id``
    field is still created (as an orphan Many2one) but it will never
    resolve, and the UI hides it via conditional visibility.
    """

    # In-place extension: ``_inherit`` alone is sufficient; re-declaring an
    # identical ``_name`` was redundant.
    _inherit = "account.move.line"

    # ---- Fields ----
    fusion_vehicle_id = fields.Many2one(
        comodel_name="fleet.vehicle",
        string="Vehicle",
        index="btree_not_null",
        ondelete="set null",
        copy=True,
        help=(
            "Optionally link this journal item to a fleet vehicle for "
            "per-vehicle expense tracking and cost-center analysis."
        ),
    )
    fusion_vehicle_license_plate = fields.Char(
        related="fusion_vehicle_id.license_plate",
        string="License Plate",
        readonly=True,
        store=False,
    )

    # ---- Helpers ----
    def _fusion_is_fleet_installed(self):
        """Runtime check: is the *fleet* model actually registered?"""
        return "fleet.vehicle" in self.env

    @api.onchange("fusion_vehicle_id")
    def _onchange_fusion_vehicle_id(self):
        """When a vehicle is selected, suggest the vehicle's display name
        as the line label if the label is currently empty."""
        for line in self:
            if line.fusion_vehicle_id and not line.name:
                line.name = _("Expense: %s", line.fusion_vehicle_id.display_name)
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# HR Expense Bridge
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
class FusionExpenseBridge(models.Model):
    """Links journal entries to approved HR expense sheets and provides a
    method to generate accounting entries from those sheets.

    When *hr_expense* is **not** installed the field and method remain on
    the model but are functionally inert.
    """

    # In-place extension: ``_inherit`` alone is sufficient; re-declaring an
    # identical ``_name`` was redundant.
    _inherit = "account.move"

    # ---- Fields ----
    fusion_expense_sheet_id = fields.Many2one(
        comodel_name="hr.expense.sheet",
        string="Expense Report",
        index="btree_not_null",
        ondelete="set null",
        copy=False,
        readonly=True,
        help=(
            "The HR expense sheet from which this journal entry was "
            "generated. Populated automatically by the bridge."
        ),
    )
    fusion_expense_employee_id = fields.Many2one(
        related="fusion_expense_sheet_id.employee_id",
        string="Expense Employee",
        readonly=True,
        store=False,
    )

    # ---- Helpers ----
    def _fusion_is_hr_expense_installed(self):
        """Runtime check: is the *hr.expense.sheet* model registered?"""
        return "hr.expense.sheet" in self.env

    # ---- Actions ----
    def action_open_expense_sheet(self):
        """Navigate to the linked expense sheet form.

        :raises UserError: if no expense report is linked.
        """
        self.ensure_one()
        if not self.fusion_expense_sheet_id:
            raise UserError(_("No expense report is linked to this entry."))
        return {
            "type": "ir.actions.act_window",
            "res_model": "hr.expense.sheet",
            "res_id": self.fusion_expense_sheet_id.id,
            "view_mode": "form",
            "target": "current",
        }

    # ---- Core method: create journal entry from expense sheet ----
    @api.model
    def create_move_from_expense_sheet(self, expense_sheet_id):
        """Generate a journal entry from an approved HR expense sheet.

        :param int expense_sheet_id: id of the ``hr.expense.sheet`` record.
        :returns: the newly created ``account.move`` recordset.
        :raises UserError: if the expense sheet is not in *approve* state,
            has no expense lines, or if the hr_expense module is not
            installed.
        """
        if not self._fusion_is_hr_expense_installed():
            raise UserError(
                _("The HR Expense module is not installed. "
                  "Please install it before creating entries from expense sheets.")
            )

        sheet = self.env["hr.expense.sheet"].browse(expense_sheet_id)
        if not sheet.exists():
            raise UserError(_("Expense sheet #%d does not exist.", expense_sheet_id))

        if sheet.state != "approve":
            raise UserError(
                _("Only approved expense reports can be converted to journal "
                  "entries. Current status: %s.", sheet.state)
            )

        # Guard: a sheet without lines would produce an empty (useless)
        # journal entry.
        if not sheet.expense_line_ids:
            raise UserError(
                _("Expense report '%s' has no expense lines to convert.",
                  sheet.name)
            )

        # Determine the journal -- prefer the company's expense journal,
        # fall back to the first available miscellaneous journal.
        journal = self.env["account.journal"].search(
            [
                ("company_id", "=", sheet.company_id.id),
                ("type", "=", "purchase"),
            ],
            limit=1,
        )
        if not journal:
            journal = self.env["account.journal"].search(
                [
                    ("company_id", "=", sheet.company_id.id),
                    ("type", "=", "general"),
                ],
                limit=1,
            )
        if not journal:
            raise UserError(
                _("No suitable purchase or miscellaneous journal found for "
                  "company %s.", sheet.company_id.name)
            )

        # Build move-line values from each expense line.
        move_line_vals = []
        total_amount = 0.0

        for expense in sheet.expense_line_ids:
            amount = expense.total_amount_company
            total_amount += amount

            # Debit: expense account (sign-safe for negative amounts)
            move_line_vals.append(Command.create({
                "name": expense.name or _("Expense: %s", sheet.name),
                "account_id": expense.account_id.id,
                "debit": amount if amount > 0 else 0.0,
                "credit": -amount if amount < 0 else 0.0,
                "partner_id": sheet.employee_id.work_contact_id.id
                if sheet.employee_id.work_contact_id else False,
                "analytic_distribution": expense.analytic_distribution or False,
            }))

        # Credit: payable account (employee); fall back to any payable
        # account of the company when the employee has no work contact.
        payable_account = (
            sheet.employee_id.work_contact_id.property_account_payable_id
            if sheet.employee_id.work_contact_id
            else self.env["account.account"].search(
                [
                    ("company_id", "=", sheet.company_id.id),
                    ("account_type", "=", "liability_payable"),
                ],
                limit=1,
            )
        )
        if not payable_account:
            raise UserError(
                _("No payable account found for employee %s.",
                  sheet.employee_id.name)
            )

        move_line_vals.append(Command.create({
            "name": _("Payable: %s", sheet.name),
            "account_id": payable_account.id,
            "debit": -total_amount if total_amount < 0 else 0.0,
            "credit": total_amount if total_amount > 0 else 0.0,
            "partner_id": sheet.employee_id.work_contact_id.id
            if sheet.employee_id.work_contact_id else False,
        }))

        move = self.create({
            "journal_id": journal.id,
            "date": fields.Date.context_today(self),
            "ref": _("Expense Report: %s", sheet.name),
            "fusion_expense_sheet_id": sheet.id,
            "move_type": "entry",
            "line_ids": move_line_vals,
        })

        _logger.info(
            "Fusion Expense Bridge: created journal entry %s (id=%d) "
            "from expense sheet '%s' (id=%d).",
            move.name, move.id, sheet.name, sheet.id,
        )

        return move
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# Helpdesk Bridge
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
class FusionHelpdeskBridge(models.Model):
    """Extends journal entries with helpdesk-ticket linkage and provides
    an action to create a credit note directly from a ticket.

    When *helpdesk* is **not** installed the fields remain on the model
    but are functionally inert, and the UI hides the button.
    """

    # In-place extension: ``_inherit`` alone is sufficient; re-declaring an
    # identical ``_name`` was redundant.
    _inherit = "account.move"

    # ---- Fields ----
    fusion_helpdesk_ticket_id = fields.Many2one(
        comodel_name="helpdesk.ticket",
        string="Helpdesk Ticket",
        index="btree_not_null",
        ondelete="set null",
        copy=False,
        help=(
            "The helpdesk ticket associated with this credit note. "
            "Set automatically when a credit note is created from a ticket."
        ),
    )
    fusion_helpdesk_ticket_ref = fields.Char(
        related="fusion_helpdesk_ticket_id.name",
        string="Ticket Reference",
        readonly=True,
        store=False,
    )

    # ---- Helpers ----
    def _fusion_is_helpdesk_installed(self):
        """Runtime check: is the *helpdesk.ticket* model registered?"""
        return "helpdesk.ticket" in self.env

    # ---- Actions ----
    def action_open_helpdesk_ticket(self):
        """Navigate to the linked helpdesk ticket form.

        :raises UserError: if no ticket is linked to this entry.
        """
        self.ensure_one()
        if not self.fusion_helpdesk_ticket_id:
            raise UserError(_("No helpdesk ticket is linked to this entry."))
        return {
            "type": "ir.actions.act_window",
            "res_model": "helpdesk.ticket",
            "res_id": self.fusion_helpdesk_ticket_id.id,
            "view_mode": "form",
            "target": "current",
        }

    @api.model
    def action_create_credit_note_from_ticket(self, ticket_id, invoice_id=None):
        """Create a credit note linked to a helpdesk ticket.

        If *invoice_id* is provided the credit note reverses that specific
        invoice. Otherwise a standalone credit note is created with the
        ticket's partner and a reference back to the ticket.

        :param int ticket_id: id of the ``helpdesk.ticket`` record.
        :param int|None invoice_id: optional id of the invoice to reverse.
        :returns: window action pointing to the new credit note form.
        :raises UserError: if the helpdesk module is not installed.
        """
        if not self._fusion_is_helpdesk_installed():
            raise UserError(
                _("The Helpdesk module is not installed. "
                  "Please install it before creating credit notes from tickets.")
            )

        Ticket = self.env["helpdesk.ticket"]
        ticket = Ticket.browse(ticket_id)
        if not ticket.exists():
            raise UserError(_("Helpdesk ticket #%d does not exist.", ticket_id))

        partner = ticket.partner_id
        if not partner:
            raise UserError(
                _("Ticket '%s' has no customer set. A customer is required "
                  "to create a credit note.", ticket.name)
            )

        # ---- Path A: reverse an existing invoice ----
        if invoice_id:
            invoice = self.browse(invoice_id)
            if not invoice.exists():
                raise UserError(_("Invoice #%d does not exist.", invoice_id))

            if invoice.move_type not in ("out_invoice", "out_receipt"):
                raise UserError(
                    _("Only customer invoices can be reversed from a "
                      "helpdesk ticket.")
                )

            # Use the standard reversal wizard logic.
            reversal_vals = {
                "journal_id": invoice.journal_id.id,
                "date": fields.Date.context_today(self),
                "reason": _("Credit note from ticket: %s", ticket.name),
            }
            credit_note = invoice._reverse_moves(
                default_values_list=[reversal_vals],
                cancel=False,
            )
            credit_note.write({
                "fusion_helpdesk_ticket_id": ticket.id,
                "ref": _("Ticket: %s", ticket.name),
            })

        # ---- Path B: create a blank credit note ----
        else:
            journal = self.env["account.journal"].search(
                [
                    ("company_id", "=", ticket.company_id.id or self.env.company.id),
                    ("type", "=", "sale"),
                ],
                limit=1,
            )
            if not journal:
                raise UserError(
                    _("No sales journal found. Please configure one before "
                      "creating credit notes.")
                )

            credit_note = self.create({
                "move_type": "out_refund",
                "journal_id": journal.id,
                "partner_id": partner.id,
                "date": fields.Date.context_today(self),
                "ref": _("Ticket: %s", ticket.name),
                "narration": _(
                    "Credit note generated from helpdesk ticket "
                    "'%s' (ID %d).", ticket.name, ticket.id,
                ),
                "fusion_helpdesk_ticket_id": ticket.id,
            })

        _logger.info(
            "Fusion Helpdesk Bridge: created credit note %s (id=%d) "
            "from ticket '%s' (id=%d).",
            credit_note.name, credit_note.id, ticket.name, ticket.id,
        )

        return {
            "type": "ir.actions.act_window",
            "res_model": "account.move",
            "res_id": credit_note.id,
            "view_mode": "form",
            "target": "current",
        }
|
||||
238
Fusion Accounting/models/inter_company_rules.py
Normal file
238
Fusion Accounting/models/inter_company_rules.py
Normal file
@@ -0,0 +1,238 @@
|
||||
# Fusion Accounting - Inter-Company Invoice Synchronization
|
||||
# Copyright (C) 2026 Nexa Systems Inc. (https://nexasystems.ca)
|
||||
# Original implementation for the Fusion Accounting module.
|
||||
#
|
||||
# When an invoice is posted in Company A to a partner that IS Company B,
|
||||
# automatically creates a matching bill in Company B (and vice-versa).
|
||||
|
||||
import logging
|
||||
|
||||
from odoo import api, fields, models, Command, _
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FusionInterCompanyRules(models.Model):
    """Extends res.company with inter-company invoice synchronization settings.

    When enabled, posting an invoice in one company that targets a partner
    linked to another company in the same database will automatically
    generate the corresponding counter-document (bill ↔ invoice) in the
    target company.
    """

    _inherit = 'res.company'

    # =====================================================================
    # Configuration Fields
    # =====================================================================

    # Master switch; both source and target company must enable it for a
    # counter-document to be created (see account.move extension below).
    fusion_intercompany_invoice_enabled = fields.Boolean(
        string="Inter-Company Invoice Sync",
        default=False,
        help="When enabled, posting an invoice/bill to a partner that "
             "represents another company will automatically create the "
             "corresponding counter-document in that company.",
    )
    fusion_intercompany_invoice_journal_id = fields.Many2one(
        comodel_name='account.journal',
        string="Inter-Company Journal",
        domain="[('type', 'in', ('sale', 'purchase'))]",
        help="Default journal used to create inter-company invoices/bills. "
             "If empty, the system will pick the first appropriate journal "
             "in the target company.",
    )

    # =====================================================================
    # Helpers
    # =====================================================================

    def _fusion_get_intercompany_target(self, partner):
        """Return the company record linked to the given partner, if any.

        A partner is considered an inter-company partner when it shares the
        same ``company_id`` reference or the partner *is* the commercial
        entity of another company in the system.

        :param partner: res.partner record
        :returns: res.company recordset (may be empty)
        """
        self.ensure_one()
        if not partner:
            return self.env['res.company']

        # sudo(): companies other than the current one may not be readable
        # by the posting user; the lookup itself is harmless.
        target_company = self.env['res.company'].sudo().search([
            ('partner_id', '=', partner.commercial_partner_id.id),
            ('id', '!=', self.id),
        ], limit=1)
        return target_company
|
||||
|
||||
|
||||
class FusionInterCompanyAccountMove(models.Model):
    """Extends account.move to trigger inter-company invoice creation on post."""

    _inherit = 'account.move'

    # Link to the auto-created counter-document (set on the source move).
    fusion_intercompany_move_id = fields.Many2one(
        comodel_name='account.move',
        string="Inter-Company Counter-Document",
        copy=False,
        readonly=True,
        help="The matching invoice or bill that was auto-created in the "
             "partner's company.",
    )
    # Back-link from the counter-document to the move that triggered it.
    fusion_intercompany_source_id = fields.Many2one(
        comodel_name='account.move',
        string="Inter-Company Source Document",
        copy=False,
        readonly=True,
        help="The original invoice or bill in the originating company "
             "that triggered the creation of this document.",
    )
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Post Override
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _post(self, soft=True):
|
||||
"""Override to trigger inter-company document creation after posting."""
|
||||
posted = super()._post(soft=soft)
|
||||
for move in posted:
|
||||
move._fusion_trigger_intercompany_sync()
|
||||
return posted
|
||||
|
||||
def _fusion_trigger_intercompany_sync(self):
|
||||
"""Check conditions and create the inter-company counter-document."""
|
||||
self.ensure_one()
|
||||
|
||||
# Only applies to customer invoices / vendor bills
|
||||
if self.move_type not in ('out_invoice', 'out_refund', 'in_invoice', 'in_refund'):
|
||||
return
|
||||
|
||||
company = self.company_id
|
||||
if not company.fusion_intercompany_invoice_enabled:
|
||||
return
|
||||
|
||||
# Already has a counter-document
|
||||
if self.fusion_intercompany_move_id:
|
||||
return
|
||||
|
||||
partner = self.partner_id.commercial_partner_id
|
||||
target_company = company._fusion_get_intercompany_target(partner)
|
||||
if not target_company:
|
||||
return
|
||||
|
||||
# Target company must also have the feature enabled
|
||||
if not target_company.fusion_intercompany_invoice_enabled:
|
||||
return
|
||||
|
||||
try:
|
||||
self._create_intercompany_invoice(target_company)
|
||||
except Exception as exc:
|
||||
_logger.warning(
|
||||
"Fusion Inter-Company: failed to create counter-document "
|
||||
"for %s (id=%s): %s",
|
||||
self.name, self.id, exc,
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Counter-Document Creation
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
_MOVE_TYPE_MAP = {
|
||||
'out_invoice': 'in_invoice',
|
||||
'out_refund': 'in_refund',
|
||||
'in_invoice': 'out_invoice',
|
||||
'in_refund': 'out_refund',
|
||||
}
|
||||
|
||||
def _create_intercompany_invoice(self, target_company):
|
||||
"""Create the counter-document in *target_company*.
|
||||
|
||||
Maps:
|
||||
- Customer Invoice → Vendor Bill (and vice-versa)
|
||||
- Customer Credit Note → Vendor Credit Note
|
||||
|
||||
Line items are copied with accounts resolved in the target company's
|
||||
chart of accounts. Taxes are **not** copied to avoid cross-company
|
||||
tax configuration issues; the target company's fiscal position and
|
||||
default taxes will apply instead.
|
||||
|
||||
:param target_company: res.company record of the receiving company
|
||||
"""
|
||||
self.ensure_one()
|
||||
target_move_type = self._MOVE_TYPE_MAP.get(self.move_type)
|
||||
if not target_move_type:
|
||||
return
|
||||
|
||||
# Determine journal in target company
|
||||
journal = target_company.fusion_intercompany_invoice_journal_id
|
||||
if not journal or journal.company_id != target_company:
|
||||
journal_type = 'purchase' if target_move_type.startswith('in_') else 'sale'
|
||||
journal = self.env['account.journal'].sudo().search([
|
||||
('company_id', '=', target_company.id),
|
||||
('type', '=', journal_type),
|
||||
], limit=1)
|
||||
|
||||
if not journal:
|
||||
_logger.warning(
|
||||
"Fusion Inter-Company: no %s journal found in company %s",
|
||||
'purchase' if target_move_type.startswith('in_') else 'sale',
|
||||
target_company.name,
|
||||
)
|
||||
return
|
||||
|
||||
# Build the partner reference: the originating company's partner
|
||||
source_partner = self.company_id.partner_id
|
||||
|
||||
# Prepare invoice line values
|
||||
line_vals = []
|
||||
for line in self.invoice_line_ids.filtered(lambda l: l.display_type == 'product'):
|
||||
line_vals.append(Command.create({
|
||||
'name': line.name or '/',
|
||||
'quantity': line.quantity,
|
||||
'price_unit': line.price_unit,
|
||||
'discount': line.discount,
|
||||
'product_id': line.product_id.id if line.product_id else False,
|
||||
'product_uom_id': line.product_uom_id.id if line.product_uom_id else False,
|
||||
'analytic_distribution': line.analytic_distribution,
|
||||
}))
|
||||
|
||||
if not line_vals:
|
||||
_logger.info(
|
||||
"Fusion Inter-Company: no product lines to copy for %s (id=%s)",
|
||||
self.name, self.id,
|
||||
)
|
||||
return
|
||||
|
||||
# Create the counter-document in sudo context of target company
|
||||
move_vals = {
|
||||
'move_type': target_move_type,
|
||||
'journal_id': journal.id,
|
||||
'company_id': target_company.id,
|
||||
'partner_id': source_partner.id,
|
||||
'invoice_date': self.invoice_date,
|
||||
'date': self.date,
|
||||
'ref': _("IC: %s", self.name),
|
||||
'narration': self.narration,
|
||||
'currency_id': self.currency_id.id,
|
||||
'invoice_line_ids': line_vals,
|
||||
'fusion_intercompany_source_id': self.id,
|
||||
}
|
||||
|
||||
new_move = self.env['account.move'].sudo().with_company(
|
||||
target_company
|
||||
).create(move_vals)
|
||||
|
||||
# Link the two documents
|
||||
self.sudo().write({
|
||||
'fusion_intercompany_move_id': new_move.id,
|
||||
})
|
||||
|
||||
_logger.info(
|
||||
"Fusion Inter-Company: created %s (id=%s) in %s from %s (id=%s)",
|
||||
new_move.name, new_move.id, target_company.name,
|
||||
self.name, self.id,
|
||||
)
|
||||
return new_move
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user