|
| 1 | +# Copyright 2025 Tecnativa - Carlos Dauden |
| 2 | +# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). |
| 3 | + |
| 4 | +import base64 |
| 5 | +import math |
| 6 | + |
| 7 | +from odoo import _, api, fields, models |
| 8 | +from odoo.exceptions import UserError |
| 9 | +from odoo.tools.safe_eval import safe_eval |
| 10 | + |
| 11 | + |
class ImportMappingTemplate(models.Model):
    """Template that groups import mapping data.

    Stores a reusable column-name -> field-name mapping (``mapping_ids``)
    plus forced context/options so a CSV/XLSX file can be imported through
    Odoo's ``base_import.import`` machinery with a persisted configuration.
    """

    _inherit = ["base_import.import"]
    _name = "base_import.mapping.template"
    _description = "Template to group import mapping data"
    # base_import.import is a TransientModel; templates must be persistent.
    _transient = False

    name = fields.Char()
    action_id = fields.Many2one(comodel_name="ir.actions.act_window")
    mapping_ids = fields.One2many(
        comodel_name="base_import.mapping",
        inverse_name="mapping_template_id",
    )
    # Both fields hold the textual repr of a dict and are evaluated with
    # safe_eval; the default must be the *string* "{}" (the code below
    # compares against it), not a dict literal.
    forced_context = fields.Char(string="Context Value", default="{}", required=True)
    forced_options = fields.Char(default="{}", required=True)

    def execute_import_template(self):
        """Import the attached file using this template's mapping.

        Parses a preview to discover the file columns, translates each
        column into its mapped field name, then imports the file in chunks
        of ``limit`` rows.

        :raises UserError: when the preview fails, the importer reports
            messages, or a chunk creates no records.
        :return: act_window action showing the imported records.
        """
        options = {
            "import_skip_records": [],
            "import_set_empty_fields": [],
            "fallback_values": {},
            "name_create_enabled_fields": {},
            "encoding": "",
            "separator": "",
            "quoting": '"',
            "date_format": "%Y-%m-%d",
            "datetime_format": "",
            "float_thousand_separator": ",",
            "float_decimal_separator": ".",
            "advanced": True,
            "has_headers": True,
            "keep_matches": False,
            "limit": 2000,
            "sheets": [],
            "sheet": "",
            "skip": 0,
            "tracking_disable": True,
        }
        eval_ctx = dict(self.env.context)
        ctx = {}
        if self.action_id:
            ctx.update(**safe_eval(self.action_id.context, eval_ctx))
        if self.forced_context != "{}":
            ctx.update(**safe_eval(self.forced_context, eval_ctx))
        self = self.with_context(**ctx)
        # parse_preview/execute_import expect raw bytes in ``file``.
        self.file = base64.b64decode(self.file)
        preview = self.parse_preview(options=options)
        if preview.get("error"):
            # Fail early instead of silently importing nothing.
            raise UserError(preview["error"])
        if self.forced_options != "{}":
            options.update(**safe_eval(self.forced_options, eval_ctx))
        columns = [col.lower() for col in preview.get("headers", [])]
        # Renamed from ``fields`` to avoid shadowing odoo.fields.
        field_names = []
        for column in columns:
            line = self.mapping_ids.filtered(lambda x, col=column: x.column_name == col)
            # An unmapped column yields an empty recordset -> False field
            # name, which tells the importer to skip that column.
            field_names.append(line.field_name)
        limit = options["limit"]
        steps_number = math.ceil((preview.get("file_length", 1) - 1) / limit)
        all_ids = []
        for step in range(steps_number):
            options["skip"] = step * limit
            options["limit"] = limit
            res = self.with_context(
                use_mapping_template_id=self.id, use_cached_db_id_for=True
            ).execute_import(field_names, columns, options, dryrun=False)
            messages = res.get("messages", [])
            if messages:
                text_message = "\n".join(m.get("message", "") for m in messages)
                if step > 0:
                    text_message = (
                        f"Already imported {step * limit} records, but \n{text_message}"
                    )
                raise UserError(text_message)
            res_ids = res.get("ids", [])
            if not res_ids:
                raise UserError(_("No records were imported"))
            all_ids.extend(res_ids)
        self.file = False
        return self.action_view_imported_records(all_ids, ctx)

    @api.model
    def _convert_import_data(self, fields, options):
        """Post-process raw import rows according to the mapping lines.

        Per column, in order of precedence: apply ``pre_process_method``,
        else evaluate ``python_code``, else prefix the ``id`` column to make
        external ids unique per model/template; afterwards substitute values
        through ``mapped_value_ids``. For multi-level (One2many) files,
        repeated parent values are blanked so rows merge into one parent.
        """
        data, import_fields = super()._convert_import_data(fields, options)
        if not self.env.context.get("use_mapping_template_id"):
            return data, import_fields
        multilevel = "id" in import_fields and any(
            "_ids/" in f for f in import_fields if f
        )
        if multilevel:
            id_index = import_fields.index("id")
        index_line_dict, index_column_dict = self.get_index_dictionaries(import_fields)
        for index, line in index_line_dict.items():
            field_name = line.field_name
            if line.pre_process_method:
                process_fnc = getattr(
                    line, f"pre_process_method_{line.pre_process_method}"
                )
                for row in data:
                    row[index] = process_fnc(row[index])
            elif line.python_code:
                self.update_data_with_python_code(
                    data, index, line.python_code, index_column_dict
                )
            elif field_name == "id":
                # Make the external id unique per model and template so a
                # re-import updates records instead of duplicating them.
                prefix = self.res_model.replace(".", "_")
                for row in data:
                    row[index] = f"{prefix}_{row[index]}_{self.id}"
            if line.mapped_value_ids:
                mapped_dict = {
                    map_line.value: map_line.new_value_ref
                    and str(map_line.new_value_ref.id)
                    or map_line.new_value
                    for map_line in line.mapped_value_ids
                }
                for row in data:
                    row[index] = mapped_dict.get(row[index], row[index])
            # Empty repeated values for principal (parent) record fields.
            # ``field_name`` may be False for unmapped columns; guard it to
            # avoid ``TypeError: argument of type 'bool' is not iterable``.
            if multilevel and field_name and "_ids/" not in field_name:
                last_value = ""
                for row in data:
                    if row[id_index] in ("", last_value):
                        row[index] = ""
                    else:
                        last_value = row[id_index]
        return data, import_fields

    def get_index_dictionaries(self, import_fields):
        """Map each column index to its mapping line and its column name.

        :param import_fields: list of field names as produced by
            ``_convert_import_data`` (may contain falsy entries).
        :return: tuple ``(index -> mapping line, index -> column name)``.
        """
        index_line_dict = {}
        index_column_dict = {}
        for index, field_name in enumerate(import_fields):
            line = self.mapping_ids.filtered(
                lambda x, f_name=field_name: x.field_name == f_name
            )
            index_line_dict[index] = line
            index_column_dict[index] = line.column_name
        return index_line_dict, index_column_dict

    def update_data_with_python_code(self, data, index, python_code, index_column_dict):
        """Evaluate ``python_code`` for every row to transform one column.

        The expression receives ``value`` (current cell) and, only when the
        code references it, ``col_vals`` (column name -> cell value for the
        whole row) to avoid building the dict when unused.
        """
        for row in data:
            col_vals = {}
            if "col_vals" in python_code:
                for idx, col in index_column_dict.items():
                    col_vals[col] = row[idx]
            row[index] = safe_eval(
                python_code,
                {"value": row[index], "col_vals": col_vals},
            )

    def action_view_imported_records(self, res_ids, context=None):
        """Return an act_window action restricted to the imported records.

        Uses ``action_id`` when set, otherwise builds a generic action on
        ``res_model``; the domain is always limited to ``res_ids``.
        """
        if self.action_id:
            action = self.env["ir.actions.actions"]._for_xml_id(self.action_id.xml_id)
        else:
            action = {
                "type": "ir.actions.act_window",
                "res_model": self.res_model,
                "name": _("Imported Records"),
                "views": [[False, "tree"], [False, "kanban"], [False, "form"]],
            }
        action["domain"] = [("id", "in", res_ids)]
        if context:
            action["context"] = context
        return action
0 commit comments