|
| 1 | +# pylint: disable=no-member,protected-access,invalid-name,no-self-use |
| 2 | +import base64 |
| 3 | +import logging |
| 4 | +from datetime import datetime |
| 5 | + |
| 6 | +from odoo import _, fields, models |
| 7 | +from odoo.exceptions import UserError |
| 8 | + |
| 9 | +_logger = logging.getLogger(__name__) # pylint: disable=invalid-name |
| 10 | + |
| 11 | + |
class WizardSaleForecastImport(models.TransientModel):
    """Import sale forecast records from an uploaded spreadsheet.

    The expected file layout (see ``_import_file``) has four product
    columns ("Product Category", "Product", "Item Code (SKU)", "Key")
    followed by one column per month ("Jan-21", "Feb-21", ...).  Each
    row holds the forecast quantities of one product for one location
    key ("Sales", "CS consumption", "AS consumption").
    """

    _name = "wizard.sale.forecast.import"
    _description = "Import sale forecast records"

    file_import = fields.Binary("Import Forecast")
    file_name = fields.Char("file name")

    def action_process_import(self):
        """Actually process the uploaded file to import it.

        Raises:
            UserError: when no file is attached or it cannot be parsed.
        """
        self.ensure_one()
        if not self.file_import:
            raise UserError(_("Please attach a file containing product information."))
        (
            rows,
            date_headers,
            default_code_index,
            date_index,
            key_index,
        ) = self._import_file()
        aggregate_info, locations = self._aggregate_info(
            rows, date_headers, default_code_index, date_index, key_index
        )
        self._process_import(aggregate_info, locations)

    def _import_file(self):
        """Read the uploaded file and locate the relevant columns.

        Returns:
            tuple: ``(rows, date_headers, default_code_index, date_index,
            key_index)`` where ``rows`` are the raw file rows,
            ``date_headers`` the month column titles, and the ``*_index``
            values are column positions inside a row.

        Raises:
            UserError: when the file is empty or a mandatory header
                is missing.
        """

        def get_field_index(header_row, name):
            """Get index of column ``name`` in the header row."""
            try:
                return header_row.index(name)
            except ValueError as error:
                raise UserError(
                    _("Row header name %s is not found in file") % name
                ) from error

        self.ensure_one()
        lst = self._get_rows()
        if not lst or not lst[0]:
            raise UserError(_("Import file is empty or unreadable"))
        # NOTE(review): lst[1] / rows[1] assume a specific structure returned
        # by base_import._read_file (first element metadata, second element
        # the rows, with the real header on the second row) -- confirm
        # against a sample import file.
        rows = lst[1]
        header_row = rows[1]
        date_headers = header_row[4:]
        product_headers = header_row[:4]
        # The first two indexes are unused; they are computed only so a
        # missing mandatory header raises a UserError via get_field_index.
        (
            _product_category_index,
            _product_index,
            default_code_index,
            key_index,
        ) = (
            get_field_index(product_headers, name)
            for name in [
                "Product Category",
                "Product",
                "Item Code (SKU)",
                "Key",
            ]
        )
        date_index = [get_field_index(header_row, name) for name in date_headers]
        return rows, date_headers, default_code_index, date_index, key_index

    def _get_rows(self):
        """Get rows from data_file via the standard base_import parser."""
        self.ensure_one()
        import_model = self.env["base_import.import"]
        data_file = base64.b64decode(self.file_import)
        importer = import_model.create({"file": data_file, "file_name": self.file_name})
        return importer._read_file({"quoting": '"', "separator": ","})

    def _aggregate_info(
        self, rows, date_headers, default_code_index, date_index, key_index
    ):
        """Collect per-product (location, date, quantity) tuples.

        Returns:
            tuple: ``(aggregate_info, locations)`` where ``aggregate_info``
            maps default_code -> list of ``(location_key, datetime, qty)``
            and ``locations`` is the set of location keys seen in the file.
        """
        aggregate_info = {}
        locations = set()
        for row in rows[2:]:
            key = row[key_index].strip()
            # Summary rows carry "Total" in the key column; skip them.
            if key == "Total":
                continue
            # setdefault: a product appears on several rows (one per
            # location key), so never clobber previously collected data.
            product_data = aggregate_info.setdefault(row[default_code_index], [])
            for date, index in zip(date_headers, date_index):
                # NOTE(review): stripping both "," and "." turns "1.5" into
                # 15 -- presumably the cells only contain thousand-separated
                # integers; confirm the expected number format.
                quantity = (
                    float(row[index].replace(",", "").replace(".", ""))
                    if row[index]
                    else 0
                )
                product_data.append((key, self._date_to_object(date.strip()), quantity))
            # Store the stripped key so the membership test in
            # _process_import matches the stripped keys in the tuples.
            locations.add(key)
        return aggregate_info, locations

    def _date_to_object(self, date):
        """Parse a month header like "Jan-21" into a datetime."""
        return datetime.strptime(date, "%b-%y")

    def _process_import(self, rows, locations):
        """Create or update sale.forecast records from aggregated data.

        Args:
            rows (dict): default_code -> list of (location, date, qty).
            locations (set): location keys present in the file.
        """
        forecast_model = self.env["sale.forecast"]
        location_model = self.env["stock.location"]
        # NOTE(review): hard-coded location ids are database-specific;
        # consider replacing them with xml-id lookups.
        location_dict = {
            "Sales": location_model.browse(25),
            "CS consumption": location_model.browse(30),
            "AS consumption": location_model.browse(18),
        }
        product_model = self.env["product.product"]
        for default_code, location_date_quantity in rows.items():
            if not location_date_quantity:
                continue
            # The product only depends on default_code: look it up once
            # per product instead of once per (location, date) cell.
            product = product_model.search(
                [("default_code", "=", default_code)], limit=1
            )
            if not product:
                _logger.debug(
                    "No product with default code %s exists.",
                    default_code,
                )
                continue
            for location, date, quantity in location_date_quantity:
                if location not in locations:
                    _logger.debug("Unknown location key %s, skipping.", location)
                    continue
                location_id = location_dict.get(location)
                if not location_id:
                    _logger.debug(
                        "No location %s exists.",
                        location,
                    )
                    continue
                if not quantity:
                    continue
                if not date:
                    continue
                date_range_id = self._get_date_range_id(date)
                if not date_range_id:
                    _logger.debug(
                        "No monthly date range exists for %s.",
                        date.strftime("%b-%y"),
                    )
                    continue
                vals = {
                    "product_id": product.id,
                    "location_id": location_id.id,
                    "product_uom_qty": quantity,
                    "date_range_id": date_range_id.id,
                }
                # Match on the identifying fields only (search() takes a
                # domain, not a vals dict, and matching on the quantity
                # would never find a forecast whose quantity changed).
                existing_forecast = forecast_model.search(
                    [
                        ("product_id", "=", product.id),
                        ("location_id", "=", location_id.id),
                        ("date_range_id", "=", date_range_id.id),
                    ],
                    limit=1,
                )
                if existing_forecast:
                    _logger.debug(
                        "Forecast for product %s, location %s, date %s"
                        " exists, updating...",
                        product.name,
                        location,
                        date.strftime("%b-%y"),
                    )
                    existing_forecast.write(vals)
                    continue
                forecast_model.create(vals)

    def _get_date_range_id(self, date):
        """Return the active monthly date.range containing ``date``.

        limit=1 guards against overlapping ranges: ``.id`` on a
        multi-record result would raise.
        """
        date_range_domain = [
            ("date_start", "<=", date),
            ("date_end", ">", date),
            ("type_name", "ilike", "Monthly"),
            ("active", "=", True),
        ]
        return self.env["date.range"].search(date_range_domain, limit=1)
0 commit comments