|
| 1 | +# pylint: disable=no-member,protected-access,invalid-name,no-self-use |
| 2 | +import base64 |
| 3 | +import logging |
| 4 | +from datetime import datetime |
| 5 | + |
| 6 | +from odoo import _, fields, models |
| 7 | +from odoo.exceptions import UserError |
| 8 | + |
| 9 | +_logger = logging.getLogger(__name__) # pylint: disable=invalid-name |
| 10 | + |
| 11 | + |
class WizardSaleForecastImport(models.TransientModel):
    """Import sale forecast records from an uploaded spreadsheet.

    Expected file layout: the header row holds "Product Category",
    "Product", "Item Code (SKU)" and "Key" in the first four columns,
    followed by one column per forecast month formatted "%b-%y"
    (e.g. "Jan-24"); data rows follow the header row.
    """

    _name = "wizard.sale.forecast.import"
    _description = "Import sale forecast records"

    # Uploaded file content (base64-encoded) and its original filename.
    file_import = fields.Binary("Import Forecast")
    file_name = fields.Char("file name")

    def action_process_import(self):
        """Parse the uploaded file and create/update sale.forecast records.

        :raises UserError: when no file is attached, the file is empty
            or unreadable, or a mandatory header column is missing.
        """
        self.ensure_one()
        if not self.file_import:
            raise UserError(_("Please attach a file containing product information."))
        (
            rows,
            date_headers,
            default_code_index,
            date_index,
            key_index,
        ) = self._import_file()
        aggregate_info = self._aggregate_info(
            rows, date_headers, default_code_index, date_index, key_index
        )
        self._process_import(aggregate_info)

    def _import_file(self):
        """Read the uploaded file and locate the columns needed for import.

        :return: tuple ``(rows, date_headers, default_code_index,
            date_index, key_index)``.
        :raises UserError: when the file is empty or a header is missing.
        """

        def get_field_index(header_row, name):
            """Get index of column ``name`` in ``header_row``."""
            try:
                return header_row.index(name)
            except ValueError as error:
                raise UserError(
                    _("Row header name %s is not found in file") % name
                ) from error

        self.ensure_one()
        lst = self._get_rows()
        if not lst or not lst[0]:
            raise UserError(_("Import file is empty or unreadable"))
        # NOTE(review): assumes base_import's _read_file() yields the sheet
        # at index 1 and the header row at index 1 within it -- confirm
        # against the base_import version in use.
        rows = lst[1]
        header_row = rows[1]
        # First four columns describe the product; the rest are month columns.
        date_headers = header_row[4:]
        product_headers = header_row[:4]
        # The first two indexes are unused, but looking them up validates
        # that the mandatory headers are present (UserError otherwise).
        (
            _product_category_index,
            _product_index,
            default_code_index,
            key_index,
        ) = (
            get_field_index(product_headers, name)
            for name in [
                "Product Category",
                "Product",
                "Item Code (SKU)",
                "Key",
            ]
        )
        # Positions of the month columns within a full data row.
        date_index = [get_field_index(header_row, name) for name in date_headers]
        return rows, date_headers, default_code_index, date_index, key_index

    def _get_rows(self):
        """Get rows from data_file via the base_import file reader."""
        self.ensure_one()
        import_model = self.env["base_import.import"]
        data_file = base64.b64decode(self.file_import)
        importer = import_model.create({"file": data_file, "file_name": self.file_name})
        return importer._read_file({"quoting": '"', "separator": ","})

    def _aggregate_info(
        self, rows, date_headers, default_code_index, date_index, key_index
    ):
        """Group forecast lines per product default_code.

        :return: dict mapping default_code to a list of
            ``(key/location name, forecast datetime, quantity)`` tuples.
        """
        aggregate_info = {}
        for row in rows[2:]:
            if row[key_index].strip() == "Total":
                continue  # skip summary rows
            aggregate_info[row[default_code_index]] = []
            for date, index in zip(date_headers, date_index):
                # NOTE(review): both "," and "." are stripped before float(),
                # i.e. values are read as integers with thousand separators
                # ("1.234" -> 1234.0); confirm the source export never uses
                # decimal quantities.
                quantity = (
                    float(row[index].replace(",", "").replace(".", ""))
                    if row[index]
                    else 0
                )
                if quantity <= 0:
                    continue
                date_forecast = self._date_to_object(date.strip())
                if not date_forecast:
                    continue  # past months are ignored
                aggregate_info[row[default_code_index]].append(
                    (
                        row[key_index].strip(),
                        date_forecast,
                        quantity,
                    )
                )
        return aggregate_info

    def _date_to_object(self, date):
        """Convert a "%b-%y" column header (e.g. "Jan-24") to a datetime.

        :return: the parsed datetime, or False when the date is in the
            past (expired forecasts must not be imported).
        :raises ValueError: when ``date`` does not match "%b-%y".
        """
        date_object = datetime.strptime(date, "%b-%y")
        if date_object.date() < fields.Date.today():
            return False
        return date_object

    def _process_import(self, rows):
        """Create or update sale.forecast records from aggregated data.

        :param rows: dict default_code -> list of
            ``(location name, date, quantity)`` tuples.
        """
        forecast_model = self.env["sale.forecast"]
        location_model = self.env["stock.location"]
        product_model = self.env["product.product"]
        # NOTE(review): hard-coded database ids -- these should come from
        # XML ids or configuration to survive across databases.
        location_dict = {
            "Sales": location_model.browse(25),
            "CS consumption": location_model.browse(30),
            "AS consumption": location_model.browse(18),
        }
        for default_code, location_date_quantity in rows.items():
            # limit=1 guards against duplicated default_codes, which would
            # make ``product.id`` raise on a multi-record set.
            product = product_model.search(
                [("default_code", "=", default_code)], limit=1
            )
            if not product:
                _logger.warning(
                    "No product with default code %s exists.",
                    default_code,
                )
                continue
            if not location_date_quantity:
                continue
            for location, date, quantity in location_date_quantity:
                # Single membership check replaces the previous duplicated
                # "not in keys()" / "not location_id" pair.
                location_id = location_dict.get(location)
                if not location_id:
                    _logger.warning("No location %s exists.", location)
                    continue
                date_range_id = self._get_date_range_id(date)
                if not date_range_id:
                    _logger.warning(
                        "No monthly date range exists for %s.",
                        date.strftime("%b-%y"),
                    )
                    continue
                vals = {
                    "product_id": product.id,
                    "location_id": location_id.id,
                    "product_uom_qty": quantity,
                    "date_range_id": date_range_id.id,
                }
                existing_forecast = forecast_model.search(
                    [
                        ("product_id", "=", vals["product_id"]),
                        ("date_range_id", "=", vals["date_range_id"]),
                        ("location_id", "=", vals["location_id"]),
                    ]
                )
                if existing_forecast:
                    # BUGFIX: args were previously passed as one tuple, which
                    # broke the three-%s lazy substitution and made logging
                    # emit an internal error instead of this message.
                    _logger.warning(
                        "Forecast for product %s, location %s, date %s exists, updating...",
                        product.name,
                        location,
                        date.strftime("%b-%y"),
                    )
                    existing_forecast.write(vals)
                    continue
                forecast_model.create(vals)

    def _get_date_range_id(self, date):
        """Return the active monthly date.range containing ``date``.

        limit=1 keeps ``.id`` safe even if ranges ever overlap.
        """
        date_range_domain = [
            ("date_start", "<=", date),
            ("date_end", ">", date),
            ("type_name", "ilike", "Monthly"),
            ("active", "=", True),
        ]
        return self.env["date.range"].search(date_range_domain, limit=1)
0 commit comments