2 # -*- coding: utf-8 -*-
4 # (c) Copyright 2003-2008 Hewlett-Packard Development Company, L.P.
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 # Author: Don Welch, Naga Samrat Chowdary Narla,
24 from base import utils
35 datetime_avail = False
39 pat_prod_num = re.compile("""(\d+)""", re.I)
49 TYPE_URI = TYPE_STR # (7) not used (yet)
50 TYPE_DATE = 8 # format: mm/dd/yyyy
54 "Undefined", # This will show an error (and its the default)
55 "Unsupported", # This is for unsupported models, and it will not show an error
58 #"PSB9100", not used in HPLIP
64 #"DJ400", not used in HPLIP
70 #"DJ660", not used in HPLIP
92 #'DJD2600', not used. Reassigned all these to ViperPlusTrim and ViperMinusTrim Class
116 "DJGenericVIP" : 'pcl3',
120 "LJFastRaster" : 'pclxl',
121 "LJJetReady" : 'pclxl',
127 "DJ6xxPhoto" : 'pcl3',
145 "LJZjsColor" : 'zjs',
147 "QuickConnect" : 'jpeg',
149 "OJProKx50" : 'pcl3',
152 "ViperPlusVIP" : 'pcl3',
153 "ViperMinusVIP": 'pcl3',
154 "ViperPlusTrim" : 'lidil',
155 "ViperMinusTrim": 'lidil',
161 "StingrayOJ" : 'pcl3',
162 "Copperhead" : 'pcl3',
163 "Copperhead12" : 'pcl3',
167 PDL_TYPE_PCL = 0 # less preferred
169 PDL_TYPE_HOST = 2 # more preferred (however, may req. plugin)
171 PDL_TYPES = { # Used to prioritize PPD file selection in prnt.cups.getPPDFile2()
172 'pcl3' : PDL_TYPE_PCL,
173 'pcl5' : PDL_TYPE_PCL,
174 'pcl6' : PDL_TYPE_PCL,
175 'pcl5e' : PDL_TYPE_PCL,
176 'pcl' : PDL_TYPE_PCL,
177 'pclxl' : PDL_TYPE_PCL,
179 'lidil' : PDL_TYPE_HOST,
180 'zjs' : PDL_TYPE_HOST,
181 'zjstream' : PDL_TYPE_HOST,
182 'zxs' : PDL_TYPE_HOST,
183 'zxstream' : PDL_TYPE_HOST,
184 'jpeg' : PDL_TYPE_HOST,
185 'jpg' : PDL_TYPE_HOST,
186 'jetready' : PDL_TYPE_HOST,
187 'jr' : PDL_TYPE_HOST,
202 "NoPhotoBestHiresModes",
206 "300dpiOnly", # LaserJet 4L
207 "GrayscaleOnly", # DJ540
208 "NoAutoTray", # PS Pro 8850
209 "NoEvenDuplex", # PS C8100
221 TECH_SUBCLASSES.sort()
224 # Items will be capitalized unless in this dict
225 MODEL_UI_REPLACEMENTS = {'laserjet' : 'LaserJet',
232 def normalizeModelUIName(model):
233 ml = model.lower().strip()
236 z = ml.replace('_', ' ')
238 if ml.startswith("hp"):
239 z = ml[3:].replace('_', ' ')
241 z = ml.replace('_', ' ')
245 if pat_prod_num.search(x): # don't cap items like cp1700dn
248 y.append(MODEL_UI_REPLACEMENTS.get(x, x.capitalize()))
253 return "HP " + ' '.join(y)
def normalizeModelName(model):
    """Convert a model name to its canonical internal (filesystem-safe) form.

    Spaces and '/' become underscores, '~' is dropped, any run of
    underscores is collapsed to a single one, and leading/trailing
    underscores are stripped via utils.xstrip().

    Fix vs. the previous version: the old chained .replace('__', '_')
    only halved runs of underscores (e.g. 'a___b' -> 'a__b'), and '/'
    was substituted *after* the collapse, so 'a//b' stayed 'a__b'.
    re.sub('_+', ...) applied last collapses every run completely.
    """
    cleaned = model.replace(' ', '_').replace('~', '').replace('/', '_')
    return utils.xstrip(re.sub('_+', '_', cleaned), '_')
261 def __init__(self, root_path=None):
262 if root_path is None:
263 self.root_path = prop.models_dir
265 self.root_path = root_path
268 self.reset_includes()
269 self.sec = re.compile(r'^\[(.*)\]')
270 self.inc = re.compile(r'^\%include (.*)', re.I)
271 self.inc_line = re.compile(r'^\%(.*)\%')
272 self.eq = re.compile(r'^([^=]+)=(.*)')
273 self.date = re.compile(r'^(\d{1,2})/(\d{1,2})/(\d{4,4})')
275 files = [(os.path.join(self.root_path, "models.dat"),
276 os.path.join(self.root_path, "unreleased", "unreleased.dat")),
277 (os.path.join(os.getcwd(), 'data', 'models', 'models.dat'),
278 os.path.join(os.getcwd(), 'data', 'models', 'unreleased', 'unreleased.dat'))]
280 for self.released_dat, self.unreleased_dat in files:
281 if os.path.exists(self.released_dat):
285 self.released_dat, self.unreleased_dat = None, None
286 log.error("Unable to locate models.dat file")
289 # Static model query data (from models.dat)
290 'align-type' : TYPE_INT,
291 'clean-type' : TYPE_INT,
292 'color-cal-type' : TYPE_INT,
293 'copy-type' : TYPE_INT,
294 'embedded-server-type' : TYPE_INT,
295 'fax-type' : TYPE_INT,
296 'fw-download' : TYPE_BOOL,
298 'io-mfp-mode' : TYPE_INT,
299 'io-mode' : TYPE_INT,
300 'io-support' : TYPE_BITFIELD,
301 'job-storage' : TYPE_INT,
302 'monitor-type' : TYPE_INT,
303 'linefeed-cal-type' : TYPE_INT,
304 'panel-check-type' : TYPE_INT,
305 'pcard-type' : TYPE_INT,
307 'plugin-reason' : TYPE_BITFIELD,
308 'power-settings': TYPE_INT,
309 'pq-diag-type' : TYPE_INT,
311 'scan-type' : TYPE_INT,
312 'scan-src' : TYPE_INT,
313 #'scan-duplex' : TYPE_BOOL,
314 'status-battery-check' : TYPE_INT,
315 'status-dynamic-counters' : TYPE_INT,
316 'status-type' : TYPE_INT,
317 'support-subtype' : TYPE_HEX,
318 'support-released' : TYPE_BOOL,
319 'support-type' : TYPE_INT,
320 'support-ver' : TYPE_STR,
321 'tech-class' : TYPE_LIST,
322 'tech-subclass' : TYPE_LIST,
323 'tech-type' : TYPE_INT,
324 'usb-pid' : TYPE_HEX,
325 'usb-vid' : TYPE_HEX,
326 'wifi-config': TYPE_INT,
327 'ppd-name' : TYPE_STR,
330 self.FIELD_TYPES_DYN = {
331 # Dynamic model data (from device query)
332 'dev-file' : TYPE_STR,
333 'fax-uri' : TYPE_STR,
334 'scan-uri' : TYPE_STR,
337 'status-desc' : TYPE_STR,
338 'cups-printers' : TYPE_STR,
340 'error-state' : TYPE_INT,
341 'device-state' : TYPE_INT,
343 'device-uri' : TYPE_STR,
344 'panel-line1' : TYPE_STR,
345 'panel-line2' : TYPE_STR,
346 'back-end' : TYPE_STR,
348 'deviceid' : TYPE_STR,
349 'cups-uri' : TYPE_STR,
350 'status-code' : TYPE_INT,
355 'duplexer' : TYPE_INT,
356 'supply-door' : TYPE_INT,
357 'revision' : TYPE_INT,
358 'media-path' : TYPE_INT,
359 'top-door' : TYPE_BOOL,
360 'photo-tray' : TYPE_BOOL,
363 self.RE_FIELD_TYPES = {
364 re.compile('^r(\d+)-agent(\d+)-kind', re.IGNORECASE) : TYPE_INT,
365 re.compile('^r(\d+)-agent(\d+)-type', re.IGNORECASE) : TYPE_INT,
366 re.compile('^r(\d+)-agent(\d+)-sku', re.IGNORECASE) : TYPE_STR,
367 re.compile('^agent(\d+)-desc', re.IGNORECASE) : TYPE_STR,
368 re.compile('^agent(\d+)-virgin', re.IGNORECASE) : TYPE_BOOL,
369 re.compile('^agent(\d+)-dvc', re.IGNORECASE) : TYPE_INT,
370 re.compile('^agent(\d+)-kind', re.IGNORECASE) : TYPE_INT,
371 re.compile('^agent(\d+)-type', re.IGNORECASE) : TYPE_INT,
372 re.compile('^agent(\d+)-id', re.IGNORECASE) : TYPE_INT,
373 re.compile('^agent(\d+)-hp-ink', re.IGNORECASE) : TYPE_BOOL,
374 re.compile('^agent(\d+)-health-desc', re.IGNORECASE) : TYPE_STR,
375 re.compile('^agent(\d+)-health$', re.IGNORECASE) : TYPE_INT,
376 re.compile('^agent(\d+)-known', re.IGNORECASE) : TYPE_BOOL,
377 re.compile('^agent(\d+)-level', re.IGNORECASE) : TYPE_INT,
378 re.compile('^agent(\d+)-ack', re.IGNORECASE) : TYPE_BOOL,
379 re.compile('^agent(\d+)-sku', re.IGNORECASE) : TYPE_STR,
380 re.compile('^in-tray(\d+)', re.IGNORECASE) : TYPE_BOOL,
381 re.compile('^out-tray(\d+)', re.IGNORECASE) : TYPE_BOOL,
382 re.compile('^model(\d+)', re.IGNORECASE) : TYPE_STR,
388 def read_all_files(self, unreleased=True):
389 if os.path.exists(self.released_dat):
390 self.read_section(self.released_dat)
392 if self.unreleased_dat is not None and os.path.exists(self.unreleased_dat):
393 self.read_section(self.unreleased_dat )
398 def read_section(self, filename, section=None, is_include=False): # section==None, read all sections
399 found, in_section = False, False
401 if section is not None:
402 section = section.lower()
405 log.debug("Searching for include [%s] in file %s" % (section, filename))
407 log.debug("Searching for section [%s] in file %s" % (section, filename))
410 cache = self.__includes
417 log.error("I/O Error: %s (%s)" % (filename, e.strerror))
426 if line[0] in ('#', ';'):
430 if in_section and section is not None:
433 match = self.sec.search(line)
435 if match is not None:
438 read_section = match.group(1).lower()
440 if section is not None:
441 found = in_section = (read_section == section)
444 if section is not None:
445 log.debug("Found section [%s] in file %s" % (read_section, filename))
447 cache[read_section] = {}
452 match = self.inc.match(line)
454 if match is not None:
455 inc_file = match.group(1)
456 log.debug("Found include file directive: %%include %s" % inc_file)
457 self.__include_files.append(os.path.join(os.path.dirname(filename), inc_file))
461 match = self.inc_line.match(line)
463 if match is not None:
464 inc_sect = match.group(1)
465 log.debug("Found include directive %%%s%%" % inc_sect)
468 self.__includes[inc_sect]
470 for inc in self.__include_files:
472 if self.read_section(inc, inc_sect, True):
475 log.error("Include %%%s%% not found." % inc_sect)
478 match = self.eq.search(line)
480 if match is not None:
482 value = match.group(2)
483 value = self.convert_data(key, value)
484 cache[read_section][key] = value
490 def reset_includes(self):
491 self.__include_files = []
495 def __getitem__(self, model):
496 model = model.lower()
499 return self.__cache[model]
501 log.debug("Cache miss: %s" % model)
503 log.debug("Reading file: %s" % self.released_dat)
505 if self.read_section(self.released_dat, model):
506 return self.__cache[model]
508 if self.unreleased_dat is not None and os.path.exists(self.unreleased_dat):
509 log.debug("Reading file: %s" % self.unreleased_dat)
511 if self.read_section(self.unreleased_dat, model):
512 return self.__cache[model]
517 def all_models(self):
521 def get_data_type(self, key):
523 return self.FIELD_TYPES[key]
526 return self.FIELD_TYPES_DYN[key]
529 return self.TYPE_CACHE[key]
531 for pat, typ in self.RE_FIELD_TYPES.items():
532 match = pat.match(key)
533 if match is not None:
534 self.TYPE_CACHE[key] = typ
537 log.error("get_data_type(): Field type lookup failed for key %s" % key)
541 def convert_data(self, key, value, typ=None):
543 typ = self.get_data_type(key)
545 if typ in (TYPE_BITFIELD, TYPE_INT):
548 except (ValueError, TypeError):
549 log.error("Invalid value in .dat file: %s=%s" % (key, value))
552 elif typ == TYPE_BOOL:
553 value = utils.to_bool(value)
555 elif typ == TYPE_LIST:
556 value = [x for x in value.split(',') if x]
558 elif typ == TYPE_DATE: # mm/dd/yyyy
560 # ...don't use datetime.strptime(), wasn't avail. until 2.5
561 match = self.date.search(value)
563 if match is not None:
564 month = int(match.group(1))
565 day = int(match.group(2))
566 year = int(match.group(3))
568 value = datetime.date(year, month, day)
570 elif typ == TYPE_HEX:
572 value = int(value, 16)
573 except (ValueError, TypeError):
574 log.error("Invalid hex value in .dat file: %s=%s" % (key, value))