#!/usr/bin/env python

"""
Common classes.

Copyright (C) 2007, 2008, 2009, 2010, 2011 Paul Boddie <paul@boddie.org.uk>

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 3 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.

You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
"""

# The compiler package is Python 2 only (removed in Python 3).

import compiler.visitor

# Compatibility with very old Python releases: fall back to the sets module
# where the set built-in is not available.

try:
    set
except NameError:
    from sets import Set as set

class ObjectSet:

    "A set of objects with optional associated data."

    def __init__(self, d=None):

        """
        Initialise the set, optionally from 'd': either a mapping (copied,
        retaining its values as the associated data) or a sequence (whose
        members are added with empty data).
        """

        if d is None:
            self.objects = {}
        elif hasattr(d, "items"):
            self.objects = dict(d)
        else:
            self.objects = {}
            for key in d:
                self.add(key)

    def __repr__(self):

        # Render as a dictionary-like string, omitting the data for members
        # whose data is empty.

        out = ["{"]
        first = 1
        for key, value in self.items():
            if not first:
                out.append(", ")
            else:
                first = 0
            out.append(repr(key))
            if value:
                out.append(" : ")
                out.append(repr(value))
        out.append("}")
        return "".join(out)

    def __iter__(self):

        "Iterate over the members, not their data."

        return iter(self.keys())

    # Set membership and comparisons.

    def __hash__(self):

        # NOTE(review): the hash depends on the ordering of the underlying
        # dictionary's keys, so equivalent sets populated in different orders
        # may hash differently - confirm this is acceptable to callers.

        return hash(tuple(self.keys()))

    def cmp(self, other):

        # Note: this is not the __cmp__ special method and is only invoked
        # explicitly. It relies on the Python 2 cmp built-in.

        return cmp(self.keys(), other.keys())

    # Set methods.

    def add(self, obj):

        "Add 'obj' as a member, if not already present, with empty data."

        if not self.has_key(obj):
            self[obj] = []

    # Dictionary and related methods.
    # These all delegate to the underlying dictionary of objects.

    def __getitem__(self, key):
        return self.objects[key]

    def get(self, key, default=None):
        return self.objects.get(key, default)

    def __setitem__(self, key, value):
        self.objects[key] = value

    def has_key(self, key):
        return self.objects.has_key(key)

    def keys(self):
        return self.objects.keys()

    def values(self):
        return self.objects.values()

    def items(self):
        return self.objects.items()

    def update(self, other):

        "Update this set with the members (and any data) found in 'other'."

        # Combining dictionary-like objects involves combining values.
        # The incoming data is concatenated before any existing data.

        if hasattr(other, "items"):
            for key, value in other.items():
                self[key] = value + self.get(key, [])

        # Combining sequence-like objects involves just adding members.

        else:
            for key in other:
                self.add(key)

    def merge(self, other):

        """
        Merge this object set with an 'other' set, combining the values where
        possible, and incorporating values present in only one of the sets.
        """

        merged = ObjectSet()
        for key in other.keys():
            if not self.has_key(key):
                merged[key] = other[key]
            else:

                # NOTE(review): each assignment below overwrites the previous
                # one, so only the final value/other_value combination is
                # retained for the key - confirm that accumulating all
                # combinations was not intended.

                for other_value in other[key]:
                    for value in self[key]:
                        merged[key] = value + other_value

        # Members present only in this set are incorporated unchanged.

        for key in self.keys():
            if not merged.has_key(key):
                merged[key] = self[key]

        return merged

def combine_object_set_lists(l1, l2):

    """
    Combine lists of object sets 'l1' and 'l2' to make a product of their
    members.
    """

    combined = []
    for i1 in l1:
        for i2 in l2:
            combined.append(i1.merge(i2))
    return combined

def deepen_mapping_dict(d):

    "Convert the values of 'd' to be elements of a potentially larger set."

    # Each value must be hashable, since it becomes a set member.

    new_dict = {}
    for key, value in d.items():
        new_dict[key] = set([value])
    return new_dict

def merge_mapping_dicts(dicts):

    "Merge the given 'dicts' mapping keys to sets of objects."

    new_dict = {}
    update_mapping_dict(new_dict, dicts)
    return new_dict

def update_mapping_dict(new_dict, dicts):

    "Update 'new_dict' with the contents of the set dictionary 'dicts'."

    # New keys acquire their values wrapped in an ObjectSet; existing keys
    # have the incoming values combined with the stored set.

    for old_dict in dicts:
        for key, value in old_dict.items():
            if not new_dict.has_key(key):
                new_dict[key] = ObjectSet(value)
            else:
                new_dict[key].update(value)

def combine_mapping_dicts(d1, d2):

    """
    Combine dictionaries 'd1' and 'd2' in a resulting dictionary, with the
    values of the contributing dictionaries being themselves combined such that
    a "product" of the values for a given key are stored in the combined
    dictionary.
    """

    combined = {}
    d2_keys = d2.keys()

    # Keys common to both dictionaries have their value lists combined;
    # keys only in 'd2' have their values adopted directly.

    for key in d2_keys:

        d1_values = d1.get(key)

        if d1_values is None:
            combined[key] = d2[key]
        else:
            combined[key] = combine_object_set_lists(d1_values, d2[key])

    # Keys only in 'd1' have their values adopted directly.

    for key in d1.keys():
        if key not in d2_keys:
            combined[key] = d1[key]

    return combined

# Visitors and activities related to node annotations.

class ASTVisitor(compiler.visitor.ASTVisitor):

    "A base class for visitors."

    def dispatch(self, node, *args):

        "Dispatch using 'node', annotating any raised exceptions."

        try:
            return compiler.visitor.ASTVisitor.dispatch(self, node, *args)
        except NodeProcessingError, exc:

            # Record the failing node and the name of the program unit on the
            # exception, if not already recorded, then re-raise.

            if exc.astnode is None:
                exc.astnode = node

            # NOTE: Should perhaps specialise the subclasses appropriately.

            if hasattr(self, "unit"):
                exc.unit_name = self.unit.full_name()
            else:
                exc.unit_name = self.full_name()
            raise

    def possible_accessor_types(self, node):

        """
        Given annotations made during the inspection process, return all possible
        type names and indications of static usage for a 'node' involved in
        attribute access.
        """

        # NOTE(review): relies on the _attrusers, _attrdefs, _attrtypes and
        # _username annotations being set by the inspection process - their
        # exact structure is assumed from usage here.

        target_names = set()

        if hasattr(node, "_attrusers"):

            # Visit each attribute user.

            for user in node._attrusers:

                # Since users such as branches may not provide type information,
                # attempt to find defining users.

                for def_user in user._attrdefs or [user]:
                    for target_name, is_static in def_user._attrtypes.get(node._username, []):
                        target_names.add((target_name, is_static))

        return target_names

def used_by_unit(node):

    """
    Return whether the definition made by a 'node' is actually employed by the
    program unit within which it is found.
    """

    return hasattr(node, "unit") and node.unit.parent.has_key(node.unit.name)

# Errors.

class ProcessingError(Exception):

    "A processing error."

    pass

class TableError(ProcessingError):

    "An error occurring during access to a lookup table."

    pass

class TableGenerationError(ProcessingError):

    "An error occurring when generating a lookup table."

    pass

class NodeProcessingError(ProcessingError):

    "A processing error associated with a particular program node."

    def __init__(self, message):
        self.message = message
        self.unit_name = None
        self.astnode = None

    def get_lineno(self, node):

        """
        Return the line number of 'node' or, failing that, the first line
        number found amongst its descendants, returning None if none exists.
        """

        lineno = node.lineno
        if lineno is not None:
            return lineno
        else:
            for child in node.getChildNodes():
                lineno = self.get_lineno(child)
                if lineno is not None:
                    return lineno
            return None

    def __repr__(self):

        # NOTE(review): assumes astnode has been assigned (see dispatch);
        # get_lineno(None) would raise AttributeError.

        return "Error in %r at line %r: %s" % (self.unit_name, self.get_lineno(self.astnode), self.message)

    def __str__(self):
        return repr(self)

class InspectError(NodeProcessingError):

    "An error during the module inspection process."

    pass

class TranslateError(NodeProcessingError):

    "An error during the module translation process."

    pass

class TranslationNotImplementedError(TranslateError):

    "An error caused by a node not being supported in translation."

    pass

# Special representations.

class AtLeast:

    "A special representation for numbers of a given value or greater."

    def __init__(self, count):
        self.count = count

    def __eq__(self, other):

        # An AtLeast never compares equal (or less) to anything.

        return 0

    __lt__ = __le__ = __eq__

    def __ne__(self, other):

        # Correspondingly, an AtLeast is always unequal to anything.

        return 1

    def __gt__(self, other):

        # Never greater than another AtLeast (both are open-ended); otherwise
        # compare the lower bound against the plain number.

        if isinstance(other, AtLeast):
            return 0
        else:
            return self.count > other

    def __ge__(self, other):
        if isinstance(other, AtLeast):
            return 0
        else:
            return self.count >= other

    def __iadd__(self, other):

        # In-place addition adjusts the lower bound.

        if isinstance(other, AtLeast):
            self.count += other.count
        else:
            self.count += other
        return self

    def __radd__(self, other):

        # number + AtLeast produces a new AtLeast with a raised lower bound.

        if isinstance(other, AtLeast):
            return AtLeast(self.count + other.count)
        else:
            return AtLeast(self.count + other)

    def __repr__(self):
        return "AtLeast(%r)" % self.count

# Useful data

# Comparison operator symbols mapped to (method, reflected method) pairs.
# Operators with no special method protocol map to None.

comparison_methods = {
    "==": ("__eq__", "__eq__"),
    "!=": ("__ne__", "__ne__"),
    "<": ("__lt__", "__gt__"),
    "<=": ("__le__", "__ge__"),
    ">=": ("__ge__", "__le__"),
    ">": ("__gt__", "__lt__"),
    "is": None,
    "is not": None,
    "in": None,
    "not in": None,
}

# Augmented assignment operators mapped to pairs combining the in-place
# method with the (binary method, reflected binary method) fallback pair.

augassign_methods = {
    "+=": ("__iadd__", ("__add__", "__radd__")),
    "-=": ("__isub__", ("__sub__", "__rsub__")),
    "*=": ("__imul__", ("__mul__", "__rmul__")),
    "/=": ("__idiv__", ("__div__", "__rdiv__")),
    "//=": ("__ifloordiv__", ("__floordiv__", "__rfloordiv__")),
    "%=": ("__imod__", ("__mod__", "__rmod__")),
    "**=": ("__ipow__", ("__pow__", "__rpow__")),
    "<<=": ("__ilshift__", ("__lshift__", "__rlshift__")),
    ">>=": ("__irshift__", ("__rshift__", "__rrshift__")),
    "&=": ("__iand__", ("__and__", "__rand__")),
    "^=": ("__ixor__", ("__xor__", "__rxor__")),
    "|=": ("__ior__", ("__or__", "__ror__")),
}

# AST binary operator node names mapped to (method, reflected method) pairs.

binary_methods = {
    "Add": ("__add__", "__radd__"),
    "Bitand": ("__and__", "__rand__"),
    "Bitor": ("__or__", "__ror__"),
    "Bitxor": ("__xor__", "__rxor__"),
    "Div": ("__div__", "__rdiv__"),
    "FloorDiv": ("__floordiv__", "__rfloordiv__"),
    "LeftShift": ("__lshift__", "__rlshift__"),
    "Mod": ("__mod__", "__rmod__"),
    "Mul": ("__mul__", "__rmul__"),
    "Power": ("__pow__", "__rpow__"),
    "RightShift": ("__rshift__", "__rrshift__"),
    "Sub": ("__sub__", "__rsub__"),
}

# AST unary operator node names mapped to their single special method.

unary_methods = {
    "Invert": "__invert__",
    "UnaryAdd": "__pos__",
    "UnarySub": "__neg__",
}

# Operator node names and symbols mapped to the corresponding functions in
# the operator module.

operator_functions = {

    # Binary operations.

    "Add": "add",
    "Bitand": "and_",
    "Bitor": "or_",
    "Bitxor": "xor",
    "Div": "div",
    "FloorDiv": "floordiv",
    "LeftShift": "lshift",
    "Mod": "mod",
    "Mul": "mul",
    "Power": "pow",
    "RightShift": "rshift",
    "Sub": "sub",

    # Unary operations.

    "Invert": "invert",
    "UnaryAdd": "pos",
    "UnarySub": "neg",

    # Augmented assignment.

    "+=": "iadd",
    "-=": "isub",
    "*=": "imul",
    "/=": "idiv",
    "//=": "ifloordiv",
    "%=": "imod",
    "**=": "ipow",
    "<<=": "ilshift",
    ">>=": "irshift",
    "&=": "iand",
    "^=": "ixor",
    "|=": "ior",

    # Comparisons.

    "==": "eq",
    "!=": "ne",
    "<": "lt",
    "<=": "le",
    ">=": "ge",
    ">": "gt",

    # Access and slicing.

    "AssSlice": "setslice",
    "Slice": "getslice",
    "AssSubscript": "setitem",
    "Subscript": "getitem",
}

# vim: tabstop=4 expandtab shiftwidth=4