Source Code for Module libxyz.core.fsrule

# -*- coding: utf-8 -*-
#
# Max E. Kuznecov <syhpoon@syhpoon.name> 2008
#
# This file is part of XYZCommander.
# XYZCommander is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# XYZCommander is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
# You should have received a copy of the GNU Lesser Public License
# along with XYZCommander. If not, see <http://www.gnu.org/licenses/>.

import stat
import pwd
import grp
import re

import libxyz.parser as parser

from libxyz.exceptions import XYZValueError
from libxyz.exceptions import LexerError
from libxyz.exceptions import FSRuleError
from libxyz.vfs.vfsobj import VFSObject
from libxyz.vfs.types import *
from libxyz.core.utils import ustring

class FSRule(parser.BaseParser):
    """
    FS rule parser.

    The rule syntax is as follows:

    rule      ::= expr $
                | expr op rule
    expr      ::= expr_body
                | NOT expr_body
                | "(" rule ")"
    expr_body ::= ftype "{" ARG "}"
    op        ::= AND | OR
    ftype     ::= TYPE | PERM | OWNER | NAME | INAME | SIZE
                | LINK_TYPE | LINK_PERM | LINK_OWNER | LINK_NAME
                | LINK_INAME | LINK_EXISTS | LINK_SIZE

    Examples:

    type{file} and perm{+0111}
    (owner{user} and not owner{:group}) or owner{root}
    """

    # Tokens
    TOKEN_TYPE = "type"
    TOKEN_PERM = "perm"
    TOKEN_OWNER = "owner"
    TOKEN_NAME = "name"
    TOKEN_INAME = "iname"
    TOKEN_SIZE = "size"
    TOKEN_LINK_TYPE = "link_type"
    TOKEN_LINK_PERM = "link_perm"
    TOKEN_LINK_OWNER = "link_owner"
    TOKEN_LINK_NAME = "link_name"
    TOKEN_LINK_INAME = "link_iname"
    TOKEN_LINK_EXISTS = "link_exists"
    TOKEN_LINK_SIZE = "link_size"
    TOKEN_AND = "and"
    TOKEN_OR = "or"
    TOKEN_NOT = "not"
    TOKEN_OPEN_BR = "{"
    TOKEN_CLOSE_BR = "}"
    TOKEN_OPEN_PAR = "("
    TOKEN_CLOSE_PAR = ")"
    TOKEN_DEFAULT = True
    TOKEN_ARG = False
    EOF = None

    TOKENS_EXTENDED = []
    TRANSFORM_EXTENDED = {}

    TOKENS = [TOKEN_TYPE, TOKEN_PERM, TOKEN_OWNER, TOKEN_NAME, TOKEN_INAME,
              TOKEN_LINK_TYPE, TOKEN_LINK_PERM, TOKEN_LINK_OWNER,
              TOKEN_LINK_NAME, TOKEN_LINK_INAME, TOKEN_LINK_EXISTS,
              TOKEN_AND, TOKEN_OR, TOKEN_NOT, TOKEN_OPEN_BR, TOKEN_CLOSE_BR,
              TOKEN_OPEN_PAR, TOKEN_CLOSE_PAR, TOKEN_DEFAULT,
              TOKEN_SIZE, TOKEN_LINK_SIZE, EOF]

    # Nonterminals
    NTOKEN_START = 100
    NTOKEN_RULE = 101
    NTOKEN_EXPR = 102
    NTOKEN_EXPR_BODY = 103
    NTOKEN_OP = 104
    NTOKEN_FTYPE = 105

    FTYPE = [TOKEN_TYPE,
             TOKEN_PERM,
             TOKEN_OWNER,
             TOKEN_NAME,
             TOKEN_INAME,
             TOKEN_SIZE,
             TOKEN_LINK_TYPE,
             TOKEN_LINK_PERM,
             TOKEN_LINK_OWNER,
             TOKEN_LINK_NAME,
             TOKEN_LINK_INAME,
             TOKEN_LINK_EXISTS,
             TOKEN_LINK_SIZE,
             ]

    INFIX_OP = (TOKEN_AND, TOKEN_OR)

    @classmethod
    def extend(cls, token, trans_func, match_func):
        """
        Extend FSRule parser with new expressions

        @param token: New token expression
        @param trans_func: Transformation function
        @param match_func: Match function
        """

        if token in cls.TOKENS_EXTENDED or token in cls.TOKENS or \
           token in cls.FTYPE:
            raise FSRuleError(_(u"Error extending FSRule: "\
                                u"token %s already registered") % token)

        if not callable(trans_func) or not callable(match_func):
            raise FSRuleError(_(u"Error extending FSRule: "\
                                u"trans_func and match_func arguments "\
                                u"must be functions."))

        # 1. Append token to lists
        cls.TOKENS_EXTENDED.append(token)
        cls.TOKENS.append(token)
        cls.FTYPE.append(token)

        # 2. Add transformation func
        cls.TRANSFORM_EXTENDED[token] = trans_func

        # 3. Add match func
        Expression.extend(token, match_func)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    @classmethod
    def unextend(cls, token):
        """
        Remove extended expression from parser
        """

        if token not in cls.TOKENS_EXTENDED:
            return False

        try:
            cls.TOKENS_EXTENDED.remove(token)
        except ValueError:
            pass

        try:
            cls.TOKENS.remove(token)
        except ValueError:
            pass

        try:
            cls.FTYPE.remove(token)
        except ValueError:
            pass

        try:
            del cls.TRANSFORM_EXTENDED[token]
        except KeyError:
            pass

        return Expression.unextend(token)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __init__(self, rule):
        """
        @param rule: String rule
        """

        super(FSRule, self).__init__()

        self.raw_rule = rule

        self._stack = []
        self._done = False
        self._cur_obj = None
        self._expressions = parser.lr.Tree()
        self._exp_pointer = self._expressions
        self._exp_stack = []

        # Action table
        self._action = parser.lr.ActionTable()

        _s = self._shift
        _r = self._reduce

        self._action.add(0, self.TOKEN_TYPE, (_s, 2))
        self._action.add(0, self.TOKEN_PERM, (_s, 3))
        self._action.add(0, self.TOKEN_OWNER, (_s, 4))
        self._action.add(0, self.TOKEN_NAME, (_s, 5))
        self._action.add(0, self.TOKEN_INAME, (_s, 5))
        self._action.add(0, self.TOKEN_SIZE, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_TYPE, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_PERM, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_OWNER, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_NAME, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_INAME, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_EXISTS, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_SIZE, (_s, 27))
        self._action.add(0, self.TOKEN_NOT, (_s, 1))
        self._action.add(0, self.TOKEN_OPEN_PAR, (_s, 6))

        self._action.add(1, self.TOKEN_TYPE, (_s, 2))
        self._action.add(1, self.TOKEN_PERM, (_s, 3))
        self._action.add(1, self.TOKEN_OWNER, (_s, 4))
        self._action.add(1, self.TOKEN_NAME, (_s, 5))
        self._action.add(1, self.TOKEN_INAME, (_s, 5))
        self._action.add(1, self.TOKEN_SIZE, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_TYPE, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_PERM, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_OWNER, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_NAME, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_INAME, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_EXISTS, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_SIZE, (_s, 27))

        self._action.add(2, self.TOKEN_DEFAULT, (_r, 10))
        self._action.add(3, self.TOKEN_DEFAULT, (_r, 11))
        self._action.add(4, self.TOKEN_DEFAULT, (_r, 12))
        self._action.add(5, self.TOKEN_DEFAULT, (_r, 13))

        self._action.add(6, self.TOKEN_TYPE, (_s, 2))
        self._action.add(6, self.TOKEN_PERM, (_s, 3))
        self._action.add(6, self.TOKEN_OWNER, (_s, 4))
        self._action.add(6, self.TOKEN_NAME, (_s, 5))
        self._action.add(6, self.TOKEN_INAME, (_s, 5))
        self._action.add(6, self.TOKEN_SIZE, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_TYPE, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_PERM, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_OWNER, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_NAME, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_INAME, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_EXISTS, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_SIZE, (_s, 27))
        self._action.add(6, self.TOKEN_NOT, (_s, 1))
        self._action.add(6, self.TOKEN_OPEN_PAR, (_s, 6))

        self._action.add(7, self.EOF, (_s, 14))
        self._action.add(8, self.TOKEN_DEFAULT, (_r, 1))

        self._action.add(9, self.TOKEN_AND, (_s, 15))
        self._action.add(9, self.TOKEN_OR, (_s, 16))
        self._action.add(9, self.TOKEN_DEFAULT, (_r, 2))

        self._action.add(10, self.TOKEN_DEFAULT, (_r, 4))
        self._action.add(11, self.TOKEN_OPEN_BR, (_s, 18))
        self._action.add(12, self.TOKEN_DEFAULT, (_r, 5))
        self._action.add(13, self.TOKEN_CLOSE_PAR, (_s, 19))
        self._action.add(14, self.TOKEN_DEFAULT, (self._accept, None))
        self._action.add(15, self.TOKEN_DEFAULT, (_r, 8))
        self._action.add(16, self.TOKEN_DEFAULT, (_r, 9))

        self._action.add(17, self.TOKEN_TYPE, (_s, 2))
        self._action.add(17, self.TOKEN_PERM, (_s, 3))
        self._action.add(17, self.TOKEN_OWNER, (_s, 4))
        self._action.add(17, self.TOKEN_NAME, (_s, 5))
        self._action.add(17, self.TOKEN_INAME, (_s, 5))
        self._action.add(17, self.TOKEN_SIZE, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_TYPE, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_PERM, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_OWNER, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_NAME, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_INAME, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_EXISTS, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_SIZE, (_s, 27))
        self._action.add(17, self.TOKEN_NOT, (_s, 1))
        self._action.add(17, self.TOKEN_OPEN_PAR, (_s, 6))

        self._action.add(18, self.TOKEN_ARG, (_s, 21))
        self._action.add(19, self.TOKEN_DEFAULT, (_r, 6))
        self._action.add(20, self.TOKEN_DEFAULT, (_r, 3))
        self._action.add(21, self.TOKEN_CLOSE_BR, (_s, 22))
        self._action.add(22, self.TOKEN_DEFAULT, (_r, 7))
        self._action.add(23, self.TOKEN_OPEN_BR, (_s, 24))
        self._action.add(24, self.TOKEN_ARG, (_s, 25))
        self._action.add(25, self.TOKEN_CLOSE_BR, (_s, 26))
        self._action.add(26, self.TOKEN_DEFAULT, (_r, 14))
        self._action.add(27, self.TOKEN_DEFAULT, (_r, 131))

        # For extended functionality
        for _ext_token in self.TOKENS_EXTENDED:
            for _state in (0, 1, 6, 17):
                self._action.add(_state, _ext_token, (_s, 27))

        self._rules = parser.lr.Rules()

        self._rules.add(1, self.NTOKEN_START, 1)
        self._rules.add(2, self.NTOKEN_RULE, 1)
        self._rules.add(3, self.NTOKEN_RULE, 3)
        self._rules.add(4, self.NTOKEN_EXPR, 1)
        self._rules.add(5, self.NTOKEN_EXPR, 2)
        self._rules.add(6, self.NTOKEN_EXPR, 3)
        self._rules.add(7, self.NTOKEN_EXPR_BODY, 4)
        self._rules.add(8, self.NTOKEN_OP, 1)
        self._rules.add(9, self.NTOKEN_OP, 1)
        self._rules.add(10, self.NTOKEN_FTYPE, 1)
        self._rules.add(11, self.NTOKEN_FTYPE, 1)
        self._rules.add(12, self.NTOKEN_FTYPE, 1)
        self._rules.add(13, self.NTOKEN_FTYPE, 1)
        self._rules.add(14, self.NTOKEN_EXPR_BODY, 5)
        self._rules.add(131, self.NTOKEN_FTYPE, 1)

        # Goto table
        self._goto = parser.lr.GotoTable()

        self._goto.add(0, self.NTOKEN_START, 7)
        self._goto.add(0, self.NTOKEN_RULE, 8)
        self._goto.add(0, self.NTOKEN_EXPR, 9)
        self._goto.add(0, self.NTOKEN_EXPR_BODY, 10)
        self._goto.add(0, self.NTOKEN_FTYPE, 11)

        self._goto.add(1, self.NTOKEN_EXPR_BODY, 10)
        self._goto.add(1, self.NTOKEN_FTYPE, 23)

        self._goto.add(6, self.NTOKEN_RULE, 13)
        self._goto.add(6, self.NTOKEN_EXPR, 9)
        self._goto.add(6, self.NTOKEN_EXPR_BODY, 10)
        self._goto.add(6, self.NTOKEN_FTYPE, 11)

        self._goto.add(9, self.NTOKEN_OP, 17)

        self._goto.add(17, self.NTOKEN_RULE, 20)
        self._goto.add(17, self.NTOKEN_EXPR, 9)
        self._goto.add(17, self.NTOKEN_EXPR_BODY, 10)
        self._goto.add(17, self.NTOKEN_FTYPE, 11)

        self._unget = []
        self._chain = self._parse(rule)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def match(self, obj):
        """
        Match given object against rule

        @param obj: VFSObject instance
        @return: True if matches and False otherwise
        """

        if not isinstance(obj, VFSObject):
            raise XYZValueError(_(u"Invalid argument type: %s, "\
                                  u"VFSObject expected") % type(obj))

        return self._match(obj, self._expressions)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _match(self, obj, _expressions):
        _op = None
        _res = None

        for exp in _expressions:
            if exp in ("AND", "OR"):
                _op = exp
                continue

            if isinstance(exp, parser.lr.Tree):
                # Recursive match subrule
                _r = self._match(obj, exp)
            else:
                _r = exp.match(obj)

            if _res is not None:
                if _op == "AND":
                    _res = _res and _r

                    # Short-circuit: do not continue if got false on AND
                    # expression
                    if not _res:
                        break
                elif _op == "OR":
                    _res = _res or _r

                    # Short-circuit: do not continue if got true on OR
                    # expression
                    if _res:
                        break
            else:
                _res = _r

            _op = None

        if _res is None:
            return _r
        else:
            return _res
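
    # Editor's note with an illustrative example: _parse() flattens a rule
    # into a parser.lr.Tree whose items are Expression instances, the
    # strings "AND"/"OR", and nested Tree objects for parenthesized groups.
    # For instance, "(owner{user} and not owner{:group}) or owner{root}"
    # is stored roughly as:
    #
    #   [Tree([<owner user>, "AND", <not owner :group>]), "OR", <owner root>]
    #
    # _match() above walks such a list left to right, short-circuiting on a
    # False AND operand or a True OR operand.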

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _parse(self, rule):
        """
        Parse rule
        """

        # Initial state
        self._stack.append(0)

        _tokens = (self.TOKEN_OPEN_PAR,
                   self.TOKEN_CLOSE_PAR,
                   self.TOKEN_OPEN_BR,
                   self.TOKEN_CLOSE_BR,
                   u"=", u",")

        self._lexer = parser.Lexer(rule, _tokens, u"#")
        self._lexer.escaping_on()

        try:
            while True:
                if self._done:
                    break

                if self._unget:
                    _tok = self._unget.pop()
                else:
                    _res = self._lexer.lexer()

                    if _res is not None:
                        _tok = _res[1]
                    else:
                        _tok = _res

                if _tok not in self.TOKENS:
                    _tok_type = self.TOKEN_ARG
                else:
                    _tok_type = _tok

                try:
                    _f, _arg = self._action.get(self._stack[-1], _tok_type)
                except KeyError:
                    try:
                        _f, _arg = self._action.get(self._stack[-1],
                                                    self.TOKEN_DEFAULT)
                    except KeyError:
                        self.error(_tok)

                _f(_tok, _arg)

        except LexerError, e:
            self.error(e)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _shift(self, token, state):
        """
        Shift token and state onto stack
        """

        self._stack.append(token)
        self._stack.append(state)

        if state == 6: # (
            _new = parser.lr.Tree()
            self._exp_pointer.add(_new)
            self._exp_stack.append(self._exp_pointer)
            self._exp_pointer = _new
        elif state == 19: # )
            if self._exp_stack:
                self._exp_pointer = self._exp_stack.pop()

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _reduce(self, token, rule):
        """
        Reduce stack by rule
        """

        _transform = {
            u"type": self._type,
            u"name": self._name,
            u"iname": self._iname,
            u"owner": self._owner,
            u"perm": self._perm,
            u"size": self._size,
            u"link_type": self._type,
            u"link_name": self._name,
            u"link_iname": self._iname,
            u"link_owner": self._owner,
            u"link_perm": self._perm,
            u"link_size": self._size,
            }

        try:
            _ntok, _len = self._rules.get(rule)
        except KeyError:
            self.error(token)

        if rule in (10, 11, 12, 13, 131):
            self._cur_obj = Expression()
            self._cur_obj.otype = self._stack[-2]
        elif rule in (7, 14):
            _arg = self._stack[-4]
            _cur = self._cur_obj

            if _cur.otype in _transform:
                _cur.arg = _transform[_cur.otype](_arg)
            elif _cur.otype in self.TRANSFORM_EXTENDED:
                try:
                    _cur.arg = self.TRANSFORM_EXTENDED[_cur.otype](_arg)
                except Exception, e:
                    self.error(_(u"Error in calling extended transformation "\
                                 u"function: %s") % ustring(str(e)))
            else:
                _cur.arg = _arg

            if rule == 14:
                self._cur_obj.negative = True
        elif rule in (4, 5):
            self._exp_pointer.add(self._cur_obj)
            self._cur_obj = None
        elif rule == 8:
            self._exp_pointer.add("AND")
        elif rule == 9:
            self._exp_pointer.add("OR")

        self._stack = self._stack[:(_len * -2)]
        _top = self._stack[-1]
        self._stack.append(_ntok)

        try:
            self._stack.append(self._goto.get(_top, _ntok))
        except KeyError:
            self.error(token)

        self._unget.append(token)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _accept(self, *args):
        """
        Complete parsing
        """

        self._done = True

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _type(self, arg):
        _types = {
            u"file": VFSTypeFile,
            u"dir": VFSTypeDir,
            u"link": VFSTypeLink,
            u"socket": VFSTypeSocket,
            u"fifo": VFSTypeFifo,
            u"char": VFSTypeChar,
            u"block": VFSTypeBlock,
            }

        try:
            return _types[arg]
        except KeyError:
            self.error(_(u"Invalid type{} argument: %s") % arg)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _name(self, arg):
        return re.compile(arg, re.U)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _iname(self, arg):
        return re.compile(arg, re.U | re.I)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _owner(self, arg):
        if not re.match(r"^(\w+)?(:(\w+))?$", arg):
            self.error(_(u"Invalid owner{} argument: %s") % arg)

        _tmp = arg.split(":")
        _uid = _tmp[0]

        if _uid == "":
            _uid = None
        elif not _uid.isdigit():
            try:
                _uid = pwd.getpwnam(_uid).pw_uid
            except (KeyError, TypeError):
                self.error(_(u"Invalid uid: %s") % _uid)
        else:
            _uid = int(_uid)

        if len(_tmp) > 1:
            _gid = _tmp[1]

            if not _gid.isdigit():
                try:
                    _gid = grp.getgrnam(_gid).gr_gid
                except (KeyError, TypeError):
                    self.error(_(u"Invalid gid: %s") % _gid)
            else:
                _gid = int(_gid)
        else:
            _gid = None

        return (_uid, _gid)
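
    # Editor's note, illustrative owner{} arguments accepted by _owner();
    # the user and group names below are examples only:
    #
    #   owner{root}        -> (<uid of "root">, None)   match by user only
    #   owner{:wheel}      -> (None, <gid of "wheel">)  match by group only
    #   owner{user:group}  -> (<uid>, <gid>)            match both
    #   owner{1000}        -> (1000, None)              numeric ids pass as int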

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _perm(self, arg):
        _any = False

        if not re.match(r"^\+?\d{4}$", arg):
            self.error(_(u"Invalid perm{} argument: %s") % arg)

        if arg.startswith(u"+"):
            _any = True
            _perm = int(arg[1:], 8)
        else:
            _perm = int(arg, 8)

        return (_any, _perm)
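
    # Editor's note: the (any, mode) tuple returned by _perm() is interpreted
    # by the permission matcher in Expression.match (illustrative):
    #
    #   perm{0644}   -> (False, 0644)  exact mode match
    #   perm{+0111}  -> (True, 0111)   match if any of these bits are set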

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _size(self, arg):
        _bytes = {
            u"B": 1,
            u"K": 1024,
            u"M": 1024 * 1024,
            u"G": 1024 * 1024 * 1024,
            u"T": 1024 * 1024 * 1024 * 1024,
            }

        _re = re.match(r"^\s*([<>]?\=?)\s*(\d+)\s*([BbKkMmGgTt]?)\s*$", arg)

        if _re is None:
            self.error(_(u"Invalid size{} argument: %s") % arg)
        else:
            _op = _re.group(1) or u"="
            _size = long(_re.group(2))
            _mod = _re.group(3) or None

            if _mod is not None:
                _size *= _bytes[_mod.upper()]

            return (_op, _size)
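
    # Editor's note, illustrative size{} arguments accepted by the regular
    # expression above:
    #
    #   size{1024}    -> (u"=", 1024)            exact size in bytes
    #   size{>= 10M}  -> (u">=", 10 * 1024**2)   at least 10 megabytes
    #   size{<1G}     -> (u"<", 1024**3)         less than 1 gigabyte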

# NOTE: the original listing is missing the definition that encloses the
# ``return _trans`` statement below.  The sketch is an editor's
# reconstruction based on how Expression.match uses it: ``link`` wraps a
# match function so that it is applied to the link target (``obj.data``)
# rather than the object itself.  The guard conditions are assumptions.

def link(func):
    """
    Wrap a match function so it matches against the link target
    """

    def _trans(obj, arg):
        # A non-link or a dangling link cannot match a link_* expression
        if not isinstance(obj.ftype, VFSTypeLink) or obj.data is None:
            return False

        return func(obj.data, arg)

    return _trans

#++++++++++++++++++++++++++++++++++++++++++++++++

class Expression(object):
    """
    FS rule expression class
    """

    MATCH_EXTENDED = {}

    @classmethod
    def extend(cls, token, match_func):
        cls.MATCH_EXTENDED[token] = match_func

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    @classmethod
    def unextend(cls, token):
        try:
            del cls.MATCH_EXTENDED[token]
        except KeyError:
            return False

        return True

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __init__(self):
        self.otype = None
        self.arg = None
        self.negative = False

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def match(self, vfsobj):
        """
        Check if object matches the rule
        """

        def _match_type(obj, arg):
            return isinstance(obj.ftype, arg)

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_name(obj, arg):
            if arg.search(obj.name) is None:
                return False
            else:
                return True

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_iname(obj, arg):
            if arg.search(obj.name) is None:
                return False
            else:
                return True

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_owner(obj, arg):
            if arg[0] is not None and arg[1] is not None:
                if (obj.uid, obj.gid) == arg:
                    return True
            elif arg[0] is not None and obj.uid == arg[0]:
                return True
            elif arg[1] is not None and obj.gid == arg[1]:
                return True

            return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_perm(obj, arg):
            if obj.mode is None:
                return False

            _any, _m = arg
            _mode = stat.S_IMODE(obj.mode.raw)

            if not _any and _mode == _m:
                return True
            elif _any and (_mode & _m):
                return True

            return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_size(obj, args):
            if obj.size is None:
                return False

            _op, _size = args

            _data = {u">": lambda x, y: x > y,
                     u">=": lambda x, y: x >= y,
                     u"<": lambda x, y: x < y,
                     u"<=": lambda x, y: x <= y,
                     u"=": lambda x, y: x == y,
                     }

            if _op in _data and _data[_op](obj.size, _size):
                return True

            return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_link_exists(obj, arg):
            if isinstance(obj.ftype, VFSTypeLink) and obj.data is not None:
                return True
            else:
                return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        _match_link_type = link(_match_type)
        _match_link_name = link(_match_name)
        _match_link_iname = link(_match_iname)
        _match_link_owner = link(_match_owner)
        _match_link_perm = link(_match_perm)
        _match_link_size = link(_match_size)

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        _match_f = {
            u"type": _match_type,
            u"name": _match_name,
            u"iname": _match_iname,
            u"owner": _match_owner,
            u"perm": _match_perm,
            u"size": _match_size,
            u"link_type": _match_link_type,
            u"link_name": _match_link_name,
            u"link_iname": _match_link_iname,
            u"link_owner": _match_link_owner,
            u"link_perm": _match_link_perm,
            u"link_exists": _match_link_exists,
            u"link_size": _match_link_size,
            }

        if self.otype in _match_f:
            _res = _match_f[self.otype](vfsobj, self.arg)
        elif self.otype in self.MATCH_EXTENDED:
            try:
                _res = self.MATCH_EXTENDED[self.otype](vfsobj, self.arg)
            except Exception, e:
                raise FSRuleError(_(u"Error in calling extended match "\
                                    u"function: %s") % ustring(str(e)))
        else:
            raise FSRuleError(_(u"Unable to find match function for token: %s")
                              % self.otype)

        if self.negative:
            return not _res
        else:
            return _res

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def __str__(self):
        return "<FSRule expression: %s, %s, %s>" % \
               (self.otype, str(self.arg), str(self.negative))

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __repr__(self):
        return self.__str__()
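
#++++++++++++++++++++++++++++++++++++++++++++++++

# Editor's sketch (not part of the original module): putting it together.
# An FSRule is compiled once from its string form and can then be matched
# against any number of VFSObject instances, e.g. while filtering a list of
# entries.  "objects" below is assumed to be a sequence of
# libxyz.vfs.vfsobj.VFSObject instances supplied by the application.
#
#   rule = FSRule(u"type{dir} or iname{readme}")
#   visible = [o for o in objects if rule.match(o)]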