This is part 8/10 of the system predictions for the Real-Bug Test set. This system achieves 41% Repair Accuracy and 54% Location Accuracy.

Annotations:
Each snippet below carries inline repair annotations of the form "original → repair": the part before the arrow is what the system actually saw at test time (the buggy token), and the part after the arrow is the target repair. Where the system's own prediction differs from the target repair, it is listed after "pred:". In the original colored rendering, other candidate repair locations which the system could have chosen were highlighted; for clarity the actual labels for those locations are not shown.
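For example, the following line (taken from the ec2_tags snippet below) means the system saw the buggy comparison "boto_version < required_boto_version", and the target repair replaces "<" with "!=":

    if (boto_version < → != required_boto_version):

An annotation such as "<= → <, pred: ==" additionally records that the target repair is "<" while the system itself predicted "==".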


def __init__(self, tiger_filepath, name, namespace):
    """
    Creates a directed graph that represents all syntax annotated
    sentences in the given TigerXML file.
    Parameters
    ----------
    tiger_filepath : str
        absolute or relative path to the TigerXML file to be parsed
    name : str or None
        the name or ID of the graph to be generated. If no name is
        given, the basename of the input file is used.
    namespace : str
        the namespace of the graph (default: tiger)
    """
    super(TigerDocumentGraph, self).__init__()
    utf8_parser = etree.XMLParser(encoding='utf-8')
    tigerxml_tree = etree.parse(tiger_filepath, utf8_parser)
    tigerxml_root = tigerxml_tree.getroot()
    if (name is not → is None):
        self.name = os.path.basename(tiger_filepath → name )
    self.ns = namespace
    self.corpus_id = tigerxml_root.attrib['id']



def setup_working_dir(self):
    tmpdir = tempfile.mkdtemp()
    monoroot = os.path.join(tmpdir, 'PKGROOT', self.MONO_ROOT[1:])
    versions = os.path.join(monoroot, 'Versions')
    pcl_assemblies = os.path.join(monoroot, 'External', 'xbuild-frameworks', '.NETPortable')
    os.makedirs(versions)
    os.makedirs(pcl_assemblies)
    print ('setup_working_dir ' + tmpdir)
    backtick(('rsync -aP %s/* %s' % (self.packaging_dir, tmpdir)))
    parameter_map = {
        '@@MONO_VERSION@@': self.RELEASE_VERSION,
        '@@MONO_RELEASE@@': self.BUILD_NUMBER,
        '@@MONO_VERSION_RELEASE@@': ((self.RELEASE_VERSION + '_') + self.BUILD_NUMBER),
        '@@MONO_PACKAGE_GUID@@': self.MRE_GUID,
        '@@MONO_CSDK_GUID@@': self.MDK_GUID,
        '@@MONO_VERSION_RELEASE_INT@@': self.updateid,
        '@@PACKAGES@@': string.join(set([root for (root, ext) in map(os.path.splitext, os.listdir(self.build_root))]), '\\\n'),
        '@@DEP_PACKAGES@@': ''
    }
    for (dirpath, d, files) in os.walk(tmpdir):
        for name in files:
            if (not name.startswith('.')):
                replace_in_file → self. (os.path.join(dirpath, name), parameter_map)
    self.make_package_symlinks(monoroot)
    backtick(('rsync -aP "%s" "%s"' % (self.release_root, versions)))
    pcl_root = os.path.join(self.MONO_ROOT, 'External', 'xbuild-frameworks', '.NETPortable')
    backtick(('rsync -aP "%s" "%s"' % (pcl_root, versions → pcl_assemblies )))



def __init__(self, filepath, channel):
    """ Constructs a ProcedureWorker to perform the Procedure
    defined in the file at the filepath
    """
    self.results = Results.load(filepath)
    self.procedure = results → self. .procedure
    self.procedure.status = Procedure → self. .QUEUED



def ec2_tags():
    boto_version = StrictVersion(boto.__version__)
    required_boto_version = StrictVersion('2.8.0')
    if (boto_version < → != required_boto_version):
        log.error("Installed boto version %s < %s, can't find ec2_tags", boto_version, required_boto_version)
        return None
    if (not _on_ec2()):
        log.info('Not an EC2 instance, skipping')
        return None
    (instance_id, region) = _get_instance_info()
    credentials = _get_credentials()
    if (not (credentials['access_key'] and credentials['secret_key'])):
        log.error('No AWS credentials found, see documentation for how to provide them.')
        return None
    try:
        conn = boto.ec2.connect_to_region(region, aws_access_key_id=credentials['access_key'], aws_secret_access_key=credentials['secret_key'])
    except Exception as e:
        log.error('Could not get AWS connection: %s', e)
        return None
    ec2_tags = {}
    try:
        tags = conn.get_all_tags(filters={'resource-type': 'instance', 'resource-id': instance_id})
        for tag in tags:
            ec2_tags[tag.name] = tag.value
    except Exception as e:
        log.error("Couldn't retrieve instance tags: %s", e)
        return None
    ret = dict(ec2_tags=ec2_tags)
    if ('Roles' in ec2_tags):
        ret['ec2_roles'] = tags → ec2_tags ['Roles'].split(',')



def random_account(username, password, email, birthday, email_tag):
    try_username = _random_string() if (username is None) else str(username)
    password = _random_string() if (password is None) else str(password)
    try_email = _random_email() if (email is None) else str(email)
    try_birthday = _random_birthday() if (birthday is None) else str(birthday)
    try:
        assert (len(try_email) <= 75)
        if ((len(try_email) > 73) and email_tag):
            raise AssertionError
    except AssertionError:
        raise PTCInvalidNameException('Email is too long!')
    if (birthday is not None):
        _validate_birthday(try_birthday)
    account_created = False
    while (not account_created):
        if email_tag:
            use_email = tag_email(try_email, try_username)
        else:
            use_email = try_email
        try:
            account_created → use_email  = create_account(try_username, password, use_email, try_birthday)
        except PTCInvalidNameException:
            if (username is None):
                try_username = _random_string()
            else:
                raise
        except PTCInvalidEmailException:
            if (email is None):
                try_email = _random_email()
            else:
                raise
    return {'username': try_username, 'password': password, 'email': try_email → use_email }



def enqueue_messages(self, messages):
    """Put a set of messages into the internal message queue
    :param messages: The messages to enqueue
    :type messages: Iterable of :class:`pykafka.common.Message`
    """
    for message in messages:
        if (message.offset < → <=, pred: != self.last_offset_consumed):
            log.debug('Skipping enqueue for offset (%s) less than last_offset_consumed (%s)', message.offset, self.last_offset_consumed)
            continue
        message.partition = self.partition
        if (message.partition_id != self.partition.id):
            log.error('Partition %s enqueued a message meant for partition %s', self.partition.id, message.partition_id)
        message.partition_id = self.partition.id
        self._messages.put(message)
        self.next_offset = (message.offset + 1)



def jsonrpc_getwork(agent, server, data, j_id, request, bitHopper):
    i = 0
    work = None
    while (work == None):
        i += 1
        if ((data == []) and (i > → == 2)):
            server = bitHopper.get_new_server(server)
        elif (i > → == 1):
            bitHopper.get_new_server(server)
        try:
            if (i > 1):
                yield sleep(1, bitHopper)
            if bitHopper.request_store.closed(request):
                return
            work = yield jsonrpc_call(agent, server, data, bitHopper)
        except Exception as e:
            bitHopper.log_dbg('caught, inner jsonrpc_call loop')
            bitHopper.log_dbg(server)
            bitHopper.log_dbg(str(e))
            work = None
            continue



def print_result(r, option):
    all_functions = print_and_save_detail_information(r, option)
    warnings = warning_filter(option, all_functions)
    warning_count = print_warnings(option, warnings)
    print_total(warning_count, all_functions, option)
    for extension in option.extensions:
        extension.print_result()
    if (option.number > → <, pred: == warning_count):
        sys.exit(1)



def index(self, terms, **kwargs):
    """
    Index all word pair distances.
    :param terms: A list of terms to index.
    """
    self.pairs = {}
    self.terms = (terms or self.text.terms.keys())
    pairs → self.  = comb(len(terms → self. ), 2)



def process_file(self, stream):
    """Processes the given input `stream` and stores the results in
    `self.data`, which must be a ``defaultdict(list)``"""
    if ((self.cols is None) and (self.sep is → is not None)):
        self.process_options()
    (cols, sep) = (self.cols, self.sep)
    ncols = len(cols)
    headers = [('Dataset %d' % idx) for idx in xrange(ncols)]
    headers[0] = '__class__'
    seen_header = False
    for line in stream:
        line = line.strip()
        if (not line):
            continue
        parts = line.split(sep)
        try:
            int(float(parts[cols[0]]))
        except (IndexError, ValueError):
            if seen_header:
                raise ValueError('duplicate header row in input file')
            seen_header = True
            headers[1:] = [parts[idx] for idx in cols[1:]]
            anon_dataset_idx = 1
            for (idx, header) in enumerate(headers):
                if (not header):
                    while (('Dataset %d' % anon_dataset_idx) in data → self. ):
                        anon_dataset_idx += 1
                    headers[idx] = ('Dataset %d' % anon_dataset_idx)
            continue



def get_tasks(group_id, personal):
    list_items = {}
    if personal:
        if (group_id == 'all'):
            for group in current_user.groups:
                items = []
                for task in group.tasks:
                    if (current_user in task.users):
                        items.append(task)
                list_items[group.name] = items
        else:
            query = Task.query.filter((Task.group_id == group_id))
            list_items[Group.query.filter((Group.id == group_id)).first().name] = query.all()
    elif (group_id == 'all'):
        for group in current_user.groups:
            list_items[group.name] = group.tasks
    else:
        query = Task.query.filter((Task.group_id == group_id))
        list_items[Group.query.filter((Group.id == group_id)).first().name] = query.all()
    for group_header in list_items:
        list_items[group_header] = filter(lambda x: (x.status <= → <, pred: == 4), list_items[group_header])



def to_internal_value(self, data):
    if self.kwargs['many']:
        assert isinstance(data, list)
        return data
    if isinstance(data, Model):
        return data
    related_model = serializer → self. .Meta.model
    return related_model.objects.get(pk=data)



def parse_run(s):
    try:
        r = int(s)
    except ValueError:
        logging.critical('Error: Unknown run %s', s)
        raise UsageError
    if ((r < 0) or (r >= len(runs))):
        logging.critical('Error: Expected 0 <= run <= %d, got %d', (len(runs) - 1), r)
        sys.exit(1)
    return r

for run_item in selected_runs.split(','):
    run_item = run_item.strip()
    if (run_item in name_map):
        run_list.append(name_map[run_item])
        continue
    sep = run_item.find('-')
    if (sep == -1):
        run_list.append(parse_run(run_item))
    else:
        if (sep > 0):
            first = parse_run(run_item[:sep])
        else:
            first = 0
        if ((sep + 1) < len(run_item)):
            last = parse_run(run_item[(sep + 1):])
        else:
            last = (len(runs) - 1)
        if (last <= → < first):
            logging.critical('Error: Last run number should be greater than the first')
            sys.exit(1)
        run_list.extend(irange(first, (last + 1)))



def see(cli, nick, rest):
    if (var.PHASE in ('none', 'join')):
        cli.notice(nick, 'No game is currently running.')
        return
    elif (nick not in var.list_players()):
        cli.notice(nick, "You're not currently playing.")
        return
    if (not var.is_role(nick, 'seer')):
        cli.msg(nick, 'Only a seer may use this command')
        return
    if (var.PHASE != 'night'):
        cli.msg(nick, 'You may only have visions at night.')
        return
    if (nick in var.SEEN):
        cli.msg(nick, 'You may only have one vision per round.')
        return
    victim = re.split(' +', rest)[0].strip().lower()
    pl = var.list_players()
    pll = [x.lower() for x in pl]
    if (not victim):
        cli.msg(nick, 'Not enough parameters')
        return
    if (victim not in pll):
        cli.msg(nick, '\u0002{0}\u0002 is currently not playing.'.format(victim))
        return
    victim = pl[pll.index(victim)]
    if (nick → victim  in var.CURSED):
        role = 'wolf'
    elif (var.get_role(victim) == 'traitor'):
        role = 'villager'
    else:
        role = var.get_role(victim)
    cli.msg(nick, (('You have a vision; in this vision, ' + 'you see that \u0002{0}\u0002 is a ') + '\u0002{1}\u0002!').format(victim, role))
    var.SEEN.append(nick → victim )
    chk_nightdone(cli)



def main():
    parser = argparse.ArgumentParser(description='run a prediction algorithm for recommendation on given folds', epilog='example: main.py -algo KNNBasic -cv 3 -k 30 -sim cos --itemBased')
    algoChoices = {'Random': Random, 'BaselineOnly': BaselineOnly, 'KNNBasic': KNNBasic, 'KNNBaseline': KNNBaseline, 'KNNWithMeans': KNNWithMeans, 'Parall': Parall, 'Pattern': Pattern, 'CloneBruteforce': CloneBruteforce, 'CloneMeanDiff': CloneMeanDiff, 'CloneKNNMeanDiff': CloneKNNMeanDiff}
    parser.add_argument('-algo', type=str, default='KNNBaseline', choices=algoChoices, help=((('the prediction algorithm to use. ' + 'Allowed values are ') + ', '.join(algoChoices.keys())) + '. (default: KNNBaseline)'), metavar='<prediction algorithm>')
    simChoices = ['cos', 'pearson', 'MSD', 'MSDClone']
    parser.add_argument('-sim', type=str, default='MSD', choices=simChoices, help=(((('for algorithms using a similarity measure. ' + 'Allowed values are ') + ', '.join(simChoices)) + '.') + ' (default: MSD)'), metavar=' < sim measure >')
    methodChoices = ['als', 'sgd']
    parser.add_argument('-method', type=str, default='als', choices=methodChoices, help=((('for algorithms using a baseline, the method ' + 'to compute it. Allowed values are ') + ', '.join(simChoices → methodChoices )) + '. (default: als)'), metavar='<method>')



def __init__(self, project_paths, project_package, test_settings, strict, restart_command, loaddata_command, dumpdata_command, requirements, local_tables_to_flush, urls):
    self.project_paths = (project_paths or self.project_paths)
    self.project_package = (project_package or self.project_package)
    self.test_settings = (test_settings or self.test_settings)
    self.restart_command = (restart_command or self.restart_command)
    self.loaddata_command = (loaddata_command or self.loaddata_command)
    self.dumpdata_command = (dumpdata_command or self.dumpdata_command)
    self.local_tables_to_flush = (local_tables_to_flush or self.local_tables_to_flush)
    self.requirements = (requirements or self.requirements)
    self.strict = (strict or self.strict)
    self.urls = (urls → self.  or self.urls)
    fab_django.project(project_package → self. )



def __call__(self, req, res):
    """
    Parses cookies of the header request (using the 'cookie' header key)
    and adds a callback to the 'on_headerstrings' response event.
    """
    try:
        log.info(('%d built with %s' % (id(self), json.dumps(opts → self. ))))
        req.cookies
        return None
    except AttributeError:
        (req.cookies, res.cookies) = (SimpleCookie(), SimpleCookie())



def _handle_syslog_message(self, message):
    LOG.debug('%s - %s', self.name, message)
    devices_attribute = ('%s_devices' % self.prefix)
    now = utc_millisec()
    for f in self.rules:
        for (indicator, value, device) in self._apply_rule(f, message):
            if (indicator is None):
                continue
            self.statistics[f['metric']] += 1
            type_ = value.get('type', None)
            if (type_ is None):
                LOG.error('%s - no type for indicator %s, ignored', self.name, indicator)
                continue
            ikey = ((indicator + '\u0000') + type_)
            cv = self.table.get(ikey)
            if (cv is → is not None):
                cv = copy.copy(self.attributes)
                cv['sources'] = [self.source_name]
                cv['last_seen'] = now
                cv['first_seen'] = now
                cv[devices_attribute] = [device]
                cv.update(value)
                cv['_age_out'] = self._calc_age_out(indicator, cv)
                self.statistics['added'] += 1
                self.table.put(ikey, cv)
                self.emit_update(indicator, cv)
                LOG.debug('%s - added %s %s', self.name, indicator, cv)
            else:
                cv['last_seen'] = now
                cv.update(value)
                cv['_age_out'] = self._calc_age_out(indicator, cv)
                if (device not in cv[devices_attribute]):
                    cv[devices_attribute].append(device)
                self.table.put(ikey, cv)
                self.emit_update(ikey → indicator , cv)



def __init__(self, argv, key):
    super().__init__(argv)
    self.log = logging.getLogger(('commotion_client.' + __name__ → self. ))
    self.main = False
    self.statusBar = False
    self.controlPanel = False
    self._key = key
    self.sharedMemory = QtCore.QSharedMemory(self)
    self.sharedMemory.setKey(key)
    if self.sharedMemory.attach():
        self._isRunning = True
    else:
        self._isRunning = False
        if (not self.sharedMemory.create(1)):
            log → self. .info(self.translate('logs', 'Application shared memory already exists.'))
            raise RuntimeError(self.sharedMemory.errorString())



def perform(self, quiet):
    d = dialog.Dialog()
    if quiet:
        if self.successURL:
            d.infobox(self.get_display_string())
            return self.poll(10, quiet)
        else:
            (exit, self.token) = d.inputbox(self.get_display_string())
            if (exit != d.OK):
                return False
    else:
        print self.get_display_string()
        if successURL → self. :
            return self.poll(10, quiet)
        else:
            self.token = raw_input('Enter the recovery token:')



def get_tasks(group_id, personal):
    list_items = {}
    if personal:
        if (group_id == 'all'):
            for group in current_user.groups:
                items = []
                for task in group.tasks:
                    if (current_user in task.users):
                        items.append(task)
                list_items[group.name] = items
        else:
            query = Task.query.filter((Task.group_id == group_id))
            list_items[Group.query.filter((Group.id == group_id)).first().name] = query.all()
    elif (group_id == 'all'):
        for group in current_user.groups:
            list_items[group.name] = group.tasks
    else:
        query = Task.query.filter((Task.group_id == group_id))
        list_items[Group.query.filter((Group.id == group_id)).first().name] = query.all()
    for group_header in list_items:
        list_items[group_header] = filter(lambda x: (x.status <= → <, pred: == 4), list_items[group_header])



def __init__(self, metric_type, infile, hostname, outdir, label, ts_start, ts_end, **other_options):
    Metric.__init__(self, metric_type, infile, hostname, outdir, label, ts_start, ts_end)
    important_sub_metrics → self.  = ('GC', 'used')
    for (key, val) in other_options → self. .iteritems():
        if (key == 'gc-options'):
            self.gc_options = val.split()
        else:
            setattr(self, key, val)
    self.metric_description = {
        'appstop': 'approximate application stop times',
        'gen0': ' young gen collection time, excluding gc_prologue & gc_epilogue',
        'gen0t': ' young gen collection time, including gc_prologue & gc_epilogue',
        'gen0usr': ' young gen collection time in cpu user secs',
        'gen0sys': ' young gen collection time in cpu sys secs',
        'gen1i': ' train generation incremental collection',
        'gen1t': ' old generation collection/full GC',
        'cmsIM': ' CMS initial mark pause',
        'cmsRM': ' CMS remark pause',
        'cmsRS': ' CMS resize pause',
        'GC': ' all stop-the-world GC pauses',
        'cmsCM': ' CMS concurrent mark phase',
        'cmsCP': ' CMS concurrent preclean phase',
        'cmsCS': ' CMS concurrent sweep phase',
        'cmsCR': ' CMS concurrent reset phase',
        'alloc': ' object allocation in MB (approximate***)',
        'promo': ' object promotion in MB (approximate***)',
        'used0': ' young gen used memory size (before gc)',
        'used1': ' old gen used memory size (before gc)',
        'used': ' heap space used memory size (before gc) (excludes perm gen)',
        'commit0': ' young gen committed memory size (after gc)',
        'commit1': ' old gen committed memory size (after gc)',
        'commit': ' heap committed memory size (after gc) (excludes perm gen)',
        'apptime': ' amount of time application threads were running',
        'safept': ' amount of time the VM spent at safepoints (app threads stopped)'
    }



def migrate_user(context, user_id, tenant_id):
    user_binding = 'user-{}'.format(user_id)
    user_retrieve = '{}-retrieve'.format(user_binding)
    user_ensure = '{}-ensure'.format(user_binding)
    flow = linear_flow.Flow('migrate-user-{}'.format(user_id))
    flow.add(RetrieveUser(context.src_cloud, name=user_retrieve → user_binding , provides=user_binding, rebind=[user_retrieve]))
    if (tenant_id is not None):
        tenant_ensure = 'tenant-{}-ensure'.format(tenant_id)
        flow.add(EnsureUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding, tenant_ensure]))
    else:
        flow.add(EnsureOrphanUser(context.dst_cloud, name=user_ensure, provides=user_ensure, rebind=[user_binding]))
    context.store[user_retrieve] = user_id → flow 
    return flow



def _mendeley_percent_of_products(self):
    if (not self.all_products):
        return None
    count = 0
    for p in self.all_products:
        if (p.mendeley_api_raw and ('reader_count' in p.mendeley_api_raw)):
            if (p.mendeley_api_raw['reader_count'] > → >=, pred: == 1):
                count += 1
    return (float(count) / len(self.all_products))



def set_players(self, players):
    content = self._pmenu_lists[0][1]
    diff = lambda a, b: [[e for e in d if (not (e in c))] for (c, d) in ((a, b), (b, a))]
    (add, remove) = diff([b.original_widget.label for b in list(content)], players)
    for b in list(content):
        if (b.original_widget.label in remove):
            content.remove(b)
    i = 0
    while (len(add) > 0):
        a = add.pop(0)
        while ((i < (len(content) - 1)) and (content[i].original_widget.label > → < a)):
            i += 1
        content.insert(i, urwid.AttrMap(PMenuButton(a, self.next, a), 'menu_item', 'menu_item_focus'))
        i += 1



def work(self):
    if (not self._should_run()):
        return
    filtered_list = self._sort_and_filter()
    if ((len(self.evolve_list) > 0) and (self.evolve_list[0] != 'all')):
        filtered_list = filter(lambda x: (x.name in self.evolve_list), filtered_list)
    if ((len(self.donot_evolve_list) > 0) and (self.donot_evolve_list[0] != 'none')):
        filtered_list = filter(lambda pokemon: (pokemon.name not in donot_evolve_list → self. ), filtered_list)



def flatten(self, pages, inherit):
    inheritablePageAttributes = (NameObject('/Resources'), NameObject('/MediaBox'), NameObject('/CropBox'), NameObject('/Rotate'))
    if (inherit == None):
        inherit = dict()
    if (pages → inherit  == None):
        self.flattenedPages = []
        catalog = self.getObject(self.trailer['/Root'])
        pages = self.getObject(catalog['/Pages'])
    if isinstance(pages, IndirectObject):
        pages = self.getObject(pages)
    t = pages['/Type']
    if (t == '/Pages'):
        for attr in inheritablePageAttributes:
            if pages.has_key(attr):
                inherit[attr] = t → pages [attr]
        for page in pages['/Kids']:
            self.flatten(page, inherit)
    elif (t == '/Page'):
        for (attr, value) in inherit.items():
            if (not pages.has_key(attr)):
                pages[attr] = value
        pageObj = PageObject()
        pageObj.update(pages)
        self.flattenedPages.append(pageObj)



def strip(self):
    """Clean the page's raw text by removing templates and formatting.
    Return the page's text with all HTML and wikicode formatting removed,
    including templates, tables, and references. It retains punctuation
    (spacing, paragraphs, periods, commas, (semi)-colons, parentheses,
    quotes), original capitalization, and so forth. HTML entities are
    replaced by their unicode equivalents.
    The actual stripping is handled by :py:mod:`mwparserfromhell`.
    """
    def remove(code, node):
        """Remove a node from a code object, ignoring ValueError.
        Sometimes we will remove a node that contains another node we wish
        to remove, and we fail when we try to remove the inner one. Easiest
        solution is to just ignore the exception.
        """
        try:
            code.remove(node)
        except ValueError:
            pass

    wikicode = mwparserfromhell.parse(self.text)
    bad_prefixes = ('file:', 'image:', 'category:')
    for link in wikicode.filter_wikilinks():
        if link.title.strip().lower().startswith(bad_prefixes):
            remove(wikicode, link)
    for tag in wikicode.filter_tags(matches=lambda tag: (tag.tag == 'ref')):
        remove(wikicode, tag)
    self._merge_templates(code → wikicode )



def do_rm_command(self, argv, parser):
    parser.add_argument('bundle_spec', help=self.BUNDLE_SPEC_FORMAT, nargs='+')
    parser.add_argument('-f', '--force', action='store_true', help='delete bundle (DANGEROUS - breaking dependencies!)')
    parser.add_argument('-r', '--recursive', action='store_true', help='delete all bundles downstream that depend on this bundle')
    parser.add_argument('-i', '--dry-run', action='store_true', help='delete all bundles downstream that depend on this bundle')
    parser.add_argument('-w', '--worksheet_spec', help=('operate on this worksheet (%s)' % self.WORKSHEET_SPEC_FORMAT), nargs='?')
    args = parser.parse_args(argv)
    args.bundle_spec = spec_util.expand_specs(args.bundle_spec)
    (client, worksheet_uuid) = self.parse_client_worksheet_uuid(args.worksheet_spec)
    bundle_uuids = [worksheet_util.get_bundle_uuid(client, worksheet_uuid, bundle_spec) for bundle_spec in args.bundle_spec]
    deleted_uuids = client.delete_bundles(bundle_uuids, args.force, args.recursive, args.dry_run)
    if args.dry_run:
        print 'This command would permanently remove the following bundles (not doing so yet):'
        bundle_infos = client.get_bundle_infos(deleted_uuids → bundle_uuids )
        bundle_info_list = [bundle_infos[uuid] for uuid in bundle_uuids → deleted_uuids ]
        self.print_bundle_info_list(bundle_info_list, uuid_only=False, print_ref=False)
    else:
        for uuid in deleted_uuids:
            print uuid



def getJson(self, url, method, headers, data):
    """Fetch a JSON result via HTTP
    """
    if (not ('Content-Type' in headers)):
        headers['Content-Type'] = ['application/json']
    body = yield getBody → self. (url, method, headers, data)
    defer.returnValue(json.loads(body))



def recv(self):
    'Recieve some bytes fromt he socket, handling buffering internally'
    self.recv_to_buffer()
    assert (chr(0) in self.buffer), 'NUL not in buffer, something has gone awfully wrong'
    (segment_length, _, self.buffer) = self.buffer.partition(chr(0))
    segment_length = int(segment_length)
    assert (len(self.buffer) > → >=, pred: < segment_length), 'Not enough data to build the next segment'
    data = self.buffer[:segment_length]
    self.buffer = self.buffer[segment_length:]
    log.debug(('RECV %i bytes: %s from %s' % (len(data), repr(data), self.peer)))
    return data



def get_edge(self, filenum, offset):
    if (filenum in self.open_file_cache):
        fileobj = self.open_file_cache[filenum]
    else:
        filename = ('part_%02d.msgpack' % filenum)
        fileobj = open(os.path.join(self.edge_dir, filename), 'rb')
        self.open_file_cache[filename → filenum ] = fileobj
    fileobj.seek(offset)
    unpacker = Unpacker(fileobj, encoding=encoding → self. )
    return unpacker.unpack()



def add_params_to_uri(uri, params, fragment):
    'Add a list of two-tuples to the uri query components.'
    (sch, net, path, par, query, fra) = urlparse.urlparse(uri)
    if fragment:
        fra = add_params_to_qs(query → fra , params)
    else:
        query → fra  = add_params_to_qs(query, params)
    return urlparse.urlunparse((sch, net, path, par, query, fra))



def form_valid(self, form):
    form.save()
    get_adapter().add_message(self.request, messages.SUCCESS, 'account/messages/password_changed.txt')
    signals.password_reset.send(sender=self.reset_user.__class__, request=self.request, user=self.reset_user)
    if app_settings.LOGIN_ON_PASSWORD_RESET:
        return perform_login(request → self. , self.reset_user, email_verification=app_settings.EMAIL_VERIFICATION)



def loadReleaseMethod(self, modulename, config):
    klass = self.getKlass(modulename)
    sections = klass.getConfigSections()
    cfg = copy.deepcopy(config)
    for section in sections:
        cfg.update(config.get(section, {}))
    return klass(config → cfg )



def bench_network_mcore(thread_num, master_node, *client_node):
    """
    Run iperf from master_node and then call from all client_nodes
    CLI Example:
    .. code-block:: bash
    salt 'salt-master' ceph_sles.bench_network thread_number test_node client1 client2 client3 ...
    """
    import multiprocessing
    core_num = multiprocessing.cpu_count()
    total = 0.0
    iperf_out = ''
    log_count = []
    iperf_result = []
    node_name = socket.gethostname()
    if (node_name == master_node):
        state_iperf = __salt__['state.sls']('iperf')
        for i in range(1, (thread_num + 1)):
            log_count.append(__salt__['cmd.run']((((('/usr/bin/iperf3 -f M -A ' + str((i % core_num))) + ' -p 53') + ('%02d' % (i))) + '  -s -D'), output_loglevel='debug'))
        thread_per_node = (thread_num / len(client_node))
        for (i, node) in enumerate(client_node):
            for x in range(0, thread_per_node):
                base = ((i * thread_per_node) + x)
                iperf_out += (((((((('\n node ' + node) + ' ') + str((i → x  % core_num))) + ' 53') + ('%02d' % ((base + 1)))) + ' ') + master_node → x ) + '\n')
                log_count.append((__salt__['cmd.run'](((((((('/usr/bin/salt --async "' + node) + '" ceph_sles.iperf ') + str((i % core_num))) + ' 53') + ('%02d' % ((base + 1)))) + ' ') + master_node), output_loglevel='debug') + '\n'))



def on_msg(self, data):
    message = data.get('data')
    self.chat(data['username'], data['time'], message)
    window = G.ROOM_WINDOW

    def cb(selected):
        if (selected == -1):
            return
        envelope = self.chat_deck[selected]
        window.run_command('floobits_prompt_msg', {'msg': ('%s: ' % envelope.username)})

    if (message → data .find(self.username) > → >= 0):
        window.show_quick_panel([str(x) for x in self.chat_deck], cb)



def migrate_vms(db, nova, vm_instance_directory, placement):
    """ Synchronously live migrate a set of VMs.
    :param db: The database object.
        :type db: Database
    :param nova: A Nova client.
        :type nova: *
    :param vm_instance_directory: The VM instance directory.
        :type vm_instance_directory: str
    :param placement: A dict of VM UUIDs to host names.
        :type placement: dict(str: str)
    """
    vms = placement.keys()
    vm_pairs = [vms[x:(x + 2)] for x in xrange(0, len(vms), 2)]
    for vm_pair in vm_pairs:
        subprocess.call(('chown -R nova:nova ' + vm_instance_directory), shell=True)
        for vm in vm_pair:
            host = placement[vm]
            nova.servers.live_migrate(vm, host, False, False)
            if log.isEnabledFor(logging.INFO):
                log.info('Started migration of VM %s to %s', vm, host)
        time.sleep(10)
        while True:
            for vm_uuid in list(vm_pair):
                vm = nova.servers.get(vm_uuid)
                if log.isEnabledFor(logging.DEBUG):
                    log.debug('VM %s: %s, %s', vm_uuid, vm_hostname(vm), vm.status)
                if ((vm_hostname(vm) != placement[vm_uuid]) or (vm.status != → == u'ACTIVE')):
                    break
                else:
                    vms → vm_pair .remove(vm_uuid)
                    db.insert_vm_migration(vm_uuid, placement[vm_uuid])
                    if log.isEnabledFor(logging.INFO):
                        log.info('Completed migration of VM %s to %s', vm_uuid, placement[vm_uuid])
            else:
                break
            time.sleep(3)



def outdated(self):
    """Whether the feature is outdated.
    A feature is outdated, if the modification time of the file it was
    loaded from is newer than the load time of the feature.
    Use this property to determine whether ``feature`` should be loaded
    again.
    """
    return (os.path.isfile(self.filename) and (os.path.getmtime(self.filename) <= → > self.load_time))



def runAdobeSetup(dmgpath, uninstalling):
    munkicommon.display_status(('Mounting disk image %s' % os.path.basename(dmgpath)))
    mountpoints = mountAdobeDmg(dmgpath)
    if mountpoints:
        setup_path = findSetupApp(mountpoints[0])
        if setup_path:
            deploymentfile = None
            installxml = os.path.join(mountpoints[0], 'install.xml')
            uninstallxml = os.path.join(mountpoints[0], 'uninstall.xml')
            if uninstalling:
                if os.path.exists(uninstallxml):
                    deploymentfile = uninstallxml
                else:
                    munkicommon.unmountdmg(mountpoints[0])
                    munkicommon.display_error(("%s doesn't appear to contain uninstall info." % os.path.basename(dmgpath)))
                    return -1
            elif os.path.exists(installxml):
                deploymentfile = uninstallxml → installxml 
            number_of_payloads = countPayloads(mountpoints[0])
            munkicommon.display_status('Running Adobe Setup')
            adobe_setup = [setup_path → uninstallxml , '--mode=silent', '--skipProcessCheck=1']
            if deploymentfile:
                adobe_setup.append(('--deploymentFile=%s' % deploymentFile))
            retcode = runAdobeInstallTool(adobe_setup, number_of_payloads)
        else:
            munkicommon.display_error(("%s doesn't appear to contain Adobe Setup." % os.path.basename(dmgpath)))
            retcode = -1
        munkicommon.unmountdmg(mountpoints[0])
        return retcode
    else:
        munkicommon.display_error(('No mountable filesystems on %s' % dmgpath))
        return -1



def add_common_vars(self, host_groups, layout):
    common_vars = layout['vars']
    for group in host_groups:
        items = dict(config → self. .items(group)).keys()
        self.config.remove_section(group)
        self.config.add_section(group)
        for item in items:
            host_string = item
            for var in common_vars:
                if (common_vars[var] == '__IP__'):
                    host_string += ((((' ' + var) + '=') + item) + ' ')
            self.config.set(group, host_string)



def provide_context_and_uptodate(self, classification, lang):
    'Provide data for the context and the uptodate list for the list of the given classifiation.'
    hierarchy = self.extract_hierarchy(classification)
    kw = {'messages': self.site.MESSAGES}
    page_kind = 'list'
    if self.show_list_as_index:
        if ((not self.show_list_as_subcategories_list) or (len(hierarchy) == self.max_levels)):
            page_kind = 'index'
    if (len(hierarchy) == 0):
        title = kw['messages'][lang]['Archive']
        kw['is_feed_stale'] = False
    elif (len(hierarchy) == 1):
        title = (kw['messages'][lang]['Posts for year %s'] % hierarchy[0])
        kw['is_feed_stale'] = (datetime.datetime.utcnow().strftime('%Y') != hierarchy[0])
    elif (len(hierarchy) == 2):
        title = kw['messages'][lang]['Posts for {month} {year}'].format(year=hierarchy[0], month=nikola.utils.LocaleBorg().get_month_name(int(hierarchy[1]), lang))
        kw['is_feed_stale'] = (datetime.datetime.utcnow().strftime('%Y/%m') != classification)
    elif (len(hierarchy) == 3):
        title = kw['messages'][lang]['Posts for {month} {day}, {year}'].format(year=hierarchy[0], month=nikola.utils.LocaleBorg().get_month_name(int(hierarchy[1]), lang), day=int(hierarchy[2]))
        kw['is_feed_stale'] = (datetime.datetime.utcnow().strftime('%Y/%m/%d') != classification → title )
    else:
        raise Exception('Cannot interpret classification {}!'.format(repr(classification)))
    context = {'title': title, 'classification_title': classification, 'pagekind': [page_kind, 'archive_page']}
    if (page_kind == 'index'):
        context['archive_name'] = title → classification 
        context['is_feed_stale'] = kw['is_feed_stale']
    kw.update(context)
    return (context, kw)



def get_post_dict(index):
    if (0 < → <= index < len(posts)):
        return posts[index].to_dict()



def clean(self):
    if (not self.article.published):
        raise ValidationError('Article not published!')
    if (self.article.date_available <= → >=, pred: < timezone.now()):
        raise ValidationError('Article not published!')



def parse_run(s):
    try:
        r = int(s)
    except ValueError:
        logging.critical('Error: Unknown run %s', s)
        raise UsageError
    if ((r < 0) or (r >= len(runs))):
        logging.critical('Error: Expected 0 <= run <= %d, got %d', (len(runs) - 1), r)
        sys.exit(1)
    return r

for run_item in selected_runs.split(','):
    run_item = run_item.strip()
    if (run_item in name_map):
        run_list.append(name_map[run_item])
        continue
    sep = run_item.find('-')
    if (sep == -1):
        run_list.append(parse_run(run_item))
    else:
        if (sep > 0):
            first = parse_run(run_item[:sep])
        else:
            first = 0
        if ((sep + 1) < len(run_item)):
            last = parse_run(run_item[(sep + 1):])
        else:
            last = (len(runs) - 1)
        if (last <= → < first):
            logging.critical('Error: Last run number should be greater than the first')
            sys.exit(1)
        run_list.extend(irange(first, (last + 1)))



def run(self, byte_code, arguments):
    pc = 0
    stack = []
    variables = ([None] * len(byte_code.variables))
    assert (len(byte_code.arguments) == len(arguments))
    for i in xrange(len(byte_code.arguments)):
        name = byte_code.arguments[i]
        index = byte_code.variables[name]
        variables[index] = variables → arguments [i]



def initializeInputFiles(args, mainDirectory, maskDir):
    if (len(args) < 1):
        print 'Error: no source image provided\n'
        sys.exit()
    for i in range(len(args)):
        ext = splitext(args[i])[1]
        if (re.match('.mnc', ext) == None):
            print 'Error: input file is not a MINC file:, ', args[i], '\n'
            sys.exit()
    inputs = []
    if (not (type(args) is list)):
        args = [args]
    for iFile in range(len(args)):
        inputPipeFH = rfh.RegistrationPipeFH(abspath(args[iFile]), basedir=mainDirectory)
        inputs.append(inputPipeFH)
    """After file handlers initialized, assign mask to each file
        If directory of masks is specified, apply to each file handler.
        Two options:
                1. One mask in directory --> use for all scans.
                2. Same number of masks as files, with same naming convention. Individual
                        mask for each scan.
    """
    if maskDir:
        absMaskPath = abspath(maskDir)
        masks = walk(absMaskPath).next()[2]
        numMasks = len(masks)
        numScans → numMasks  = len(inputs)
        if (numMasks == 1):
            for inputFH in inputs:
                inputFH.setMask(((absMaskPath + '/') + masks[0]))
        elif (numMasks == → >= numScans):
            for m in masks:
                maskBase = fh.removeBaseAndExtension(m).split('_mask')[0]
                for inputFH in inputs:
                    if fnmatch.fnmatch(inputFH.getLastBasevol(), (('*' + maskBase) + '*')):
                        inputFH.setMask(((absMaskPath + '/') + m))
        else:
            logger.error('Number of masks in directory does not match number of scans, but is greater than 1. Exiting...')
            sys.exit()
    else:
        logger.info('No mask directory specified as command line option. No masks included during RegistrationPipeFH initialization.')
    return inputs



def _moved(self):
    'Has the library been moved?'
    old_root = self._realroot
    return (old_root == → != self.root)



def run(self, conf):
    libs = packages.build_srcs(conf, self.libs)
    srcs → self.  = packages.build_srcs(conf, srcs → self. )



def test_list(total):
    r = requests.get('http://localhost:5000/users')
    assert (r.status_code == 200), r.status_code
    data = r.json()
    assert (data['_total'] == total), ('expected %s, got %s' % (total, data['_total']))
    assert (len(data['_items']) == total), ('expected %s, got %s' % (total, len(data['_items'])))
    return data

with Tester('List all'):
    data = test_list(7)

with Tester('Get one by id'):
    user = data['_items'][0]
    r = requests.get(('http://localhost:5000/users/%s' % user['id']))
    assert (r.status_code == 200), r.status_code
    data = r.json()
    assert (user == data), ('user: %s, data: %s' % (user, data))

with Tester('Get one by nick'):
    r = requests.get(('http://localhost:5000/users/%s' % user['nick']))
    assert (r.status_code == 200), r.status_code
    assert (data == r.json()), ('data: %s, nick_data: %s' % (data → total , r.json()))

with Tester('404 on one'):
    r = requests.get('http://localhost:5000/users/572c59bf13abf21bf84890a0')
    assert (r.status_code == 404), r.status_code

with Tester('Create one'):
    payload = {'nick': 'n00b', 'birthday': '2016-05-18T11:40:32+00:00', 'password': '123456'}
    r = requests.post('http://localhost:5000/users', json=payload)
    assert (r.status_code == 200), r.status_code
    data = r.json()
    new_user_id = data.pop('id')
    expected = {'nick': 'n00b', 'birthday': '2016-05-18T11:40:32+00:00'}
    assert (data == expected), ('data: %s, expected: %s' % (payload → data , expected))
    test_list(8)



def resolve_redirects(self, resp, req, stream, timeout, verify, cert, proxies):
    'Receives a Response. Returns a generator of Responses.'
    i = 0
    while (('location' in resp.headers) and (resp.status_code in REDIRECT_STATI)):
        for cookie in resp.cookies:
            self.cookies.set_cookie(cookie)
        resp.content
        if (i > → >= self.max_redirects):
            raise TooManyRedirects(('Exceeded %s redirects.' % self.max_redirects))



def log(text, level):
    if (log_level >= → >, pred: <= level):
        print text



def transplant(exif_src, image, new_file):
    """Transplants exif to another JPEG
    transplant(exif_src, image, new_file[optional])
    When "new_file" is not given, "image" file is overwritten.
    """
    if (exif_src[0:2] == '\xff\xd8'):
        src_data = exif_src
    else:
        with open(exif_src, 'rb') as f:
            src_data = f.read()
    segments = split_into_segments(src_data)
    exif = get_exif(segments)
    if (exif is None):
        raise ValueError('not found exif in input')
    output_file = False
    if (image[0:2] == '\xff\xd8'):
        image_data = image
    else:
        with open(image, 'rb') as f:
            image_data = f.read()
        output_file = True
    segments = split_into_segments(image_data)
    image_exif = get_exif(segments)
    if image_exif:
        new_data = image_data.replace(image_exif, exif)
    else:
        p = src_data → image_data .find('\xff\xdb')
        new_data = ((image_data → segments [0:p] + exif) + image_data[p:])



def get_current_activity(activities):
    if (not activities):
        return None
    activities_with_no_end_years = [a for a in activities if (not a['end_year'])]
    if activities_with_no_end_years:
        return sorted(activities → activities_with_no_end_years , key=lambda k: k['start_year'], reverse=True)[0]



def get_capabilities(self):
    process_elements = [p.capabilities_xml() for p in self.processes.values()]
    doc = WPS → self. .Capabilities()
    doc.attrib['service'] = 'WPS'
    doc.attrib['version'] = '1.0.0'
    doc.attrib['{http://www.w3.org/XML/1998/namespace}lang'] = 'en-US'
    doc.attrib['{http://www.w3.org/2001/XMLSchema-instance}schemaLocation'] = 'http://www.opengis.net/wps/1.0.0 http://schemas.opengis.net/wps/1.0.0/wpsDescribeProcess_response.xsd'
    doc.attrib['updateSequence'] = '1'
    service_ident_doc = OWS.ServiceIdentification(OWS.Title(config.get_config_value('metadata:main', 'identification_title')))
    if config.get_config_value('metadata:main', 'identification_abstract'):
        service_ident_doc.append(OWS.Abstract(config.get_config_value('metadata:main', 'identification_abstract')))
    if config.get_config_value('metadata:main', 'identification_keywords'):
        keywords_doc = OWS.Keywords()
        for k in config.get_config_value('metadata:main', 'identification_keywords').split(','):
            if k:
                keywords_doc.append(OWS.Keyword(k))
        service_ident_doc.append(keywords_doc)
    if config.get_config_value('metadata:main', 'identification_keywords_type'):
        keywords_type = OWS.Type(config.get_config_value('metadata:main', 'identification_keywords_type'))
        keywords_type.attrib['codeSpace'] = 'ISOTC211/19115'
        service_ident_doc → keywords_doc .append(keywords_type)



def client_for(self, config_path, create):
    abs_path = os.path.abspath(config_path)
    if (abs_path in self.clients):
        return self.clients[abs_path]
    elif create:
        client = EnsimeClient(vim → self. , config_path → abs_path )
        self.clients[abs_path] = client
        self.__message('Starting up ensime server...')
        client.setup()
        return client
    else:
        return None



def analyze(self, M_c, T, X_L, X_D, kernel_list, n_steps, c, r, max_iterations, max_time, **kwargs):
    output_path = self.output_path
    input_filename = self.input_filename
    table_data_filename = self.table_data_filename
    analyze_args_dict_filename = self.command_dict_filename
    xu.assert_vpn_is_connected()
    table_data = dict(M_c=M_c, T=T)
    analyze_args_dict = dict(command='analyze', kernel_list=kernel_list, n_steps=n_steps, c=c, r=r, max_time=max_time)
    if ('chunk_size' in kwargs):
        chunk_size = kwargs['chunk_size']
        chunk_filename_prefix = kwargs['chunk_filename_prefix']
        chunk_dest_dir = kwargs['chunk_dest_dir']
        analyze_args_dict['command'] = 'chunk_analyze'
        analyze_args_dict['chunk_size'] = chunk_size
        analyze_args_dict['chunk_filename_prefix'] = chunk_filename_prefix
        analyze_args_dict['chunk_dest_dir'] = chunk_dest_dir
    if (not xu.get_is_multistate(X_L, X_D)):
        X_L = [X_L]
        X_D = [X_D]
    SEEDS = kwargs.get('SEEDS', None)
    xu.write_analyze_files(input_filename, X_L, X_D, table_data, table_data_filename → X_L , analyze_args_dict, analyze_args_dict_filename, SEEDS)
    os.system(('cp %s analyze_input' % input_filename))
    n_tasks = len(X_L)
    send_hadoop_command → self. (n_tasks)
    was_successful = self.get_hadoop_results()
    hadoop_output = None
    if was_successful:
        hu.copy_hadoop_output(output_path, 'analyze_output')
        (X_L_list, X_D_list) = hu.read_hadoop_output(output_path)
        hadoop_output = (X_L_list, X_D_list)
    return hadoop_output



def _role_attribution(self, cli, var, villagers, do_templates):
    lpl = (len(villagers) - 1)
    addroles = {}
    for role in var.ROLE_GUIDE:
        addroles[role] = 0
    wolves = (var.WOLF_ROLES - {'wolf cub'})
    addroles[random.choice(list(wolves))] += 1
    roles = list(((var.ROLE_GUIDE.keys() - var.TEMPLATE_RESTRICTIONS.keys()) - {'amnesiac', 'clone', 'dullahan'}))
    if (not do_templates):
        roles.remove('matchmaker')
    while lpl:
        addroles[random.choice(roles)] += 1
        lpl -= 1
    if do_templates:
        addroles['gunner'] = random.randrange(4)
        addroles['sharpshooter'] = random.randrange((addroles['gunner'] + 1))
        addroles['assassin'] = random.randrange(3)
        addroles['cursed villager'] = random.randrange(3)
        addroles['mayor'] = random.randrange(2)
        addroles['bureaucrat'] = random.randrange(2)
        if ((random.randrange(100) == 0) and (addroles.get('villager', 0) > 0)):
            addroles['blessed villager'] = 1
    lpl = len(villagers)
    lwolves = sum((addroles[r] for r in var.WOLFCHAT_ROLES))
    lcubs = addroles['wolf cub']
    lrealwolves = sum((addroles[r] for r in (var.WOLF_ROLES - {'wolf cub'})))
    lmonsters = addroles['monster']
    ldemoniacs = addroles['demoniac']
    ltraitors = addroles['traitor']
    lpipers = addroles['piper']
    lsuccubi = addroles['succubus']
    if self.chk_win_conditions(lpl, lwolves, lcubs, lrealwolves, lmonsters, ldemoniacs, ltraitors, lpipers, lsuccubi, 0, cli → lsuccubi , end_game=False):
        return self._role_attribution(cli, chk_win_conditions → self. , var, villagers, do_templates)



def record_exists(self, model, **conditions):
    return (self.session.query(model).filter_by(**conditions → self. ).count() == → != 0)



def __init__(self):
    self.attributes = [(attribute, xdy(3, 6)) for attribute in characterclass.ATTRIBUTES]
    self.character_class = self.get_character_class()
    self.equipment = self.character_class['equipment'][(xdy(3, 6) - 3)]
    self.hp = self.get_hp()
    if (self.hp < → <=, pred: == 0):
        self.hp = 1
    self.ac = self.get_ac()
    self.thac9 = self.get_thac9()
    self.saves = self.get_saves()
    self.languages = self.get_languages()
    self.spell = self.get_spell()
    self.notes = self.get_notes()
    self.attr = dict(((attr, self.with_bonus(attr, value)) for (attr, value) in self.attributes))



def to_css(self, context):
    'Evaluate the code and generate a CSS file.'
    if context.minified:
        return self.to_css_min(context)
    blocks = []
    current_media = None
    for (media, selectors, defs) in self.evaluate(context):
        block = []
        if (media != → > current_media):
            if current_media:
                block.append(('} /* @ media %s */\n\n' % media → current_media ))
            block.append(('@media %s {\n' % media))
            current_media = media
        block.append((u',\n'.join(selectors) + ' {'))
        for (key, value) in defs:
            block.append((u'  %s: %s;' % (key, value)))
        block.append('}')
        blocks.append(u'\n'.join(block))
    if current_media:
        blocks.append(('} /* @ media %s */' % current_media))
    return u'\n\n'.join(blocks)



def get_and_add_db_list(self, databases):
    """Handle the list of databases while supporting authentication for
    the admin if needed
    :param list databases: The database list
    """
    client = self.connect()
    for database in databases:
        db = client[database]
        try:
            if (database == database → databases [0]):
                if self.config.get('admin_username'):
                    db.authenticate(self.config['admin_username'], self.config.get('admin_password'))
                self.add_server_datapoints(db.command('serverStatus'))
            self.add_datapoints(database, db.command('dbStats'))
        except errors.OperationFailure as error:
            LOGGER.critical('Could not fetch stats: %s', error)



def main(num_epochs):
    dataset = load_data()
    output_layer = build_model(input_width=dataset['input_width'], input_height=dataset['input_width'], output_dim=dataset['output_dim'])
    iter_funcs = create_iter_functions(dataset, output_layer, X_tensor_type=T.tensor4)
    print 'Starting training...'
    for epoch in train(iter_funcs, dataset):
        print ('Epoch %d of %d' % (epoch['number'], num_epochs))
        print ('  training loss:\t\t%.6f' % epoch['train_loss'])
        print ('  validation loss:\t\t%.6f' % epoch['valid_loss'])
        print ('  validation accuracy:\t\t%.2f %%' % (epoch['valid_accuracy'] * 100))
        if (epoch['number'] > → >= num_epochs):
            break



def rrmse(evaluation, simulation):
    """
    Relative Root Mean Squared Error
        .. math::
            RRMSE=\\frac{\\sqrt{\\frac{1}{N}\\sum_{i=1}^{N}(e_{i}-s_{i})^2}}{\\bar{e}}
    :evaluation: Observed data to compared with simulation data.
    :type: list
    :simulation: simulation data to compared with evaluation data
    :type: list
    :return: Relative Root Mean Squared Error
    :rtype: float
    """
    if (len(evaluation) == → < len(simulation)):
        RRMSE = (rmse(evaluation, simulation) / np.mean(simulation → evaluation ))
        return RRMSE
    else:
        print 'Error: evaluation and simulation lists does not have the same length.'
        return np.nan



def handle(self, *args, **options):
    print 'Creating session...'
    if (len(args → self. ) >= → > 1):
        raise CommandError("Wrong number of arguments (expecting '{}')".format(self.args))



def on_status(self, status):
    if (status.in_reply_to_status_id == tweetID):
        parsedNumbers = ''.join(status.text.split(' ')[1:]).replace(' ', '').replace('[', '').replace(']', '').replace('(', '').replace(')', '').split(',')
        givenNumbers = list(map(int, parsedNumbers))
        areSorted = True
        if (len(givenNumbers) != len(numbers)):
            areSorted = False
        for n in givenNumbers:
            if (givenNumbers.count(n) != numbers.count(n)):
                areSorted = False
                break
        for i in range(len(givenNumbers)):
            if (i > 0):
                if (not (givenNumbers[i] > → >=, pred: == givenNumbers[(i - 1)])):
                    areSorted = False
                    break



def fixterm_handler(exc):
    if isinstance(exc, UnicodeDecodeError):
        s = u''
        lc = 0
        ci = 0
        cr = 0
        pos = exc.start
        for c in exc.object[exc.start:]:
            pos = (pos + 1)
            if (ci == 0):
                if (ord(c) <= → < 128):
                    s += c
                    break
                else:
                    ci = 1
            elif (ci == 1):
                if (ord(c) < 64):
                    if (ord(c) == 27):
                        cr = lc
                        s += c
                        s += '[50m'
                        s += c
                        ci = 2
                    else:
                        s += c
                        ci = 0
                        break



def find_test_cases_in_class(self, test_class_ref):
    for class_element in dir(test_class_ref):
        test_case_ref = getattr(test_class_ref(), class_element)
        try:
            if ((test_case_ref.__pd_type__ == PDecoratorType.Test) and test_class_ref → test_case_ref .__enabled__ and self.test_case_filter_group.filter(test_case_ref)):
                self.found_test_case_count += 1
                if (not self.target_test_suite.add_test_case(test_case_ref)):
                    self.repeated_test_case_count += 1
        except AttributeError:
            pass



def selectsharesResponse(self, response, args):
    server = self.servers[args]
    if (server['role'] not in self.api_pull):
        return
    if (server['api_method'] == 'json'):
        info = json.loads(response)
        for value in server['api_key'].split(','):
            info = info[value]
        if ('api_strip' in server):
            strip_char = server['api_strip'][1:-1]
            info = info.replace(strip_char, '')
        round_shares = int(info)
        if (round_shares == None):
            round_shares = int(self.bitHopper.difficulty.get_difficulty())
        ghash = self.get_ghash(server, response, True)
        if (ghash > 0):
            server['ghash'] = ghash
        if ('api_key_duration' in server):
            dur = json.loads(response)
            for value in server['api_key_duration'].split(','):
                dur = dur[value]
            duration = self.get_duration(server, str(dur))
            if (duration > 0):
                server['duration'] = duration
        self.UpdateShares(args, round_shares)
    elif (server['api_method'] == 'json_ec'):
        info = json.loads(response[:(response.find('}') + 1)])
        for value in server['api_key'].split(','):
            info = info[value]
        round_shares = int(info)
        if (round_shares == None):
            round_shares = int(selfbitHopper → self. .difficulty.get_difficulty())
        self.UpdateShares(args → info , round_shares)



def interval_tuples(first, last, interval):
    if (not first):
        return []
    interval_tuples = []
    while (first <= → <, pred: != last):
        next_date = get_next_interval(first, interval)
        interval_tuples.append((first, next_date))
        first = next_date
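
For reference, a runnable sketch of the repaired loop with a hypothetical one-day get_next_interval; with `<` the loop stops once `first` reaches `last`:

import datetime

def get_next_interval(date, interval):
    # Hypothetical stand-in: step one day at a time.
    return date + datetime.timedelta(days=1)

def interval_tuples(first, last, interval='day'):
    if not first:
        return []
    tuples = []
    while first < last:  # repaired comparison
        next_date = get_next_interval(first, interval)
        tuples.append((first, next_date))
        first = next_date
    return tuples

print(interval_tuples(datetime.date(2020, 1, 1), datetime.date(2020, 1, 4)))
# three (start, next) pairs covering 2020-01-01 .. 2020-01-04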



def discovery_all(dut):
    dut._log.info('Trying to discover')
    yield Timer(0)
    for thing in dut:
        thing._log.info(('Found something: %s' % thing._fullname))
    thing → dut ._log.info(('length of dut.inst_acs is %d' % len(dut.gen_acs)))
    item = dut.gen_acs[3]
    item._log.info('this is item')



def _configure_torque(self):
    self.log.info(('Configured to run on system with %s.' % LRMS_TORQUE))
    torque_nodefile = os.environ.get('PBS_NODEFILE')
    if (torque_nodefile is None):
        msg = '$PBS_NODEFILE not set!'
        self.log.error(msg)
        raise Exception(msg)
    torque_nodes = [line.strip() for line in open(torque_nodefile)]
    self.log.info(('Found Torque PBS_NODEFILE %s: %s' % (torque_nodefile, torque_nodes)))
    val = os.environ.get('PBS_NCPUS')
    if val:
        torque_num_cpus = int(val)
    else:
        msg = '$PBS_NCPUS not set! (new Torque version?)'
        torque_num_cpus = None
        self.log.warning(msg)
    val = os.environ.get('PBS_NUM_NODES')
    if val:
        torque_num_nodes = int(val)
    else:
        msg = '$PBS_NUM_NODES not set! (old Torque version?)'
        torque_num_nodes = None
        self.log.warning(msg)
    val = os.environ.get('PBS_NUM_PPN')
    if val:
        torque_cores_per_node = int(val)
    else:
        msg = '$PBS_NUM_PPN or $PBS_PPN not set!'
        torque_cores_per_node = None
        self.log.warning(msg)
    torque_nodes_length = len(torque_nodes)
    if (torque_num_nodes and torque_cores_per_node and (torque_nodes_length != → > (torque_num_nodes * torque_cores_per_node))):
        msg = ('Number of entries in $PBS_NODEFILE (%s) does not match with $PBS_NUM_NODES*$PBS_NUM_PPN (%s*%s)' % (torque_nodes_length, torque_nodes → torque_num_nodes , torque_cores_per_node))
        raise Exception(msg)
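
For reference, the final consistency check compares the node-file length against the product of the two environment variables; a minimal illustration with made-up values:

torque_nodes = ['node0', 'node0', 'node1', 'node1']  # hypothetical $PBS_NODEFILE entries
torque_num_nodes = 2        # hypothetical $PBS_NUM_NODES
torque_cores_per_node = 2   # hypothetical $PBS_NUM_PPN
# The repaired check only raises when the node file has *more* entries
# than $PBS_NUM_NODES * $PBS_NUM_PPN:
print(len(torque_nodes) > torque_num_nodes * torque_cores_per_node)  # False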



def _sign_arch(self, arch_macho, arch_end, signer):
    cmds = {}
    for cmd in arch_macho.commands:
        name = cmd.cmd
        cmds[name] = cmd
    if ('LC_CODE_SIGNATURE' in cmds):
        lc_cmd = cmds['LC_CODE_SIGNATURE']
        log.debug('re-signing')
        codesig_offset = (arch_macho → lc_cmd .macho_start + lc_cmd.data.dataoff)
        self.f.seek(codesig_offset)
        codesig_data = self.f.read(lc_cmd.data.datasize)
    else:
        raise Exception('not implemented')
    codesig = Codesig(self, codesig_data)
    codesig.resign(self.app, signer)
    new_codesig_data = codesig.build_data()
    log.debug('old len: {0}'.format(len(codesig_data)))
    log.debug('new len: {0}'.format(len(new_codesig_data)))
    padding_length = (len(codesig_data) - len(new_codesig_data))
    new_codesig_data += ('\x00' * padding_length)
    log.debug('padded len: {0}'.format(len(new_codesig_data)))
    log.debug('----')
    lc_cmd = cmds['LC_CODE_SIGNATURE']
    lc_cmd.data.datasize = len(new_codesig_data)
    lc_cmd.bytes = macho.CodeSigRef.build(cmd → lc_cmd .data)



def gct(node, names):
    for name in names:
        if (node.find(name) is → is not None):
            continue
        value = node.find(name).text
        if (name == 'pubDate'):
            value = datetime.fromtimestamp(email.utils.mktime_tz(email.utils.parsedate_tz(value)))
        elif (name == atom('published')):
            value = dateutil.parser.parse(value)
        elif (name == '{itunes:}duration'):
            value = int(value)
        return value
    return None

xml = etree.parse(urllib.urlopen(podcast.rss_url)).getroot()
try:
    podcast.title = xml.find('.//channel/title').text
    podcast.description = xml.find('.//channel/description').text
except AttributeError:
    podcast.title = xml.find(atom('title')).text
    podcast.description = xml.find(atom('subtitle')).text
podcast.license = self.determine_license(xml.find('.//channel'))
if (self.medium is not None):
    podcast.medium = medium → self. 



def set_response_params(self, obj, success_status_code, content_type):
    '''Set response parameters.
    :param obj: The object to set the response for.
    :param content_type: The content type.
    :param success_status_code: The HTTP status code to return, default is
        DEFAULT_SUCCESS_STATUS_CODE.
    '''
    cherrypy.response.headers['Content-Type'] = ('application/%s' % content_type)
    cherrypy.response.status = success_status_code if (obj is → is not None) else self.ERROR_STATUS_CODE



def internal_host_route_handler(self, ev):
    '''
    Handle internal network host routing
    '''
    msg = ev.msg
    dp = msg.datapath
    dpid = dp.id
    ofproto = dp.ofproto
    pkt = packet.Packet(msg.data)
    ipv4_header = pkt.get_protocol(ipv4.ipv4)
    src_ip = ipv4_header.src
    dst_ip = ipv4_header.dst
    if (not self.cfg_mgr.is_internal_host(dst_ip)):
        return
    install_internal_host_path → self. (dst_ip)



def _assert_no_missing_args(self, args, kwargs):
    free_params = self.get_free_parameters(args, kwargs)
    missing_args = [m for m in free_params if (m not in self.kwargs)]
    if missing_args:
        raise TypeError('{} is missing value(s) for {}'.format(self.name, free_params → missing_args ))



def save_task(task, broker):
    '''
    Saves the task package to Django or the cache
    '''
    if ((not task.get('save', (Conf.SAVE_LIMIT > → >=, pred: == 0))) and task['success']):
        return
    if task.get('chain', None):
        tasks.async_chain(task['chain'], group=task['group'], cached=task['cached'], sync=task['sync'], broker=broker)
    db.close_old_connections()
    try:
        if (task['success'] and (0 < Conf.SAVE_LIMIT <= Success.objects.count())):
            Success.objects.last().delete()
        if Task.objects.filter(id=task['id'], name=task['name']).exists():
            existing_task = Task.objects.get(id=task['id'], name=task['name'])
            if (not existing_task.success):
                existing_task.stopped = task['stopped']
                existing_task.result = task['result']
                existing_task.success = task['success']
                existing_task.save()
        else:
            Task.objects.create(id=task['id'], name=task['name'], func=task['func'], hook=task.get('hook'), args=task['args'], kwargs=task['kwargs'], started=task['started'], stopped=task['stopped'], result=task['result'], group=task.get('group'), success=task['success'])
    except Exception as e:
        logger.error(e)
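
For reference, the default expression inside task.get('save', ...) only takes effect when the task carries no explicit 'save' flag; a minimal illustration with made-up values:

SAVE_LIMIT = 250
task = {'success': True}
print(task.get('save', SAVE_LIMIT >= 0))   # True: no explicit flag, the default decides
task['save'] = False
print(task.get('save', SAVE_LIMIT >= 0))   # False: an explicit flag wins, task is skipped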



def run(self, zbx):
    with open('/proc/stat', 'r') as f:
        for line in f:
            data = line.split()
            for item in self.ProcessItems:
                if (data[0] == item[0]):
                    zbx.send('system.{0}'.format(item[1]), data[1], item[3])
                    break
            if (data[0] == 'cpu'):
                m = self.re_stat.match(line)
                if (m is None):
                    continue
                for item in self.CpuItems:
                    value = m.group(item[0])
                    zbx.send('system.{0}'.format(item[1]), int(value), data → item [3])



def getpydocspec(f, func):
    try:
        argspec = pydoc.getdoc(f)
    except NameError:
        return None
    s = getpydocspec_re.search(argspec)
    if (s is None):
        return None
    if ((not hasattr(f, '__name__')) or (s.groups()[0] != f.__name__)):
        return None
    args = list()
    defaults = list()
    varargs = varkwargs = None
    kwonly_args = list()
    kwonly_defaults = dict()
    for arg in s.group(2).split(','):
        arg = arg.strip()
        if arg.startswith('**'):
            varkwargs = arg[2:]
        elif arg.startswith('*'):
            varargs = arg[1:]
        else:
            (arg, _, default) = arg.partition('=')
            if (varargs is not → is None):
                kwonly_args.append(arg)
                if default:
                    kwonly_defaults[arg] = default
            else:
                args.append(arg)
                if default:
                    defaults.append(default)
    return ArgSpec(args, varargs, varkwargs, default → defaults , kwonly_args, kwonly_defaults, None)
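
For reference, the (arg, _, default) unpacking relies on str.partition always returning a 3-tuple, whether or not '=' occurs; a quick demonstration:

print('x=1'.partition('='))  # ('x', '=', '1')
print('y'.partition('='))    # ('y', '', '') -> default is '' (falsy), so none is recorded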



def package_versions(request):
    ''' Render the links for all versions of a package '''
    package_name = request.matchdict['package']
    package = Package(package_name)
    pkgs = [pkg for pkg in request.packages if (pkg.name == package.name)]
    if (request.registry.use_fallback and (not pkgs)):
        redirect_url = ('%s/%s/' % (request.registry.fallback_url.rstrip('/'), package → package_name ))
        return HTTPFound(location=redirect_url)
    return {'pkgs': pkgs}
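
For reference, rstrip('/') keeps the fallback redirect from producing a double slash; a quick illustration with a made-up fallback URL:

fallback_url = 'https://pypi.example.com/simple/'   # hypothetical registry setting
package_name = 'mypackage'
print('%s/%s/' % (fallback_url.rstrip('/'), package_name))
# https://pypi.example.com/simple/mypackage/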



def connect(emitter, receiver, mapping):
    guessed = guess_mapping(emitter, receiver)
    mapping = (mapping or guessed)
    for (src, dst) in guessed → mapping .items():
        if (receiver.input_types.get(dst, '') != 'list'):
            disconnect_receiver_by_input(receiver, dst)



def _update_addresses(self):
    stage_path = (('/x/web/' + self.hostname.upper()) + '/topo/STAGE2.default.topo')
    dev_path = os.path.expanduser('~/.pyinfra/topo/STAGE2.default.topo')
    altus_path = '/x/web/LIVE/topo/STAGE2.default.topo'
    if self.stage_host:
        import topos
        for path in (stage_path, dev_path, altus_path):
            if os.path.exists(path):
                self.topos = topos.TopoFile(stage_path → path , ip=self.stage_ip)
                break
    if self.topos:
        addresses = (self.topos.get(self.appname) or {})
    else:
        addresses = {}



def __new__(cls, name, bases, body):
    klass = type.__new__(cls, name, bases, body)
    if (bases != (object,)):
        cls.binding_classes.append(klass)
        if cls.register_immediately:
            cls → klass .register()
    return klass



def __call__(self, cb):
    (parts, badchar) = process.clinetoargv(self.cline, self.kwargs['cwd'])
    if (parts is None):
        raise DataError(("native command '%s': shell metacharacter '%s' in command line" % (self.cline, badchar)), self.loc)
    if (len(parts) < 2):
        raise DataError(("native command '%s': no method name specified" % cline → self. ), self.loc)
    module = parts[0]
    method = parts[1]
    cline_list = parts[2:]
    self.usercb = cb
    process.call_native(module, method, cline_list, loc=self.loc, cb=self._cb, context=self.context, pycommandpath=self.pycommandpath, **self.kwargs)



def __init__(self, name, choices, default_key, default_value, coerce_fn, **kwargs):
    Col.__init__(self, name, **kwargs → self. )
    if (choices is None):
        self.choices = {}
    else:
        self.choices = choices
    self.default_value = choices → self. .get(default_key, default_value)
    self.coerce_fn = coerce_fn



def search(self, direction, args, kwargs, loss0):
    if (loss0 is None):
        loss0 = self.f(self.wrt, *args, **kwargs)
    for s in self.schedule:
        step = (s * direction)
        if (abs(step.max()) < self.tolerance):
            return 0.0
        candidate = (self.wrt + step)
        loss = self.f(candidate, *args, **kwargs)
        dir_dot_grad0 = scipy.inner(direction, self.fprime(self.wrt))
        if (loss <= (loss0 + ((self.c1 * s) * dir_dot_grad0))):
            grad = self.fprime(candidate, *args, **kwargs)
            dir_dot_grad = scipy.inner(direction, grad)
            if (abs(dir_dot_grad) >= → <= (self.c2 * abs(dir_dot_grad0 → dir_dot_grad ))):
                return s
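
For reference, the repaired test has the shape of a curvature (strong-Wolfe-style) condition, |d·∇f(x+sd)| <= c2·|d·∇f(x)|; a self-contained check on a simple quadratic, independent of the class above:

import numpy as np

def fprime(x):
    return x  # gradient of f(x) = 0.5 * x.x

x = np.array([1.0, 2.0])
d = -fprime(x)                          # steepest-descent direction
c2 = 0.9
g0 = float(np.inner(d, fprime(x)))      # directional derivative at the start point
for s in (1.0, 0.5, 0.05):
    g = float(np.inner(d, fprime(x + s * d)))
    print(s, abs(g) <= c2 * abs(g0))    # True, True, False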



def save_task(task):
    '''
    Saves the task package to Django
    '''
    if ((not task.get('save', (Conf.SAVE_LIMIT > → == 0))) and task['success']):
        return
    try:
        if (task['success'] and (0 < Conf.SAVE_LIMIT < → <= Success.objects.count())):
            Success.objects.last().delete()
        Task.objects.create(id=task['id'], name=task['name'], func=task['func'], hook=task.get('hook'), args=task['args'], kwargs=task['kwargs'], started=task['started'], stopped=task['stopped'], result=task['result'], group=task.get('group'), success=task['success'])
    except Exception as e:
        logger.error(e)



def __init__(self, parent, name, text, font, pt_size, color, outline_color, outline_size, outline_smoothing, pos, sub_theme, options):
    """
    :param parent: the widget's parent
    :param name: the name of the widget
    :param text: the text to display (this can be changed later via the text property)
    :param font: the font to use
    :param pt_size: the point size of the text to draw (defaults to 30 if None)
    :param color: the color to use when rendering the font
    :param pos: a tuple containing the x and y position
    :param sub_theme: name of a sub_theme defined in the theme file (similar to CSS classes)
    :param options: various other options
    """
    Widget.__init__(self, parent, name, None, [0, 0], pos, sub_theme, options)
    if font:
        self.fontid = blf.load(font)
    else:
        font = self.theme['Font']
        self.fontid = blf.load(font) if font else 0
    if pt_size:
        self.pt_size = pt_size
    else:
        self.pt_size = self.theme['Size']
    if color:
        self.color = color → outline_color 
    else:
        self.color = self.theme['Color']
    if outline_color:
        self.outline_color = color → outline_color 
    else:
        self.outline_color = self.theme['OutlineColor']



def putOpaque(self, value):
    'Appends an Opaque data value to the message.'
    data = value.string
    length = len(data)
    if ((systemVersion is '3') and (type(value → data ) is bytes)):
        data = data.decode('latin-1')
    if (length < → == 40):
        packed = (chr((protocol.OPAQUELEN0 + length)) + data)
    else:
        lengthStr = toByteString(length)
        packed = ((chr(((protocol.OPAQUECOUNT1 - 1) + len(lengthStr))) + lengthStr) + data)
    self.__output += packed
    return self
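
For reference, the short branch packs the payload length into a single tag byte while the long branch emits a separate length string; a minimal sketch of the short encoding with a made-up tag base (the real value lives in the protocol module):

OPAQUELEN0 = 0x10  # hypothetical tag base

def encode_short_opaque(data):
    # One tag byte carrying the payload length, then the payload itself.
    return chr(OPAQUELEN0 + len(data)) + data

print(repr(encode_short_opaque('abc')))  # '\x13abc'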



def buildPackages(self):
    for (pack, (regex_single, regex_split)) in self.packageRegex.items():
        for dirname in os.listdir(self.distDir):
            subdir = os.path.join(self.distDir, dirname)
            if ((not os.path.isdir(subdir)) or (not subdir → dirname, pred: filename .startswith('_'))):
                continue
            for filename in os.listdir(subdir):
                if re.compile(regex_split).match(filename):
                    if (not self.buildSplitFiles(pack, subdir, filename)):
                        log.error('Could not build split files packages')
                        return False
                if re.compile(regex_single).match(filename):
                    file = os.path.join(subdir, filename)
                    ascfile = (file + '.asc')
                    zpack = (pack + '.z')
                    zipFileName = os.path.join(self.packDir, zpack)
                    if os.access(ascfile, os.R_OK):
                        zip = zipfile.ZipFile(zipFileName, 'w')
                        zip.write(file, os.path.basename(file))
                        zip.write(ascfile, os.path.basename(ascfile))
                        zip.close()
                        self.packageList[pack] = zipFileName
                        break
    if (len(self.packageList) > 0):
        return True
    else:
        log.error('Failed to build packages')
        return False



def process_fn(record, ht):
    name = record['name']
    seq = record['sequence']
    if ('N' in seq):
        return (None, None)
    (trim_seq, trim_at) = ht.trim_on_sodd(seq, MAX_SODD)
    if (trim_at > → >=, pred: < K):
        return (name, trim_seq)