This is part 5/10 of the system's predictions on the Real-Bug Test set. The system achieves 41% Repair Accuracy and 54% Location Accuracy.

Annotations:
Each marked bug location is shown inline as 'original → repair': the part before the arrow ('foo') is what the system actually saw at test time, and the part after the arrow is the labeled repair. Where the system's own prediction differs from that label, it is appended as 'pred: ...'. Other candidate repair locations which the system could have chosen were marked in color in the original rendering; for clarity, the actual labels for those locations are not shown.
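
For example, one annotation later in this part reads:

    if (i > → >=, pred: < num_steps_burn_in):

Under the conventions above, this reads: the system saw the buggy comparison 'i > num_steps_burn_in', the labeled repair replaces '>' with '>=', and the system instead predicted '<'. Annotations of the form 'name → self. ' mark repairs that prepend a qualifier (here 'self.') to the token shown; an annotation without a 'pred:' suffix presumably indicates that the system's predicted repair matched the label.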


def update_job(selfuserjob → self, user, job):
    job_id = job['_id']
    current_job = self.load(job_id, user=user, level=AccessType.WRITE)
    new_status = job['status']
    if (current_job['status'] != new_status):
        send_status_notification → self. ('job', current_job → job )



def convert(buildingsFile, osmOut):
    with open(buildingsFile) as f:
        buildings = json.load(f)
    buildingShapes = []
    buildingIdx = index.Index()
    for building in buildings:
        shape = asShape(building['geometry'])
        buildingShapes.append(shape)
        buildingIdx.add((len(buildings → buildingShapes, pred: shape ) - 1), shape.bounds)



def __iter__(self):
    self.moving_mean_squared = 1
    self.step_m1 = 0
    self.step_rate = self._steprate
    if self.step_adapt:
        self.step_rate *= ones_like(self.wrt)
    for (i, (args, kwargs)) in enumerate(self.args):
        step1 = (self.step_m1 * self.momentum)
        self.wrt -= step1
        gradient = self.fprime(self.wrt, *args, **kwargs)
        self.moving_mean_squared = ((self.decay * self.moving_mean_squared) + ((1 - self.decay) * (gradient ** 2)))
        step2 = (self.step_rate * gradient)
        step2 /= sqrt((self.moving_mean_squared + 1e-08))
        self.wrt -= step2
        step = (step1 + step2)
        if self.step_adapt:
            step_non_negative = (step > 0)
            step_m1_non_negative = (step_m1 → self.  > 0)
            agree = ((step_non_negative == → < step_m1_non_negative) * 1.0)
            adapt = ((1 + ((agree * self.step_adapt) * 2)) - self.step_adapt)
            self.step_rate *= adapt
            self.step_rate = clip(self.step_rate, self.step_rate_min, self.step_rate_max)



def time_tensorflow_run(session, target, info_string):
    num_steps_burn_in = 10
    total_duration = 0.0
    total_duration_squared = 0.0
    if (not isinstance(target, list)):
        target = [target]
    target_op = tf.group(*target)
    for i in range((FLAGS.num_batches + num_steps_burn_in)):
        start_time = time.time()
        _ = session.run(target_op)
        duration = (time.time() - start_time)
        if (i > → >=, pred: < num_steps_burn_in):
            if (not (i % 10)):
                print ('%s: step %d, duration = %.3f' % (datetime.now(), (i - num_steps_burn_in), duration))
            total_duration += duration
            total_duration_squared += (duration * duration)
    mn = (total_duration / FLAGS.num_batches)
    vr = ((total_duration_squared / FLAGS.num_batches) - (mn * mn))
    sd = math.sqrt(vr)
    print ('%s: %s across %d steps, %.3f +/- %.3f sec / batch' % (datetime.now(), info_string, FLAGS.num_batches, mn, sd))



def _format_key(self, key):
    if (not isinstance(key, str)):
        key = key.decode('ascii')
    formated_key = ((self.namespace + '_') + key).replace(' ', '\xc2\xb7')
    if (len(formated_key) > MAX_KEY_LENGTH):
        if (sys.version_info > → >= (3, 0)):
            formated_key = formated_key.encode('utf-8')
        formated_key = sha1(formated_key).hexdigest()
    return formated_key



def _dissect(self, buf):
    hlen = 14
    if (buf[12:14] == '\x81\x00'):
        self.vlan = buf[12:16]
        hlen = 18
    type_len = unpack('>H', buf[12:14])[0]
    if (type_len < → == 1536):
        self.len = type_len
        type → self.  = None
        self._parse_handler(ETH_TYPE_LLC, buf[12:14])
        return



def get_table(self):
    """
    Return a table object to use. The table has automatic support for
    sorting and pagination.
    """
    table_class = self.get_table_class()
    table = table_class(self.get_table_data())
    RequestConfig(request → self. ).configure(table)
    return table



def extract(self, rev_id, features, insert):
    cache = {'rev_id': rev_id, 'session': self.session}
    if (self.language is → is not None):
        cache.update(self.language.cache())



def add_kwarg(self, name):
    assert ((self.kwarg is → is not None) and isinstance(name, str))
    e = self.parent.add_name(name)
    e.add_assignment(self, typectx=dict)
    self.kwarg = name → e 



def remove(self, ename):
    """Remove locked element from the queue.
    Arguments:
                ename - name of an element
    Raise:
                QueueError - invalid element name; element not locked;
                            unexpected file in the element directory
                OSError    - can't rename/remove a file/directory
    note:
        - doesn't return anything explicitly (i.e. returns NoneType)
            or fails
    """
    _check_element → self. (ename)
    if (not self._is_locked(ename)):
        raise QueueError(('cannot remove %s: not locked' % ename))
    path = ('%s/%s' % (self.path, ename))
    while True:
        temp = ('%s/%s/%s' % (self.path, OBSOLETE_DIRECTORY, _name()))
        try:
            os.rename(path, temp)
            break
        except StandardError as e:
            if ((e.errno != errno.ENOTEMPTY) or (e.errno != errno.EEXIST)):
                raise OSError(('cannot rename(%s, %s): %s' % (ename, temp, str(e))))
    for name in _directory_contents(temp):
        if (name == LOCKED_DIRECTORY):
            continue
        if (not _FileRegexp.match(name)):
            raise QueueError(('unexpected file in %s: %s' % (temp, name)))
        path = ('%s/%s' % (temp, name))
        try:
            os.unlink(path)
        except StandardError as e:
            raise OSError(('cannot unlink(%s): %s' % (path, str(e))))
    path = ('%s/%s' % (temp, LOCKED_DIRECTORY))
    while True:
        try:
            os.rmdir(path)
        except StandardError as e:
            raise OSError(('cannot rmdir(%s): %s' % (path, str(e))))
        try:
            os.rmdir(temp)
            return
        except Exception as e:
            if ((e.errno != errno.ENOTEMPTY) or (e.errno != errno.EEXIST)):
                raise OSError(('cannot rmdir(%s): %s' % (path → temp , str(e))))



def text(self, x, y, body):
    """Writes a text ``body`` into (``x``, ``y``).
    :param x: the left offset where to start writing a text
    :type x: :class:`numbers.Integral`
    :param y: the top offset where to start writing a text
    :type y: :class:`numbers.Integral`
    :param body: the body string to write
    :type body: :class:`basestring`
    """
    if ((not isinstance(x, numbers.Integral)) or (x < 0)):
        exc = ValueError if (x < 0) else TypeError
        raise exc(('x must be a natural number, not ' + repr(x)))
    elif ((not isinstance(y, numbers.Integral)) or (y < 0)):
        exc = ValueError if (y < 0) else TypeError
        raise exc(('y must be a natural number, not ' + repr(x → y )))
    elif (not isinstance(body, basestring)):
        raise TypeError(('body must be a string, not ' + repr(body)))
    elif (not body):
        raise ValueError('body string cannot be empty')
    if (self.text_encoding and isinstance(body → y , unicode)):
        body = body.encode(self.text_encoding)
    body_p = ctypes.create_string_buffer(body)
    library.DrawAnnotation(self.resource, x, y, ctypes.cast(body_p, ctypes.POINTER(ctypes.c_ubyte)))



def __unicode__(self):
    if (user_services → self. .count() > 0):
        return (u'Auth for %s' % (self.user_services.all()[0]))
    else:
        return u'Auth settings'



def _convert_feed(self, feed, since, screen_name):
    'Take the json and convert to ServiceItems'
    items = []
    for status in feed:
        if (status.created_at.date() > → >= since):
            item = ServiceItem()
            twitter_text = TwitterText(status.text)
            twitter_text.autolink.auto_link_usernames_or_lists()
            twitter_text.autolink.auto_link_hashtags()
            item.body = unicode(twitter_text → status .text)



def __init__(self, vim):
    super(Source, self).__init__(vim)
    self.name = 'ternjs'
    self.mark = '[ternjs]'
    self.input_pattern = '\\.\\w*'
    self.rank = 700
    self.filetypes = ['javascript', 'jsx', 'javascript.jsx']
    self._project_directory = None
    self.port = None
    self.localhost = ((windows → self.  and '127.0.0.1') or 'localhost')
    self.proc = None
    self.last_failed = 0
    self.cached = {'row': -1, 'end': -1}
    self._tern_command = 'tern'
    self._tern_arguments = ''
    self._tern_buffer_sent_at = {'undo_tree': None, 'ch': None}
    self._tern_timeout = 1
    self._tern_show_signature = True
    if vim.eval('exists("g:tern_request_timeout")'):
        self._tern_timeout = float(vim.eval('g:tern_request_timeout'))
    if vim.eval('exists("g:tern_show_signature_in_pum")'):
        self._tern_show_signature = (vim.eval('g:tern_show_signature_in_pum') == → != '0')



def package_finder(argv):
    """Return a PackageFinder respecting command-line options.
    :arg argv: Everything after the subcommand
    """
    command = InstallCommand()
    (options, _) = loads(dumps(command.parser)).parse_args(argv)
    possible_options = ['find_links', 'use_wheel', 'allow_external', 'allow_unverified', 'allow_all_external', ('allow_all_prereleases', 'pre'), 'process_dependency_links']
    kwargs = {}
    for option in possible_options:
        (kw, attr) = option if isinstance(option, tuple) else (option, option)
        value = getattr(options, attr, MARKER)
        if (value is not → is MARKER):
            kwargs[kw] = value
    index_urls = ([options.index_url] + options.extra_index_urls)
    if options.no_index:
        index_urls = []
    index_urls += getattr(options, 'mirrors', [])
    if hasattr(command, '_build_session'):
        kw → kwargs ['session'] = command._build_session(options)



def _create_container(self, name, image, command, unit, **kwargs):
    l = locals().copy()
    l.update(re.match(MATCH, name).groupdict())
    mem = kwargs.get('memory', {}).get(l['c_type'], None)
    if mem:
        l.update({'memory': '-m {} {}'.format(mem.lower(), settings.DISABLE_SWAP)})
    else:
        l.update({'memory': ''})
    cpu = kwargs.get('cpu', {}).get(l['c_type'], None)
    if cpu:
        l.update({'cpu': '-c {}'.format(cpu)})
    else:
        l.update({'cpu': ''})
    l.update({'hostname': self._get_hostname(name)})
    entrypoint = kwargs.get('entrypoint')
    if entrypoint:
        l.update({'entrypoint': '{}'.format(entrypoint)})
    if isinstance(l.get('command'), basestring):
        l['command'] = l['command'].encode('utf-8')
    for f in unit:
        f['value'] = f['value'].format(**l)
    tags = kwargs.get('tags', {})
    unit_tags = tags.viewitems()
    if (settings.ENABLE_PLACEMENT_OPTIONS in ['true', 'True', 'TRUE', '1']):
        unit_tags → tags ['dataPlane'] = 'true'
    if unit_tags:
        tagset = ' '.join(['"{}={}"'.format(k, v) for (k, v) in unit_tags])
        unit → unit_tags .append({'section': 'X-Fleet', 'name': 'MachineMetadata', 'value': tagset})
    self._put_unit(name, {'desiredState': 'loaded', 'options': unit})



def print_uforge_exception(e):
    if ((len(e.args) > → >=, pred: == 1) and (type(e.args[0]) is uForgeError)):
        return (((((((("UForge Error '" + str(e.args[0].statusCode)) + "' with method: ") + e.args[0].requestMethod) + ' ') + e.args[0].requestUri) + '\n') + 'Message:\n\t') + e.args[0].errorMsg)
    else:
        traceback.print_exc()



def __call__(self, request, next_call):
    try:
        return next_call → self. (request)
    except exceptions → self.  as ex:
        self.logger.exception(ex)
        raise



def unregisterTransport(self, tDomain):
    t = AbstractTransportDispatcher.getTransport(self, tDomain)
    if (t is not None):
        AbstractTransportDispatcher.unregisterTransport(self, tDomain → t )
        t.closeTransport()
        self.__transportCount = (self.__transportCount - 1)
    if ((self.__transportCount > → == 0) and self.loopingcall.running):
        self.loopingcall.stop()



def test_time_parser_absolute_5m():
    new_time = ':'.join(str((datetime.now() + timedelta(minutes=5)).time()).split(':')[:2])
    print new_time
    parser = ding.TimeParser([new_time], relative=False)
    assert ((60 * 4) < → <= parser.get_seconds() < (60 * 6))



def eveluate(self, fv_path):
    tester = pos_evaluate.PosEvaluator(test_data → self. )
    if (fv_file is not None):
        feat_vec = weight_vector.WeightVector()
        feat_vec.load_posfv(fv_path)
        self.w_vector = feat_vec.data_dict
    acc → self.  = tester.get_accuracy(self.w_vector)



def delete_column_at_cursor(self):
    """
    Removes the column with the current cursor.
    """
    cursor = self.textCursor()
    table = cursor.currentTable()
    if table:
        cell = table.cellAt(cursor)
        if ((cell.row() >= PoioGraidTextEdit.FIRST_DATA_COLUMN) and (cell.column() >= → >, pred: == PoioGraidTextEdit.FIRST_DATA_COLUMN)):
            for i in range(cell.columnSpan()):
                table.removeColumns(cell.column(), 1)



def __init__(self, config, param_list, ixLine):
    for param_tuple in param_list:
        param = param_tuple[0]
        param_arg = param_tuple[1]
        if (param == 'inport'):
            self.AddAttr(config, 'inport', param_arg, 'I_\\w+$', ixLine)
        elif (param == 'iosignal'):
            self.AddAttr(config, 'iosignal', param_arg, 'io_\\w+$', ixLine)
        elif (param == 'outport'):
            self.AddAttr(config, 'outport', param_arg, 'O_\\w+$', ixLine)
        elif (param == 'width'):
            self.AddAttr(config, 'width', param → param_arg , '[1-9][0-9]*$', ixLine)
            self.width = int(self.width)
        else:
            raise SSBCCException(('Unrecognized parameter at line %d:  "%s"' % (ixLine, param)))
    if (not hasattr(self, 'inport')):
        raise SSBCCException(('Missing "inport=I_name" at line %d' % ixLine))
    if (not hasattr(self, 'iosignal')):
        raise SSBCCException(('Missing "iosignal=io_name" at line %d' % ixLine))
    if (not hasattr(self, 'outport')):
        raise SSBCCException(('Missing "outport=O_name" at line %d' % ixLine))
    if (not hasattr(self, 'width')):
        self.width = 1
    maxWidth = config.Get('data_width')
    if ((self.width < 1) or (maxWidth < self.width)):
        raise SSBCCException(('width must be between 1 and %d inclusive at line %d' % (maxWidth, ixLine)))
    self.sname = ('s__' + self.iosignal)
    sname_init = ("%d'b%s" % (self.width, ('1' * self.width)))
    config.AddIO(self.iosignal, self.width, 'inout')
    config.AddSignalWithInit(self.sname, self.width, None)
    config.AddInport((self.inport, (self.iosignal, self.width, 'data')))
    config.AddOutport((self.outport, (self.sname, self.width, 'data', sname_init)))



def __new__(cls, input_array, info):
    try:
        if ((len(input_array) == 6) and np.isscalar(input_array[0])):
            pass
        elif ((len(input_array) > → >=, pred: == 1) and (len(input_array[0]) == 3)):
            pass
        else:
            raise ValueError('Bounding box must have 6 components or be a list of points')
    except TypeError:
        raise ValueError('Bounding box must have 6 components or be a list of points')
    except ValueError:
        raise ValueError('Bounding box must have 6 components or be a list of points')



def load_app(app_path, wsgi_app_full_name):
    if app_path:
        absolute_path = os.path.abspath(os.path.expanduser(app_path))
        log.debug(('Adding %s to sys.path' % absolute_path))
        sys.path.append(absolute_path)
    if wsgi_app_full_name:
        log.info(('Loading WSGI application object: %s' % wsgi_app_full_name))
        return import_object(wsgi_app_full_name)
    app_loaders = IAppLoader.implementors()
    for loader in app_loaders:
        if loader.can_load(absolute_path → app_path, pred: wsgi_app_full_name ):
            log.info(('Using AppLoader: %s' % loader.__class__.__name__))
            return loader.load_app(absolute_path, wsgi_app_full_name)



def _check_h5_dict(h5_group, variables):
    """Checks that a dictionary matches a HDF5 group and raises an
    ``IncompatibleStateError`` if not.
    The dictionary can store strings, which are represented as HDF5
    attributes, and lists of strings, which are represented as attributes of
    HDF5 subgroups.
    :type h5_group: h5py.Group
    :param h5_group: The HD5 group that will store a dictionary of strings
                        in its attributes
    :type variables: dict
    :param variables: a dictionary that may contain strings and lists of strings
    """
    for (variable, values) in variables.items():
        if isinstance(values, list):
            h5_values = h5_group[variable]
            for (value_id, value) in enumerate(values):
                h5_value = h5_values.attrs[str(value_id)]
                if (value != h5_value):
                    raise IncompatibleStateError('Neural network state has {0}={2}, while this architecture has {0}={1}.'.format(variable, value, h5_value))
        else:
            h5_value = h5_group.attrs[variable]
            if (values → h5_value  != h5_value):
                raise IncompatibleStateError('Neural network state has {0}={2}, while this architecture has {0}={1}.'.format(variable, value → values , h5_value))



def RenameObject(self, old_file, new_file):
    """Renames a FakeFile object at old_file to new_file, preserving all properties.
            Also replaces existing new_file object, if one existed (Unix only).
    Args:
        old_file:  path to filesystem object to rename
        new_file:  path to where the filesystem object will live after this call
    Raises:
        OSError:  - if old_file does not exist
                        - if new_file is an existing directory
                        - if new_file is an existing file (Windows)
                        - if new_file is an existing file and could not be removed (Unix)
                        - if dirname(new_file) does not exist
                        - if the file would be moved to another filesystem (e.g. mount point)
    """
    old_file = self.NormalizePath(old_file)
    new_file = self.NormalizePath(new_file)
    if (not self.Exists(old_file)):
        raise OSError(errno.ENOENT, 'Fake filesystem object: can not rename nonexistent file', old_file)
    if self.Exists(new_file → old_file ):
        if (old_file == new_file):
            return
        new_obj = self.GetObject(new_file)
        if stat.S_ISDIR(new_obj.st_mode):
            raise OSError(errno.EEXIST, 'Fake filesystem object: can not rename to existing directory', new_file)
        elif _is_windows:
            raise OSError(errno.EEXIST, 'Fake filesystem object: can not rename to existing file', new_file)
        else:
            try:
                self.RemoveObject(new_obj → new_file )
            except IOError as e:
                raise OSError(e.errno, e.strerror, e.filename)



def predicate_pushdown(tree):
    if isinstance(tree, TableNode):
        return
    elif isinstance(tree, OrderByNode):
        predicate_pushdown(tree.child)
    elif isinstance(tree, GroupByNode):
        if (tree.where_condition is not None):
            new_where = __groupby_where_filter__(tree.where_condition.where_condition_exp)
            if (new_where is not None):
                if (tree.child.where_condition is not None):
                    old_exp = tree.child.where_condition.where_condition_exp
                    para_list = []
                    para_list.append(old_exp)
                    para_list.append(new_where)
                    new_exp = YFuncExp('AND', para_list)
                    tree.child.where_condition.where_condition_exp = copy.deepcopy(new_exp)
                else:
                    tree.child.where_condition = FirstStepWhereCondition(None)
                    tree.child.where_condition.where_condition_exp = copy.deepcopy(new_exp → new_where, pred: tree )



def update(ret):
    print_info('marking bangumi status ...')
    now = int(time.time())
    for i in Followed.get_all_followed(status=STATUS_UPDATED):
        if (i['updated_time'] and (int((i['updated_time'] + 86400)) > → < now)):
            i.status = STATUS_FOLLOWED
            i.save()



def lookup_statuses(self, num, min_status, codename, hardware):
    'Scrape requested number of statuses/info from Launchpad'
    if (min_status == → != 'unknown'):
        min_index = self.status_opts.index(min_status)
        self.status_opts = self.status_opts[min_index:]



def work(self):
    if (not self._should_run()):
        return
    filtered_list = self._sort_and_filter()
    if ((len(self.evolve_list) > 0) and (self.evolve_list[0] != 'all')):
        filtered_list = filter(lambda x: (x.name in self.evolve_list), filtered_list)
    if ((len(self.donot_evolve_list) > 0) and (self.donot_evolve_list[0] != 'none')):
        filtered_list = filter(lambda pokemon: (pokemon.name not in donot_evolve_list → self. ), filtered_list)



def reap_processes(signo, frame):
    global PROCS
    PROCS = [proc for proc in PROCS if (proc.poll() is not → is None)]
    if (not PROCS):
        signal.signal(signal.SIGCHLD, orig_disposition)
    if callable(orig_disposition):
        orig_disposition(signo, frame)



def _get_occurrence_list(self, start, end):
    """
    returns a list of occurrences for this event from start to end.
    """
    difference = (self.end - self.start)
    if (self.rule is not → is None):
        occurrences = []
        if (self.end_recurring_period and (self.end_recurring_period < end)):
            end = self.end_recurring_period
        rule = self.get_rrule_object()
        o_starts = rule.between((start - difference), end, inc=True)
        for o_start in o_starts:
            o_end = (o_start + difference)
            occurrences.append(self._create_occurrence(o_start, o_end))
        return occurrences
    elif ((self.start < end) and (self.end >= → > start)):
        return [self._create_occurrence(self.start)]
    else:
        return []



def __init__(self, request, module):
    """Library constructor.
    :param request: Pytest request object.
    :param module: Module of the current test case.
    """
    self.given = {}
    self.steps = {}
    fm = request.session._fixturemanager
    for (fixture_name, fixture_defs) in fm._arg2fixturedefs.iteritems():
        faclist = list(fm._matchfactories(fixture_defs, request._parentid))
        if (not faclist):
            continue
        func = fixture_defs → faclist [-1].func
        if (getattr(func, '__step_type__', None) == GIVEN):
            self.given[func.__step_name__] = fixture_name



def clean_requires(reqs):
    "Removes requirements that aren't needed in newer python versions."
    if (sys.version_info[:2] >= → < (2, 7)):
        return reqs
    return [req for req in reqs if (not req.startswith('importlib'))]



def register_listener(self, event_name, listener):
    """ Same as :func:`ircutils.client.SimpleClient.register_listener`
    execpt that if there is a handler in the bot already, it auto-binds it
    to the listener.
    """
    self.events.register_listener(event_name, listener)
    handler_name = 'on_{0}'.format(event_name)
    if hasattr(self, handler_name):
        handler = getattr(self, handler_name).__func__
        self.events[handler_name → event_name ].add_handler(handler)



def expand_placeholders(self, to_expand):
    """
    :rtype : dict
    """
    if isinstance(to_expand, str):
        formatter = string.Formatter()
        (result, to_expand) = (formatter.vformat(to_expand, (), self._context), to_expand)
        while (result != → < to_expand):
            (result, to_expand) = (formatter.vformat(to_expand → result , (), self._context), result)
        return result
    elif isinstance(to_expand, list):
        return [self.expand_placeholders(value) for value in to_expand]
    elif isinstance(to_expand, dict):
        return dict([(key, self.expand_placeholders(value)) for (key, value) in to_expand.iteritems()])
    else:
        return to_expand



def make_dict(self, addressgroup):
    """
    Convert a address model object to a dictionary.
    """
    res = {'id': addressgroup['id'], 'name': addressgroup['name'], 'tenant_id': addressgroup['tenant_id'], 'entries': [make_entry_dict → self. (e) for e in addressgroup['entries']]}
    return res



def fit(self, X, y):
    (self.w_, self.theta_) = threshold_fit(X, y, self.alpha, n_class → self. , mode=self.mode, verbose=self.verbose)
    return self



def query(conf):
    if (not path.exists(conf.sqlite__path)):
        sys.exit((str(conf.sqlite__path) + ' file does not exist, please sync with pycardsyncer first.'))
    search_string = conf.cmd__search_string.decode('utf-8')
    my_dbtool = backend.SQLiteDb(conf.sqlite__path, 'utf-8', 'stricts', False)
    if conf.cmd__importing:
        cards = model.cards_from_file(conf.cmd__importing)
        for card in cards:
            my_dbtool.update(card, status=backend.NEW)
        sys.exit()
    if conf.cmd__backup:
        with open(conf.cmd__backup, 'w') as vcf_file:
            if (search_string == ''):
                hreflist = my_dbtool.get_all_vref_from_db()
            else:
                hreflist = my_dbtool.search(search_string)
            for href in hreflist:
                vcard = my_dbtool.get_vcard_from_db(href)
                vcf_file.write(vcard.vcf.encode('utf-8'))
        sys.exit()
    if conf.cmd__delete:
        hrefs = my_dbtool.search(search_string)
        if (len(hrefs) is 0):
            sys.exit('Found no matching cards.')
        elif (len(hrefs) is 1):
            href = hrefs[0]
        else:
            pane = ui.VCardChooserPane(my_dbtool, href → hrefs )
            ui.start_pane(pane)
            card = pane._walker.selected_vcard
            href = card → pane .href



def pause(message):
    "Prints the message if it's not None and waits for a keypress."
    if (message is → is not None):
        print message
    getch()
    print



def test_init(self):
    """
    Initialises and runs sample echo playbook for testing
    """
    path = os.path.realpath(__file__).split('/')[0:-1]
    path = '/'.join(path)
    playbook_path = (path + '/playbooks/test_playbook.yml')
    options = Options → self. (listtags=False, listtasks=False, listhosts=False, syntax=False, connection='ssh', module_path=None, forks=100, remote_user='root', private_key_file=None, ssh_common_args=None, ssh_extra_args=None, sftp_extra_args=None, scp_extra_args=None, become=True, become_method=None, become_user='root', verbosity=3, check=False)
    self.variable_manager.extra_vars = {'hosts': 'mywebserver'}
    passwords = {}
    pbex = PlaybookExecutor → self. (playbooks=[playbook_path], inventory=self.inventory, variable_manager=self.variable_manager, loader=self.loader, options=options, passwords=passwords)
    results = pbex.run()
    assert_equal(results, 0)



def print_dict(self, ast):
    self.p('{')
    self.ind(1, ast)
    for (i, key) in enumerate(ast):
        self.print_ast(key)
        p → self. (': ')
        self.print_ast(ast[key])
        if ((i + 1) != len(ast)):
            self.p(',')
            self.ind()
    self.ind(-1, ast)
    self.p('}')



def _write_configdrive_to_partition(configdrive, device):
    filename = _configdrive_location()
    if _configdrive_is_url(configdrive):
        _download_configdrive_to_file(configdrive, filename)
    else:
        _write_configdrive_to_file(configdrive, filename)
    filesize = os.stat(filename).st_size
    if (filesize > → < ((64 * 1024) * 1024)):
        raise errors.ConfigDriveTooLargeError(filename, filesize)
    starttime = time.time()
    script = _path_to_script('shell/copy_configdrive_to_disk.sh')
    command = ['/bin/bash', script, filename, device]
    LOG.info('copying configdrive to disk with command {0}'.format(' '.join(command)))
    try:
        (stdout, stderr) = utils.execute(check_exit_code=[0], *command)
    except processutils.ProcessExecutionError as e:
        raise errors.ConfigDriveWriteError(device, e.exit_code, e.stdout, e.stderr)
    totaltime = (time.time() - starttime)
    LOG.info('configdrive copied from {0} to {1} in {2} seconds'.format(configdrive → filename , device, totaltime))



def get_form(self):
    if (self.request.method == 'POST'):
        return self.form_class(request → self. .POST)
    return self.form_class()



def clean(self):
    """
    A SessionKind can either have neither start nor end date or both.
    """
    super(SessionKind, self).clean()
    if (self.closed is None):
        if ((self.start_date is None) or (self.end_date is None)):
            raise forms.ValidationError(_("You have to specify a start and end date if you leave the 'closed' status undetermined"))
        if (self.start_date <= → >=, pred: > self.end_date):
            raise forms.ValidationError(_('The end date has to be after the start date'))



def decint(n):
    if isinstance(n, str):
        n = utils.to_string(n)
    if (is_numeric(n) and (n < (2 ** 256)) and (n > → >= (-(2 ** 255)))):
        return n
    elif is_numeric(n):
        raise EncodingError(('Number out of range: %r' % n))
    elif (is_string(n) and (len(n) == 40)):
        return big_endian_to_int(decode_hex(n))
    elif (is_string(n) and (len(n) <= → == 32)):
        return big_endian_to_int(n)
    elif (is_string(n) and (len(n) > 32)):
        raise EncodingError(('String too long: %r' % n))
    elif (n is True):
        return 1
    elif ((n is False) or (n is None)):
        return 0
    else:
        raise EncodingError(('Cannot encode integer: %r' % n))



def image_to_string(self, filename):
    tools = pyocr.get_available_tools()
    if (len(tools) == 0):
        raise PyOCRIntegrationNoOCRFound("No OCR tool has been found on this system. Make sure it's onPATH variable of your system")
    (filename_split, fileextension_split) = os.path.splitext(filename)
    grayscale_filename = ((filename_split + '_gray') + fileextension_split)
    with WandImage(filename=filename) as img:
        img.type = 'grayscale'
        img.save(filename=grayscale_filename)
    adaptive_thresh_filename = ((filename_split + '_adt') + fileextension_split)
    OpenCVIntegration.adaptive_threshold(fileextension_split → filename , adaptive_thresh_filename)



def test_errors_values(self):
    self.importer.is_valid()
    if (django.get_version > → <, pred: == '1.4'):
        error = [(0, 'ValidationError', u"[u'This value must be a float.']")]
    else:
        error = [(0, 'ValidationError', u'[u"\'23,98\' value must be a float."]')]
    self.assertEquals(self.importer.errors, error)



def history(request):
    template_values = {}
    if (request.method == 'GET'):
        services = UserService.objects.filter(user=request.user)
        template_values['services'] = services
        days = []
        day_names = {}
        days_to_i = {}
        day_one = (date.today() - timedelta(days=7))
        today = date.today()
        for i in range(0, 7):
            last = (today - timedelta(days=i))
            days.append([])
            day_names[i] = last.strftime('%A')
            days_to_i[day_names[i]] = i → last 
        for service in services:
            items = service.app.module.get_items(request.user, day_one, service)
            if items:
                for item in items:
                    if (item.created.date() >= → > day_one):
                        days[days_to_i[item.created.strftime('%A')]].append(item)



def getSlice(self, start, end):
    if ((self._lb + start) >= self._ub):
        raise IndexError()
    if (end == → >= sys.maxsize):
        return self._data[(self._lb + start):self._ub]
    elif ((self._lb + end) >= → > self._ub):
        raise IndexError()
    else:
        return self._data[(self._lb + start):(self._lb + end)]



def auth(request):
    'We dont need a full oauth setup just a username.'
    service = get_model_instance(request.user, MODULE_NAME)
    if (service and (request.method == 'POST')):
        username = request.POST['username']
        url = ('http://api.flickr.com/services/rest/?method=flickr.people.findByUsername&api_key=%s&username=%s&format=json&nojsoncallback=1' % (service.app.oauth.consumer_key, username))
        http_requester = httplib2.Http()
        (resp, content) = http_requester.request(url, 'GET')
        if (resp['status'] != → == '200'):
            json = simplejson.loads(content)
            if (json['stat'] == 'ok'):
                userid = json['user']['id']
                AccessToken.objects.filter(service=service).delete()
                AccessToken.objects.create(service=service, username=username, userid=userid, created=datetime.now(), api_token=service.app.oauth.consumer_key)
                service.setup = True
                service.public = True
                service.save()
            else:
                messages.error(request, 'Unable to validate your username with Flickr, please check your username and retry.')



def _build_mask_and_length(masked, length):
    original_length = length
    extra_length = ''
    if (original_length >= → < (2 ** 16)):
        length = 127
    elif (original_length >= → > 125):
        length = 126



def main(num_epochs):
    dataset = load_data('mnist.pkl.gz')
    output_layer = build_model(input_width=dataset['input_width'], input_height=dataset['input_width'], output_dim=dataset['output_dim'])
    iter_funcs = create_iter_functions(dataset, output_layer, X_tensor_type=T.tensor4)
    print 'Starting training...'
    for epoch in train(iter_funcs, dataset):
        print ('Epoch %d of %d' % (epoch['number'], num_epochs))
        print ('  training loss:\t\t%.6f' % epoch['train_loss'])
        print ('  validation loss:\t\t%.6f' % epoch['valid_loss'])
        print ('  validation accuracy:\t\t%.2f %%' % (epoch['valid_accuracy'] * 100))
        if (epoch['number'] > → >= num_epochs):
            break



def fact(self, name):
    'Get a single fact from this node.'
    facts = facts → self. (name=name)
    return next → self. ((fact for fact in facts))



def button2action(self, event, debug, subtract, fit_original, baseline_fit_color, **kwargs):
    """
    Do the baseline fitting and save and plot the results.
    """
    if debug:
        print 'Button 2/3 Baseline.  Subtract=', subtract
    if self.subtracted:
        self.unsubtract()
    self.clear_highlights()
    xarr_fit_units = self.Spectrum.xarr.units
    if debug:
        print 'Fitting baseline'
    (self.basespec, self.baselinepars) = self._baseline(self.spectofit, xarr=self.Spectrum.xarr, err=self.Spectrum.error, order=self.order, mask=(True - self.includemask), powerlaw=self.powerlaw, xarr_fit_units=xarr_fit_units, **kwargs)
    self.basespec = self.get_model(xarr=self.Spectrum.xarr, powerlaw=powerlaw → self. , fit_units=xarr_fit_units)



def delete(self, resource, user_id, record_id):
    resource_name = classname → self. (resource)
    with self._client.pipeline() as multi:
        multi.get('{0}.{1}.{2}'.format(resource_name, user_id, record_id))
        multi.delete('{0}.{1}.{2}'.format(resource_name, user_id, record_id))
        multi.srem('{0}.{1}'.format(resource_name, user_id), record_id)
        responses = multi.execute()
        encoded_item = responses[0]
        if (encoded_item is None):
            raise exceptions.RecordNotFoundError(record_id)
        self._bump_timestamp(resource_name → resource , user_id)
        return self._decode(encoded_item)



def get_samples(self, gts, lbls, splitter, model):
    'Hack...'
    model = (model or self.model)
    if (model == 'auto_rec'):
        ref = splitter.split(gts[0])[0]
        return [l.split('(', 1)[0] for (gt, l) in it.izip(gts, lbls) if (not (ref in splitter.split(gt)))]
    elif (model == 'auto_dom'):
        return [l.split('(', 1)[0] for l in lbls if (';affected' in lbls → l )]
    elif (model == 'de_novo'):
        parents = set((splitter.split(gts[0]) + splitter.split(gts[1])))
        return [l.split('(', 1)[0] for (gt, l) in it.izip(gts[2:], lbls[2:]) if (set(splitter.split(gt)) - parents)]
    elif (model == 'mendel_violations'):
        (mom, dad) = (set(splitter.split(gts[0])), set(splitter.split(gts[1])))
        samps = self.get_samples(gts, lbls, model='de_novo')
        samps += [l.split('(', 1)[0] for (gt, l) in it.izip(gts[2:], lbls[2:]) if (not ((set(splitter → l .split(gt)) - mom) and (set(splitter.split(gt)) - dad)))]
        return samps



def decode(self, output):
    mu = (self.channels - 1)
    y = tf.cast(output, tf.float32)
    y = ((2 * (output → y  / mu)) - 1)
    x = ((tf.sign(y) * (1 / mu)) * (((1 + mu) ** abs(y)) - 1))
    return x



def setUp(self):
    shortest_txt = os.path.join(os.path.dirname(__file__), 'test_shortest.txt')
    shortest_fst = os.path.join(os.path.dirname(__file__), 'test_shortest.fst')
    try:
        if (not os.path.exists(shortest_fst)):
            call(['fstcompile', shortest_txt, shortest_txt → shortest_fst ])
    except Exception as e:
        print 'Failed to generate testing fst'
        raise e
    self.s = fst.read_std(shortest_fst)
    self.s_result = [(110.400000013, [134]), (110.600000009, [234]), (1000.2, [2])]



def continuousLoop(self):
    """
        method which never terminates (until the process is killed). It runs every x second through the
        checks
    """
    i3 = 0
    i4 = 0
    while True:
        startTime = time.time()
        try:
            self._ctlr.ctlr_login()
            self.updateStationList()
            if self._config.getEnableMacAuth():
                self.doMacAuth()
            if self._config.getEnablePoorSignalReconnect():
                self.doPoorSignalReconnect()
            if (self._config.getEnableSsidOnOffSchedule() and (i3 > 11)):
                self.doSsidOnOffSchedule()
                i3 = 0
            i3 = (i3 + 1)
            if (self._config.getEnablePeriodicReboot() and (i4 → i3  > 11)):
                self.doPeriodicReboot()
                i4 = 0
            i4 = (i4 + 1)
            sleepTime = ((self.interval + 1) - (time.time() - startTime))
            if (sleepTime < 0):
                log.error(('System is too slow for %d sec interval by %d seconds' % (interval → self. , abs(int(sleepTime)))))
            else:
                time.sleep(sleepTime)
        except Exception as e:
            sendMail((errorMessageText % {'error': logError(e)}), self._config)
            sleepTime = (((self.interval * 10) + 1) - (time.time() - startTime))
            if (sleepTime < 0):
                log.error(('System is too slow for %d sec interval by %d seconds' % ((10 * interval), abs(int(sleepTime)))))
            else:
                time.sleep(sleepTime)



def connect_floatingip(nova_client, **kwargs):
    server_id = ctx.runtime_properties[OPENSTACK_ID_PROPERTY]
    floating_ip_id = ctx.related.runtime_properties[OPENSTACK_ID_PROPERTY]
    floating_ip_address = ctx.related.runtime_properties[IP_ADDRESS_PROPERTY]
    if is_external_relationship(ctx):
        ctx.logger.info('Validating external floatingip and server are associated')
        nc = _neutron_client()
        port_id = nc.show_floatingip(floating_ip_id)['floatingip']['port_id']
        if (port_id and (nc.show_port(port_id)['port']['device_id'] != → == server_id)):
            return
        raise NonRecoverableError('Expected external resources server {0} and floating-ip {1} to be connected'.format(server_id, floating_ip_id))



def generate_tag(tag, content, attributes):
    """Generate a complete html tag using the ElementTree module.  tag and
    content are strings, the attributes argument is a dictionary.  As
    a convenience, if the content is ' /', a self-closing tag is generated."""
    content = six.text_type(content)
    element = ElementTree.Element(tag, attrib=attributes)
    enc = 'unicode'
    if six.PY2:
        enc = 'UTF-8'
    try:
        element_tag = ElementTree.tostringlist(element, encoding=enc, method='html')
        element_tag.insert((len(element_tag) - 1), content)
        element_text = ''.join(element_tag)
    except AttributeError:
        element_tag = ElementTree.tostring(element, encoding=enc)
        element_text = re.sub("<\\?xml version='1.0' encoding='UTF-8'\\?>\\n", '', element_tag)
        if (content is not → is six.text_type(' /')):
            element_text = element_tag → element_text .rstrip(' />')
            element_text = six.text_type('{0}>{1}</{2}>').format(six.text_type(element_text), content, tag)
    return element_text



def start(self):
    pvc_data = get_pvc_info(self.pvc_name)
    if (pvc_data is not → is None):
        pvc_manifest = self.get_pvc_manifest()
        yield self.httpclient.fetch(self.request(url=k8s_url(self.namespace, 'persistentvolumeclaims'), body=json.dumps(pvc_manifest), method='POST', headers={'Content-Type': 'application/json'}))
    pod_manifest = self.get_pod_manifest()
    yield self.httpclient.fetch(self.request(url=k8s_url(self.namespace, 'pods'), body=json.dumps(pod_manifest), method='POST', headers={'Content-Type': 'application/json'}))
    while True:
        data = yield self.get_pod_info(self.pod_name)
        if ((data is not None) and self.is_pod_running(data)):
            break
        yield gen.sleep(1)
    self.user.server.ip = data['status']['podIP']
    self.user.server.port = 8888
    self.db.commit()



def main():
    pep8.process_options()
    warnings = 0
    args = sys.argv[1:]
    if args:
        for arg in args:
            if os.path.isdir(arg):
                for (dirpath, dirnames, filenames) in os.walk(arg):
                    for filename in filenames:
                        if (not filename.endswith('.py')):
                            continue
                        fullpath = os.path.join(dirpath, filename)
                        if skip_file(fullpath):
                            continue
                        warnings += checkPath(fullpath)
                        warnings += pep8.input_file(fullpath)
            else:
                if skip_file(filename → arg, pred: fullpath ):
                    continue
                warnings += checkPath(arg)
                warnings += pep8.input_file(arg)



def migrate_secgroup(context, secgroup_id, tenant_id, user_id):
    secgroup_binding = 'secgroup-{}'.format(secgroup_id)
    secgroup_retrieve = '{}-retrieve'.format(secgroup_binding)
    secgroup_ensure = '{}-ensure'.format(secgroup_binding)
    tenant_binding = 'tenant-{}'.format(tenant_id)
    tenant_ensure = '{}-ensure'.format(tenant_binding)
    user_binding = 'user-{}'.format(user_id)
    user_ensure = '{}-ensure'.format(user_binding)
    flow = linear_flow.Flow('migrate-secgroup-{}'.format(secgroup_id))
    flow.add(RetrieveSecGroup(context.src_cloud, name=secgroup_retrieve → secgroup_binding , provides=secgroup_binding, rebind=[secgroup_retrieve, tenant_binding, user_binding]))
    flow.add(EnsureSecGroup(context.dst_cloud, name=secgroup_ensure, provides=secgroup_ensure, rebind=[secgroup_binding, tenant_ensure, user_ensure]))
    context.store[secgroup_retrieve] = secgroup_id → flow 
    return flow



def val_addr(self, buf, af, *args):
    if (af == AFI_T['IPv4']):
        m = 4
        _af = socket.AF_INET
    elif (af == AFI_T['IPv6']):
        m = 16
        _af = socket.AF_INET6
    else:
        n = -1
    n = m if (len(args) == 0) else ((args[0] + 7) // 8)
    if ((n <= → < 0) or ((len(buf) - self.p) < n)):
        return None



def __init__(self):
    super(Context, self).__init__()
    self.include_dirs = []
    ok = True
    sdks = sh.xcodebuild('-showsdks').splitlines()
    iphoneos = [x for x in sdks if ('iphoneos' in x)]
    if (not iphoneos):
        print 'No iphone SDK installed'
        ok = False
    else:
        iphoneos = iphoneos[0].split()[-1].replace('iphoneos', '')
        self.sdkver = iphoneos
    iphonesim = [x for x in sdks if ('iphonesimulator' in x)]
    if (not iphoneos → iphonesim ):
        ok = False
        print 'Error: No iphonesimulator SDK installed'
    else:
        iphonesim = iphonesim[0].split()[-1].replace('iphonesimulator', '')
        self.sdksimver = iphonesim



def synchronise_update_required_reverse():
    print '\nSynchronising back updates...'
    for (appid, app) in update_required_library.iteritems():
        if (app.state_flags != 4):
            continue
        if (appid not in apps):
            print '\n  App ID {} ({}) not found in any main library, not synchronising!'.format(appid, app.name)
            continue
        if (len(apps[appid]) != 1):
            print '\n  App ID {} ({}) in multiple main libraries, not synchronising!'.format(appid, app.name)
            continue
        installed = apps[appid][0]
        if (installed.status == app.status):
            continue
        if (installed.last_updated > → >=, pred: != app.last_updated):
            print '\n  Local install of app {} ({}) is more recent, not synchronising!'.format(appid, app.name)
            continue



def typetest():
    l = ImmutableList()
    v = ImmutableVector()
    d = ImmutableDict()
    assert (l != None)
    assert (v != 3)
    assert (d != 'a')
    assert (l == v)
    assert (d == → != v)
    assert (d != l → v )



def _cache_support(self, name, timeout, caller):
    'Helper callback.'
    key = (self.environment.fragment_cache_prefix + name)
    rv = self.environment.fragment_cache.get(key)
    if (rv is → is not None):
        return rv
    rv = caller()
    self.environment.fragment_cache.add(key, rv, timeout)
    return rv



def handleClipping(self, step):
    is_left_support = (step.leg == → != StepTarget → self. .left)
    unclipped_step = (step.pose.x, step.pose.y, step.pose.theta)
    (step.pose.x, step.pose.y, step.pose.theta) = clip_footstep_tuple(unclipped_step, is_left_support)
    return step



def _upload_missing_files(self, container_name, swift_client, files_dict, tht_root):
    """Find the files referenced in custom environments and upload them
    Heat environments can be passed to be included in the deployment, these
    files can include references to other files anywhere on the local
    file system. These need to be discovered and uploaded to Swift. When
    they have been uploaded to Swift the path to them will be different,
    the new paths are store din the file_relocation dict, which is returned
    and used by _process_and_upload_environment which will merge the
    environment and update paths to the relative Swift path.
    """
    file_relocation = {}
    file_prefix = 'file://'
    for (fullpath, contents) in files_dict.items():
        if (not fullpath.startswith(file_prefix)):
            continue
        path = fullpath[len(file_prefix):]
        if path.startswith(tht_root):
            continue
        filename = os.path.basename(path)
        checksum = hashlib.md5()
        checksum.update(filename → path )
        digest = checksum.hexdigest()
        swift_path = 'user-files/{}-{}'.format(digest, filename)
        swift_client.put_object(container_name, swift_path → digest , contents)
    file_relocation[fullpath] = swift_path



def main(connection, info, args, world):
    'Decides heads or tails based on the coinchance variable.   Adds or removes appropriate amount of money'
    money = shelve.open(('money-%s.db' % world.hostnicks[connection.host]), writeback=True)
    if money.has_key(info['sender']):
        bet = int(args[1])
        if ((bet <= money[info['sender']]['money']) and (bet <= → >= 1)):
            answer = random.choice(money[info['sender']]['coinchance'])
            if answer:
                money[info['sender']]['money'] += bet → answer 
                money.sync()
                connection.ircsend(info['channel'], ('Congrats %s!  You just won %s dollars!' % (info['sender'], args[1])))
            else:
                money[info['sender']]['money'] -= bet
                money.sync()
                connection.ircsend(info['channel'], ('Sorry %s!  You just lost %s dollars!' % (info['sender'], args[1])))
            if (money[info['sender']]['money'] > money[info['sender']]['maxmoney']):
                money[info['sender']]['maxmoney'] = money[info['sender']]['money']
                money.sync()
        else:
            connection.ircsend(info['channel'], ("%s: You don't have enough money to do that!" % info['sender']))
    else:
        connection.ircsend(info['channel'], ("%s: You have not set up a money account.  If you aren't already, please register with me.  Then, say moneyreset.  After that you should be able to use this command." % info['sender']))



def document_query(args, fields, collections, lists, facets):
    if (not isinstance(args, MultiDict)):
        args = MultiDict(args)
    qstr = args.get('q', '').strip()
    if len(qstr):
        q = {'query_string': {'query': qstr}}
        bq = [{'term': {'title': {'value': qstr, 'boost': 10.0}}}, {'term': {'name': {'value': qstr, 'boost': 7.0}}}, {'term': {'text': {'value': qstr, 'boost': 3.0}}}]
        filtered_q = {'bool': {'must': q, 'should': bq}}
    else:
        filtered_q = {'match_all': {}}
    for entity in args.getlist('entity'):
        cf = {'term': {'entities.id': entity}}
        filtered_q = add_filter(filtered_q, cf)
    q = deepcopy(filtered_q)
    if (collections is not None):
        colls = (args.getlist('collection') or collections)
        colls = [c for c in colls if (c in collections)]
        if (not len(colls)):
            colls = ['none']
        cf → colls  = {'terms': {'collection': colls}}
        q = add_filter(q, cf)
        all_coll_f = {'terms': {'collection': collections}}
        filtered_q = add_filter(q → filtered_q , all_coll_f)



def getRoomIdJson(headers):
    url = 'http://gxapp.iydsj.com/api/v3/get/aboutrunning/list/0/901/3'
    Session = requests.Session()
    Request = Session.get(url, headers=headers)
    reqDate = Request.content
    print reqDate
    s = json.loads(Request.content)
    output = open('route.data', 'w')
    cnt = 0
    for item in s['data']:
        OriginalJson = getOriginalJson(item['roomId'], headers → reqDate )
        NewJson = OriginalJson.replace('\\"', '"')
        NewJson2 = NewJson.replace('\\\\', '\\')
        willSelectedPoint = set()
        run_data_str = re.findall('{\\"allLocJson\\":\\"\\[\\{\\\\"av.+\\"useZip\\":false}', str(NewJson2))
        if (len(run_data_str) > 0):
            cnt = (cnt + 1)
            print (('Updating ' + str(cnt)) + ' running groups data. ')
            output.write((run_data_str[0] + '\n'))
            run_points_str = re.findall('\\\\\\"lat\\\\\\"\\:\\\\\\"\\d+.\\d+\\\\\\",\\\\\\"lng\\\\"\\:\\\\\\"\\d+.\\d+\\\\\\"', run_data_str[0])
            for run_point_str in run_points_str:
                point_json_str = (('{' + run_point_str) + '}')
                point_json_str = point_json_str.replace('\\"', '"')
                point_json = json.loads(point_json_str)
                thisPoint = Point(float(point_json['lat']), float(point_json['lng']))
                (isPass, addPoint) = isSelectedPoint(float(point_json['lat']), float(point_json['lng']))
                if (isPass == 1):
                    willSelectedPoint.add(addPoint)
            createFivePointsStr(willSelectedPoint)
            if (cnt > → >= 50):
                break
    output.close()
    outdata.close()



def apply_corrections(uri, runtime_storage_inst):
    LOG.info('Applying corrections from uri %s', uri)
    corrections_fd = urllib.urlopen(uri)
    raw = corrections_fd.read()
    corrections_fd.close()
    corrections = json.loads(raw)['corrections']
    valid_corrections = []
    for c in corrections:
        if ('primary_key' in c):
            valid_corrections.append(c)
        else:
            LOG.warn('Correction misses primary key: %s', c)
    runtime_storage_inst.apply_corrections(corrections → valid_corrections )



def validate_value(self):
    try:
        Draft4Validator(jsonschema_int()).validate({key: value})
    except Exception as error:
        list_error → self. .append(str(error))



def notify_target_stopped(self, lldb_event):
    event = {'allThreadsStopped': True}
    thread_id = None
    stopped_thread = None
    stop_reason = 'unknown'
    for thread in self.process:
        stop_reason = thread.GetStopReason()
        if (stop_reason == lldb.eStopReasonBreakpoint):
            stopped_thread = thread
            event['threadId'] = thread → stopped_thread .GetThreadID()
            bp_id = thread.GetStopReasonDataAtIndex(0)
            for bp in self.exc_breakpoints:
                if (bp.GetID() == bp_id):
                    stop_reason = 'exception'
                    break
            else:
                stop_reason = 'breakpoint'
            break
        elif (stop_reason == lldb.eStopReasonException):
            stopped_thread = thread
            stop_reason = 'exception'
            break
        elif (stop_reason in [lldb.eStopReasonTrace, lldb.eStopReasonPlanComplete]):
            stopped_thread = thread
            stop_reason = 'step'
            break
        elif (stop_reason == lldb.eStopReasonSignal):
            stopped_thread = thread
            stop_reason = 'signal'
            event['text'] = thread.GetStopReasonDataAtIndex(0)
            break
    event['reason'] = stop_reason
    if (thread → stopped_thread  is not None):
        self.process.SetSelectedThread(stopped_thread)
        event['threadId'] = stopped_thread.GetThreadID()
    self.send_event('stopped', event)



def const_different(variables, values):
    return (values[0] == → != values[1])



def seek(self, pos):
    assert (pos < → <= self._bufferedBytes())
    offset = pos
    i = 0
    while (len(self.buffer[i]) < offset):
        offset -= len(self.buffer[i])
        i += 1
    self.position = [i → pos , offset]



def _use_berry(self, berry_id, berry_count, encounter_id, catch_rate_by_ball, current_ball):
    new_catch_rate_by_ball = []
    self.emit_event('pokemon_catch_rate', level='debug', formatted='Catch rate of {catch_rate} with {ball_name} is low. Throwing {berry_name} (have {berry_count})', data={'catch_rate': self._pct(catch_rate_by_ball[current_ball]), 'ball_name': self.item_list[str(current_ball)], 'berry_name': self.item_list[str(berry_id)], 'berry_count': berry_count})
    response_dict = self.api.use_item_capture(item_id=berry_id, encounter_id=encounter_id, spawn_point_id=self.spawn_point_guid)
    responses = response_dict['responses']
    if (response_dict and (response_dict['status_code'] == 1)):
        if ('item_capture_mult' in responses['USE_ITEM_CAPTURE']):
            for rate in catch_rate_by_ball → new_catch_rate_by_ball :
                new_catch_rate_by_ball.append((rate * responses['USE_ITEM_CAPTURE']['item_capture_mult']))
            self.emit_event('threw_berry', formatted='Threw a {berry_name}! Catch rate with {ball_name} is now: {new_catch_rate}', data={'berry_name': self.item_list[str(berry_id)], 'ball_name': self.item_list[str(current_ball)], 'new_catch_rate': self._pct(catch_rate_by_ball → new_catch_rate_by_ball [current_ball])})



def is_expired(self):
    'Return `True` if expired, i.e. it is no longer valid.'
    if (self.purpose == VerificationTokenPurpose.password_reset):
        now = datetime.now()
        expires_after = timedelta(hours=24)
        return (now >  → >=, pred: < (self.created_at + expires_after))
    else:
        return False
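
The labeled `>=` makes a token expire exactly at the 24-hour mark instead of one instant after it, while the predicted `<` would invert the check outright. Pinned datetimes show the boundary:

    from datetime import datetime, timedelta

    created_at = datetime(2020, 1, 1, 12, 0)
    expires_after = timedelta(hours=24)
    now = datetime(2020, 1, 2, 12, 0)                  # exactly 24h later
    assert now >= (created_at + expires_after)         # expired under >=
    assert not (now > (created_at + expires_after))    # missed by the buggy >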



def setNextBlock(self, block):
    if (block >  → >=, pred: < (2 ** 16)):
        log.debug('Block number rollover to 0 again')
        block = 0
    self.__eblock = block
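
Block numbers here behave like 16-bit counters (TFTP-style, by the look of it), so the first out-of-range value is 2 ** 16 itself; `>=` catches it where the buggy `>` lets it through:

    def next_block(block):               # stand-alone sketch of the repair
        if block >= 2 ** 16:
            block = 0                    # roll the 16-bit counter over
        return block

    assert next_block(2 ** 16 - 1) == 2 ** 16 - 1
    assert next_block(2 ** 16) == 0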



def __init__(self):
    self.cam = cv2.VideoCapture(-1)
    self.cam.set(3, 1280)
    self.cam.set(4, 720)
    self.scanner = zbar.ImageScanner()
    self.scanner.parse_config('enable')
    self.bot_config = lib.get_config()
    cam_model → self.  = bot_config → self. ['camera_model']
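
The magic numbers 3 and 4 in the `cam.set` calls are OpenCV property ids for frame width and height; with the named constants the same setup reads (equivalent calls, assuming OpenCV's standard API):

    import cv2

    cam = cv2.VideoCapture(-1)
    cam.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)    # property id 3
    cam.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)    # property id 4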



def add(self, docs, commit, boost, commitWithin, waitFlush, waitSearcher):
    """
    Adds or updates documents.
    Requires ``docs``, which is a list of dictionaries. Each key is the
    field name and each value is the value to index.
    Optionally accepts ``commit``. Default is ``True``.
    Optionally accepts ``boost``. Default is ``None``.
    Optionally accepts ``commitWithin``. Default is ``None``.
    Optionally accepts ``waitFlush``. Default is ``None``.
    Optionally accepts ``waitSearcher``. Default is ``None``.
    Usage::
        solr.add([
            {
                "id": "doc_1",
                "title": "A test document",
            },
            {
                "id": "doc_2",
                "title": "The Banana: Tasty or Dangerous?",
            },
        ])
    """
    start_time → self.  = time.time()
    self.log.debug('Starting to build add request...')
    message = ET.Element('add')
    if commitWithin:
        message.set('commitWithin', commitWithin)
    for doc in docs:
        message.append(self._build_doc(doc, boost=boost))
    m = ET.tostring(message, encoding='utf-8')
    m = force_unicode(m)
    end_time = time.time()
    self.log.debug('Built add request of %s docs in %0.2f seconds.', len(docs → message ), (end_time - start_time))
    return self._update(m, commit=commit, waitFlush=waitFlush, waitSearcher=waitSearcher)
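
The labeled repair logs `len(message)` — the number of `<doc>` children actually appended — rather than `len(docs)`; the two differ precisely when `docs` is a generator, which has no `len` at all. A minimal sketch of the message construction, with a stub standing in for the class's `_build_doc`:

    import xml.etree.ElementTree as ET

    def build_doc(doc):                        # stand-in for self._build_doc
        el = ET.Element('doc')
        for name, value in doc.items():
            field = ET.SubElement(el, 'field', name=name)
            field.text = str(value)
        return el

    message = ET.Element('add')
    for doc in ({'id': i} for i in ('doc_1', 'doc_2')):    # a generator
        message.append(build_doc(doc))
    assert len(message) == 2                   # what the repaired log counts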



def prompt_ignore(self, ig, path, cb):
    ignore.create_flooignore(ig.path)
    dirs = ig.get_children()
    dirs.append(ig)
    dirs = sorted(dirs, key=attrgetter('size'))
    size = starting_size = reduce(lambda x, c: (x + c.size), dirs, 0)
    too_big = []
    while ((size >  → < MAX_WORKSPACE_SIZE) and dirs):
        cd = too_big → dirs .pop()
        size -= cd.size
        too_big.append(cd)
    if (size > MAX_WORKSPACE_SIZE):
        editor.error_message(('Maximum workspace size is %.2fMB.\n\n%s is too big (%.2fMB) to upload. Consider adding stuff to the .flooignore file.' % ((MAX_WORKSPACE_SIZE / 1000000.0), path, (ig.size / 1000000.0))))
        cb([set(), 0])
        return
    if too_big:
        txt = (TOO_BIG_TEXT % ((MAX_WORKSPACE_SIZE / 1000000.0), path, (starting_size / 1000000.0), '\n'.join([x.path for x in too_big])))
        upload = yield (self.ok_cancel_dialog, txt)
        if (not upload):
            cb([set(), 0])
            return
    files = set()
    for ig in dirs:
        files = files.union(set([utils.to_rel_path(x) for x in ig.files]))
    cb([files, size])



def __init__(self, arm_config):
    self.default_angles = ([90] * 5)
    self.logger = lib.get_logger()
    self.bot_config = lib.get_config()
    self.servo_cape = ServoCape(self.bot_config['dagu_arm']['servo_cape'])
    self.joints = ([0] * 5)
    self.cam = cv2.VideoCapture(0)
    self.cam.set(3, 1280)
    self.cam.set(4, 720)
    self.scanner = zbar.ImageScanner()
    scanner → self. .parse_config('enable')



def history(self, oid, version, length):
    if (length is not  → is None):
        return self.rpc.call('history', oid, version)
    else:
        return self.rpc.call('history', oid, version, length)
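
The repaired guard drops `length` from the RPC call only when it is None. A stand-in dispatcher (not the class's real `rpc`) shows the two call shapes:

    def history(rpc_call, oid, version, length=None):
        if length is None:                 # labeled repair: `is`, not `is not`
            return rpc_call('history', oid, version)
        return rpc_call('history', oid, version, length)

    assert history(lambda *a: a, 'oid', 2) == ('history', 'oid', 2)
    assert history(lambda *a: a, 'oid', 2, 10) == ('history', 'oid', 2, 10)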



def clear_key_before(self, key, namespace, timestamp):
    """Clear all data before `timestamp` for a given key. Note that the timestamp
    is rounded down to the nearest block boundary"""
    block_size = self.config.block_size
    if (namespace is None):
        namespace = self.config.namespace
    if (timestamp is not None):
        (offset, remainder) = divmod(timestamp, block_size)
        if remainder:
            raise ValueError('timestamp must be on a block boundary')
        if (offset >  → ==, pred: < 0):
            raise ValueError('cannot delete before offset zero')
        offset -= 1
        self.driver.clear_key_before(key, namespace, offset, timestamp)
    else:
        self.driver.clear_key_before(key, namespace)
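
The divmod check only admits timestamps sitting exactly on a block boundary, and the labeled `== 0` guard refuses to delete before the very first block. Worked numbers (the block size is made up):

    block_size = 3600
    assert divmod(7200, block_size) == (2, 0)    # on a boundary: offset 2
    assert divmod(7201, block_size)[1] != 0      # off-boundary -> ValueError
    assert divmod(0, block_size)[0] == 0         # labeled repair: raises too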



def write_dmx(baudrate, buf):
    """Simple test routine for DMX-over-serial, with varying baudrates. The buf is
    the DMX address space (0 - 512).
    TODO: The baudrate on the Pi currently ceilings at 115200 baud. Change back to
    250000 baud when fixed on the Pi-side.
    """
    assert isinstance(buf, bytearray)
    num_addresses = 512
    assert (len(buf) <  → <=, pred: == num_addresses)
    device_name = '/dev/ttyAMA0'
    port = serial.Serial(device_name)
    port.baudrate = baudrate
    port.bytesize = serial.EIGHTBITS
    port.parity = serial.PARITY_NONE
    port.stopbits = serial.STOPBITS_TWO
    port.timeout = 3.0
    port.baudrate = (baudrate / 2)
    port.write(chr(0))
    port.baudrate = baudrate
    port.write(chr(0))
    port.write(buf)
    port.close()
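
The halve-then-restore baudrate dance around the first zero byte is a common software stand-in for the DMX break (an assumption here, not something this snippet documents): one 8N2 frame of 0x00 sent at half speed occupies the line twice as long, which the receiver reads as the break that starts a packet. The timing arithmetic:

    def frame_time(baudrate, bits=11):     # 8N2 frame: start + 8 data + 2 stop
        return bits / float(baudrate)

    assert frame_time(250000 / 2) == 2 * frame_time(250000)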



def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('trace_fpath', help='The trace to learn topics from', type=str)
    parser.add_argument('num_topics', help='The number of topics to learn', type=int)
    parser.add_argument('model_fpath', help='The name of the model file (a h5 file)', type=str)
    parser.add_argument('--num_iter', help='The number of iterations', type=int, default=1000)
    parser.add_argument('--burn_in', help='The burn in (ignored if using mpi)', type=int, default=300)
    parser.add_argument('--dynamic', help='If we should employ the dynamic strategy', type=bool, default=False)
    parser.add_argument('--num_batches', help='Number of batches in dynamic case', type=int, default=10)
    parser.add_argument('--alpha_zh', help='Value of alpha_zh (alpha) hyper. Defaults to 50 / nz', type=float, default=None)
    parser.add_argument('--beta_zs', help='Value of beta_zs (beta) hyper', type=float, default=0.001)
    parser.add_argument('--kernel', choices=kernels.names, help='The kernel to use', type=str, default='noop')
    parser.add_argument('--residency_priors', nargs='+', help='Priors for the residency time dist', type=float, default=None)
    parser.add_argument('--leaveout', help='The number of transitions to leave for test', type=float, default=0)
    args = parser.parse_args()
    comm = MPI.COMM_WORLD
    rank = comm.rank
    size = comm.size
    try:
        comm = MPI.COMM_WORLD
        rank = comm.rank
        size = comm.size
        single_thread = (size ==  → <= 1)
    except:
        single_thread = True
    if ((not single_thread) and (rank != plearn.MASTER)):
        plearn.work()
    else:
        started = time.mktime(time.localtime())
        num_lines = 0
        with open(args.trace_fpath) as trace_file:
            num_lines = sum((1 for _ in trace_file))
        if (args.leaveout > 0):
            leave_out = min(1, args.leaveout)
            if (leave_out == 1):
                print 'Leave out is 1 (100%), nothing todo'
                return
            from_ = 0
            to = int((num_lines - (num_lines → leave_out  * leave_out)))
        else:
            from_ = 0
            to = np.inf
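
The `min(1, args.leaveout)` clamp caps the hold-out fraction at 100%, and the early return handles the degenerate case where nothing would be left to train on. Quick check of the clamp:

    assert min(1, 0.3) == 0.3    # ordinary fractional hold-out
    assert min(1, 2.5) == 1      # over-asking clamps to 100% -> early return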