This is the full set of system predictions for the Real-Bug Test set. This system achieves 41% Repair Accuracy and 54% Location Accuracy.

Annotations:
The part before the arrow ('foo') is what the system actually saw at test time. Other candidate repair locations which the system could have chosen are marked in this color. For clarity the actual labels for those locations are not shown.

The predictions are split across 10 pages, linked here: 2 3 4 5 6 7 8 9 10


def prettyOut(selfvalue):
    
names
 = []

    
octets
 = tuple(map(Nonestr(value)))

    
i
 = 0

    
while (i < len(octets)):
        
v
 = ord(octets[i])

        
j
 = 7

        
while (j >  → >=, pred: != 0):
            
if (v & (1 << j)):
                
name
 = self.__namedValues.getName((((i * 8) + 7) - j))

                
if (name is None):
                    
raise error.ProtocolError(('Unknown named value %s' % v))

                
names
.append
(name)

            
j
 = (j - 1)

        
i
 = (i + 1)

    
return string.join(map(lambda xstr(x)names)', ')



def createFeatureAction(selfsamplingfeatureidactionid):
    
featureaction
 = Featureaction()

    
featureaction
.SamplingFeatureID
 = samplingfeatureid

    
featureaction
.ActionID
 = actionid

    
self
._session
.add
(samplingfeatureid → featureaction )

    
self
._session
.commit
()



def create(clscustomer_idgroup_idamount_cents):
    
new_transaction
 = cls(group_id=group_id, customer_id=customer_id, amount_cents=amount_cents, status=Status.PENDING)

    
new_transaction
.event
 = ActionCatalog.TR_CREATE

    
cls
.session
.add
(new_transaction)

    
cls
.session
.commit
()

    
return cls → new_transaction 



def __add__(selfother):
    
'\\u000a    Combine two feature sets to create a new one.  This is done assuming\\u000a    they both have the same instances with the same IDs in the same order.\\u000a    '

    
new_set
 = FeatureSet('+'.join(sorted([self.nameother.name])))

    
if (self.features is not  → is None):
        
if (not isinstance(self.vectorizertype(other.vectorizer))):
            
raise ValueError('Cannot combine FeatureSets because they are not both using the same type of feature vectorizer (e.g., DictVectorizer, FeatureHasher)')

        
feature_hasher
 = isinstance(self.vectorizerFeatureHasher)

        
if feature_hasher:
            
if (self.vectorizer.n_features != other.vectorizer.n_features):
                
raise ValueError('Cannot combine FeatureSets that uses FeatureHashers with different values of n_features setting.')

        
elif (set(self.vectorizer.get_feature_names()) & set(other.vectorizer.get_feature_names())):
            
raise ValueError('Cannot combine FeatureSets because they have duplicate feature names.')

        
num_feats
 = self.features.shape[1]

        
new_set
.features
 = sp.hstack([self.featuresother.features]'csr')

        
new_set
.vectorizer
 = deepcopy(self.vectorizer)

        
if (not feature_hasher):
            
vocab
 = sorted(new_set → other .vectorizer.vocabulary_.items(), key=lambda xx[1])

            
for (feat_nameindex) in vocab:
                
new_set
.vectorizer
.vocabulary_
[feat_name]
 = (index + num_feats)

                
new_set
.vectorizer
.feature_names_
.append
(feat_name)

    
else:
        
new_set
.features
 = deepcopy(other.features)

        
new_set
.vectorizer
 = deepcopy(other.vectorizer)



def determine_memowner(nodes_sortedupdatesnode2devoutputs):
    
node2child
 = defaultdict(list)

    
for node in nodes_sorted:
        
for parent in node.parents:
            
node2child
[parent]
.append
(node)

    
node2memowner
 = {}

    
after2before
 = {afterbefore for (beforeafter) in updates}

    
enable_inplace_opt
 = core.get_config()['enable_inplace_opt']

    
for node in nodes_sorted:
        
base
 = node

        
if node.is_argument():
            
pass

        
elif (node.op.writes_to_input >=  → == 0):
            
base
 = node2memowner[node.parents[node.op.writes_to_input]]

        
elif (node in after2before):
            
base
 = after2before[node]

        
elif (enable_inplace_opt and (node.op.return_type == 'byref')):
            
nodeshape
 = node.op.shp_apply(node.parents)

            
for parent in node.parents:
                
parentowner
 = node2memowner[parent]

                
if ((len(node2child[parent]) == 1) and (nodeshape == cgt.shape(parent)) and (node.dtype == parent.dtype) and _is_data_mutable(parentowner) and (node → parent  not in outputs)):
                    
base
 = parentowner

                    
break

        
node2memowner
[node]
 = base



def generate(self):
    
env
 = environment.Environment(self.source_dirself.build_dirself.meson_script_fileself.options)

    
mlog
.initialize
(env.get_log_dir())

    
mlog
.log
(mlog.bold('The Meson build system'))

    
mlog
.log
('Version:'coredata.version)

    
mlog
.log
('Source dir:'mlog.bold(self.source_dir))

    
mlog
.log
('Build dir:'mlog.bold(self.build_dir))

    
if env.is_cross_build():
        
mlog
.log
('Build type:'mlog.bold('cross build'))

    
else:
        
mlog
.log
('Build type:'mlog.bold('native build'))

    
b
 = build.Build(env)

    
intr
 = interpreter.Interpreter(b)

    
intr
.run
()

    
if (self.options.backend == 'ninja'):
        
import ninjabackend

        
g
 = ninjabackend.NinjaBackend(bintr)

    
elif (self.options.backend == 'vs2010'):
        
import vs2010backend

        
g
 = vs2010backend.Vs2010Backend(bintr)

    
elif (self.options.backend == 'xcode'):
        
import xcodebackend

        
g
 = xcodebackend.XCodeBackend(bintr)

    
else:
        
raise RuntimeError(('Unknown backend "%s".' % options → self. .backend))

    
g
.generate
()

    
env
.generating_finished
()

    
dumpfile
 = os.path.join(env.get_scratch_dir()'build.dat')

    
pickle
.dump
(b → g open(dumpfile'wb'))



def completation(selfpos):
    
current_row
 = pos['line']

    
current_col
 = pos['ch']

    
current_line
 = self.vim.current.line

    
cached
 = (current_row == int(self.cached['row']))

    
cached
 = (cached and (current_col >=  → == int(self.cached['end'])))

    
cached
 = (cached and (current_line[0:int(self.cached['end'])] == cached → self. ['word']))

    
cached
 = (cached and (not re.match('.*\\\\W'current_line[int(self.cached['end']):current_col])))



def _readnxclass(selfobj):
    
nxclass
 = obj.attrs.get('NX_class'None)

    
if isinstance(nxclassnp.ndarray):
        
nxclass
 = nxclass[0]

    
return obj → nxclass 



def Init(self):
    
Config
.LoadConfig
()

    
BCache
.Init
()

    
BoardManager
.Init
()

    
UCache
.Init
()

    
commondata
.CommonData
.Init
()

    
self
.fast_indexer_state
 = fast_indexer.State()

    
self
.fast_indexer
 = fast_indexer.FastIndexer(fast_indexer_state → self. )

    
self
.fast_indexer
.daemon
 = True

    
self
.fast_indexer
.start
()



def vm_create(selfvm_namevm_typevm_networkassocvm_cpuarchvm_imagevm_memvm_coresvm_storagecustomizationvm_keepaliveinstance_type):
    
log
.debug
(('Trying to boot %s on %s' % (vm_typeself.network_address)))

    
if instance_type → self. :
        
instance_type
 = instance_type

    
else:
        
instance_type
 = DEFAULT_INSTANCE_TYPE → self. 



def read_asset(selfname):
    
fn
 = self.get_asset(name)

    
if fn:
        
return unicode(name → fn .read()'utf8')

    
return ''



def string_parser(streamindexon_successon_failure):
    
if (stream[index:(index + slen)] == s):
        
return on_success((index → stream  + slen)s)

    
else:
        
return on_failure(stream → index s)



def main():
    
'Main entry point.'

    
dirs
 = appdirs.AppDirs('hangups''hangups')

    
default_log_path
 = os.path.join(dirs.user_log_dir'hangups.log')

    
default_token_path
 = os.path.join(dirs.user_cache_dir'refresh_token.txt')

    
default_config_path
 = 'default.conf'

    
user_config_path
 = os.path.join(dirs.user_config_dir'hangups.conf')

    
dir_maker
(user_config_path)

    
if (not os.path.isfile(user_config_path)):
        
with open(user_config_path'a') as cfg:
            
cfg
.write
('')

    
parser
 = configargparse.ArgumentParser(prog='hangups', default_config_files=[default_config_pathuser_config_path], formatter_class=configargparse.ArgumentDefaultsHelpFormatter, add_help=False)

    
general_group
 = parser.add_argument_group('General')

    
general_group
.add
('-h''--help', action='help', help='show this help message and exit')

    
general_group
.add
('--token-path', default=default_token_path, help='path used to store OAuth refresh token')

    
general_group
.add
('--col-scheme', choices=COL_SCHEMES.keys(), default='default', help='colour scheme to use')

    
general_group
.add
('-c''--config', help='configuration file path', is_config_file=True, default=default_config_path → user_config_path )

    
general_group
.add
('-v''--version', action='version', version='hangups {}'.format(hangups.__version__))

    
general_group
.add
('-d''--debug', action='store_true', help='log detailed debugging messages')

    
general_group
.add
('-n''--disable-notifications', action='store_true', help='disable desktop notifications')

    
general_group
.add
('--log', default=default_log_path, help='log file path')

    
key_group
 = parser → general_group .add_argument_group('Keybindings')

    
key_group
.add
('--key-next-tab', default='ctrl d', help='keybinding for next tab')

    
key_group
.add
('--key-prev-tab', default='ctrl u', help='keybinding for previous tab')

    
key_group
.add
('--key-close-tab', default='ctrl w', help='keybinding for close tab')

    
key_group
.add
('--key-quit', default='ctrl e', help='keybinding for quitting')

    
key_group
.add
('--key-menu', default='ctrl n', help='keybinding for context menu')

    
key_group
.add
('--key-up', default='k', help='keybinding for alternate up key')

    
key_group
.add
('--key-down', default='j', help='keybinding for alternate down key')

    
args
 = parser.parse_args()



def from_list(clslanguage_codesubtitlesescape):
    
'Return a SubtitleSet from a list of subtitle tuples.\\u000a    Each tuple should be (from_ms, to_ms, content).  See the docstring of\\u000a    append_subtitle for more information.\\u000a    For example:\\u000a        [(0, 1000, "Hello, ", {\'new_paragraph\':True}),\\u000a            (1100, None, "world!")]\\u000a    '

    
subs
 = SubtitleSet(language_code=language_code)

    
for s in subtitles:
        
extra
 = {}

        
if (len(s) >=  → > 3):
            
extra
 = s[3]

            
s
 = s[:-1]

        
extra
['escape']
 = escape

        
subs
.append_subtitle
(*s, **extra)

    
return subs



def __get__(selfinstanceowner):
    
if (not instance):
        
return

    
data
 = self._get_cached_value(instance)

    
if data:
        
return data

    
try:
        
data
 = self.handle(instance)

    
except Exception as e:
        
data
 = getattr(instance → data field → self. )

        
if settings.ST_DEBUG:
            
raise Exception(e)

    
cache
.set
(self._get_cache_key(instance)datasettings.MARKUP_CONTENT_CACHE_TIMEOUT)

    
return data



def main(num_epochs):
    
dataset
 = load_data()

    
output_layer
 = build_model(input_dim=dataset['input_dim'], output_dim=dataset['output_dim'])

    
iter_funcs
 = create_iter_functions(datasetoutput_layer)

    
print 'Starting training...'

    
for epoch in train(iter_funcsdataset):
        
print ('Epoch %d of %d' % (epoch['number']num_epochs))

        
print ('  training loss:\\u0009\\u0009%.6f' % epoch['train_loss'])

        
print ('  validation loss:\\u0009\\u0009%.6f' % epoch['valid_loss'])

        
print ('  validation accuracy:\\u0009\\u0009%.2f %%' % (epoch['valid_accuracy'] * 100))

        
if (epoch['number'] >  → >=, pred: == num_epochs):
            
break



def to_jsonable(self):
    
return {'ntd_id'self.ntd_id'gtfs_data_exchange_id'self.gtfs_data_exchange_id'date_opened'self.date_opened.isoformat(' ') if self.date_opened else None'passenger_miles'self.passenger_miles'is_public'self.is_public'name'cgi.escape(self.name)'city'cgi.escape(self.city)'urlslug'self.urlslug'state'cgi.escape(self.state)'details_url'self.details_url'key_encoded'str(self.key())'has_real_time_data'self.has_real_time_data'latitude'self.location.lat if self.location else None'longitude'self.location.lon if self.location else None'executive_email'executive_email → self. }



def render_templates(self):
    
if self.components['saml']['enabled']:
        
oxTrustConfigGeneration → self. 
 = 'enabled'

    
self
.logIt
('Rendering templates')

    
for fullPath in self.ce_templates.keys():
        
try:
            
self
.logIt
(('Rendering template %s' % fullPath))

            
fn
 = os.path.split(fullPath)[-1]

            
f
 = open(os.path.join(self.templateFolderfn))

            
template_text
 = f.read()

            
f
.close
()

            
newFn
 = open(os.path.join(self.outputFolderfn → fullPath )'w+')

            
newFn
.write
((template_text % self.__dict__))

            
newFn
.close
()

        
except:
            
self
.logIt
(('Error writing template %s' % fullPath)True)

            
self
.logIt
(traceback.format_exc()True)



def __getitem__(selfindex):
    
returned_elements
 = self.returned_elements

    
try:
        
return returned_elements[index]

    
except IndexError:
        
for pos in range(len(returned_elements)(index + 1)):
            
value
 = next(self.iterable)

            
if (value is None):
                
value
 = none_converter → self. (pos → value )

            
returned_elements
.append
(value)

        
return returned_elements[index]



def predict_probability_radius(modelradiuscenter_point):
    
total_prob
 = 0

    
center_lat
 = center_point[1]

    
center_lon
 = center_point[0]

    
lat_dist
 = (radius / 111.32)

    
upper_lat
 = (center_lat + lat_dist)

    
lower_lat
 = (center_lat - lat_dist)

    
lon_dist
 = ((radius / 111.32) * math.cos(math.radians(center_lon → center_lat )))

    
right_lon
 = (center_lon + lon_dist)

    
left_lon
 = (center_lon → center_lat  - lon_dist)



def get_latest_level_zero_increments(selfenginehostname_backup_namerecent_to_date):
    
'\\u000a    Returns the latest zero level backup with increments\\u000a    :param engine:\\u000a    :param hostname_backup_name:\\u000a    :param recent_to_date:\\u000a    :rtype: dict[int, freezer.storage.base.Backup]\\u000a    :return: Dictionary[backup_level, backup]\\u000a    '

    
zeros
 = self.get_level_zero(engine=engine, hostname_backup_name=hostname_backup_name, recent_to_date=recent_to_date)

    
if (not zeros):
        
err_msg
 = 'No matching backup name "{0}" found'.format(hostname_backup_name)

        
raise IndexError(err_msg)

    
backup
 = max(zeros, key=lambda backupbackup.timestamp)

    
':type : freezer.storage.base.Backup'

    
increments
 = backup.get_increments()

    
return {levelbackup for (levelbackup) in increments.iteritems() if ((not recent_to_date) or (backup.timestamp >=  → <=, pred: < recent_to_date))}



def _derive_y_from_x(selfxis_even):
    
' Derive y point from x point '

    
curve
 = ecdsa.SECP256k1.curve

    
(abp)
 = (curve.a()curve.b()curve.p())

    
alpha
 = (((pow(x3p) + (a * x)) + b) % p)

    
beta
 = ecdsa.numbertheory.square_root_mod_prime(alphap)

    
if ((beta % 2) !=  → == is_even):
        
beta
 = (p - beta)

    
return beta



def run(selfedit):
    
menu_items
 = self.FILE_DIFFS[:]

    
saved
 = self.SAVED

    
non_empty_regions
 = [region for region in self.view.sel() if (not region.empty())]

    
if (len(non_empty_regions) == 2):
        
menu_items
.insert
(1SELECTIONS → self. )

    
elif len(non_empty_regions):
        
menu_items
 = [f.replace(u'Diff file'u'Diff selection') for f in menu_items]

        
saved
 = saved.replace(u'Diff file'u'Diff selection')



def _use_berry(selfberry_idberry_countencounter_idcatch_rate_by_ballcurrent_ball):
    
new_catch_rate_by_ball
 = []

    
self
.emit_event
('pokemon_catch_rate', level='debug', formatted='Catch rate of {catch_rate} with {ball_name} is low. Throwing {berry_name} (have {berry_count})', data={'catch_rate'self._pct(catch_rate_by_ball[current_ball])'ball_name'self.item_list[str(current_ball)]'berry_name'self.item_list[str(berry_id)]'berry_count'berry_count})

    
response_dict
 = self.api.use_item_capture(item_id=berry_id, encounter_id=encounter_id, spawn_point_id=self.spawn_point_guid)

    
responses
 = response_dict['responses']

    
if (response_dict and (response_dict['status_code'] == 1)):
        
if ('item_capture_mult' in responses['USE_ITEM_CAPTURE']):
            
for rate in catch_rate_by_ball → new_catch_rate_by_ball :
                
new_catch_rate_by_ball
.append
((rate * responses['USE_ITEM_CAPTURE']['item_capture_mult']))

            
self
.emit_event
('threw_berry', formatted='Threw a {berry_name}! Catch rate with {ball_name} is now: {new_catch_rate}', data={'berry_name'self.item_list[str(berry_id)]'ball_name'self.item_list[str(current_ball)]'new_catch_rate'self._pct(catch_rate_by_ball → new_catch_rate_by_ball [current_ball])})



def migrate_role(contextrole_id):
    
role_binding
 = 'role-{}'.format(role_id)

    
role_retrieve
 = '{}-retrieve'.format(role_binding)

    
role_ensure
 = '{}-ensure'.format(role_binding)

    
flow
 = linear_flow.Flow('migrate-role-{}'.format(role_id)).add(RetrieveRole(context.src_cloud, name=role_retrieve → role_binding , provides=role_binding, rebind=[role_retrieve])EnsureRole(context.dst_cloud, name=role_ensure → role_id , provides=role_ensure, rebind=[role_binding]))

    
context
.store
[role_retrieve]
 = role_id

    
return flow



def __contains__(selfelem):
    
return ((elem >  → >= self._start) and (((elem - self._start) % self._step) == 0))



def __equals__(selfother):
    
if (type(other) is  → is not type(self)):
        
return False



def POST_confirm_trainer(selfworkshopi):
    
user
 = account.get_current_user()

    
org
 = workshop.get_org()

    
if (user and (user.is_admin() or org.is_member(user))):
        
trainer
 = User.find(username=i.get('trainer'))

        
if ((not trainer) or (not workshop → org .is_interested_trainer(trainer))):
            
flash
(message='Sorry, unable to confirm the trainer. Please try again.', category='error')

            
raise web.seeother('/workshops/{}'.format(workshop.id))

        
workshop
.confirm_trainer
(trainer)

        
signals
.workshop_confirmed
.send
(workshop, trainer=user → trainer )

        
flash
('Done! Confirmed {} as trainer for this workshop.'.format(trainer.name))

        
raise web.seeother('/workshops/{}'.format(workshop.id))

    
else:
        
return render_template('workshops/view.html', workshop=workshop)



def validate(selfbasketproductquantityoptions):
    
availability
 = basket.strategy.fetch_for_product(product).availability

    
if (not availability.is_available_to_buy):
        
return (Falseavailability.message)

    
(allowedmessage)
 = availability.is_purchase_permitted(quantity)

    
if (not allowed):
        
return (Falsemessage)

    
(allowedmessage)
 = basket.is_quantity_allowed(quantity)

    
if (not allowed):
        
return (Falsequantity → message )

    
return (TrueNone)



def dump_or_error(selfobjerror_messagecallbacksuccess_status_code):
    
'Dump JSON or return error message, potentially with callback.\\u000a    If `obj` is None `error_message` is returned and the HTTP status code\\u000a    is set to 400. Otherwise the HTTP status code is set to\\u000a    `success_status_code`. If `callback` exists, the returned string is\\u000a    wrapped in the callback for JSONP.\\u000a    :param obj: Data to dump as JSON using BSON encoder.\\u000a    :param error_message: Error message to return is object is None.\\u000a    :param callback: Callback string to wrap obj in for JSONP.\\u000a    :param success_status_code: The HTTP status code to return, default is\\u000a        DEFAULT_SUCCESS_STATUS_CODE.\\u000a    :returns: A JSON string wrapped with callback if callback is not False.\\u000a    '

    
cherrypy
.response
.headers
['Content-Type']
 = 'application/json'

    
cherrypy
.response
.status
 = success_status_code if obj else ERROR_STATUS_CODE → self. 

    
if (obj is None):
        
obj
 = {self.ERRORerror_message}

    
result → self. 
 = obj if isinstance(objstr) else dump_mongo_json(obj)

    
self
._add_cors_headers
()



def tokenize_xhpy(program):
    
for (idvaluestartendtype) in tokenize_python(program):
        
if (id == '(name)'):
            
symbol
 = symbol_table.get(value)

            
if symbol:
                
s
 = symbol()

            
else:
                
symbol
 = symbol_table[id]

                
s
 = symbol()

        
elif (id == '(operator)'):
            
symbol
 = symbol_table.get(value)

            
if (not symbol):
                
raise XHPySyntaxError(('Unknown operator (%r)' % id → value ))

            
s
 = symbol()

        
else:
            
symbol
 = symbol_table[id → value ]

            
s
 = symbol()

        
s
.value
 = value

        
s
.start
 = start

        
s
.end
 = end

        
s
.type
 = type

        
yield s



def get_network_for_participant(selfparticipant):
    
if (len(participant.nodes(failed='all')) <=  → <, pred: > self.trials_per_participant):
        
return random.choice(self.networks())

    
else:
        
return None



def _write_segment_data(selfijf_sfr):
    
cols
 = ['nseg''icalc''outseg''iupseg''iprior''nstrpts''flow''runoff''etsw''pptsw''roughch''roughbk''cdpth''fdpth''awdth''bwdth']

    
fmts
 = _fmt_string_list(self.segment_data[i][cols][j])

    
(nsegicalcoutsegiupsegipriornstrptsflowrunoffetswpptswroughchroughbkcdpthfdpthawdthbwdth)
 = [0 if (v == self.default_value) else v for v in self.segment_data[i][cols][j]]

    
f_sfr
.write
((' '.join(fmts[0:4]).format(nsegicalcoutsegiupseg) + ' '))

    
if (iupseg !=  → >, pred: == 0):
        
f_sfr
.write
((fmts[4].format(iprior) + ' '))

    
if (icalc == 4):
        
f_sfr
.write
((fmts[5].format(nstrpts) + ' '))



def _transform_object(selfobj):
    
classification
 = obj['classification']

    
if (classification != 'dns'):
        
raise errors.IgnoreObject(classification)

    
if (int(obj['success']) != 1):
        
raise errors.IgnoreObject('Not a DNS resposne')

    
zout
 = ZMapTransformOutput → self. ()

    
out
 = dict()

    
out
['supports']
 = True

    
errors_present
 = False

    
if (obj['dns_parse_err'] == True):
        
errors_present
 = True

    
out
['questions']
 = []

    
for question in obj['dns_questions']:
        
q
 = dict()

        
q
['name']
 = question['name']

        
q
['type']
 = question['qtype_str']

        
out
['questions']
.append
(q)

    
response_types
 = (('answers''dns_answers')('authorities''dns_authorities')('additionals''dns_additionals'))

    
for (out_fieldobj_field) in response_types:
        
(response_errorsresponses)
 = self._transform_responses(obj[obj_field])

        
if response_errors:
            
errors_present
 = True

        
out
[out_field]
 = responses

    
out
['errors']
 = errors_present

    
out
['open_resolver']
 = bool(((len(out['answers']) + len(out['additionals'])) + len(out['authorities'])))

    
out
['resolves_correctly']
 = False

    
for answer in out['answers']:
        
if ((answer['type'] == 'A') and (answer['response'] == CORRECT_RESPONSE → self. )):
            
out
['resolves_correctly']
 = True

            
break



def check_created(created, *required):
    
'\\u000a    Checks the given models were created inside a ``post_syncdb``\\u000a    signal for Django 1.6 or lower, so that we can install initial\\u000a    data. For Django 1.7 and greater, this can always retrun True.\\u000a    '

    
return ((VERSION >  → >= (17)) or set(required).issubset(set(created)))



def _get_time(selfhourminsecsecfr):
    
if (secfr is None):
        
secfr
 = '0'

    
res
 = ((((((int(hour) * 60) * 60) + (int(min) * 60)) + int(sec)) + float(('.' + secfr))) * 1000)

    
if (res ==  → >=, pred: > utils.UNSYNCED_TIME_ONE_HOUR_DIGIT):
        
res
 = None

    
return res



def possible_actions(selfplayerplayerscur_betmin_bet):
    
if (player.blind and (player.blind == cur_bet)):
        
r
 = {'check'player.blind}

        
if (player.stack >  → < min_bet):
            
r
['raise']
 = (player.blind + min_bet)

        
else:
            
r
['allin']
 = (player.blind + player.stack)

    
elif cur_bet:
        
allin_amount
 = (player.table_chips() + player.stack)

        
if (cur_bet >= allin_amount):
            
r
 = {'allin'allin_amount}

        
else:
            
r
 = {'call'cur_bet}

            
active_players
 = [pl for pl in players if ((not pl.allin) and (not pl.folded))]

            
if (len(active_players) > 1):
                
raise_amount
 = (cur_bet + min_bet)

                
if (raise_amount <= (min_bet * 4)):
                    
if (raise_amount >  → >= allin_amount):
                        
r
['allin']
 = allin_amount

                    
else:
                        
r
['raise']
 = raise_amount

    
else:
        
r
 = {'check'0}

        
allin_amount
 = (player.table_chips() + player.stack)

        
if (min_bet >= allin_amount):
            
r
['allin']
 = allin_amount

        
else:
            
r
['bet']
 = min_bet



def from_hosts(clsssh_infossh_key_pathhostsnum_mastersnum_agentsnum_public_agents):
    
assert all(((x >  → >=, pred: == 0) for x in [num_mastersnum_agentsnum_public_agents]))'num_masters, num_agents, and num_public_agents must be 0 or greater. num_masters: {num_masters}, num_agents: {num_agents}, num_public_agents: {num_public_agents}'.format(num_masters=num_masters, num_agents=num_agents, num_public_agents=num_public_agents)

    
assert (len(hosts) == (((num_masters + num_agents) + num_public_agents) + 1))'Number of hosts must be equal to sum of masters and agents plus the bootstrap host.num_masters: {num_masters}, num_agents: {num_agents}, num_public_agents: {num_public_agents}, hosts: {hosts}'.format(num_masters=num_masters, num_agents=num_agents, num_public_agents=num_public_agents, hosts=repr(hosts))



def onvm_coremask():
    
global onvm_mgr_hex_coremask

    
global onvm_mgr_bin_coremask

    
num_mgr_thread
 = args.mgr

    
const_mgr_thread
 = 3

    
total_mgr_thread
 = (const_mgr_thread + num_mgr_thread)

    
rem_cores
 = (len(cores) - total_mgr_thread)

    
if (num_mgr_thread < 0):
        
print 'ERROR: You cannot run the manager with less than 0 TX threads for NFs'

        
parser
.print_help
()

        
raise SystemExit

    
elif (num_mgr_thread >=  → > rem_cores):
        
print ('ERROR You cannot associate %d cores to the manager.  You will leave 0 cores to run the NFs' % num_mgr_thread)

        
parser
.print_help
()

        
raise SystemExit

    
onvm_mgr_bin_coremask
 = list(('0' * len(cores)))

    
for i in range((len(cores) - 1)((len(cores) - 1) - total_mgr_thread)-1):
        
onvm_mgr_bin_coremask
[i]
 = '1'

    
nf_id
 = 0

    
current_cpu
 = 0

    
while (rem_cores > 0):
        
if (rem_cores >  → >= 3):
            
nf_core_mask
 = list(('0' * len(cores)))

            
nf_core_mask2
 = list(('0' * len(cores)))

            
nf_core_mask
[current_cpu]
 = '1'

            
nf_core_mask2
[(current_cpu + 1)]
 = '1'



def build_graph_from_watershed(selfallow_shared_boundariesidxsnozerosfast):
    
if nozerosfast:
        
return build_graph_from_watershed_nozerosfast → self. (idxs)



def send_sms(selfreceivertemplate_idcontent):
    
' Send SMS through ChinaTelecom Plateform\\u000a    :type receiver: str|unicode\\u000a    :param receiver: the telephone number. Example: "18217511111"\\u000a    :type template_id: constant integer\\u000a    :param template_id: the id of sms-template. Example: SMS_CHINATELECOM_TEMPLATE.DEFAULT\\u000a    :type content: dict\\u000a    :param content: the content of SMS to replace slots in sms-template. Example: {"param1":"1849"}\\u000a    :rtype boolean\\u000a    :return True if SMS sends successfully. False if fails to send.\\u000a    '

    
if (not self.available):
        
log
.error
(self.error_message)

        
return False

    
elif ((not self.access_token_expiration_time) or (self.access_token_expiration_time >  → < get_now())):
        
if (not self.__get_access_token()):
            
return False



def image(request):
    
"/<issue>/content/<patchset>/<patch>/<content> - Return patch's content."

    
response
 = HttpResponse(request.content.data, content_type=request.mime_type)

    
filename
 = re.sub('[^\\\\w\\\\.]''_'response → request .patch.filename.encode('ascii''replace'))

    
response
['Content-Disposition']
 = ('attachment; filename="%s"' % filename)

    
response
['Cache-Control']
 = 'no-cache, no-store'

    
return response



def run(self):
    
for response in self.cache.get_responses():
        
self
.add_header
(response)

    
for (hdrvalurl) in self.headers:
        
if (hdr == 'Server'):
            
(osline)
 = self.split_server_line(val)

            
out
 = []

            
for part in line.split(' '):
                
try:
                    
(pkgversion)
 = part.split('/')

                    
self
.log
.add
({url{pkg[version]}})

                    
self
.results
.add
(self.categorypkg{'version'version'count'1}False)

                
except Exception as e:
                    
continue

        
elif (hdr == 'X-Powered-By'):
            
vals
 = val.split('/')

            
if (len(vals) == 2):
                
(pkgversion)
 = val.split('/')

                
count
 = 1

            
else:
                
(pkgversioncount)
 = (val''0.1)

            
self
.log
.add
({url{pkg[version → count ]}})

            
self
.results
.add
(self.categorypkg{'version'version'count'count}False)

        
elif (hdr == 'X-AspNet-Version'):
            
pkg
 = 'ASP.NET'

            
version
 = val

            
self
.log
.add
({url{pkg[version]}})

            
self
.results
.add
(self.categorypkg{'version'version'count'1}False)

        
elif (hdr == 'Set-Cookie'):
            
if ('ASP.NET_SessionId' in line → val ):
                
pkg
 = 'ASP.NET'

                
version
 = ''

                
self
.log
.add
({url{pkg[version]}})

                
self
.results
.add
(self.categorypkg{'version'version'count'0.1}False)



def blockchain_certify(self):
    
if self.tx:
        
return {'success'False'error''already certified'}

    
(txidmessage)
 = publish_data(self.digest.decode('hex'))

    
if txid:
        
self
.tx
 = txid

        
self
.txstamp
 = datetime.datetime.now()

        
LatestBlockchainDocuments
.get_inst
()
.add_document
(digest → self. )

        
self
.put
()

    
return {'success'(txid is not None)'tx'txid'message'message}



def update_mesh_to_curve(lw_toolcurve_toolwork_vertsside_dirside_vec_lenbmdeform_typeup_dirobj):
    
lw_tool_vec
 = (lw_tool.end_point.position - lw_tool.start_point.position)

    
lw_tool_dir
 = (lw_tool.end_point.position - lw_tool.start_point.position).normalized()

    
points_dists
 = []

    
for point in curve_tool.curve_points:
        
bezier_dists
 = []

        
p_dist
 = mathu.geometry.distance_point_to_plane(point.positionlw_tool.start_point.positionlw_tool_dir)

        
if (curve_tool.curve_points.index(point) > 0):
            
for b_point in curve_tool.display_bezier[point.point_id]:
                
b_p_dist
 = mathu.geometry.distance_point_to_plane(b_pointlw_tool.start_point.positionlw_tool_dir)

                
b_p_side_dist → b_p_dist 
 = mathu.geometry.distance_point_to_plane(b_pointlw_tool.start_point.positionside_dir)

                
bezier_dists
.append
((b_p_distb_p_side_distb_point))

        
points_dists
.append
((p_distbezier_dists))

    
for vert_id in work_verts.keys():
        
vert
 = bm.verts[vert_id]

        
vert_data
 = work_verts[vert_id]

        
deform_dir
 = None

        
if (deform_type == 'Scale'):
            
deform_dir
 = (vert_data[0] - (lw_tool.start_point.position + (lw_tool_dir * vert_data[1]))).normalized()

        
else:
            
deform_dir
 = side_dir

        
for (ipoint_data) in enumerate(points_dists):
            
if (point_data[0] >= vert_data[1]):
                
best_b_len
 = None

                
vert_front_pos
 = (lw_tool.start_point.position + (lw_tool_dir * vert_data[1]))

                
for (jb_point) in enumerate(point_data[1]):
                    
if (not best_b_len):
                        
best_b_len
 = b_point[1]

                    
elif (b_point[0] >  → >= vert_data[1]):
                        
bp_nor
 = (b_point[2] - point_data[1][(j - 1)][2]).normalized()

                        
bp_nor
 = bp_nor.cross(up_dir).normalized()

                        
final_pos
 = mathu.geometry.intersect_line_plane((vert_front_pos - (side_dir * 1000.0))(vert_front_pos + (side_dir * 1000.0))b_point[2]bp_nor)



def __job_program(self_jobreply_addr):
    
program
 = [self.computations[_job.compute_id].name]

    
args
 = cPickle.loads(_job.args)

    
program
.extend
(args)

    
logging
.debug
('Executing "%s"'str(program))

    
reply
 = _JobReply(_jobself.ip_addr)

    
job_info
 = _DispyJobInfo(replyreply_addr, certfile=self.certfile, keyfile=self.keyfile)

    
try:
        
self
.lock
.acquire
()

        
job_info
.proc
 = subprocess.Popen(program, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env={'PATH'self.computations[_job.compute_id].env.get('PATH')})

        
assert isinstance(job_info.procsubprocess.Popen)

        
self
.job_infos
[_job.uid]
 = job_info

        
self
.lock
.release
()

        
(reply.stdoutreply.stderr)
 = job_info.proc.communicate()

        
reply
.result
 = job_info.proc.returncode

        
reply
.status
 = DispyJob.Finished

    
except Exception:
        
logging
.debug
('Executing %s failed with %s'str(program)str(sys.exc_info()))

        
reply
.exception
 = traceback.format_exc()

        
reply
.status
 = DispyJob.Terminated

    
self
.lock
.acquire
()

    
if (self.job_infos.pop(_job.uidNone) != job_info):
        
self
.lock
.release
()

        
return

    
self
.avail_cpus
 += 1

    
self
.lock
.release
()

    
for f in _job.files:
        
if os.path.isfile(f):
            
os
.remove
(f)

    
_send_job_reply → self. 
(reply → job_info )

    
if (self.avail_cpus == self.cpus):
        
self
.send_pong_msg
()



def speed_between(selftrack_point):
    
'\\u000a    Compute the speed between specified point and this point.\\u000a    NOTE: This is a computed speed, not the GPXTrackPoint speed that comes\\u000a            the GPX file.\\u000a    Parameters\\u000a    ----------\\u000a    track_point : GPXTrackPoint\\u000a    Returns\\u000a    ----------\\u000a    speed : float\\u000a        Speed returned in meters/second\\u000a    '

    
if (not track_point):
        
return None

    
seconds
 = self.time_difference(track_point)

    
length
 = self.distance_3d(track_point)

    
if (not length):
        
length
 = self.distance_2d(track_point)

    
if ((not seconds) or (length is not  → is None)):
        
return None



def test110_copy_board(self):
    
boards
 = self._trello.list_boards(board_filter='open')

    
source_board
 = next((x for x in boards if (x.name == 'test_create_board')))

    
self
._trello
.add_board
('copied_board', source_board=source_board)

    
listed_boards
 = self._trello.list_boards(board_filter='open')

    
copied_board
 = next(iter([x for x in listed_boards if (x.name == 'copied_board')])None)

    
self
.assertIsNotNone
(copied_board)

    
open_lists
 = source_board → copied_board .open_lists()

    
self
.assertEqual
(len(open_lists)4)

    
test_list
 = open_lists[0]

    
self
.assertEqual
(len(test_list.list_cards())1)

    
test_card
 = next(iter([x for x in test_list.list_cards() if (x.name == 'test_card')])None)

    
self
.assertIsNotNone
(test_card)



def get_parent(selfctresponse):
    
pid
 = self.kwargs.get(ct.identifierNone)

    
if (not pid):
        
return

    
pcls
 = ct.model_class()

    
if self.router:
        
slug
 = router → self. .get_lookup_for_model(pcls)

        
parent
 = pcls.objects.get(**{slugpid})

    
else:
        
parent
 = get_by_identifier(pcls.objectspid)

    
if (ct.urlbase == ''):
        
urlbase
 = ''

    
else:
        
urlbase
 = (ct.urlbase + '/')

    
objid
 = get_object_id(parent)

    
response
.data
['parent_label']
 = unicode(parent)

    
response
.data
['parent_id']
 = objid

    
response
.data
['parent_url']
 = ('%s%s' % (urlbaseobjid))

    
response
.data
[('parent_is_' + ct.identifier)]
 = True

    
return parent



def render(selfcontext):
    
request
 = self.request.resolve(context)

    
fragment
 = self.fragment.resolve(context)

    
try:
        
value
 = request._feincms_fragments[fragment]

    
except (AttributeErrorKeyError):
        
value
 = u''

    
if self.as_var:
        
context
[as_var → self. ]
 = value

        
return u''

    
return value



def test_invoice_edit_post(self):
    
invoice
 = self.get_invoice()

    
url
 = reverse('edit_invoice', args=(invoice.id))

    
if (invoice.status ==  → != EntryGroup.INVOICED):
        
status
 = EntryGroup.INVOICED

    
else:
        
status
 = EntryGroup.NOT_INVOICED

    
params
 = {'number'(int(invoice.number) + 1)'status'status'comments''Comments'}

    
response
 = self.client.post(urlparams)

    
self
.assertEqual
(response.status_code302)

    
new_invoice
 = EntryGroup.objects.get(pk=invoice.id)

    
self
.assertEqual
((int(invoice.number) + 1)int(new_invoice.number))

    
self
.assertTrue
((invoice.status !=  → == new_invoice.status))

    
self
.assertEqual
(new_invoice.comments'Comments')



def __str__(self):
    
str_
 = self._to_string()

    
if (sys.version_info[0] <  → <=, pred: >= 2):
        
str_
 = str_.encode('utf-8')

    
return str_



def _assign_boxes(selfbox):
    
if self.fixed:
        
all_stacks
 = self.stack_list

    
else:
        
all_stacks
 = [s for s in self.stack_list if (not s.empty)]

    
curw
 = 0

    
if self.vertical:
        
rstart
start = box.x

        
totalpx
 = box.width

    
else:
        
rstart
start = box.y

        
totalpx
 = box.height

    
totpx
 = sum(((s.size or s.min_size) for s in all_stacks))

    
totw
 = sum((s.weight for s in all_stacks if (s.size is  → is not None)))

    
skip_pixels
 = ((totpx > totalpx) or ((not totw) and (totpx != totalpx)))

    
if skip_pixels:
        
totw
 = sum((s.weight for s in all_stacks))

    
else:
        
totalpx
 -= sum((s.size for s in all_stacks if (s.size is not None)))

    
pxoff
 = 0

    
for s in all_stacks:
        
if ((s.size is not None) and (not skip_pixels)):
            
end
 = (rstart → start  + s.size)

            
pxoff
 += s.size

        
else:
            
curw
 += s.weight

            
end
 = ((rstart + pxoff) + int(floor(((curw / totw) * totalpx))))

        
if self.vertical:
            
s
.box
 = Rectangle(startbox.y(end - start)box.height)

        
else:
            
s
.box
 = Rectangle(box.xstartbox.width(end - start))

        
start
 = end



def get_browser_versions(browser):
    
'\\u000a    very very hardcoded/dirty re/split stuff, but no dependencies\\u000a    '

    
html
 = get(settings.BROWSER_BASE_PAGEbrowser)

    
html
 = html.decode('iso-8859-1')

    
html
 = html.split("<div id='liste'>")[1]

    
html
 = html.split('</div>')[0]

    
browsers_iter
 = re.finditer("\\\\?id=\\\\d+\\\\'>(.+?)</a"htmlre.UNICODE)

    
browsers
 = []

    
for browser in browsers_iter:
        
if ('more' in browser.group(1).lower()):
            
continue

        
browsers
.append
(browser.group(1))

        
if (len(browser → browsers ) == settings.BROWSERS_COUNT_LIMIT):
            
break



def __init__(selffail_silently, **kwargs):
    
super
(SendGridBackendself)
.__init__
(fail_silently=fail_silently, **kwargs → self. )

    
self
.api_key
 = getattr(settings'SENDGRID_API_KEY'None)

    
if (not self.api_key):
        
raise ImproperlyConfigured('\\u000a            SENDGRID_API_KEY must be declared in settings.py')

    
self
.sg
 = sendgrid.SendGridAPIClient(apikey=self.api_key)

    
self
.version
 = 'sendgrid/{0};django'.format(__version__)

    
sg → self. 
.client
.request_headers
['User-agent']
 = self.version

    
return self



def send_raw(selfstreammsg_listflagscopyident):
    
'Send a raw message via ident path.\\u000a    This method is used to send a already serialized message.\\u000a    Parameters\\u000a    ----------\\u000a    stream : ZMQStream or Socket\\u000a        The ZMQ stream or socket to use for sending the message.\\u000a    msg_list : list\\u000a        The serialized list of messages to send. This only includes the\\u000a        [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of\\u000a        the message.\\u000a    ident : ident or list\\u000a        A single ident or a list of idents to use in sending.\\u000a    '

    
to_send
 = []

    
if isinstance(identbytes):
        
ident
 = [ident]

    
if (ident is not None):
        
to_send
.extend
(ident)

    
to_send
.append
(DELIM)

    
to_send
.append
(self.sign(msg_list))

    
to_send
.extend
(msg_list)

    
stream
.send_multipart
(msg_list → to_send flags, copy=copy)



def env_var(keydefault):
    
'Retrieves env vars and makes Python boolean replacements'

    
val
 = os.environ.get(keydefault)

    
if (val == 'True'):
        
val
 = True

    
elif (val == 'False'):
        
val
 = False

    
elif (val == 'None'):
        
val
 = None

    
return val

BASE_LAYER_DIR
 = os.path.join(BASE_DIR'baselayers')

BASE_LAYERS
 = {}

for filename in os.listdir(BASE_LAYER_DIR):
    
(baseext)
 = os.path.splitext(filename)

    
BASE_LAYERS
[base → filename ]
 = base → filename 



def do(self):
    
if ('authorized_file' in self.conf):
        
self
.enable_key
(self.conf.authorized_fileself.conf.key)

    
else:
        
authorized_files
 = list_authorized_files → self. .get_authorized_files(exclude_users=self.conf.exclude_users)

        
for (userauthorized_file) in authorized_files:
            
self
.enable_key
(authorized_fileconf → self. .key)



def get_samples(selfgtslblssplittermodel):
    
'Hack...'

    
model
 = (model or self.model)

    
if (model == 'auto_rec'):
        
ref
 = splitter.split(gts[0])[0]

        
return [l.split('('1)[0] for (gtl) in it.izip(gtslbls) if (not (ref in splitter.split(gt)))]

    
elif (model == 'auto_dom'):
        
return [l.split('('1)[0] for l in lbls if (';affected' in lbls → l )]

    
elif (model == 'de_novo'):
        
parents
 = set((splitter.split(gts[0]) + splitter.split(gts[1])))

        
return [l.split('('1)[0] for (gtl) in it.izip(gts[2:]lbls[2:]) if (set(splitter.split(gt)) - parents)]

    
elif (model == 'mendel_violations'):
        
(momdad)
 = (set(splitter.split(gts[0]))set(splitter.split(gts[1])))

        
samps
 = self.get_samples(gtslbls, model='de_novo')

        
samps
 += [l.split('('1)[0] for (gtl) in it.izip(gts[2:]lbls[2:]) if (not ((set(splitter.split(gt)) - mom) and (set(splitter.split(gt)) - dad)))]

        
return samps



def package_to_path(packagemodule):
    
if (package == module):
        
return '%{pypi_name}'

    
else:
        
return package → module 



def get_args_from_msg_body(selfbody):
    
'Parse message body and return a list which can be used as *args parameter for this command'

    
fun_spec
 = self.fun_spec

    
last_pos
 = ((fun_spec.args_count + fun_spec.kwargs_count) - 1)

    
if ((last_pos < 0) and (not fun_spec.star_args)):
        
return []

    
params
 = shlex.split(body)[1:]

    
params_count
 = len(params)

    
if fun_spec.args_count:
        
if ((params_count < fun_spec.args_count) or (not all(params[:fun_spec.args_count]))):
            
raise MissingParameter

    
if (fun_spec.star_args or (last_pos >  → >=, pred: < params_count)):
        
return params

    
else:
        
return (params[:last_pos] + [' '.join(params[last_pos:])])



def save(selffilenamefhmodeupdate_normals):
    
'Save the STL to a (binary) file\\u000a    If mode is :py:data:`AUTOMATIC` an :py:data:`ASCII` file will be\\u000a    written if the output is a TTY and a :py:data:`BINARY` file otherwise.\\u000a    :param str filename: The file to load\\u000a    :param file fh: The file handle to open\\u000a    :param int mode: The mode to write, default is :py:data:`AUTOMATIC`.\\u000a    :param bool update_normals: Whether to update the normals\\u000a    '

    
assert filename'Filename is required for the STL headers'

    
if update_normals:
        
self
.update_normals
()

    
if (mode is AUTOMATIC):
        
if (fh and os.isatty(fh.fileno())):
            
write
 = self._write_ascii

        
else:
            
write
 = self._write_binary

    
elif (mode is BINARY):
        
write
 = self._write_binary

    
elif (mode is ASCII):
        
write
 = self._write_ascii

    
else:
        
raise ValueError(('Mode %r is invalid' % mode))

    
name
 = os.path.split(filename)[-1]

    
try:
        
if fh:
            
write
(fhname)

        
else:
            
with open(name → filename 'wb') as fh:
                
write
(fhfilename → name )

    
except IOError:
        
pass



def clean_birthday(self):
    
birthday
 = self.cleaned_data['birthday']

    
if (birthday <  → > date.today()):
        
raise forms.ValidationError('You cannot be born in the future.')

    
return birthday



def _make_callable(selfsteps):
    
if hasattr(steps'__call__'):
        
return steps

    
num_steps
 = (((self.n + self.order) - 1) + (10 * int((steps is not  → is None))))

    
return StepsGenerator(base_step=steps, step_ratio=(4.0 ** (1.0 / self.n)), num_steps=num_steps)



def _build_fpm_cmd_string(self, **kwargs):
    
self
.command
 = self.command.format(self.nameself.input_typeself.output_type)

    
if (kwargs → self. ['version'] is not None):
        
self
.command
 += '-v {} '.format(kwargs['version'])

    
if kwargs['chdir']:
        
self
.command
 += '-C {} '.format(kwargs['chdir'])

    
if kwargs['depends']:
        
self
.command
 += ('-d ' + ' -d '.join(kwargs['depends']))

    
if kwargs['force']:
        
self
.command
 += '-f '

    
if (kwargs['after_install'] is not None):
        
self
.command
 += '--after-install {} '.format(((os.getcwd() + '/') + kwargs['after_install']))

    
if (kwargs['before_install'] is not None):
        
self
.command
 += '--before-install {} '.format(((os.getcwd() + '/') + kwargs['before_install']))

    
self
.command
 += self.source

    
lgr
.debug
('fpm cmd is: {}'.format(command → self. ))



def inject_parameter_values(bricksparam_values):
    
'Inject parameter values into a bricks hierarchy.\\u000a    Parameters\\u000a    ----------\\u000a    bricks : Brick or Selector\\u000a        The top bricks.\\u000a    param_values : dict of (parameter name, numpy array) pairs\\u000a        The parameter values.\\u000a    '

    
if isinstance(bricksBrick):
        
bricks
 = Selector([bricks])

    
if (not isinstance(bricksSelector)):
        
raise ValueError

    
for (namevalue) in param_values.items():
        
selected
 = bricks.select(name)

        
if (len(selected) ==  → != 0):
            
logger
.error
('Unknown parameter {}'.format(name))

        
if (not (len(selected) == 1)):
            
raise ValueError

        
selected
 = selected[0]

        
assert (selected.get_value(borrow=True, return_internal_type=True).shape == value.shape)

        
selected
.set_value
(value)

    
params
 = bricks.get_params()

    
for name in params.keys():
        
if (name not in params → param_values ):
            
logger
.error
('No value is provided for the parameter {}'.format(name))



def save(self, **kwargs):
    
' If Listing is created, we create HitCount object '

    
if (not self.slug):
        
self
.slug
 = self.publishable.slug

    
if self.pk:
        
old_self
 = Placement.objects.get(pk=self.pk)

        
old_path
 = old_self.get_absolute_url()

        
new_path
 = self.get_absolute_url()

        
if ((old_path != new_path) and new_path):
            
(redirectcreated)
 = Redirect.objects.get_or_create(old_path=old_path, site=self.category.site)

            
redirect
.new_path
 = new_path

            
redirect
.save
(force_update=True)

            
Redirect
.objects
.filter
(new_path=old_path)
.exclude
(pk=redirect.pk)
.update
(new_path=new_path)

    
super
(Placementself)
.save
(**kwargs)

    
(hccreated)
 = HitCount.objects.get_or_create(placement=self, defaults={'hits'0})

    
if (self.publishable.publish_from !=  → >, pred: == self.publish_from):
        
Publishable
.objects
.filter
(pk=self.publishable_id)
.update
(publish_from=self.publish_from)



def check(rawlengthfreqmin_typeslevel):
    
'Check the safety level of the password.\\u000a    :param raw: raw text password.\\u000a    :param length: minimal length of the password.\\u000a    :param freq: minimum frequency.\\u000a    :param min_types: minimum character family.\\u000a    :param level: minimum level to validate a password.\\u000a    '

    
raw
 = to_unicode(raw)

    
if (level > STRONG):
        
level
 = STRONG

    
if (len(raw) < length):
        
return Strength(False'terrible''password is too short')

    
if (is_asdf(raw) or is_by_step(raw)):
        
return Strength(False'simple''password has a pattern')

    
if is_common_password(raw, freq=freq):
        
return Strength(False'simple''password is too common')

    
types
 = 0

    
if LOWER.search(raw):
        
types
 += 1

    
if UPPER.search(raw):
        
types
 += 1

    
if NUMBER.search(raw):
        
types
 += 1

    
if MARKS.search(raw):
        
types
 += 1

    
if ((len(raw) < 8) and (types <  → == 2)):
        
return Strength((level <= SIMPLE)'simple''password is too simple')

    
if (types <=  → < min_types):
        
return Strength((level <= MEDIUM)'medium''password is good enough, but not strong')



def parse_status_and_dispatch(selfstream_data):
    
'\\u000a    Process an incoming status message.\\u000a    '

    
status
 = tweepy.models.Status.parse(self.apijson.loads(stream_data))

    
if self.tweet_matchp(status):
        
self
.status_count
 += 1

        
if self.should_stop():
            
self
.running
 = False

            
return False

        
if self.opts.fields:
            
try:
                
csvrow
 = []

                
for f in self.opts.fields:
                    
try:
                        
value
 = utils.resolve_with_default(statusfNone)

                    
except AttributeError:
                        
if opts → self. .terminate_on_error:
                            
self
.logger
.error
(("Field '%s' not found in tweet id=%s, terminating." % (fstatus.id_str)))

                            
self
.running
 = False

                            
return False

                        
else:
                            
value
 = MISSING_FIELD_VALUE

                    
try:
                        
value
 = value.encode('utf8')

                    
except AttributeError:
                        
pass

                    
csvrow
.append
(value)

                
self
.csv_writer
.writerow
(csvrow)

            
except UnicodeEncodeError as e:
                
self
.logger
.warn
(f, exc_info=e)

                
pass

        
else:
            
print stream_data.strip()



def see(clinickrest):
    
if (var.PHASE in ('none''join')):
        
cli
.notice
(nick'No game is currently running.')

        
return

    
elif (nick not in var.list_players()):
        
cli
.notice
(nick"You're not currently playing.")

        
return

    
if (not var.is_role(nick'seer')):
        
cli
.msg
(nick'Only a seer may use this command')

        
return

    
if (var.PHASE !=  → == 'night'):
        
cli
.msg
(nick'You may only have visions at night.')

        
return

    
if (nick in var.SEEN):
        
cli
.msg
(nick'You may only have one vision per round.')

        
return

    
victim
 = re.split(' +'rest)[0].strip().lower()

    
pl
 = var.list_players()

    
pll
 = [x.lower() for x in pl]

    
if (not victim):
        
cli
.msg
(nick'Not enough parameters')

        
return

    
if (victim not in pll):
        
cli
.msg
(nick'\\\\u0002{0}\\\\u0002 is currently not playing.'.format(victim))

        
return

    
victim
 = pl[pll.index(victim)]

    
if (nick → victim  in var.CURSED):
        
role
 = 'wolf'

    
elif (var.get_role(victim) == 'traitor'):
        
role
 = 'villager'

    
else:
        
role
 = var.get_role(victim)

    
cli
.msg
(nick(('You have a vision; in this vision, ' + 'you see that \\\\u0002{0}\\\\u0002 is a ') + '\\\\u0002{1}\\\\u0002!').format(victimrole))

    
var
.SEEN
.append
(nick)

    
chk_nightdone
(cli)



def is_future_published(self):
    
return (self.is_published and (self.published is not None) and (self.published <=  → >, pred: >= timezone.now()))



def _fetch_metric(ccmetrictype_cfdtr):
    
dirname
 = os.path.join(RRD_PATHmetric)

    
if (type_ is None):
        
rrdname
 = os.listdir(dirname)[0]

        
type_
 = rrdname.replace('.rrd''')

    
else:
        
rrdname
 = (type_ + '.rrd')

        
if (rrdname not in os.listdir(dirname)):
            
raise RuntimeError('Unknown metric type')

    
cc
.flush
(identifier=('minemeld/%s/%s' % (metrictype_)))

    
LOG
.debug
('rrd file: %s'str(os.path.join(dirnamerrdname)))

    
((startendstep)metricsdata)
 = rrdtool.fetch(str(os.path.join(dirnamerrdname))cf'--start'('-%d' % dt)'--resolution'('%d' % r))

    
result
 = []

    
if (type_ !=  → == 'minemeld_delta'):
        
curts
 = start

        
for v in data:
            
result
.append
([curtsv[0]])

            
curts
 += step

    
else:
        
curts
 = (start + step)

        
ov
 = data[0][0]

        
for v in data[1:]:
            
cv
 = v[0]

            
if ((cv is not None) and (ov is not None)):
                
if (cv >  → >= ov):
                    
cv
 = (cv - ov)

            
result
.append
([curtscv])



def for_instance(selfinstance):
    
field
 = copy.copy(self)

    
for (nameattr) in self.arguments.items():
        
value
 = getattr(selfname)

        
if (attr.resolve_field and isinstance(valueField)):
            
setattr
(fieldnamegetattr(instanceattr → value .name))

        
elif hasattr(value'__call__'):
            
setattr
(fieldnamevalue(instance → value ))

    
field
.instance
 = instance

    
return field



def boxplot(*args, **kwargs):
    
'\\u000a    Create a box-and-whisker plot showing the mean, 25th percentile, and 75th\\u000a    percentile. The difference from matplotlib is only the left axis line is\\u000a    shown, and ticklabels labeling each category of data can be added.\\u000a    @param ax:\\u000a    @param x:\\u000a    @param kwargs: Besides xticklabels, which is a prettyplotlib-specific\\u000a    argument which will label each individual boxplot, any argument for\\u000a    matplotlib.pyplot.boxplot will be accepted:\\u000a    http://matplotlib.org/api/axes_api.html#matplotlib.axes.Axes.boxplot\\u000a    @return:\\u000a    '

    
(axargskwargs)
 = maybe_get_ax(*args, **kwargs)

    
xticklabels
 = kwargs.pop('xticklabels'None)

    
fontsize
 = kwargs.pop('fontsize'10)

    
kwargs
.setdefault
('widths'0.15)

    
bp
 = ax.boxplot(*args, **kwargs)

    
if xticklabels:
        
ax
.xaxis
.set_ticklabels
(xticklabels, fontsize=fontsize)

    
show_caps
 = kwargs.pop('show_caps'True)

    
show_ticks
 = kwargs.pop('show_ticks'False)

    
remove_chartjunk
(ax['top''right''bottom'], show_ticks=show_ticks)

    
linewidth
 = 0.75

    
blue
 = colors.set1[1]

    
red
 = colors.set1[0]

    
plt
.setp
(bp['boxes'], color=blue, linewidth=linewidth)

    
plt
.setp
(bp['medians'], color=red)

    
plt
.setp
(bp['whiskers'], color=blue, linestyle='solid', linewidth=linewidth)

    
plt
.setp
(bp['fliers'], color=blue → red )

    
if show_caps:
        
plt
.setp
(bp['caps'], color=blue, linewidth=linewidth)

    
else:
        
plt
.setp
(bp['caps'], color='none')

    
ax
.spines
['left']
._linewidth
 = 0.5

    
return ax → bp 



def set_hostname(filename):
    
api_ver
 = '2008-02-01'

    
metadata
 = None

    
base_url
 = ('http://169.254.169.254/%s/meta-data' % api_ver)

    
my_hostname
 = urllib.urlopen(('%s/local-hostname/' % base_url)).read()

    
os
.system
(('hostname %s' % my_hostname))

    
mp
 = {'hostname'my_hostname}

    
t
 = Template(file='/etc/ec2-init/templates/hosts.tmpl', searchList=[mp])

    
os
.system
('rm  /etc/hosts')

    
f
 = open('/etc/hosts''w')

    
f
.write
(('%s' % t))

    
f
.close
()

    
os
.system
('rm  /etc/hostname')

    
f
 = open('/etc/hostname''w')

    
f
.write
(('%s' % t → my_hostname, pred: api_ver ))

    
f
.close
()



def __getitem__(selfitem):
    
try:
        
(offsetthings)
 = next(((kv) for (kv) in self.offsets if (item → v  >  → >= k)))

        
return things[(item - offset)]

    
except StopIteration:
        
raise IndexError(item)



def __init__(selfis_deprecated):
    
'\\u000a    Constructor, set deprecation on member methods.\\u000a    '

    
deprecation
 = is_deprecated

    
if (not deprecation → is_deprecated ):
        
if hasattr(self'deprecated'):
            
deprecation
 = self.deprecated

    
if deprecation:
        
for method_name in response_codes → self. .keys():
            
if hasattr(selfmethod_name):
                
getattr
(selfmethod_name)
.is_deprecated
 = deprecation



def __init__(selfssh_private_keylogin_as):
    
if (ssh_private_key is None):
        
ssh_private_key
 = join(getenv('HOME')'.ssh''id_rsa')

    
self
.ssh_private_key
 = ssh_private_key

    
self
.ssh_public_key
 = (ssh_private_key + '.pub')

    
key_hash
 = hashlib.md5()

    
key_hash
.update
(open(self.ssh_public_key).read())

    
self
.ssh_key_name
 = key_hash.hexdigest()

    
self
.login_as
 = login_as

    
try:
        
provider
 = getattr(Provider → self. self.PROVIDER_NAME)

    
except AttributeError:
        
raise Exception('Unknown cloud %s' % PROVIDER_NAME → self. )



def dump(symboldaytickstime_frame):
    
file_name
 = TEMPLATE_FILE_NAME.format(symbolday.yearday.monthday.day)

    
Logger
.info
('Writing {0}'.format(file_name))

    
with open(file_name'w') as csv_file:
        
writer
 = csv.DictWriter(csv_file, fieldnames=get_header(time_frame))

        
writer
.writeheader
()

        
previous_key
 = None

        
current_ticks
 = []

        
for tick in ticks:
            
if (time_frame == TimeFrame.TICK):
                
write_tick
(writertick)

            
else:
                
ts
 = time.mktime(tick[0].timetuple())

                
key
 = int((ts - (ts % time_frame)))

                
if ((previous_key != key) and (previous_key is not None)):
                    
write_candle
(writerCandle(symbolprevious_keytime_framecurrent_ticks → key ))

                    
current_ticks
 = []

                
current_ticks
.append
(tick[1])

                
previous_key
 = key

        
if (time_frame != TimeFrame.TICK):
            
write_candle
(writerCandle(symbolprevious_keytime_frameticks → current_ticks ))

    
Logger
.info
('{0} completed'.format(file_name))



def get_object(selfqueryset):
    
item
 = super(ConceptHistoryCompareViewself).get_object(queryset)

    
if (not user_can_view(self.request.useritem)):
        
if request → self. .user.is_anonymous():
            
return redirect((reverse('friendly_login') + ('?next=%s' % request.path)))

        
else:
            
raise PermissionDenied

    
self
.model
 = item.item.__class__

    
return item



def start_exp():
    
'\\u000a    Serves up the experiment applet.\\u000a    '

    
if (not (request.args.has_key('hitId') and request.args.has_key('assignmentId') and request.args.has_key('workerId'))):
        
raise ExperimentError('hit_assign_worker_id_not_set_in_exp')

    
hitId
 = request.args['hitId']

    
assignmentId
 = request.args['assignmentId']

    
workerId
 = request.args['workerId']

    
app
.logger
.info
(('Accessing /exp: %(h)s %(a)s %(w)s ' % {'h'hitId'a'assignmentId'w'workerId}))

    
if (hitId[:5] !=  → == 'debug'):
        
debug_mode
 = True

    
else:
        
debug_mode
 = False



def _get_assign_names(targetsload_namesstore_names):
    
for target in targets:
        
orig_target
 = target

        
target
 = _get_ast_name_node(target)

        
if (isinstance(targetast.Name) and isinstance(target.ctxast.Store)):
            
store_names
.add
(target.id)

        
elif (isinstance(targetast.Name) and isinstance(target.ctxast.Load)):
            
load_names → store_names 
.add
(target.id)

        
elif isinstance(targetast.Tuple):
            
_get_assign_names
(target.eltsload_namesstore_names)

        
elif isinstance(targetast.Constant):
            
raise ComplexAssignment(orig_target)

        
elif isinstance(target(ast.Dictast.List)):
            
pass

        
elif isinstance(targetast.Call):
            
raise ComplexAssignment(orig_target)

        
else:
            
raise Exception(('unsupported assign target: %s' % ast.dump(orig_target → target )))



def _buildMetadata(self):
    
' set up capabilities metadata objects '

    
serviceelem
 = self._capabilities.find('Service')

    
self
.identification
 = ServiceIdentification(serviceelemself.version)

    
self
.provider
 = ServiceProvider(serviceelem)

    
self
.operations
 = []

    
for elem in self._capabilities.find('Capability/Request')[:]:
        
self
.operations
.append
(OperationMetadata(elem))

    
self
.contents
 = {}

    
caps
 = self._capabilities.find('Capability')

    
for elem in caps.findall('Layer'):
        
cm
 = ContentMetadata(elem)

        
self
.contents
[cm.id]
 = cm

        
for subelem in elem.findall('Layer'):
            
subcm
 = ContentMetadata(subelemcm)

            
self
.contents
[subcm.id]
 = subcm

            
for subsubelem in subelem.findall('Layer'):
                
subsubcm
 = ContentMetadata(subsubelemcm → subcm )

                
self
.contents
[subsubcm.id]
 = subsubcm

    
self
.exceptions
 = [f.text for f in self._capabilities.findall('Capability/Exception/Format')]



def fillsrc(self):
    
'\\u000a    Method to fill the src_from in case it must be logical\\u000a    '

    
if (self.src_from is None):
        
return False

    
if (type(self.src_from) is list):
        
osf
 = []

        
for s in src_from → self. :
            
if ((type(s) is dict) and ('version' not in s) and ('suffix' in s)):
                
s
['version']
 = self.version

            
if ((type(s) is dict) and ('filename' not in s)):
                
s
['filename']
 = self.cname

            
osf
.append
(s)

        
self
.src_from
 = [fromKPMGRepo(**s) if (type(s) is dict) else s for s in osf]

    
elif (type(self.src_from) is dict):
        
if (('version' not in self.src_from) and ('suffix' in self.src_from)):
            
self
.src_from
['version']
 = self.version

        
if ('filename' not in self.src_from):
            
self
.src_from
['filename']
 = self.cname

        
self
.src_from
 = fromKPMGRepo(**self.src_from)

    
return True



def is_alive(self):
    
"\\u000a    Calls the subprocess 'poll' method on our popen object to see if it is\\u000a    still around. This needs to be called before any attempt to establish\\u000a    communication with the subprocess. If it returns false then 'start()'\\u000a    must be called before attempting to talk to the subprocess.\\u000a    "

    
if (self.subprocess is None):
        
return False

    
self
.rc
 = self.subprocess.poll()

    
if (self.rc is  → is not None):
        
return True

    
self
.subprocess
 = None

    
if (self.rc != 0):
        
self
.log
.error
(('Subprocess had non-zero return code: %d' % rc → self. ))

    
return False



def getRandomQuery(n):
    
a
 = np.random.choice(n)

    
while True:
        
b
 = np.random.choice(n)

        
if (a ==  → !=, pred: < b):
            
return [ab]



def prepare_cards(selfevaluatorevaluatedvar):
    
input_repr
 = repr(evaluated)

    
result
 = [{'title''SymPy''input'input_repr'output'mathjax_latex(evaluated)}]

    
(convert_inputcards)
 = find_result_set(evaluated)

    
(input_evaluatedvar)
 = convert_input(evaluatedvar)

    
evaluator
.set
('input_evaluated'input_evaluated)

    
if (var != None):
        
input_repr
 = repr(input_evaluated)

        
line
 = 'simplify(input_evaluated)'

        
simplified
 = evaluator.eval(line, use_none_for_exceptions=True)

        
r
 = sympify(evaluator.eval(line, use_none_for_exceptions=True))

        
if ((simplified != 'None') and (simplified != input_repr)):
            
result
.append
({'title''Simplification''input'simplified'output'mathjax_latex(r)})

        
for card_name in cards:
            
card
 = get_card → self. (card_name)

            
if (not card):
                
continue

            
try:
                
result
.append
({'card'card_name'var'repr(var)'title'card.format_title(input_repr → input_evaluated )'input'card.format_input(input_reprvar)'pre_output'latex(card.pre_output_function(input_reprvar))'parameters'card.card_info.get('parameters'[])})

            
except (SyntaxErrorValueError) as e:
                
pass

    
return result



def is_installed(model_class):
    
'\\u000a    Returns True if a model_class is installed.\\u000a    model_class._meta.installed is only reliable in Django 1.7+\\u000a    '

    
if (django.VERSION[:2] >  → >= (17)):
        
return model_class._meta.installed

    
return (re.sub('\\\\.models.*$'''model_class.__module__) in settings.INSTALLED_APPS)



def buildProtocol(selfaddr):
    
log
.debug
(('%s: New connection.' % self.name))

    
circuit
 = network.Circuit(self.transport_class())

    
return OBFSSOCKSv5Protocol(circuitpt_config → self. )



def sendfile(fdoutfdinoffsetnbytes):
    
if (sys.platform == 'darwin'):
        
_sendfile
.argtypes
 = [ctypes.c_intctypes.c_intctypes.c_uint64ctypes.POINTER(ctypes.c_uint64)ctypes.c_voidpctypes.c_int]

        
_nbytes
 = ctypes.c_uint64(nbytes → _nbytes )

        
result
 = _sendfile(fdinfdoutoffset_nbytesNone0)

        
if (result == -1):
            
e
 = ctypes.get_errno()

            
if ((e == errno.EAGAIN) and _nbytes.value):
                
return nbytes → _nbytes .value

            
raise OSError(eos.strerror(e))

        
return _nbytes.value

    
elif (sys.platform in ('freebsd''dragonfly')):
        
_sendfile
.argtypes
 = [ctypes.c_intctypes.c_intctypes.c_uint64ctypes.c_uint64ctypes.c_voidpctypes.POINTER(ctypes.c_uint64)ctypes.c_int]

        
_sbytes
 = ctypes.c_uint64()

        
result
 = _sendfile(fdinfdoutoffsetnbytesNone_sbytes0)

        
if (result == -1):
            
e
 = ctypes.get_errno()

            
if ((e == errno.EAGAIN) and _sbytes.value):
                
return _sbytes.value

            
raise OSError(eos.strerror(e))

        
return _sbytes.value



def triggerAction(selfactionmain):
    
value
 = 0

    
no
 = False

    
up
 = False

    
my_min
 = self.minimum()

    
my_max
 = self.maximum()

    
altControl
 = QxtSpanSlider.LowerHandle

    
if (self.mainControl == QxtSpanSlider.LowerHandle):
        
altControl
 = QxtSpanSlider.UpperHandle

    
self
.blockTracking
 = True

    
isUpperHandle
 = ((main and (self.mainControl == QxtSpanSlider.UpperHandle)) or ((not main) and (altControl == QxtSpanSlider.UpperHandle)))

    
if (action == QAbstractSlider.SliderSingleStepAdd):
        
if isUpperHandle:
            
value
 = clamp((self.upper + self.singleStep())my_minmy_max)

            
up
 = True

        
else:
            
value
 = clamp((lower → self.  + self.singleStep())my_minmy_max)

    
elif (action == QAbstractSlider.SliderSingleStepSub):
        
if isUpperHandle:
            
value
 = clamp((self.upper - self.singleStep())my_minmy_max)

            
up
 = True

        
else:
            
value
 = clamp((self.lower - self.singleStep())my_minmy_max)

    
elif (action == QAbstractSlider.SliderToMinimum):
        
value
 = my_min

        
if isUpperHandle:
            
up
 = True

    
elif (action == QAbstractSlider.SliderToMaximum):
        
value
 = my_max

        
if isUpperHandle:
            
up
 = True

    
elif (action == QAbstractSlider.SliderMove):
        
if isUpperHandle:
            
up
 = True

        
no
 = True

    
elif (action == QAbstractSlider.SliderNoAction):
        
no
 = True



def get(selfsites):
    
data
 = self.http.request('get'self.url).text

    
match
 = re.search('src=(\\\\"|\\\\\')(http://www.svt.se/wd[^\\\\\'\\\\"]+)(\\\\"|\\\\\')'data)

    
stream
 = None

    
if match:
        
url
 = match.group(2)

        
for i in sites:
            
if i.handles(url):
                
url
 = url.replace('&amp;''&').replace('&#038;''&')

                
return (urli(url))

    
match
 = re.search('src=\\\\"(http://player.vimeo.com/video/[0-9]+)\\\\" 'data)

    
if match:
        
for i in sites:
            
if i.handles(match.group(1)):
                
return (match.group(1)i(url))

    
match
 = re.search('tv4play.se/iframe/video/(\\\\d+)?'data)

    
if match:
        
url
 = ('http://www.tv4play.se/?video_id=%s' % match.group(1))

        
for i in sites:
            
if i.handles(url):
                
return (urli(url))

    
match
 = re.search('embed.bambuser.com/broadcast/(\\\\d+)'data)

    
if match:
        
url
 = ('http://bambuser.com/v/%s' % match.group(1))

        
for i in sites:
            
if i.handles(url):
                
return (urli(url))

    
match
 = re.search('src="(http://tv.aftonbladet[^"]*)"'data)

    
if match:
        
url
 = match.group(1)

        
for i in sites:
            
if i.handles(url):
                
return (urli(url))

    
match
 = re.search('a href="(http://tv.aftonbladet[^"]*)" class="abVi'data)

    
if match:
        
url
 = match.group(1)

        
for i in sites:
            
if i.handles(url):
                
return (urli(url))

    
match
 = re.search("iframe src='(http://www.svtplay[^']*)'"data)

    
if match:
        
url
 = match.group(1)

        
for i in sites:
            
if i.handles(url):
                
return (urli(url))

    
return (url → self., pred: sites stream)



def stop(selfnow):
    
body
 = {'kind''DeleteOptions''apiVersion''v1''gracePeriodSeconds'0}

    
yield self.httpclient.fetch(self.request(url=k8s_url(self.namespace'pods'self.pod_name), method='DELETE', body=json.dumps(body), headers={'Content-Type''application/json'}, allow_nonstandard_methods=True))

    
if (not now):
        
while True:
            
data
 = yield self.get_pod_info(self.pod_name)

            
if (data is not  → is None):
                
break

            
time
.sleep
(5)