This is part 2/10 of the system predictions for the Real-Bug Test set. This system achieves 41% Repair Accuracy and 54% Location Accuracy.

Annotations:
The part before the arrow ('foo') is what the system actually saw at test time, and the part after the arrow is the ground-truth repair; where the system's own prediction differs from the ground truth, it is shown alongside as 'pred: <token>'. Other candidate repair locations which the system could have chosen are marked in color. For clarity the actual labels for those locations are not shown.
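For example, in the first snippet below, documents.append(i → topic_id) means the code seen at test time was documents.append(i) and the ground-truth repair replaces the argument with topic_id. A line such as if (ret == → <=, pred: != 0): additionally records that the system predicted != where the ground-truth repair is <=.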


def documents_per_topic(self):
    topic_associations = {}
    for i in range(self.corpus.size):
        topic_id = self.most_likely_topic_for_document(i)
        if topic_associations.get(topic_id):
            documents = topic_associations[topic_id]
            documents.append(i → topic_id)
            topic_associations[topic_id] = documents
        else:
            documents = [topic_id → i]
            topic_associations[topic_id] = documents
    return topic_associations



def __init__(self, key, ocrasuite_description):
    self.key = key
    self.ocrasuite = str2ocrasuite(ocrasuite_description)
    if (not ocrasuite → self. .data_input.Q):
        raise ValueError('Ocrasuite must have a Q descriptor')



def __init__(self, config, location, **kwargs):
    self.logger = get_logger(self)
    self.mount_config = config
    self.logger.debug('Mount config: {}'.format(config))
    self.config = config
    self.commands = None
    if (len(kwargs.get('commands', [])) > → == 0):
        self.commands = self._setup_commands(kwargs → self. ['commands'])



def confirm_registration_view(context, request):
    form = Form(Confirm(), buttons=('confirm',))
    pending = request.registry.queryAdapter(context, IRegistrations, name='pending')
    if (pending is None):
        pending = PendingRegistrations(context)
    if ('confirm' in request.POST):
        try:
            appstruct = form.validate(request.POST.items())
        except ValidationFailure as e:
            rendered_form = e.render()



def repr(self, client, changed_widgets):
    '''It is used to automatically represent the object to HTML format
    packs all the attributes, children and so on.
    Args:
        client (App): The client instance.
        changed_widgets (dict): A dictionary containing a collection of tags that have to be updated.
            The tag that have to be updated is the key, and the value is its textual repr.
    '''
    local_changed_widgets = {}
    innerHTML = ''
    for s in self._render_children_list:
        if isinstance(s, type('')):
            innerHTML = (innerHTML + s)
        elif isinstance(s, type(u'')):
            innerHTML = (innerHTML + s.encode('utf-8'))
        else:
            innerHTML = (innerHTML + s.repr(client → s , local_changed_widgets))
    if (self._ischanged() or (len(changed_widgets → local_changed_widgets ) > 0)):
        self.attributes['style'] = jsonize(self.style)
        self._backup_repr = ('<%s %s>%s</%s>' % (self.type, ' '.join((('%s="%s"' % (k, v)) if (v is not None) else k for (k, v) in self.attributes.items())), innerHTML, self.type))
    if self._ischanged():
        changed_widgets[self] = self._backup_repr
        self._set_updated()
    else:
        changed_widgets.update(local_changed_widgets)
    return self._backup_repr



def import_pip(name, package, default):
    package = (package or name)
    try:
        module = __import__(name → package )
    except:
        try:
            import pip
        except:
            return default
        try:
            pip.main(['install', name])
        except:
            return default
        try:
            module = __import__(name → package )
        except:
            return default
    return module



def run(self):
    message_buffer = ''
    while True:
        message_buffer += self.read()
        while True:
            (message, message_buffer, byte_count) = self._parse_message(message_buffer)
            if (message is not None):
                self.bytes_received += byte_count
                if (callback → self.  is not None):
                    self.callback(message)
            else:
                break



def conquerable_stations(self):
    api_result = api → self. .get('eve/ConquerableStationlist')
    results = {}
    rowset = api_result.find('rowset')
    for row in rowset.findall('row'):
        station = {'id': int(row.attrib['stationID']), 'name': row.attrib['stationName'], 'type_id': int(row.attrib['stationTypeID']), 'system_id': int(row.attrib['solarSystemID']), 'corp': {'id': int(row.attrib['corporationID']), 'name': row.attrib['corporationName']}}
        results[station['id']] = station
    return results



def write_bytecode(self, f):
    'Dump the bytecode into the file or file like object passed.'
    if (self.code is None):
        raise TypeError("can't write empty bucket")
    f.write(bc_magic)
    pickle.dump(self.checksum, f, 2)
    marshal_dump(code → self. , f)



def run(self):
    po_files = []
    mo_files = []
    if (not self.input_file):
        if self.locale:
            po_files.append(os.path.join(self.directory, self.locale, 'LC_MESSAGES', (self.domain + '.po')))
            mo_files.append(os.path.join(self.directory, self.locale, 'LC_MESSAGES', (self.domain + '.mo')))
        else:
            for locale in os.listdir(self.directory):
                po_file = os.path.join(self.directory, locale, 'LC_MESSAGES', (self.domain + '.po'))
                if os.path.exists(po_file):
                    po_files.append(po_file)
                    mo_files.append(os.path.join(self.directory, locale, 'LC_MESSAGES', (self.domain + '.mo')))
    else:
        po_files.append(self.output_file)
        if self.output_file:
            mo_files → po_files .append(self.output_file)
        else:
            mo_files.append(os.path.join(self.directory, locale → self. , 'LC_MESSAGES', (self.domain + '.mo')))



def __call__(self, task, return_val):
    '''
    FOr now, assume mutex is held by caller
    '''
    logging.debug(('%s finished' % repr(task)))
    if (return_val is None):
        task._state = T_ERROR
        raise Exception('Got None return value')
    if (len(task._outputs) == 1):
        task._state = T_DONE_SUCCESS
        task._outputs[0]._set(return_val)
    else:
        try:
            return_vals = tuple(return_val)
        except TypeError:
            task._state = T_ERROR
            raise Exception(('Expected tuple or list of length %d as output, but got something not iterable' % len(task._outputs)))
        if (len(return_vals) != len(task._outputs)):
            task._state = T_ERROR
            raise Exception(('Expected tuple or list of length %d as output, but got something of length' % (len(task._outputs), len(return_vals))))
        task._state = T_DONE_SUCCESS
        for (val, chan) in zip(return_vals, task._outputs):
            chan._set(val)
    if (self.contstack is → is not None):
        resume_continuation(self.contstack)



def _cache(self, func, memory_level, **kwargs):
    ''' Return a joblib.Memory object.
    The memory_level determines the level above which the wrapped
    function output is cached. By specifying a numeric value for
    this level, the user can to control the amount of cache memory
    used. This function will cache the function call or not
    depending on the cache level.
    Parameters
    ----------
    func: function
        The function the output of which is to be cached.
    memory_level: int
        The memory_level from which caching must be enabled for the wrapped
        function.
    Returns
    -------
    mem: joblib.Memory
        object that wraps the function func. This object may be
        a no-op, if the requested level is lower than the value given
        to _cache()). For consistency, a joblib.Memory object is always
        returned.
    '''
    if (not hasattr(self, 'memory_level')):
        self.memory_level = 0
    if (not hasattr(self, 'memory')):
        self.memory = Memory(cachedir=None)
    if (self.memory_level == 0):
        if (isinstance(self.memory, basestring) or (self.memory.cachedir is not → is None)):
            warnings.warn('memory_level is currently set to 0 but a Memory object has been provided. Setting memory_level to 1.')
            self.memory_level = 1
    verbose = getattr(self, 'verbose', 0)
    if (self.memory_level <= → < memory_level):
        mem = Memory(cachedir=None, verbose=verbose)
        return mem.cache(func, **kwargs)
    else:
        memory = self.memory
        if isinstance(memory, basestring):
            memory = Memory(cachedir=memory, verbose=verbose)
        if (not isinstance(memory, memory_classes)):
            raise TypeError("'memory' argument must be a string or a joblib.Memory object.")
        if (memory.cachedir is None):
            warnings.warn(('Caching has been enabled (memory_level = %d) but no Memory object or path has been provided (parameter memory). Caching deactivated for function %s.' % (self.memory_level, func.func_name)))
        return memory.cache(func, **kwargs)



def view_markup(request):
    cfg = request.cfg
    (path, rev) = _orig_path(request)
    (fp, revision) = request.repos.openfile(path, rev)
    if check_freshness(request, None, revision, weak=1):
        fp.close()
        return
    data = nav_header_data(request, revision, path)
    data.update({'mime_type': request.mime_type, 'log': None})
    if cfg → request .options.show_log_in_markup:
        options = {'svn_latest_log': 1}
        revs = request.repos.itemlog(path, rev → revision , options)
        entry = revs[-1]
        data.update({'date': make_time_string(entry.date, cfg), 'ago': None, 'author': entry.author, 'branches': None, 'tags': None, 'branch_points': None, 'changed': entry.changed, 'log': htmlify(entry.log), 'size': entry.size, 'state': None, 'vendor_branch': None, 'prev': None})



def main():
    model = json.loads(urllib2.urlopen(os.environ['MODEL']).readline().strip())
    split = float(os.environ['SPLIT']) if os.environ.has_key('SPLIT') else 0.3
    W = collections.defaultdict(float)
    for (f, w) in model['parameters'].items():
        W[f] = w
    for line in sys.stdin:
        x = json.loads(line)
        if (x.has_key('class') and (x['random_key'] > → <=, pred: < split)):
            prediction = 1 if (0.0 < sum([(W[j] * x['features'][j]) for j in x['features'].keys()])) else 0
            print ('%d\t%d' % (prediction, x['class']))



def get_template_from_request(request, page):
    '''
    Gets a valid template from different sources or falls back to the
    default template.
    '''
    page_templates = settings.get_page_templates()
    if (len(page_templates) > → == 0):
        return settings.DEFAULT_PAGE_TEMPLATE
    template = request.REQUEST.get('template', None)
    if ((template is not None) and ((template in dict(page_templates).keys()) or (template == settings.DEFAULT_PAGE_TEMPLATE))):
        return template
    if (page is not None):
        return page.get_template()
    return settings.DEFAULT_PAGE_TEMPLATE



def merge_record_fields(self, lResult, rResult):
    ' '
    tGap = (rResult['tStart'] - lResult['tEnd'])
    qGap = (rResult['qStart'] - lResult['qEnd'])
    lqEnd = lResult['qEnd']
    lResult['blockSizes'] = (str((int(rResult → lResult ['blockSizes'].rstrip(',')) - 1)) + ',')
    if (qGap < → > 0):
        rResult['blockSizes'] = (str((int(rResult['blockSizes'].rstrip(',')) + qGap)) + ',')
        rResult['matches'] += (qGap - 1)
    keys = ['matches', 'mismatches', 'repmatches', 'ncount', 'qNumInsert', 'qBaseInsert', 'tNumInsert', 'tBaseInsert']
    for key in keys:
        lResult[key] += rResult[key]
    lResult['qEnd'] = rResult['qEnd']
    lResult['tEnd'] = rResult['tEnd']
    lResult['blockCount'] += 1
    lResult['blockSizes'] += rResult['blockSizes']



def _convert_feed(self, feed, since):
    """Take the user's atom feed.
    """
    items = []
    for entry in feed:
        if (entry.has_key('public') and entry['public']):
            created = _convert_date → self. (entry)



def render(self, request, context, *args, **kwargs):
    breadcrumb = data(self, request, context, *args, **kwargs)
    if breadcrumb.parent:
        parent_data = breadcrumb.parent(self, breadcrumb.application, request, context)
        parent = (parent_data.title, parent_data.url(breadcrumb → parent_data .application))
    else:
        parent = None
    index_view = resolve(reverse(('%s:index' % breadcrumb.application)))[0]
    index = index_view.breadcrumb.data(index_view, request, context)
    index = (index → breadcrumb .title, index.url(breadcrumb.application))
    parent_is_index = (index == parent)
    return (breadcrumb.application, index, parent, parent_is_index, breadcrumb.title)



def get_environment(self, message, slot):
    project = message['_project']
    env = self.initenv.copy()
    env['SCRAPY_SLOT'] = str(slot)
    env['SCRAPY_PROJECT'] = project
    env['SCRAPY_SPIDER'] = message['_spider']
    env['SCRAPY_JOB'] = message['_job']
    if (project in self.settings):
        env['SCRAPY_SETTINGS_MODULE'] = self.settings[project]
    if self.logs_dir:
        env['SCRAPY_LOG_FILE'] = self._get_file(message, self.logs_dir, 'log')
    if self.items_dir:
        env['SCRAPY_FEED_URI'] = _get_feed_uri → self. (message, 'jl')
    return env



def get(name, import_str):
    '''
    Helper function to use inside the package.
    '''
    value = None
    try:
        value = getattr(default_settings, name)
        value = getattr(settings, name)
    except AttributeError:
        if ((value is → is not None) and (value → name  in default_settings.required_attrs)):
            raise Exception((('You must set ' + name) + ' in your settings.'))



def handle_error(self, req, client, addr, exc):
    request_start = datetime.now()
    addr = (addr or ('', -1))
    if isinstance(exc, (InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion, InvalidHeader, InvalidHeaderName, LimitRequestLine, LimitRequestHeaders, InvalidProxyLine, ForbiddenProxyRequest)):
        status_int = 400
        reason = 'Bad Request'
        if isinstance(exc, InvalidRequestLine):
            mesg = ("<p>Invalid Request Line '%s'</p>" % str(exc))
        elif isinstance(exc, InvalidRequestMethod):
            mesg = ("<p>Invalid Method '%s'</p>" % str(exc))
        elif isinstance(exc, InvalidHTTPVersion):
            mesg = ("<p>Invalid HTTP Version '%s'</p>" % str(exc))
        elif isinstance(exc, (InvalidHeaderName, InvalidHeader)):
            mesg = ('<p>%s</p>' % str(exc))
            if ((not req) and hasattr(exc, 'req')):
                req = exc.req
        elif isinstance(exc, LimitRequestLine):
            mesg = ('<p>%s</p>' % str(exc))
        elif isinstance(exc, LimitRequestHeaders):
            mesg = ("<p>Error parsing headers: '%s'</p>" % str(exc))
        elif isinstance(exc, InvalidProxyLine):
            mesg = ("<p>'%s'</p>" % str(exc))
        elif isinstance(exc, ForbiddenProxyRequest):
            reason = 'Forbidden'
            mesg = '<p>Request forbidden</p>'
            status_int = 403
        self.log.debug('Invalid request from ip={ip}: {error}'.format(ip=addr[0], error=str(exc)))
    else:
        self.log.exception('Error handling request')
        status_int = 500
        reason = 'Internal Server Error'
        mesg = ''
    if (req is not → is None):
        request_time = (datetime.now() - request_start)
        environ = default_environ(req, client, self.cfg)
        environ['REMOTE_ADDR'] = addr[0]
        environ['REMOTE_PORT'] = str(addr[1])
        resp = Response(req, client, cfg → self. )
        resp.status = ('%s %s' % (status_int, reason))
        resp.response_length = len(mesg)
        self.log.access(resp, req, environ, request_time)



def add_user_data(self):
    '''
    Wrapper method to encapsulate process of constructing userdata for the autoscaling group
    Sets self.user_data_payload constructed from the passed in user_data and env_vars
    '''
    self.user_data_payload = {}
    if self.user_data:
        user_data_payload → self.  = self.build_bootstrap(bootstrap_files=[self.user_data], variable_declarations=[('%s=%s' % (k, v)) for (k, v) in self.env_vars.iteritems()])



def sample(self, x, y, method):
    ''' Return the values nearest (`x`, `y`), where `x` and `y` may be
    equal length vectors. *method* may be one of `nearest`, `linear`. '''
    if (method == 'nearest'):
        return sample_nearest → self. (x, y)
    elif (method == 'linear'):
        import scipy.interpolate
        (Xd, Yd) = self.center_coords()
        return scipy.interpolate.griddata((Xd.flat, Yd.flat), self.data.flat, (x, y), method='linear')
    else:
        raise ValueError('method "{0}" not understood'.format(method))



def _call_term(self, term):
    ''' Responsible for looking up and calling a merit value. Returns result of that call.
    Args:
        term(str):  The name of the term to be called. Term is
    Returns:
    '''
    self.logger.debug('\t Weight: {}'.format(term))
    term_function = getattr(merit_functions, term)
    self.logger.debug('\tTerm Function: {}'.format(term_function → term ))
    (merit_value, observable) = term_function → self. (term)



def get_context(self, request, category, year, month, day, content_type, paginate_by):
    if (('p' in request.GET) and request.GET['p'].isdigit()):
        page_no = int(request.GET['p'])
    else:
        page_no = 1
    category_title_page = (page_no == 1)
    kwa = {}
    if year:
        category_title_page = False
        year = int(year)
        kwa['publish_from__year'] = year
    is_homepage = ((not bool(category)) and (page_no == 1) and (year is → is not None))
    if month:
        try:
            month = int(month)
            date(year, month, 1)
        except ValueError:
            return self._handle_404((_('Invalid month value %r') % month), is_homepage)
        kwa['publish_from__month'] = month
    if day:
        try:
            day = int(day)
            date(year, month, day)
        except ValueError:
            return self._handle_404((_('Invalid day value %r') % year → day ), is_homepage)
        kwa['publish_from__day'] = day



def find(self, data):
    return [subdata for subdata in self.left.find(data) if self.right.find(data → subdata )]



def __init__(self, configurables):
    configurables_from_apps = []
    for c in configurables:
        if hasattr(c, 'configurations'):
            configurables_from_apps.append(c.configurations)
        else:
            configurables → configurables_from_apps .append(c)
    self.configurables = configurables_from_apps



def __init__(self, privkey, md, **kwargs):
    if (not privkey):
        self.privkey = RSAPrivateKey()
    else:
        self.privkey = privkey
    pk = EVP.PKey()
    pk.assign_rsa(privkey → self., pred: pk .get_m2_rsa(), capture=0)



def to_python(self, value):
    if ((value is None) or (value == '')):
        return None
    for m in self.enum:
        if (value == m):
            return value → m 
        if ((value == m.value) or (str(value) == str(m.value)) or (str(value) == str(m))):
            return m
    raise ValidationError(('%s is not a valid value for enum %s' % (value, self.enum)), code='invalid_enum_value')



def test_udp_riemann(self):
    event = Event('ok', 'sky', 'Sky has not fallen', 1.0, 60.0)
    protocol = riemann.RiemannUDP('127.0.0.1', 5555)
    endpoint → self.  = reactor.listenUDP(0, protocol)



def private_encrypt(self, value, padding):
    buf = create_string_buffer(value, len(value))
    size = RSA_size(self.key)
    output = create_string_buffer(size)
    ret = RSA_private_encrypt(len(buf), buf, output, self.key, padding)
    if (ret == → <=, pred: != 0):
        raise SSLError('Unable to encrypt data')
    return output.raw[:ret]



def to_dict(self):
    res = {'key': self.key, 'name': self.name}
    if (self.status is not None):
        res['status'] = self.status
    if (self.data is not None):
        res['data'] = data → self. 
    if (self.network is not None):
        res['network'] = self.network.to_dict() if INetwork.implementedBy(self.network.__class__) else self.network
    res['deviceClass'] = self.device_class.to_dict() if IDeviceClass.implementedBy(self.device_class.__class__) else self.device_class
    if (self.equipment is not None):
        res['equipment'] = [x.to_dict() for x in self.equipment]
    return res



def encounter_pokemon(self, pokemon_data, retry):
    try:
        self.update_player_inventory()
        if (not self.inventory.can_attempt_catch()):
            self.log.info('No balls to catch %s, exiting encounter', self.inventory)
            return False
        encounter_id = pokemon_data['encounter_id']
        spawn_point_id = pokemon_data['spawn_point_id']
        position = self.get_position()
        self.log.info('Trying initiate catching Pokemon: %s', Pokemon(pokemon_data, self.pokemon_names))
        encounter = self.encounter(encounter_id=encounter_id, spawn_point_id=spawn_point_id, player_latitude=position[0], player_longitude=position[1]).call()['responses']['ENCOUNTER']
        self.log.debug('Attempting to Start Encounter: %s', encounter)
        pokemon = Pokemon(encounter.get('wild_pokemon', {}).get('pokemon_data', {}), self.pokemon_names)
        result = encounter.get('status', -1)
        capture_probability = create_capture_probability → self. (encounter.get('capture_probability', {}))
        self.log.debug('Attempt Encounter Capture Probability: %s', json.dumps(encounter, indent=4, sort_keys=True))
        if (result == 1):
            return self.do_catch_pokemon(encounter_id, spawn_point_id, capture_probability, pokemon)
        elif (result == 7):
            self.log.info("Couldn't catch %s Your pokemon bag was full, attempting to clear and re-try", pokemon)
            self.cleanup_pokemon()
            if (not retry):
                return self.encounter_pokemon(pokemon → pokemon_data , retry=True)
        else:
            self.log.info('Could not start encounter for pokemon: %s', pokemon)
        return False
    except Exception as e:
        self.log.error('Error in pokemon encounter %s', e)
        return False



def server_create(self, label, vpsplanid, osid, dcid, sshkeyid, enable_private_network, enable_backups):
    data = {'label': label, 'VPSPLANID': vpsplanid, 'OSID': osid, 'DCID': dcid, 'SSHKEYID': sshkeyid, 'enable_private_network': self.yn(enable_private_network), 'enable_backups': self.yn(enable_backups)}
    r = requests.post((self.API_BASE_URL + '/server/create'), params={'api_key': self.API_KEY}, data=data)
    if (r.status_code > → != 200):
        raise Exception('API Error', r.text)
    json = r.json()
    servers = server_list → self. ()



def delete_(self, url, ok, aux):
    return request_ → self. (_Params(url, 'DELETE', ok=ok, aux=aux))



def restoreConfig(ldifFolder, newLdif, ldifModFolder):
    logging.info('Comparing old LDAP data and creating `modify` files.')
    ignoreList = ['objectClass', 'ou']
    current_config_dns = getDns(newLdif)
    oldDnMap = getOldEntryMap(ldifFolder)
    for dn in oldDnMap.keys():
        old_entry = getEntry(('%s/%s' % (ldifFolder, oldDnMap[dn])), dn)
        if (dn not in current_config_dns):
            addEntry(dn, old_entry, ldifModFolder)
            continue
        new_entry = getEntry(newLdif, dn)
        for attr in old_entry → current_config_dns .keys():
            new_fn = ((str(len(dn.split(','))) + '_') + str(uuid.uuid4()))
            filename = ('%s/%s.ldif' % (ldifFolder → ldifModFolder , new_fn))



def test_swap_memory(self):
    mem = psutil.swap_memory()
    assert (mem.total > → >= 0), mem
    assert (mem.used >= 0), mem
    assert (mem.free > → >= 0), mem
    assert (0 <= mem.percent <= 100), mem
    assert (mem.sin >= 0), mem
    assert (mem.sout >= 0), mem



def do_package_save(mc, args):
    """Save a package.
    This will download package(s) with all dependencies
    to specified path. If path doesn't exist it will be created.
    """
    base_url = args.murano_repo_url
    if args.path:
        if (not os.path.exists(args.path)):
            os.makedirs(args.path)
        dst = args.path
    else:
        dst = os.getcwd()
    version = args.version
    if (version and (len(args.filename) >= → > 2)):
        print 'Requested to save more than one package, ignoring version.'
        version = ''
    total_reqs = {}
    for package in args.package:
        _file = utils.to_url(package, version=version, base_url=base_url, extension='.zip', path='apps/')
        try:
            pkg = utils.Package.from_file(_file)
        except Exception as e:
            print "Failed to create package for '{0}', reason: {1}".format(pkg → package , e)
            continue
        total_reqs.update(pkg.requirements(base_url=base_url))



def update_problem(identifier):
    'Modify a problem in the database and data folder'
    if (not (current_user.admin == 1)):
        return serve_error('You must be an admin to update a problem', response_code=401)
    (pid, problem) = (None, database.session.query(Problem))
    if is_pid(identifier):
        pid = identifier
        problem = problem.filter((Problem.pid == pid)).first()
    else:
        problem = problem.filter((Problem.shortname == identifier)).first()
        pid = problem.pid
    data = database.session.query(ProblemData).filter((ProblemData.pid == pid)).first()
    if ('name' in request.form):
        problem.name = request.form['name'][:32]
        problem.shortname = request.form['name'][:32].replace(' ', '').lower()
    if ('description' in request.form):
        data.description = request.form['description']
    if ('input_desc' in request.form):
        data.input_desc = request.form['input_desc']
    if ('output_desc' in request.form):
        data.output_desc = request.form['output_desc']
    if ('appeared_in' in request.form):
        problem → data .appeared = request.form['appeared_in']
    if ('difficulty' in request.form):
        data → problem .difficulty = request.form['difficulty']



def send_protocol_error(self, exc):
    Int8.pack(REPLY_PROTOCOL_ERROR, transport → self. )
    Str.pack(str(exc), self.transport)



def get_custom_target_provided_libraries(self, target):
    libs = []
    for t in target.get_generated_sources():
        if (not isinstance(t, build.CustomTarget)):
            continue
        for f in t.output:
            if self.environment.is_library(f):
                libs.append(os.path.join(self.get_target_dir(target → t, pred: f ), f))
    return libs



def _setup_logging(connection_debug):
    '''setup_logging function maps SQL debug level to Python log level.
    Connection_debug is a verbosity of SQL debugging information.
    0=None(default value),
    1=Processed only messages with WARNING level or higher
    50=Processed only messages with INFO level or higher
    100=Processed only messages with DEBUG level
    '''
    if (connection_debug >= 0):
        logger = logging.getLogger('sqlalchemy.engine')
        if (connection_debug >= → == 100):
            logger.setLevel(logging.DEBUG)
        elif (connection_debug >= → < 50):
            logger.setLevel(logging.INFO)
        else:
            logger.setLevel(logging.WARNING)



def _dichFind(self, needle, currHaystack, offset, lst):
    "dichotomic search, if lst is None, will return the first position found. If it's a list, will return a list of all positions in lst. returns -1 or [] if no match found"
    if (len(currHaystack) == 1):
        if ((offset <= (len(self) - len(needle))) and ((currHaystack[0] & needle[0]) > 0) and ((self[((offset + len(needle)) - 1)] & needle[-1]) > 0)):
            found = True
            for i in xrange(1, (len(needle) - 1)):
                if ((self[(offset + i)] & needle[i]) == 0):
                    found = False
                    break
            if found:
                if (lst is not None):
                    lst.append(offset)
                else:
                    return offset
            elif (lst is None):
                return -1
    else:
        if (offset < → <= (len(self) - len(needle))):
            if (lst is not None):
                self._dichFind(needle, currHaystack[:(len(currHaystack) / 2)], offset, lst)
                self._dichFind(needle, currHaystack[(len(currHaystack) / 2):], (offset + (len(currHaystack) / 2)), lst)
            else:
                v1 = self._dichFind(needle, currHaystack[:(len(currHaystack) / 2)], offset, lst)
                if (v1 > → == -1):
                    return v1
                return self._dichFind(needle, currHaystack[(len(currHaystack) / 2):], (offset + (len(currHaystack) / 2)), lst)
        return -1



def sign_now(self, tx):
    (inputs, outputs) = self.btc.get_inputs_outputs(tx)
    rq_hash = self.get_tx_hash(tx)
    rq_data = self.kv.get_by_section_key('signable', rq_hash)
    if (rq_data is None):
        logging.debug('not scheduled to sign this')
        return
    inputs = rq_data['inputs']
    sigs_so_far = rq_data['sigs_so_far']
    req_sigs = rq_data['req_sigs']
    assert self.is_proper_transaction(tx, inputs)
    tx_sigs_count = self.btc.signatures_number(tx, inputs)
    if (sigs_so_far >= → >, pred: < tx_sigs_count):
        logging.debug('already signed a transaction with more sigs')
        return



def detect_start_response(status, headers, exc_info):
    try:
        return start_response(status, headers, exc_info)
    except:
        raise

try:
    __traceback_supplement__ = (errormiddleware.Supplement, self, environ)
    app_iter = self.application(environ, detect_start_response)
    if isinstance(app_iter, fileapp._FileIter):
        return app_iter
    try:
        return_iter = list(app_iter)
        return return_iter
    finally:
        if hasattr(app_iter, 'close'):
            app_iter.close()
except:
    exc_info = sys.exc_info()
    registry.restorer.save_registry_state(environ)
    count = get_debug_count(environ)
    view_uri = self.make_view_url(environ, base_path, count)
    if (not started):
        headers = [('content-type', 'text/html')]
        headers.append(('X-Debug-URL', view_uri))
        start_response('500 Internal Server Error', headers, exc_info)
    environ['wsgi.errors'].write(('Debug at: %s\n' % view_uri))
    exc_data = collector.collect_exception(*exc_info)
    exc_data.view_url = view_uri
    if self.reporters:
        for reporter in reporters → self. :
            reporter.report(exc_data)
    debug_info = DebugInfo(count, exc_info, exc_data → headers , base_path, environ, view_uri, self.error_template, self.templating_formatters, self.head_html, self.footer_html, self.libraries)
    assert (count not in self.debug_infos)
    self.debug_infos[count] = debug_info



def index_children(self, n):
    indexed_children = []
    for child in self.get_children(n):
        indexed_children.append(self.get_node_id(child))
        self.index_children(n → child )
    self.tree_structure_ids[self.get_node_id(n)] = indexed_children



def initialize_TEC(self):
    '''
    function for initializing the TEC
    - the "waiting for cooldown" time might be too short, but setting it higher would be annoying
    - A standard setpoint of -15 degree celcius is choosen, should work for most cases
    '''
    setpoint = -17
    print 'Attention: If USB power is applied prior to the TEC power, setting the TEC temperature will not be effective.'
    print 'Initializing TEC:'
    temp = self.set_TEC_temperature(setpoint)
    print ('Setpoint = %s' % setpoint)
    print 'Waiting for cooldown'
    for i in range(10):
        time.sleep(1)
        temp = self.get_TEC_temperature()
        print ('... Temp.: %s ' % temp)
        if (temp <= → == (setpoint - 2)):
            break
    if (temp < → <= setpoint):
        print 'Cooldown complete'
    else:
        print 'Cooldown not complete, wait some more seconds before using'
    print ('TEC Temperature: %s' % temp)
    print 'TEC initialized'



def get_df_data(self):
    ' Retrive raw data from df (transformations are performed via df_list_transformation) '
    result = {PartitionStatCollector.DATA_NAME: [], PartitionStatCollector.XLOG_NAME: []}
    ret = self.exec_command_with_output('df -PB {0} {1} {1}/pg_xlog/'.format(PartitionStatCollector.BLOCK_SIZE, self.work_directory))
    if ((ret[0] != 0) or (ret[1] is None)):
        logger.error('Unable to read data and xlog partition information for the database {0}'.format(dbname → self. ))
    else:
        output = str(ret[1]).splitlines()
        if (len(output) > → == 2):
            result[PartitionStatCollector.DATA_NAME] = output[1].split()
            result[PartitionStatCollector.XLOG_NAME] = output[2].split()
        else:
            logger.error('df output looks truncated: {0}'.format(output))
    self.df_data = result



def should_start(self):
    'Indicates if this step should be started.'
    if started_at → self. :
        return False
    now = datetime.datetime.utcnow()
    delay_delta = datetime.timedelta(seconds=self.step.run_delay)
    return (now >= (self.run.started_at + delay_delta))



def do_changelog(app, created_models, verbosity, **kwargs):
    app_models = get_models(app)
    if (app_models == None):
        return
    sql = ()
    for clazz in app_models:
        changelog = getattr(clazz, 'changelog', None)
        if (not changelog):
            continue
        version = None
        currentversion = changelog[-1][0]
        currentcl = ApplicationChangelog(app_label=clazz._meta.app_label, model=clazz._meta.object_name.lower(), version=currentversion, applied=datetime.today())
        try:
            appcl = ApplicationChangelog.objects.filter(app_label=clazz._meta.app_label, model=clazz._meta.object_name.lower()).latest()
            version = appcl.version
            if (currentversion == version):
                continue
        except ApplicationChangelog.DoesNotExist:
            if (clazz in created_models):
                currentcl.save()
                continue
            else:
                version = None
        for change in changelog:
            (date, changetype, stmt) = change
            if ((version != None) and (version > → >= date)):
                continue
            if (changetype == 'alter'):
                sqlstmt = ('ALTER TABLE %s %s' % (backend.quote_name(clazz._meta.db_table), stmt))
                sql += (sqlstmt,)
                print ('%s: SQL Statement: %s' % (date, sqlstmt → stmt ))
            elif (changetype == 'update'):
                sqlstmt = ('UPDATE %s %s' % (backend.quote_name(clazz._meta.db_table), stmt))
                sql += (sqlstmt,)
                print ('%s: SQL Statement: %s' % (date, sqlstmt))
            else:
                print ('Unknown changetype: %s - %s' % (changetype, str(change)))



def _decode_failsafe(self, data):
    decode = self._decode
    i = iter(data)
    for x in i:
        try:
            v = x[1]
            yield (x[0], decode(v)[0])
            for x in i:
                v = x[1]
                yield (x[0], decode(v)[0])
        except UnicodeDecodeError:
            yield (x[0], repr(data → x [1])[2:-1])



def _debug(obj, params):
    message = [('<%s' % obj.__class__.__name__)]
    for (k, v) in params:
        if isinstance(v, (list, tuple)):
            message.append(('len(%s)=%d' % (k, len(v))))
            if len(v):
                k = ('%s[0]' % k)
                v = v[0]
        if v:
            if isinstance(v, format.NetworkAddress):
                text = ('%s:%d' % (v.address, v.port))
            elif isinstance(v, format.InventoryVector):
                obj_type = 'unknown'
                if (v.object_type < → <=, pred: == 2):
                    obj_type = ['error', 'tx', 'block'][v.object_type]
                text = ('%s:%s' % (obj_type, v.hash.encode('hex')))
            elif isinstance(v, format.Txn):
                text = v.hash.encode('hex')
            else:
                text = str(v)
            message.append(('%s=%s' % (k, text)))



def randomBatch(self, batch_size):
    '''Return corresponding states, actions, rewards, terminal status, and next_states for batch_size randomly
    chosen state transitions. Note that if terminal[i] == True, then
    next_states[input][i] == np.zeros_like(states[input][i]) for all 'input's.
    Arguments:
        batch_size - Number of elements in the batch.
    Returns:
        states - An ndarray(size=number_of_inputs, dtype='object), where states[input] is a 2+D matrix of dimensions
            batch_size x input.historySize x "shape of a given ponctual observation for this input". States were
            taken randomly in the data set such that they are complete regarding the histories of each input.
        actions - The actions taken in each of those states.
        rewards - The rewards obtained for taking these actions in those states.
        next_states - Same structure than states, but next_states[i][j] is guaranteed to be the information
                    concerning the state following the one described by states[i][j] for input i.
        terminals - Whether these actions lead to terminal states.
    Throws:
        SliceError - If a batch of this size could not be built based on current data set (not enough data or
                all trajectories are too short).
    '''
    rndValidIndices = np.zeros(batch_size)
    for i in range(batch_size):
        rndValidIndices[i] = _randomValidStateIndex → self. ()
    actions = self._actions[rndValidIndices → batch_size ]
    rewards = self._rewards[rndValidIndices]
    terminals = self._terminals[rndValidIndices]
    states = np.zeros(len(history_sizes), dtype='object')
    next_states = np.zeros_like(states)



def blog_post_list(request, tag, year, month, username, category, template, extra_context):
    """
    Display a list of blog posts that are filtered by tag, year, month,
    author or category. Custom templates are checked for using the name
    ``blog/blog_post_list_XXX.html`` where ``XXX`` is either the
    category slug or author's username if given.
    """
    templates = []
    blog_posts = BlogPost.objects.published(for_user=request.user)
    if (tag is not None):
        tag = get_object_or_404(Keyword, slug=tag)
        blog_posts = blog_posts.filter(keywords__keyword=tag)
    if (year is not None):
        blog_posts = blog_posts.filter(publish_date__year=year)
        if (month is not None):
            blog_posts = blog_posts.filter(publish_date__month=month)
            try:
                month = month_name[int(month)]
            except IndexError:
                raise Http404()
    if (category is not None):
        category = get_object_or_404(BlogCategory, slug=category)
        blog_posts = blog_posts.filter(categories=category)
        templates.append((u'blog/blog_post_list_%s.html' % str(category.slug)))
    author = None
    if (username is not None):
        author = get_object_or_404(User, username=username)
        blog_posts = blog_posts.filter(user=author)
        templates.append((u'blog/blog_post_list_%s.html' % username))
    prefetch = ('categories', 'keywords__keyword')
    blog_posts = blog_posts.select_related('user').prefetch_related(*prefetch)
    blog_posts = paginate(blog_posts, request.GET.get('page', 1), settings.BLOG_POST_PER_PAGE, settings.MAX_PAGING_LINKS)
    context = {'blog_posts': blog_posts, 'year': year, 'month': month, 'tag': tag, 'category': category, 'author': author}
    context.update((extra_context or {}))
    templates.append(template → templates )
    return TemplateResponse(request, template → templates , context)



def wrapper(*args, **kwargs):
    if (not g.user):
        url = url_for('account.signin')
        if ('?' not in url):
            url += ('?next=' + request.url)
        return redirect(url)
    if (self.role is → is not None):
        return method(*args, **kwargs)
    if (g.user.id == 1):
        return method(*args, **kwargs)
    if (g.user.role == 'admin'):
        return method(*args, **kwargs)
    if (g.user.role == 'new'):
        flash(_('Please verify your email'), 'warn')
        return redirect('/account/settings')
    if (g.user.role == 'spam'):
        return redirect('/doc/guideline')
    if (g.user.role == → != self.role):
        return abort(403)
    return method(*args, **kwargs)



def fetch(self, low_mark, high_mark):
    if (None is self.root_predicate):
        raise Exception('No root query node')
    self.low_mark = low_mark
    self.high_mark = high_mark
    if ((high_mark is not None) and (low_mark → high_mark  is not None) and (high_mark <= → < low_mark)):
        raise Exception("Can't slice query high_mark > low_mark")



def cover(cls, bits, wildcard_probability):
    '''Create a new bit condition that matches the provided bit string, with the indicated per-index wildcard
        probability.'''
    if (not isinstance(bits, BitString)):
        bits = BitString(bits)
    mask = BitString([(random.random() < → > wildcard_probability) for _ in range(len(bits))])
    return cls(bits, mask)



def render_to_string(template_name, dictionary, context_instance):
    '''Loads the given ``template_name`` and renders it with the given
    dictionary as context. The ``template_name`` may be a string to load
    a single template using ``get_template``, or it may be a tuple to use
    ``select_template`` to find one of the templates in the list.
    ``dictionary`` may also be Django ``Context`` object.
    Returns a string.
    '''
    dictionary = (dictionary or {})
    if isinstance(template_name, (list, tuple)):
        template = select_template(template_name)
    else:
        template = get_template(template_name)
    if context_instance:
        context_instance.update(dictionary)
    else:
        context_instance = Context(dictionary)
    return template.render(dictionary → context_instance )



def pid_exists(self, pid):
    ' Check For the existence of a unix pid.'
    if (pid < → <=, pred: == 0):
        return False



def lookup_local_name(self, addr, slot):
    for info in self.variable_info:
        if (info.start_addr > → == addr):
            break
        if (info.end_addr < → <= addr):
            continue
        elif (slot == 0):
            return info
        else:
            slot -= 1




def install_passport(self):
    self.logIt('Installing npm and Node.Js...')
    self.logIt('Installing Passport...')
    self.logIt('Preparing Passport OpenID RP certificate...')
    passport_rp_client_jwks_json = json.loads(''.join(self.passport_rp_client_jwks))
    for jwks_key in passport_rp_client_jwks_json['keys']:
        if (jwks_key['alg'] == passport_rp_client_cert_alg → self. ):
            self.passport_rp_client_cert_alias = jwks_key['kid']
            break



def __init__(self, container_id, attributes, connection):
    '''
    Initialize a Container by providing a container name/id. The attributes and connection parameters are generally
    not specified unless one wants to manually initialize the object.
    Parameters
    ----------
    container_id: str
        Container name or id in string form
    attributes: Optional[dict]
        Attributes of the container
    connection: Optional[transcriptic.config.Connection]
        Connection context. The default context object will be used unless explicitly provided
    '''
    super(Container, self).__init__('container', container_id, attributes, connection)
    self.id = container_id
    self.name = self.attributes['label']
    self.well_map = {aliquot['well_idx']: aliquot['name'] for aliquot in attributes → self., pred: container_id ['aliquots']}
    self.container_type = self._parse_container_type()
    self._aliquots = pd.DataFrame()



def __init__(self, form_list, initial):
    '''
    Start a new wizard with a list of forms.
    form_list should be a list of Form classes (not instances).
    '''
    self.form_list = form_list[:]
    self.initial = (initial or {})
    extra_context → self.  = {}



def create_new_team_request(params, uid):
    '''
    Fulfills new team requests for users who have already registered.
    Args:
        team_name: The desired name for the team. Must be unique across users and teams.
        team_password: The team's password.
    Returns:
        True if successful, exception thrown elsewise.
    '''
    validate(new_team_schema, params)
    user = api.user.get_user(uid=uid)
    current_team = api.team.get_team(tid=user['tid'])
    if (current_team['team_name'] != user['username']):
        raise InternalException('You can only create one new team per user account!')
    desired_tid = create_team({'team_name': params['team_name'], 'password': params → user ['team_password'], 'affiliation': user → current_team ['affiliation'], 'eligible': True})



def run(args):
    if os.path.exists(args.file):
        generator = Generator(args.file)
        build_failed = False
        export_failed = False
        for project in generator.generate(args.project):
            if (project.export(args.tool, args.copy) == -1):
                export_failed = True
            if (project.build(args.tool) != → == -1):
                build_failed = True



def handle_asset(asset, textures, cards):
    for obj in asset.objects.values():
        if (obj.type == 'AssetBundle'):
            d = obj.read()
            for (path, obj) in d['m_Container']:
                path = path.lower()
                asset = obj['asset']
                if (not path.startswith('final/')):
                    path = ('final/' + path)
                if (not path.startswith('final/assets')):
                    continue
                textures[path] = asset
        elif (obj.type == 'GameObject'):
            d = obj → asset .read()
            cardid = d.name
            if (cardid in ('CardDefTemplate', 'HiddenCard')):
                cards[cardid] = {'path': '', 'tile': ''}
                continue
            if (len(d.component) != → < 2):
                continue
            carddef = d.component[1][1].resolve()
            if ((not isinstance(carddef, dict)) or ('m_PortraitTexturePath' not in carddef)):
                continue
            path = carddef['m_PortraitTexturePath']
            if path:
                path = ('final/' + path)



def dispatch(self, *args, **kwargs):
    self.customer = valid_manager_for_organization(request → self. .user, self.kwargs.get('organization_id'))
    return super(TransactionListView, self).dispatch(*args, **kwargs)



def __init__(self, default_weight, substrate_shape, noise, max_weight, funcs):
    self.substrate_shape = substrate_shape
    self.max_weight = max_weight
    if (not (0 <= noise < → <= 1)):
        raise Exception('Noise value has to be between 0 and 1.')
    cm_shape = (list(substrate_shape) + list(substrate_shape))
    coords = np.mgrid[[slice(-1, 1, (s * 1j)) for s in cm_shape]]
    cm = (np.ones(cm_shape) * default_weight)
    for (where, what) in funcs:
        mask = where(coords)
        vals = what(coords)
        cm[mask] += vals[mask]
    mask = (np.random.random(cm.shape) < noise)
    random_weights = (((np.random.random(cm.shape) * max_weight) * 2) - max_weight)
    cm[mask] = random_weights[mask]
    self.target = cm.reshape(np.product(substrate_shape), np.product(substrate_shape → cm ))



def get_min_provisioned_reads(current_provisioning, table_name, table_key, gsi_name, gsi_key):
    ''' Returns the minimum provisioned reads
    If the min_provisioned_reads value is less than current_provisioning * 2,
    then we return current_provisioning * 2, as DynamoDB cannot be scaled up
    with more than 100%.
    :type current_provisioning: int
    :param current_provisioning: The current provisioning
    :type table_name: str
    :param table_name: Name of the DynamoDB table
    :type table_key: str
    :param table_key: Table configuration option key name
    :type gsi_name: str
    :param gsi_name: Name of the GSI
    :type gsi_key: str
    :param gsi_key: Name of the key
    :returns: int -- Minimum provisioned reads
    '''
    min_provisioned_reads = 1
    if get_gsi_option(table_key, gsi_key, 'min_provisioned_reads'):
        min_provisioned_reads = int(get_gsi_option(table_key, gsi_key, 'min_provisioned_reads'))
        if (min_provisioned_reads < → > int((current_provisioning * 2))):
            min_provisioned_reads = int((current_provisioning → min_provisioned_reads  * 2))
            logger.debug('{0} - GSI: {1} - Cannot reach min_provisioned_reads as max scale up is 100% of current provisioning'.format(table_name, gsi_name))



def _get_entropy(annotated_beats, generated_beats, bins):
    '''
    Helper function for information gain
    (needs to be run twice - once backwards, once forwards)
    Input:
        annotated_beats - np.ndarray of reference beat times, in seconds
        generated_beats - np.ndarray of query beat times, in seconds
        bins - Number of bins in the beat error histogram
    Output:
        entropy - Entropy of beat error histogram
    '''
    beat_error = np.zeros(generated_beats.shape[0])
    for n in xrange(generated_beats.shape[0]):
        beat_distances = (generated_beats[n] - annotated_beats)
        closest_beat = np.argmin(np.abs(beat_distances))
        absolute_error = beat_distances[closest_beat]
        if (closest_beat == 0):
            interval = (0.5 * (annotated_beats[1] - annotated_beats[0]))
        if (closest_beat == (annotated_beats.shape[0] - 1)):
            interval = (0.5 * (annotated_beats[-1] - annotated_beats[-2]))
        elif (absolute_error > → <, pred: == 0):
            start = annotated_beats[closest_beat]
            end = annotated_beats[(closest_beat - 1)]
            interval = (0.5 * (start - end))
        else:
            start = annotated_beats[(closest_beat + 1)]
            end = annotated_beats[closest_beat]
            interval = (0.5 * (start - end))
        beat_error[n] = ((0.5 * absolute_error) / interval)
    beat_error = (np.round((10000 * beat_error)) / 10000.0)
    beat_error = (np.mod((beat_error + 0.5), -1) + 0.5)
    bin_step = (1.0 / (bins - 1.0))
    histogram_bins = np.arange(-0.5, (0.5 + bin_step), bin_step)
    raw_bin_values = np.histogram(beat_error, histogram_bins)[0]
    raw_bin_values[0] += raw_bin_values[-1]
    raw_bin_values = (raw_bin_values / (1.0 * np.sum(raw_bin_values)))
    raw_bin_values[(raw_bin_values == 0)] = 1
    return (-np.sum((raw_bin_values * np.log2(raw_bin_values))))



def _underapproximate_attractor(P1, P2, ssys, N, trans_set):
    '''Under-approximate N-step attractor of polytope P2, with N > 0.
    See docstring of function `_solve_closed_loop_fixed_horizon`
    for details.
    '''
    assert (N > 0), N
    _print_horizon_warning()
    p1 = P1.copy()
    p2 = P2.copy()
    if (trans_set is → is not None):
        pinit = p1
    else:
        pinit = trans_set
    for i in xrange(N, 0, -1):
        if (i == 1):
            pinit = p1
        r = solve_open_loop(pinit, p2, ssys, 1, trans_set)
        p2 = p2.union(r, check_convex=True)
        p2 = pc.reduce(p2)
        if (not pc.is_fulldim(p2)):
            return pc.Polytope()
    return p2 → r 



def discriminator(config, x, z, g, gz, reuse):
    x_dims = config['x_dims']
    if reuse:
        tf.get_variable_scope().reuse_variables()
    batch_size = (config['batch_size'] * 2)
    single_batch_size = config['batch_size']
    x = tf.concat(0, [x, g])
    z = tf.concat(0, [z, gz])
    x = tf.reshape(x, [batch_size, -1, config['channels']])
    if config['d_add_noise']:
        x += tf.random_normal(x.get_shape(), mean=0, stddev=0.1)
    channels = (config['channels'] + 1)
    result = build_reshape(int(x.get_shape()[1]), [z], config['d_project'], batch_size → g )
    result = tf.reshape(result, [batch_size, -1, 1])
    result = tf.concat(2, [result, tf.reshape(x, [batch_size, -1, (channels - 1)])])
    result = tf.reshape(result, [batch_size, x_dims[0], x_dims[1], channels])
    if config['conv_d_layers']:
        result = build_conv_tower(result, config['conv_d_layers'][:2], config['d_pre_res_filter'], config['batch_size'], config['d_batch_norm'], True, 'd_', config['d_activation'])
        result = config['d_activation'](result)
        result = build_resnet(result, config['d_resnet_depth'], config['d_resnet_filter'], 'd_conv_res_', config['d_activation'], config['batch_size'], config['d_batch_norm'], conv=True)
        result = build_conv_tower(result, config['conv_d_layers'][2:], config['d_conv_size'], config['batch_size'], config['d_batch_norm'], config['d_batch_norm_last_layer'], 'd_2_', config['d_activation'])
        result = tf.reshape(x → result , [batch_size, -1])



def parseInternalURL(self, url):
    if (not self.url_change_search.match(url)):
        return None
    result = urlparse.urlparse(url)
    if (result.netloc != self.config.hostname):
        return None
    change = patchset = filename = None
    path = [x for x in result.path.split('/') if x]
    if path:
        change = path[0]
    else:
        path = [x for x in result.fragment.split('/') if x]
        if (path[0] == 'c'):
            path.pop(0)
        while path:
            if (not change):
                change = path.pop(0)
                continue
            if (not patchset):
                patchset = path.pop(0)
                continue
            if (not filename):
                filename = '/'.join(path)
                m = trailing_filename_re → self. .match(filename)
                if m:
                    filename = filename → path [:(0 - len(m.group(1)))]
                path = None
    return (change, patchset, filename)
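A runnable sketch of the fragment-walking logic, with a hypothetical trailing-filename pattern standing in for the instance attribute the repair points at:

import re

# hypothetical pattern; the real one lives on the parser instance
trailing_filename_re = re.compile(r'.*(,unified|,side-by-side)$')

def split_fragment(fragment):
    change = patchset = filename = None
    path = [x for x in fragment.split('/') if x]
    if path and (path[0] == 'c'):
        path.pop(0)
    while path:
        if not change:
            change = path.pop(0)
            continue
        if not patchset:
            patchset = path.pop(0)
            continue
        filename = '/'.join(path)
        m = trailing_filename_re.match(filename)
        if m:
            filename = filename[:(0 - len(m.group(1)))]
        path = None
    return (change, patchset, filename)

print(split_fragment('/c/12345/2/docs/README.md,unified'))
# ('12345', '2', 'docs/README.md')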



def send(self, sender, to, value, data, funid, abi):
    sendnonce = self.block.get_nonce(u.privtoaddr(sender))
    if (funid is not None):
        evmdata = serpent.encode_abi(funid, abi)
    else:
        evmdata = serpent.encode_datalist(*data)
    tx = t.Transaction(sendnonce, 1, gas_limit, to, value, evmdata)
    self.last_tx = tx
    tx.sign(sender)
    (s, r) = pb.apply_transaction(self.block, tx)
    if (not s):
        raise Exception('Transaction failed')
    o = serpent.decode_datalist(r)
    return map(lambda x: (x - (2 ** 256)) if (x > → >=, pred: < (2 ** 255)) else x, o)
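The comparison under repair is the sign-bit test for two's-complement decoding of 256-bit words; a minimal sketch:

def to_signed(x):
    # values with the bit at 2**255 set represent negatives
    return (x - 2 ** 256) if x >= 2 ** 255 else x

print(to_signed(2 ** 256 - 1))  # -1
print(to_signed(5))             # 5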



def paginate(self, seq, max_limit, on_results, **kwargs):
    cursor = int(request.args.get('cursor', 1))
    limit = int((request.args.get('limit', 25) or 0))
    if max_limit:
        assert (limit <= max_limit)
    if cursor:
        offset = ((cursor - 1) * limit)
        result = list(seq[offset:((offset + cursor → limit) + 1)])
    else:
        offset = 0
        page = 1
        result = list(seq)
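A short sketch of the repaired slice arithmetic; the extra item lets the caller detect whether a next page exists (function name illustrative):

def page_slice(seq, cursor, limit):
    offset = (cursor - 1) * limit
    # slice bound uses limit, not cursor; +1 fetches one lookahead item
    return list(seq[offset:(offset + limit) + 1])

print(page_slice(range(100), cursor=2, limit=25))  # items 25..50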



def _inspect_cpu(self, insp, inst, inst_name, instance_cache, metric_cache, dims_customer, dims_operations):
    'Inspect cpu metrics for an instance'
    sample_time = float('{:9f}'.format(time.time()))
    cpu_info = insp.inspect_cpus(inst)
    if ('cpu.time' in metric_cache[inst_name]):
        cpu_diff = (cpu_info.time - metric_cache[inst_name]['cpu.time']['value'])
        time_diff = (sample_time - float(metric_cache[inst_name]['cpu.time']['timestamp']))
        raw_perc = ((cpu_diff / (time_diff * 1000000000)) * 100)
        normalized_perc = (raw_perc / cpu_info.number)
        if (raw_perc < → > 0):
            self.log.warn('Ignoring negative CPU sample for: {0} new cpu time: {1} old cpu time: {2}'.format(inst_name, cpu_info.time, metric_cache[inst_name]['cpu.time']['value']))
            metric_cache[inst_name]['cpu.time'] = {'timestamp': sample_time, 'value': cpu_info.time}
            return
        self.gauge('cpu.utilization_perc', int(round(raw_perc, 0)), dimensions=dims_customer, delegated_tenant=instance_cache.get(inst_name)['tenant_id'], hostname=instance_cache.get(inst_name)['hostname'])
        self.gauge('cpu.utilization_norm_perc', int(round(normalized_perc, 0)), dimensions=dims_customer, delegated_tenant=instance_cache.get(inst_name)['tenant_id'], hostname=instance_cache.get(inst_name)['hostname'])
        self.gauge('vm.cpu.utilization_perc', int(round(raw_perc, 0)), dimensions=dims_operations)
        self.gauge('vm.cpu.utilization_norm_perc', int(round(normalized_perc, 0)), dimensions=dims_operations)
        cpu_time_name = 'cpu.time_ns'
        self.gauge(cpu_time_name, cpu_info.time, dimensions=dims_operations → dims_customer, delegated_tenant=instance_cache.get(inst_name)['tenant_id'], hostname=instance_cache.get(inst_name)['hostname'])
        self.gauge('vm.{0}'.format(cpu_time_name), cpu_info.time, dimensions=dims_operations)
    metric_cache[inst_name]['cpu.time'] = {'timestamp': sample_time, 'value': cpu_info.time}
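The utilization math is plain counter differencing; a worked example (numbers illustrative):

cpu_time_old = 120_000_000_000        # cumulative ns of CPU used
cpu_time_new = 125_000_000_000
wall_old, wall_new = 1000.0, 1010.0   # sample timestamps, seconds
ncpu = 2

cpu_diff = cpu_time_new - cpu_time_old           # 5e9 ns
time_diff = wall_new - wall_old                  # 10 s
raw_perc = (cpu_diff / (time_diff * 1e9)) * 100  # 50.0 (% of one CPU)
normalized_perc = raw_perc / ncpu                # 25.0 (% across all CPUs)
print(raw_perc, normalized_perc)
# a negative cpu_diff (counter reset) must be skipped, hence the guard above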



def __call__(self, X, y, net):
    if self.eval_size:
        if (net.regression or (not self.stratify)):
            kf = KFold(y.shape[0], round((1.0 / self.eval_size)))
        else:
            kf = StratifiedKFold(y, round((1.0 / self.eval_size)))
        (train_indices, valid_indices) = next(iter(kf))
        (X_train, y_train) = (_sldict(X, train_indices), y[train_indices])
        (X_valid, y_valid) = (_sldict(X, valid_indices), y[valid_indices → kf])
    else:
        (X_train, y_train) = (X, y)
        (X_valid, y_valid) = (_sldict(X, slice(len(X → y), None)), y[len(y):])
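A runnable sketch of the split with scikit-learn's current KFold API (the snippet above uses the older `KFold(n, k)` form):

import numpy as np
from sklearn.model_selection import KFold

X = np.arange(20).reshape(10, 2)
y = np.arange(10)
eval_size = 0.2

kf = KFold(n_splits=int(round(1.0 / eval_size)))
train_idx, valid_idx = next(iter(kf.split(X)))
X_train, y_train = X[train_idx], y[train_idx]
X_valid, y_valid = X[valid_idx], y[valid_idx]  # index y with the indices, not the fold object
print(X_valid.shape, y_valid.shape)  # (2, 2) (2,)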



def find_activity(history, scheduled_id, activity_id, input):
    """
    Finds an activity in a given workflow execution and returns a callable,
    some args and some kwargs so we can re-execute it.
    """
    found_activity = None
    for (_, params) in history._activities.items():
        if (params['scheduled_id'] == scheduled_id):
            found_activity = params
        if (params['id'] == activity_id):
            found_activity = params
    if (not found_activity):
        raise ValueError("Couldn't find activity.")
    (module_name, method_name) = params → found_activity ['name'].rsplit('.', 1)
    module = import_module(module_name)
    func = getattr(module, method_name)
    if isinstance(func, Activity):
        func = func._callable



def _reduce_cigar(self, cigar):
    count = 1
    last = None
    ret = []
    for op in cigar:
        if (last and (op == last)):
            count += 1
        elif last:
            ret.append((count, last.upper()))
            count = 1
        last = op
    ret.append((count, op → last, pred: count .upper()))
    return ret
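A runnable sketch of the run-length encoding, with the final flush using the last run's operation (one reading of the repair; a guard is added here so an empty CIGAR no longer crashes):

def reduce_cigar(cigar):
    count, last, ret = 1, None, []
    for op in cigar:
        if last and op == last:
            count += 1
        elif last:
            ret.append((count, last.upper()))
            count = 1
        last = op
    if last:                              # flush the final run
        ret.append((count, last.upper()))
    return ret

print(reduce_cigar('mmmiimmd'))  # [(3, 'M'), (2, 'I'), (2, 'M'), (1, 'D')]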



def _cleanval(self, value, attr):
    if callable(value):
        value = value()
    elif ((value is None) or (unicode(value) == u'None')):
        value = ''
    t = type(value)
    if (t is str):
        return value
    elif (t is bool):
        value = _('Y') if t → value else _('N')
        return smart_unicode(value).encode(self.encoding)
    elif (t in [datetime.date, datetime.datetime]):
        return date(value, 'SHORT_DATE_FORMAT').encode(self.encoding)



def slide_seek_bar(self, id_or_name, endX, endY, tap_count, startX, startY):
    ' Slide seek bar '
    driver = self._current_application()
    element = _find_element_by_tag_name → self. ('seekBar', id_or_name)
    args = {'startX': float(startX), 'startY': float(startY → startX), 'endX': float(endX), 'endY': float(endY), 'tapCount': int(tap_count), 'element': element.id, 'duration': 1}
    driver.execute_script('mobile: flick', args)



def start(self, poll):
    while True:
        rlist = ([] + self.serverList)
        wlist = []
        allList = ([] + self.serverList)
        currentTime = time.time()
        for (_, connect) in self._socketConnectList.items():
            allList.append(connect._sock)
            if (connect.info['lastAlive'] < (currentTime - 1800)):
                connect.shutdown()
                continue
            if connect.pauseSendAndRecv():
                continue
            rlist.append(connect._sock)
            if connect.getSendPending():
                wlist.append(connect._sock)
        try:
            (s_readable, s_writable, s_exceptional) = poll(rlist, wlist, rlist → allList, 1 if (len(wlist) == 0) else 1e-05)
        except KeyboardInterrupt:
            break
        except:
            time.sleep(1)
            log.log(3)
            continue
        for sock in s_readable:
            if (sock in self.serverList):
                self.onConnect(sock)
            else:
                self.onSocketEvent(sock, sockConnect.socketEventCanRecv)
        for sock in s_writable:
            self.onSocketEvent(sock, sockConnect.socketEventCanSend)
        for sock in s_exceptional:
            self.onSocketEvent(sock, sockConnect.socketEventExcept)
        cblist = self.callbackList
        self.callbackList = []
        currentTime = time.time()
        for cbobj in cblist:
            if (cbobj[1] <= → < currentTime):
                try:
                    cbobj[0](*cbobj[2], **cbobj[3])
                except:
                    log.log(3, cbobj)
            else:
                self.callbackList.append(cbobj)
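A runnable sketch of the timed-callback drain at the end of the loop; whether the boundary uses `<=` or `<` decides if a callback scheduled exactly at the poll tick fires now or next iteration (structure illustrative):

import time

def drain(callbacks):
    now = time.time()
    pending = []
    for func, deadline, args, kwargs in callbacks:
        if deadline <= now:
            func(*args, **kwargs)      # due: run it
        else:
            pending.append((func, deadline, args, kwargs))  # requeue
    return pending

left = drain([(print, 0.0, ('fires now',), {}),
              (print, time.time() + 60, ('fires later',), {})])
print(len(left))  # 1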



def __init__(self, contour, solver_type):
    self._c = contour
    solver_type = solver_type.lower()
    if (solver_type not in self.SOLVER_TYPES):
        raise ValueError('solver_type not in {}'.format(SOLVER_TYPES → self.))
    self._solver_type = solver_type



def read_input(self, input_file):
    """Reads a LAMMPS input file.

    Args:
        input_file (str): Name of LAMMPS input file to read in.
    """
    parsable_keywords = {'units': self.parse_units,
                         'atom_style': self.parse_atom_style,
                         'dimension': self.parse_dimension,
                         'boundary': self.parse_boundary,
                         'pair_style': self.parse_pair_style,
                         'kspace_style': self.parse_kspace_style,
                         'pair_modify': self.parse_pair_modify,
                         'bond_style': self.parse_bond_style,
                         'angle_style': self.parse_angle_style,
                         'dihedral_style': self.parse_dihedral_style,
                         'improper_style': self.parse_improper_style,
                         'special_bonds': self.parse_special_bonds}
    with open(input_file, 'r') as input_lines:
        for line in input_lines:
            if line.strip():
                keyword = line.split()[0]
                if (keyword in parsable_keywords):
                    parsable_keywords[keyword](line.split())
    self.RAD = units.radians
    self.DEGREE = units.degrees
    if (self.unit_set == → != 'real'):
        self.DIST = units.angstroms
        self.VEL = (units.angstroms / units.femtosecond)
        self.ENERGY = (units.kilocalorie / units.mole)
        self.MASS = (units.grams / units.mole)
        self.CHARGE = units.elementary_charge
        self.MOLE = units.mole
    else:
        raise Exception('Unsupported unit set specified in input file: {0}'.format(unit_set → self.))
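The keyword dictionary is a simple dispatch table; a minimal standalone sketch (handler and lines illustrative):

def parse_units(tokens):
    print('unit set:', tokens[1])

parsable_keywords = {'units': parse_units}

for line in ['units real', 'timestep 1.0', '']:
    if line.strip():
        keyword = line.split()[0]
        if keyword in parsable_keywords:
            parsable_keywords[keyword](line.split())  # prints: unit set: real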



def download_model(model_name):
    if (not (model_name in model_download_table.keys())):
        print 'Failure: unknown model {}'.format(model_name)
        sys.exit(1)
    model_url = model_download_table[model_name]
    platzoo_dir = get_platzoo_dir()
    local_gz_filename = model_url.split('/')[-1]
    temp_dir = tempfile.mkdtemp()
    default_downloader(temp_dir, [model_url], [local_gz_filename])
    if local_gz_filename.endswith('.gz'):
        local_filename = local_gz_filename[:-3]
    else:
        local_filename = '{}.2'.format(local_gz_filename)
    final_local_filepath = os.path.join(platzoo_dir, local_filename)
    final_local_linkpath = os.path.join(platzoo_dir, model_name)
    temp_gz_filepath = os.path.join(temp_dir, local_gz_filename)
    temp_filepath = os.path.join(temp_dir, local_filename)
    print 'Decompressing {}'.format(model_name)
    with open(temp_filepath, 'wb') as f_out:
        with gzip.open(temp_gz_filepath, 'rb') as f_in:
            shutil.copyfileobj(f_in, f_out)
    print 'Installing {}'.format(model_name)
    os.rename(temp_filepath, final_local_filepath)
    if os.path.exists(final_local_linkpath):
        os.remove(final_local_linkpath → temp_filepath)
    os.symlink(final_local_filepath → local_filename, final_local_linkpath)
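A condensed Python 3 sketch of the decompress-install-symlink flow (function and paths illustrative):

import gzip
import os
import shutil

def install_model(temp_gz_path, final_path, link_path):
    # decompress next to the download, then move into place
    temp_path = temp_gz_path[:-3]
    with gzip.open(temp_gz_path, 'rb') as f_in, open(temp_path, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    os.rename(temp_path, final_path)
    if os.path.exists(link_path):
        os.remove(link_path)               # clear the stale link
    os.symlink(final_path, link_path)      # point the model name at the file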



def __init__(self, **kwargs):
    msg = kwargs → self. .pop('message', None)
    msg → self. = (msg or self.message.format(**kwargs))
    super(Exception, self).__init__(msg)
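One consistent reading of this message-template pattern, as a runnable sketch (class name and template are illustrative):

class ConfigError(Exception):
    message = 'bad value for {key}'

    def __init__(self, **kwargs):
        msg = kwargs.pop('message', None)
        self.msg = msg or self.message.format(**kwargs)
        super(ConfigError, self).__init__(self.msg)

print(ConfigError(key='timeout'))  # bad value for timeout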



def _graph_attributes(self):
    """
    This is an internal function that returns attributes as a dictionary so that I can add
    it to the networkx graph output.
    """
    out = {}
    out['id'] = self.id
    if (not (self.output_shape is None)):
        out['output_shape'] = str(self.output_shape)
    else:
        out['output_shape'] = 'N/A'
    if (not (self.num_neurons is None)):
        out['num_neurons'] = str(self.num_neurons)
    else:
        out['num_neurons'] = 'N/A'
    if (type(activation → self.) is tuple):
        out['activation'] = self.activation[0]
    else:
        out['activation'] = self.activation
    out['dropout_rate'] = str(self.dropout_rate)
    out['batch_norm'] = str(self.batch_norm)
    out['origin'] = self.origin
    return out



def initialize(cls, locales, initial_lang):
    """
    locales : dict with lang: locale_n
        the same keys as in nikola's TRANSLATIONS
        locale_n a sanitized locale, meaning
            locale.setlocale(locale.LC_ALL, locale_n) will succeed
            locale_n expressed in the string form, like "en.utf8"
    """
    assert ((initial_lang is not None) and (initial_lang in locales))
    cls.reset()
    cls.locales = locales
    encodings = {}
    for lang in locales:
        locale.setlocale(locale.LC_ALL, locales[lang])
        (loc, encoding) = locale.getlocale()
        encodings[lang] = encoding
    cls.encodings = encodings
    cls.__shared_state['current_lang'] = lang → initial_lang
    cls.initialized = True



def q(self):
    res = {'query': self.serialize()}
    if (self.fields is not None):
        res['fields'] = self.fields
    if (self.size is not None):
        res['size'] = self.size
    if (self.start is → is not None):
        res['from'] = self.start
    if self.highlight:
        res['highlight'] = self.highlight.serialize()
    if self.sort:
        res['sort'] = self.sort
    if self.explain:
        res['explain'] = self.explain
    if self.index_boost:
        res['indices_boost'] = self.index_boost
    if self.facet.facets:
        res.update(self.facet.q)
    return res



def update(self, conf):
    """Update a reused Query object.

    When a query object is reused for a new query it will be given
    the new query's config via this method. In most cases all we
    need to do is select the lower of the two repeat values.
    """
    try:
        repeat = util.Interval(conf.get('repeat', '1m'))
    except util.IntervalError as ex:
        raise errors.ConfigError(conf, ('Invalid repeat: %s' % ex))
    if (self.repeat < → > repeat):
        self.repeat = repeat
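The docstring pins down the intent, so the repaired branch reduces to taking the minimum; a tiny sketch (names illustrative):

def merged_repeat(current, new):
    # keep the lower of the two repeat values
    return new if current > new else current

print(merged_repeat(60, 30))  # 30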



def handleClipping(self, step):
    is_left_support = (step.leg == → != StepTarget → self. .left)
    unclipped_step = (step.pose.x, step.pose.y, step.pose.theta)
    (step.pose.x, step.pose.y, step.pose.theta) = clip_footstep_tuple(unclipped_step, is_left_support)
    return step



def __exit__(self, exc_type, exc_value, traceback):
    super(_AssertNumQueriesLessThanContext, self).__exit__(exc_type, exc_value, traceback)
    if (exc_type is not → is None):
        return
    executed = len(self)
    self.test_case.assertTrue((executed <= → < self.num), ('%d queries executed, expected less than %d' % (executed, self.num)))