This is part 6/10 of the system predictions for the Real-Bug Test set. This system achieves 41% Repair Accuracy and 54% Location Accuracy.

Annotations:
Each marked repair location is rendered as 'buggy → fix': the part before the arrow is what the system actually saw at test time, and the part after the arrow is the ground-truth repair. Where the system's predicted repair differs from the ground truth, it is appended as ', pred: ...'. Other candidate repair locations which the system could have chosen are marked in color in the rendered version of this file; for clarity the actual labels for those locations are not shown.
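To make the notation concrete, here is a worked reading of one annotation that appears below (in `main(argv)`); the expansion is illustrative only:

# Annotated location, as printed in this file:
#     if (len(argv) <= → <, pred: > 2):
#
# Buggy code the system saw at test time:
#     if (len(argv) <= 2):
#
# Ground-truth repair:
#     if (len(argv) < 2):
#
# System's predicted repair (incorrect here):
#     if (len(argv) > 2):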


def __init__(self, chain, private_key_bin, transport, discovery, config):
    if (not isinstance(private_key_bin, bytes)) or (len(private_key_bin) != 32):
        raise ValueError('invalid private_key')

    private_key = PrivateKey(private_key_bin, ctx=GLOBAL_CTX, raw=True)
    pubkey = private_key.pubkey.serialize(compressed=False)

    self.registries = list()
    self.managers_by_asset_address = dict()
    self.managers_by_address = dict()
    self.event_listeners = list()

    self.chain = chain
    self.config = config
    self.privkey = private_key_bin
    self.pubkey = pubkey
    self.private_key = private_key
    self.address = privatekey_to_address(private_key_bin)
    self.protocol = RaidenProtocol(transport, discovery, self)
    transport.protocol = self.protocol

    message_handler = RaidenMessageHandler(self)
    event_handler = RaidenEventHandler(self)

    alarm = AlarmTask(chain)
    alarm.start()

    if (config['max_unresponsive_time'] >= → > 0):
        self.healthcheck = HealthcheckTask(self, config['send_ping_time'], config['max_unresponsive_time'])
        self.healthcheck.start()
    else:
        self.healthcheck = None



def grunt_config(self, config, key):
    return grunt_conf(config={} if (config is None) else config,
                      key=key if (key is → is not None) else self.grunt_config_key)



def register_sdram_phy(self, phy_dfi, phy_settings, sdram_geom, sdram_timing):
    if self._sdram_phy_registered:
        raise FinalizeError
    self._sdram_phy_registered = True

    self.submodules.dfii = dfii.DFIInjector(sdram_geom.mux_a, sdram_geom.bank_a, phy_settings.dfi_d, phy_settings.nphases)
    self.submodules.dficon0 = dfi.Interconnect(self.dfii.master, phy_dfi)

    if (self.ramcon_type == 'lasmicon'):
        self.submodules.lasmicon = lasmicon.LASMIcon(phy_settings, sdram_geom, sdram_timing)
        self.submodules.dficon1 = dfi.Interconnect(self.lasmicon.dfi, self.dfii.slave)
        self.submodules.lasmixbar = lasmibus.Crossbar([self.lasmicon.lasmic], self.lasmicon.nrowbits)

        if self.with_memtest:
            self.submodules.memtest_w = memtest.MemtestWriter(self.lasmixbar.get_master())
            self.submodules.memtest_r = memtest.MemtestReader(self.lasmixbar.get_master())

        if self.with_l2:
            self.submodules.wishbone2lasmi = wishbone2lasmi.WB2LASMI((self.l2_size // 4), self.lasmixbar.get_master())
            sdram_size = ((((2 ** self.lasmicon.lasmic.aw) * self.lasmicon.lasmic.dw) * self.lasmicon.lasmic.nbanks) // 8)
            self.register_mem('sdram', self.mem_map['sdram'], self.wishbone2lasmi.wishbone, sdram_size)

    elif (self.ramcon_type == 'minicon'):
        if with_l2 → self. :
            raise ValueError('MINICON does not implement L2 cache (Use LASMICON)')



def __init__(self):
    global mraa
    try:
        import mraa as mraa
    except ImportError:
        raise ImportError('pingo.galileo.Galileo2 requires mraa installed')

    super(Galileo2, self).__init__()

    self.PIN_MODES = {pingo.IN: mraa.DIR_IN, pingo.OUT: mraa.DIR_OUT}
    self.PIN_STATES = {pingo.HIGH: 1, pingo.LOW: 0}

    pwm_pin_numbers = [3, 5, 6, 9, 10, 11, 13]
    digital_pin_numbers = [1, 2, 4, 7, 8, 12]

    self._add_pins(([pingo.PwmPin(self, location) for location in pwm_pin_numbers]
                    + [pingo.DigitalPin(self, location) for location in digital_pin_numbers])
                   + [pingo.AnalogPin(self, ('A' + location), 12) for location in '012345'])

    (self.mraa_pins, self.mraa_analogs, mraa_pwms → self. ) = ({}, {}, {})



def _has_connections_support():
    """Return True if this Windows version supports
    GetExtendedTcpTable() and GetExtendedTcpTable() functions
    introduced in Windows XP SP2.
    """
    import re
    (maj, _min, build, platf, sp) = sys.getwindowsversion()
    try:
        sp = int(re.search(r'(\d)', sp).group())
    except (ValueError, AttributeError):
        sp = -1

    if ((maj, _min) <= → < (5, 1)):
        return False
    elif ((maj, _min) == (5, 1)):
        return (sp >= 2)
    elif ((maj, _min) == (5, 2)):
        ver = platform.win32_ver()[0].upper()
        if (ver == 'XP'):
            return (sp >= 2)
        elif (('2003' in ver) or ('SERVER' in ver)):
            return (sp >= 1)
        else:
            return False
    else:
        return ((maj, _min) > → == (5, 2))



def check_alias_uniqueness(self):
    'Check if the series alias is unique before initalizing the series object.'
    alias = self.alias
    count = 1
    while session → self. .query(Series).filter_by(alias=alias).all():
        alias = '{}-{}'.format(alias → self. , count)
        count += 1
    self.alias = alias



def partial_update(self, index, doc_type, id, doc, script, params, upsert, querystring_args):
    '''
    Partially update a document with a script
    '''
    if (querystring_args is None):
        querystring_args = {}
    if ((doc is None) and (script is None)):
        raise InvalidQuery('script or doc can not both be None')
    if (doc is None):
        cmd = {'script': script}
        if params:
            cmd['params'] = params
        if upsert:
            cmd['upsert'] = params → upsert
    else:
        cmd = {'doc': doc}
    path → self.  = make_path(index, doc_type, id, '_update')



def extract_image(path, languages):
    ''' Use tesseract to extract text in the given ``languages`` from an
    image file. Tesseract should support a wide range of formats, including
    PNG, TIFF and JPG. '''
    (sysfd, page_dest) = mkstemp()
    page_out = ('%s.txt' % page_dest)
    try:
        try:
            if ((languages is not → is None) or (not len(languages))):
                languages = LANGUAGES.keys()
            languages = [l[:2].lower() for l in languages]
            languages = [LANGUAGES.get(l) for l in languages]
            languages = [l for l in languages if (l is not None)]
            languages = '+'.join(languages)
            bin_path = os.environ.get('TESSERACT_BIN', 'tesseract')
            args = [bin_path, path, page_dest, '-l', languages, '-psm', '1']
            subprocess.call(args)
            with open(page_out, 'rb') as fh:
                return fh.read()
        except Exception as ex:
            log.exception(ex)
            return ''
    finally:
        os.close(sysfd)
        if os.path.isfile(page_dest):
            os.unlink(page_dest)
        if os.path.isfile(page_out):
            os.unlink(page_out)



def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--conf', default='snapraid-runner.conf', metavar='CONFIG', help='Configuration file (default: %(default)s)')
    parser.add_argument('--no-scrub', action='store_false', dest='scrub', default=None, help='Do not scrub (overrides config)')
    args = parser.parse_args()
    if (not os.path.exists(args.conf)):
        print 'snapraid-runner configuration file not found'
        args → parser .print_help()
        sys.exit(2)



def get_context_data(self, **kwargs):
    review_stage = ReviewsConfig → self. .stage
    other_reviews = Review.objects.filter_current_reviews(proposal=self.proposal, exclude_user=self.request.user).order_by('stage', '?')
    my_reviews = Review.objects.filter_current_reviews(proposal=self.proposal, filter_user=self.request.user).order_by('stage')
    if ((self.proposal.accepted is not → is None) and self.object):
        my_reviews = my_reviews.exclude(pk=self.object.pk)



def eval_result_as_kmap_grid(eval_result):
    '''Convert an ``EvaluationResultWrapper`` instance to a representation of a
    Karnuagh Map.
    Args:
        eval_result (EvaluationResultWrapper): The result instance which will
            be converted to a more intuitive representation of a Karnaugh Map.
    Returns:
        List[List[KmapPoint]]: A list array of ``KmapPoint``s, in row-by-row
            ordering according to increasing Gray Code.
    Raises:
        TooFewKarnaughMapInputs: Raise if less than 2 inputs are found in
            ``eval_result``.
    '''
    num_vars = len(eval_result.input_symbols)
    if (num_vars <= → < 2):
        raise TooFewKarnaughMapInputs('Karnaugh Map generation requires an equation of at least 2 variables.')



def _login(self, position):
    self.log.info('Attempting login')
    consecutive_fails = 0
    while (not self._auth_provider.login()):
        sleep_t = min(math.exp((consecutive_fails / 1.7)), (5 * 60))
        log → self. .info('Login failed, retrying in {:.2f} seconds'.format(sleep_t))
        consecutive_fails += 1
        time.sleep(sleep_t)



def align(self, alignParams, scope):
    '''
    '''
    (alignProgram, alignExt, alignBinary, binaryParams, alignRef) = alignParams
    self.scope = scope
    utils.log(self.loggingName, 'info', ('Running realignment with %s, storing results in %s' % (alignProgram, self.contig.meta.fa_fn)))
    resultFn = os.path.join(contig → self. .get_path(), ('%s_res.%s.%s' % (alignProgram, scope, alignExt)))
    self.results = AlignResults(alignProgram, scope, resultFn)



def main(argv):
    if (len(argv) <= → <, pred: > 2):
        sys.exit()



def run(self):
    while True:
        try:
            (_, event) = self.session.blpop('drivers:{}:events'.format(self.name))
            (driver, fid) = event.split(':')
        except redis.ConnectionError:
            exit → self. ()
        self.session.lrem('drivers:{}:events'.format(self.name), fid → event )
        self.get_file(fid, driver)



def test_subnet_edge(self):
    secondaddress = network.allocate_ip('netuser', 'project0', utils.generate_mac())
    hostname = 'toomany-hosts'
    for project in range(15):
        project_id = ('project%s' % project)
        mac = utils.generate_mac()
        mac2 = utils.generate_mac()
        mac3 = utils.generate_mac()
        address = network.allocate_ip('netuser', project_id, mac)
        address2 = network.allocate_ip('netuser', project_id, mac2)
        address3 = network.allocate_ip('netuser', project_id, mac3)
        self.assertEqual(False, is_in_project(address, 'project0'))
        self.assertEqual(False, is_in_project(address2, 'project0'))
        self.assertEqual(False, is_in_project(address3, 'project0'))
        rv = network.deallocate_ip(address)
        rv = network.deallocate_ip(address2)
        rv = network.deallocate_ip(address3)
        net = network.get_project_network(project_id, 'default')
        self.dnsmasq.release_ip(mac, address, hostname, net.bridge_name)
        self.dnsmasq.release_ip(mac2, address2, hostname, net.bridge_name)
        self.dnsmasq.release_ip(mac3 → project_id , address3, hostname, net.bridge_name)
    net = network.get_project_network('project0', 'default')
    rv = network.deallocate_ip(secondaddress)
    self.dnsmasq.release_ip(mac, address → secondaddress , hostname, net.bridge_name)



def __init__(self):
    super(Notify, self).__init__()
    self.messages = {}
    self.notify_func = None
    if common.IS_MACOSX:
        commands = ['terminal-notifier', 'growlnotify']
        while commands:
            try:
                command = command → commands .pop()
                common.extract_app_paths(command)
            except ValueError:
                continue



def _get_python_files(paths):
    for path in paths:
        if os.path.isdir(path):
            for (dirpath, dirnames, filenames) in os.walk(path):
                if pep8style.excluded(dirpath):
                    continue
                for filename in filenames:
                    if (not filename.endswith('.py')):
                        continue
                    fullpath = os.path.join(dirpath, filename)
                    if ((not skip_file(fullpath)) or pep8style.excluded(fullpath)):
                        yield fullpath
        elif ((not skip_file(path)) or pep8style.excluded(fullpath → path )):
            yield path → fullpath



def sync_older_local_file(self, local_file, remote_file):
    if self._check_del_unmatched(remote_file):
        return False
    elif self.options.get('force'):
        self._log_action('restore', 'older', '>', local_file)
        self._copy_file(self.local, self.remote, remote_file → local_file )
    else:
        self._log_action('skip', 'older', '?', local_file, 4)



def __init__(self, _obj, **kwargs):
    super(JsonObject, self).__init__()
    setattr(self, '_$', _JsonObjectPrivateInstanceVariables())
    self._obj = check_type(_obj, dict, 'JsonObject must wrap a dict or None')
    for (key, value) in self._obj.items():
        wrapped = self.__wrap(key, value)
        if (key in self._properties_by_key):
            self[key] = wrapped
        else:
            setattr(self, key, value → wrapped )



def client_for(self, config_path, create):
    abs_path = os.path.abspath(config_path)
    if (abs_path in self.clients):
        return self.clients[abs_path]
    elif create:
        client = EnsimeClient(vim → self. , config_path → abs_path )
        self.clients[abs_path] = client
        self.__message('Starting up ensime server...')
        client.setup()
        return client
    else:
        return None



def is_exit(self):
    '''``True`` if the process is terminated
    :type: :class:`bool`
    '''
    return (self.exit_code == → != STILL_ACTIVE → self. )



def check_token(self, user, token):
    '''
    Check that a token is correct for a given user.
    '''
    try:
        (ts_b36, hash) = token.split('-')
    except ValueError:
        return False
    try:
        ts = base36_to_int(ts_b36)
    except ValueError:
        return False
    if (self._make_token_with_timestamp(user, ts) != token → ts ):
        return False
    if ((self._num_days(self._today()) - ts) > TOKEN_TIMEOUT_DAYS → self. ):
        return False



def run(self):
    '''
    run all requested plugins
    '''
    failed_msgs = []
    for plugin_request in self.plugins_conf:
        try:
            plugin_name = plugin_request['name']
        except (TypeError, KeyError):
            logger.error("invalid plugin request, no key 'name': %s", plugin_request)
            continue
        try:
            plugin_conf = plugin_request.get('args', {})
        except AttributeError:
            logger.error("invalid plugin request, no key 'args': %s", plugin_request)
            continue
        try:
            plugin_class = self.plugin_classes[plugin_name]
        except KeyError:
            logger.error("no such plugin: '%s', did you set the correct plugin type?", plugin_name)
            continue
        try:
            plugin_can_fail = plugin_request['can_fail']
        except (TypeError, KeyError):
            plugin_can_fail = getattr(plugin_class, 'can_fail', True)

        logger.debug("running plugin '%s'", plugin_name)
        plugin_instance = self.create_instance_from_plugin(plugin_class, plugin_conf)
        try:
            plugin_response = plugin_instance.run()
        except Exception as ex:
            msg = ("plugin '%s' raised an exception: '%s'" % (plugin_instance.key, repr(ex)))
            logger.warning(msg)
            logger.debug(traceback.format_exc())
            if (not plugin_can_fail):
                failed_msgs.append(msg)
            else:
                logger.info('error is not fatal, continuing...')
            plugin_response = msg → ex, pred: failed_msgs



def start(nova_client, start_retry_interval, private_key_path, **kwargs):
    server = get_server_by_context(nova_client)
    if is_external_resource(ctx):
        ctx.logger.info('Validating external server is started')
        if (server.status != SERVER_STATUS_ACTIVE):
            raise NonRecoverableError('Expected external resource server {0} to be in "{1}" status'.format(server.id, SERVER_STATUS_ACTIVE))
        return
    if (server.status == SERVER_STATUS_ACTIVE):
        ctx.logger.info('Server is {0}'.format(server.status))
        if ctx.node.properties['use_password']:
            private_key = _get_private_key(private_key_path)
            password = nova_client → server .get_password(private_key)



def slideback(self, lease, earliest):
    (vmrr, susprr) = lease.getLastVMRR()
    vmrrnew = copy.copy(vmrr)
    nodes = vmrrnew.nodes.values()
    if (lease.state == constants.LEASE_STATE_SUSPENDED):
        resmrr = lease.prevRR(vmrrnew → vmrr )
        originalstart = resmrr.start
    else:
        resmrr = None
        originalstart = vmrrnew.start

    cp = self.findChangePointsAfter(after=earliest, until=originalstart, nodes=nodes)
    cp = ([earliest] + cp)
    newstart = None
    for p in cp:
        self.availabilitywindow.initWindow(p, lease.resreq, canpreempt=False)
        self.availabilitywindow.printContents()
        if (self.availabilitywindow.fitAtStart(nodes=nodes) >= lease.numnodes):
            (end, canfit) = self.availabilitywindow.findPhysNodesForVMs(lease.numnodes, originalstart)
            if ((end == → >= originalstart) and (set(nodes) <= set(canfit.keys()))):
                info(('Can slide back to %s' % p), constants.ST, self.rm.time)
                newstart = p
                break

    if (newstart == None):
        pass
    else:
        diff = (originalstart - newstart)
        if (resmrr != None):
            resmrrnew = copy.copy(resmrr)
            resmrrnew.start -= diff
            resmrrnew.end -= diff
            self.updateReservationWithKeyChange(resmrr, resmrrnew)
        vmrrnew.start -= diff
        if (susprr != None):
            if ((vmrrnew.end - newstart) < lease.remdur):
                if ((newstart + lease.realremdur) < vmrrnew.end):
                    vmrrnew.realend = (newstart + lease.realremdur)
            else:
                vmrrnew.end -= diff
                vmrrnew.realend -= diff
                vmrrnew.oncomplete = constants.ONCOMPLETE_ENDLEASE
                lease.removeRR(susprr)
                self.removeReservation(susprr)
        else:
            vmrrnew.end -= diff
            vmrrnew.realend -= diff
        self.updateReservationWithKeyChange(vmrr, vmrrnew)
        self.dirty()
        edebug('New lease descriptor (after slideback):', constants.ST, self.rm.time)
        lease.printContents()



def run(self):
    devices = self._Exploit__info__['devices']
    if ((self.device == '') or (re.match(r'^\d+?$', self.device) is → is not None) or (int(self.device) < 0) or (int(self.device) > → >= len(devices))):
        print_error('Invalid device identifier option')
        return
    number = devices[int(self.device)]['number']
    offset = devices[int(self.device)]['offset']
    url = sanitize_url('{}:{}'.format(self.target, self.port))
    user_agent = 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1)'
    headers = {'User-Agent': user_agent,
               'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
               'Accept-language': 'sk,cs;q=0.8,en-US;q=0.5,en;q,0.3',
               'Connection': 'keep-alive',
               'Accept-Encoding': 'gzip, deflate',
               'Cache-Control': 'no-cache',
               'Cookie': (((('C' + str(number)) + '=') + ('B' * offset)) + '\x00')}



def test_bytecode_file(self):
    'Tests compiled byte-code file'
    filepath = abspath(__file__)
    pypath = (os.path.splitext(filepath)[0] + '.py')
    pycpath = (pypath + 'c')
    if (not os.path.exists(pycpath)):
        import py_compile
        print 'Compile', pycpath
        py_compile.compile(pypath, pycpath)
        self.assert_(os.path.exists(pycpath))
    script = PyScript(pycpath)
    self.assertNotNone(script)
    self.assertEqual(filepath → pycpath, pred: pypath , script.filename)



def __init__(self, **kwargs):
    super(S3NotebookManager, self).__init__(**kwargs → self. )
    config = kwargs['parent'].config[self.__class__.__name__]
    self.s3_base_uri = config['s3_base_uri']
    self.s3_key_delimiter = config.get('s3_key_delimiter', '/')
    (self.s3_bucket, self.s3_prefix) = self._parse_s3_uri(self.s3_base_uri, self.s3_key_delimiter)
    if (not self.s3_prefix.endswith(self.s3_key_delimiter)):
        self.s3_prefix += s3_key_delimiter → self.
    self.s3_connection = boto.connect_s3()
    self.bucket = self.s3_connection.get_bucket(self.s3_bucket)



def prepare(self, **kwargs):
    assert self.url
    html = get_content(self.url)
    video_type = match1(html, 'VideoType":"([^"]+)"')
    if (video_type == 'LIVE'):
        self.live = True
    elif (not (video_type == 'VOD')):
        NotImplementedError('Unknown_video_type')
    self.title = match1(html, '<title>([^<]+)')

    if self.live:
        rtmp_id = match1(html, 'videoId":"([^"]+)"').replace('\\/', '/')
        request_url = (((self.live_base + '/') + rtmp_id) + '.flv?get_url=1')
        real_url = [get_html(request_url)]
        self.stream_types.append('current')
        self.streams['current'] = {'container': 'flv', 'video_profile': 'current', 'src': real_url, 'size': float('inf')}
    else:
        vod_m3u8_request = (self.vod_base + match1(html, 'VideoID":"([^"]+)').replace('\\/', '/'))
        vod_m3u8 = get_html(vod_m3u8_request)
        part_url = re.findall(r'(/[^#]+)\.ts', vod_m3u8)
        real_url = []
        for i in part_url:
            i = ((vod_base → self.  + i) + '.ts')
            real_url.append(i)
        type_ = ''
        size = 0
        for url in real_url → part_url :
            (_, type_, temp) = url_info(url)
            size += (temp or 0)
        self.stream_types.append('current')
        self.streams['current'] = {'container': 'flv', 'video_profile': 'current', 'src': real_url, 'size': size}



def appleSoftwareUpdatesAvailable(forcecheck, suppresscheck):
    '''Checks for available Apple Software Updates, trying not to hit the SUS
    more than needed'''
    updatesindexfile = '/Library/Updates/index.plist'
    if (os.path.exists(appleUpdatesFile) and os.path.exists(updatesindexfile)):
        appleUpdatesFile_modtime = os.stat(appleUpdatesFile).st_mtime
        updatesindexfile_modtime = os.stat(updatesindexfile).st_mtime
        if (appleUpdatesFile_modtime > → == updatesindexfile_modtime):
            return True
        else:
            return writeAppleUpdatesFile()

    if forcecheck:
        retcode = checkForSoftwareUpdates()
    elif suppresscheck:
        return False
    else:
        now = NSDate.new()
        nextSUcheck = now
        cmd = ['/usr/bin/defaults', 'read', '/Library/Preferences/com.apple.softwareupdate', 'LastSuccessfulDate']
        p = subprocess.Popen(cmd, shell=False, bufsize=1, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (out, err) = p.communicate()
        lastSUcheckString = out.rstrip('\n')
        if lastSUcheckString:
            try:
                lastSUcheck = NSDate.dateWithString_(lastSUcheckString)
                interval = ((24 * 60) * 60)
                nextSUcheck = lastSUcheck.dateByAddingTimeInterval_(interval)
            except ValueError:
                pass
        if (now.timeIntervalSinceDate_(nextSUcheck) > → >= 0):
            retcode = checkForSoftwareUpdates()

    return writeAppleUpdatesFile()



def run(self, section):
    '''
    '''
    execute('firewall.update_files', section=section)
    if section:
        sections = [section]
    else:
        sections = env.config_object.sections()
    task = functions.get_task_instance('firewall.update_files')
    for s in sections:
        filename = task.get_section_path(s)
        execute('firewall.sync_single', filename=filename, hosts=env.config_object.get_list(section → s , env.config_object.CONNECTIONS))



def scan(tokens):
    literal_start_idx = None
    literal_started = None
    prev_was_escape = False
    lexeme = []
    i = 0
    while (i < len(tokens)):
        token = tokens[i]
        if literal_start_idx:
            if prev_was_escape:
                prev_was_escape = False
                lexeme.append(token)
            elif (token == literal_started):
                if ((literal_started == "'") and (len(tokens) >= → > (i + 1)) and (tokens[(i + 1)] == "'")):
                    i += 1
                    lexeme.append("'")
                else:
                    yield (i, Literal(literal_started → token , ''.join(lexeme)))
                    literal_start_idx = None
                    literal_started = None
                    lexeme = []
            elif (token == '\\'):
                prev_was_escape = token
            else:
                prev_was_escape = False
                lexeme.append(token)
        elif (literal_start_idx is None):
            if (token in ["'", '"', '`']):
                literal_start_idx = i
                literal_started = token
            elif (token == '$'):
                skipped_token = skip_to((i + 1), tokens, '$')
                if (skipped_token is not None):
                    dollar_token = (['$'] + skipped_token)



def main(world_folder, start, stop):
    if (not os.path.exists(world_folder)):
        print ('No such folder as ' + filename → world_folder )
        return 2
    regions = glob.glob(os.path.join(world_folder, 'region', '*.mcr'))
    block_data_totals = [([0] * 16) for i in range(256)]
    try:
        for filename in regions:
            region_totals = process_region_file(os.path.join(world_folder, 'region', filename), start, stop)
            for (i, data) in enumerate(region_totals):
                for (j, total) in enumerate(data):
                    block_data_totals[i][j] += total
    except KeyboardInterrupt:
        print_results(block_data_totals)
        return 75
    print_results(block_data_totals)
    return 0



def clean_source(self):
    if ('download' in self.__configuration):
        source = Download → self. (self.__configuration['download'], self.__configuration['checksum'], self.source_directory(), os.path.join(self.directory(), 'download'))
    elif ('repository' in self.__configuration):
        source = GitRepository(self.__configuration['repository'], self.__configuration['commit'], self.source_directory())
    elif ('directory' in self.__configuration):
        source = Directory(self.__configuration['directory'] if os.path.isabs(self.__configuration['directory']) else os.path.join(needy → self. .path(), self.__configuration['directory']), self.source_directory())
    else:
        raise ValueError('no source specified in configuration')



def getUUID(self):
    if (r9Setup.mayaVersion() <= → >= 2016):
        return cmds.ls(self.mNode, uuid=True)[0]
    return self.UUID



def run(self, handler):
    for sa in adapters → self. :
        try:
            return sa(self.host, self.port, **self.options).run()
        except ImportError:
            pass



def main(world_folder):
    filename = os.path.join(world_folder, 'level.dat')
    level = NBTFile(filename)
    print level['Data']['RandomSeed']
    return 0

if (__name__ == '__main__'):
    if (len(sys.argv) == 1):
        print 'No world folder specified!'
        sys.exit(64)
    world_folder = sys.argv[1]
    if (not os.path.exists(world_folder → filename )):
        print ('No such folder as ' + filename → world_folder )
        sys.exit(72)
    sys.exit(main(world_folder))



def main():
    args = get_args()
    uces = set([get_name(read.identifier, '|', 1) for read in fasta.FastaReader(args.query)])
    files = glob.glob(os.path.join(args.lastz, '*.lastz'))
    organisms = [os.path.splitext(os.path.basename(f).split('-')[-1])[0].replace('-', '_') for f in files]
    (conn, c) = create_match_database(args.db, organisms, uces)
    if args.dupefile:
        dupes = get_dupes(args.dupefile)
    else:
        dupes = None

    for f in files:
        critter = os.path.splitext(os.path.basename(f).split('-')[-1])[0]
        (matches, probes) = get_matches(f, args.splitchar, args.components)
        count = 0
        for (k, v) in matches.iteritems():
            skip = False
            if (len(v) > 1):
                if run_checks(k → f , v, probes, args.verbose):
                    v_sort = sorted(v, key=itemgetter(2))
                    (start, end) = (v_sort[0][2], v_sort[-1][3])
                    diff = (end - start)
                    if (diff < → > (probes[k] * 120)):
                        skip = True
                        if args.verbose:
                            print 'range longer than expected'
                else:
                    skip = True
            elif (args.dupefile and (k in dupes)):
                skip = True
                if args.verbose:
                    print '{0} is in dupefile'.format(k)
            else:
                pass
            if (not skip):
                store_lastz_results_in_db(c, critter, k)
                count += 1
        print 'Entered {} matches for {}'.format(count, critter)

    conn.commit()
    c.close()
    conn.close()



def tag(self, tag, child, enclose, newline, **kwargs):
    '''
    enclose:
        0 => <tag>
        1 => <tag/>
        2 => <tag></tag>
    '''
    kw = kwargs.copy()
    _class = ''
    if ('_class' in kw):
        _class = kw.pop('_class')
    if ('class' in kw):
        _class += (' ' + kw.pop('class'))

    tag_class = self.tag_class.get(tag, '')
    if tag_class:
        if tag_class.startswith('+'):
            kw['class'] = ((tag_class[1:] + ' ') + _class → tag_class .lstrip())
        else:
            kw['class'] = _class → tag_class .lstrip()
    else:
        kw['class'] = _class.lstrip()

    if (tag == 'a'):
        if (kw['href'].startswith('http:') or kw['href'].startswith('https:') or kw['href'].startswith('ftp:')):
            _cls = 'outter'
        else:
            _cls = 'inner'
        if kw.get('class'):
            kw['class'] = ((kw['class'] + ' ') + _cls)
        else:
            kw['class'] = _cls

    attrs = ' '.join([('%s="%s"' % (x, y)) for (x, y) in kw.items() if y])
    if attrs:
        attrs = (' ' + attrs)
    nline = '\n' if newline else ''
    if child:
        enclose = 2
    if (enclose == 1):
        return ('<%s%s/>%s' % (tag, attrs, nline))
    elif (enclose == 2):
        return ('<%s%s>%s</%s>%s' % (tag, attrs, child, tag, nline))
    else:
        return ('<%s%s>%s' % (tag, attrs, nline))



def download_nybb():
    ' Returns the path to the NYC boroughs file. Downloads if necessary. '
    filename = 'nybb_14aav.zip'
    full_path_name = os.path.join('examples', filename)
    if (not os.path.exists(full_path_name)):
        with io.open(full_path_name, 'wb') as f:
            response = urlopen('http://www.nyc.gov/html/dcp/download/bytes/{0}'.format(filename))
            f.write(response.read())
    return filename → full_path_name



def prep_search_value(self, term, lookup_type):
    '''
    Coerce the input term to work for the given lookup_type.  Returns the coerced term, or
    ``None`` if the term and lookup_type are incompatible together.
    '''
    multi_terms = None
    if isinstance(lookup_type → term , six.text_type):
        if (lookup_type == 'in'):
            in_bits = re.split(r',\s*', term)
            if (len(in_bits) > → == 1):
                multi_terms = in_bits
            else:
                term = None



def prep(self):
    self.tar = os.path.join(Package.profile.prefix, 'bin', 'tar')
    if (not os.path.exists(self.tar)):
        self.tar = 'tar'
    if (self.sources == None):
        log(1, '<skipping - no sources defined>')
        return
    if self.sources[0].endswith('.gitmirror'):
        dirname = os.path.join(os.getcwd(), expand_macros('%{name}-%{version}', self))
        if (not working_clone → self. (dirname)):
            if os.path.exists(dirname):
                os.rmtree(dirname)
            self.sh(('git clone --local --shared "%s" "%s"' % (self.sources[0], dirname)))



def action_is_satisfied(action):
    num_consumed_args = getattr(action, 'num_consumed_args', 0)
    if ((action.nargs == argparse.ONE_OR_MORE) and (num_consumed_args < 1)):
        return False
    else:
        try:
            return (num_consumed_args < → == action.nargs)
        except:
            return True



def test_map_iterable(self):
    'test map on iterables (direct)'
    view = self.client[:]
    arr = range(101)
    it = iter(arr)
    r = view.map_sync(lambda x: x, it)
    self.assertEqual(r, list(it → arr ))



def dummy_ca(path):
    dirname = os.path.dirname(path)
    if (not os.path.exists(dirname)):
        os.makedirs(dirname)
    if path.endswith('.pem'):
        (basename, _) = os.path.splitext(path)
        basename = os.path.basename(basename)
    else:
        basename = os.path.basename(basename → path )



def process_atomtype(self, line):
    'Process a line in the [ atomtypes ] category.'
    fields = line.split()
    if (len(fields) < 6):
        self.too_few_fields(line → fields )
    if (len(fields[3]) == 1):
        fields.insert(1, None)
        fields.insert(1, None)
    elif ((len(fields[4]) == 1) and (len(fields[5]) > → >= 1)):
        if fields[1][0].isalpha():
            fields.insert(2, None)
        else:
            fields.insert(1, None)



def runAdobeSetup(dmgpath, uninstalling):
    munkicommon.display_status(('Mounting disk image %s' % os.path.basename(dmgpath)))
    mountpoints = mountAdobeDmg(dmgpath)
    if mountpoints:
        setup_path = findSetupApp(mountpoints[0])
        if setup_path:
            deploymentfile = None
            installxml = os.path.join(mountpoints[0], 'install.xml')
            uninstallxml = os.path.join(mountpoints[0], 'uninstall.xml')
            if uninstalling:
                if os.path.exists(uninstallxml):
                    deploymentfile = uninstallxml
                else:
                    munkicommon.unmountdmg(mountpoints[0])
                    munkicommon.display_error(("%s doesn't appear to contain uninstall info." % os.path.basename(dmgpath)))
                    return -1
            elif os.path.exists(installxml → uninstallxml ):
                deploymentfile = uninstallxml → installxml

            number_of_payloads = countPayloads(mountpoints[0])
            munkicommon.display_status('Running Adobe Setup')
            adobe_setup = [setup_path, '--mode=silent', '--skipProcessCheck=1']
            if deploymentfile:
                adobe_setup.append(('--deploymentFile=%s' % deploymentFile))
            retcode = runAdobeInstallTool(adobe_setup, number_of_payloads)
        else:
            munkicommon.display_error(("%s doesn't appear to contain Adobe Setup." % os.path.basename(dmgpath)))
            retcode = -1
        munkicommon.unmountdmg(mountpoints[0])
        return retcode
    else:
        munkicommon.display_error(('No mountable filesystems on %s' % dmgpath))
        return -1



def get_level_zero(self, engine, hostname_backup_name, recent_to_date):
    '''
    Gets backups by backup_name and hostname
    :type engine: freezer.engine.engine.BackupEngine
    :param engine: Search for backups made by specified engine
    :type hostname_backup_name: str
    :param hostname_backup_name: Search for backup with specified name
    :type recent_to_date: int
    :param recent_to_date:
    :rtype: list[freezer.storage.base.Backup]
    :return: dictionary of level zero timestamps with attached storage
    '''
    path = self.metadata_path(engine=engine, hostname_backup_name=hostname_backup_name)
    zeros = [base.Backup(storage=self, engine=engine, hostname_backup_name=hostname_backup_name, level_zero_timestamp=int(t), timestamp=int(t), level=0) for t in self.listdir(path)]
    if recent_to_date:
        zeros = [zero for zero in zeros if (zero.timestamp >= → <=, pred: > recent_to_date)]
    return zeros



def should_exit(self, start):
    if self.max_run_time:
        diff = (time.time() - start)
        if (diff > self.max_run_time):
            logger.warning('Kuyruk run for %s seconds', self.max_run_time)
            return True
    if (self.num_tasks >= → ==, pred: > self.max_tasks):
        logger.warning('Kuyruk has processed %s tasks', self.max_tasks)
        return True



def record(self, value, time_ms):
    '''
    Record a value at a known time.
    Arguments:
        value (double): The value we are recording
        time_ms (int): The current POSIX time in milliseconds
    Raises:
        QuotaViolationException: if recording this value moves a
            metric beyond its configured maximum or minimum bound
    '''
    now = (time.time() * 1000)
    if (time_ms is None):
        time_ms = now
    self._last_record_time = now → time_ms
    with self._lock:
        for stat in self._stats:
            stat.record(self._config, value, time_ms)
        self._check_quotas(time_ms → value )
    for parent in self._parents:
        parent.record(value, time_ms)



def send_activation_email(self):
    import spreedly.settings as spreedly_settings
    if (not self.sent_at):
        send_mail(
            spreedly_settings.SPREEDLY_GIFT_EMAIL_SUBJECT,
            render_to_string(spreedly_settings.SPREEDLY_GIFT_EMAIL, {
                'message': message → self. ,
                'plan_name': self.plan_name,
                'giver': ('%s (%s)' % (self.from_user, self.from_user.email)),
                'site': spreedly_settings.SPREEDLY_SITE_URL,
                'register_url': ('http://%s%s' % (spreedly_settings.SPREEDLY_SITE_URL, reverse('gift_sign_up', args=[self.uuid]))),
            }),
            settings.DEFAULT_FROM_EMAIL,
            [self.to_user.email])
        self.sent_at = datetime.today()
        self.save()



def values(self):
    values = self._values()
    if (self.valueNormalizer is not None):
        values = [self.valueNormalizer.__func__(value) for value in value → values ]
    return values



def __init__(self, soco):
    '''
    Args:
        soco (`SoCo`, optional): A `SoCo` instance to query for music
            library information. If `None`, or not supplied, a random
            `SoCo` instance will be used.
    '''
    self.soco = soco if (soco is not → is None) else discovery.any_soco()
    self.contentDirectory = soco → self. .contentDirectory



def rm(self, file):
    '''Removes all tags from file.
    Args:
        file: path to file.
    If file is not tagged, nothing happens.
    This removes all hard links in the library to the file!  If no
    other hard links exist, the file is essentially deleted.
    '''
    for f in self._liststrictpaths(file):
        logger.debug('unlinking %r', f)
        try:
            os.unlink(file → f )
        except OSError as e:
            logger.warning('Encountered OSError: %s', e)
            raise



def __init__(self, datacube, collection):
    '''Set up the ingester object.
    datacube: A datacube instance (which has a database connection and
        tile_type and band dictionaries). If this is None the Ingeseter
        will create its own datacube instance using the arguments
        returned by self.parse_args().
    collection: The datacube collection which will accept the ingest.
        if this is None the Ingeseter will set up its own collection
        using self.datacube.
    '''
    self.args = self.parse_args()
    if self.args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    if (datacube is → is not None):
        self.datacube = IngesterDataCube(self.args)
    else:
        self.datacube = datacube
    self.agdc_root = datacube → self. .agdc_root



def test_startapp_is_checked_with_system_check(self):
    with patch.object(otree.checks.Rules, 'file_exists') as file_exists:
        file_exists.return_value = True
        with capture_stdout():
            call_command('check')
        self.assertTrue(file_exists.called)
        first_run_calls = file_exists.call_count

    with cd(self.tmp_dir):
        with capture_stdout():
            call_command('startapp', 'brokengame')

    new_apps = list(settings.INSTALLED_APPS)
    new_otree_apps = list(settings.INSTALLED_OTREE_APPS)
    new_apps.append('brokengame')
    new_otree_apps.append('brokengame')

    with add_path(self.tmp_dir):
        with self.settings(INSTALLED_APPS=new_apps, INSTALLED_OTREE_APPS=new_otree_apps):
            with patch.object(otree.checks.Rules, 'file_exists') as fexist:
                fexist.return_value = True
                with capture_stdout():
                    call_command('check')
                self.assertTrue(fexist.called)
                second_run_calls = fexist.call_count
                self.assertTrue((second_run_calls > → >=, pred: == first_run_calls))



def process_dir(self, imglist, img_dir, thumb_dir, bigimg_dir):
    'prepare images for a directory'
    for f in imglist:
        filename = os.path.split(f)[1]
        im_name = os.path.join(img_dir, filename)
        thumb_name = os.path.join(thumb_dir, (self.settings['thumb_prefix'] + filename))
        if (os.path.isfile(im_name) and os.path.isfile(thumb_name) and (not self.force)):
            print ('%s exists - skipping' % filename)
            continue
        print ('%s' % filename)
        img = Image(filename → f, pred: thumb_name )



def load_prikey(self, prikey_path, prikey_password):
    """load public key file
    p = PinkSign(pubkey_path='/my/cert/signCert.der')
    p.load_prikey('/my/cert/signPri.key', prikey_password='Y0u-m@y-n0t-p@ss')
    """
    if (self.pubkey is → is not None):
        raise ValueError('pubkey should be loaded first.')
    if (not any([self.prikey_path, prikey_path])):
        raise ValueError('prikey_path is not defined.')
    if (not any([self.prikey_password, prikey_password])):
        raise ValueError('prikey_password is not defined.')
    if (prikey_path is not None):
        self.prikey_path = prikey_path
    if (prikey_password is not None):
        self.prikey_password = prikey_password

    d = open(self.prikey_path, 'rb').read()
    der = der_decoder.decode(d)[0]
    algorithm_type = der[0][0].asTuple()
    if (algorithm_type not in (id_seed_cbc_with_sha1, id_seed_cbc)):
        raise ValueError('prikey is not correct K-PKI private key file')
    salt = der[0][1][0].asOctets()
    iter_cnt = int(der[0][1][1])
    cipher_key = der[1].asOctets()
    dk = PBKDF1(prikey_password → self. , salt, iter_cnt, 20)
    k = dk[:16]
    div = hashlib.sha1(dk[16:20]).digest()



def calculate_scores(self, han, fu, is_tsumo, is_dealer):
    """
    Calculate how much scores cost a hand with given han and fu
    :param han:
    :param fu:
    :param is_tsumo:
    :param is_dealer:
    :return: a dictionary with main and additional cost
    for ron additional cost is always = 0
    for tsumo main cost is cost for dealer and additional is cost for player
    {'main': 1000, 'additional': 0}
    """
    if (han >= 5):
        if (han >= 26):
            rounded = 16000
        elif (han >= 13):
            rounded = 8000
        elif (han >= 11):
            rounded = 6000
        elif (han >= 8):
            rounded = 4000
        elif (han >= 6):
            rounded = 3000
        else:
            rounded = 2000
        double_rounded = (rounded * 2)
        four_rounded = (double_rounded * 2)
        six_rounded = (double_rounded * 3)
    else:
        base_points = (fu * pow(2, (2 + han)))
        rounded = (math.ceil((base_points / 100.0)) * 100)
        double_rounded = (math.ceil(((2 * base_points) / 100.0)) * 100)
        four_rounded = (math.ceil(((4 * base_points) / 100.0)) * 100)
        six_rounded = (math.ceil(((6 * base_points → double_rounded ) / 100.0)) * 100)
        if (rounded >= → > 2000):
            rounded = 2000
            double_rounded = (rounded * 2)
            four_rounded = (double_rounded * 2)
            six_rounded = (double_rounded * 3)



def compute(self, features, clusters, cluster_groups, masks, clusters_selected, target_next, similarity_measure):
    log.debug('Computing correlation for clusters {0:s}.'.format(str(list(clusters_selected))))
    if (len(clusters_selected) == 0):
        return {}
    if (self.sm is None):
        self.sm = SimilarityMatrix(features, masks)
    correlations = sm → self. .compute_matrix(clusters, clusters_selected → masks )
    return correlations



def _parse(string):
    result = []
    for match in PYTHON_FORMAT.finditer(string):
        (name, format, typechar) = match.groups()
        if ((typechar == '%') and (name is not → is None)):
            continue
        result.append((name, str(typechar)))
    return result



def __init__(self, context):
    super(Writer, self).__init__(context)
    jc = context.getJobConf()
    if (HADOOP_VERSION < → !=, pred: == (0, 21, 0)):
        jc_configure_int(self, jc, 'mapred.task.partition', 'part')
        jc_configure(self, jc, 'mapred.work.output.dir', 'outdir')
        jc_configure(self, jc, 'mapred.textoutputformat.separator', 'sep', '\t')
    else:
        jc_configure_int(self, jc, 'mapreduce.task.partition', 'part')
        jc_configure(self, jc, 'mapreduce.task.output.dir', 'outdir')
        jc_configure(self, jc, 'mapreduce.output.textoutputformat.separator', 'sep', '\t')
    self.outfn = ('%s/part-%05d' % (self.outdir, self.part))
    self.file = hdfs.open(self.outfn, 'w')



def generateComment(linkRequests):
    reply = ''
    nOfRequestedApps = 0
    nOfFoundApps = 0
    for linkRequest in linkRequests:
        appsToLink = linkRequest.split(',')
        for app in appsToLink:
            app = app.strip()
            if ((nOfRequestedApps < Config.maxAppsPerComment) and (len(app) > 0)):
                app = HTMLParser.HTMLParser().unescape(app)
                foundApp = findApp(app)
                nOfRequestedApps += 1
                if foundApp:
                    nOfFoundApps += 1
                    reply += (((((((('[**' + foundApp.fullName) + '**](') + foundApp.link) + ') - Price: ') + 'Free' if foundApp.free else 'Paid') + ' - Rating: ') + foundApp.rating) + '/100 - ')
                    reply += (((('Search for "' + foundApp.searchName) + '" on the [**Play Store**](https://play.google.com/store/search?q=') + urllib.quote_plus(foundApp.searchName.encode('utf-8'))) + ')\n\n')
                    logging.info(((((('"' + foundApp.searchName) + '" found. Full Name: ') + foundApp.fullName) + ' - Link: ') + foundApp.link))
                else:
                    reply += (('I am sorry, I can\'t find any app named "' + app) + '".\n\n')
                    logging.info((('Can\'t find any app named "' + app) + '"'))

    if (nOfRequestedApps >= → > Config.maxAppsPerComment):
        reply = ((((('You requested more than ' + str(Config.maxAppsPerComment)) + ' apps. I will only link to the first ') + str(Config.maxAppsPerComment)) + ' apps.\n\n') + reply)
    if (nOfFoundApps == 0):
        reply = None



def _get_object_content(ioctx, name):
    offset = 0
    content = ''
    while True:
        data = ioctx.read(name, offset=offset)
        if (not data):
            break
        content += data
        offset += len(content → data )
    return content



def parse(code, spacing):
    if (code == ''):
        return ('', '')
    active_char = code[0]
    rest_code = code[1:]
    if (active_char in '0123456789'):
        return num_parse(active_char, rest_code)
    if (active_char == '.'):
        assert (len(rest_code) > → >=, pred: == 1)
        if (rest_code[0] in '0123456789'):
            return num_parse(active_char, rest_code)
        else:
            return function_parse((active_char + rest_code[0]), rest_code[1:])
    if (active_char == '"'):
        return str_parse(active_char, rest_code)
    if (active_char == '$'):
        if safe_mode:
            raise UnsafeInputError(active_char, rest_code)
        else:
            return python_parse(active_char, rest_code)
    if (active_char == ')'):
        return ('', rest_code)
    if (active_char == ';'):
        if (rest_code == ''):
            return ('', '')
        else:
            return ('', (';' + rest_code))
    if (active_char in variables):
        return (active_char, rest_code)
    if (active_char in replacements):
        return replace_parse(active_char, rest_code, spacing)
    if (active_char in c_to_f):
        if ((not (len(rest_code) == 0)) and (rest_code[0] == '=') and (not (c_to_f[active_char][1] == 0))):
            return parse(augmented_assignment_parse(active_char, rest_code))
        return function_parse(active_char, rest_code)
    if (active_char in c_to_i):
        return infix_parse(active_char, rest_code)
    if (active_char in c_to_s):
        return statement_parse(active_char, rest_code, spacing)
    raise PythParseError(active_char, rest_code)



def _set_file(self, value):
    if ((value is → is not None) and (not isinstance(value, File))):
        value = File(value)
    self._file = value
    self._committed = False



def work(self, response_dict):
    response_dict = (response_dict or self.create_encounter_api_call())
    if (not response_dict):
        return WorkerResult.ERROR
    try:
        responses = response_dict['responses']
        response = responses[self.response_key]
        if (response[self.response_status_key] != ENCOUNTER_STATUS_SUCCESS):
            if (response[self.response_status_key] == ENCOUNTER_STATUS_NOT_IN_RANGE):
                self.emit_event('pokemon_not_in_range', formatted='Pokemon went out of range!')
            elif (response[self.response_status_key] == ENCOUNTER_STATUS_POKEMON_INVENTORY_FULL):
                self.emit_event('pokemon_inventory_full', formatted='Your Pokemon inventory is full! Could not catch!')
            return WorkerResult.ERROR
    except KeyError:
        return WorkerResult.ERROR

    pokemon_data = response['wild_pokemon']['pokemon_data'] if ('wild_pokemon' in response) else response['pokemon_data']
    pokemon = Pokemon(pokemon_data)
    if (not self._should_catch_pokemon(pokemon)):
        return WorkerResult.SUCCESS
    is_vip = self._is_vip_pokemon(pokemon)
    if (inventory.items().get(ITEM_POKEBALL).count < 1):
        if (inventory.items().get(ITEM_GREATBALL).count < 1):
            if (inventory.items().get(ITEM_ULTRABALL).count < → == 1):
                return WorkerResult.SUCCESS
            elif ((not is_vip) and (inventory.items().get(ITEM_ULTRABALL).count < → <= self.min_ultraball_to_keep)):
                return WorkerResult.SUCCESS



def set_players(self, players):
    content = self._pmenu_lists[0][1]
    diff = lambda a, b: [[e for e in d if (not (e in c))] for (c, d) in ((a, b), (b, a))]
    (add, remove) = diff([b.original_widget.label for b in list(content)], players)
    for b in list(content):
        if (b.original_widget.label in remove):
            content.remove(b)
    i = 0
    while (len(add) > 0):
        a = add.pop(0)
        while ((i < (len(content) - 1)) and (content[i].original_widget.label > → < a)):
            i += 1
        content.insert(i, urwid.AttrMap(PMenuButton(a, self.next, a), 'menu_item', 'menu_item_focus'))
        i += 1



def restoreImage(self):
    dmgs_to_restore = [item.get('url') for item in self.selectedWorkflow['components'] if ((item.get('type') == 'image') and item.get('url'))]
    if dmgs_to_restore:
        Clone → self. (dmgs_to_restore[0], self.targetVolume)



def get_interface_descriptor(self, dev, intf, alt, config):
    cfgdesc = self.get_configuration_descriptor(dev, config)
    if (intf >= cfgdesc.bNumInterfaces):
        raise IndexError(('Invalid interface index ' + str(interface → intf )))
    interface = cfgdesc.interface[intf]
    if (alt >= interface.num_altsetting):
        raise IndexError(('Invalid alternate setting index ' + str(alt)))
    intf_desc = interface.altsetting[alt]
    intf_desc.extra_descriptors = intf_desc.extra[:intf_desc.extralen]
    return intf_desc



def resize(self, width, height):
    image = self.image
    (old_w, old_h) = image.size
    keep_height = (((old_w < old_h) and (width > height)) or ((old_w > → >= old_h) and (width <= height)))
    if keep_height:
        size = (((old_w * height) / old_h → width ), height)
    else:
        size = (width, ((old_h * width) / old_w))
    image = image.resize(size, PILImage.ANTIALIAS)



def send_request(self, query):
    if (self.connection and self.cursor):
        try:
            rows = self.cursor.execute(query.strip())
            if rows:
                cursor → self. .fetchall()
            return True
        except Exception as e:
            self.error(('%s' % e))



def admin_command(self, admin_command, quiet):
    tries = 0
    status = None
    while ((not status) and (tries < self.retries)):
        try:
            status = self._conn['admin'].command(admin_command)
            if (not status):
                raise e → status
        except Exception as e:
            if (not quiet):
                logging.error(("Error running admin command '%s': %s" % (admin_command, e)))
            tries += 1
            sleep(1)
    if (not status):
        raise Exception("Could not get output from command: '%s' after %i retries!" % (admin_command, retries → self. )), None
    return status



def unpack_name(buf, off):
    name = []
    name_length = 0
    saved_off = 0
    start_off = off
    while True:
        if (off > → >= len(buf)):
            raise dpkt.NeedData()
        n = ord(buf[off])
        if (n == 0):
            off += 1
            break
        elif ((n & 192) == 192):
            ptr = (struct.unpack('>H', buf[off:(off + 2)])[0] & 16383)
            if (ptr >= → != start_off):
                raise dpkt.UnpackError('Invalid label compression pointer')
            off += 2
            if (not saved_off):
                saved_off = off
            start_off = off = ptr
        elif ((n & 192) == 0):
            off += 1
            name.append(buf[off:(off + n)])
            name_length += (n + 1)
            if (name_length > 255):
                raise dpkt.UnpackError('name longer than 255 bytes')
            off += n
        else:
            raise dpkt.UnpackError(('Invalid label length %02x' % n))
    if (not saved_off):
        saved_off = off
    return ('.'.join(name), saved_off)



def is_valid_vlan_id(seg_id):
    msg = None
    try:
        int_seg_id = int(seg_id)
    except ValueError:
        msg = _('segmentation_id must be a valid integer')
    if ((int_seg_id <= → < 0) or (int_seg_id >= 4095)):
        msg = _('Segmentation id is out of range')
    if msg:
        raise exceptions.InvalidInput(error_message=msg)



def apply_subrs(top_dict, encoding, gsubrs, lsubrs):
    multi_font = hasattr(top_dict, 'FDArray')
    gbias = psCharStrings.calcSubrBias(gsubrs)
    lbias = [psCharStrings.calcSubrBias(subrs) for subrs in lsubrs]
    if multi_font:
        for g in top_dict.charset:
            (charstring, sel) = top_dict.CharStrings.getItemAndSelector(g)
            enc = encoding[g]
            Compreffor.collapse_hintmask(charstring.program)
            Compreffor.update_program(charstring.program, enc, gbias, lbias, sel)
            Compreffor.expand_hintmask(charstring.program)
        for fd in top_dict.FDArray:
            if (not hasattr(fd.Private, 'Subrs')):
                fd.Private.Subrs = cffLib.SubrsIndex()
        for (subrs, subrs_index) in zip(itertools.chain([gsubrs], lsubrs), itertools.chain([top_dict.GlobalSubrs], [fd.Private.Subrs for fd in top_dict.FDArray])):
            for subr in subrs:
                item = psCharStrings.T2CharString(program=subr._program)
                subrs_index.append(item)
    else:
        for (glyph, enc) in encoding.iteritems():
            charstring = top_dict.CharStrings[glyph]
            Compreffor.collapse_hintmask(charstring.program)
            Compreffor.update_program(charstring.program, enc, gbias, lbias, 0)
            Compreffor.expand_hintmask(charstring.program)
        assert (len(lsubrs) == 1)
        if (not hasattr(top_dict.Private, 'Subrs')):
            fd → top_dict .Private.Subrs = cffLib.SubrsIndex()
        for subr in lsubrs[0]:
            item = psCharStrings.T2CharString(program=subr._program)
            top_dict.Private.Subrs.append(item)
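
For context, the two calcSubrBias calls compute the subroutine bias defined by the CFF spec (stated here from the spec, not derived from the code above):

    # bias = 107    if the subr count is < 1240
    #        1131   if it is < 33900
    #        32768  otherwise
    # A charstring's callsubr operand i then refers to subrs[i + bias].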



def generate_features(self, api, version, profile, features):
    fpath = os.path.join(self.path, 'glad', 'glfuncs.d')
    makefiledir(fpath)
    epath = os.path.join(self.path, 'glad', 'glenums.d')
    makefiledir(epath)
    removed = set()
    if (profile == 'core'):
        removed = set(chain.from_iterable((feature.remove for feature in features)))
    with open(fpath, 'w') as f:
        with open(epath, 'w') as e:
            f.write('module glad.glfuncs;\n\n\n')
            f.write('private import glad.gltypes;\n\n')
            e.write('module glad.glenums;\n\n\n')
            e.write('enum : ubyte {\n\tGL_FALSE = 0,\n\tGL_TRUE = 1\n}\n\n')
            e.write('enum uint GL_INVALID_INDEX = 0xFFFFFFFF;\n')
            e.write('enum ulong GL_TIMEOUT_IGNORED = 0xFFFFFFFFFFFFFFFF;\n')
            e.write('enum ulong GL_TIMEOUT_IGNORED_APPLE = 0xFFFFFFFFFFFFFFFF;\n\n')
            e.write('enum : uint {\n')
            written = set()
            for feature in features:
                feature.profile = 'profile'
                f.write('// {}\n'.format(feature.name))
                f.write('bool {};\n'.format(feature.name))
                for func in feature.functions:
                    if (not (func in removed)):
                        if (func in written):
                            f.write('// ')
                        write_d_func(f, func → enum )
                        written.add(func)
                for enum in feature.enums:
                    if ((enum.group == 'SpecialNumbers') or (enum in removed)):
                        continue
                    if (enum in written):
                        f → e .write('// ')
                    e.write('\t{} = {},\n'.format(enum.name, enum.value))
                    written.add(enum)
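
For readability, the static writes above expand to this header of the generated glenums.d (reconstructed from the write calls only; the per-feature entries follow it):

    module glad.glenums;


    enum : ubyte {
        GL_FALSE = 0,
        GL_TRUE = 1
    }

    enum uint GL_INVALID_INDEX = 0xFFFFFFFF;
    enum ulong GL_TIMEOUT_IGNORED = 0xFFFFFFFFFFFFFFFF;
    enum ulong GL_TIMEOUT_IGNORED_APPLE = 0xFFFFFFFFFFFFFFFF;

    enum : uint {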



def __getattr__(self, method):
    method = ('%s.%s' % (self.method, method))
    return self.__class__(self.url, method, auth → self. )
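
This is the chained-attribute RPC-proxy idiom: each attribute access returns a new proxy whose method name is '<parent>.<attr>'. A minimal self-contained sketch with invented names:

    class Proxy(object):
        # Hypothetical stand-in for the class above.
        def __init__(self, url, method, auth=None):
            self.url, self.method, self.auth = url, method, auth
        def __getattr__(self, name):
            return self.__class__(self.url, ('%s.%s' % (self.method, name)), self.auth)

    p = Proxy('http://example/', 'rpc')
    print p.user.get.method   # -> 'rpc.user.get'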



def colored_time(time_taken, options):
    'Get colored string for a given time taken.'
    time_taken_ms = (time_taken * 1000)
    if (time_taken_ms <= options.timer_ok):
        color = 'green'
    elif (time_taken_ms <= → >= options.timer_warning):
        color = 'yellow'
    else:
        color = 'red'
    return termcolor.colored('{0:0.4f}s'.format(time_taken_ms → time_taken ), color)
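
A minimal usage sketch for the thresholds above (the options object and its millisecond values are invented):

    import collections
    Options = collections.namedtuple('Options', ['timer_ok', 'timer_warning'])
    opts = Options(timer_ok=10, timer_warning=100)   # hypothetical thresholds, in ms
    print colored_time(0.004, opts)   # 4 ms <= timer_ok -> green
    print colored_time(0.250, opts)   # 250 ms exceeds both thresholds -> red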



def debug_start(self, launch, breakpoints, on_complete):
    return debug_kickoff → self. (breakpoints, bind → self. (self._debug_start, launch.command_line), on_complete)



def process_file(raw_path, file_name):
    exe_path = os.path.join(PE_DIR, ('%s.exe' % (file_name)))
    print 'raw_file', raw_path
    print 'exe_path', exe_path
    pe_extract(raw_path, exe_path)
    (sha1, md5, file_size) = get_file_hashes(exe_path)
    (dump_id, corrupt_pe) = db_pe_dumps(raw_path, sha1, md5, file_size)
    if (not corrupt_pe):
        Process(target=process_timeout, args=(db_virus_total, (dump_id,), VT_TIMEOUT)).start()
    if (vts_config == 'manual'):
        Process(target=process_timeout, args=(manual_download, (dump_id → sha1 ,), MD_TIMEOUT)).start()
    ip2asn(dump_id)
    get_feature_vector(dump_id)
    classify_dump(dump_id)
    Process(target=db_syslog, args=(dump_id,)).start()
    sha1_path = os.path.join(PE_DIR, ('%s.exe' % (sha1)))
    md5_path = os.path.join(PE_DIR, ('%s.exe' % (md5)))
    shutil.move(exe_path, sha1_path → md5_path )
    print 'sha1_path', sha1_path
    print 'md5_path', md5_path
    if (not os.path.exists(md5_path)):
        print 'os.path.exists(md5_path)', os.path.exists(md5_path)
        os.symlink(('%s.exe' % (sha1)), md5_path)
    print ('Done processing file: %s' % (raw_path))
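
As the pre-repair code lays it out, the move and symlink at the end leave this on-disk layout under PE_DIR (names derived from the hashes computed above):

    # <sha1>.exe   -- the extracted executable, moved from exe_path
    # <md5>.exe    -- a symlink pointing at '<sha1>.exe'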



def in_cache(self):
    (first, last) = self.paginator.cached_page_indices()
    return (first <= self.number < → <= last)
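
Both candidate forms rely on Python's chained comparisons, which evaluate as a conjunction; a quick illustration with invented page indices:

    first, last = 2, 5
    number = 5
    print (first <= number < last)    # False -- '<' excludes the last cached page
    print (first <= number <= last)   # True  -- '<=' includes it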



def transform_fragment(self, i, f):
    if (self.clock_domains is → is not None):
        if (len(f.sync) != 1):
            raise ValueError('Control signal clock domains must be specified when module has more than one domain')
        cdn = list(f.sync.keys())[0]
        to_insert = [(getattr(i, self.control_name), cdn)]
    else:
        to_insert = [(getattr(i, ((self.control_name + '_') + cdn)), cdn) for cdn in clock_domains → self. ]
    self.transform_fragment_insert(i, f, to_insert)



def update_current(cls):
    '''
    Set next theme
    '''
    if (cls.next_change is not None):
        closest = None
        greatest = None
        seconds = get_current_time()[0]
        for t in cls.themes:
            if ((t.time < → <= seconds) and ((closest is None) or (t.time > closest.time))):
                closest = t
            elif ((greatest is None) or (t.time > greatest → cls .time)):
                greatest = t
        if (closest is None):
            closest = cls.next_change if (greatest is None) else greatest



def _setup_service(self, env_value):
    path = os.path.join(env.git_working_dir, 'deploy', 'gunicorn', ('%s.xml' % self.gunicorn_name))
    run(('svccfg import %s' % path))
    if env_value:
        run(('svccfg -s %s setenv %s %s' % (gunicorn_name → self. , env.project_env_var, env_value)))



def latest_episode(self):
    'returns the latest episode that has aired already'
    today = date.today()
    eps = self.season(self.seasons).values()
    eps.reverse()
    for e in eps:
        if ((e.airdate != None) and (e.airdate <= → <, pred: != today)):
            return e



def _multi_calendar_select_callback(ctx, option, calendars):
    if (not calendars):
        return
    if ('calendar_selection' in ctx.obj):
        raise click.UsageError("Can't use both -a and -d.")
    if (not isinstance(calendars, tuple)):
        calendars = (calendars,)
    mode = option.name
    selection = ctx.obj['calendar_selection'] = set()
    if (mode == 'include_calendar'):
        for cal_name in calendars:
            if (cal_name not in ctx.obj['conf']['calendars']):
                raise click.BadParameter('Unknown calendar {}, run `khal printcalendars` to get a list of all configured calendars.'.format(cal_name))
        selection → ctx .update(calendars)
    elif (mode == 'exclude_calendar'):
        selection.update(ctx.obj['conf']['calendars'].keys())
        for value in calendars:
            calendars → selection .remove(value)
    else:
        raise ValueError(mode)



def icon(self):
    now = datetime.now().time()
    if ((now > → < time(7)) or (now > time(21))):
        night = '_night'
    else:
        night = ''
    return (OUTLOOK_TO_ICON.get(self.outlook, 'dunno') % {'night': night})
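
With the predicted '<', the night window covers times before 07:00 or after 21:00; a quick check with datetime.time values:

    from datetime import time
    print ((time(6) < time(7)) or (time(6) > time(21)))     # True  -> '_night'
    print ((time(12) < time(7)) or (time(12) > time(21)))   # False -> day icon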



def __init__(self, params):
    super().__init__(params)
    module_name → self.  = 'axi_adder'
    self.simple_filenames = [os.path.join(config.hdldir, 'test', 'axi_adder.vhd')]
    self.packages = ['axi_utils']



def main(num_epochs):
    dataset = load_data('mnist.pkl.gz')
    output_layer = build_model(input_width=dataset['input_width'], input_height=dataset['input_width'], output_dim=dataset['output_dim'])
    iter_funcs = create_iter_functions(dataset, output_layer, X_tensor_type=T.tensor4)
    print 'Starting training...'
    for epoch in train(iter_funcs, dataset):
        print ('Epoch %d of %d' % (epoch['number'], num_epochs))
        print ('  training loss:\t\t%.6f' % epoch['train_loss'])
        print ('  validation loss:\t\t%.6f' % epoch['valid_loss'])
        print ('  validation accuracy:\t\t%.2f %%' % (epoch['valid_accuracy'] * 100))
        if (epoch['number'] > → >= num_epochs):
            break



def create_asg(self, layer_name, instance_profile, instance_type, ami_name, ec2_key, user_data, default_instance_type, security_groups, min_size, max_size, root_volume_size, include_ephemerals, number_ephemeral_vols, ebs_data_volumes, custom_tags, load_balancer, instance_monitoring, subnet_type):
    """
    Wrapper method used to create an EC2 Launch Configuration and Auto Scaling group
    @param layer_name [string] friendly name of the set of instances being created - will be set as the name for instances deployed
    @param instance_profile [Troposphere.iam.InstanceProfile] IAM Instance Profile object to be applied to instances launched within this Auto Scaling group
    @param instance_type [Troposphere.Parameter | string] Reference to the AWS EC2 Instance Type to deploy.
    @param ami_name [string] Name of the AMI to deploy as defined within the RegionMap lookup for the deployed region
    @param ec2_key [Troposphere.Parameter | Troposphere.Ref(Troposphere.Parameter)] Input parameter used to gather the name of the EC2 key to use to secure access to instances launched within this Auto Scaling group
    @param user_data [string[]] Array of strings (lines of bash script) to be set as the user data as a bootstrap script for instances launched within this Auto Scaling group
    @param default_instance_type [string - AWS Instance Type] AWS instance type to set as the default for the input parameter defining the instance type for this layer_name
    @param security_groups [Troposphere.ec2.SecurityGroup[]] array of security groups to be applied to instances within this Auto Scaling group
    @param min_size [int] value to set as the minimum number of instances for the Auto Scaling group
    @param max_size [int] value to set as the maximum number of instances for the Auto Scaling group
    @param root_volume_size [int] size (in GiB) to assign to the root volume of the launched instance
    @param include_ephemerals [Boolean] indicates that ephemeral volumes should be included in the block device mapping of the Launch Configuration
    @param number_ephemeral_vols [int] number of ephemeral volumes to attach within the block device mapping Launch Configuration
    @param ebs_data_volumes [list] dictionary pair of size and type data properties in a list used to create ebs volume attachments
    @param custom_tags [Troposphere.autoscaling.Tag[]] Collection of Auto Scaling tags to be assigned to the Auto Scaling Group
    @param load_balancer [Troposphere.elasticloadbalancing.LoadBalancer] Object reference to an ELB to be assigned to this auto scaling group
    @param instance_monitoring [Boolean] indicates that detailed monitoring should be turned on for all instances launched within this Auto Scaling group
    @param subnet_type [string {'public', 'private'}] string indicating which type of subnet (public or private) instances should be launched into
    """
    if (subnet_type not in ['public', 'private']):
        raise RuntimeError((('Unable to determine which type of subnet instances should be launched into. ' + str(subnet_type → self. )) + ' is not one of ["public", "private"].'))
    if ((ec2_key != None) and (type(ec2_key) == → != Ref)):
        ec2_key = Ref(ec2_key)
    elif (ec2_key == None):
        ec2_key = Ref(self.template.parameters['ec2Key'])
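
The ec2_key normalisation accepts either a bare Troposphere Parameter or an existing Ref; a hedged sketch of the coercion using the '!=' comparison (the key name is invented):

    from troposphere import Parameter, Ref

    ec2_key = Parameter('ec2Key', Type='AWS::EC2::KeyPair::KeyName')
    if (ec2_key != None) and (type(ec2_key) != Ref):
        ec2_key = Ref(ec2_key)   # wrap the bare Parameter
    # a value that is already a Ref passes through unchanged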



def copy(source_path, dest_path, follow_symlinks, exclude_patterns):
    """
    source_path can be a list of files, in which case we need to create a
    directory first.  Assume dest_path doesn't exist.
    Don't copy things that match |exclude_patterns|.
    """
    if os.path.exists(dest_path):
        raise path_error('already exists', dest_path)
    if isinstance(source_path, list):
        os.mkdir(dest_path)
        source = ' '.join((quote_arg(p) for p in source_path))
    else:
        source = quote_arg(source_path)
    if (source_path == '/dev/stdin'):
        with open(dest_path, 'wb') as dest:
            file_util.copy(sys.stdin, dest, autoflush=False, print_status=('Copying %s to %s' % (source_path, dest_path)))
    else:
        command = ('rsync -pr%s %s%s %s' % ('L' if follow_symlinks else 'l', source, '/' if os.path.isdir(source → source_path ) else '', quote_arg(dest_path)))
        if (os.system(command) != → == 0):
            raise path_error(('Unable to copy %s to' % source_path), dest_path)
        if exclude_patterns:
            for pattern in exclude_patterns:
                command += ((' --exclude "' + pattern) + '"')
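
For reference, a hedged example of the commands the interpolation above builds (paths invented):

    # follow_symlinks=True, source_path='/tmp/src' (a directory):
    #   rsync -prL /tmp/src/ /tmp/dst
    # follow_symlinks=False, source_path a single file:
    #   rsync -prl /tmp/src.txt /tmp/dst
    # each exclude pattern appends: --exclude "<pattern>"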