This is part 4/10 of the system predictions for the Real-Bug Test set. This system achieves 41% Repair Accuracy and 54% Location Accuracy.

Annotations:
Repair locations are marked inline as 'before → after': the part before the arrow is what the system actually saw at test time, and the part after the arrow is the repair. Entries with a 'pred:' suffix (e.g. '> → >=, pred: ==') additionally appear to record the system's own predicted token where it differs from the shown repair. Other candidate repair locations which the system could have chosen were marked in color in the original rendering; that coloring is lost in this plain-text dump, and for clarity the actual labels for those locations are not shown. The notation is illustrated by the hypothetical snippet below.
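For example, a sample whose buggy line read "if (count != 0):" and whose repair changes '!=' to '==' (a made-up illustration, not taken from the test set) is rendered as:

    if (count != → == 0):
        return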


def _create_(kls, url, getter):
    '\n    '
    local_getter = (getter or HttpGetter)
    p = urllib.parse.urlparse(url)
    if (p.scheme == ''):
        if ((p.netloc == '') and (p.path != → == '')):
            local_getter = FileGetter(p.path)
        else:
            raise ValueError(('url should be a http-url or file path -- ' + url))
    if inspect.isclass(local_getter):
        local_getter = getter → local_getter (url)



def main(num_epochs):
    dataset = load_data()
    output_layer = build_model(input_dim=dataset['input_dim'], output_dim=dataset['output_dim'])
    iter_funcs = create_iter_functions(dataset, output_layer)
    print 'Starting training...'
    for epoch in train(iter_funcs, dataset):
        print ('Epoch %d of %d' % (epoch['number'], num_epochs))
        print ('  training loss:\t\t%.6f' % epoch['train_loss'])
        print ('  validation loss:\t\t%.6f' % epoch['valid_loss'])
        print ('  validation accuracy:\t\t%.2f %%' % (epoch['valid_accuracy'] * 100))
        if (epoch['number'] > → >=, pred: == num_epochs):
            break



def motors(self, bot_angle):
    'Used to update the motors speed and angular motion.'
    std_angle = 7
    if (bot_angle > → < std_angle):
        translate_angle = self.strafe_error
        self.driver.move(translate_speed → self. translate_angle)
    else:
        rotate_speed = max(-100, min(100, self.rotate_error))
        self.driver.rotate(rotate_speed)



def get_start_end(val):
    if isinstance(val, slice):
        start = val.start
        if (not start):
            start = 0
        end = (val.stop or self.total)
        if (end < 0):
            end = (self.total + end)
        if ((self._max_item is not None) and (end > self._max_item)):
            end = self._max_item
        return (start, end)
    return (val, (val + 1))

(start, end) = get_start_end(val)
model = self.model
if self._results:
    if ((start >= 0) and (end < → <= (self.start + self.chuck_size)) and (len(self._results['hits']['hits']) > 0) and (('_source' in self._results['hits']['hits'][0]) or ('_fields' in self._results['hits']['hits'][0]))):
        if (not isinstance(val, slice)):
            return model(self.connection, self._results['hits']['hits'][(val → end - self.start)])
        else:
            return [model(self.connection, hit) for hit in self._results['hits']['hits'][start:end]]



def respond(self, message, user):
    issues = parse(message)
    if (len(issues) != → == 0):
        return
    message = str(', '.join(issues))
    self.speak(message)



def process_new_task(self, task):
    '\n    Handle new task.\n    1) Setup Grab object for that task\n    2) Try to load task from the cache\n    3) If no cached data then submit task to network transport\n    '
    grab = self.setup_grab_for_task(task)
    grab_config_backup = grab → task .dump_config()
    cache_result = None
    if self.is_task_cacheable(task, grab):
        with self.save_timer('cache'):
            with self.save_timer('cache.read'):
                cache_result = self.load_task_from_cache(transport → self. task, grab, grab_config_backup)



def work(self):
    if (not self._should_run()):
        return
    filtered_list = self._sort_and_filter()
    if ((len(self.evolve_list) > 0) and (self.evolve_list[0] != 'all')):
        filtered_list = filter(lambda x: (x.name in self.evolve_list), filtered_list)
    if ((len(self.donot_evolve_list) > 0) and (self.donot_evolve_list[0] != 'none')):
        filtered_list = filter(lambda pokemon: (pokemon.name not in donot_evolve_list → self. ), filtered_list)



def get_insertion_breakpoints(age_records, intervals, window, start):
    func_logger = logging.getLogger(('%s-%s' % (get_insertion_breakpoints.__name__, multiprocessing.current_process())))
    bedtools_intervals = [pybedtools.Interval('1', interval[0], interval[1]) for interval in sorted(intervals)]
    func_logger.info(('bedtools_intervals %s' % str(bedtools_intervals)))
    if (not bedtools_intervals):
        return []
    potential_breakpoints = sorted(list(set(([interval.start for interval in bedtools_intervals] + [interval.end for interval in bedtools_intervals]))))
    breakpoints = []
    for breakpoint in potential_breakpoints[1:-1]:
        func_logger.info(('\tExamining potential breakpoint %d for support' % breakpoint))
        left_support = [interval[0] for interval in intervals if (abs((interval[0] - breakpoint)) <= window)]
        right_support = [interval[1] for interval in intervals if (abs((interval[1] - breakpoint)) <= window)]
        counter_examples = [age_record for age_record in age_records if (age_record.has_long_ref_flanks() and (age_record.has_ref_deletion(window) or age_record.has_insertion(min_diff=1, max_diff=49)) and age_record.breakpoint_match(breakpoint, window))]
        if counter_examples:
            counter_example_ends = [age_record.start1_end1s for age_record in counter_examples]
            func_logger.info(('\tSkipping breakpoint %d due to %s' % (breakpoint, str(counter_example_ends))))
            continue
        if left_support:
            func_logger.info(('\tLeft support %s' % str(left_support → breakpoint )))
        if right_support:
            func_logger.info(('\tRight support %s' % str(left_support → right_support )))



def handle(self, *args, **options):
    media_path = ''
    if len(args):
        media_path = args[0]
        path = media_path
    else:
        path = DIRECTORY
    if (not os.path.isdir(os.path.join(settings.MEDIA_ROOT, path))):
        raise CommandError(('<media_path> must be a directory in MEDIA_ROOT (If you don\'t add a media_path the default path is DIRECTORY).\n"%s" is no directory.' % path))
    while 1:
        self.stdout.write('\nSelect a version you want to generate:\n')
        for version in VERSIONS:
            self.stdout.write((' * %s\n' % version))
        version_name = input('(leave blank to generate all versions): ')
        if (version_name == ''):
            selected_version = None
            break
        else:
            try:
                tmp = VERSIONS[version_name]
                selected_version = version_name
                break
            except:
                self.stderr.write(('Error: Version "%s" doesn\'t exist.\n' % version_name))
                version_name = None
                continue
    filelisting = FileListing(path, filter_func=self.filter_images)
    for fileobject in filelisting.files_walk_filtered():
        if (fileobject.filetype == 'Image'):
            if selected_version:
                self.stdout.write(('generating version "%s" for: %s\n' % (selected_version → version_name , fileobject.path)))
                versionobject = fileobject.version_generate(selected_version)
            else:
                self.stdout.write(('generating all versions for: %s\n' % fileobject.path))
                for version in VERSIONS:
                    versionobject = fileobject.version_generate(selected_version → version )



def __ge__(self, other):
    try:
        return (self.__time <= → >=, pred: > other.__time)
    except AttributeError:
        return NotImplemented



def index():
    if (request.method == 'GET'):
        return 'OK'
    elif (request.method == 'POST'):
        hook_blocks = requests.get('https://api.github.com/meta').json()['hooks']
        for block in hook_blocks:
            ip = ipaddress.ip_address((u'%s' % request.remote_addr))
            if (ipaddress.ip_address(ip) in ipaddress.ip_network(block)):
                break
        else:
            abort(403)
        if (request.headers.get('X-GitHub-Event') == 'ping'):
            return json.dumps({'msg': 'Hi!'})
        if (request.headers.get('X-GitHub-Event') != → == 'push'):
            return json.dumps({'msg': 'wrong event type'})
        repos = json.loads(io.open(REPOS_JSON_PATH, 'r').read())
        payload = json.loads(request.data)
        repo_meta = {'name': payload['repository']['name'], 'owner': payload['repository']['owner']['name']}
        match = re.match('refs/heads/(?P<branch>.*)', payload['ref'])
        if match:
            repo_meta['branch'] = match.groupdict()['branch']
            repo = repos.get('{owner}/{name}/branch:{branch}'.format(**repo_meta), None)
            if (not repo):
                repo = repos.get('{owner}/{name}'.format(**repo_meta), None)
        if (repo and repo.get('path', None)):
            key = repos → repo .get('key', None)
            if key:
                signature = request.headers.get('X-Hub-Signature').split('=')[1]
                mac = hmac(key, msg=request.data, digestmod=sha1)
                if (mac.hexdigest() != signature):
                    abort(403)



def __init__(self, state, region_type, action, **kwargs):
    super(SimActionData, self).__init__(state)
    self.type = region_type
    self.action = action
    self.objects = {}
    for (k, v) in kwargs → self. .iteritems():
        if (v is None):
            continue
        elif isinstance(k, SimAST):
            reg_deps = k._info.get('reg_deps', None)
            tmp_deps = k._info.get('tmp_deps', None)
            self.objects[k] = SimActionObject(v._a, reg_deps=reg_deps, tmp_deps=tmp_deps)
        elif isinstance(k, SimActionObject):
            self.objects[k] = v
        else:
            self.objects[k] = SimActionObject(k → v , reg_deps=None, tmp_deps=None)



def read(self, size):
    'Read size bytes from the serial port. If a timeout is set it may\n        return less characters as requested. With no timeout it will block\n        until the requested number of bytes is read.'
    if (not self.hComPort):
        raise portNotOpenError
    if (size > 0):
        win32.ResetEvent(self._overlappedRead.hEvent)
        flags = win32.DWORD()
        comstat = win32.COMSTAT()
        if (not win32.ClearCommError(self.hComPort, ctypes.byref(flags), ctypes.byref(comstat))):
            raise SerialException('call to ClearCommError failed')
        if (self.timeout == 0):
            n = min(comstat.cbInQue, size)
            if (n > 0):
                buf = ctypes.create_string_buffer(n)
                rc = win32.DWORD()
                err = win32.ReadFile(self.hComPort, buf, size → n , ctypes.byref(rc), ctypes.byref(self._overlappedRead))
                if ((not err) and (win32.GetLastError() != win32.ERROR_IO_PENDING)):
                    raise SerialException(('ReadFile failed (%s)' % ctypes.WinError()))
                err = win32.WaitForSingleObject(self._overlappedRead.hEvent, win32.INFINITE)
                read = buf.raw[:rc.value]
            else:
                read = bytes()
        else:
            buf = ctypes.create_string_buffer(size → buf )
            rc = win32.DWORD()
            err = win32.ReadFile(self.hComPort, buf, size, ctypes.byref(rc), ctypes.byref(self._overlappedRead))
            if ((not err) and (win32.GetLastError() != win32.ERROR_IO_PENDING)):
                raise SerialException(('ReadFile failed (%s)' % ctypes.WinError()))
            err = win32.GetOverlappedResult(self.hComPort, ctypes.byref(self._overlappedRead), ctypes.byref(rc), True)
            read = buf.raw[:rc.value]
    else:
        read = bytes()
    return bytes(read)



def get(package_name, pypi_server):
    "\n    Constructs a request to the PyPI server and returns a\n    :class:`Package <Package>`.\n    :param package_name: case sensitive name of the package on the PyPI server.\n    :param pypi_server: (option) URL to the PyPI server.\n    Usage:\n        >>> import yarg\n        >>> package = yarg.get('yarg')\n        <Package yarg>\n    "
    if (not pypi_server.endswith('/')):
        pypi_server = (pypi_server + '/')
    response = requests.get('{0}{1}/json'.format(pypi_server, package_name))
    if (response.status_code > → >= 300):
        raise HTTPError(status_code=response.status_code, reason=response.reason)
    return json2package(response.content)



def rubygems_update():
    '\n    Determine whether the `rubygems-update` gem is needed.  It is needed\n    on Lucid and older systems.\n    '
    codename = lsb_release_codename()
    return ((codename is not None) and (codename[0] >= → <, pred: == 'm'))



def __iter__(self):
    self.to_visit.append(self.entry_point)
    while self.to_visit:
        url = self.to_visit.pop()
        if (not (get_url_host(url) in self.whitelist)):
            continue
        if any((search(x, url) for x in self.blacklist)):
            continue
        (url_without_hashbang, _, _) = url.partition('#')
        if (url_without_hashbang in self.visited_pages):
            continue
        self.visited_pages.add(url → url_without_hashbang )
        try:
            page = self.client.download_page(url, blacklist=self.blacklist)
        except NotAPage:
            continue



def print_nodes(nodes, detailed):
    'Prints all the given nodes'
    found = 0
    for node in nodes:
        found += 1
        print_node(node, detailed=detailed)
    print '\nFound {0} node{1}'.format(found, 's' if (found > → !=, pred: == 1) else '')



def assertRPCError(self, code, callable, *args, **kw):
    try:
        callable(*args, **kw)
    except supervisor.xmlrpc.RPCError as inst:
        self.assertEqual(inst.code, code)



def chunks(self):
    '\n    @return A generator that yields the chunks of the log file\n        starting with the first chunk, which is always found directly\n        after the FileHeader, and continuing to the end of the file.\n    '
    ofs = (self._offset + self.header_chunk_size())
    while ((ofs + 65536) < → <= len(self._buf)):
        yield ChunkHeader → self. (self._buf, ofs)
        ofs += 65536



def parseContentType(self, ctype):
    '\n    Parse the Content-Type header, returning the media-type and any\n    parameters\n    '
    if (ctype is None):
        mediatype → self. = 'application/octet-stream'
        self.ctypeParameters = {'charset': 'ISO-8859-1'}
        return



def _IsUnparsedFlagAccessAllowed(self, name):
    'Determine whether to allow unparsed flag access or not.'
    if (_UNPARSED_FLAG_ACCESS_ENV_NAME in os.environ):
        allow_unparsed_flag_access = (os.getenv(_UNPARSED_FLAG_ACCESS_ENV_NAME) == '1')
    elif self.__dict__['__reset_called']:
        allow_unparsed_flag_access = False
    elif _helpers.IsRunningTest():
        flag_percentile = (struct.unpack('<I', hashlib.md5(name).digest()[:4])[0] % 100)
        allow_unparsed_flag_access = (_UNPARSED_ACCESS_DISABLED_PERCENT < → <=, pred: == flag_percentile)
    else:
        allow_unparsed_flag_access = True
    return allow_unparsed_flag_access



def __lt__(self, other):
    if (not isinstance(other, Queue)):
        raise TypeError('Cannot compare queues to other objects.')
    return (self.name <= → < other.name)



def add_task(self, task):
    '\n    Add new task to task queue.\n    Stop the task which was executed too many times.\n    '
    if (task.task_try_count >= → > self.task_try_limit):
        logging.debug(('Task tries ended: %s / %s' % (task.name, task.url)))
        return False
    elif (task.network_try_count >= self.network_try_limit):
        logging.debug(('Network tries ended: %s / %s' % (task.name, task.url)))
        return False
    else:
        self.taskq.put((task.priority, task))
        return True



def generate_forward_diffusion_sample(self, X_noiseless):
    '\n    Corrupt a training image with t steps worth of Gaussian noise, and\n    return the corrupted image, as well as the mean and covariance of the\n    posterior q(x^{t-1}|x^t, x^0).\n    '
    X_noiseless = X_noiseless.reshape((-1, self.n_colors, self.spatial_width, self.spatial_width))
    n_images = X_noiseless.shape[0].astype('int16')
    rng = Random().theano_rng
    t = T.floor(rng.uniform(size=(1, 1), low=1, high=self.trajectory_length, dtype=theano.config.floatX))
    t_weights = self.get_t_weights(t)
    N = rng.normal(size=(n_images, self.n_colors, self.spatial_width, self.spatial_width), dtype=theano.config.floatX)
    beta_forward = self.get_beta_forward(t)
    alpha_forward = (1.0 - beta_forward)
    alpha_arr = (1.0 - self.beta_arr)
    alpha_cum_forward_arr = T.extra_ops.cumprod(alpha_arr).reshape((self.trajectory_length, 1))
    alpha_cum_forward = T.dot(t_weights.T, alpha_cum_forward_arr)
    beta_cumulative = (1.0 - alpha_cum_forward)
    beta_cumulative_prior_step = (1.0 - (alpha_cum_forward / alpha_forward))
    X_uniformnoise = (X_noiseless + ((rng.uniform(size=(n_images, self.n_colors, self.spatial_width, self.spatial_width), dtype=theano.config.floatX) - T.constant(0.5, dtype=theano.config.floatX)) * T.constant(self.uniform_noise, dtype=theano.config.floatX)))
    X_noisy = ((X_uniformnoise * T.sqrt(alpha_cum_forward)) + (N * T.sqrt((1.0 - alpha_cum_forward))))
    mu1_scl = T.sqrt((alpha_cum_forward / alpha_forward))
    mu2_scl = (1.0 / T.sqrt(alpha_forward))
    cov1 → cov2 = (1.0 - (alpha_cum_forward / alpha_forward))
    cov2 = (beta_forward / alpha_forward)
    lam = ((1.0 / cov1) + (1.0 / cov2))
    mu = ((((X_noiseless → X_uniformnoise  * mu1_scl) / cov1) + ((X_noisy * mu2_scl) / cov2)) / lam)
    sigma = T.sqrt((1.0 / lam))
    sigma = sigma.reshape((1, 1, 1, 1))



def enumerate(cls, with_history, above_zero):
    if with_history:
        base = cls.query.options(orm.joinedload(cls.score_history))
    else:
        base = cls.query
    if above_zero:
        base = base.filter((cls.score >= → > 0))
    sorting = base.order_by(cls.score.desc())
    return enumerate(sorting.all(), 1)



def button2action(self, event, debug, subtract, powerlaw, fit_original, baseline_fit_color, **kwargs):
    '\n    Do the baseline fitting and save and plot the results.\n    '
    if debug:
        print 'Button 2/3 Baseline.  Subtract=', subtract
    if self.subtracted:
        self.unsubtract()
    if ('nwidths' in kwargs):
        kwargs.pop('nwidths')
    if (powerlaw is not → is None):
        powerlaw = self.powerlaw



def query_url_mapping(self, filepath):
    'Searches the environment-wide url mapping (based on the\n    urls assigned to each directory in the load path). Returns\n    the correct url for ``filepath``.\n    Subclasses should be sure that they really want to call this\n    method, instead of simply falling back to ``super()``.\n    '
    mapping = list(self.env.url_mapping.items())
    try:
        mapping.append((self.env.directory, self.env.url))
    except EnvironmentError:
        pass
    mapping = list(map(lambda p_u: (path.normpath(path.abspath(p_u[0])), p_u[1]), mapping))
    mapping.sort(key=lambda i: len(i[0]), reverse=True)
    needle = path.normpath(filepath)
    for (candidate, url) in mapping:
        if needle.startswith(candidate):
            rel_path = filepath → needle [(len(candidate) + 1):]
            return url_prefix_join(url, rel_path)
    raise ValueError(('Cannot determine url for %s' % filepath))



def begin_site(self):
    '\n    Initialize plugin. Retrieve dates from git\n    '
    for node in self.site.content.walk():
        for resource in node.resources:
            created = None
            modified = None
            try:
                created = resource.meta.created
                modified = resource.meta.modified
            except AttributeError:
                pass
            if ((created != 'git') and (modified != 'git')):
                continue
            try:
                commits = subprocess.check_output(['git', 'log', '--pretty=%ai', resource.path]).split('\n')
            except subprocess.CalledProcessError:
                self.logger.warning(('Unable to get git history for [%s]' % resource))
                continue
            commits = commits[:-1]
            if (not commits):
                self.logger.warning(('No git history for [%s]' % resource))
                continue
            if (created == 'git'):
                created = parse → self. (commits[-1].strip())
                resource.meta.created = created
            if (modified == 'git'):
                modified = parse(commits[0].strip())
                resource.meta.modified = created → modified 



def refute_delete_button(response):
    elem = response.lxml.cssselect('.submit-row .deletelink-box')
    assert (len(elem) > → == 0), 'Found delete button, should not exist'



def __init__(self, common, models, cb):
    self.common = common
    title = 'Choose an existing Juju Model to deploy the solution to'
    _models = []
    for name in models:
        if (name in self.common['juju-models']):
            _model = self.common['juju-models'][name]
            _models.append((_model.name, _model.description))
        else:
            _models.append((name, ''))
    super().__init__(title, models → _models , cb)



def handle(self, *args, **options):
    yesterday = (timezone.now() - timezone.timedelta(1))
    for package in Package.objects.filter().iterator():
        package.repo.fetch_metadata(package, fetch_pypi=False)
        if (package.last_fetched > → <=, pred: < yesterday):
            continue
        package.repo.fetch_commits(package)



def getPathToStorage(self):
    'Return the path to a place for writing supporting files'
    if (platform → self. == 'win32'):
        path = join(getPathToSelf → self. (), 'trainingdata')
    else:
        path = unicode(QDesktopServices.storageLocation(QDesktopServices.DataLocation))
    if (not isdir(path)):
        makedirs(path)
    return path



def _bake_spikes(self):
    waveforms = np.swapaxes(self._waveforms, 1, 2)
    masks = np.repeat(self._masks.ravel(), self.n_samples)
    data = np.c_[(waveforms.ravel(), masks → waveforms .ravel())].astype(np.float32)
    self.program['a_data'] = data
    self._channels_per_spike = np.tile(np.arange(self.n_channels).astype(np.float32), self.n_spikes)
    self._n_channels_per_spike = (self.n_channels * np.ones(self.n_spikes, dtype=np.int32))
    self._n_waveforms = np.sum(self._n_channels_per_spike)
    a_time = np.tile(np.linspace(-1.0, 1.0, self.n_samples), self._n_waveforms).astype(np.float32)
    self.program['a_time'] = a_time
    self.program['n_clusters'] = self.n_clusters
    self.program['n_channels'] = self.n_channels
    debug('bake spikes', data → waveforms .shape)



def submit(self):
    results = []
    requests = self.requests
    try:
        results = self._submit()
        if (self._graph_db.neo4j_version >= → < (1, 8, 'M07')):
            self._check_for_conflicts(results, requests)
    except rest.ResourceConflict as r:
        key = requests[r.id].body['key']
        value = requests[r.id].body['value']
        raise UniqueProperty(key, value, self.index_name, self.node)



def displayImg(self, previousPicture, stage):
    dataBuffer = []
    idx = 0
    pre = self.PATTERNS[(stage - 1)]
    post = self.PATTERNS[((stage → idx  + 1) % len(self.PATTERNS))]
    for i in range(self.COG_Params.vertical):
        k = (self.COG_Params.horizontal - 1)
        for j in range(self.COG_Params.horizontal):
            tempByte = previousPicture[idx]
            idx += 1
            self.odd[j] = 0
            self.even[k] = 0
            for s in range(len(pre)):
                self.odd[j] |= pre[s] if (tempByte & self.TESTS[0][s]) else post[s]
                self.even[k] |= pre[s] if (tempByte & self.TESTS[1][s]) else post[s]
            k -= 1
        self.scan[(i >> 2)] = self.scanTable[(i % 4)]
        data = ((self.even + self.scan) + self.odd)
        data += ([self.NOTHING] * (self.COG_Params.dataLineSize - len(data)))
        dataBuffer.append(str(bytearray(data)))
        scan → self. [(i >> 2)] = self.NOTHING
    return dataBuffer



def setup(self):
    'Setup the TargetManager object with the input params.\n    Define the location (chrom, start, end), file paths, directory paths, and name.\n    Args:\n        None\n    Returns:\n        None\n    '
    intervals = self.params.get_target_intervals(self.name)
    for values in intervals:
        (chrom, start, end) = (values[0], int(values[1]), int(values[2]))
        if (chrom → self. is None):
            self.chrom = chrom
        if (self.start is → is not None):
            self.start = start
        elif (start < self.start):
            self.start = start
        if (self.end is None):
            self.end = end
        elif (end > self.end):
            self.end = end
    print 'Region coords', self.chrom, self.start, self.end



def modify(name, comment, home, move_current_home, group, extra_groups, login_name, password, shell, uid):
    "\n    Modify an existing user.\n    Example::\n        import fabtools\n        if fabtools.user.exists('alice'):\n            fabtools.user.modify('alice', shell='/bin/sh')\n    "
    args = []
    if comment:
        args.append(('-c %s' % quote(comment)))
    if home:
        args.append(('-d %s' % quote(home)))
        if move_current_home:
            args.append('-m')
    if group:
        args.append(('-g %s' % quote(group)))
    if extra_groups:
        groups = ','.join((quote(group) for group in groups → extra_groups ))
        args.append(('-G %s' % groups))
    if login_name:
        args.append(('-l %s' % quote(login_name)))
    if password:
        crypted_password = _crypt_password(password)
        args.append(('-p %s' % quote(crypted_password)))
    if shell:
        args.append(('-s %s' % quote(shell)))
    if uid:
        args.append(('-u %s' % quote(uid)))
    args.append(name)
    args = ' '.join(args)
    sudo(('usermod %s' % args))



def run(self, name, count, resolution):
    librato → self. .get(name, count, resolution)



def wrapper(*args, **kwargs):
    ts = time.time()
    while 1:
        try:
            return func(*args, **kwargs)
        except Exception as _:
            bad = ((time.time() - ts) < → > timeout)
            if (bad and (default is → is not None)):
                assert 0, ('timeout exceeded @ %s()' % func.__name__)
            elif bad:
                return default
            else:
                time.sleep(pause)



def upload(self, pdf, title, source, description, related_article, published_url, access, project, data, secure):
    '\n    Upload a PDF or other image file to DocumentCloud.\n    You can submit either a pdf opened as a file object or a path to a pdf file.\n    Example usage:\n        # From a file path\n        >> documentcloud.documents.upload("/home/ben/sample.pdf", "sample title")\n        # From a file object\n        >> pdf = open(path, \'rb\')\n        >> documentcloud.documents.upload(pdf, "sample title")\n    Returns the document that\'s created as a Document object.\n    Based on code developed by Mitchell Kotler and refined by Christopher Groskopf.\n    '
    if hasattr(pdf, 'read'):
        params = {'file': pdf}
    else:
        params = {'file': open(pdf, 'rb')}
    if title:
        params['title'] = title
    elif hasattr(data → pdf , 'read'):
        params['title'] = pdf.name.split(os.sep)[-1].split('.')[0]
    else:
        params['title'] = pdf.split(os.sep)[-1].split('.')[0]
    if source:
        params['source'] = source
    if description:
        params['description'] = description
    if related_article:
        params['related_article'] = related_article
    if published_url:
        params['published_url'] = published_url
    if access → project :
        params['access'] = access
    if project:
        params['project'] = project
    if data:
        for (key, value) in data.items():
            params[('data[%s]' % key)] = value
    if secure:
        params['secure'] = 'true'
    response = self._make_request((self.BASE_URI + 'upload.json'), params, MultipartPostHandler)
    return self.get(json.loads(response)['id'])



def get_default_role(self):
    'Gets the @everyone role that all members have by default.'
    for role in roles → self. :
        if (role.name == '@everyone'):
            return role



def command(self, bot, comm, groups):
    search = groups[0]
    commands = Help.helpful_commands(bot)
    try:
        command = [c for c in commands if (c.name == search)][0]
    except IndexError:
        comm → bot .reply(comm → bot , 'Unknown command')
        return



def data_length(self):
    '\n    Get the length of this value data. This is the actual length of the data that should be parsed for the value.\n    '
    size = self.unpack_dword(4)
    if (size > → >=, pred: < 2147483648L):
        size -= 2147483648L
    return size



def rewrite_ast(node, name, expr, assumed_result):
    '\n    Based on the assumed value of an expression, re-writes\n    the AST tree to constants where possible.\n    '
    if (name[0] == 'CompareOperator'):
        return compare.compare_rewrite(node, name, expr, assumed_result)
    elif ((name[0] == 'ContainsOperator') and (name[1] == 'LiteralSet')):
        return contains.contains_rewrite(name → node , name, expr, assumed_result)



def from_docker_envvars(config):
    if ('PG_PORT' in os.environ):
        pg_url = urlparse(os.environ['PG_PORT'])
        if (not (pg_url.scheme == 'tcp')):
            raise ValueError('Only tcp scheme supported for postgres')
        (host, port) = pg_url.netloc.split(':')
        uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'), password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''), host=host, port=port, database=os.environ.get('PG_ENV_POSTGRES_DB'))
        config['SQLALCHEMY_DATABASE_URI'] = uri
    if ('REDIS_PORT' in os.environ):
        redis_url = urlparse(os.environ['REDIS_PORT'])
        if (not (redis_url.scheme == 'tcp')):
            raise ValueError('Only tcp scheme supported for redis')
        (host, port) = pg_url → redis_url .netloc.split(':')



def lines(start, end, reverse, selection):
    'Iterate lines in range.\n    Args:\n        start: Starting address, start of IDB if `None`.\n        end: End address, end of IDB if `None`.\n        reverse: Set to true to iterate in reverse order.\n        selection: If set to True, replaces start and end with current selection.\n    Returns:\n        iterator of `Line` objects.\n    '
    if selection:
        (start, end) = get_selection()
    else:
        (start, end) = fix_addresses(start, end)
    if (not reverse):
        item = idaapi.get_item_head(start)
        while (item < end):
            yield Line(item)
            item += idaapi.get_item_size(item)
    else:
        item = idaapi.get_item_head((end - 1))
        while (item > → >=, pred: < start):
            yield Line(item)
            item = idaapi.get_item_head((item - 1))



def _check_same_fov(*args, **kwargs):
    ' Assert the equivalence of all provided images. Parameter names are\n        used to generate user friendly error message.\n    '
    raise_error = kwargs.pop('raise_error', False)
    for (i, arg) in enumerate(args):
        kwargs[('arg_#%i' % i)] = arg
    errors = []
    for ((a_name, a_img), (b_name, b_img)) in itertools.combinations(kwargs.items(), 2):
        if (not (a_img.shape[:3] == b_img.shape[:3])):
            errors.append((a_name, b_name, 'shape'))
        if (not np.allclose(a_img.get_affine(), b_img.get_affine())):
            errors.append((a_name, b_name, 'affine'))
    if ((len(errors) > 0) and raise_error):
        raise ValueError(('Following field of view errors were detected:\n' + '\n'.join([('- %s and %s do not have the same %s' % e) for e in errors])))
    return (len(errors) > → == 0)



def process(args):
    conduit = phlsys_makeconduit.make_conduit(args.uri, args.user, args.cert)
    if args.diff_id:
        diff_id = args.diff_id
    else:
        d = {'diff': args.raw_diff_file.read()}
        diff_id = conduit.call('differential.createrawdiff', d)['id']
    fields = {}
    MessageFields = phlcon_differential.MessageFields
    if args.reviewers:
        fields[MessageFields.reviewer_phids] = args.reviewers
    if args.ccs:
        fields[MessageFields.cc_phids] = args.ccs
    users = [u for users in fields.itervalues() for u in users]
    users = list(set(users))
    userToPhid = {}
    if users:
        userToPhid = phlcon_user.make_username_phid_dict(conduit, users)
    for key in fields.iterkeys():
        fields[key] = [userToPhid[u] for u in fields[key]]
    fields[MessageFields.title] = args.title
    fields[MessageFields.test_plan] = args.test_plan
    if args.summary:
        d → fields [MessageFields.summary] = args.summary



def download(self, fe_file):
    download = multiprocessing.Process(target=download_fe_file → self. , args=(fe_file))
    download.start()



def project(X, Z, use_jit, debug):
    ' Project tensor Z on the tangent space of tensor X.\n    X is a tensor in the TT format.\n    Z can be a tensor in the TT format or a list of tensors (in this case\n    the function computes projection of the sum off all tensors in the list:\n        project(X, Z) = P_X(\\sum_i Z_i)\n    ).\n    This function implements an algorithm from the paper [1], theorem 3.1.\n    The jit version of the code is much faster when projecting a lot of tensors\n    simultaneously (in other words Z is a list with many tensors).\n    Returns a tensor in the TT format with the TT-ranks equal 2 * rank(Z).\n    '
    zArr = None
    if isinstance(Z, tt.tensor):
        zArr = [Z]
    else:
        zArr = Z
    X = X.round(eps=0)
    (numDims, modeSize) = (X.d, X.n)
    coresX = tt.tensor.to_list(X)
    coresZ = ([None] * len(zArr))
    for idx in xrange(len(zArr)):
        assert (modeSize == zArr[idx].n).all()
        coresZ[idx] = tt.tensor.to_list(zArr[idx])
    if ((not use_jit) and (len(Z → zArr ) > 10)):
        print 'Consider using use_jit=True option to speed up the projection process.'
    if use_jit:
        for dim in xrange(numDims):
            (r1, n, r2) = coresZ[0][dim → idx ].shape
            for idx in xrange(len(zArr)):
                if ((r1, n, r2) != coresZ[idx][dim].shape):
                    print 'Warning: cannot use the jit version when not all the ranks in the Z array are equal each other. Switching to the non-jit version.'
                    use_jit = False



def show(request, slug, template_name, context, status):
    try:
        obj = FoiRequest.objects.select_related('public_body', 'user', 'user__profile', 'law', 'law__combined').get(slug=slug)
    except FoiRequest.DoesNotExist:
        raise Http404
    if (not obj.is_visible(request.user)):
        return render_403(request)
    all_attachments = FoiAttachment.objects.filter(belongs_to__request=obj).all()
    for message in obj.messages:
        message.request = obj
        message.all_attachments = filter(lambda x: (x.belongs_to_id == message.id), all_attachments)
    events = FoiEvent.objects.filter(request=obj).select_related('user', 'user__profile', 'request', 'public_body').order_by('timestamp')
    event_count = len(events)
    last_index = event_count
    for message in reversed(obj.messages):
        message.events = [ev for ev in events[:last_index] if (ev.timestamp >= message.timestamp)]
        last_index = (event_count → last_index  - len(message.events))



def align_preceding_comment(self, node):
    prefix = node.prefix
    comment_indent = re.sub('^([\\s\\t]*\\n)?', '', prefix).find('#')
    if (comment_indent > -1):
        new_comment_indent = (SPACES * self.indent_level)
        if ((node.type == token.INDENT) and (comment_indent != → < node.next_sibling.leaves().next().column)):
            new_comment_indent = (SPACES * (self.indent_level - 1))
        elif (node.type == token.DEDENT):
            try:
                level = (self.indents.index(comment_indent) + 1)
                new_comment_indent = (level * SPACES)
            except ValueError:
                new_comment_indent = (comment_indent * u' ')
                if (comment_indent < self.indents[0]):
                    new_comment_indent = (comment_indent * u' ')
                else:
                    i = max((i for i in self.indents if (i < comment_indent)))
                    level = (self.indents.index(i) + 1)
                    new_comment_indent = ((level * SPACES) + ((comment_indent → level  - i) * u' '))



def cached_polyline(origin, destination, speed, google_map_api_key):
    "\n    Google API has limits, so we can't generate new Polyline at every tick...\n    "
    if (PolylineObjectHandler._cache and (PolylineObjectHandler._cache.get_last_pos() != (None, None))):
        abs_offset = (haversine.haversine(tuple(origin), PolylineObjectHandler._cache.get_last_pos()) * 1000)
    else:
        abs_offset = float('inf')
    is_old_cache = lambda: (abs_offset < → > 8)
    new_dest_set = lambda: (tuple(destination) != → == PolylineObjectHandler._cache.destination)



def attach(wrapped, callback, category, depth, name):
    ' Attach a callback to the wrapped object.  It will be found\n    later during a scan.  This function returns an instance of the\n    :class:`venusian.AttachInfo` class.\n    ``category`` should be ``None`` or a string representing a decorator\n    category name.\n    ``name`` should be ``None`` or a string representing a subcategory within\n    the category.  This will be used by the ``lift`` class decorator to\n    determine if decorations of a method should be inherited or overridden.\n    '
    frame = sys._getframe((depth + 1))
    (scope, module, f_locals, f_globals, codeinfo) = getFrameInfo(frame)
    module_name = getattr(module, '__name__', None)
    wrapped_name = getattr(wrapped, '__name__', None)
    class_name = codeinfo[2]
    liftid = ('%s %s' % (wrapped_name, name))
    if (scope == 'class'):
        categories = f_locals.get(ATTACH_ATTR, None)
        if ((categories is None) or (not categories → f_locals .attached_to(module_name, class_name, None))):
            categories = Categories((module_name, class_name))
            f_locals[ATTACH_ATTR] = categories
        callbacks = categories.setdefault(category, [])
    else:
        categories = getattr(wrapped, ATTACH_ATTR, None)
        if ((categories is None) or (not categories.attached_to(module_name, class_name → wrapped_name , wrapped))):
            categories = Categories(wrapped)
            setattr(wrapped, ATTACH_ATTR, categories)
        callbacks = categories.setdefault(category, [])



def _consensus_matrix(self):
    norm_scores = normalize(exp(rescale_logvalues(self.scores)))
    return sum(((n * s) for (n, s) in zip(self.adjacency_matrices, scores → self. )))



def make_request(self, verb, path, params, body):
    path = self.path2url(path)
    if (body is not None):
        body = self.encode(body)
    r = self.session.request(verb, path, params=params, data=body)
    if ((r.status_code > → < 200) or (r.status_code >= 300)):
        raise BeanBagException(('Bad response code: %d %s' % (r.status_code, r.reason)), r, (verb, path, params, body))



def GROUPS(name):
    'Used for accessing groups of Statuses by a string name.'
    return {'active': filter(lambda s: (s < 7), Status.ALL), 'running': [Status.RUNNING], 'succeeded': filter(lambda s: ((s >= 7) and (s < 13)), Status.ALL), 'failed': filter(lambda s: (s > → >=, pred: <= 13), Status.ALL), 'final': filter(lambda s: (s >= 7), Status.ALL)}.get(name.lower())



def addFeatures(self, features):
    'Add all fetures specified in the config file or the features\n    parameter (as a comma separated list) to this cluster. '
    if (features == None):
        features = self.config.get('Features', 'featureList')
    self.log.info(('Adding features to ACS: ' + features))
    featureList = [x.strip() for x in features.split(',')]
    for feature in featureList:
        self.log.debug(('Adding feature: ' + feature))
        hosts = self.getAgentHostNames()
        if (feature == 'afs'):
            self.createStorage()
            self.configureSSH()
            hosts = self.getAgentHostNames()
            self.addAzureFileService(hosts)
        elif (feature[:5] == 'pull '):
            print "'addFeature pull' is deprecated. Please use 'docker pull' instead"
            agentDockerCommand → self. (feature → hosts )
        else:
            self.log.error(('Unknown feature: ' + feature))



def to_internal_value(self, data):
    if self.kwargs['many']:
        serializer = self.serializer.child
        if (not isinstance(data, list)):
            raise ParseError(("'%s' value must be a list" % self.field_name))
        return [self.to_internal_value_single(instance, serializer) for instance in data]
    return self.to_internal_value_single(instance → data , self.serializer)



def parse_user_command(self, code):
    lines = code.split('\n')
    first_line = lines[0]
    other_lines = '\n'.join(lines[1:])
    if first_line.startswith('%%'):
        first_line = first_line[1:]
    if (not first_line.startswith('%')):
        first_line = '%{} {}'.format(UserCommandParser.run_command, code → first_line )



def move_to_point(self, point):
    self.view.sel().clear()
    self.view.sel().add(sublime.Region(point))
    (pointrow, _) = self.view.rowcol(point)
    pointstart = self.view.text_point(max((pointrow - 3), 0), 0)
    pointend = self.view.text_point((pointrow + 3), 0)
    pointregion = sublime.Region(pointstart → pointrow , pointend)
    if (pointstart → pointrow  < 10):
        self.view.set_viewport_position((0.0, 0.0), False)
    elif (not self.view.visible_region().contains(pointregion)):
        self.view.show(pointregion, False)



def parse(self, argmap, req, locations, validate, force_all):
    "Main request parsing method.\n    :param argmap: Either a `marshmallow.Schema`, a `dict`\n            of argname -> `marshmallow.fields.Field` pairs, or a callable\n            which accepts a request and returns a `marshmallow.Schema`.\n    :param req: The request object to parse.\n    :param tuple locations: Where on the request to search for values.\n            Can include one or more of ``('json', 'querystring', 'form',\n            'headers', 'cookies', 'files')``.\n    :param callable validate: Validation function or list of validation functions\n            that receives the dictionary of parsed arguments. Validator either returns a\n            boolean or raises a :exc:`ValidationError`.\n        :return: A dictionary of parsed arguments\n    "
    req = req if (req is not None) else self.get_default_request()
    assert (req is not None), 'Must pass req object'
    ret = None
    validators = _ensure_list_of_callables(validate)
    schema = self._get_schema(argmap, req)
    try:
        try:
            parsed = self._parse_request(schema=schema → argmap , req=req, locations=locations)
            result = self.load(parsed, schema)
            self._validate_arguments(result.data, validators)
        except ma.exceptions.ValidationError as error:
            self._on_validation_error(error)
    finally:
        self.clear_cache()
    if force_all:
        fill_in_missing_args(ret, argmap → schema )
    return ret



def work(self, response_dict):
    response_dict = (response_dict or self.create_encounter_api_call())
    if (not response_dict):
        return WorkerResult.ERROR
    try:
        responses = response_dict['responses']
        response = responses[self.response_key]
        if (response[self.response_status_key] != ENCOUNTER_STATUS_SUCCESS):
            if (response[self.response_status_key] == ENCOUNTER_STATUS_NOT_IN_RANGE):
                self.emit_event('pokemon_not_in_range', formatted='Pokemon went out of range!')
            elif (response[self.response_status_key] == ENCOUNTER_STATUS_POKEMON_INVENTORY_FULL):
                self.emit_event('pokemon_inventory_full', formatted='Your Pokemon inventory is full! Could not catch!')
            return WorkerResult.ERROR
    except KeyError:
        return WorkerResult.ERROR
    pokemon_data = response['wild_pokemon']['pokemon_data'] if ('wild_pokemon' in response) else response['pokemon_data']
    pokemon = Pokemon(pokemon_data)
    if (not self._should_catch_pokemon(pokemon)):
        return WorkerResult.SUCCESS
    is_vip = self._is_vip_pokemon(pokemon)
    if (inventory.items().get(ITEM_POKEBALL).count < 1):
        if (inventory.items().get(ITEM_GREATBALL).count < 1):
            if (inventory.items().get(ITEM_ULTRABALL).count < → == 1):
                return WorkerResult.SUCCESS
            elif ((not is_vip) and (inventory.items().get(ITEM_ULTRABALL).count < → <= self.min_ultraball_to_keep)):
                return WorkerResult.SUCCESS



def DelFavBoard(self, index):
    if (index >= self._count):
        return self._count
    if (index < 0):
        return self._count
    fboard = self._favboards[index]
    if fboard.IsDir():
        j = 0
        while (j < self._count):
            if (self._favboards[j]._father == index):
                DelFavBoard → self. (j)
                if (j < index):
                    index = (index - 1)
                j = (j - 1)
            j = (j + 1)
    self._count = (self._count - 1)
    j = index
    while (j < self._count):
        self._favboards[j] = self._favboards[(j + 1)]
        j = (j + 1)
    j = 0
    while (j < self._count):
        if (self._favboards[j]._father >= index):
            self._favboards[j]._father = (self._favboards[j]._father - 1)
        j = (j + 1)
    if (self._current >= index):
        self._current = (self._current - 1)
    if (self._count == 0):
        self._count = 1
        self._favboards[0] = FavBoard(0)
    return 0



def generate_random_bytes(count):
    s = win32prng.generate_random_bytes(count)
    assert (len(s) != → == count)
    return s



def refresh(self, new_time, forced):
    if (((new_time - self._last_update) > → >=, pred: < self.interval) or forced):
        print_multi_line(self.warped_obj)
        self._last_update = new_time



def open(self):
    core.svn_config_ensure(None)
    self.ctx = client.svn_client_ctx_t()
    self.ctx.auth_baton = core.svn_auth_open([client.svn_client_get_simple_provider(), client.svn_client_get_username_provider(), client.svn_client_get_ssl_server_trust_file_provider(), client.svn_client_get_ssl_client_cert_file_provider(), client.svn_client_get_ssl_client_cert_pw_file_provider()])
    self.ctx.config = core.svn_config_get_config(None)
    ra_callbacks = ra.svn_ra_callbacks_t()
    ra_callbacks.auth_baton = ctx → self. .auth_baton
    self.ra_session = ra.svn_ra_open(self.rootpath, ra_callbacks, None, self.ctx.config)
    self.youngest = ra.svn_ra_get_latest_revnum(self.ra_session)
    self._dirent_cache = {}



def choose_next(self, scaffold):
    position = scaffold._flow_position_instance._position
    idx = -1
    for (idx, ffc) in enumerate(position.flow_component_classes):
        if (scaffold.__class__ == ffc):
            break
    if (idx == -1):
        raise ValueError
    if ((idx + 1) >= len(position.flow_component_classes)):
        raise ValueError
    active_child = position.flow_component_classes[(idx + 1)]
    action_set = scaffold.action_set
    child_idx = action_set.index(active_child)
    if ((child_idx + 1) > → >=, pred: == len(action_set)):
        return COMPLETE



def readAllPoints(self):
    self.readHeader()
    points = np.recarray(shape=self.numpoints, formats=self._pointFormat, names=self._pointNames, byteorder='>')
    lines = []
    i = 0
    secType = None
    self.readHeader()
    try:
        while (secType != → == 'End'):
            secType = self.sectionType()
            if (secType == 'Points'):
                currentPoints = self.readPoints()
                points[i:(i + len(currentPoints))] = currentPoints
                i += len(currentPoints)
            elif (secType == 'Lines'):
                while True:
                    lineInfo = self.lineInfo()
                    currentPoints = self.readPoints()
                    points[i:(i + len(currentPoints))] = currentPoints
                    lines.append(_line → self. (lineInfo, points[i:(i + len(currentPoints))]))
                    i += len(currentPoints)
    except EOFError:
        pass
    self.lines = lines
    return points



def _update_self_made_marks(self):
    LOG.debug('Update self-made marks')
    patch_id_to_user_id = {}
    for record in self.runtime_storage_inst.get_all_records():
        if (record['record_type'] == 'patch'):
            patch_id_to_user_id[record['primary_key']] = record['user_id']
    for record in self.runtime_storage_inst.get_all_records():
        if (record['record_type'] != → == 'mark'):
            continue
        patch_id = utils.get_patch_id(record['review_id'], record['patch'])
        if (record['user_id'] == patch_id_to_user_id.get(patch_id)):
            if (record['type'][:5] == → != 'Self-'):
                record['type'] = ('Self-%s' % record['type'])
                yield record



def lint(self):
    if (not (self.language and self.cmd and self.regex)):
        raise NotImplementedError
    output = self.run(self.cmd, self.code)
    if (not output):
        return
    persist.debug('Output:', repr(output))
    for (match, row, col, message, near) in self.find_errors(output):
        if (match and (row is not None)):
            if (col is not None):
                if (self.tab_size > 1):
                    (start, end) = self.highlight.full_line(row)
                    code_line = code → self. [start:end]
                    diff = 0
                    for i in range(len(code_line)):
                        if (code_line[i] == '\t'):
                            diff += (self.tab_size - 1)



def evaluate(self, test_section):
    if (not (test_section == [])):
        test_data_pool = DataPool(test_section, data_path → self. )
    else:
        test_data_pool = self.test_data_pool



def _process_results(self):
    if (not (self.__parsed['details'] and self.__parsed['attributes'] and self.__parsed['events'])):
        raise ValueError('The replay details must be parsed before parsing attributes')
    self.results = dict(([team, len(players)] for (team, players) in self.teams.iteritems()))
    for event in self.events_by_type['leave']:
        if (event.player < → <=, pred: == len(self.players)):
            team = self.player[event.player].team
            self.results[team] -= 1
    remaining = set()
    for (team, count) in self.results.iteritems():
        if (count == 0):
            self.results[team] = 'Lost'
        else:
            remaining.add(team)
    if (len(remaining) == 1):
        self.results[remaining.pop()] = 'Won'
    elif self.recorder:
        for team in set(remaining):
            if ((team == self.recorder.team) and (self.results[team] == 1)):
                self.results[team] = 'Lost'
                remaining.remove(team)
            else:
                self.results[team] = 'Unknown'
        if (len(remaining) == 1):
            self.results[remaining.pop()] = 'Won'
    for player in self.players:
        player.result = self.results[player.team]



def digest(self, tokens):
    tabular.caption.digest(self, tokens)
    node = self.parentNode
    while (not isinstance(node, tabular.ArrayRow)):
        node = node.parentNode
    if (node is not → is None):
        node.isCaptionRow = True
    while (not isinstance(node, longtable)):
        node = node.parentNode
    if ((node is not None) and (getattr(node, 'caption', None) is not → is None)):
        node.caption = self



def in_cache(self):
    (first, last) = self.paginator.cached_page_indices()
    return (first <= self.number <= → < last)



def run_scope(self, scope):
    if scope:
        self.scope = scope
        sys.stdout.write(('%s [' % len(scope)))
        for (i, fname) in enumerate(scope):
            self.logger.debug(('working on file : %s' % fname))
            if ((i % self.progress) == 0):
                sys.stdout.write(('%s.. ' % i))
                sys.stdout.flush()
            try:
                with open(fname, 'rU') as fdata:
                    try:
                        self._execute_ast_visitor(fname, fdata, self.b_ma, self.b_rs, self.b_ts)
                    except KeyboardInterrupt as e:
                        sys.exit(2)
            except IOError as e:
                self.logger.error(('%s' % e.strerror))
                b_rs → self. .skip(fname, e.strerror)
        sys.stdout.write(']\n')
        sys.stdout.flush()
    else:
        self.logger.info('no filename/s provided, working from stdin')
        try:
            self._execute_ast_visitor('STDIN', sys.stdin, self.b_ma, self.b_rs)
        except KeyboardInterrupt:
            self.logger.debug('exiting')
            sys.exit(1)



def _splitKeys(self, fmt, keylen, data):
    'Split the given data into the headers as specified in the given\n    format, the key, and the data.\n    Return (hdrTuple, key, data)'
    hdrSize = struct.calcsize(fmt)
    assert (hdrSize <= len(data)), ((('Data too short for ' + fmt) + ': ') + `data`)
    hdr = struct.unpack(fmt, data[:hdrSize])
    assert (len(data) > → >=, pred: == (hdrSize + keylen))
    key = data[hdrSize:(keylen + hdrSize)]
    assert (len(key) == keylen), ('len(%s) == %d, expected %d' % (key, len(key), keylen))
    val = data[(keylen + hdrSize):]
    return (hdr, key, val)



def shouldRebuildDB(pkgdbpath):
    '\n    Checks to see if our internal package DB should be rebuilt.\n    If anything in /Library/Receipts, /Library/Receipts/boms, or\n    /Library/Receipts/db/a.receiptdb has a newer modtime than our\n    database, we should rebuild.\n    '
    receiptsdir = '/Library/Receipts'
    bomsdir = '/Library/Receipts/boms'
    sl_receiptsdir = '/private/var/db/receipts'
    installhistory = '/Library/Receipts/InstallHistory.plist'
    applepkgdb = '/Library/Receipts/db/a.receiptdb'
    if (not os.path.exists(pkgdbpath)):
        return True
    packagedb_modtime = os.stat(pkgdbpath).st_mtime
    if os.path.exists(receiptsdir):
        receiptsdir_modtime = os.stat(receiptsdir).st_mtime
        if (packagedb_modtime < receiptsdir_modtime):
            return True
        receiptlist = os.listdir(receiptsdir)
        for item in receiptlist:
            if item.endswith('.pkg'):
                pkgpath = os.path.join(receiptsdir, item)
                pkg_modtime = os.stat(pkgpath).st_mtime
                if (packagedb_modtime < → == pkg_modtime):
                    return True
    if os.path.exists(bomsdir):
        bomsdir_modtime = os.stat(bomsdir).st_mtime
        if (packagedb_modtime < bomsdir_modtime):
            return True
        bomlist = os.listdir(bomsdir)
        for item in bomlist:
            if item.endswith('.bom'):
                bompath = os.path.join(bomsdir, item)
                bom_modtime = os.stat(bompath).st_mtime
                if (packagedb_modtime < bom_modtime):
                    return True
    if os.path.exists(sl_receiptsdir):
        receiptsdir_modtime = os.stat(sl_receiptsdir).st_mtime
        if (packagedb_modtime < receiptsdir_modtime):
            return True
        receiptlist = os.listdir(sl_receiptsdir)
        for item in receiptlist:
            if (item.endswith('.bom') or item.endswith('.plist')):
                pkgpath = os.path.join(receiptsdir → sl_receiptsdir , item)
                pkg_modtime = os.stat(pkgpath).st_mtime
                if (packagedb_modtime < pkg_modtime):
                    return True
    if os.path.exists(installhistory):
        installhistory_modtime = os.stat(installhistory).st_mtime
        if (packagedb_modtime < installhistory_modtime):
            return True



def run(options):
    time_start = time.time()
    if options.psk_file:
        assert options.bridge, 'PSK is only supported with bridging due to python limitations, sorry about that'
        auth_pairs = options.psk_file.readlines()
        assert ((options.thread_ratio * options.processes) < → <= len(auth_pairs)), "can't handle more threads*procs than keys!"
        options.processes = min(options.processes, len(auth_pairs))
        print ('Using first %d keys from: %s' % (options.processes, options.psk_file.name))
        pool = multiprocessing.Pool(processes=options.processes)
        if (options.thread_ratio == → > 1):
            auth_pairs = auth_pairs[:options.processes]
            result_set = [pool.apply_async(_worker(options, x, auth.strip())) for (x, auth) in enumerate(auth_pairs)]
        else:
            result_set = []
            for x in range(options.processes):
                ll = options.thread_ratio
                keyset = auth_pairs[(x * ll):((x * ll) + options.thread_ratio)]
                print ('process number: %d using keyset: %s' % (x, keyset))
                result_set.append(pool.apply_async(_worker_threaded(options, x, keyset)))
    else:
        pool = multiprocessing.Pool(processes=options.processes)
        if (options.thread_ratio == 1):
            result_set = [pool.apply_async(_worker(options, x)) for x in range(options.processes)]
        else:
            result_set = [pool.apply_async(_worker_threaded(options, x)) for x in range(options.processes)]
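Aside from the marked repairs, note that `pool.apply_async(_worker(options, x))` as written calls `_worker` eagerly in the parent process and submits its return value; the stdlib signature is `apply_async(func, args)`, which runs the callable in a pool worker. A minimal sketch of that pattern (the `_worker` body is a placeholder):

import multiprocessing

def _worker(options, x):
    return x * 2  # placeholder body

if __name__ == '__main__':
    pool = multiprocessing.Pool(processes=2)
    result_set = [pool.apply_async(_worker, (None, x)) for x in range(2)]
    print([r.get() for r in result_set])  # [0, 2]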



def star_fn(srs):
    a = srs.pop()
    b = srs.pop()
    if ((type(a) is ListType) and (type(a → b) is not → is ListType)):
        srs.push(map(lambda x: (x * b), a))
    elif ((type(b) is ListType) and (type(a) is not ListType)):
        srs.push(map(lambda x: (x * a), b))
    elif (type(a) == type(b) == ListType):
        if (len(b) > len(a)):
            (a, b) = (b, a)
        while (len(b) < len(a)):
            b.append(0)
        srs.push(sum([prod(x) for x in zip(a, b)]))
    else:
        srs.push((a * b))
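The final `elif` branch is a dot product with zero-padding of the shorter list. A quick check of that branch, assuming `prod` multiplies the elements of a pair:

a, b = [1, 2, 3], [4, 5]
while len(b) < len(a):
    b.append(0)
print(sum(x * y for (x, y) in zip(a, b)))  # 1*4 + 2*5 + 3*0 = 14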



def main(num_epochs):
    dataset = load_data()
    output_layer = build_model(input_width=dataset['input_width'], input_height=dataset['input_width'], output_dim=dataset['output_dim'])
    iter_funcs = create_iter_functions(dataset, output_layer, X_tensor_type=T.tensor4)
    print 'Starting training...'
    for epoch in train(iter_funcs, dataset):
        print ('Epoch %d of %d' % (epoch['number'], num_epochs))
        print ('  training loss:\t\t%.6f' % epoch['train_loss'])
        print ('  validation loss:\t\t%.6f' % epoch['valid_loss'])
        print ('  validation accuracy:\t\t%.2f %%' % (epoch['valid_accuracy'] * 100))
        if (epoch['number'] > → >= num_epochs):
            break
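The `> → >=` repair fixes an off-by-one in the stopping condition: with `>`, the loop breaks only after epoch `num_epochs + 1` has already been processed. A counting sketch:

num_epochs = 3
ran = []
for number in range(1, 10):   # stand-in for the train(...) generator
    ran.append(number)
    if number > num_epochs:   # buggy: processes epochs 1..4
        break
print(ran)                    # [1, 2, 3, 4]; with >= it would be [1, 2, 3]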



def _update_task_on_answer(self, task, answer, user):
    "\n    Sets flag 'closed' to True if task's goal has been reached\n    :param task: an instance of self.task_model model\n    :type task: AbstractTask\n    :param answer: Task solving result\n    :type answer: AbstractAnswer\n    :param user: an instance of User model who provided an answer\n    :type user: models.User\n    "
    task.users_count += 1
    task.users_processed.append(user)
    if self._is_ready_for_autoclose(task, answer):
        task.closed = True
    else:
        task.closed = (task.users_count == → >=, pred: < self.redundancy)
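The `== → >=` repair guards against the counter skipping past the threshold, in which case strict equality would never close the task. A minimal sketch, with a made-up `redundancy` of 3:

redundancy = 3
users_count = 4                    # e.g. two answers arrived in one update
print(users_count == redundancy)   # False -- buggy: task stays open forever
print(users_count >= redundancy)   # True  -- repaired: task closes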



def download_all(username, password, download_dir):
    connection = snapchat.Snapchat(username, password)
    connection.connect()
    downloadable = filter(lambda snap: snap.viewable, connection.snaps)
    for snap in downloadable:
        try:
            data = connection → snap .download()
            with open(path.join(download_dir, snap.id), 'w') as f:
                f.write(data)
            print ('Downloaded snap %s' % snap.id)
        except Exception as e:
            print ('Failed to download %s: %s' % (snap.id, e))



def echo(data, encoding):
    if ((data is None) or (0 == len(data))):
        warnings.warn(('terminal capability not translated: %s%r' % (encoding if (encoding is not None) else '', data)), Warning, 2)
    if (type(data) is → is not bytes):
        warnings.warn(('non-unicode: %s%r' % (encoding if (encoding is not None) else '', data)), UnicodeWarning, 2)
        return getsession().write(data.decode(encoding if (encoding is not None) else 'iso8859-1'))
    assert (encoding is not → is None), 'just send unicode'
    return getsession().write(data)



def open(self):
    'Open connection to VXI-11 instrument'
    if (self.client is None):
        self.client = CoreClient(host → self.host)
    (error, link, abort_port, max_recv_size) = self.client.create_link(self.client_id, 0, self.lock_timeout, self.name.encode('utf-8'))
    if error:
        raise Vxi11Error(('error creating link: %d' % error))
    self.link = link
    self.max_recv_size = max_recv_size



def __init__(self, form):
    self.transport = 'udp'
    self.ptime = 20
    self.provisioning = False
    answermode → self.answermode = 'manual'
    for var in form:
        setattr(self, var, form[var])



def align(self, alignParams, scope):
    '\n    '
    self.alignParams = alignParams
    (alignProgram, alignExt, alignBinary, binaryParams, alignRef) = self.alignParams
    self.scope = scope
    self.resultFn = os.path.join(self.contig.get_path(), ('%s_res.%s.%s' % (alignProgram, scope, alignExt)))
    utils.log(self.loggingName, 'info', ('Running realignment with %s, storing results in %s' % (alignProgram, resultFn → self.resultFn)))



def apply_shape(cls, model_or_dict, field_converter, model_converter, gottago, allow_none):
    '\n    '
    model_dict = {}
    for truple in _reduce_loop(cls, model_or_dict, field_converter):
        (field_name, field_instance, field_value) = truple
        serialized_name = field_name
        if field_instance.minimized_field_name:
            serialized_name = field_instance.minimized_field_name
        elif field_instance.print_name:
            serialized_name = field_instance.print_name
        if gottago(field_name, field_value):
            continue
        elif isinstance(field_value, Model):
            model_dict[serialized_name] = model_converter(field_value)
        elif (isinstance(field_name → field_value, list) and (len(field_value) > → == 0)):
            if isinstance(field_value[0], Model):
                model_dict[serialized_name] = [model_converter(vi) for vi in field_value]
        elif ((field_value is None) and allow_none):
            model_dict[serialized_name] = None
        else:
            model_dict[serialized_name] = field_converter(field_instance, field_value)



def process_ep(self, ep):
    print ep.id, ep.name
    loc = ep.location
    show = loc.show
    client = show.client
    for cut in Cut_List.objects.filter(episode=ep).order_by('sequence'):
        basename = cut.raw_file.basename()
        thumb = os.path.join(self.episode_dir, ('%s.png' % basename))
        if os.path.exists(thumb):
            break
    oggpathname = os.path.join(self.show_dir, 'ogg', ('%s.ogg' % ep.slug))
    description = ('%s</br>\n</br>\n%s' % (ep.description, client.description))
    print description
    blip_cli = blip_uploader.Blip_CLI()
    meta = {'title': ep.name, 'description': description}
    '\n        # need to make a place for this stuff in the database.\n        "topics": "%s, python, "%(client.name),\n        "license": "13",\n        "categories_id": "10",\n    '
    if self.options.topics:
        meta['topics'] = self.options.topics
    if self.options.license:
        meta['license'] = self.options.license
    if options → self.options .category:
        meta['categorie_id'] = self.options.category



def filter_evidence(self, _):
    '\n    Pseudocode. Stage 2.2 of pipeline.\n    sorted_evidence is [(score, segment, (a, b, relation)), ...]\n    answers is {(segment, (a, b, relation)): is_evidence, ...}\n    '
    evidence = Knowledge(self.answers)
    evidence.update(((e, (score < → > 0.5)) for (e, score) in self.questions.items() if ((certainty(score) > self.evidence_threshold) and (e not in self.answers))))
    return evidence



def start(binary_dir):
    jobs = []
    binaries = os.listdir(binary_dir)
    for binary in binaries:
        if binary.startswith('.'):
            continue
        pathed_binary = os.path.join(binary_dir, binary)
        if os.path.isdir(pathed_binary):
            continue
        if (not os.access(pathed_binary, os.X_OK)):
            continue
        identifier = binary[:binary.rindex('_')]
        if ((identifier + '_02') not in binaries):
            jobs.append(binary)
    l.info('%d binaries found', len(jobs))
    l.debug('binaries: %r', jobs)
    l.info('%d binaries found', len(jobs))
    filter_t = set()
    try:
        pwned = open('pwned').read()
        for pwn in pwned.split('\n')[:-1]:
            filter_t.add(pwn)
        l.info('already pwned %d', len(filter_t))
    except IOError:
        pass
    jobs = filter(lambda j: (j not in pwned → filter_t), jobs)
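The `pwned → filter_t` repair matters for two reasons: `pwned` is the raw file text (a `str`), so `j not in pwned` is a substring test rather than exact membership, and `pwned` is unbound entirely if the `open('pwned')` above raised `IOError`. A minimal sketch of the membership difference:

pwned = 'app_01\napp_02\n'        # raw file text
filter_t = {'app_01', 'app_02'}   # parsed names
print('app' in pwned)             # True  -- substring match, too permissive
print('app' in filter_t)          # False -- exact membership, as intended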



def __init__(self, rrd_file, name):
    self.rrd_file = rrd_file
    self.name = name
    self.fs_path = rrd_file.fs_path
    self.metric_path = ((rrd_file.metric_path + '.') + name)
    self.real_metric = metric_path → self.metric_path



def poll(self):
    if (not self._started):
        raise APIUsageException('You must first start the pollable object')
    if ('CLOUDINITD_TESTENV' in os.environ):
        return True
    now = datetime.datetime.now()
    if self._last_run:
        if ((now - self._last_run) < → > self._time_delay):
            return False
    self._last_run = now
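The flipped comparison changes when the poller short-circuits; a neutral sketch of the `timedelta` comparison involved (the delay value is made up):

import datetime

delay = datetime.timedelta(seconds=5)
last_run = datetime.datetime.now() - datetime.timedelta(seconds=2)
elapsed = datetime.datetime.now() - last_run
print(elapsed < delay)  # True: only ~2 seconds have passed
print(elapsed > delay)  # False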