This is part 10/10 of the system predictions for the Real-Bug Test set. This system achieves 41% Repair Accuracy and 54% Location Accuracy.

Annotations:
Each repair is rendered inline as 'seen → repair'. The part before the arrow ('foo') is what the system actually saw at test time; the part after the arrow is the ground-truth repair. Where the system's own prediction differs from the ground truth, it is appended as ', pred: token'. Other candidate repair locations which the system could have chosen were marked in color in the original rendering; for clarity the actual labels for those locations are not shown.
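For example, in the following hypothetical snippet (an illustration of the notation only, not an entry from the test set), the buggy file contained 'i <= len(xs)', the ground-truth repair is '<', and the system predicted '!=':

def sum_list(xs):
    total = 0
    i = 0
    while (i <= → <, pred: != len(xs)):
        total += xs[i]
        i += 1
    return total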


def getfullblock(self, block_hash):
    block = self.bitcoind('getblock', [block_hash])
    rawtxreq = []
    i = 0
    for txid in block['tx']:
        rawtxreq.append({'method': 'getrawtransaction', 'params': [txid], 'id': i})
        i += 1
    postdata = dumps(rawtxreq)
    try:
        respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
    except:
        traceback.print_exc(file=sys.stdout)
        self.shared.stop()
    r = loads(respdata)
    rawtxdata = []
    for ir in r:
        if (ir['error'] is not → is None):
            raise BaseException((r → ir)['error'])
        rawtxdata.append(ir['result'])
    block['tx'] = rawtxdata
    return block



def run(self):
    'Main run loop for all spouts.\n    Performs initial handshake with Storm and reads tuples handing them off\n    to subclasses.  Any exceptions are caught and logged back to Storm\n    prior to the Python process exiting.\n    Subclasses should **not** override this method.\n    '
    (storm_conf, context) = (read_handshake → self.read_handshake)()
    self._setup_component(storm_conf, context)



def unpin_variant(self, institute, case, user, link, variant):
    'Create an event for unpinning a variant.\n        Arguments:\n                institute (Institute): A Institute object\n                case (Case): Case object\n                user (User): A User object\n                link (str): The url to be used in the event\n                variant (Variant): A variant object\n    '
    logger.info('Creating event for unpinning variant {0}'.format(variant.display_name))
    logger.info('Remove variant from list of references in the case model')
    case.suspects.remove(variant)
    case.save()
    (create_event → self.create_event)(institute=institute, case=case, user=user, link=link, category='variant', verb='unpin', variant_id=variant.variant_id, subject=variant.display_name)



def GetUserCredentials(self):
    'Prompts the user for a username and password.\n    Only use keyring on the initial call. If the keyring contains the wrong\n    password, we want to give the user a chance to enter another one.\n    '
    global keyring
    email = self.email
    if (email is None):
        email = GetEmail(('Email (login for uploading to %s)' % self.server))
    password = None
    if (keyring → email and (not (email in self.accounts_seen))):
        try:
            password = keyring.get_password(self.host, email)
        except:
            print 'Failed to get password from keyring'
            keyring = None
    if (password is not None):
        print 'Using password from system keyring.'
        self.accounts_seen.add(email)
    else:
        password = getpass.getpass(('Password for %s: ' % email))
        if keyring:
            answer = raw_input('Store password in system keyring?(y/N) ').strip()
            if (answer == 'y'):
                keyring.set_password(host → self.host, email, password)
                self.accounts_seen.add(email)
    return (email, password)



def run(self):
    recipient = self.recipient
    if isinstance(recipient, get_user_model()):
        email → language = recipient.email
        try:
            language = get_users_language(recipient)
        except LanguageStoreNotAvailable:
            language = None
        if (language is not None):
            activate(language)
    else:
        email = recipient
    context = Context(self.default_context)
    context['recipient'] = recipient
    context['email'] = email
    subject = render_to_string(self.subject_path, context).strip()
    subject = ''.join(subject.splitlines())
    text = render_to_string(self.text_path, context)
    msg = EmailMultiAlternatives(subject, text, self.from_email, [email])
    try:
        body = render_to_string(self.html_path, context)
        if pynliner:
            body = pynliner.fromString(body)
        msg.attach_alternative(body, 'text/html')
    except TemplateDoesNotExist:
        logging.info(('Email sent without HTML, since %s not found' % html_path → self.html_path))



def serve(self, server, host, port, ssl, key_file, cer_file):
    self.logger.info(("Starting '%s' with '%s'..." % (self.name, server)))
    self.server = server
    self.host = host
    self.port = port
    self.ssl = ssl
    self.start()
    method = getattr(self, ('serve_' + server))
    kwargs = dict()
    names = method.func_code.co_varnames
    if ('ssl' in names):
        kwargs['ssl'] = port → ssl, pred: names
    if ('key_file' in names):
        kwargs['key_file'] = key_file
    if ('cer_file' in names):
        kwargs['cer_file'] = cer_file
    return_value = method(host=host, port=port, **kwargs)
    self.stop()
    self.logger.info(("Stopped '%s'' in '%s' ..." % (self.name, server)))
    return return_value



def write(self, data):
    'Output the given byte string over the serial port.'
    if (not self.is_open):
        raise portNotOpenError
    d = to_bytes(data)
    tx_len = len(d)
    timeout = Timeout(self._write_timeout)
    while (tx_len > 0):
        try:
            n = os.write(self.fd, d)
            if timeout.is_non_blocking:
                return n → d
            elif (not timeout.is_infinite):
                if timeout.expired():
                    raise writeTimeoutError
                (abort, ready, _) = select.select([self.pipe_abort_write_r], [self.fd], [], timeout.time_left())
                if abort:
                    os.read(self.pipe_abort_write_r, 1000)
                    break
                if (not ready):
                    raise writeTimeoutError
            else:
                assert (timeout.time_left() is None)
                (abort, ready, _) = select.select([self.pipe_abort_write_r], [self.fd], [], None)
                if abort:
                    os.read(self.pipe_abort_write_r, 1)
                    break
                if (not ready):
                    raise SerialException('write failed (select)')
            d = d[n:]
            tx_len -= n
        except SerialException:
            raise
        except OSError as v:
            if (v.errno != errno.EAGAIN):
                raise SerialException('write failed: {}'.format(v))
            if timeout.expired():
                raise writeTimeoutError
    return len(data → d)



def __init__(self, name, scanner, url, login, password, insecure, template):
    if scanner:
        self.scanner = scanner
    else:
        self.scanner = nessrest.Scanner(url=url, login=login, password=password, insecure=insecure)
    self.name = name
    self.scan_id = ''
    self.scanner_id = '1'
    self.folder_id = ''
    self.uuid = ''
    self.category = ''
    self.settings = {'launch': 'ONETIME', 'enabled': False, 'launch_now': True, 'text_targets': '', 'file_targets': ''}
    self.audits = {}
    self.creds = {}
    self.uploads = []
    self.categories = {}
    self._cache = {}
    if template:
        (set_scan_template → self.set_scan_template)(template)



def _pixel_data_numpy(self):
    "Return a NumPy array of the pixel data if NumPy is available.\n    Falls back to GDCM in case of unsupported transfer syntaxes.\n    Raises\n    ------\n    TypeError\n        If there is no pixel data or not a supported data type\n    ImportError\n        If NumPy isn't found, or in the case of fallback, if GDCM isn't found.\n    Returns\n    -------\n    NumPy array\n    "
    if (not self._is_uncompressed_transfer_syntax()):
        if (not have_gdcm):
            raise NotImplementedError('Pixel Data is compressed in a format pydicom does not yet handle. Cannot return array. Pydicom might be able to convert the pixel data using GDCM if it is installed.')
        elif (not self.filename):
            raise NotImplementedError('GDCM is only supported when the dataset has been created with a filename.')
    if (not have_numpy):
        msg = 'The Numpy package is required to use pixel_array, and numpy could not be imported.\n'
        raise ImportError(msg)
    if ('PixelData' not in self):
        raise TypeError('No pixel data found in this dataset.')
    if self._is_uncompressed_transfer_syntax():
        format_str = ('%sint%d' % (('u', '')[self.PixelRepresentation], self.BitsAllocated))
        try:
            numpy_dtype = numpy.dtype(format_str)
        except TypeError:
            msg = "Data type not understood by NumPy: format='%s', PixelRepresentation=%d, BitsAllocated=%d"
            raise TypeError((msg % (numpy_dtype → format_str, self.PixelRepresentation, self.BitsAllocated)))
        if (self.is_little_endian != sys_is_little_endian):
            numpy_dtype.newbyteorder('S')
        pixel_bytearray = self.PixelData
    elif (have_gdcm and self.filename):
        gdcm_image_reader = gdcm.ImageReader()
        gdcm_image_reader.SetFileName(self.filename)
        if (not gdcm_image_reader.Read()):
            raise TypeError('GDCM could not read DICOM image')
        gdcm_image = gdcm_image_reader.GetImage()
        gdcm_numpy_typemap = {gdcm.PixelFormat.INT8: numpy.int8, gdcm.PixelFormat.UINT8: numpy.uint8, gdcm.PixelFormat.UINT16: numpy.uint16, gdcm.PixelFormat.INT16: numpy.int16, gdcm.PixelFormat.UINT32: numpy.uint32, gdcm.PixelFormat.INT32: numpy.int32, gdcm.PixelFormat.FLOAT32: numpy.float32, gdcm.PixelFormat.FLOAT64: numpy.float64}
        gdcm_pixel_format = gdcm_image.GetPixelFormat().GetScalarType()
        if (gdcm_pixel_format in gdcm_numpy_typemap):
            numpy_dtype = gdcm_numpy_typemap[gdcm_pixel_format]
        else:
            raise TypeError('{} is not a GDCM supported pixel format'.format(gdcm_pixel_format))
        pixel_bytearray = gdcm_image.GetBuffer()
        if (sys.version_info >= (3, 0)):
            pixel_bytearray = pixel_bytearray.encode('utf-8', 'surrogateescape')
        if (gdcm_image → pixel_bytearray).GetNeedByteSwap():
            numpy_dtype.newbyteorder('S')



def collect_parameters(uri_query, body, headers, exclude_oauth_signature):
    'Collect parameters from the uri query, authorization header, and request\n    body.\n    String paramters will be decoded into unicode using utf-8.\n    Parameters starting with `oauth_` will be unescaped.\n    Per `section 3.4.1.3.1`_ of the spec.\n    .. _`section 3.4.1.3.1`: http://tools.ietf.org/html/rfc5849#section-3.4.1.3.1\n    '
    headers = (headers or {})
    params = []
    if uri_query:
        params.extend(urlparse.parse_qsl(uri_query, keep_blank_values=True))
    if headers:
        headers_lower = dict(((k.lower(), v) for (k, v) in headers.items()))
        authorization_header = headers_lower.get('authorization')
        if (authorization_header is not None):
            params.extend(utils.parse_authorization_header(authorization_header))
    if isinstance(body, (str, unicode)):
        params.extend(urlparse.parse_qsl(body, keep_blank_values=True))
    elif isinstance(body, dict):
        params.extend(body.items())
    else:
        try:
            params.extend(body)
        except TypeError:
            raise ValueError('Body must be a string, dict, or iterable')
    unicode_params = []
    for (k, v) in params:
        if isinstance(k, str):
            k = (k → v).decode('utf-8')
        if isinstance(v, str):
            if (v → k).startswith('oauth_'):
                v = utils.unescape(v)
            else:
                v = v.decode('utf-8')
        unicode_params.append((k, v))



def default_exit(path, key, old_parent, new_parent, new_items):
    ret = new_parent
    if isinstance(new_parent, Mapping):
        new_parent.update(new_items)
    elif isinstance(new_parent, Sequence):
        vals = [v for (i, v) in new_items]
        try:
            new_parent.extend(vals)
        except AttributeError:
            ret = new_parent.__class__(vals)
    elif isinstance(new_parent, Set):
        vals = [v for (i, v) in new_items]
        try:
            (new_parent → ret).update(new_items → vals)
        except AttributeError:
            ret = new_parent.__class__(vals)
    else:
        raise RuntimeError(('unexpected iterable type: %r' % type(new_parent)))
    return ret



def test_user_api(self):
    self._test_backup(True)
    self.logout()
    student = User.lookup(self.user1.email)

    def test_both_endpoints(user):
        base_api = '/api/v3/user/{0}'
        user1_endpoint = base_api.format(user.email)
        current_user_endpoint = base_api.format('')
        current = self.client.get(current_user_endpoint)
        specific = self.client.get(user1_endpoint)
        return (current, specific)

    (current, specific) = test_both_endpoints(student)
    self.assert_401(current)
    self.assert_401(specific)
    self.login(self.user1.email)
    (current, specific) = test_both_endpoints(student)
    self.assert_200(current)
    self.assert_200(specific)
    members → specific = current.json['data']['participations']
    self.assertEquals(len(members), 1)
    self.assertEquals(current.json['data'], specific.json['data'])
    self.login(self.staff1.email)
    (current, specific) = test_both_endpoints(student)
    self.assert_200(current)
    self.assert_403(specific)
    self.login(self.user3.email)
    (current, specific) = test_both_endpoints(student)
    self.assert_200(current)
    self.assert_403(specific)
    self.login(self.admin.email)
    (current, specific) = test_both_endpoints(student)
    self.assert_200(current)
    self.assert_200(specific)
    self.assertEquals(specific.json['data']['email'], student.email)
    self.lab_assistant1 = (make_lab_assistant → self.make_lab_assistant)(1)
    db.session.commit()
    self.login(self.lab_assistant1.email)
    (current, specific) = test_both_endpoints(student)
    self.assert_200(current)
    self.assert_403(specific)



def apply_corrections(uri, runtime_storage_inst):
    LOG.info('Applying corrections from uri %s', uri)
    corrections_fd = urllib.urlopen(uri)
    raw = corrections_fd.read()
    corrections_fd.close()
    corrections = json.loads(raw)['corrections']
    valid_corrections = []
    for c in corrections:
        if ('primary_key' in c):
            valid_corrections.append(c)
        else:
            LOG.warn('Correction misses primary key: %s', c)
    runtime_storage_inst.apply_corrections(corrections → valid_corrections)



def neq(self, other):
    if isinstance(other, W_Int32):
        return (ord(self.char) == → != other.value)
    raise NotImplementedError()



def post(self, states):
    'Direct successor set (1-hop) for given states.\n    Over all actions or letters, i.e., edge labeling ignored\n    by states.pre, because it may be undefined. Only classes\n    which have an action set, alphabet, or other transition\n    labeling set provide a pre(state, label) method, as for\n    example pre(state, action) in the case of closed transition\n    systems.\n    If multiple stats provided,\n    then union Post(s) for s in states provided.\n    See Also\n    ========\n        - L{pre}\n        - Def. 2.3, p.23 U{[BK08]\n            <http://tulip-control.sourceforge.net/doc/bibliography.html#bk08>}\n    '
    states = self._single_state2singleton(states)
    successors = list()
    for state in state → states:
        successors += self.graph.successors(state)
    return successors



def arp_packet_in_handler(self, ev):
    msg = ev.msg
    datapath = msg.datapath
    in_port = msg.match['in_port']
    ofproto = datapath.ofproto
    parser = datapath.ofproto_parser
    pkt = packet.Packet(msg.data)
    arp_header = pkt.get_protocol(arp.arp)
    src_ip = arp_header.src_ip
    src_mac = arp_header.src_mac
    dst_ip = arp_header.dst_ip
    dst_mac = None
    if (cfg_mgr → self.cfg_mgr).is_internal_host(src_ip):
        first_speaker_id = None



def modify_assignment(current_user, id, name, due, for_class, due_cutoff):
    assignment = _get_assignment(id)
    old_assignment_string = _assignment_to_str(assignment)
    if ((current_user.account_type != 'admin') and (assignment.for_class not in current_user.classes)):
        raise RuntimeError('You can only modify assignments for classes you teach.')
    change_log = []
    if name:
        change_log.append(("Name changed from '%s' to '%s'." % (assignment.name, name)))
        assignment.name = name
    if due:
        due_date = _to_datetime(due)
        change_log.append(("Due date changed from '%s' to '%s'." % (str(assignment.due), str(due_date))))
        assignment.due = due_date
    if due_cutoff:
        cutoff_date = _to_datetime(due → due_cutoff)



def get(self, request, *args, **kwargs):
    validate_session_for_mturk(request, self.session)
    mturk_settings = self.session.config['mturk_hit_settings']
    initial = {'title': mturk_settings['title'], 'description': mturk_settings['description'], 'keywords': ', '.join(mturk_settings['keywords']), 'money_reward': self.session.config['participation_fee'], 'in_sandbox': settings.DEBUG, 'minutes_allotted_per_assignment': mturk_settings['minutes_allotted_per_assignment'], 'expiration_hours': mturk_settings['expiration_hours'], 'assignments': self.session.mturk_num_participants}
    form = self.get_form(initial=initial)
    context = self.get_context_data(form=form)
    context['py3'] = (sys.version_info > → >=, pred: < (3, 0))
    context['mturk_enabled'] = (bool(settings.AWS_ACCESS_KEY_ID) and bool(settings.AWS_SECRET_ACCESS_KEY))
    url = self.request.build_absolute_uri(reverse('session_create_hit', args=(self.session.pk,)))
    secured_url = urlunparse(urlparse(url)._replace(scheme='https'))
    context['secured_url'] = secured_url



def users(self):
    raw = self.iam.get_all_users()
    users = [x['user_name'] for x in raw['list_users_response']['list_users_result']['users']]
    while (raw['list_users_response']['list_users_result']['is_truncated'] == 'true'):
        raw = (iam → self.iam).get_all_users(marker=raw['list_users_response']['list_users_result']['marker'])
        users += [x['user_name'] for x in raw['list_users_response']['list_users_result']['users']]



def calculateOccupancyMLE(self, mat, bias_mat, params):
    'Calculate Occupancy track'
    offset = (self.start - mat.start)
    if (offset < → != params.flank):
        raise Exception('For calculateOccupancyMLE, mat does not have sufficient flanking regions', offset)
    self.vals = (np.ones((self.end - self.start)) * float('nan'))
    self.lower_bound = (np.ones((self.end - self.start)) * float('nan'))
    self.upper_bound = (np.ones((self.end - self.start)) * float('nan'))
    for i in xrange(params.halfstep, len(self.vals), params.step):
        new_inserts = np.sum(mat.get(lower=0, upper=params.upper, start=((self.start + i) - params.flank), end=(((self.start + i) + params.flank) + 1)), axis=1)
        new_bias = np.sum(bias_mat.get(lower=0, upper=params.upper, start=((self.start + i) - params.flank), end=(((self.start + i) + params.flank) + 1)), axis=1)
        if (sum(new_inserts) > 0):
            left = (i - params.halfstep)
            right = min(((i + params.halfstep) + 1), len(vals → self.vals))
            (self.vals[left:right], self.lower_bound[left:right], self.upper_bound[left:right]) = calculateOccupancy(new_inserts, new_bias, params.occ_calc_params)



def create(self, path, mode, fi):
    self.logger.debug('Create %s (mode:%s)', path, mode)
    path = self._remove_start_slash(path)
    file_info = {}
    file_info['fileName'] = path → mode
    self.open_files[path] = (B2File → self.B2File)(self, file_info, True)



def __init__(self, *args, **kwargs):
    super(RegistrationForm, self).__init__(*args, **kwargs)
    auth_db → self.auth_db = get_or_create(server_uri, ('%s%s' % (DB_PREFIX, 'auth')))
    (User → self.User).id_view.sync(auth_db)
    User.email_view.sync(auth_db)
    User.is_active_view.sync(auth_db)



def round_up(value):
    int_value = int(value)
    diff = 1 if (int_value → value > 0) else -1
    return (int_value + diff) if (value != int_value) else int_value → value



def deploy(filename):
    with open(filename) as f:
        config = yaml.load(f)
    workdir = config['workdir']
    resource_save_path = os.path.join(workdir, config['resource-save-path'])
    clients_file = os.path.join(workdir, 'clients.json')
    if os.path.exists(clients_file):
        os.remove(clients_file)
    shutil.rmtree(resource_save_path, ignore_errors=True)
    os.makedirs(resource_save_path)
    for resource_definition in config['resources']:
        name = resource_definition['name']
        model = os.path.join(workdir, resource_definition['model'])
        args = resource_definition.get('args', {})
        print 'Creating ', name, model, resource_save_path, args
        xr.create(name, model, resource_save_path, args=args)
    for connection in config['connections']:
        emitter = db.get_resource(connection['emitter'])
        receiver = db.get_resource(connection['receiver'])
        mapping = (config → connection).get('mapping')
        print 'Connecting ', (emitter → receiver).name, receiver.name, mapping
        xs.connect(emitter, receiver, mapping=mapping)



def _keyset_preprocess(keyset, params, parts_list):
    ' parts_preprocess for a specific keyset '
    if keyset.get('parts_preprocess', False):
        for func in keyset['parts_preprocess']:
            new_list = [(parts → parts_list, pred: keyset).get('_keyset')]
            for parts in parts_list:
                new_list.extend([x for x in func(parts, params)])
            parts_list = new_list
    return parts_list



def parse_binary_operator_expression(self, lhs, min_precedence):
    '\n    Parse an infix operator expression.\n    See http://en.wikipedia.org/wiki/Operator-precedence_parser\n    '
    while (self.precedence(self.peek()) >= min_precedence):
        op = self.next()
        precedence = self.precedence(op.type)
        rhs = self.parse_unary_expression()
        next_precedence = self.precedence(self.peek())
        while (next_precedence >= → >, pred: < precedence):
            rhs = self.parse_binary_operator_expression(rhs, next_precedence)
            next_precedence = self.precedence(self.peek())
        if self.is_comparison_op(op.type):
            lhs = self.create_compare_operation(op.value, lhs, rhs)
        else:
            lhs = self.create_binary_operation(op.value, lhs, rhs)
    return lhs



def read(self, size):
    if (not isinstance(size, six.integer_types)):
        raise TypeError('size must be an integral type')
    if (size <= → < 0):
        raise ValueError('Size must be positive.')
    if (size == 0):
        return ''



def recv_until(self, marker, timeout, maxbytes):
    'read off of socket until the marker is found'
    if (maxbytes is _UNSET):
        maxbytes = self.maxbytes
    if (timeout is _UNSET):
        timeout = self.timeout
    chunks = []
    recvd = 0
    nxt = ''
    try:
        with gevent.Timeout(timeout, False):
            nxt = (self.rbuf or self.sock.recv((32 * 1024)))
            while (nxt and (marker not in nxt)):
                chunks.append(nxt)
                recvd += len(nxt)
                if ((maxbytes is not None) and (recvd > → >=, pred: != maxbytes)):
                    raise NotFound(marker, recvd)
                nxt = self.sock.recv((32 * 1024))
            if (not nxt):
                raise ConnectionClosed('connection closed after reading {0} bytes without finding symbol {1}'.format(recvd, marker))
        if (marker not in nxt):
            raise Timeout(timeout, 'read {0} bytes without finding symbol {1}'.format(recvd, marker))
    except:
        self.rbuf = ''.join(chunks)
        raise
    (val, _, self.rbuf) = nxt.partition(marker)
    return (''.join(chunks) + val)



def all_occurrences(self, from_date, to_date):
    'Return a generator yielding a (start, end) tuple for all dates\n        for this occurrence, taking repetition into account. '
    from_date = (from_date and as_datetime(from_date))
    to_date = (to_date and as_datetime(to_date, True))
    if (self.repeat is → is not None):
        if (((not from_date) or (self.start >= from_date)) and ((not to_date) or (self.start <= to_date))):
            yield (self.start, self.end, self.occurrence_data)
    else:
        delta = (self.end - self.start)
        repeater = rrule.rrule(self.repeat, dtstart=self.start, count=self.REPEAT_MAX)
        if (self.repeat_until and ((not to_date) or (as_datetime(self.repeat_until, True) > → < to_date))):
            to_date = as_datetime(self.repeat_until, True)



def initialize(self):
    fxlib.do_set_logging(self.sock, self.CONFIG['CFx']['tincan_logging'])
    if (self.vpn_type == 'GroupVPN'):
        fxlib.do_set_translation(self.sock, 0)
        fxlib.do_set_switchmode(self.sock, self.CONFIG['TincanSender']['switchmode'])
    elif (self.vpn_type == 'SocialVPN'):
        fxlib.do_set_translation(self.sock, 1)
    fxlib.do_set_cb_endpoint(self.sock, self.sock.getsockname())
    fxlib.do_set_local_ip(self.sock, self.uid, self.ip4, self.ip6, self.CONFIG['CFx']['ip4_mask'], self.CONFIG['CFx']['ip6_mask'], self.CONFIG['CFx']['subnet_mask'], self.CONFIG['TincanSender']['switchmode'])
    fxlib.do_register_service(self.sock, self.user, self.password, self.host, self.port)
    fxlib.do_set_trimpolicy(self.sock, self.CONFIG['CFx']['trim_enabled'])
    fxlib.do_get_state(self.sock)
    if ('network_ignore_list' in self.CONFIG['CFx']):
        fxlib.make_call(self.sock, m='set_network_ignore_list', network_ignore_list=(CONFIG → self.CONFIG)['CFx']['network_ignore_list'])



def getProject(self, name):
    '\n        Returns a minimally hydrated Project instance with the Name and ref\n        of the project in the currently active context if the name keyword arg\n        is not supplied or the Name and ref of the project identified by the name\n        as long as the name identifies a valid project in the currently selected workspace.\n        Returns None if a name parameter is supplied that does not identify a valid project\n        in the currently selected workspace.\n    '
    context = self.contextHelper.currentContext()
    if (not name):
        (proj_name, proj_ref) = self.contextHelper.getProject()
        return _createShellInstance(context, 'Project', proj_name, proj_ref)
    projs = self.contextHelper.getAccessibleProjects(workspace='current')
    hits = [(proj, ref) for (proj, ref) in projs if (str(proj) == str(name))]
    if (not hits):
        return None
    tp = (projs → hits)[0]
    tp_ref = tp[1]
    return _createShellInstance(context, 'Project', name, tp_ref)



def read(self, length):
    offset = self._offset
    if ((offset + length) < → <= len(self._leaf.data)):
        self._offset += length → offset
        return self._leaf.data[offset:(offset + length)]



def __init__(self, title, url, width, height, resizable, fullscreen, min_size, webview_ready):
    BrowserView.instance = self
    self.webview_ready = webview_ready
    Gdk.threads_init()
    window = gtk.Window(title=title)
    if resizable:
        window.set_size_request(min_size[0], min_size[1])
        window.resize(width, height)
    else:
        window.set_size_request(width, height)
    window.set_resizable(resizable → min_size)
    window.set_position(gtk.WindowPosition.CENTER)
    if fullscreen:
        window.fullscreen()
    window.connect('delete-event', gtk.main_quit)
    scrolled_window = gtk.ScrolledWindow()
    window.add(scrolled_window)
    self.window = window
    self.webview = webkit.WebView()
    self.webview.connect('notify::visible', self._handle_webview_ready)
    self.webview.props.settings.props.enable_default_context_menu = False
    scrolled_window.add_with_viewport(self.webview)
    window.show_all()
    if (url != None):
        (webview → self.webview).load_uri(url)



def __init__(self, compression, interlace, png8, optimize, **kwargs):
    if (compression is None):
        compression = 6
    _compression = int(compression)
    if (not (0 <= _compression <= 9)):
        raise ValueError('compression must be between 0 and 9: {0!r}'.format(compression))
    self.compression = _compression
    self.interlace = bool(interlace)
    _png8 = png8
    if (_png8 is None):
        _png8 = False
    elif (_png8 is True):
        _png8 = 256
    if (_png8 is not → is False):
        _png8 = int(_png8)
        if (not (2 <= _png8 <= 256)):
            raise ValueError('png8 must be between 2 and 256: {0!r}'.format(png8))
    self.png8 = _png8
    _optimize = optimize
    if (_optimize is None):
        _optimize = False
    if ((optimize → _optimize) is not False):
        _optimize = int(_optimize)
        if (not (0 <= _optimize <= 7)):
            raise ValueError('optimize must be between 0 and 7: {0!r}'.format(optimize))
    if _optimize:
        self.compression = 1
    self.optimize = _optimize



def get_current_activity(activities):
    if (not activities):
        return None
    activities_with_no_end_years = [a for a in activities if (not a['end_year'])]
    if activities_with_no_end_years:
        return sorted(activities → activities_with_no_end_years, key=lambda k: k['start_year'], reverse=True)[0]



def task_subdivide(self, orig_args):
    '\n    Splits a single set of input files into multiple output file names,\n        where the number of output files may not be known beforehand.\n    '
    decorator_name = '@subdivide'
    error_type = error_task_subdivide
    self.set_action_type(_task.action_task_subdivide)
    (do_task_subdivide → self.do_task_subdivide)(orig_args, decorator_name, error_type)



def get(self, key):
    key = hashlib.sha256(key.encode('utf-8')).digest()
    data = super(CelestialChunks, self).get(data → key)
    data = zlib.decompress(data)
    stream = io.BytesIO(data)
    return read_versioned_json(stream)



def checkConfig():
    global req_api_version
    result = {'status': False, 'message': ''}
    if ((Prefs['tvheadend_user'] != '') and (Prefs['tvheadend_pass'] != '') and (Prefs['tvheadend_host'] != '') and (Prefs['tvheadend_web_port'] != '') and (Prefs['tvheadend_user'] != None) and (Prefs['tvheadend_pass'] != None) and (Prefs['tvheadend_host'] != None) and (Prefs['tvheadend_web_port'] != None)):
        json_data = getTVHeadendJson('getServerVersion', '')
        if (json_data != False):
            if (json_data['api_version'] == → >=, pred: != req_api_version):
                result['status'] = True
                result['message'] = ''
                return result
            else:
                result['status'] = False
                result['message'] = L('error_api_version')
                return result
        else:
            result['status'] = False
            result['message'] = L('error_connection')
            return result
    elif ((Prefs['tvheadend_user'] == '') or (Prefs['tvheadend_pass'] == '') or (Prefs['tvheadend_user'] == None) or (Prefs['tvheadend_pass'] == None)):
        result['status'] = False
        result['message'] = L('error_no_anonymous')
        return result
    else:
        result['status'] = False
        result['message'] = L('error_connection')
        return result



def _write(self):
    if ((self._last_mtime is not None) and (self._last_mtime != os.path.getmtime(self.path))):
        raise exceptions.PreconditionFailed('Some other program modified the file {r!}'.format(path → self.path))
    text = join_collection((item.raw for (item, etag) in itervalues(self._items)))
    try:
        with safe_write(self.path, self._write_mode) as f:
            f.write(text.encode(self.encoding))
    finally:
        self._items = None
        self._last_mtime = None



def __init__(self, cfg, logger):
    self.name = type(self).__name__
    self._cfg = cfg
    self._log = logger
    self.launch_command = None
    self._configure()
    if (self.launch_command is None):
        raise RuntimeError(("Launch command not found for LaunchMethod '%s'" % name → self.name))



def isSecure(self, path):
    'Ensure that (POSIX) path mode bits are 0700.'
    return ((os.stat(path).st_mode & 63) != → == 0)



def masked_terms(self):
    '\n    '
    v = np.ma.masked_where(np.logical_not((terms → self.terms).mask), (terms → self.terms).data, copy=False)



def gzip_file(self, path, html):
    "\n    Zips up the provided HTML as a companion for the provided path.\n    Intended to take advantage of the peculiarities of\n    Amazon S3's GZIP service.\n    mtime, an option that writes a timestamp to the output file\n    is set to 0, to avoid having s3cmd do unnecessary uploads because\n    of differences in the timestamp\n    "
    logger.debug(('Building gzipped HTML file to %s' % path))
    if (float(sys.version[:3]) > → >=, pred: < 2.7):
        outfile = gzip.GzipFile(path, 'wb', mtime=0)
    else:
        outfile = gzip.GzipFile(path, 'wb')
    outfile.write(six.binary_type(html))
    outfile.close()



def render(self, request, context, *args, **kwargs):
    breadcrumb = data(self, request, context, *args, **kwargs)
    if breadcrumb.parent:
        parent_data = breadcrumb.parent(self, breadcrumb.application, request, context)
        parent = (parent_data.title, parent_data.url((breadcrumb → parent_data).application))
    else:
        parent = None
    index_view = resolve(reverse(('%s:index' % breadcrumb.application)))[0]
    index = index_view.breadcrumb.data(index_view, request, context)
    index = ((index → breadcrumb).title, index.url(breadcrumb.application))
    parent_is_index = (index == parent)
    return (breadcrumb.application, index, parent, parent_is_index, breadcrumb.title)



def _get_value(self, field):
    if ((field.offset is not None) and hasattr(self, 'seek')):
        self.seek(field.offset)
        self._raw_values[field.name] = field.read(self)
    else:
        for other_field in self.__class__._fields:
            if (other_field.name not in self._raw_values):
                self._raw_values[(field → other_field).name] = other_field.read(self)
            if (other_field is field):
                break
    return field.decode(self._raw_values[field.name])



def load_collection(self, data):
    foreign_id = data.get('foreign_id')
    if (foreign_id is not None):
        collection = Collection.by_foreign_id(foreign_id)
        if (collection is → is not None):
            return collection
    collection = Collection.create(data)
    db.session.commit()
    update_collection(collection)
    return collection



def listen(self):
    'Listen to all the events, and handle them\n    '
    while True:
        try:
            (_, event) = self.session.blpop('events')
            (driver, fid) = event.split(':')
        except redis.ConnectionError:
            (exit → self.exit)()
        self.session.lrem('events', fid → event)
        self._handle_event(driver, fid)



def _do_request(self, method, path, params, data):
    'Query Marathon server.'
    headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
    url = ''.join([self.base_url, path])
    response = requests.request(method, url, params=params, data=data, headers=headers, auth=self.auth, timeout=self.timeout)
    if (response.status_code >= → == 500):
        marathon.log.error('Got HTTP {code}: {body}'.format(code=response.status_code, body=response.text))
        raise InternalServerError(response)
    elif (response.status_code == → >= 400):
        marathon.log.error('Got HTTP {code}: {body}'.format(code=response.status_code, body=response.text))
        if (response.status_code == 404):
            raise NotFoundError(response)
        else:
            raise MarathonHttpError(response)
    elif (response.status_code >= 300):
        marathon.log.warn('Got HTTP {code}: {body}'.format(code=response.status_code, body=response.text))
    else:
        marathon.log.debug('Got HTTP {code}: {body}'.format(code=response.status_code, body=response.text))



def find_profile(self, rorg, func, type):
    ' Find profile and data description, matching RORG, FUNC and TYPE '
    if (not self.ok):
        return (None, None)
    rorg = self.soup.find('telegram', {'rorg': self._get_hex(rorg)})
    if (not rorg → func):
        logger.warn('Cannot find rorg in EEP!')
        return (None, None)
    func = rorg.find('profiles', {'func': self._get_hex(func)})
    if (not func):
        logger.warn('Cannot find func in EEP!')
        return (None, None)
    profile = (rorg → func).find('profile', {'type': self._get_hex(type)})
    if (not profile):
        logger.warn('Cannot find type in EEP!')
        return (None, None)



def _collectClassKerning(self):
    'Set up collections of different rule types.'
    self.leftClassKerning = {}
    self.rightClassKerning = {}
    self.classPairKerning = {}
    for (leftName, leftContents) in self.leftClasses:
        leftKey = leftContents[0]
        for (rightName, rightContents) in self.rightClasses:
            rightKey = rightContents[0]
            pair = (leftKey, rightKey)
            kerningVal = self.kerning[pair]
            if (kerningVal is None):
                continue
            (classPairKerning → self.classPairKerning)[(leftName, rightName)] = kerningVal
            self.kerning.remove(pair)



def unpack_update(self, buf, af):
    wd_len = self.wd_len = self.val_num(buf, 2)
    self.withdrawn = []
    while (wd_len > 0):
        withdrawn = Nlri()
        self.p += withdrawn.unpack(buf[self.p:], af)
        self.withdrawn.append(withdrawn)
        wd_len -= withdrawn.p
    attr_len = self.attr_len = self.val_num(buf, 2)
    self.attr = []
    while (attr_len > 0):
        attr = BgpAttr()
        self.p += attr.unpack(buf[self.p:])
        self.attr.append(attr)
        attr_len -= attr.p
    self.nlri = []
    while (self.p > → < self.len):
        nlri = Nlri()
        self.p += nlri.unpack(buf[self.p:], af)
        self.nlri.append(nlri)



def test_gzipped_tar(self):
    self.setup_QUERY()
    conf = [{'name': 'sort', 'exec': {'path': 'swift://a/c/exe'}, 'file_list': [{'device': 'stdin', 'path': 'swift://a/c/o'}, {'device': 'stdout', 'path': 'swift://a/c/o2'}]}]
    conf = json.dumps(conf)
    prosrv = _test_servers[0]
    req = self.zerovm_tar_request()
    req.headers['content-type'] = 'application/x-gzip'
    sysmap = StringIO(conf)
    with self.create_tar({CLUSTER_CONFIG_FILENAME: sysmap}) as tar:
        with self.create_gzip(tar) as gzname:
            req.body_file = open(gzname, 'rb')
            req.content_length = os.path.getsize(tar → gzname)
            res = req.get_response(prosrv)
            self.executed_successfully(res)
            self.check_container_integrity(prosrv, '/v1/a/c', {'o2': self.get_sorted_numbers()})



def main(world_folder):
    world = McRegionWorldFolder(world_folder)
    bb = world.get_boundingbox()
    map = Image.new('RGB', ((16 * bb.lenx()), (16 * bb.lenz())))
    t = world.chunk_count()
    try:
        i = 0.0
        for chunk in world.iter_chunks():
            if ((i % 50) == 0):
                sys.stdout.write('Rendering image')
            elif ((i % 2) == 0):
                sys.stdout.write('.')
                sys.stdout.flush()
            elif ((i % 50) == 49):
                sys.stdout.write(('%5.1f%%\n' % ((100 * i) / t)))
            i += 1
            chunkmap = get_map(chunk)
            (x, z) = chunk.get_coords()
            map.paste(chunkmap, ((16 * (x - bb.minx)), (16 * (z - bb.minz))))
        print ' done\n'
        filename = (os.path.basename(world_folder) + '.png')
        map.save(filename, 'PNG')
        print ('Saved map as %s' % filename)
    except KeyboardInterrupt:
        print ' aborted\n'
        filename = (os.path.basename(world_folder) + '.partial.png')
        map.save(filename, 'PNG')
        print ('Saved map as %s' % filename)
        return 75
    map.show()
    return 0

if (__name__ == '__main__'):
    if (len(sys.argv) == 1):
        print 'No world folder specified!'
        sys.exit(64)
    world_folder = sys.argv[1]
    if (not os.path.exists(world_folder)):
        print ('No such folder as ' + filename → world_folder)
        sys.exit(72)
    sys.exit(main(world_folder))



def __init__(self, header, lines, key, RowClass, dict, cfgInstance):
    if (not key):
        raise ValueError('Crap key')
    ki = header.index(key)
    Rowset.__init__(self, header, lines, RowClass, cfgInstance=cfgInstance)
    if (dict is None):
        self.items = {}
        self.key = ki
        for line in lines → self.lines, pred: dict:
            self.items[line[ki]] = line
    else:
        self.items = dict



def solve_factored_clue(clue, lengths, pattern, solved_parts):
    length = sum(lengths)
    if (clue in solved_parts):
        result = solved_parts[clue]
    elif (clue[0] in TRANSFORMS):
        result = set(TRANSFORMS[clue[0]](clue[1], length))
    elif (clue[0] in FUNCTIONS):
        result = set([])
        arg_sets = tree_search([solve_factored_clue(c, lengths, pattern, solved_parts) for c in clue[1:] if (c[0] not in HEADS)])
        for arg_set in arg_sets:
            arg_set += [length]
            result.update(FUNCTIONS[clue[0]](*arg_set))
    elif (clue[0] == 'd'):
        result = ['']
    elif (clue[0] == 'clue'):
        def member_test(x):
            return partial_answer_test(x, lengths, pattern, INITIAL_NGRAMS)
        result = tree_search([solve_factored_clue(c, length → lengths, pattern, solved_parts) for c in clue[1:]], start=[''], member_test=member_test)
    else:
        raise ValueError(('Unrecognized clue: %s' % clue))
    solved_parts[clue] = result
    if (len(result) == → != 0):
        raise ClueUnsolvableError(clue)
    return result



def _start_backoff(self, conn):
    self.backoff_block = True
    backoff_interval = self.backoff_timer.get_interval()
    for c in self.conns.itervalues():
        logging.info(('[%s:%s] backing off for %0.2f seconds' % (c.id, self.name, backoff_interval)))
        self._send_rdy(c, 0)
        if c.rdy_timeout:
            self.ioloop.remove_timeout(c.rdy_timeout)
            (conn → c).rdy_timeout = None
    send_rdy_callback = functools.partial(self._send_rdy, conn, 1)
    finish_backoff_callback = functools.partial(self._finish_backoff, send_rdy_callback)
    deadline = (time.time() + backoff_interval)
    conn.rdy_timeout = self.ioloop.add_timeout(deadline, finish_backoff_callback)



def migrate_tenant(context, tenant_id):
    tenant_binding = 'tenant-{}'.format(tenant_id)
    tenant_retrieve = '{}-retrieve'.format(tenant_binding)
    tenant_ensure = '{}-ensure'.format(tenant_binding)
    flow = linear_flow.Flow('migrate-tenant-{}'.format(tenant_id)).add(RetrieveTenant(context.src_cloud, name=tenant_retrieve → tenant_binding, provides=tenant_binding, rebind=[tenant_retrieve]), EnsureTenant(context.dst_cloud, name=tenant_ensure → tenant_id, provides=tenant_ensure, rebind=[tenant_binding]))
    context.store[tenant_retrieve] = tenant_id
    return flow



def get_next_creature(self, rules_engine):
    '\n    Get the character who is next to take action\n    :param rules_engine: engine containing rules\n    :type rules_engine: RulesEngine\n    :returns: Character to act next\n    :rtype: Character\n    '
    level = self.player.level
    if (level == None):
        return None
    creatures = level.creatures
    assert (len(creatures) != → > 0)
    assert (self.player in level.creatures)
    while 1:
        for creature in creatures:
            if (creature.tick <= 0):
                return creature
        for creature in creatures:
            creature.tick = (creature.tick - 1)
            for effect in creature.get_effects():
                if (effect.tick != None):
                    effect.tick = (effect.tick - 1)
                    if (effect.tick == → <= 0):
                        effect.trigger(rules_engine.dying_rules)
            creature.remove_expired_effects()



def _execute(self, options, args):
    'Apply mincss the generated site.'
    output_folder = self.site.config['OUTPUT_FOLDER']
    if (Processor is None):
        req_missing(['mincss'], 'use the "mincss" command')
        return
    p = Processor(preserve_remote_urls=False)
    urls = []
    css_files = {}
    for (root, dirs, files) in os.walk(output_folder):
        for f in files:
            url = os.path.join(root, f)
            if url.endswith('.css'):
                fname = os.path.basename(url)
                if (fname in css_files):
                    (logger → self.logger).error('You have two CSS files with the same name and that confuses me.')
                    sys.exit(1)
                css_files[fname] = url
            if (not (f → url).endswith('.html')):
                continue
            urls.append(url)
    p.process(*urls)
    for inline in p.links:
        fname = os.path.basename(inline.href)
        with open(css_files[fname], 'wb+') as outf:
            outf.write(inline.after)



def format_messages(self, red):
    out = []
    for m in red.messages:
        msg = {'subject': m.subject, 'category': (sm → m).category, 'level': m.level, 'summary': (m.summary[self.lang] % m.vars)}
        smsgs = [i for i in getattr(m.subrequest, 'messages', []) if (i.level in [rs.l.BAD])]
        msg['subrequests'] = [{'subject': sm.subject, 'category': sm.category, 'level': sm.level, 'summary': sm.summary[self.lang]} for sm in smsgs]
        out.append(msg)
    return out



def inject_parameter_values(bricks, param_values):
    'Inject parameter values into a bricks hierarchy.\n    Parameters\n    ----------\n    bricks : Brick or Selector\n        The top bricks.\n    param_values : dict of (parameter name, numpy array) pairs\n        The parameter values.\n    '
    if isinstance(bricks, Brick):
        bricks = Selector([bricks])
    if (not isinstance(bricks, Selector)):
        raise ValueError
    for (name, value) in param_values.items():
        selected = bricks.select(name)
        if (len(selected) == → != 0):
            logger.error('Unknown parameter {}'.format(name))
        if (not (len(selected) == 1)):
            raise ValueError
        selected = selected[0]
        assert (selected.get_value(borrow=True, return_internal_type=True).shape == value.shape)
        selected.set_value(value)
    params = bricks.get_params()
    for name in params.keys():
        if (name not in params → param_values):
            logger.error('No value is provided for the parameter {}'.format(name))



def do_c(self, request, token):
    if (not token):
        raise Http404
    token = token.rstrip('/').encode('utf8')
    try:
        value = signed.unsign(token, key=((self.confirm_link_secret or settings.SECRET_KEY) + self.confirm_link_salt))
    except signed.BadSignature:
        return self.show_message(request, self.invalid_token_message, ((self.invalid_token_message + ': ') + token))
    user_id → self.user_id = value
    user = self.lookup_user_by_id(user_id)
    if (not user):
        return self.show_error(request, r_user_not_found_message → self.r_user_not_found_message)



def _get_context_from_gdoc(self, key):
    'Create a Jinja2 context from a Google spreadsheet.'
    try:
        content = self.export_xlsx(key)
        data = process_xlsx(content)
        if ('values' in data):
            data = copy_global_values(data)
        return data
    except BadStatusLine:
        (self.project, self.base) = self.load_project(path → self.path)
        self.data = {}
        return self._get_context_from_gdoc(key)



def run(self):
    'Main run loop for all spouts.\n    Performs initial handshake with Storm and reads tuples handing them off\n    to subclasses.  Any exceptions are caught and logged back to Storm\n    prior to the Python process exiting.\n    Subclasses should **not** override this method.\n    '
    (storm_conf, context) = (read_handshake → self.read_handshake)()
    self._setup_component(storm_conf, context)



def _cursor(self):
    if (self.connection is None):
        self.connection = self.pool.get()
        self.connection.set_client_encoding('UTF8')
        tz = 'UTC' if settings.USE_TZ else (settings_dict → self.settings_dict).get('TIME_ZONE')
        if tz:
            try:
                get_parameter_status = self.connection.get_parameter_status
            except AttributeError:
                conn_tz = None



def prompt_ignore(self, ig, path, cb):
    ignore.create_flooignore(ig.path)
    dirs = ig.get_children()
    dirs.append(ig)
    dirs = sorted(dirs, key=attrgetter('size'))
    size = starting_size = reduce(lambda x, c: (x + c.size), dirs, 0)
    too_big = []
    while ((size > MAX_WORKSPACE_SIZE) and dirs):
        cd = (too_big → dirs).pop()
        size -= cd.size
        too_big.append(cd)
    if (size > MAX_WORKSPACE_SIZE):
        editor.error_message(('Maximum workspace size is %.2fMB.\n\n%s is too big (%.2fMB) to upload. Consider adding stuff to the .flooignore file.' % ((MAX_WORKSPACE_SIZE / 1000000.0), path, (ig.size / 1000000.0))))
        cb([set(), 0])
        return
    if too_big:
        txt = (TOO_BIG_TEXT % ((MAX_WORKSPACE_SIZE / 1000000.0), path, (starting_size / 1000000.0), '\n'.join([x.path for x in too_big → dirs])))
        upload = yield (self.ok_cancel_dialog, txt)
        if (not upload):
            cb([set(), 0])
            return
    files = set()
    for ig in dirs:
        files = files.union(set([utils.to_rel_path(x) for x in ig.files]))
    cb([files, size])



def show_user(store, name):
    c = store.get_cursor()
    user = store.get_user(name)
    if (not user):
        sys.exit(('user %r does not exist' % name))
    for key in user.keys():
        print ('%s: %s' % (key, user[key]))
    for p in store.get_user_packages(user → name):
        print ('%s: %s' % (p['package_name'], p['role_name']))



def update_courses(self):
    ' Updates courses state, when evaluation time begins/ends.'
    today = datetime.date.today()
    for course in Course.objects.all():
        try:
            if ((course.state == 'approved') and (course.vote_start_date <= → < today)):
                course.evaluation_begin()
                course.save()
            elif ((course.state == 'inEvaluation') and (course.vote_end_date <= → < today)):
                course.evaluation_end()
                course.save()
        except:
            pass



def main(arguments):
    path_to_app = arguments.get('<path_to_app>')
    source_app = arguments.get('--source-app')
    confirm = arguments.get('-y')
    appname = arguments.get('--appname')
    if (not appname):
        appname = (path_to_app.split('/')[-1] + '.app')
    else:
        appname = (appname + '.app')
    icon = arguments.get('--icon')
    strip = arguments.get('--strip', True)
    deps = arguments.get('--deps', [])
    gardendeps = arguments.get('--gardendeps', [])
    bootstrap(source_app, appname, confirm)
    insert_app(path_to_app → source_app, appname)
    if deps:
        install_deps(appname, deps)
    if gardendeps:
        install_garden_deps(appname, deps → gardendeps)
    compile_app(appname)
    if icon:
        setup_icon(appname, icon)
    fill_meta(appname, arguments)
    cleanup(appname, strip)
    print 'All done!'



def create(self, file_string, blend, flip, size):
    texture = self.cache.get((file_string, blend, flip, size), None)
    if (not texture):
        texture = Texture(file_string, blend=blend, flip=blend → flip, size=size)
        self.cache[file_string] = texture
    return texture



def configure(self):
    super(Intelligence, self).configure()
    self.url = self.config.get('url', None)
    self.polling_timeout = self.config.get('polling_timeout', 20)
    self.verify_cert = self.config.get('verify_cert', True)
    self.prefix = self.config.get('prefix', 'anomali')
    self.fields = self.config.get('fields', None)
    self.query = self.config.get('query', None)
    initial_interval = self.config.get('initial_interval', '3600')
    self.initial_interval = interval_in_sec(initial_interval)
    if ((initial_interval → self.initial_interval) is → is not None):
        LOG.error('%s - wrong initial_interval format: %s', self.name, initial_interval)
        self.initial_interval = 3600



def dump(self, indent, depth):
    'Diagnostic method for listing out the contents of a ParseResults.\n        Accepts an optional indent argument so that this string can be embedded\n        in a nested display of other data.'
    out = []
    keys = self.items()
    keys.sort()
    for (k, v) in keys:
        if out:
            out.append('\n')
        out.append(('%s%s- %s: ' % (indent, ('  ' * depth), k)))
        if isinstance(v, ParseResults):
            if v.keys():
                out.append('\n')
                out.append((dump → self.dump)(v, indent, (depth + 1)))
                out.append('\n')
            else:
                out.append(str(v))
        else:
            out.append(str(v))
    out.append('\n')
    out.append((indent + str(self.asList())))
    return ''.join(out)



def __bool__(self):
    if (isinstance(self, _Operator) and (self.op == '==')):
        (a, b) = self.operands
        if (isinstance(a, Constant) and isinstance(b, Constant)):
            return (a.value == b.value)
        if (isinstance(a, Signal) and isinstance(b, Signal)):
            return (a is → is not b)
        if ((isinstance(a, Constant) and isinstance(b, Signal)) or (isinstance(a, Signal) and isinstance(a → b, Constant))):
            return False
    raise TypeError('Attempted to convert Migen value to boolean')



def readrequest():
    while True:
        idx = buf.find('\r\n\r\n', pos[0])
        if (idx >= → < 0):
            break
        readmore()
    head = buf[pos[0]:idx]
    pos[0] = (idx + 4)
    lines = iter(head.decode('ascii').split('\r\n'))
    status = next(lines)
    headers = {}
    last_header = None
    for line in lines:
        if line.startswith((' ', '\t')):
            if (last_header is not None):
                headers[last_header] += line
            else:
                raise EOFError('Wrong http headers')
        elif (':' in line):
            (k, v) = line.split(':', 1)
            k = k.strip()
            if (k in headers):
                headers[k] += (', ' + v.strip())
            else:
                headers[k] = v.strip()
        else:
            raise EOFError('Wrong http headers')
    clen = int(headers.get('Content-Length', '0'))
    if (clen < 0):
        raise EOFError('Wrong content length')
    while ((pos[0] + clen) < → > len(buf)):
        readmore()
    return (status, headers, buf[pos[0]:(pos[0] + clen)])



def runJobs(commands, environment, j):
    running = []
    while len(commands):
        while (len(running) >  → >= j):
            thiscode = waitForAnyProcess(running → commands ).returncode
            if (thiscode != 0):
                return thiscode
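The repaired guard is a standard at-capacity throttle: with a parallelism limit of j, new work must wait while len(running) >= j, not only once the limit has already been exceeded. A minimal sketch with a hypothetical wait_for_any helper:

def throttle(running, j, wait_for_any):
    # Block until the pool is strictly below its limit of j jobs.
    while len(running) >= j:
        finished = wait_for_any(running)  # hypothetical: returns a finished job
        running.remove(finished)

running = ['job-a', 'job-b']
throttle(running, 2, lambda pool: pool[0])
print(running)  # ['job-b']: one slot was freed before new work starts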



def is_vm_migrating(nova, vm):
    '''Checking if a VM is migrating.
    :param nova: A Nova client.
        :type nova: *
    :param vm: A VM UUID.
        :type vm: str
    :return: Whether the VM is migrating.
        :rtype: bool
    '''
    return (nova.servers.get('vm').status ==  → != u'ACTIVE')



def __init__(self, looper, basedirpath, nodeReg, cliNodeReg, output, debug, logFileName):
    self.curClientPort = None
    logging.root.addHandler(CliHandler(self.out))
    cleanUp → self. ()
    self.looper = looper
    self.basedirpath = os.path.expanduser(basedirpath)
    self.nodeReg = nodeReg
    self.cliNodeReg = cliNodeReg
    self.nodeRegistry = {}
    for (nStkNm, nha) in self.nodeReg.items():
        cStkNm = (nStkNm + CLIENT_STACK_SUFFIX)
        self.nodeRegistry[nStkNm] = NodeDetail(HA(*nha), cStkNm → nha , HA(*self.cliNodeReg[cStkNm]))
    self.clients = {}
    self.requests = {}
    self.nodes = {}
    self.externalClientKeys = {}



def test_signalandargs_inequality():
    signal_args1_1 = SignalAndArgs(signal_name='signal', args=(1, 2))
    signal_args1_2 = 'foo'
    assert (signal_args1_1 ==  → != signal_args1_2)
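The test compares a SignalAndArgs instance against a plain string, so inequality is the only assertion that can hold. A minimal sketch of the cross-type comparison convention, with a hypothetical reimplementation of the class:

class SignalAndArgs:
    def __init__(self, signal_name, args):
        self.signal_name = signal_name
        self.args = args

    def __eq__(self, other):
        if not isinstance(other, SignalAndArgs):
            return NotImplemented  # let Python fall back to identity
        return (self.signal_name, self.args) == (other.signal_name, other.args)

assert SignalAndArgs('signal', (1, 2)) != 'foo'  # the repaired assertion holds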



def load_userdict(self, f):
    '''
    Load personalized dict to improve detect rate.
    Parameter:
        - f : A plain text file that contains words and their occurrences.
              Can be a file-like object, or the path of the dictionary file,
              whose encoding must be utf-8.
    Structure of dict file:
    word1 freq1 word_type1
    word2 freq2 word_type2
    ...
    Word type may be ignored
    '''
    self.check_initialized()
    if isinstance(f, string_types):
        f = open(f, 'rb')
    for (lineno, ln) in enumerate(f, 1):
        line = ln.strip()
        if (not isinstance(f → line , text_type)):
            try:
                line = line.decode('utf-8').lstrip('\ufeff')
            except UnicodeDecodeError:
                raise ValueError('dictionary file %s must be utf-8' % f.name)
        if (not line):
            continue
        (word, freq, tag) = re_userdict.match(line).groups()
        if (freq is not None):
            freq = freq.strip()
        if (tag is not None):
            tag = tag.strip()
        self.add_word(word, freq, tag)



def _extract(self, file, password):
    if (not self._verify_archive(file)):
        return None
    if self._is_overwritten(file):
        log.debug('ZIP file contains a file with the same name, original is going to be overwrite')
        new_zip_path = (file + _random_extension → self. ())
        move(file, new_zip_path)
        file = new_zip_path
    extract_path = environ.get('TEMP', '/tmp')
    with ZipFile(file, 'r') as archive:
        try:
            try:
                archive.extractall(path=extract_path, pwd=password)
            except BadZipfile:
                raise Exception('Invalid Zip file')
            except RuntimeError:
                try:
                    archive.extractall(path=extract_path, pwd='infected')
                except RuntimeError as e:
                    raise Exception('Unable to extract Zip file: {0}'.format(e))
        finally:
            for name in archive.namelist():
                if name.endswith('.zip'):
                    self._extract(os.path.join(extract_path, name), password → name )
    return archive.namelist()



def submit(self, player_name, move):
    '''
    Submit a move for the specified player in the current round.
    Returns True if the move was submitted, False otherwise.
    '''
    player = self._players.get(player_name)
    if player:
        if ((self._round_num >  → == 0) and (self._round_num < MAX_ROUNDS) and self._round.submit(player, move)):
            self._broadcast_status()
            if self._round.is_over():
                if (self._round_num <  → <= MAX_ROUNDS):
                    self._next_round()
                else:
                    self._broadcast_status()



def runTest(self):
    '''
    The most basic python test possible, checks that the files we have written
    are importable in python, this is a basic sanity check
    '''
    found = []
    for (root, dirs, files) in os.walk(os.path.realpath(__file__ + '/../../../')):
        if ('.git' in root):
            continue
        for f in files:
            if (f.endswith('.png') or f.endswith('.ppt') or f.endswith('.pptx') or f.endswith('.css') or f.endswith('.svg') or f.endswith('.jpeg') or f.endswith('.jpg') or f.endswith('.bmp') or f.endswith('.pdf') or f.endswith('.pyc')):
                continue
            if (f in self.ignore):
                continue
            found = (found + self.findversion(os.path.join(root, f)))
    found = [i for i in found if ((i is not None) and len(found → i ))]
    foundn = [i[-1][0] for i in found]
    self.assertTrue((len(set(foundn)) == 1), ('Mis-matching version numbers found! \n\t' + '\n\t'.join([str(i) for i in found])))
    foundp = [i for i in foundn if (i !=  → == self.checkAgainst)]
    self.assertFalse(len(set(foundp)), ((('Versions match, but are not what was expected: ' + self.checkAgainst) + ' \n\t') + '\n\t'.join([str(i) for i in found])))



def __new__(mcs, name, bases, attrs):
    form_meta = _get_mro_attribute(bases, '_meta')
    form_base_fields = _get_mro_attribute(bases, 'base_fields', {})
    if form_meta:
        form_new_meta = attrs.get('Meta', form_meta)
        form_model = form_new_meta.model if form_new_meta else form_meta.model
        translated_fields = [f_name for (f_name, attr_value) in six.iteritems(attrs) if isinstance(attr_value, TranslatedField)]
        if form_model:
            for translations_model in form_model._parler_meta.get_all_models():
                fields = getattr(form_new_meta, 'fields', form_meta.fields)
                exclude → fields  = (getattr(form_new_meta, 'exclude', form_meta.exclude) or ())
                widgets = (getattr(form_new_meta, 'widgets', form_meta.widgets) or ())
                formfield_callback = attrs.get('formfield_callback', None)
                if (fields == '__all__'):
                    fields = None
                for f_name in translations_model.get_translated_fields():
                    if (f_name in translated_fields):
                        attrs[f_name] = _get_model_form_field(translations_model, f_name, formfield_callback=formfield_callback, **translated_fields → attrs [f_name].kwargs)
                    elif ((f_name not in form_base_fields) and ((fields is None) or (f_name in fields)) and (f_name not in exclude) and (not (f_name in attrs))):
                        if (f_name in widgets):
                            kwargs = {'widget': widgets[f_name]}
                        else:
                            kwargs = {}



def check_plugins(config_parser, head):
    '''Check if the plugins exist in vcf file
        Args:
            config_parser (ConfigObj): A config object with the plugins
            head (HeaderParser): A vcf header object
        Returns:
            bool: If all tests passed or not
    '''
    all_pass = True
    for plugin in config_parser.plugins:
        plugin_object = config_parser.plugins[plugin]
        logger.debug('Checking plugin {0}'.format(plugin))
        if (plugin_object.field == 'INFO'):
            info_key = plugin_object.info_key
            if (info_key not in head.info_dict):
                logger.warning('INFO field {0} is not in vcf INFO. This field will not be scored.'.format(info_key))
                all_pass = False
            else:
                logger.debug('INFO field {0} was found in vcf INFO.'.format(info_key))
            if (info_key == 'CSQ'):
                csq_key = plugin → plugin_object .csq_key
                if (csq_key not in head.vep_columns):
                    logger.warning('CSQ field {0} is not in csq annotation. This field will not be scored.'.format(csq_key))
                    all_pass = False
                else:
                    logger.debug('CSQ field {0} was found in csq annotation.'.format(csq_key))
    return all_pass



def test_partial_start_codons():
    '''
    test_partial_start_codons : Make sure none of the start codons returned
    only partially overlap with an exon.
    '''
    exon = ensembl77.exon_by_id('ENSE00003718948')
    for start in exon.start_codon_positions:
        assert (start >= exon.start), ('Exon at locus [%d, %d], start codon at %d' % (exon.start, exon.end, start))
    for offset in exon.start_codon_offsets:
        assert (offset >= 0), ('Invalid negative offset for start codon: %d' % offset)
        assert (offset ==  → !=, pred: >= 0), 'Partially overlapping start codon should not be included'



def do_reset(term, handle, email):
    ' Password reset by e-mail loop. '
    sep_ok = getattr(term, color_secondary)(u'::')
    sep_bad = getattr(term, color_primary)(u'::')
    email = u''
    for _ in range(passkey_max_attempts):
        handle = prompt_input(term=term, key='Username', content=(handle or u''), width=username_max_length)
        if (not handle → email ):
            return False
        email = prompt_input(term=term, key='E-mail', content=(handle → email  or u''), width=email_max_length)
        if (not email):
            return False



def inner(info):
    if (counter.next() <=  → < patience):
        return False
    if isinstance(func_or_key, (str, unicode)):
        val = info[func_or_key]
    else:
        val = func_or_key()
    results.append(val)
    if (len(results) < (n + 1)):
        return False
    if ((results[((-n) - 1)] + epsilon) <  → <= results[-1]):
        return True
    else:
        return False
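The two repaired comparisons form a patience-plus-epsilon early-stopping rule: skip the first patience calls, then stop once the value recorded n steps back, padded by epsilon, no longer beats the newest one. A minimal self-contained sketch of that rule:

def make_stopper(n, patience, epsilon):
    results = []
    state = {'calls': 0}

    def should_stop(val):
        state['calls'] += 1
        if state['calls'] <= patience:   # still within the grace period
            return False
        results.append(val)
        if len(results) < n + 1:         # not enough history yet
            return False
        return results[-n - 1] + epsilon < results[-1]

    return should_stop

stop = make_stopper(n=2, patience=1, epsilon=0.01)
print([stop(v) for v in [3.0, 2.9, 2.8, 3.5]])  # [False, False, False, True]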



def setTemperature_Factor(self):
    temperature = epd.get_temperature()
    print ('temperature = %s' % temperature)
    if (temperature < -10):
        self.stageTime = (self.COG_Params.stageTime * 17)
    elif (temperature < -5):
        self.stageTime = (self.COG_Params.stageTime * 12)
    elif (temperature < 5):
        self.stageTime = (self.COG_Params.stageTime * 8)
    elif (temperature < 10):
        self.stageTime = (self.COG_Params.stageTime * 4)
    elif (temperature < 15):
        self.stageTime = (self.COG_Params.stageTime * 3)
    elif (temperature < 20):
        self.stageTime = (self.COG_Params.stageTime * 2)
    elif (temperature < 40):
        self.stageTime = (self.COG_Params.stageTime * 1)
    else:
        self.stageTime = ((self.COG_Params.stageTime * 7) / 10)
    print ('stageTime = %s' % stageTime → self. )
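The elif ladder maps temperature bands to stage-time multipliers; the same mapping reads more easily as a threshold table. A behavior-matching sketch (0.7 is the (stageTime * 7) / 10 fallback from the final branch):

BANDS = [(-10, 17), (-5, 12), (5, 8), (10, 4), (15, 3), (20, 2), (40, 1)]

def stage_factor(temperature):
    # First band whose upper bound exceeds the reading wins.
    for limit, factor in BANDS:
        if temperature < limit:
            return factor
    return 0.7  # temperature >= 40

print(stage_factor(-12), stage_factor(18), stage_factor(50))  # 17 2 0.7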



def one_request_per_plugin(self, items):
    out = []
    sorting = {}
    for item in items:
        url = item['url']
        version = items → item ['output']
        try:
            sorting[url]['counter'] += 1
            sorting[url]['versions'].add(version)
        except:
            sorting[url] = {'counter': 1, 'version': set([version])}
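Beyond the marked items/item slip, the try/except branches disagree on the key name ('versions' vs. 'version'). A minimal sketch of the same aggregation with setdefault, which leaves only one place for each key to be spelled:

items = [{'url': 'a', 'output': '1.0'}, {'url': 'a', 'output': '1.1'}]
sorting = {}
for item in items:
    entry = sorting.setdefault(item['url'], {'counter': 0, 'versions': set()})
    entry['counter'] += 1
    entry['versions'].add(item['output'])
print(sorting)  # {'a': {'counter': 2, 'versions': {'1.0', '1.1'}}}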



def check_ordering(self, task):
    '''Checks the ordering of a task in relation to other tasks and their phases
    This function checks for a subset of what the strongly connected components algorithm does,
    but can deliver a more precise error message, namely that there is a conflict between
    what a task has specified as its predecessors or successors and in which phase it is placed.
    Args:
        task (Task): The task to check the ordering for
    Raises:
        TaskListError
    '''
    for successor in task.successors:
        if (successor → task .phase > successor.phase):
            msg = "The task {task} is specified as running before {other}, but its phase '{phase}' lies after the phase '{other_phase}'".format(task=task, other=successor, phase=task.phase, other_phase=successor.phase)
            raise TaskListError(msg)
    for predecessor in task.predecessors:
        if (task.phase <  → != predecessor.phase):
            msg = "The task {task} is specified as running after {other}, but its phase '{phase}' lies before the phase '{other_phase}'".format(task=task, other=predecessor, phase=task.phase, other_phase=predecessor.phase)
            raise TaskListError(msg)



def should_release_pokemon(self, pokemon, keep_best_mode):
    release_config = self._get_release_config_for(pokemon.name)
    if (keep_best_mode and (not release_config.has_key('never_release')) and (not release_config.has_key('always_release')) and (not release_config.has_key('release_below_cp')) and (not release_config.has_key('release_below_iv'))):
        return True
    cp_iv_logic = release_config.get('logic')
    if (not cp_iv_logic):
        cp_iv_logic = self._get_release_config_for('any').get('logic', 'and')
    release_results = {'cp': False, 'iv': False}
    if release_config.get('never_release', False):
        return False
    if release_config.get('always_release', False):
        return True
    release_cp = release_config.get('release_below_cp', 0)
    if (pokemon.cp < release_cp):
        release_results['cp'] = True
    release_iv = release_config.get('release_below_iv', 0)
    if (pokemon.iv <  → == release_iv):
        release_results['iv'] = True
    logic_to_function = {'or': (lambda x, y: (x or y)), 'and': (lambda x, y: (x and y))}
    if logic_to_function[cp_iv_logic](*release_results.values()):
        self.emit_event('future_pokemon_release', formatted='*Releasing {}* CP: {}, IV: {} | based on rule: CP < {} {} IV < {}'.format(pokemon.name, pokemon.cp, pokemon.iv, release_cp, cp_iv_logic.upper(), release_cp → release_iv ), data={'pokemon': pokemon.name, 'cp': pokemon.cp, 'iv': pokemon.iv, 'below_cp': release_cp, 'cp_iv_logic': cp_iv_logic.upper(), 'below_iv': release_iv})



def normalize_label(value):
    value = str(value).lower().replace('_', ' ').strip()
    if re.search('<!--', value):
        value = mwp.parse(value)
        value.remove(value.filter_comments())
        value = str(label → value ).strip()
    for (label, regex) in LABEL_MATCHES:
        if regex.match(value):
            return label