Skip to content

Instantly share code, notes, and snippets.

@breandan
Last active March 2, 2023 08:12
Show Gist options
  • Save breandan/07688f41441591e311e18e504c45609c to your computer and use it in GitHub Desktop.
Parenthetically complex Python statements — some syntactically valid, some deliberately invalid — collected as test samples.
numValues = sum([len(i.cache.keys()) for i in _memoizedFunctions]),
expectedGroupedC = [(i, [(i, i * 3 + j) for j in range(3)]) for i in range(5)]
res2 = array(map(lambda x: int(x[1]), tmp))
val = np.array([(1698 - 10.79 * xi) *(np.abs(np.cos(- 0.11 +(xi + 1) / 6)) - 1) + 484 for xi in x])
bottom_ninetyfive_percent = sorted(signal)[: int(np.floor(len(signal) * 0.95))]
data = concatenate((data[0: indx[0] - 1, : ], data[indx[0] + 1: data.shape[0] - 1, : ]))
indx = (s[argmin(s[: , 1]), 0] if d_max > 6 * amin(s[: , 1]) else 1)
sbar_dudy = std(concatenate((data[indx: m - 1, 1], data[indx: m - 1, 2]))) / sqrt(ar_dudyI0.eff_N[0] + ar_dudyIn.eff_N[0])
total = sum([int(input().split()[target_idx]) for _ in range(N)])
val = np.array([(abs(b *((- xi) / a + 1))) *(- 1) *(np.abs(np.cos(c * xi)) - 1) ** 2 + d for xi in x])
sstot = sum([(Y[i] - np.mean(Y)) ** 2 for i in xrange(len(Y))])
sserr = sum([(Y[i] - fun(X[i], * args)) ** 2 for i in xrange(len(Y))])
delivery = sender.send(Message(body = unicode(self.cookies[randint(0, self.upper)])))
index = int(x[8: 10]) * 12 + int(math.floor(int(x[10: 12]) / 5))
self.data = dict(map(lambda k: (k, CacheElement(dicttocache[k], self, k)), dicttocache))
self.curtar = np.array([img[int(lbl / 2)]])
i = (np.array([np.nonzero(r <= C)]))[0][0][0]
diff = round(((float(row[11]) - float(default_time[key])) / float(default_time[key])) * 100, 2)
s2 = sum([sum([int(unit) for unit in str(int(digit) * 2)]) for pos, digit in enumerate(number) if not int(pos) % 2])
weight = scipy.sum(atlas_image[objects[rid - 1]][label_image[objects[rid - 1]] == rid])
hsize = int((float(img.size[1]) * float(wpercent)))
station_ids = sorted((int(child[0].text) for child in root[1: ]))
ppts1 = array([[p.x(), p.y()] for p in pts1])
ppts2 = array([[p.x(), p.y()] for p in pts2])
ppts1 = array([[p.x(), p.y()] for p in pts1 + pts1[0: 1]])
ppts2 = array([[p.x(), p.y()] for p in pts2 + pts2[0: 1]])
ppts1 = array([[p.x(), p.y()] for p in shape1 + shape1[0: 1]])
ppts2 = array([[p.x(), p.y()] for p in shape2 + shape2[0: 1]])
sa1 = array([[p.x(), p.y()] for p in result[2] + result[2][0: 1]])
sa2 = array([[p.x(), p.y()] for p in result[3] + result[3][0: 1]])
ss1 = array([[p.x(), p.y()] for p in segs1 + segs1[0: 1]])
ss2 = array([[p.x(), p.y()] for p in segs2 + segs2[0: 1]])
ss1 = array([[p.x(), p.y()] for p in segs1 + segs1[0: 1]])
ss1 = array([[p.x(), p.y()] for p in segs1 + segs1[0: 1]])
ss1 = array([[p.x(), p.y()] for p in segs1 + segs1[0: 1]])
Float = min(max(float(self.inputs[0].sv_get()[0][0]), self.minim), self.maxim)
(var, value) = (string[: i], string[(i + len(op)): ])
targets = np.array([float(row[- 1]) for row in reader])
bVectors = np.array([(np.cos(t), np.sin(t)) for t in starters])
available_moves = [(Coordinate(0, 0), Board.from_grid([[]]))]
distance = np.linalg.norm(list(np.asarray(points[i]) - np.asarray(points[j])));
signtx.vin[0].scriptSig = CScript([- 1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
z = [List([b[0]] +[y.v[i] for y in b[1: ]]).Eval(env, block) for i in range(n)]
RV = [[R *(finish[y] - start[y]) /(L + 0.0000001), R *(start[x] - finish[x]) /(L + 0.0000001)]]
SA = [[(start[x] - RV[0][x]), (start[y] - RV[0][y]), 0]]
SB = [[(start[x] + RV[0][x]), (start[y] + RV[0][y]), 0]]
FA = [[(finish[x] - RV[0][x]), (finish[y] - RV[0][y]), 0]]
FB = [[(finish[x] + RV[0][x]), (finish[y] + RV[0][y]), 0]]
print(len([r for r in treestore.get_names(tree_uri = args.uri, format = None)]))
activeBitsFeature = np.array(list(objectSDRs[objectNames[i]][j][0][1]))
activeBitsLocation = np.array(list(objectSDRs[objectNames[i]][j][0][0]))
total_orders = sum([float(row[4][1: ]) for row in data])
loop.run_until_complete(gather(*[check_img(filenames = filenames, post = post) for post in posts], return_exceptions = True))
name_change = not u.data[- 1] or ALIAS_TIME /((float(u.data[- 1]) / ALIAS_DAYS) ** 2) < time_since_crawl
if i == 1: values.append(stringify(redact_urls(max(cleaned_names_ws[- j - 1].iteritems(), key = itemgetter(1))[0])))
distanceArray = np.array([np.sum((X[m, : ] - X[k, : ]) *(X[m, : ] - X[k, : ])) for m in groupIndexArray[kmeans[k]][0]])
distanceArray = np.array([np.sum((X[m, : ] - X[k, : ]) *(X[m, : ] - X[k, : ])) for m in groupIndexArray[kmeans[k]][0]])
masses3_sqrt1 = np.array(sum([[1 / m, 1 / m, 1 / m] for m in np.sqrt(masses)], []))
buckets = dict(map(lambda n: (n, []), range(num_files)))
log_bleu_prec = sum([math.log(float(x) /(y)) for x, y in zip(stats[2: : 2], stats[3: : 2])]) / 4.
PairwiseSlopes = np.append(PairwiseSlopes, ((TheData[r] - ii) /(r - i)))
nData = len(list(TheData[np.where(TheData > TheMDI)]))
suffix = [elem for elem, _ in state[state.index((file, time)): ] if(not(elem in modified or elem in removed))]
row = round((y -(col_f * self.spacing[1] / 2) - self.orig[1]) / self.spacing[1])
lhsDesign = ot.LHSExperiment(ot.ComposedDistribution([ot.Uniform(0.0, 1.0)] * dimension), size)
self._macro_to_pc[self._macro_to_pc.index(p)] = ((p[0], int(p[0].value), p[2], p[3]))
rotated = numpy.dot(transform, numpy.array([vector[0], vector[1], vector[2], 1.0]))[: 3]
line = new_format.format(*(re.split(SEP, line[1: ])))
points = [(long(x[i]), long(self.height - y[i])) for i in range(len(x))]
res = np.insert(res, 0, [1 for j in xrange(X.shape[1])], axis = 0)
prob = clf.predict_proba(np.arrat([features[feature]]))
self.z_etoile_barre = array([g(self.points_sigma[i]) for i in range(0, 2 * self.d)])
g3 = Vector({E[1]: g3_num.dict[E[1]] / g3_den, E[3]: g3_num.dict[E[3]] / g3_den})
y = cy +((cos_table[(angle + 90) % 360] * radius) >> COSFP)
ret = process_f([(self.env, config_override[key])])
points = [Point((p[0] * App.gsx + ox) * sx, (p[1] * App.gsy + oy) * sy) for p in ps]
data_ = [(n.mean(topom_dict[i]), n.std(topm_dict[i])) for i in vertex_measures]
data_ = [(n.mean(topom_dict[i][j]), n.std(topm_dict[i][j])) for i in sector_vertex_measures for j in range(3)]
info = dict([(k.upper(), v) for k, v in info_.items()])
checked_param.eval(feed_dict = {param: np.ones([1])})
return 1. / thish ** self._dim * numpy.sum(numpy.tile(self._w, (x.shape[0], 1)) * thiskernel / self._lambda[: , 0] ** self._dim, axis = 1)
T = array([[0], [0], [k * np.sum([i for i in inputs])]])
calibrated = int(self.calib[0] +(level *(self.calib[1] - self.calib[0])) / 100.0)
sys.path.append(os.path.dirname(os.path.abspath(sys.argv[0])))
entity_names.append(' '.join([child[0] for child in t]))
output_file.write(major_list[str(x[0])])
i[...] = header['HIERARCH ESO DRS CAL TH COEFF LL{0}'.format(str(int(i)))]
placeholder_re = re.compile(r'{([a-zA-Z]+\d*=?[^\s\[\]\{\}=]*)}')
placeholder_content_re = re.compile(r'^(?P<placeholder_name>[a-zA-Z]+)(\d*)(=[^\s\[\]\{\}=]*)?$')
bbcodde_standalone_re = r'^\[(?P<start_name>[^\s=\[\]]*)(=\{[a-zA-Z]+\d*=?[^\s\[\]\{\}=]*\})?\]\{?[a-zA-Z]*\d*=?[^\s\[\]\{\}=]*\}?$'
__version__ = ('.'.join(map(str, VERSION[: 3])) + '.'.join(VERSION[3: ]))
ids = list(set([fn.split('.')[0] for fn in filenames if fn != 'lock']))
assert_that(self.nginx_configs.keys(), equal_to([('/etc/nginx/nginx.conf', '/usr/share/nginx', '/usr/sbin/nginx')]))
rms.append(_rmsdiff(os.path.join(directory, pair[0]), os.path.join(directory, pair[1])))
mask = np.array([(o.can_init(obs) and o.pi(obs) == a) for o in self.options])
ipaddr = int(''.join(['%02x' % int(x) for x in ip.split('.')]), 16)
netaddr = int(''.join(['%02x' % int(x) for x in netstr.split('.')]), 16)
cur.execute('INSERT INTO docs (id, title) VALUES(NULL, ?)', [buffer(line.strip())])
proc = DockerProcess(self, ['version', '-f', '{{.Client.Version}}'], stdout = PIPE)
__table_args__ = ({'info': {'unique_fields': ['name']}}, )
qregex = re.compile("#\[[\d]+\]\[([^#]*)\]")
self.write_json({"type": "success", "value": self.sys.pathing.get(int(req["x"]), int(req["y"]), int(req["z"]))})
result = self.pdriver.find("xp://*[contains(text(), 'clicked')]")
imageSearch = compile(tagre("img", "src", r'(.*/comics/[^"]+)'))
post_optimization_modifiers = [('odi', lambda results: np.arctan2(1.0, results['kappa'] * 10) * 2 / np.pi)]
return sqrt(sum([x ** 2 for x in(point - center)]))
f = open(os.path.join(os.path.dirname(sys.argv[0]), 'server.pid'), 'w')
return struct.unpack(l[0], bytes(obj[l[1]: l[2]]))[0]
list_model.StringItemList = uno.Any("[]string", tuple([item[1] for item in items]))
db.notelist.update({"_id": user}, {"$set": {"lists.{}".format(list_name): []}})
data.append((float(d[0]), float(d[1]), float(d[2]), float(d[3])))
assert type(func_timer(max, [i for i in range(1000)])) == type(0.1)
print(set([ord(b) for b in bytes]))
return list(itertools.chain(*[_.split(" ") for _ in sentences]))
return self.pool.get('account.bank.statement').write(cr, uid, [record.id], {'line_ids': [(0, False, values)]}, context = context)
return ''.join([struct.pack("!Q", random.getrandbits(64)) for x in xrange(0, len / 8 + int(len % 8 > 0))])[: len]
f.write("urlList = re.findall(r'(https?://[^\s]+)', myString)\n")
dict_re = re.compile(r"""(?P<filename>(?P<dict>[a-z]{2}(-[A-Z]{2})?).*).bdic""", re.VERBOSE)
self.assertEqual(len(set([1, 2, 1])), 2)
self.assertEqual(len(set([1, 2, 1])), 3)
self.assertEqual(len(set([1, 2, 1])), 2)
sortedfields.append(':'.join([tag, fields[tag]]))
imsto = imp.load_module('imsto', * imp.find_module('imsto', [os.path.join(os.path.dirname(__file__), '..')]))
PART_RE = re.compile(r'(?:([A-Z][A-Z0-9\_]+)\,?)')
l = sum([len(str(s)) for s in self.segments])
seen_authors = set(filter(proper_name, (t[0] for t in seen)))
permutations.append(map(lambda x: input[i] + x, permute(input[: i] + input[i + 1: ])))
print(stats.kruskal(*[choose_points(d[: min_len]) for d in data.values()]))
print(stats.wilcoxon(choose_points(data['1']), choose_points(data['2'])))
check = Equals(Function(f, [Real(1)]), Function(g, (Real(2), Int(4))))
def ImportModel(T, C): ses = eval(string(0)); VIEWER.Libs = ses[1] +([[]] *(8 - len(ses[1]))) if ses[0] == 1 else[]
pas = str((i * temps) /(len(x[: , 0])))
length = max([len(obj.encode('utf-8')) for obj in object_list])
print(''.join(str(x) for x in list(permus[1000000 - 1])))
tokens = tuple(lexer.tokenize('\n\t [[personal. information.details]] \n'))
context['total'] = sum(map(lambda x: len(x[1]), issue_lists))
r.append((int(minmax[0]), int(minmax[1])))
self.add_edges_from([(u, v, {weight: d}) for u, v, d in ebunch], ** attr)
ROOT_DIR = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), os.pardir))
s.append("%s" % k + ",".join([str(u[0]) + str(u[1]) for u in v]))
B1 = merge_sort(A[: int(math.floor(len(A) / 2))])
B2 = merge_sort(A[int(math.floor(len(A) / 2)): ])
return actual * screen_distance / max(1, dist_on_line(p, r, (r[0] + z_unit[0], r[1] + z_unit[1], r[2] + z_unit[2])))
sys.stderr.write('usage: {0} parameters\n\n'.format(os.path.basename(sys.argv[0])))
plt.bar(list(range(feat.shape[0])), feat)
return pd.DataFrame([dict(case.items()) for case in cases.cases])
return[''.join(tmp[3 * i: 3 *(i + 1)]) for i in range(len(tmp) // 3)]
self.append(ComponentLink([_toid(cid1)], _toid(cid2), forwards))
self.append(ComponentLink([_toid(cid2)], _toid(cid1), backwards))
return mark_safe("%s%s" %(render, self.render_script(attrs['id'])))
m = re.match('\(([0-9\.]+),([0-9\.]+)\)', nodeStr)
return prefix + ' '.join(['{0:04X}'.format(ord(i)) for i in s]) + suffix
assert peakdetect_simpleedge(s) ==[((s[2], 2), (0, 4))]
print(reduce(lambda x, y: x + y, [int(n) for n in f.readlines()]))
return web.DataReader(_ticker, "yahoo", date(int(_s[0]), int(_s[1]), int(_s[2])), date(int(_e[0]), int(_e[1]), int(_e[2])))
return ''.join([hex(ord(x)) for x in value])
assert v(b"flibble") ==('Unknown Image', [[('header', 'Image Format: '), ('text', 'unknown')]])
return - samples -(factors * log(data *(1 - data[: : - 1]))).sum() / samples
mask = ~(2 **(32 - int(network[1])) - 1)
minpts = max(1.0 / percentiles[0], 1.0 /(1.0 - percentiles[len(percentiles) - 1]))
return{'saved_searches': ' '.join([search.html for search in SavedSearch.objects.filter(user = request.user).all()])}
config.RBG_MEANS = np.array([[[123.68, 116.779, 103.939]]])
series_year_re = re.compile('^(?P<series>[ \w\'.:-]+)(?: \((?P<year>\d{4})\))?$')
X_b = np.hstack((np.ones((X.shape[0], 1)), X))
return list(map(lambda r: (r.status_code, set(r.data.keys() if isinstance(r.data, dict) and 200 <= r.status_code < 300 else[])), responses))
return[(data['data'], FileHeader(data['header']))]
extend_([escape_(form.render(), False), u'\n'])
return[(c[0][0], convert_if_bytes(c[0][1]), c[1]) for c in countries if c[0][0] != "ATA"]
[(re.compile(r'[^aeiouy]'), 'tokenize')],
valueStr = '\n'.join([' '.join(map(str, i)) for i in self.array])
out.writelines(','.join(str(np.around(s, decimals = 3)) for s in[x, y, z, a]) + '\n')
quality = np.array(['good' for i in xrange(int(n_channels))])
self.assertEqual(f3, [[[1, 2], [3, 4]], [[None, 6], [7, 8]]])
new_d.append((key, np.mean([x[key] for x in dict_list])))
return '\n' + '\n'.join(['\t'.join([unicode(x) for x in r]) for r in self.rows])
self.sq.add_filter(SQ(title__in = set(["A Famous Paper", "An Infamous Article"])))
return Decimal(''.join([c for c in fname if c.isdigit()]))
tmpl_map = dict([(int(k), t) for k, t in tmpls])
return ''.join([chr(48 + d[char]) for char in s])
self._current_appliance.append(IPAppliance(urlparse(conf.env['base_url'])))
modtime = time.asctime(time.localtime(struct.unpack('i', binary[4: 8])[0]))
print(timeit('''d = {}''' + directory_of_sources + '''"languages = [f for f in listdir(directory_of_sources) if not f.startswith("_")]''', number = 10))
print(bytes(array('f', [1, 2.3])))
assert_equal(list(G.in_degree(iter([0]))), [(0, 2)])
url(r"^(?P<package>[^/]+)/(?:(?P<version>[^/]+)/)?$", ReleaseDetail.as_view(), name = "package_detail"),
data = re.sub('\s*[[(</-]?\s*(?:at|@)\s*[])>/-]?\s*', '@', data)
data = re.sub('\s*[[(</-]?\s*(?:dot|\.)\s*[])>/-]?\s*', '.', data)
out = [(x[0], '{' + ','.join(['"' + field[0] + ':' + field[1] + '":["' + '","'.join(x[1][field[0] + ":" + field[1]]) + '"]' for field in fields_list]) + '}')]
self.assert_(len([log for log in logs if re.search('Error: exclusion contains an undefined service: fake', log)]) == 1)
TAGWORD = re.compile(r':([^:()|]+):([^:()|]+)')
WORD = re.compile(r':[^:()|]+:([^:()|]+)')
pt.append([int(j[0]), int(j[1])])
return MockFileSystem(TestFileSystem(self._file_system_data[str(branch)]))
print(sum(int(it) for it in str(factorial[n])))
sys.stderr.write("%s\n" % " ".join([str(arg) for arg in args]))
imageRegEx = re.compile(r'\[\[(Pilt|File|Image)\:.+?\]\]', re.IGNORECASE)
EDIT_MODULE_PAGE_BANNER = (By.XPATH, "//ol[@class='breadcrumb']/li[@class='active' and contains(text(), 'Edit module')]")
setup(name = 'shim', version = '0.1', ext_modules = [Extension('shim', ['shim.cpp'])])
results.append((sub, int(s[: - 1], 16)))
string = ['\nPosition (x,y,z) = ' + ','.join([str(p) for p in self.position])]
string.append('\nRotation (x,y,z) = ' + ','.join([str(r) for r in self.rotation]))
return[event for event in events if np.all([rule(event) for rule in self.rules])]
obj = getattr(relations[int(key[0])], PRED_MAP.get(key[1]))
X.append(seq[np.unique((u, v))])
escaped_substring = "".join(["%" + hex(ord(x))[2: ] for x in substring])
print(wrapper.fill(re.sub('[\n \t]+', ' ', ref).strip()))
logging.info("%s: %s" %(s, str(self.sects[s])))
values = sorted(set([(n - i - 1) * a + i * b for i in range(n)]))
final_factor.reduce([(var, evidence[var])])
rules = [Rule(LxmlLinkExtractor(allow = ('(http://www.dmoz.org/).+')), callback = 'parse_item', follow = True)]
target = map(itemgetter(0), filter(itemgetter(1), sorted(data["target"].items(), key = itemgetter(1), reverse = True)))
pages = int(str(links_number)[0] + '0' *(len(str(links_number)) - 1))
([get_request({"a": "b"})], None, None),
return[nodes.raw('', CODE.format('\n'.join(self.content)), format = 'html')]
str_value = ''.join([chr(c) for c in byte_seq[index: (index + 4)]])
print({int(str(x) + str(y) + str(z), base): [x, y, z] for x in digits for y in digits for z in digits})
paths.append(join_path(root, file)[(len(package) + 1): ])
listOfNumbersAsStrings = re.findall("([-+]?(\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?)", stringToParse)
('params', RE(r"\("), [RE(r"\)")], params),
N = sum([i for(_, i) in list(corpus.items())])
url(r'^{}/order/$'.format(r'(?P<basket_id>[\w]+)'), views.BasketOrderView.as_view(), name = 'retrieve_order'),
sys.path.append(os.sep.join([os.path.dirname(os.path.abspath(__file__)), '..', 'lib']))
p = [Philosopher(i, c[i], c[(i + 1) % n], butler) for i in range(n)]
parent.logger.info("msg_log received: %s %s%s topic=%s lag=%g %s" % tuple(msg.notice.split()[0: 3] +[msg.topic, msg.get_elapse(), msg.hdrstr]))
ATTACHMENT_RE = re.compile(r'(?P<before>.*)(\[attachment\:(?P<id>\d+)\])(?P<after>.*)', re.IGNORECASE)
log.error('%s: connection with %s broken: %s' %(self, ':'.join([str(x) for x in self.__remoteEndpoint]), exc_info[1]))
R = np.array([[cos(angle), - sin(angle), 0], [sin(angle), cos(angle), 0], [0, 0, 1]])
contacts.add((int(x[0]), int(x[1])))
self.menu = Menu(kwargs.pop("menu", getattr(self, "menu", [])))
return(self.__oF[self.__default_op].join([repr(_) for _ in self.__children]))
return "%s %s %s" %(self.__oF['('], self.__oF[self.__name].join([repr(_) for _ in self.__children]), self.__oF[')'])
os.chdir(os.path.dirname(os.path.realpath(sys.argv[0])))
print('%s\t\t%d\t\t%d' %(m, len(mirPath_all[m]), len(mirPath_enr[m])))
return nodes.Output([self.call_method('_render', [nodes.Name('csrf_token', 'load')])]).set_lineno(token.lineno)
self.textBox.insert(END, self.results[int(selection[0])][3])
child_urls = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', ret.content)
return max([(f(val, agent.countQ(currentState, a)), a) for(a, val) in qVals])[1]
return{line[0]: {'mem': int(line[1]), 'time': float(line[2])} for line in stats}
migrations.CreateModel("Foo", [("name", models.CharField(max_length = 255))]),
ringList.append(Ring([self.position[0], self.position[1]]))
assert(np.allclose(np.array([[pset[0].lon], [pset[0].lat]]), vals, 1e-2))
return all([access.has_access(operator_info().user_id, perm) for perm in perms])
pat = re.compile(r"\[\[Category:(.*?)(\|.*){0,1}\]\]")
return int('-' + ''.join(list(str(x))[1: ][: : - 1]))
x = int(''.join(list(str(x))[: : - 1]))
print(random.choice([i for i in range(11) if i % 2 == 0]))
plate.add_summary_filter(multiworm.filters.summary_lifetime_minimum(int(sys.argv[1])))
print(str(type(self)).split('.')[3] + '(' + str(len(data)) + '): ' + str(data.get_array_hex(1))[1: - 1])
self.put_tasks([str(os.getpid())])
return ';'.join(['{0},{1}'.format(item[0], item[1]) for item in pairs])
tagged_data = set([(row[0], correct_tag) for row in reader])
TAR_FILE_REGEX = re.compile(r".*\.tar(?:\.([a-z]+)|)", re.IGNORECASE)
print(re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', url))
value = BitArray(bytes = ''.join([chr(x) for x in data])).int
os.system('sudo cp {0} /etc/init/{1}'.format(info['service']['conf'], os.path.basename(info['service']['conf'])))
settings = self.env["base_import_map.map"].search([("id", "=", options.get('settings'))])
pDiffL = sum([evidence2([x], a, b) for x in s], axis = 0)
valid_diceroll = re.compile(r'^([+-]?(?:\d+|\d*d(?:\d+|F))(?:[+-](?:\d+|\d*d(?:\d+|F)))*)( .+)?$', re.I)
print(":".join([mac[e: e + 2] for e in range(0, 11, 2)]))
(self).breaker = behaviors.CircuitBreaker(((((u"[") +(serviceName)) +(u" at ")) +(url)) +(u"]"), failureLimit, retestDelay)
print(str(type(self)).split('.')[3] + '(' + str(len(data)) + '): ' + str(data.get_array_hex(1))[1: - 1])
myname = (splitext(basename(sys.argv[0])))[0]
self.assertEqual([(attrs['ElementName'])], response)
schedule.recurrences = recurrence.Recurrence(rrules = [recurrence.Rule(recurrence.WEEKLY, byday = (int(schedule.day), ))])
output[i >> 5] |=(ord(input[int(i / 8)]) & 0xFF) << i % 32
{'header': ('Server', 'mod_fcgid/(?P<version>[0-9\.]+)')},
return np.array([(1 + x[0] ** 2) ** 2 + x[1] ** 2])
binary_str = np.array([format(x, '0%sb' %(k)) for x in input_int])
print('\n'.join([l.rstrip() for l in s.splitlines()]))
output, error, returncode = cli_call(["docker", "inspect", "--format", "{{ .NetworkSettings.IPAddress }}", containerName], expect_success = True)
self.assertEqual(str(cls_data.Data(['aaa', 0.4, ['a', 'b', 'c']])), "['aaa', 0.4, ['a', 'b', 'c']]")
blocks.append((TestingBlock([1, 2, 3]), [], [0]))
val_deci = sng *(val_list[0] +((val_list[1] +(val_list[2] / 60.0)) / 60.0))
return list(filter(None, [cls._build_brighcove_url(m) for m in matches]))
metadata = dict(re.findall("__([a-z_]+)__\s*=\s*'([^']+)'", main_module))
return type(self)([[1. / self[0][0]]])
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
listaProcesada.append(re.split('[\[\]]', ip))
info = dict(zip(fields, [s.decode('windows-1252') for s in row]))
print("===== PLEXBMC STOP [id: %s]: %s seconds =====" %(plexbmc_start, (time.time() - plexbmc_start)))
print(os.sep.join([sys.argv[1], R, V[1: ]]))
result.append(u", ".join([videosclase.video, hw.title, hw.course.name, str(hw.course.year)]))
__all__ = list(module for _, module, _ in pkgutil.iter_modules([os.path.split(__file__)[0]]))
self.ser.write([int(self.reverse_read_temp(int(value)))])
regex = re.compile("(?P<value>\{[0-9]*\})")
print(" ".join([str(x) for x in count if x != 0]))
self.addCommand(Command("gringo+clasp", "gringo --shift $1 $2 | clasp", validator = ExitCodeValidator([10, 20, 30])))
logging.debug("{} -> {}".format(m.group(1), macros[m.group(1)]))
model = h2o.random_forest(y = iris[4], x = iris[list(range(maxx + 1))], ntrees = 50, max_depth = 100)
self.assertEqual(GafferScene.ScenePlug.pathToString(IECore.InternedStringVectorData(["a"])), "/a")
self.assertEqual(GafferScene.ScenePlug.pathToString(IECore.InternedStringVectorData(["a", "b"])), "/a/b")
out = n.array([str(n.round(el, 0)) + " & " for el in arr])
self.assertEqual(sorted(list([x.pk for x in s])), [6, 7, 8])
M = ''.join(map(str, [b for b in bits(f)]))
prevSearch = compile(tagre("a", "href", r'(%s\?webcomic1=[^"]+)' % rurl, after = "previous-webcomic"))
print("".join(["%s" %(alpha[A[ee, ii].Value()]) for ii in range(word_len)]))
data.append(strs[(strs.index(i) + shift) % 26])
entries.append(cls(*[entry[key.lower()] for key in keys]))
p.add_tools(HoverTool(tooltips = [("Organisation Name", "@OrganisationName")]))
return np.concatenate([np.ones([X.shape[0], 1]), X, np.square(X) / 2.0], axis = 1)
data = np.array([np.array([0, 2]), np.array([4, 4])])
err = np.array([np.diag((0.2, 0.1)), np.diag((0.15, 0.15))])
vpc = self.ec2.describe_vpcs(Filters = [{'Name': 'cidr', 'Values': [cidr]}])['Vpcs']
vpc = self.ec2.describe_vpcs(Filters = [{'Name': 'vpc-id', 'Values': [vpc_id]}])['Vpcs']
self.assertCodeExecution('print(list(filter(bool, [True, False, True])))')
self.assertCodeExecution('print(list(filter(bool, [1, 0, 3, -1])))')
self.assertCodeExecution('print(list(filter(bool, [])))')
self.assertCodeExecution('print(list(filter(None, [True, False, True])))')
self.assertCodeExecution('print(list(filter(None, [])))')
self.assertCodeExecution('print(list(filter(lambda x: x > 1, [3, 4, 56, 1, -11])))')
groups = list(map(list, zip(*[iter(elements)] * n)))
farm[mill] = dict(farm.get(mill, {}), **{day: farm.get(mill, {}).get(day, 0) + int(prod)})
print("Generated keystream: %s" %([hex(i) for i in keystream]))
l +=["%s <%s>" %(ob.attr(["cn", "sn", "o"]), ob.attr('mail'))]
reactor.callFromThread(self._protocol.sendMessage, (''.join([chr(b) for b in buff])).encode('utf8'), isBinary = False)
regex = re.compile('''states\[(?P<n>[n0-9+-]*)\]''')
ids = list(amcates.ES().query_ids(filters = {"sets": [setid]}))
assert str(canonicalize(IntInterval([1, 4]))) == '[1, 5)'
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment