Schema (column, type, and observed length range or class count):

    nwo                 stringlengths   5 .. 106
    sha                 stringlengths   40 .. 40
    path                stringlengths   4 .. 174
    language            stringclasses   1 value
    identifier          stringlengths   1 .. 140
    parameters          stringlengths   0 .. 87.7k
    argument_list       stringclasses   1 value
    return_statement    stringlengths   0 .. 426k
    docstring           stringlengths   0 .. 64.3k
    docstring_summary   stringlengths   0 .. 26.3k
    docstring_tokens    list
    function            stringlengths   18 .. 4.83M
    function_tokens     list
    url                 stringlengths   83 .. 304
tendenci/tendenci
0f2c348cc0e7d41bc56f50b00ce05544b083bf1d
tendenci/apps/pages/feeds.py
python
LatestEntriesFeed.items
(self)
return items
[]
def items(self):
    items = Page.objects.filter(**PUBLIC_FILTER).filter(syndicate=True).order_by('-create_dt')[:20]
    return items
[ "def", "items", "(", "self", ")", ":", "items", "=", "Page", ".", "objects", ".", "filter", "(", "*", "*", "PUBLIC_FILTER", ")", ".", "filter", "(", "syndicate", "=", "True", ")", ".", "order_by", "(", "'-create_dt'", ")", "[", ":", "20", "]", "return", "items" ]
https://github.com/tendenci/tendenci/blob/0f2c348cc0e7d41bc56f50b00ce05544b083bf1d/tendenci/apps/pages/feeds.py#L16-L18
OpenMDAO/OpenMDAO
f47eb5485a0bb5ea5d2ae5bd6da4b94dc6b296bd
openmdao/surrogate_models/nn_interpolators/nn_base.py
python
NNBase.__init__
(self, training_points, training_values, num_leaves=2)
Initialize nearest neighbor interpolant by scaling input to the unit hypercube.
Initialize nearest neighbor interpolant by scaling input to the unit hypercube.
[ "Initialize", "nearest", "neighbor", "interpolant", "by", "scaling", "input", "to", "the", "unit", "hypercube", "." ]
def __init__(self, training_points, training_values, num_leaves=2):
    """
    Initialize nearest neighbor interpolant by scaling input to the
    unit hypercube.
    """
    # training_points and training_values are the known points and their
    # respective values which will be interpolated against.

    # Grab the mins and ranges of each dimension
    self._tpm = np.amin(training_points, axis=0)
    self._tpr = (np.amax(training_points, axis=0) - self._tpm)
    self._tvm = np.amin(training_values, axis=0)
    self._tvr = (np.amax(training_values, axis=0) - self._tvm)

    # This prevents against collinear data (range = 0)
    self._tpr[self._tpr == 0] = 1
    self._tvr[self._tvr == 0] = 1

    # Normalize all points
    self._tp = (training_points - self._tpm) / self._tpr
    self._tv = (training_values - self._tvm) / self._tvr

    # Record number of dimensions and points
    self._indep_dims = training_points.shape[1]
    self._dep_dims = training_values.shape[1]
    self._ntpts = training_points.shape[0]

    # Make training data into a Tree
    leavesz = ceil(self._ntpts / float(num_leaves))
    self._KData = cKDTree(self._tp, leafsize=leavesz)

    # Cache for gradients
    self._pt_cache = None
[ "def", "__init__", "(", "self", ",", "training_points", ",", "training_values", ",", "num_leaves", "=", "2", ")", ":", "# training_points and training_values are the known points and their", "# respective values which will be interpolated against.", "# Grab the mins and ranges of each dimension", "self", ".", "_tpm", "=", "np", ".", "amin", "(", "training_points", ",", "axis", "=", "0", ")", "self", ".", "_tpr", "=", "(", "np", ".", "amax", "(", "training_points", ",", "axis", "=", "0", ")", "-", "self", ".", "_tpm", ")", "self", ".", "_tvm", "=", "np", ".", "amin", "(", "training_values", ",", "axis", "=", "0", ")", "self", ".", "_tvr", "=", "(", "np", ".", "amax", "(", "training_values", ",", "axis", "=", "0", ")", "-", "self", ".", "_tvm", ")", "# This prevents against collinear data (range = 0)", "self", ".", "_tpr", "[", "self", ".", "_tpr", "==", "0", "]", "=", "1", "self", ".", "_tvr", "[", "self", ".", "_tvr", "==", "0", "]", "=", "1", "# Normalize all points", "self", ".", "_tp", "=", "(", "training_points", "-", "self", ".", "_tpm", ")", "/", "self", ".", "_tpr", "self", ".", "_tv", "=", "(", "training_values", "-", "self", ".", "_tvm", ")", "/", "self", ".", "_tvr", "# Record number of dimensions and points", "self", ".", "_indep_dims", "=", "training_points", ".", "shape", "[", "1", "]", "self", ".", "_dep_dims", "=", "training_values", ".", "shape", "[", "1", "]", "self", ".", "_ntpts", "=", "training_points", ".", "shape", "[", "0", "]", "# Make training data into a Tree", "leavesz", "=", "ceil", "(", "self", ".", "_ntpts", "/", "float", "(", "num_leaves", ")", ")", "self", ".", "_KData", "=", "cKDTree", "(", "self", ".", "_tp", ",", "leafsize", "=", "leavesz", ")", "# Cache for gradients", "self", ".", "_pt_cache", "=", "None" ]
https://github.com/OpenMDAO/OpenMDAO/blob/f47eb5485a0bb5ea5d2ae5bd6da4b94dc6b296bd/openmdao/surrogate_models/nn_interpolators/nn_base.py#L54-L84
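A minimal usage sketch of the constructor above, assuming the module is importable from the path shown and numpy is available; the data is invented:

import numpy as np
from openmdao.surrogate_models.nn_interpolators.nn_base import NNBase

# Invented sample data: 100 known points in 3-D with 2 output dimensions.
training_points = np.random.rand(100, 3) * 10.0
training_values = np.random.rand(100, 2) * 5.0

base = NNBase(training_points, training_values, num_leaves=2)

# Every dimension is now scaled to the unit hypercube, so neighbor queries
# on the internal cKDTree are not dominated by wide-range inputs.
# (_tp is an internal attribute; inspected here only for illustration.)
print(base._tp.min(axis=0), base._tp.max(axis=0))   # ~0 and ~1 per dimension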
statsmodels/statsmodels
debbe7ea6ba28fe5bdb78f09f8cac694bef98722
statsmodels/sandbox/tsa/example_arma.py
python
autocorr
(s, axis=-1)
return sxx/N
Returns the autocorrelation of signal s at all lags. Adheres to the definition r(k) = E{s(n)s*(n-k)} where E{} is the expectation operator.
Returns the autocorrelation of signal s at all lags. Adheres to the definition r(k) = E{s(n)s*(n-k)} where E{} is the expectation operator.
[ "Returns", "the", "autocorrelation", "of", "signal", "s", "at", "all", "lags", ".", "Adheres", "to", "the", "definition", "r", "(", "k", ")", "=", "E", "{", "s", "(", "n", ")", "s", "*", "(", "n", "-", "k", ")", "}", "where", "E", "{}", "is", "the", "expectation", "operator", "." ]
def autocorr(s, axis=-1):
    """Returns the autocorrelation of signal s at all lags. Adheres to the
    definition r(k) = E{s(n)s*(n-k)} where E{} is the expectation operator.
    """
    N = s.shape[axis]
    S = np.fft.fft(s, n=2*N-1, axis=axis)
    sxx = np.fft.ifft(S*S.conjugate(), axis=axis).real[:N]
    return sxx/N
[ "def", "autocorr", "(", "s", ",", "axis", "=", "-", "1", ")", ":", "N", "=", "s", ".", "shape", "[", "axis", "]", "S", "=", "np", ".", "fft", ".", "fft", "(", "s", ",", "n", "=", "2", "*", "N", "-", "1", ",", "axis", "=", "axis", ")", "sxx", "=", "np", ".", "fft", ".", "ifft", "(", "S", "*", "S", ".", "conjugate", "(", ")", ",", "axis", "=", "axis", ")", ".", "real", "[", ":", "N", "]", "return", "sxx", "/", "N" ]
https://github.com/statsmodels/statsmodels/blob/debbe7ea6ba28fe5bdb78f09f8cac694bef98722/statsmodels/sandbox/tsa/example_arma.py#L182-L189
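Assuming numpy as np and the autocorr above in scope, the FFT result can be checked against the direct O(N^2) definition on a toy signal; the zero-padding to length 2N-1 is what avoids circular wrap-around:

import numpy as np

def autocorr_naive(s):
    # Direct evaluation of r(k) = (1/N) * sum_n s(n) s*(n-k)
    N = len(s)
    return np.array([np.sum(s[k:] * np.conj(s[:N - k])) for k in range(N)]) / N

s = np.random.randn(64)
assert np.allclose(autocorr(s), autocorr_naive(s))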
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/urllib3/packages/backports/makefile.py
python
backport_makefile
(self, mode="r", buffering=None, encoding=None, errors=None, newline=None)
return text
Backport of ``socket.makefile`` from Python 3.5.
Backport of ``socket.makefile`` from Python 3.5.
[ "Backport", "of", "socket", ".", "makefile", "from", "Python", "3", ".", "5", "." ]
def backport_makefile(self, mode="r", buffering=None, encoding=None,
                      errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    rawmode = ""
    if reading:
        rawmode += "r"
    if writing:
        rawmode += "w"
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    if buffering is None:
        buffering = -1
    if buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffer = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffer = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffer = io.BufferedWriter(raw, buffering)
    if binary:
        return buffer
    text = io.TextIOWrapper(buffer, encoding, errors, newline)
    text.mode = mode
    return text
[ "def", "backport_makefile", "(", "self", ",", "mode", "=", "\"r\"", ",", "buffering", "=", "None", ",", "encoding", "=", "None", ",", "errors", "=", "None", ",", "newline", "=", "None", ")", ":", "if", "not", "set", "(", "mode", ")", "<=", "set", "(", "[", "\"r\"", ",", "\"w\"", ",", "\"b\"", "]", ")", ":", "raise", "ValueError", "(", "\"invalid mode %r (only r, w, b allowed)\"", "%", "(", "mode", ",", ")", ")", "writing", "=", "\"w\"", "in", "mode", "reading", "=", "\"r\"", "in", "mode", "or", "not", "writing", "assert", "reading", "or", "writing", "binary", "=", "\"b\"", "in", "mode", "rawmode", "=", "\"\"", "if", "reading", ":", "rawmode", "+=", "\"r\"", "if", "writing", ":", "rawmode", "+=", "\"w\"", "raw", "=", "SocketIO", "(", "self", ",", "rawmode", ")", "self", ".", "_makefile_refs", "+=", "1", "if", "buffering", "is", "None", ":", "buffering", "=", "-", "1", "if", "buffering", "<", "0", ":", "buffering", "=", "io", ".", "DEFAULT_BUFFER_SIZE", "if", "buffering", "==", "0", ":", "if", "not", "binary", ":", "raise", "ValueError", "(", "\"unbuffered streams must be binary\"", ")", "return", "raw", "if", "reading", "and", "writing", ":", "buffer", "=", "io", ".", "BufferedRWPair", "(", "raw", ",", "raw", ",", "buffering", ")", "elif", "reading", ":", "buffer", "=", "io", ".", "BufferedReader", "(", "raw", ",", "buffering", ")", "else", ":", "assert", "writing", "buffer", "=", "io", ".", "BufferedWriter", "(", "raw", ",", "buffering", ")", "if", "binary", ":", "return", "buffer", "text", "=", "io", ".", "TextIOWrapper", "(", "buffer", ",", "encoding", ",", "errors", ",", "newline", ")", "text", ".", "mode", "=", "mode", "return", "text" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/urllib3/packages/backports/makefile.py#L14-L53
liquidctl/liquidctl
73962574632f94050c2a75f517e929a29797b5e2
liquidctl/util.py
python
delta
(profile)
return [(cur[0]-prev[0], cur[1]-prev[1]) for cur, prev in zip(profile[1:], profile[:-1])]
Compute a profile's Δx and Δy.
Compute a profile's Δx and Δy.
[ "Compute", "a", "profile", "s", "Δx", "and", "Δy", "." ]
def delta(profile):
    """Compute a profile's Δx and Δy."""
    return [(cur[0]-prev[0], cur[1]-prev[1])
            for cur, prev in zip(profile[1:], profile[:-1])]
[ "def", "delta", "(", "profile", ")", ":", "return", "[", "(", "cur", "[", "0", "]", "-", "prev", "[", "0", "]", ",", "cur", "[", "1", "]", "-", "prev", "[", "1", "]", ")", "for", "cur", ",", "prev", "in", "zip", "(", "profile", "[", "1", ":", "]", ",", "profile", "[", ":", "-", "1", "]", ")", "]" ]
https://github.com/liquidctl/liquidctl/blob/73962574632f94050c2a75f517e929a29797b5e2/liquidctl/util.py#L169-L172
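A worked example with invented values, assuming delta above is in scope: for a profile of (x, y) points, it returns the consecutive differences.

profile = [(20, 30), (30, 50), (40, 100)]   # e.g. (temperature, duty) pairs
assert delta(profile) == [(10, 20), (10, 50)]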
saltstack/salt-contrib
062355938ad1cced273056e9c23dc344c6a2c858
modules/syslog_ng.py
python
_parse_typed_parameter
(param)
Parses a TypedParameter and fills it with values.
Parses a TypedParameter and fills it with values.
[ "Parses", "a", "TypedParameter", "and", "fills", "it", "with", "values", "." ]
def _parse_typed_parameter(param):
    '''
    Parses a TypedParameter and fills it with values.
    '''
    global _current_parameter, _current_parameter_value
    type, value = _expand_one_key_dictionary(param)
    _current_parameter.type = type

    if _is_simple_type(value) and value != '':
        _current_parameter_value = SimpleParameterValue(value)
        _current_parameter.add_value(_current_parameter_value)
    elif isinstance(value, list):
        for i in value:
            if _is_simple_type(i):
                _current_parameter_value = SimpleParameterValue(i)
                _current_parameter.add_value(_current_parameter_value)
            elif isinstance(i, dict):
                _current_parameter_value = TypedParameterValue()
                _parse_typed_parameter_typed_value(i)
                _current_parameter.add_value(_current_parameter_value)
[ "def", "_parse_typed_parameter", "(", "param", ")", ":", "global", "_current_parameter", ",", "_current_parameter_value", "type", ",", "value", "=", "_expand_one_key_dictionary", "(", "param", ")", "_current_parameter", ".", "type", "=", "type", "if", "_is_simple_type", "(", "value", ")", "and", "value", "!=", "''", ":", "_current_parameter_value", "=", "SimpleParameterValue", "(", "value", ")", "_current_parameter", ".", "add_value", "(", "_current_parameter_value", ")", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "for", "i", "in", "value", ":", "if", "_is_simple_type", "(", "i", ")", ":", "_current_parameter_value", "=", "SimpleParameterValue", "(", "i", ")", "_current_parameter", ".", "add_value", "(", "_current_parameter_value", ")", "elif", "isinstance", "(", "i", ",", "dict", ")", ":", "_current_parameter_value", "=", "TypedParameterValue", "(", ")", "_parse_typed_parameter_typed_value", "(", "i", ")", "_current_parameter", ".", "add_value", "(", "_current_parameter_value", ")" ]
https://github.com/saltstack/salt-contrib/blob/062355938ad1cced273056e9c23dc344c6a2c858/modules/syslog_ng.py#L427-L446
openstack/manila
142990edc027e14839d5deaf4954dd6fc88de15e
manila/db/api.py
python
share_group_snapshot_get
(context, share_group_snapshot_id)
return IMPL.share_group_snapshot_get(context, share_group_snapshot_id)
Get a share group snapshot.
Get a share group snapshot.
[ "Get", "a", "share", "group", "snapshot", "." ]
def share_group_snapshot_get(context, share_group_snapshot_id):
    """Get a share group snapshot."""
    return IMPL.share_group_snapshot_get(context, share_group_snapshot_id)
[ "def", "share_group_snapshot_get", "(", "context", ",", "share_group_snapshot_id", ")", ":", "return", "IMPL", ".", "share_group_snapshot_get", "(", "context", ",", "share_group_snapshot_id", ")" ]
https://github.com/openstack/manila/blob/142990edc027e14839d5deaf4954dd6fc88de15e/manila/db/api.py#L1310-L1312
Delta-ML/delta
31dfebc8f20b7cb282b62f291ff25a87e403cc86
egs/sre16/v1/local/make_sre18_eval.py
python
wait_for_background_commands
()
This waits for all threads to exit. You will often want to run this at the end of programs that have launched background threads, so that the program will wait for its child processes to terminate before it dies.
This waits for all threads to exit. You will often want to run this at the end of programs that have launched background threads, so that the program will wait for its child processes to terminate before it dies.
[ "This", "waits", "for", "all", "threads", "to", "exit", ".", "You", "will", "often", "want", "to", "run", "this", "at", "the", "end", "of", "programs", "that", "have", "launched", "background", "threads", "so", "that", "the", "program", "will", "wait", "for", "its", "child", "processes", "to", "terminate", "before", "it", "dies", "." ]
def wait_for_background_commands():
    """ This waits for all threads to exit.  You will often want to run this at
        the end of programs that have launched background threads, so that the
        program will wait for its child processes to terminate before it dies."""
    for t in threading.enumerate():
        if not t == threading.current_thread():
            t.join()
[ "def", "wait_for_background_commands", "(", ")", ":", "for", "t", "in", "threading", ".", "enumerate", "(", ")", ":", "if", "not", "t", "==", "threading", ".", "current_thread", "(", ")", ":", "t", ".", "join", "(", ")" ]
https://github.com/Delta-ML/delta/blob/31dfebc8f20b7cb282b62f291ff25a87e403cc86/egs/sre16/v1/local/make_sre18_eval.py#L178-L185
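A small sketch of the intended call pattern, assuming the function above is in scope; the background job is invented:

import threading
import time

def background_job():
    time.sleep(0.1)   # stand-in for a long-running background command

threading.Thread(target=background_job).start()
wait_for_background_commands()   # returns only after background_job finishes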
kanzure/nanoengineer
874e4c9f8a9190f093625b267f9767e19f82e6c4
cad/src/commands/Extrude/ExtrudePropertyManager.py
python
ExtrudePropertyManager.show
(self)
Extends superclass method.
Extends superclass method.
[ "Extends", "superclass", "method", "." ]
def show(self):
    """
    Extends superclass method.
    """
    _superclass.show(self)
    self.updateMessage()
[ "def", "show", "(", "self", ")", ":", "_superclass", ".", "show", "(", "self", ")", "self", ".", "updateMessage", "(", ")" ]
https://github.com/kanzure/nanoengineer/blob/874e4c9f8a9190f093625b267f9767e19f82e6c4/cad/src/commands/Extrude/ExtrudePropertyManager.py#L40-L46
python-telegram-bot/python-telegram-bot
ade1529986f5b6d394a65372d6a27045a70725b2
telegram/ext/updater.py
python
Updater.__init__
( self: 'Updater[CCT, UD, CD, BD]', user_sig_handler: Callable = None, dispatcher: Dispatcher[CCT, UD, CD, BD] = None, )
[]
def __init__(
    self: 'Updater[CCT, UD, CD, BD]',
    user_sig_handler: Callable = None,
    dispatcher: Dispatcher[CCT, UD, CD, BD] = None,
):
    ...
[ "def", "__init__", "(", "self", ":", "'Updater[CCT, UD, CD, BD]'", ",", "user_sig_handler", ":", "Callable", "=", "None", ",", "dispatcher", ":", "Dispatcher", "[", "CCT", ",", "UD", ",", "CD", ",", "BD", "]", "=", "None", ",", ")", ":", "..." ]
https://github.com/python-telegram-bot/python-telegram-bot/blob/ade1529986f5b6d394a65372d6a27045a70725b2/telegram/ext/updater.py#L195-L200
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/apps/users/models.py
python
CouchUser.from_django_user
(cls, django_user, strict=False)
return cls.get_by_username(django_user.username, strict=strict)
[]
def from_django_user(cls, django_user, strict=False):
    return cls.get_by_username(django_user.username, strict=strict)
[ "def", "from_django_user", "(", "cls", ",", "django_user", ",", "strict", "=", "False", ")", ":", "return", "cls", ".", "get_by_username", "(", "django_user", ".", "username", ",", "strict", "=", "strict", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/users/models.py#L1391-L1392
mypaint/mypaint
90b36dbc7b8bd2f323383f7edf608a5e0a3a1a33
lib/layer/group.py
python
LayerStack.__setitem__
(self, index, layer)
Replaces the layer at an index (notifies root)
Replaces the layer at an index (notifies root)
[ "Replaces", "the", "layer", "at", "an", "index", "(", "notifies", "root", ")" ]
def __setitem__(self, index, layer):
    """Replaces the layer at an index (notifies root)"""
    index = self._normidx(index)
    oldlayer = self._layers[index]
    self._layers[index] = layer
    self._notify_disown(oldlayer, index)
    updates = [oldlayer.get_full_redraw_bbox()]
    self._notify_adopt(layer, index)
    updates.append(layer.get_full_redraw_bbox())
    self._content_changed(*tuple(core.combine_redraws(updates)))
[ "def", "__setitem__", "(", "self", ",", "index", ",", "layer", ")", ":", "index", "=", "self", ".", "_normidx", "(", "index", ")", "oldlayer", "=", "self", ".", "_layers", "[", "index", "]", "self", ".", "_layers", "[", "index", "]", "=", "layer", "self", ".", "_notify_disown", "(", "oldlayer", ",", "index", ")", "updates", "=", "[", "oldlayer", ".", "get_full_redraw_bbox", "(", ")", "]", "self", ".", "_notify_adopt", "(", "layer", ",", "index", ")", "updates", ".", "append", "(", "layer", ".", "get_full_redraw_bbox", "(", ")", ")", "self", ".", "_content_changed", "(", "*", "tuple", "(", "core", ".", "combine_redraws", "(", "updates", ")", ")", ")" ]
https://github.com/mypaint/mypaint/blob/90b36dbc7b8bd2f323383f7edf608a5e0a3a1a33/lib/layer/group.py#L349-L358
angr/claripy
4c961b4dc664706be8142fe4868f27655bc8da77
claripy/simplifications.py
python
SimplificationManager.rotate_shift_mask_simplifier
(a, b)
return expr
Handles the following case: ((A << a) | (A >> (_N - a))) & mask, where A being a BVS, a being a integer that is less than _N, _N is either 32 or 64, and mask can be evaluated to 0xffffffff (64-bit) or 0xffff (32-bit) after reversing the rotate-shift operation. It will be simplified to: (A & (mask >>> a)) <<< a
Handles the following case: ((A << a) | (A >> (_N - a))) & mask, where A being a BVS, a being a integer that is less than _N, _N is either 32 or 64, and mask can be evaluated to 0xffffffff (64-bit) or 0xffff (32-bit) after reversing the rotate-shift operation.
[ "Handles", "the", "following", "case", ":", "((", "A", "<<", "a", ")", "|", "(", "A", ">>", "(", "_N", "-", "a", ")))", "&", "mask", "where", "A", "being", "a", "BVS", "a", "being", "a", "integer", "that", "is", "less", "than", "_N", "_N", "is", "either", "32", "or", "64", "and", "mask", "can", "be", "evaluated", "to", "0xffffffff", "(", "64", "-", "bit", ")", "or", "0xffff", "(", "32", "-", "bit", ")", "after", "reversing", "the", "rotate", "-", "shift", "operation", "." ]
def rotate_shift_mask_simplifier(a, b):
    """
    Handles the following case:
        ((A << a) | (A >> (_N - a))) & mask, where
            A being a BVS,
            a being a integer that is less than _N,
            _N is either 32 or 64, and
            mask can be evaluated to 0xffffffff (64-bit) or 0xffff (32-bit)
            after reversing the rotate-shift operation.
    It will be simplified to:
        (A & (mask >>> a)) <<< a
    """
    # is the second argument a BVV?
    if b.op != 'BVV':
        return None
    # is it a rotate-shift?
    if a.op != '__or__' or len(a.args) != 2:
        return None
    a_0, a_1 = a.args
    if a_0.op != '__lshift__':
        return None
    if a_1.op != 'LShR':
        return None
    a_00, a_01 = a_0.args
    a_10, a_11 = a_1.args
    if not a_00 is a_10:
        return None
    if a_01.op != 'BVV' or a_11.op != 'BVV':
        return None
    lshift_ = a_01.args[0]
    rshift_ = a_11.args[0]
    bitwidth = lshift_ + rshift_
    if bitwidth not in (32, 64):
        return None
    # is the second argument a mask?
    # Note: the following check can be further loosen if we want to support more masks.
    if bitwidth == 32:
        m = ((b.args[0] << rshift_) & 0xffffffff) | (b.args[0] >> lshift_)
        if m != 0xffff:
            return None
    else:  # bitwidth == 64
        m = ((b.args[0] << rshift_) & 0xffffffffffffffff) | (b.args[0] >> lshift_)
        if m != 0xffffffff:
            return None

    # Show our power!
    masked_a = (a_00 & m)
    expr = (masked_a << lshift_) | (masked_a >> rshift_)
    return expr
[ "def", "rotate_shift_mask_simplifier", "(", "a", ",", "b", ")", ":", "# is the second argument a BVV?", "if", "b", ".", "op", "!=", "'BVV'", ":", "return", "None", "# is it a rotate-shift?", "if", "a", ".", "op", "!=", "'__or__'", "or", "len", "(", "a", ".", "args", ")", "!=", "2", ":", "return", "None", "a_0", ",", "a_1", "=", "a", ".", "args", "if", "a_0", ".", "op", "!=", "'__lshift__'", ":", "return", "None", "if", "a_1", ".", "op", "!=", "'LShR'", ":", "return", "None", "a_00", ",", "a_01", "=", "a_0", ".", "args", "a_10", ",", "a_11", "=", "a_1", ".", "args", "if", "not", "a_00", "is", "a_10", ":", "return", "None", "if", "a_01", ".", "op", "!=", "'BVV'", "or", "a_11", ".", "op", "!=", "'BVV'", ":", "return", "None", "lshift_", "=", "a_01", ".", "args", "[", "0", "]", "rshift_", "=", "a_11", ".", "args", "[", "0", "]", "bitwidth", "=", "lshift_", "+", "rshift_", "if", "bitwidth", "not", "in", "(", "32", ",", "64", ")", ":", "return", "None", "# is the second argument a mask?", "# Note: the following check can be further loosen if we want to support more masks.", "if", "bitwidth", "==", "32", ":", "m", "=", "(", "(", "b", ".", "args", "[", "0", "]", "<<", "rshift_", ")", "&", "0xffffffff", ")", "|", "(", "b", ".", "args", "[", "0", "]", ">>", "lshift_", ")", "if", "m", "!=", "0xffff", ":", "return", "None", "else", ":", "# bitwidth == 64", "m", "=", "(", "(", "b", ".", "args", "[", "0", "]", "<<", "rshift_", ")", "&", "0xffffffffffffffff", ")", "|", "(", "b", ".", "args", "[", "0", "]", ">>", "lshift_", ")", "if", "m", "!=", "0xffffffff", ":", "return", "None", "# Show our power!", "masked_a", "=", "(", "a_00", "&", "m", ")", "expr", "=", "(", "masked_a", "<<", "lshift_", ")", "|", "(", "masked_a", ">>", "rshift_", ")", "return", "expr" ]
https://github.com/angr/claripy/blob/4c961b4dc664706be8142fe4868f27655bc8da77/claripy/simplifications.py#L811-L864
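The identity the simplifier relies on — masking a rotated value equals rotating the value masked by the counter-rotated mask — can be sanity-checked with plain integers, independent of claripy (all numbers here are invented):

N, s = 32, 8
mask = 0xffff

def rol(x, r, n=N):
    return ((x << r) | (x >> (n - r))) & ((1 << n) - 1)

def ror(x, r, n=N):
    return rol(x, n - r, n)

A = 0x12345678
# (A rol s) & mask  ==  (A & (mask ror s)) rol s
assert rol(A, s) & mask == rol(A & ror(mask, s), s)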
shchur/gnn-benchmark
1e72912a0810cdf27ae54fd589a3b43358a2b161
gnnbench/models/gat.py
python
attention_layer
(inputs, output_dim, num_heads, graph_adj, adj_with_self_loops_indices, activation_fn, use_averaging, input_dropout_prob, coefficient_dropout_prob, weight_decay, name)
[]
def attention_layer(inputs, output_dim, num_heads, graph_adj, adj_with_self_loops_indices,
                    activation_fn, use_averaging, input_dropout_prob, coefficient_dropout_prob,
                    weight_decay, name):
    with tf.name_scope(name):
        head_results = []
        for i in range(num_heads):
            # dims: num_nodes x num_features, output_dim, num_nodes x num_nodes -> num_nodes x output_dim
            head_results.append(attention_head(inputs=inputs,
                                               output_dim=output_dim,
                                               graph_adj=graph_adj,
                                               adj_with_self_loops_indices=adj_with_self_loops_indices,
                                               activation_fn=activation_fn,
                                               input_dropout_prob=input_dropout_prob,
                                               coefficient_dropout_prob=coefficient_dropout_prob,
                                               weight_decay=weight_decay,
                                               name="%s-head%d" % (name, i)))

        if use_averaging:
            return tf.add_n(head_results) / num_heads
        else:
            # dims: num_nodes x output_dim -> num_nodes x num_heads x output_dim
            return tf.concat(head_results, axis=1)
[ "def", "attention_layer", "(", "inputs", ",", "output_dim", ",", "num_heads", ",", "graph_adj", ",", "adj_with_self_loops_indices", ",", "activation_fn", ",", "use_averaging", ",", "input_dropout_prob", ",", "coefficient_dropout_prob", ",", "weight_decay", ",", "name", ")", ":", "with", "tf", ".", "name_scope", "(", "name", ")", ":", "head_results", "=", "[", "]", "for", "i", "in", "range", "(", "num_heads", ")", ":", "# dims: num_nodes x num_features, output_dim, num_nodes x num_nodes -> num_nodes x output_dim", "head_results", ".", "append", "(", "attention_head", "(", "inputs", "=", "inputs", ",", "output_dim", "=", "output_dim", ",", "graph_adj", "=", "graph_adj", ",", "adj_with_self_loops_indices", "=", "adj_with_self_loops_indices", ",", "activation_fn", "=", "activation_fn", ",", "input_dropout_prob", "=", "input_dropout_prob", ",", "coefficient_dropout_prob", "=", "coefficient_dropout_prob", ",", "weight_decay", "=", "weight_decay", ",", "name", "=", "\"%s-head%d\"", "%", "(", "name", ",", "i", ")", ")", ")", "if", "use_averaging", ":", "return", "tf", ".", "add_n", "(", "head_results", ")", "/", "num_heads", "else", ":", "# dims: num_nodes x output_dim -> num_nodes x num_heads x output_dim", "return", "tf", ".", "concat", "(", "head_results", ",", "axis", "=", "1", ")" ]
https://github.com/shchur/gnn-benchmark/blob/1e72912a0810cdf27ae54fd589a3b43358a2b161/gnnbench/models/gat.py#L118-L139
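A shape-only numpy stand-in for the two combination modes above (toy sizes, invented data; in the GAT formulation hidden layers usually concatenate heads while the output layer averages them):

import numpy as np

# 4 nodes, 3 attention heads, output_dim 8.
heads = [np.random.rand(4, 8) for _ in range(3)]

averaged = sum(heads) / len(heads)            # like tf.add_n(...) / num_heads
concatenated = np.concatenate(heads, axis=1)  # like tf.concat(..., axis=1)

assert averaged.shape == (4, 8)        # use_averaging=True
assert concatenated.shape == (4, 24)   # use_averaging=False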
iwonbigbro/gsync
03db22fe826f073e56877918b0acb7b4ba908f18
libgsync/output.py
python
Channel.disable
(self)
Disables the channel.
Disables the channel.
[ "Disables", "the", "channel", "." ]
def disable(self):
    """Disables the channel."""
    self._priority = -1
[ "def", "disable", "(", "self", ")", ":", "self", ".", "_priority", "=", "-", "1" ]
https://github.com/iwonbigbro/gsync/blob/03db22fe826f073e56877918b0acb7b4ba908f18/libgsync/output.py#L29-L32
uqfoundation/multiprocess
028cc73f02655e6451d92e5147d19d8c10aebe50
py3.10/multiprocess/resource_tracker.py
python
ResourceTracker.ensure_running
(self)
Make sure that resource tracker process is running. This can be run from any process. Usually a child process will use the resource created by its parent.
Make sure that resource tracker process is running.
[ "Make", "sure", "that", "resource", "tracker", "process", "is", "running", "." ]
def ensure_running(self):
    '''Make sure that resource tracker process is running.

    This can be run from any process.  Usually a child process will use
    the resource created by its parent.'''
    with self._lock:
        if self._fd is not None:
            # resource tracker was launched before, is it still running?
            if self._check_alive():
                # => still alive
                return
            # => dead, launch it again
            os.close(self._fd)

            # Clean-up to avoid dangling processes.
            try:
                # _pid can be None if this process is a child from another
                # python process, which has started the resource_tracker.
                if self._pid is not None:
                    os.waitpid(self._pid, 0)
            except ChildProcessError:
                # The resource_tracker has already been terminated.
                pass
            self._fd = None
            self._pid = None

            warnings.warn('resource_tracker: process died unexpectedly, '
                          'relaunching.  Some resources might leak.')

        fds_to_pass = []
        try:
            fds_to_pass.append(sys.stderr.fileno())
        except Exception:
            pass
        cmd = 'from multiprocess.resource_tracker import main;main(%d)'
        r, w = os.pipe()
        try:
            fds_to_pass.append(r)
            # process will out live us, so no need to wait on pid
            exe = spawn.get_executable()
            args = [exe] + util._args_from_interpreter_flags()
            args += ['-c', cmd % r]
            # bpo-33613: Register a signal mask that will block the signals.
            # This signal mask will be inherited by the child that is going
            # to be spawned and will protect the child from a race condition
            # that can make the child die before it registers signal handlers
            # for SIGINT and SIGTERM. The mask is unregistered after spawning
            # the child.
            try:
                if _HAVE_SIGMASK:
                    signal.pthread_sigmask(signal.SIG_BLOCK, _IGNORED_SIGNALS)
                pid = util.spawnv_passfds(exe, args, fds_to_pass)
            finally:
                if _HAVE_SIGMASK:
                    signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
        except:
            os.close(w)
            raise
        else:
            self._fd = w
            self._pid = pid
        finally:
            os.close(r)
[ "def", "ensure_running", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "if", "self", ".", "_fd", "is", "not", "None", ":", "# resource tracker was launched before, is it still running?", "if", "self", ".", "_check_alive", "(", ")", ":", "# => still alive", "return", "# => dead, launch it again", "os", ".", "close", "(", "self", ".", "_fd", ")", "# Clean-up to avoid dangling processes.", "try", ":", "# _pid can be None if this process is a child from another", "# python process, which has started the resource_tracker.", "if", "self", ".", "_pid", "is", "not", "None", ":", "os", ".", "waitpid", "(", "self", ".", "_pid", ",", "0", ")", "except", "ChildProcessError", ":", "# The resource_tracker has already been terminated.", "pass", "self", ".", "_fd", "=", "None", "self", ".", "_pid", "=", "None", "warnings", ".", "warn", "(", "'resource_tracker: process died unexpectedly, '", "'relaunching. Some resources might leak.'", ")", "fds_to_pass", "=", "[", "]", "try", ":", "fds_to_pass", ".", "append", "(", "sys", ".", "stderr", ".", "fileno", "(", ")", ")", "except", "Exception", ":", "pass", "cmd", "=", "'from multiprocess.resource_tracker import main;main(%d)'", "r", ",", "w", "=", "os", ".", "pipe", "(", ")", "try", ":", "fds_to_pass", ".", "append", "(", "r", ")", "# process will out live us, so no need to wait on pid", "exe", "=", "spawn", ".", "get_executable", "(", ")", "args", "=", "[", "exe", "]", "+", "util", ".", "_args_from_interpreter_flags", "(", ")", "args", "+=", "[", "'-c'", ",", "cmd", "%", "r", "]", "# bpo-33613: Register a signal mask that will block the signals.", "# This signal mask will be inherited by the child that is going", "# to be spawned and will protect the child from a race condition", "# that can make the child die before it registers signal handlers", "# for SIGINT and SIGTERM. The mask is unregistered after spawning", "# the child.", "try", ":", "if", "_HAVE_SIGMASK", ":", "signal", ".", "pthread_sigmask", "(", "signal", ".", "SIG_BLOCK", ",", "_IGNORED_SIGNALS", ")", "pid", "=", "util", ".", "spawnv_passfds", "(", "exe", ",", "args", ",", "fds_to_pass", ")", "finally", ":", "if", "_HAVE_SIGMASK", ":", "signal", ".", "pthread_sigmask", "(", "signal", ".", "SIG_UNBLOCK", ",", "_IGNORED_SIGNALS", ")", "except", ":", "os", ".", "close", "(", "w", ")", "raise", "else", ":", "self", ".", "_fd", "=", "w", "self", ".", "_pid", "=", "pid", "finally", ":", "os", ".", "close", "(", "r", ")" ]
https://github.com/uqfoundation/multiprocess/blob/028cc73f02655e6451d92e5147d19d8c10aebe50/py3.10/multiprocess/resource_tracker.py#L81-L143
Vector35/binaryninja-api
d9661f34eec6855d495a10eaafc2a8e2679756a7
python/plugin.py
python
PluginCommand.register_for_address
(cls, name: str, description: str, action: Callable[[binaryview.BinaryView, int], None], is_valid: Optional[Callable[[binaryview.BinaryView, int], bool]] = None )
r""" ``register_for_address`` Register a plugin to be called with an address argument :param str name: name of the plugin (use 'Folder\\Name' to have the menu item nested in a folder) :param str description: description of the plugin :param callback action: function to call with the :class:`~binaryview.BinaryView` and address as arguments :param callback is_valid: optional argument of a function passed a :class:`~binaryview.BinaryView` and address to determine whether the plugin should be enabled for that view :rtype: None .. warning:: Calling ``register_for_address`` with the same function name will replace the existing function but will leak the memory of the original plugin.
r""" ``register_for_address`` Register a plugin to be called with an address argument
[ "r", "register_for_address", "Register", "a", "plugin", "to", "be", "called", "with", "an", "address", "argument" ]
def register_for_address(cls, name: str, description: str,
                         action: Callable[[binaryview.BinaryView, int], None],
                         is_valid: Optional[Callable[[binaryview.BinaryView, int], bool]] = None):
    r"""
    ``register_for_address`` Register a plugin to be called with an address argument

    :param str name: name of the plugin (use 'Folder\\Name' to have the menu item nested in a folder)
    :param str description: description of the plugin
    :param callback action: function to call with the :class:`~binaryview.BinaryView` and address as arguments
    :param callback is_valid: optional argument of a function passed a :class:`~binaryview.BinaryView` and address to determine whether the plugin should be enabled for that view
    :rtype: None

    .. warning:: Calling ``register_for_address`` with the same function name will replace the existing function but will leak the memory of the original plugin.
    """
    binaryninja._init_plugins()
    action_obj = ctypes.CFUNCTYPE(
        None, ctypes.c_void_p, ctypes.POINTER(core.BNBinaryView), ctypes.c_ulonglong
    )(lambda ctxt, view, addr: cls._address_action(view, addr, action))
    is_valid_obj = ctypes.CFUNCTYPE(
        ctypes.c_bool, ctypes.c_void_p, ctypes.POINTER(core.BNBinaryView), ctypes.c_ulonglong
    )(lambda ctxt, view, addr: cls._address_is_valid(view, addr, is_valid))
    cls._registered_commands.append((action_obj, is_valid_obj))
    core.BNRegisterPluginCommandForAddress(name, description, action_obj, is_valid_obj, None)
[ "def", "register_for_address", "(", "cls", ",", "name", ":", "str", ",", "description", ":", "str", ",", "action", ":", "Callable", "[", "[", "binaryview", ".", "BinaryView", ",", "int", "]", ",", "None", "]", ",", "is_valid", ":", "Optional", "[", "Callable", "[", "[", "binaryview", ".", "BinaryView", ",", "int", "]", ",", "bool", "]", "]", "=", "None", ")", ":", "binaryninja", ".", "_init_plugins", "(", ")", "action_obj", "=", "ctypes", ".", "CFUNCTYPE", "(", "None", ",", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "POINTER", "(", "core", ".", "BNBinaryView", ")", ",", "ctypes", ".", "c_ulonglong", ")", "(", "lambda", "ctxt", ",", "view", ",", "addr", ":", "cls", ".", "_address_action", "(", "view", ",", "addr", ",", "action", ")", ")", "is_valid_obj", "=", "ctypes", ".", "CFUNCTYPE", "(", "ctypes", ".", "c_bool", ",", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "POINTER", "(", "core", ".", "BNBinaryView", ")", ",", "ctypes", ".", "c_ulonglong", ")", "(", "lambda", "ctxt", ",", "view", ",", "addr", ":", "cls", ".", "_address_is_valid", "(", "view", ",", "addr", ",", "is_valid", ")", ")", "cls", ".", "_registered_commands", ".", "append", "(", "(", "action_obj", ",", "is_valid_obj", ")", ")", "core", ".", "BNRegisterPluginCommandForAddress", "(", "name", ",", "description", ",", "action_obj", ",", "is_valid_obj", ",", "None", ")" ]
https://github.com/Vector35/binaryninja-api/blob/d9661f34eec6855d495a10eaafc2a8e2679756a7/python/plugin.py#L376-L397
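A typical registration from a user plugin, sketched per the docstring above; the plugin name and callback are invented:

from binaryninja import PluginCommand

def annotate(bv, addr):
    # bv is the BinaryView, addr the address the command was invoked on
    print("invoked at 0x%x" % addr)

PluginCommand.register_for_address(
    "Examples\\Annotate Address",   # '\\' nests the item in an 'Examples' folder
    "Print the address the command was invoked on",
    annotate,
)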
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/google-api-python-client/oauth2client/client.py
python
Credentials.to_json
(self)
return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
Creating a JSON representation of an instance of Credentials. Returns: string, a JSON representation of this instance, suitable to pass to from_json().
Creating a JSON representation of an instance of Credentials.
[ "Creating", "a", "JSON", "representation", "of", "an", "instance", "of", "Credentials", "." ]
def to_json(self):
    """Creating a JSON representation of an instance of Credentials.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
[ "def", "to_json", "(", "self", ")", ":", "return", "self", ".", "_to_json", "(", "Credentials", ".", "NON_SERIALIZED_MEMBERS", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/google-api-python-client/oauth2client/client.py#L134-L141
H4ckForJob/dirmap
d2000165efe01988fe55280175b0f87930f65d33
thirdlib/IPy/IPy.py
python
IPint.__cmp__
(self, other)
return 0
Called by comparison operations. Should return a negative integer if self < other, zero if self == other, a positive integer if self > other. Order is first determined by the address family. IPv4 addresses are always smaller than IPv6 addresses: >>> IP('10.0.0.0') < IP('2001:db8::') 1 Then the first address is compared. Lower addresses are always smaller: >>> IP('10.0.0.0') > IP('10.0.0.1') 0 >>> IP('10.0.0.0/24') > IP('10.0.0.1') 0 >>> IP('10.0.1.0') > IP('10.0.0.0/24') 1 >>> IP('10.0.1.0/24') > IP('10.0.0.0/24') 1 >>> IP('10.0.1.0/24') > IP('10.0.0.0') 1 Then the prefix length is compared. Shorter prefixes are considered smaller than longer prefixes: >>> IP('10.0.0.0/24') > IP('10.0.0.0') 0 >>> IP('10.0.0.0/24') > IP('10.0.0.0/25') 0 >>> IP('10.0.0.0/24') > IP('10.0.0.0/23') 1
Called by comparison operations.
[ "Called", "by", "comparison", "operations", "." ]
def __cmp__(self, other):
    """Called by comparison operations.

    Should return a negative integer if self < other, zero if self == other,
    a positive integer if self > other.

    Order is first determined by the address family. IPv4 addresses are
    always smaller than IPv6 addresses:

    >>> IP('10.0.0.0') < IP('2001:db8::')
    1

    Then the first address is compared. Lower addresses are always smaller:

    >>> IP('10.0.0.0') > IP('10.0.0.1')
    0
    >>> IP('10.0.0.0/24') > IP('10.0.0.1')
    0
    >>> IP('10.0.1.0') > IP('10.0.0.0/24')
    1
    >>> IP('10.0.1.0/24') > IP('10.0.0.0/24')
    1
    >>> IP('10.0.1.0/24') > IP('10.0.0.0')
    1

    Then the prefix length is compared. Shorter prefixes are considered
    smaller than longer prefixes:

    >>> IP('10.0.0.0/24') > IP('10.0.0.0')
    0
    >>> IP('10.0.0.0/24') > IP('10.0.0.0/25')
    0
    >>> IP('10.0.0.0/24') > IP('10.0.0.0/23')
    1
    """
    if not isinstance(other, IPint):
        raise TypeError

    # Lower version -> lower result
    if self._ipversion != other._ipversion:
        return self._ipversion < other._ipversion and -1 or 1

    # Lower start address -> lower result
    if self.ip != other.ip:
        return self.ip < other.ip and -1 or 1

    # Shorter prefix length -> lower result
    if self._prefixlen != other._prefixlen:
        return self._prefixlen < other._prefixlen and -1 or 1

    # No differences found
    return 0
[ "def", "__cmp__", "(", "self", ",", "other", ")", ":", "if", "not", "isinstance", "(", "other", ",", "IPint", ")", ":", "raise", "TypeError", "# Lower version -> lower result", "if", "self", ".", "_ipversion", "!=", "other", ".", "_ipversion", ":", "return", "self", ".", "_ipversion", "<", "other", ".", "_ipversion", "and", "-", "1", "or", "1", "# Lower start address -> lower result", "if", "self", ".", "ip", "!=", "other", ".", "ip", ":", "return", "self", ".", "ip", "<", "other", ".", "ip", "and", "-", "1", "or", "1", "# Shorter prefix length -> lower result", "if", "self", ".", "_prefixlen", "!=", "other", ".", "_prefixlen", ":", "return", "self", ".", "_prefixlen", "<", "other", ".", "_prefixlen", "and", "-", "1", "or", "1", "# No differences found", "return", "0" ]
https://github.com/H4ckForJob/dirmap/blob/d2000165efe01988fe55280175b0f87930f65d33/thirdlib/IPy/IPy.py#L706-L759
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/collections.py
python
EventCollection.get_linewidth
(self)
return super(EventCollection, self).get_linewidth()[0]
Get the width of the lines used to mark each event.
Get the width of the lines used to mark each event.
[ "Get", "the", "width", "of", "the", "lines", "used", "to", "mark", "each", "event", "." ]
def get_linewidth(self):
    """Get the width of the lines used to mark each event."""
    return super(EventCollection, self).get_linewidth()[0]
[ "def", "get_linewidth", "(", "self", ")", ":", "return", "super", "(", "EventCollection", ",", "self", ")", ".", "get_linewidth", "(", ")", "[", "0", "]" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/matplotlib-3.0.3-py3.7-macosx-10.9-x86_64.egg/matplotlib/collections.py#L1530-L1532
sentinel-hub/eo-learn
cf964eaf173668d6a374675dbd7c1d244264c11d
visualization/eolearn/visualization/eoexecutor_visualization.py
python
EOExecutorVisualization._get_task_descriptions
(self)
return descriptions
Prepares a list of task names and their initialization parameters
Prepares a list of task names and their initialization parameters
[ "Prepares", "a", "list", "of", "task", "names", "and", "their", "initialization", "parameters" ]
def _get_task_descriptions(self):
    """ Prepares a list of task names and their initialization parameters """
    descriptions = []

    for task_name, task in self.eoexecutor.workflow.get_tasks().items():
        descriptions.append({
            'name': f"{task_name} ({task.__class__.__name__}_{task.private_task_config.uuid[:6]})",
            'args': {
                key: value.replace('<', '&lt;').replace('>', '&gt;')
                for key, value in task.private_task_config.init_args.items()
            }
        })

    return descriptions
[ "def", "_get_task_descriptions", "(", "self", ")", ":", "descriptions", "=", "[", "]", "for", "task_name", ",", "task", "in", "self", ".", "eoexecutor", ".", "workflow", ".", "get_tasks", "(", ")", ".", "items", "(", ")", ":", "descriptions", ".", "append", "(", "{", "'name'", ":", "f\"{task_name} ({task.__class__.__name__}_{task.private_task_config.uuid[:6]})\"", ",", "'args'", ":", "{", "key", ":", "value", ".", "replace", "(", "'<'", ",", "'&lt;'", ")", ".", "replace", "(", "'>'", ",", "'&gt;'", ")", "for", "key", ",", "value", "in", "task", ".", "private_task_config", ".", "init_args", ".", "items", "(", ")", "}", "}", ")", "return", "descriptions" ]
https://github.com/sentinel-hub/eo-learn/blob/cf964eaf173668d6a374675dbd7c1d244264c11d/visualization/eolearn/visualization/eoexecutor_visualization.py#L88-L102
statsmodels/statsmodels
debbe7ea6ba28fe5bdb78f09f8cac694bef98722
statsmodels/stats/libqsturng/qsturng_.py
python
_interpolate_v
(p, r, v)
return y
interpolates v based on the values in the A table for the scalar value of r and th
interpolates v based on the values in the A table for the scalar value of r and th
[ "interpolates", "v", "based", "on", "the", "values", "in", "the", "A", "table", "for", "the", "scalar", "value", "of", "r", "and", "th" ]
def _interpolate_v(p, r, v):
    """
    interpolates v based on the values in the A table for the
    scalar value of r and th
    """
    # interpolate v (p should be in table)
    # ordinate: y**2
    # abcissa:  1./v

    # find the 3 closest v values
    # only p >= .9 have table values for 1 degree of freedom.
    # The boolean is used to index the tuple and append 1 when
    # p >= .9
    v0, v1, v2 = _select_vs(v, p)

    # y = f - 1.
    y0_sq = (_func(A[(p,v0)], p, r, v0) + 1.)**2.
    y1_sq = (_func(A[(p,v1)], p, r, v1) + 1.)**2.
    y2_sq = (_func(A[(p,v2)], p, r, v2) + 1.)**2.

    # if v2 is inf set to a big number so interpolation
    # calculations will work
    if v2 > 1e38:
        v2 = 1e38

    # transform v
    v_, v0_, v1_, v2_ = 1./v, 1./v0, 1./v1, 1./v2

    # calculate derivatives for quadratic interpolation
    d2 = 2.*((y2_sq-y1_sq)/(v2_-v1_) - \
             (y0_sq-y1_sq)/(v0_-v1_)) / (v2_-v0_)

    if (v2_ + v0_) >= (v1_ + v1_):
        d1 = (y2_sq-y1_sq) / (v2_-v1_) - 0.5*d2*(v2_-v1_)
    else:
        d1 = (y1_sq-y0_sq) / (v1_-v0_) + 0.5*d2*(v1_-v0_)
    d0 = y1_sq

    # calculate y
    y = math.sqrt((d2/2.)*(v_-v1_)**2. + d1*(v_-v1_) + d0)

    return y
[ "def", "_interpolate_v", "(", "p", ",", "r", ",", "v", ")", ":", "# interpolate v (p should be in table)", "# ordinate: y**2", "# abcissa: 1./v", "# find the 3 closest v values", "# only p >= .9 have table values for 1 degree of freedom.", "# The boolean is used to index the tuple and append 1 when", "# p >= .9", "v0", ",", "v1", ",", "v2", "=", "_select_vs", "(", "v", ",", "p", ")", "# y = f - 1.", "y0_sq", "=", "(", "_func", "(", "A", "[", "(", "p", ",", "v0", ")", "]", ",", "p", ",", "r", ",", "v0", ")", "+", "1.", ")", "**", "2.", "y1_sq", "=", "(", "_func", "(", "A", "[", "(", "p", ",", "v1", ")", "]", ",", "p", ",", "r", ",", "v1", ")", "+", "1.", ")", "**", "2.", "y2_sq", "=", "(", "_func", "(", "A", "[", "(", "p", ",", "v2", ")", "]", ",", "p", ",", "r", ",", "v2", ")", "+", "1.", ")", "**", "2.", "# if v2 is inf set to a big number so interpolation", "# calculations will work", "if", "v2", ">", "1e38", ":", "v2", "=", "1e38", "# transform v", "v_", ",", "v0_", ",", "v1_", ",", "v2_", "=", "1.", "/", "v", ",", "1.", "/", "v0", ",", "1.", "/", "v1", ",", "1.", "/", "v2", "# calculate derivatives for quadratic interpolation", "d2", "=", "2.", "*", "(", "(", "y2_sq", "-", "y1_sq", ")", "/", "(", "v2_", "-", "v1_", ")", "-", "(", "y0_sq", "-", "y1_sq", ")", "/", "(", "v0_", "-", "v1_", ")", ")", "/", "(", "v2_", "-", "v0_", ")", "if", "(", "v2_", "+", "v0_", ")", ">=", "(", "v1_", "+", "v1_", ")", ":", "d1", "=", "(", "y2_sq", "-", "y1_sq", ")", "/", "(", "v2_", "-", "v1_", ")", "-", "0.5", "*", "d2", "*", "(", "v2_", "-", "v1_", ")", "else", ":", "d1", "=", "(", "y1_sq", "-", "y0_sq", ")", "/", "(", "v1_", "-", "v0_", ")", "+", "0.5", "*", "d2", "*", "(", "v1_", "-", "v0_", ")", "d0", "=", "y1_sq", "# calculate y", "y", "=", "math", ".", "sqrt", "(", "(", "d2", "/", "2.", ")", "*", "(", "v_", "-", "v1_", ")", "**", "2.", "+", "d1", "*", "(", "v_", "-", "v1_", ")", "+", "d0", ")", "return", "y" ]
https://github.com/statsmodels/statsmodels/blob/debbe7ea6ba28fe5bdb78f09f8cac694bef98722/statsmodels/stats/libqsturng/qsturng_.py#L627-L667
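The scheme above is three-point quadratic interpolation in the transformed abscissa 1/v, with d0, d1, d2 playing the role of value, slope, and curvature at the middle point. A self-contained check of that scheme on an exact quadratic (invented numbers, no dependence on the A table), which any quadratic should pass exactly:

def quad_through(x0, x1, x2, f, x):
    # Same scheme as above: second difference for the curvature, a
    # one-sided slope at the middle point x1, then evaluate at x.
    y0, y1, y2 = f(x0), f(x1), f(x2)
    d2 = 2. * ((y2 - y1) / (x2 - x1) - (y0 - y1) / (x0 - x1)) / (x2 - x0)
    if (x2 + x0) >= (x1 + x1):
        d1 = (y2 - y1) / (x2 - x1) - 0.5 * d2 * (x2 - x1)
    else:
        d1 = (y1 - y0) / (x1 - x0) + 0.5 * d2 * (x1 - x0)
    return (d2 / 2.) * (x - x1) ** 2. + d1 * (x - x1) + y1

f = lambda x: 3 * x * x - 2 * x + 1
assert abs(quad_through(0.1, 0.5, 1.0, f, 0.25) - f(0.25)) < 1e-12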
jython/jython3
def4f8ec47cb7a9c799ea4c745f12badf92c5769
lib-python/3.5.1/_pydecimal.py
python
Decimal.__float__
(self)
return float(s)
Float representation.
Float representation.
[ "Float", "representation", "." ]
def __float__(self):
    """Float representation."""
    if self._isnan():
        if self.is_snan():
            raise ValueError("Cannot convert signaling NaN to float")
        s = "-nan" if self._sign else "nan"
    else:
        s = str(self)
    return float(s)
[ "def", "__float__", "(", "self", ")", ":", "if", "self", ".", "_isnan", "(", ")", ":", "if", "self", ".", "is_snan", "(", ")", ":", "raise", "ValueError", "(", "\"Cannot convert signaling NaN to float\"", ")", "s", "=", "\"-nan\"", "if", "self", ".", "_sign", "else", "\"nan\"", "else", ":", "s", "=", "str", "(", "self", ")", "return", "float", "(", "s", ")" ]
https://github.com/jython/jython3/blob/def4f8ec47cb7a9c799ea4c745f12badf92c5769/lib-python/3.5.1/_pydecimal.py#L1598-L1606
blurstudio/cross3d
277968d1227de740fc87ef61005c75034420eadf
cross3d/abstract/abstractscenelayer.py
python
AbstractSceneLayer.altMaterialCount
(self)
return len(self._nativeAltMaterials())
\remarks return the number of alternate materials that this layer is associated with \return <int> count
\remarks return the number of alternate materials that this layer is associated with
[ "\\", "remarks", "return", "the", "number", "of", "alternate", "materials", "that", "this", "layer", "is", "associated", "with" ]
def altMaterialCount(self):
    """
    \remarks    return the number of alternate materials that this layer
                is associated with
    \return     <int> count
    """
    return len(self._nativeAltMaterials())
[ "def", "altMaterialCount", "(", "self", ")", ":", "return", "len", "(", "self", ".", "_nativeAltMaterials", "(", ")", ")" ]
https://github.com/blurstudio/cross3d/blob/277968d1227de740fc87ef61005c75034420eadf/cross3d/abstract/abstractscenelayer.py#L155-L161
caktus/django-timepiece
52515dec027664890efbc535429e1ba1ee152f40
timepiece/entries/views.py
python
Dashboard.process_progress
(self, entries, assignments)
return project_progress
Returns a list of progress summary data (pk, name, hours worked, and hours assigned) for each project either worked or assigned. The list is ordered by project name.
Returns a list of progress summary data (pk, name, hours worked, and hours assigned) for each project either worked or assigned. The list is ordered by project name.
[ "Returns", "a", "list", "of", "progress", "summary", "data", "(", "pk", "name", "hours", "worked", "and", "hours", "assigned", ")", "for", "each", "project", "either", "worked", "or", "assigned", ".", "The", "list", "is", "ordered", "by", "project", "name", "." ]
def process_progress(self, entries, assignments):
    """
    Returns a list of progress summary data (pk, name, hours worked,
    and hours assigned) for each project either worked or assigned.
    The list is ordered by project name.
    """
    # Determine all projects either worked or assigned.
    project_q = Q(id__in=assignments.values_list('project__id', flat=True))
    project_q |= Q(id__in=entries.values_list('project__id', flat=True))
    projects = Project.objects.filter(project_q).select_related('business')

    # Hours per project.
    project_data = {}
    for project in projects:
        try:
            assigned = assignments.get(project__id=project.pk).hours
        except ProjectHours.DoesNotExist:
            assigned = Decimal('0.00')
        project_data[project.pk] = {
            'project': project,
            'assigned': assigned,
            'worked': Decimal('0.00'),
        }
    for entry in entries:
        pk = entry.project_id
        hours = Decimal('%.5f' % (entry.get_total_seconds() / 3600.0))
        project_data[pk]['worked'] += hours

    # Sort by maximum of worked or assigned hours (highest first).
    key = lambda x: x['project'].name.lower()
    project_progress = sorted(project_data.values(), key=key)
    return project_progress
[ "def", "process_progress", "(", "self", ",", "entries", ",", "assignments", ")", ":", "# Determine all projects either worked or assigned.", "project_q", "=", "Q", "(", "id__in", "=", "assignments", ".", "values_list", "(", "'project__id'", ",", "flat", "=", "True", ")", ")", "project_q", "|=", "Q", "(", "id__in", "=", "entries", ".", "values_list", "(", "'project__id'", ",", "flat", "=", "True", ")", ")", "projects", "=", "Project", ".", "objects", ".", "filter", "(", "project_q", ")", ".", "select_related", "(", "'business'", ")", "# Hours per project.", "project_data", "=", "{", "}", "for", "project", "in", "projects", ":", "try", ":", "assigned", "=", "assignments", ".", "get", "(", "project__id", "=", "project", ".", "pk", ")", ".", "hours", "except", "ProjectHours", ".", "DoesNotExist", ":", "assigned", "=", "Decimal", "(", "'0.00'", ")", "project_data", "[", "project", ".", "pk", "]", "=", "{", "'project'", ":", "project", ",", "'assigned'", ":", "assigned", ",", "'worked'", ":", "Decimal", "(", "'0.00'", ")", ",", "}", "for", "entry", "in", "entries", ":", "pk", "=", "entry", ".", "project_id", "hours", "=", "Decimal", "(", "'%.5f'", "%", "(", "entry", ".", "get_total_seconds", "(", ")", "/", "3600.0", ")", ")", "project_data", "[", "pk", "]", "[", "'worked'", "]", "+=", "hours", "# Sort by maximum of worked or assigned hours (highest first).", "key", "=", "lambda", "x", ":", "x", "[", "'project'", "]", ".", "name", ".", "lower", "(", ")", "project_progress", "=", "sorted", "(", "project_data", ".", "values", "(", ")", ",", "key", "=", "key", ")", "return", "project_progress" ]
https://github.com/caktus/django-timepiece/blob/52515dec027664890efbc535429e1ba1ee152f40/timepiece/entries/views.py#L101-L134
leo-editor/leo-editor
383d6776d135ef17d73d935a2f0ecb3ac0e99494
leo/plugins/obsolete/swing_gui.py
python
leoSwingTree.allocateNodes
(self,where,lines)
Allocate Tk widgets in nodes that will become visible as the result of an upcoming scroll
Allocate Tk widgets in nodes that will become visible as the result of an upcoming scroll
[ "Allocate", "Tk", "widgets", "in", "nodes", "that", "will", "become", "visible", "as", "the", "result", "of", "an", "upcoming", "scroll" ]
def allocateNodes(self,where,lines):
    """Allocate Tk widgets in nodes that will become visible
    as the result of an upcoming scroll"""
    assert(where in ("above","below"))
    # print "allocateNodes: %d lines %s visible area" % (lines,where)
    # Expand the visible area: a little extra delta is safer.
    delta = lines * (self.line_height + 4)
    y1,y2 = self.visibleArea
    if where == "below":
        y2 += delta
    else:
        y1 = max(0.0,y1-delta)
    self.expandedVisibleArea=y1,y2
    # print "expandedArea: %5.1f %5.1f" % (y1,y2)
    # Allocate all nodes in expanded visible area.
    self.updatedNodeCount = 0
    self.updateTree(self.c.rootPosition(),self.root_left,self.root_top,0,0)
[ "def", "allocateNodes", "(", "self", ",", "where", ",", "lines", ")", ":", "assert", "(", "where", "in", "(", "\"above\"", ",", "\"below\"", ")", ")", "# print \"allocateNodes: %d lines %s visible area\" % (lines,where)", "# Expand the visible area: a little extra delta is safer.", "delta", "=", "lines", "*", "(", "self", ".", "line_height", "+", "4", ")", "y1", ",", "y2", "=", "self", ".", "visibleArea", "if", "where", "==", "\"below\"", ":", "y2", "+=", "delta", "else", ":", "y1", "=", "max", "(", "0.0", ",", "y1", "-", "delta", ")", "self", ".", "expandedVisibleArea", "=", "y1", ",", "y2", "# print \"expandedArea: %5.1f %5.1f\" % (y1,y2)", "# Allocate all nodes in expanded visible area.", "self", ".", "updatedNodeCount", "=", "0", "self", ".", "updateTree", "(", "self", ".", "c", ".", "rootPosition", "(", ")", ",", "self", ".", "root_left", ",", "self", ".", "root_top", ",", "0", ",", "0", ")" ]
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/plugins/obsolete/swing_gui.py#L6176-L6198
mfessenden/SceneGraph
0fa3429059c77c881d1b58b28e89dcb44c609909
ui/attributes.py
python
QIntEditor.initializeEditor
(self)
Set the widgets nodes values.
Set the widgets nodes values.
[ "Set", "the", "widgets", "nodes", "values", "." ]
def initializeEditor(self):
    """
    Set the widgets nodes values.
    """
    if not self.nodes or not self.attribute:
        return

    editor_value = self.default_value

    node_values = self.values
    if node_values:
        if len(node_values) > 1:
            pass
        elif len(node_values) == 1:
            if node_values[0]:
                editor_value = node_values[0]

    # set the editor value
    self.val1_edit.blockSignals(True)
    editor_value = int(editor_value)
    # set the current node values.
    self.val1_edit.setText(str(editor_value))
    self.val1_edit.blockSignals(False)
    self.val1_edit.valueChanged.connect(self.valueUpdatedAction)
[ "def", "initializeEditor", "(", "self", ")", ":", "if", "not", "self", ".", "nodes", "or", "not", "self", ".", "attribute", ":", "return", "editor_value", "=", "self", ".", "default_value", "node_values", "=", "self", ".", "values", "if", "node_values", ":", "if", "len", "(", "node_values", ")", ">", "1", ":", "pass", "elif", "len", "(", "node_values", ")", "==", "1", ":", "if", "node_values", "[", "0", "]", ":", "editor_value", "=", "node_values", "[", "0", "]", "# set the editor value", "self", ".", "val1_edit", ".", "blockSignals", "(", "True", ")", "editor_value", "=", "int", "(", "editor_value", ")", "# set the current node values.", "self", ".", "val1_edit", ".", "setText", "(", "str", "(", "editor_value", ")", ")", "self", ".", "val1_edit", ".", "blockSignals", "(", "False", ")", "self", ".", "val1_edit", ".", "valueChanged", ".", "connect", "(", "self", ".", "valueUpdatedAction", ")" ]
https://github.com/mfessenden/SceneGraph/blob/0fa3429059c77c881d1b58b28e89dcb44c609909/ui/attributes.py#L757-L783
allenai/allennlp
a3d71254fcc0f3615910e9c3d48874515edf53e0
allennlp/modules/conditional_random_field.py
python
ConditionalRandomField.forward
( self, inputs: torch.Tensor, tags: torch.Tensor, mask: torch.BoolTensor = None )
return torch.sum(log_numerator - log_denominator)
Computes the log likelihood.
Computes the log likelihood.
[ "Computes", "the", "log", "likelihood", "." ]
def forward(
    self, inputs: torch.Tensor, tags: torch.Tensor, mask: torch.BoolTensor = None
) -> torch.Tensor:
    """
    Computes the log likelihood.
    """
    if mask is None:
        mask = torch.ones(*tags.size(), dtype=torch.bool, device=inputs.device)
    else:
        # The code below fails in weird ways if this isn't a bool tensor, so we make sure.
        mask = mask.to(torch.bool)

    log_denominator = self._input_likelihood(inputs, mask)
    log_numerator = self._joint_likelihood(inputs, tags, mask)

    return torch.sum(log_numerator - log_denominator)
[ "def", "forward", "(", "self", ",", "inputs", ":", "torch", ".", "Tensor", ",", "tags", ":", "torch", ".", "Tensor", ",", "mask", ":", "torch", ".", "BoolTensor", "=", "None", ")", "->", "torch", ".", "Tensor", ":", "if", "mask", "is", "None", ":", "mask", "=", "torch", ".", "ones", "(", "*", "tags", ".", "size", "(", ")", ",", "dtype", "=", "torch", ".", "bool", ",", "device", "=", "inputs", ".", "device", ")", "else", ":", "# The code below fails in weird ways if this isn't a bool tensor, so we make sure.", "mask", "=", "mask", ".", "to", "(", "torch", ".", "bool", ")", "log_denominator", "=", "self", ".", "_input_likelihood", "(", "inputs", ",", "mask", ")", "log_numerator", "=", "self", ".", "_joint_likelihood", "(", "inputs", ",", "tags", ",", "mask", ")", "return", "torch", ".", "sum", "(", "log_numerator", "-", "log_denominator", ")" ]
https://github.com/allenai/allennlp/blob/a3d71254fcc0f3615910e9c3d48874515edf53e0/allennlp/modules/conditional_random_field.py#L318-L334
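A minimal call, sketched under the assumption that the module is constructed as in AllenNLP; the tensors are random placeholders (batch of 2 sequences of length 7, 5 tags):

import torch
from allennlp.modules import ConditionalRandomField

crf = ConditionalRandomField(num_tags=5)
logits = torch.randn(2, 7, 5)                 # (batch, seq_len, num_tags)
tags = torch.randint(0, 5, (2, 7))            # gold tag indices
mask = torch.ones(2, 7, dtype=torch.bool)     # no padding in this toy batch

log_likelihood = crf(logits, tags, mask)      # scalar; negate it for a loss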
omerbsezer/LSTM_RNN_Tutorials_with_Demo
d3bb6b7dac10159b2e8a0a21fbf27e0078c3321b
SentimentAnalysisProject/emo_utils.py
python
read_glove_vecs
(glove_file)
return words_to_index, index_to_words, word_to_vec_map
[]
def read_glove_vecs(glove_file):
    with open(glove_file, 'r', encoding="utf8") as f:
        words = set()
        word_to_vec_map = {}
        for line in f:
            line = line.strip().split()
            curr_word = line[0]
            words.add(curr_word)
            word_to_vec_map[curr_word] = np.array(line[1:], dtype=np.float64)

        i = 1
        words_to_index = {}
        index_to_words = {}
        for w in sorted(words):
            words_to_index[w] = i
            index_to_words[i] = w
            i = i + 1
    return words_to_index, index_to_words, word_to_vec_map
[ "def", "read_glove_vecs", "(", "glove_file", ")", ":", "with", "open", "(", "glove_file", ",", "'r'", ",", "encoding", "=", "\"utf8\"", ")", "as", "f", ":", "words", "=", "set", "(", ")", "word_to_vec_map", "=", "{", "}", "for", "line", "in", "f", ":", "line", "=", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "curr_word", "=", "line", "[", "0", "]", "words", ".", "add", "(", "curr_word", ")", "word_to_vec_map", "[", "curr_word", "]", "=", "np", ".", "array", "(", "line", "[", "1", ":", "]", ",", "dtype", "=", "np", ".", "float64", ")", "i", "=", "1", "words_to_index", "=", "{", "}", "index_to_words", "=", "{", "}", "for", "w", "in", "sorted", "(", "words", ")", ":", "words_to_index", "[", "w", "]", "=", "i", "index_to_words", "[", "i", "]", "=", "w", "i", "=", "i", "+", "1", "return", "words_to_index", ",", "index_to_words", ",", "word_to_vec_map" ]
https://github.com/omerbsezer/LSTM_RNN_Tutorials_with_Demo/blob/d3bb6b7dac10159b2e8a0a21fbf27e0078c3321b/SentimentAnalysisProject/emo_utils.py#L8-L25
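A sketch of calling the function above on a standard GloVe text file; the file path and the 50-dimensional vectors are assumptions:

import numpy as np   # read_glove_vecs relies on numpy being available as np

words_to_index, index_to_words, word_to_vec_map = read_glove_vecs('glove.6B.50d.txt')  # hypothetical path
print(word_to_vec_map['the'].shape)   # (50,) for 50-dimensional vectors
print(words_to_index['the'])          # 1-based index assigned in sorted vocabulary order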
wizyoung/googletranslate.popclipext
a3c465685a5a75213e2ec8517eb98d336984bc50
src/h2/settings.py
python
Settings.acknowledge
(self)
return changed_settings
The settings have been acknowledged, either by the user (remote settings) or by the remote peer (local settings). :returns: A dict of {setting: ChangedSetting} that were applied.
The settings have been acknowledged, either by the user (remote settings) or by the remote peer (local settings).
[ "The", "settings", "have", "been", "acknowledged", "either", "by", "the", "user", "(", "remote", "settings", ")", "or", "by", "the", "remote", "peer", "(", "local", "settings", ")", "." ]
def acknowledge(self): """ The settings have been acknowledged, either by the user (remote settings) or by the remote peer (local settings). :returns: A dict of {setting: ChangedSetting} that were applied. """ changed_settings = {} # If there is more than one setting in the list, we have a setting # value outstanding. Update them. for k, v in self._settings.items(): if len(v) > 1: old_setting = v.popleft() new_setting = v[0] changed_settings[k] = ChangedSetting( k, old_setting, new_setting ) return changed_settings
[ "def", "acknowledge", "(", "self", ")", ":", "changed_settings", "=", "{", "}", "# If there is more than one setting in the list, we have a setting", "# value outstanding. Update them.", "for", "k", ",", "v", "in", "self", ".", "_settings", ".", "items", "(", ")", ":", "if", "len", "(", "v", ")", ">", "1", ":", "old_setting", "=", "v", ".", "popleft", "(", ")", "new_setting", "=", "v", "[", "0", "]", "changed_settings", "[", "k", "]", "=", "ChangedSetting", "(", "k", ",", "old_setting", ",", "new_setting", ")", "return", "changed_settings" ]
https://github.com/wizyoung/googletranslate.popclipext/blob/a3c465685a5a75213e2ec8517eb98d336984bc50/src/h2/settings.py#L160-L179
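A hedged sketch of the acknowledge() flow above, assuming h2's public Settings API (Settings(client=...), SettingCodes, and the ChangedSetting fields) matches this vendored copy:

from h2.settings import Settings, SettingCodes

settings = Settings(client=True)                  # local settings we will advertise
settings[SettingCodes.HEADER_TABLE_SIZE] = 8192   # queue a new, not-yet-acknowledged value
changed = settings.acknowledge()                  # peer ACKed: pop old values, apply new ones
for code, change in changed.items():
    print(code, change.original_value, '->', change.new_value)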
Source-Python-Dev-Team/Source.Python
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
addons/source-python/Python3/asyncio/queues.py
python
Queue.put
(self, item)
return self.put_nowait(item)
Put an item into the queue. Put an item into the queue. If the queue is full, wait until a free slot is available before adding item. This method is a coroutine.
Put an item into the queue.
[ "Put", "an", "item", "into", "the", "queue", "." ]
def put(self, item): """Put an item into the queue. Put an item into the queue. If the queue is full, wait until a free slot is available before adding item. This method is a coroutine. """ while self.full(): putter = self._loop.create_future() self._putters.append(putter) try: yield from putter except: putter.cancel() # Just in case putter is not done yet. if not self.full() and not putter.cancelled(): # We were woken up by get_nowait(), but can't take # the call. Wake up the next in line. self._wakeup_next(self._putters) raise return self.put_nowait(item)
[ "def", "put", "(", "self", ",", "item", ")", ":", "while", "self", ".", "full", "(", ")", ":", "putter", "=", "self", ".", "_loop", ".", "create_future", "(", ")", "self", ".", "_putters", ".", "append", "(", "putter", ")", "try", ":", "yield", "from", "putter", "except", ":", "putter", ".", "cancel", "(", ")", "# Just in case putter is not done yet.", "if", "not", "self", ".", "full", "(", ")", "and", "not", "putter", ".", "cancelled", "(", ")", ":", "# We were woken up by get_nowait(), but can't take", "# the call. Wake up the next in line.", "self", ".", "_wakeup_next", "(", "self", ".", "_putters", ")", "raise", "return", "self", ".", "put_nowait", "(", "item", ")" ]
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/Python3/asyncio/queues.py#L121-L141
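An illustrative sketch of the blocking behaviour described above, written with modern async/await syntax rather than the yield from form in this stdlib copy:

import asyncio

async def main():
    q = asyncio.Queue(maxsize=1)
    await q.put('a')                    # a free slot exists, returns immediately
    loop = asyncio.get_running_loop()
    loop.call_later(0.1, q.get_nowait)  # frees the slot shortly, waking the putter
    await q.put('b')                    # queue is full: waits until the slot frees
    print(q.qsize())                    # 1

asyncio.run(main())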
twilio/twilio-python
6e1e811ea57a1edfadd5161ace87397c563f6915
twilio/rest/video/v1/room/room_participant/room_participant_subscribed_track.py
python
SubscribedTrackList.stream
(self, limit=None, page_size=None)
return self._version.stream(page, limits['limit'])
Streams SubscribedTrackInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. The results are returned as a generator, so this operation is memory efficient. :param int limit: Upper limit for the number of records to return. stream() guarantees to never return more than limit. Default is no limit :param int page_size: Number of records to fetch per request, when not set will use the default value of 50 records. If no page_size is defined but a limit is defined, stream() will attempt to read the limit with the most efficient page size, i.e. min(limit, 1000) :returns: Generator that will yield up to limit results :rtype: list[twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance]
Streams SubscribedTrackInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. The results are returned as a generator, so this operation is memory efficient.
[ "Streams", "SubscribedTrackInstance", "records", "from", "the", "API", "as", "a", "generator", "stream", ".", "This", "operation", "lazily", "loads", "records", "as", "efficiently", "as", "possible", "until", "the", "limit", "is", "reached", ".", "The", "results", "are", "returned", "as", "a", "generator", "so", "this", "operation", "is", "memory", "efficient", "." ]
def stream(self, limit=None, page_size=None): """ Streams SubscribedTrackInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. The results are returned as a generator, so this operation is memory efficient. :param int limit: Upper limit for the number of records to return. stream() guarantees to never return more than limit. Default is no limit :param int page_size: Number of records to fetch per request, when not set will use the default value of 50 records. If no page_size is defined but a limit is defined, stream() will attempt to read the limit with the most efficient page size, i.e. min(limit, 1000) :returns: Generator that will yield up to limit results :rtype: list[twilio.rest.video.v1.room.room_participant.room_participant_subscribed_track.SubscribedTrackInstance] """ limits = self._version.read_limits(limit, page_size) page = self.page(page_size=limits['page_size'], ) return self._version.stream(page, limits['limit'])
[ "def", "stream", "(", "self", ",", "limit", "=", "None", ",", "page_size", "=", "None", ")", ":", "limits", "=", "self", ".", "_version", ".", "read_limits", "(", "limit", ",", "page_size", ")", "page", "=", "self", ".", "page", "(", "page_size", "=", "limits", "[", "'page_size'", "]", ",", ")", "return", "self", ".", "_version", ".", "stream", "(", "page", ",", "limits", "[", "'limit'", "]", ")" ]
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/video/v1/room/room_participant/room_participant_subscribed_track.py#L36-L57
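A sketch of driving stream() through the Twilio client; the credentials and SIDs are placeholders, and the printed instance attribute is an assumption:

from twilio.rest import Client

client = Client('ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', 'auth_token')   # placeholder credentials
tracks = (client.video
          .rooms('RMxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')                # placeholder room SID
          .participants('PAxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')         # placeholder participant SID
          .subscribed_tracks
          .stream(limit=20, page_size=20))                            # lazily pages, yields at most 20 records
for track in tracks:
    print(track.sid)                                                  # assumed field name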
openstack/ironic
b392dc19bcd29cef5a69ec00d2f18a7a19a679e5
ironic/drivers/modules/image_utils.py
python
ImageHandler.publish_image
(self, image_file, object_name)
return image_url
Make image file downloadable. Depending on ironic settings, pushes given file into Swift or copies it over to local HTTP server's document root and returns publicly accessible URL leading to the given file. :param image_file: path to file to publish :param object_name: name of the published file :return: a URL to download published file
Make image file downloadable.
[ "Make", "image", "file", "downloadable", "." ]
def publish_image(self, image_file, object_name): """Make image file downloadable. Depending on ironic settings, pushes given file into Swift or copies it over to local HTTP server's document root and returns publicly accessible URL leading to the given file. :param image_file: path to file to publish :param object_name: name of the published file :return: a URL to download published file """ if self.swift_enabled: container = self._container timeout = self._timeout object_headers = {'X-Delete-After': str(timeout)} swift_api = swift.SwiftAPI() swift_api.create_object(container, object_name, image_file, object_headers=object_headers) image_url = swift_api.get_temp_url(container, object_name, timeout) image_url = self._append_filename_param( image_url, os.path.basename(image_file)) else: public_dir = os.path.join(CONF.deploy.http_root, self._image_subdir) if not os.path.exists(public_dir): os.mkdir(public_dir, 0o755) published_file = os.path.join(public_dir, object_name) try: os.link(image_file, published_file) os.chmod(image_file, self._file_permission) except OSError as exc: LOG.debug( "Could not hardlink image file %(image)s to public " "location %(public)s (will copy it over): " "%(error)s", {'image': image_file, 'public': published_file, 'error': exc}) shutil.copyfile(image_file, published_file) os.chmod(published_file, self._file_permission) http_url = CONF.deploy.external_http_url or CONF.deploy.http_url image_url = os.path.join(http_url, self._image_subdir, object_name) return image_url
[ "def", "publish_image", "(", "self", ",", "image_file", ",", "object_name", ")", ":", "if", "self", ".", "swift_enabled", ":", "container", "=", "self", ".", "_container", "timeout", "=", "self", ".", "_timeout", "object_headers", "=", "{", "'X-Delete-After'", ":", "str", "(", "timeout", ")", "}", "swift_api", "=", "swift", ".", "SwiftAPI", "(", ")", "swift_api", ".", "create_object", "(", "container", ",", "object_name", ",", "image_file", ",", "object_headers", "=", "object_headers", ")", "image_url", "=", "swift_api", ".", "get_temp_url", "(", "container", ",", "object_name", ",", "timeout", ")", "image_url", "=", "self", ".", "_append_filename_param", "(", "image_url", ",", "os", ".", "path", ".", "basename", "(", "image_file", ")", ")", "else", ":", "public_dir", "=", "os", ".", "path", ".", "join", "(", "CONF", ".", "deploy", ".", "http_root", ",", "self", ".", "_image_subdir", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "public_dir", ")", ":", "os", ".", "mkdir", "(", "public_dir", ",", "0o755", ")", "published_file", "=", "os", ".", "path", ".", "join", "(", "public_dir", ",", "object_name", ")", "try", ":", "os", ".", "link", "(", "image_file", ",", "published_file", ")", "os", ".", "chmod", "(", "image_file", ",", "self", ".", "_file_permission", ")", "except", "OSError", "as", "exc", ":", "LOG", ".", "debug", "(", "\"Could not hardlink image file %(image)s to public \"", "\"location %(public)s (will copy it over): \"", "\"%(error)s\"", ",", "{", "'image'", ":", "image_file", ",", "'public'", ":", "published_file", ",", "'error'", ":", "exc", "}", ")", "shutil", ".", "copyfile", "(", "image_file", ",", "published_file", ")", "os", ".", "chmod", "(", "published_file", ",", "self", ".", "_file_permission", ")", "http_url", "=", "CONF", ".", "deploy", ".", "external_http_url", "or", "CONF", ".", "deploy", ".", "http_url", "image_url", "=", "os", ".", "path", ".", "join", "(", "http_url", ",", "self", ".", "_image_subdir", ",", "object_name", ")", "return", "image_url" ]
https://github.com/openstack/ironic/blob/b392dc19bcd29cef5a69ec00d2f18a7a19a679e5/ironic/drivers/modules/image_utils.py#L172-L226
nottombrown/rl-teacher
b2c2201e9d2457b13185424a19da7209364f23df
agents/pposgd-mpi/pposgd_mpi/common/tf_util.py
python
is_placeholder
(x)
return type(x) is tf.Tensor and len(x.op.inputs) == 0
[]
def is_placeholder(x): return type(x) is tf.Tensor and len(x.op.inputs) == 0
[ "def", "is_placeholder", "(", "x", ")", ":", "return", "type", "(", "x", ")", "is", "tf", ".", "Tensor", "and", "len", "(", "x", ".", "op", ".", "inputs", ")", "==", "0" ]
https://github.com/nottombrown/rl-teacher/blob/b2c2201e9d2457b13185424a19da7209364f23df/agents/pposgd-mpi/pposgd_mpi/common/tf_util.py#L100-L101
numenta/nupic
b9ebedaf54f49a33de22d8d44dff7c765cdb5548
external/linux32/lib/python2.6/site-packages/matplotlib/patches.py
python
Rectangle.__init__
(self, xy, width, height, **kwargs)
*fill* is a boolean indicating whether to fill the rectangle Valid kwargs are: %(Patch)s
[]
def __init__(self, xy, width, height, **kwargs): """ *fill* is a boolean indicating whether to fill the rectangle Valid kwargs are: %(Patch)s """ Patch.__init__(self, **kwargs) self._x = xy[0] self._y = xy[1] self._width = width self._height = height # Note: This cannot be calculated until this is added to an Axes self._rect_transform = transforms.IdentityTransform()
[ "def", "__init__", "(", "self", ",", "xy", ",", "width", ",", "height", ",", "*", "*", "kwargs", ")", ":", "Patch", ".", "__init__", "(", "self", ",", "*", "*", "kwargs", ")", "self", ".", "_x", "=", "xy", "[", "0", "]", "self", ".", "_y", "=", "xy", "[", "1", "]", "self", ".", "_width", "=", "width", "self", ".", "_height", "=", "height", "# Note: This cannot be calculated until this is added to an Axes", "self", ".", "_rect_transform", "=", "transforms", ".", "IdentityTransform", "(", ")" ]
https://github.com/numenta/nupic/blob/b9ebedaf54f49a33de22d8d44dff7c765cdb5548/external/linux32/lib/python2.6/site-packages/matplotlib/patches.py#L424-L440
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/sunau.py
python
Au_write.setparams
(self, params)
[]
def setparams(self, params): nchannels, sampwidth, framerate, nframes, comptype, compname = params self.setnchannels(nchannels) self.setsampwidth(sampwidth) self.setframerate(framerate) self.setnframes(nframes) self.setcomptype(comptype, compname)
[ "def", "setparams", "(", "self", ",", "params", ")", ":", "nchannels", ",", "sampwidth", ",", "framerate", ",", "nframes", ",", "comptype", ",", "compname", "=", "params", "self", ".", "setnchannels", "(", "nchannels", ")", "self", ".", "setsampwidth", "(", "sampwidth", ")", "self", ".", "setframerate", "(", "framerate", ")", "self", ".", "setnframes", "(", "nframes", ")", "self", ".", "setcomptype", "(", "comptype", ",", "compname", ")" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/sunau.py#L400-L406
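A sketch of setparams() on a writable stream; note the sunau module was removed from the stdlib in Python 3.13, so this assumes an older interpreter:

import sunau

f = sunau.open('tone.au', 'wb')                         # hypothetical output path
f.setparams((1, 2, 8000, 0, 'NONE', 'not compressed'))  # nchannels, sampwidth, framerate, nframes, comptype, compname
f.writeframes(b'\x00\x00' * 8000)                       # one second of 16-bit silence
f.close()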
qutebrowser/qutebrowser
3a2aaaacbf97f4bf0c72463f3da94ed2822a5442
qutebrowser/browser/greasemonkey.py
python
GreasemonkeyManager.all_scripts
(self)
return self._run_start + self._run_end + self._run_idle
Return all scripts found in the configured script directory.
Return all scripts found in the configured script directory.
[ "Return", "all", "scripts", "found", "in", "the", "configured", "script", "directory", "." ]
def all_scripts(self): """Return all scripts found in the configured script directory.""" return self._run_start + self._run_end + self._run_idle
[ "def", "all_scripts", "(", "self", ")", ":", "return", "self", ".", "_run_start", "+", "self", ".", "_run_end", "+", "self", ".", "_run_idle" ]
https://github.com/qutebrowser/qutebrowser/blob/3a2aaaacbf97f4bf0c72463f3da94ed2822a5442/qutebrowser/browser/greasemonkey.py#L445-L447
numenta/nupic
b9ebedaf54f49a33de22d8d44dff7c765cdb5548
external/linux32/lib/python2.6/site-packages/matplotlib/delaunay/triangulate.py
python
Triangulation.nn_interpolator
(self, z, default_value=np.nan)
return NNInterpolator(self, z, default_value)
Get an object which can interpolate within the convex hull by the natural neighbors method. z -- an array of floats giving the known function values at each point in the triangulation.
Get an object which can interpolate within the convex hull by the natural neighbors method.
[ "Get", "an", "object", "which", "can", "interpolate", "within", "the", "convex", "hull", "by", "the", "natural", "neighbors", "method", "." ]
def nn_interpolator(self, z, default_value=np.nan): """Get an object which can interpolate within the convex hull by the natural neighbors method. z -- an array of floats giving the known function values at each point in the triangulation. """ z = np.asarray(z, dtype=np.float64) if z.shape != self.old_shape: raise ValueError("z must be the same shape as x and y") if self.j_unique is not None: z = z[self.j_unique] return NNInterpolator(self, z, default_value)
[ "def", "nn_interpolator", "(", "self", ",", "z", ",", "default_value", "=", "np", ".", "nan", ")", ":", "z", "=", "np", ".", "asarray", "(", "z", ",", "dtype", "=", "np", ".", "float64", ")", "if", "z", ".", "shape", "!=", "self", ".", "old_shape", ":", "raise", "ValueError", "(", "\"z must be the same shape as x and y\"", ")", "if", "self", ".", "j_unique", "is", "not", "None", ":", "z", "=", "z", "[", "self", ".", "j_unique", "]", "return", "NNInterpolator", "(", "self", ",", "z", ",", "default_value", ")" ]
https://github.com/numenta/nupic/blob/b9ebedaf54f49a33de22d8d44dff7c765cdb5548/external/linux32/lib/python2.6/site-packages/matplotlib/delaunay/triangulate.py#L145-L158
lutris/lutris
66675a4d5537f6b2a2ba2b6df0b3cdf8924c823a
lutris/util/linux.py
python
LinuxSystem.has_steam
(self)
return bool(system.find_executable("steam"))
Return whether Steam is installed locally
Return whether Steam is installed locally
[ "Return", "whether", "Steam", "is", "installed", "locally" ]
def has_steam(self): """Return whether Steam is installed locally""" return bool(system.find_executable("steam"))
[ "def", "has_steam", "(", "self", ")", ":", "return", "bool", "(", "system", ".", "find_executable", "(", "\"steam\"", ")", ")" ]
https://github.com/lutris/lutris/blob/66675a4d5537f6b2a2ba2b6df0b3cdf8924c823a/lutris/util/linux.py#L219-L221
facebookresearch/pytorch_GAN_zoo
b75dee40918caabb4fe7ec561522717bf096a8cb
models/utils/image_transform.py
python
NumpyFlip.__init__
(self, p=0.5)
[]
def __init__(self, p=0.5): self.p = p random.seed(None)
[ "def", "__init__", "(", "self", ",", "p", "=", "0.5", ")", ":", "self", ".", "p", "=", "p", "random", ".", "seed", "(", "None", ")" ]
https://github.com/facebookresearch/pytorch_GAN_zoo/blob/b75dee40918caabb4fe7ec561522717bf096a8cb/models/utils/image_transform.py#L38-L40
thinkle/gourmet
8af29c8ded24528030e5ae2ea3461f61c1e5a575
gourmet/threadManager.py
python
Terminated.__init__
(self, value)
[]
def __init__ (self, value): self.value=value
[ "def", "__init__", "(", "self", ",", "value", ")", ":", "self", ".", "value", "=", "value" ]
https://github.com/thinkle/gourmet/blob/8af29c8ded24528030e5ae2ea3461f61c1e5a575/gourmet/threadManager.py#L55-L56
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/sms/v20190711/models.py
python
ModifySmsTemplateResponse.__init__
(self)
r""" :param ModifyTemplateStatus: 修改模板参数响应 :type ModifyTemplateStatus: :class:`tencentcloud.sms.v20190711.models.ModifyTemplateStatus` :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str
r""" :param ModifyTemplateStatus: 修改模板参数响应 :type ModifyTemplateStatus: :class:`tencentcloud.sms.v20190711.models.ModifyTemplateStatus` :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str
[ "r", ":", "param", "ModifyTemplateStatus", ":", "修改模板参数响应", ":", "type", "ModifyTemplateStatus", ":", ":", "class", ":", "tencentcloud", ".", "sms", ".", "v20190711", ".", "models", ".", "ModifyTemplateStatus", ":", "param", "RequestId", ":", "唯一请求", "ID,每次请求都会返回。定位问题时需要提供该次请求的", "RequestId。", ":", "type", "RequestId", ":", "str" ]
def __init__(self): r""" :param ModifyTemplateStatus: 修改模板参数响应 :type ModifyTemplateStatus: :class:`tencentcloud.sms.v20190711.models.ModifyTemplateStatus` :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.ModifyTemplateStatus = None self.RequestId = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "ModifyTemplateStatus", "=", "None", "self", ".", "RequestId", "=", "None" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/sms/v20190711/models.py#L897-L905
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/imghdr.py
python
test_xbm
(h, f)
X bitmap (X10 or X11)
X bitmap (X10 or X11)
[ "X", "bitmap", "(", "X10", "or", "X11", ")" ]
def test_xbm(h, f): """X bitmap (X10 or X11)""" if h.startswith(b'#define '): return 'xbm'
[ "def", "test_xbm", "(", "h", ",", "f", ")", ":", "if", "h", ".", "startswith", "(", "b'#define '", ")", ":", "return", "'xbm'" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/imghdr.py#L100-L103
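The test above is one of the entries imghdr.what() consults; calling it directly shows the contract (header bytes in, format name or None out):

print(test_xbm(b'#define test_width 16', None))   # -> 'xbm'
print(test_xbm(b'\x89PNG\r\n\x1a\n', None))       # -> None (not an X bitmap)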
nelson-liu/contextual-repr-analysis
1550abe0f2cad8586ac891e44dc2c5256d7d0685
contexteval/data/dataset_readers/conll2003_ner.py
python
_is_divider
(line: str)
[]
def _is_divider(line: str) -> bool: empty_line = line.strip() == '' if empty_line: return True else: first_token = line.split()[0] if first_token == "-DOCSTART-": return True else: return False
[ "def", "_is_divider", "(", "line", ":", "str", ")", "->", "bool", ":", "empty_line", "=", "line", ".", "strip", "(", ")", "==", "''", "if", "empty_line", ":", "return", "True", "else", ":", "first_token", "=", "line", ".", "split", "(", ")", "[", "0", "]", "if", "first_token", "==", "\"-DOCSTART-\"", ":", "return", "True", "else", ":", "return", "False" ]
https://github.com/nelson-liu/contextual-repr-analysis/blob/1550abe0f2cad8586ac891e44dc2c5256d7d0685/contexteval/data/dataset_readers/conll2003_ner.py#L17-L26
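A sketch of the usual consumption pattern for a divider predicate like the one above: group raw CoNLL-2003 lines with itertools.groupby and keep only the non-divider runs (the sample lines are illustrative):

import itertools

lines = [
    '-DOCSTART- -X- -X- O\n',   # document marker: treated as a divider
    '\n',                       # blank line: also a divider
    'EU NNP B-NP B-ORG\n',
    'rejects VBZ B-VP O\n',
    '\n',
]
for is_divider, group in itertools.groupby(lines, _is_divider):
    if not is_divider:
        print([line.split() for line in group])   # one token-row list per sentence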
MycroftAI/mycroft-core
3d963cee402e232174850f36918313e87313fb13
mycroft/enclosure/api.py
python
EnclosureAPI.register
(self, skill_name="")
Registers a skill as active. Used for speak() and speak_dialog() to 'patch' a previous implementation. Somewhat hacky.
Registers a skill as active. Used for speak() and speak_dialog() to 'patch' a previous implementation. Somewhat hacky.
[ "Registers", "a", "skill", "as", "active", ".", "Used", "for", "speak", "()", "and", "speak_dialog", "()", "to", "patch", "a", "previous", "implementation", ".", "Somewhat", "hacky", "." ]
def register(self, skill_name=""): """Registers a skill as active. Used for speak() and speak_dialog() to 'patch' a previous implementation. Somewhat hacky. """ if self.name != "": self.display_manager.set_active(self.name) else: self.display_manager.set_active(skill_name)
[ "def", "register", "(", "self", ",", "skill_name", "=", "\"\"", ")", ":", "if", "self", ".", "name", "!=", "\"\"", ":", "self", ".", "display_manager", ".", "set_active", "(", "self", ".", "name", ")", "else", ":", "self", ".", "display_manager", ".", "set_active", "(", "skill_name", ")" ]
https://github.com/MycroftAI/mycroft-core/blob/3d963cee402e232174850f36918313e87313fb13/mycroft/enclosure/api.py#L44-L51
kivy/python-for-android
4ecaa5fe01aa25e3bc8cadc52ae481645754f955
pythonforandroid/logger.py
python
shprint
(command, *args, **kwargs)
return output
Runs the command (which should be an sh.Command instance), while logging the output.
Runs the command (which should be an sh.Command instance), while logging the output.
[ "Runs", "the", "command", "(", "which", "should", "be", "an", "sh", ".", "Command", "instance", ")", "while", "logging", "the", "output", "." ]
def shprint(command, *args, **kwargs): '''Runs the command (which should be an sh.Command instance), while logging the output.''' kwargs["_iter"] = True kwargs["_out_bufsize"] = 1 kwargs["_err_to_out"] = True kwargs["_bg"] = True is_critical = kwargs.pop('_critical', False) tail_n = kwargs.pop('_tail', None) full_debug = False if "P4A_FULL_DEBUG" in os.environ: tail_n = 0 full_debug = True filter_in = kwargs.pop('_filter', None) filter_out = kwargs.pop('_filterout', None) if len(logger.handlers) > 1: logger.removeHandler(logger.handlers[1]) columns = get_console_width() command_path = str(command).split('/') command_string = command_path[-1] string = ' '.join(['{}->{} running'.format(Out_Fore.LIGHTBLACK_EX, Out_Style.RESET_ALL), command_string] + list(args)) # If logging is not in DEBUG mode, trim the command if necessary if logger.level > logging.DEBUG: logger.info('{}{}'.format(shorten_string(string, columns - 12), Err_Style.RESET_ALL)) else: logger.debug('{}{}'.format(string, Err_Style.RESET_ALL)) need_closing_newline = False try: msg_hdr = ' working: ' msg_width = columns - len(msg_hdr) - 1 output = command(*args, **kwargs) for line in output: if isinstance(line, bytes): line = line.decode('utf-8', errors='replace') if logger.level > logging.DEBUG: if full_debug: stdout.write(line) stdout.flush() continue msg = line.replace( '\n', ' ').replace( '\t', ' ').replace( '\b', ' ').rstrip() if msg: if "CI" not in os.environ: stdout.write(u'{}\r{}{:<{width}}'.format( Err_Style.RESET_ALL, msg_hdr, shorten_string(msg, msg_width), width=msg_width)) stdout.flush() need_closing_newline = True else: logger.debug(''.join(['\t', line.rstrip()])) if need_closing_newline: stdout.write('{}\r{:>{width}}\r'.format( Err_Style.RESET_ALL, ' ', width=(columns - 1))) stdout.flush() except sh.ErrorReturnCode as err: if need_closing_newline: stdout.write('{}\r{:>{width}}\r'.format( Err_Style.RESET_ALL, ' ', width=(columns - 1))) stdout.flush() if tail_n is not None or filter_in or filter_out: def printtail(out, name, forecolor, tail_n=0, re_filter_in=None, re_filter_out=None): lines = out.splitlines() if re_filter_in is not None: lines = [line for line in lines if re_filter_in.search(line)] if re_filter_out is not None: lines = [line for line in lines if not re_filter_out.search(line)] if tail_n == 0 or len(lines) <= tail_n: info('{}:\n{}\t{}{}'.format( name, forecolor, '\t\n'.join(lines), Out_Fore.RESET)) else: info('{} (last {} lines of {}):\n{}\t{}{}'.format( name, tail_n, len(lines), forecolor, '\t\n'.join([s for s in lines[-tail_n:]]), Out_Fore.RESET)) printtail(err.stdout.decode('utf-8'), 'STDOUT', Out_Fore.YELLOW, tail_n, re.compile(filter_in) if filter_in else None, re.compile(filter_out) if filter_out else None) printtail(err.stderr.decode('utf-8'), 'STDERR', Err_Fore.RED) if is_critical or full_debug: env = kwargs.get("_env") if env is not None: info("{}ENV:{}\n{}\n".format( Err_Fore.YELLOW, Err_Fore.RESET, "\n".join( "export {}='{}'".format(n, v) for n, v in env.items()))) info("{}COMMAND:{}\ncd {} && {} {}\n".format( Err_Fore.YELLOW, Err_Fore.RESET, os.getcwd(), command, ' '.join(args))) warning("{}ERROR: {} failed!{}".format( Err_Fore.RED, command, Err_Fore.RESET)) if is_critical: exit(1) else: raise return output
[ "def", "shprint", "(", "command", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "\"_iter\"", "]", "=", "True", "kwargs", "[", "\"_out_bufsize\"", "]", "=", "1", "kwargs", "[", "\"_err_to_out\"", "]", "=", "True", "kwargs", "[", "\"_bg\"", "]", "=", "True", "is_critical", "=", "kwargs", ".", "pop", "(", "'_critical'", ",", "False", ")", "tail_n", "=", "kwargs", ".", "pop", "(", "'_tail'", ",", "None", ")", "full_debug", "=", "False", "if", "\"P4A_FULL_DEBUG\"", "in", "os", ".", "environ", ":", "tail_n", "=", "0", "full_debug", "=", "True", "filter_in", "=", "kwargs", ".", "pop", "(", "'_filter'", ",", "None", ")", "filter_out", "=", "kwargs", ".", "pop", "(", "'_filterout'", ",", "None", ")", "if", "len", "(", "logger", ".", "handlers", ")", ">", "1", ":", "logger", ".", "removeHandler", "(", "logger", ".", "handlers", "[", "1", "]", ")", "columns", "=", "get_console_width", "(", ")", "command_path", "=", "str", "(", "command", ")", ".", "split", "(", "'/'", ")", "command_string", "=", "command_path", "[", "-", "1", "]", "string", "=", "' '", ".", "join", "(", "[", "'{}->{} running'", ".", "format", "(", "Out_Fore", ".", "LIGHTBLACK_EX", ",", "Out_Style", ".", "RESET_ALL", ")", ",", "command_string", "]", "+", "list", "(", "args", ")", ")", "# If logging is not in DEBUG mode, trim the command if necessary", "if", "logger", ".", "level", ">", "logging", ".", "DEBUG", ":", "logger", ".", "info", "(", "'{}{}'", ".", "format", "(", "shorten_string", "(", "string", ",", "columns", "-", "12", ")", ",", "Err_Style", ".", "RESET_ALL", ")", ")", "else", ":", "logger", ".", "debug", "(", "'{}{}'", ".", "format", "(", "string", ",", "Err_Style", ".", "RESET_ALL", ")", ")", "need_closing_newline", "=", "False", "try", ":", "msg_hdr", "=", "' working: '", "msg_width", "=", "columns", "-", "len", "(", "msg_hdr", ")", "-", "1", "output", "=", "command", "(", "*", "args", ",", "*", "*", "kwargs", ")", "for", "line", "in", "output", ":", "if", "isinstance", "(", "line", ",", "bytes", ")", ":", "line", "=", "line", ".", "decode", "(", "'utf-8'", ",", "errors", "=", "'replace'", ")", "if", "logger", ".", "level", ">", "logging", ".", "DEBUG", ":", "if", "full_debug", ":", "stdout", ".", "write", "(", "line", ")", "stdout", ".", "flush", "(", ")", "continue", "msg", "=", "line", ".", "replace", "(", "'\\n'", ",", "' '", ")", ".", "replace", "(", "'\\t'", ",", "' '", ")", ".", "replace", "(", "'\\b'", ",", "' '", ")", ".", "rstrip", "(", ")", "if", "msg", ":", "if", "\"CI\"", "not", "in", "os", ".", "environ", ":", "stdout", ".", "write", "(", "u'{}\\r{}{:<{width}}'", ".", "format", "(", "Err_Style", ".", "RESET_ALL", ",", "msg_hdr", ",", "shorten_string", "(", "msg", ",", "msg_width", ")", ",", "width", "=", "msg_width", ")", ")", "stdout", ".", "flush", "(", ")", "need_closing_newline", "=", "True", "else", ":", "logger", ".", "debug", "(", "''", ".", "join", "(", "[", "'\\t'", ",", "line", ".", "rstrip", "(", ")", "]", ")", ")", "if", "need_closing_newline", ":", "stdout", ".", "write", "(", "'{}\\r{:>{width}}\\r'", ".", "format", "(", "Err_Style", ".", "RESET_ALL", ",", "' '", ",", "width", "=", "(", "columns", "-", "1", ")", ")", ")", "stdout", ".", "flush", "(", ")", "except", "sh", ".", "ErrorReturnCode", "as", "err", ":", "if", "need_closing_newline", ":", "stdout", ".", "write", "(", "'{}\\r{:>{width}}\\r'", ".", "format", "(", "Err_Style", ".", "RESET_ALL", ",", "' '", ",", "width", "=", "(", "columns", "-", "1", ")", ")", ")", "stdout", ".", "flush", "(", ")", "if", "tail_n", 
"is", "not", "None", "or", "filter_in", "or", "filter_out", ":", "def", "printtail", "(", "out", ",", "name", ",", "forecolor", ",", "tail_n", "=", "0", ",", "re_filter_in", "=", "None", ",", "re_filter_out", "=", "None", ")", ":", "lines", "=", "out", ".", "splitlines", "(", ")", "if", "re_filter_in", "is", "not", "None", ":", "lines", "=", "[", "line", "for", "line", "in", "lines", "if", "re_filter_in", ".", "search", "(", "line", ")", "]", "if", "re_filter_out", "is", "not", "None", ":", "lines", "=", "[", "line", "for", "line", "in", "lines", "if", "not", "re_filter_out", ".", "search", "(", "line", ")", "]", "if", "tail_n", "==", "0", "or", "len", "(", "lines", ")", "<=", "tail_n", ":", "info", "(", "'{}:\\n{}\\t{}{}'", ".", "format", "(", "name", ",", "forecolor", ",", "'\\t\\n'", ".", "join", "(", "lines", ")", ",", "Out_Fore", ".", "RESET", ")", ")", "else", ":", "info", "(", "'{} (last {} lines of {}):\\n{}\\t{}{}'", ".", "format", "(", "name", ",", "tail_n", ",", "len", "(", "lines", ")", ",", "forecolor", ",", "'\\t\\n'", ".", "join", "(", "[", "s", "for", "s", "in", "lines", "[", "-", "tail_n", ":", "]", "]", ")", ",", "Out_Fore", ".", "RESET", ")", ")", "printtail", "(", "err", ".", "stdout", ".", "decode", "(", "'utf-8'", ")", ",", "'STDOUT'", ",", "Out_Fore", ".", "YELLOW", ",", "tail_n", ",", "re", ".", "compile", "(", "filter_in", ")", "if", "filter_in", "else", "None", ",", "re", ".", "compile", "(", "filter_out", ")", "if", "filter_out", "else", "None", ")", "printtail", "(", "err", ".", "stderr", ".", "decode", "(", "'utf-8'", ")", ",", "'STDERR'", ",", "Err_Fore", ".", "RED", ")", "if", "is_critical", "or", "full_debug", ":", "env", "=", "kwargs", ".", "get", "(", "\"_env\"", ")", "if", "env", "is", "not", "None", ":", "info", "(", "\"{}ENV:{}\\n{}\\n\"", ".", "format", "(", "Err_Fore", ".", "YELLOW", ",", "Err_Fore", ".", "RESET", ",", "\"\\n\"", ".", "join", "(", "\"export {}='{}'\"", ".", "format", "(", "n", ",", "v", ")", "for", "n", ",", "v", "in", "env", ".", "items", "(", ")", ")", ")", ")", "info", "(", "\"{}COMMAND:{}\\ncd {} && {} {}\\n\"", ".", "format", "(", "Err_Fore", ".", "YELLOW", ",", "Err_Fore", ".", "RESET", ",", "os", ".", "getcwd", "(", ")", ",", "command", ",", "' '", ".", "join", "(", "args", ")", ")", ")", "warning", "(", "\"{}ERROR: {} failed!{}\"", ".", "format", "(", "Err_Fore", ".", "RED", ",", "command", ",", "Err_Fore", ".", "RESET", ")", ")", "if", "is_critical", ":", "exit", "(", "1", ")", "else", ":", "raise", "return", "output" ]
https://github.com/kivy/python-for-android/blob/4ecaa5fe01aa25e3bc8cadc52ae481645754f955/pythonforandroid/logger.py#L131-L233
DrSleep/tensorflow-deeplab-resnet
066023c033624e6c8154340e06e8fbad4f702bdf
train_msc.py
python
load
(saver, sess, ckpt_path)
Load trained weights. Args: saver: TensorFlow Saver object. sess: TensorFlow session. ckpt_path: path to checkpoint file with parameters.
Load trained weights. Args: saver: TensorFlow Saver object. sess: TensorFlow session. ckpt_path: path to checkpoint file with parameters.
[ "Load", "trained", "weights", ".", "Args", ":", "saver", ":", "TensorFlow", "Saver", "object", ".", "sess", ":", "TensorFlow", "session", ".", "ckpt_path", ":", "path", "to", "checkpoint", "file", "with", "parameters", "." ]
def load(saver, sess, ckpt_path): '''Load trained weights. Args: saver: TensorFlow Saver object. sess: TensorFlow session. ckpt_path: path to checkpoint file with parameters. ''' saver.restore(sess, ckpt_path) print("Restored model parameters from {}".format(ckpt_path))
[ "def", "load", "(", "saver", ",", "sess", ",", "ckpt_path", ")", ":", "saver", ".", "restore", "(", "sess", ",", "ckpt_path", ")", "print", "(", "\"Restored model parameters from {}\"", ".", "format", "(", "ckpt_path", ")", ")" ]
https://github.com/DrSleep/tensorflow-deeplab-resnet/blob/066023c033624e6c8154340e06e8fbad4f702bdf/train_msc.py#L110-L119
choasup/SIN
4851efb7b1c64180026e51ab8abcd95265c0602c
lib/rpn_msr/generate.py
python
_vis_proposals
(im, dets, thresh=0.5)
Draw detected bounding boxes.
Draw detected bounding boxes.
[ "Draw", "detected", "bounding", "boxes", "." ]
def _vis_proposals(im, dets, thresh=0.5): """Draw detected bounding boxes.""" inds = np.where(dets[:, -1] >= thresh)[0] if len(inds) == 0: return class_name = 'obj' im = im[:, :, (2, 1, 0)] fig, ax = plt.subplots(figsize=(12, 12)) ax.imshow(im, aspect='equal') for i in inds: bbox = dets[i, :4] score = dets[i, -1] ax.add_patch( plt.Rectangle((bbox[0], bbox[1]), bbox[2] - bbox[0], bbox[3] - bbox[1], fill=False, edgecolor='red', linewidth=3.5) ) ax.text(bbox[0], bbox[1] - 2, '{:s} {:.3f}'.format(class_name, score), bbox=dict(facecolor='blue', alpha=0.5), fontsize=14, color='white') ax.set_title(('{} detections with ' 'p({} | box) >= {:.1f}').format(class_name, class_name, thresh), fontsize=14) plt.axis('off') plt.tight_layout() plt.draw()
[ "def", "_vis_proposals", "(", "im", ",", "dets", ",", "thresh", "=", "0.5", ")", ":", "inds", "=", "np", ".", "where", "(", "dets", "[", ":", ",", "-", "1", "]", ">=", "thresh", ")", "[", "0", "]", "if", "len", "(", "inds", ")", "==", "0", ":", "return", "class_name", "=", "'obj'", "im", "=", "im", "[", ":", ",", ":", ",", "(", "2", ",", "1", ",", "0", ")", "]", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", "figsize", "=", "(", "12", ",", "12", ")", ")", "ax", ".", "imshow", "(", "im", ",", "aspect", "=", "'equal'", ")", "for", "i", "in", "inds", ":", "bbox", "=", "dets", "[", "i", ",", ":", "4", "]", "score", "=", "dets", "[", "i", ",", "-", "1", "]", "ax", ".", "add_patch", "(", "plt", ".", "Rectangle", "(", "(", "bbox", "[", "0", "]", ",", "bbox", "[", "1", "]", ")", ",", "bbox", "[", "2", "]", "-", "bbox", "[", "0", "]", ",", "bbox", "[", "3", "]", "-", "bbox", "[", "1", "]", ",", "fill", "=", "False", ",", "edgecolor", "=", "'red'", ",", "linewidth", "=", "3.5", ")", ")", "ax", ".", "text", "(", "bbox", "[", "0", "]", ",", "bbox", "[", "1", "]", "-", "2", ",", "'{:s} {:.3f}'", ".", "format", "(", "class_name", ",", "score", ")", ",", "bbox", "=", "dict", "(", "facecolor", "=", "'blue'", ",", "alpha", "=", "0.5", ")", ",", "fontsize", "=", "14", ",", "color", "=", "'white'", ")", "ax", ".", "set_title", "(", "(", "'{} detections with '", "'p({} | box) >= {:.1f}'", ")", ".", "format", "(", "class_name", ",", "class_name", ",", "thresh", ")", ",", "fontsize", "=", "14", ")", "plt", ".", "axis", "(", "'off'", ")", "plt", ".", "tight_layout", "(", ")", "plt", ".", "draw", "(", ")" ]
https://github.com/choasup/SIN/blob/4851efb7b1c64180026e51ab8abcd95265c0602c/lib/rpn_msr/generate.py#L14-L45
sympy/sympy
d822fcba181155b85ff2b29fe525adbafb22b448
sympy/polys/subresultants_qq_zz.py
python
sign_seq
(poly_seq, x)
return [sign(LC(poly_seq[i], x)) for i in range(len(poly_seq))]
Given a sequence of polynomials poly_seq, it returns the sequence of signs of the leading coefficients of the polynomials in poly_seq.
Given a sequence of polynomials poly_seq, it returns the sequence of signs of the leading coefficients of the polynomials in poly_seq.
[ "Given", "a", "sequence", "of", "polynomials", "poly_seq", "it", "returns", "the", "sequence", "of", "signs", "of", "the", "leading", "coefficients", "of", "the", "polynomials", "in", "poly_seq", "." ]
def sign_seq(poly_seq, x): """ Given a sequence of polynomials poly_seq, it returns the sequence of signs of the leading coefficients of the polynomials in poly_seq. """ return [sign(LC(poly_seq[i], x)) for i in range(len(poly_seq))]
[ "def", "sign_seq", "(", "poly_seq", ",", "x", ")", ":", "return", "[", "sign", "(", "LC", "(", "poly_seq", "[", "i", "]", ",", "x", ")", ")", "for", "i", "in", "range", "(", "len", "(", "poly_seq", ")", ")", "]" ]
https://github.com/sympy/sympy/blob/d822fcba181155b85ff2b29fe525adbafb22b448/sympy/polys/subresultants_qq_zz.py#L611-L618
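A concrete call of sign_seq() above; the import path matches this record, and LC/sign come from sympy:

from sympy.abc import x
from sympy.polys.subresultants_qq_zz import sign_seq

# leading coefficients are 1, -3 and 5, so the sign sequence is [1, -1, 1]
print(sign_seq([x**2 + 1, -3*x + 2, 5], x))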
WZMIAOMIAO/deep-learning-for-image-processing
a4502c284958d4bf78fb77b089a90e7688ddc196
pytorch_object_detection/faster_rcnn/network_files/rpn_function.py
python
concat_box_prediction_layers
(box_cls, box_regression)
return box_cls, box_regression
Adjust the tensor ordering and shape of the predictions from each prediction feature level in the box_cls and box_regression lists -> [N, -1, C] Args: box_cls: predicted object probabilities on each prediction feature level box_regression: predicted bbox regression parameters on each prediction feature level Returns:
Adjust the tensor ordering and shape of the predictions from each prediction feature level in the box_cls and box_regression lists -> [N, -1, C] Args: box_cls: predicted object probabilities on each prediction feature level box_regression: predicted bbox regression parameters on each prediction feature level
[ "Adjust", "the", "tensor", "ordering", "and", "shape", "of", "the", "predictions", "from", "each", "prediction", "feature", "level", "in", "the", "box_cls", "and", "box_regression", "lists", "-", ">", "[", "N", "-", "1", "C", "]", "Args", ":", "box_cls", ":", "predicted", "object", "probabilities", "on", "each", "prediction", "feature", "level", "box_regression", ":", "predicted", "bbox", "regression", "parameters", "on", "each", "prediction", "feature", "level" ]
def concat_box_prediction_layers(box_cls, box_regression): # type: (List[Tensor], List[Tensor]) -> Tuple[Tensor, Tensor] """ 对box_cla和box_regression两个list中的每个预测特征层的预测信息 的tensor排列顺序以及shape进行调整 -> [N, -1, C] Args: box_cls: 每个预测特征层上的预测目标概率 box_regression: 每个预测特征层上的预测目标bboxes regression参数 Returns: """ box_cls_flattened = [] box_regression_flattened = [] # 遍历每个预测特征层 for box_cls_per_level, box_regression_per_level in zip(box_cls, box_regression): # [batch_size, anchors_num_per_position * classes_num, height, width] # 注意,当计算RPN中的proposal时,classes_num=1,只区分目标和背景 N, AxC, H, W = box_cls_per_level.shape # # [batch_size, anchors_num_per_position * 4, height, width] Ax4 = box_regression_per_level.shape[1] # anchors_num_per_position A = Ax4 // 4 # classes_num C = AxC // A # [N, -1, C] box_cls_per_level = permute_and_flatten(box_cls_per_level, N, A, C, H, W) box_cls_flattened.append(box_cls_per_level) # [N, -1, C] box_regression_per_level = permute_and_flatten(box_regression_per_level, N, A, 4, H, W) box_regression_flattened.append(box_regression_per_level) box_cls = torch.cat(box_cls_flattened, dim=1).flatten(0, -2) # start_dim, end_dim box_regression = torch.cat(box_regression_flattened, dim=1).reshape(-1, 4) return box_cls, box_regression
[ "def", "concat_box_prediction_layers", "(", "box_cls", ",", "box_regression", ")", ":", "# type: (List[Tensor], List[Tensor]) -> Tuple[Tensor, Tensor]", "box_cls_flattened", "=", "[", "]", "box_regression_flattened", "=", "[", "]", "# 遍历每个预测特征层", "for", "box_cls_per_level", ",", "box_regression_per_level", "in", "zip", "(", "box_cls", ",", "box_regression", ")", ":", "# [batch_size, anchors_num_per_position * classes_num, height, width]", "# 注意,当计算RPN中的proposal时,classes_num=1,只区分目标和背景", "N", ",", "AxC", ",", "H", ",", "W", "=", "box_cls_per_level", ".", "shape", "# # [batch_size, anchors_num_per_position * 4, height, width]", "Ax4", "=", "box_regression_per_level", ".", "shape", "[", "1", "]", "# anchors_num_per_position", "A", "=", "Ax4", "//", "4", "# classes_num", "C", "=", "AxC", "//", "A", "# [N, -1, C]", "box_cls_per_level", "=", "permute_and_flatten", "(", "box_cls_per_level", ",", "N", ",", "A", ",", "C", ",", "H", ",", "W", ")", "box_cls_flattened", ".", "append", "(", "box_cls_per_level", ")", "# [N, -1, C]", "box_regression_per_level", "=", "permute_and_flatten", "(", "box_regression_per_level", ",", "N", ",", "A", ",", "4", ",", "H", ",", "W", ")", "box_regression_flattened", ".", "append", "(", "box_regression_per_level", ")", "box_cls", "=", "torch", ".", "cat", "(", "box_cls_flattened", ",", "dim", "=", "1", ")", ".", "flatten", "(", "0", ",", "-", "2", ")", "# start_dim, end_dim", "box_regression", "=", "torch", ".", "cat", "(", "box_regression_flattened", ",", "dim", "=", "1", ")", ".", "reshape", "(", "-", "1", ",", "4", ")", "return", "box_cls", ",", "box_regression" ]
https://github.com/WZMIAOMIAO/deep-learning-for-image-processing/blob/a4502c284958d4bf78fb77b089a90e7688ddc196/pytorch_object_detection/faster_rcnn/network_files/rpn_function.py#L272-L309
cornellius-gp/gpytorch
61f643eb8b487aef332c818f661fbcdb1df576ca
gpytorch/likelihoods/multitask_gaussian_likelihood.py
python
_MultitaskGaussianLikelihoodBase.marginal
(self, function_dist, *params, **kwargs)
return function_dist.__class__(mean, covar)
r""" If `rank` == 0, adds the task noises to the diagonal of the covariance matrix of the supplied :obj:`gpytorch.distributions.MultivariateNormal` or :obj:`gpytorch.distributions.MultitaskMultivariateNormal`. Otherwise, adds a rank `rank` covariance matrix to it. To accomplish this, we form a new :obj:`gpytorch.lazy.KroneckerProductLazyTensor` between :math:`I_{n}`, an identity matrix with size equal to the data and a (not necessarily diagonal) matrix containing the task noises :math:`D_{t}`. We also incorporate a shared `noise` parameter from the base :class:`gpytorch.likelihoods.GaussianLikelihood` that we extend. The final covariance matrix after this method is then :math:`K + D_{t} \otimes I_{n} + \sigma^{2}I_{nt}`. Args: function_dist (:obj:`gpytorch.distributions.MultitaskMultivariateNormal`): Random variable whose covariance matrix is a :obj:`gpytorch.lazy.LazyTensor` we intend to augment. Returns: :obj:`gpytorch.distributions.MultitaskMultivariateNormal`: A new random variable whose covariance matrix is a :obj:`gpytorch.lazy.LazyTensor` with :math:`D_{t} \otimes I_{n}` and :math:`\sigma^{2}I_{nt}` added.
r""" If `rank` == 0, adds the task noises to the diagonal of the covariance matrix of the supplied :obj:`gpytorch.distributions.MultivariateNormal` or :obj:`gpytorch.distributions.MultitaskMultivariateNormal`. Otherwise, adds a rank `rank` covariance matrix to it.
[ "r", "If", "rank", "==", "0", "adds", "the", "task", "noises", "to", "the", "diagonal", "of", "the", "covariance", "matrix", "of", "the", "supplied", ":", "obj", ":", "gpytorch", ".", "distributions", ".", "MultivariateNormal", "or", ":", "obj", ":", "gpytorch", ".", "distributions", ".", "MultitaskMultivariateNormal", ".", "Otherwise", "adds", "a", "rank", "rank", "covariance", "matrix", "to", "it", "." ]
def marginal(self, function_dist, *params, **kwargs): r""" If `rank` == 0, adds the task noises to the diagonal of the covariance matrix of the supplied :obj:`gpytorch.distributions.MultivariateNormal` or :obj:`gpytorch.distributions.MultitaskMultivariateNormal`. Otherwise, adds a rank `rank` covariance matrix to it. To accomplish this, we form a new :obj:`gpytorch.lazy.KroneckerProductLazyTensor` between :math:`I_{n}`, an identity matrix with size equal to the data and a (not necessarily diagonal) matrix containing the task noises :math:`D_{t}`. We also incorporate a shared `noise` parameter from the base :class:`gpytorch.likelihoods.GaussianLikelihood` that we extend. The final covariance matrix after this method is then :math:`K + D_{t} \otimes I_{n} + \sigma^{2}I_{nt}`. Args: function_dist (:obj:`gpytorch.distributions.MultitaskMultivariateNormal`): Random variable whose covariance matrix is a :obj:`gpytorch.lazy.LazyTensor` we intend to augment. Returns: :obj:`gpytorch.distributions.MultitaskMultivariateNormal`: A new random variable whose covariance matrix is a :obj:`gpytorch.lazy.LazyTensor` with :math:`D_{t} \otimes I_{n}` and :math:`\sigma^{2}I_{nt}` added. """ mean, covar = function_dist.mean, function_dist.lazy_covariance_matrix # ensure that sumKroneckerLT is actually called if isinstance(covar, LazyEvaluatedKernelTensor): covar = covar.evaluate_kernel() covar_kron_lt = self._shaped_noise_covar(mean.shape, add_noise=self.has_global_noise) covar = covar + covar_kron_lt return function_dist.__class__(mean, covar)
[ "def", "marginal", "(", "self", ",", "function_dist", ",", "*", "params", ",", "*", "*", "kwargs", ")", ":", "mean", ",", "covar", "=", "function_dist", ".", "mean", ",", "function_dist", ".", "lazy_covariance_matrix", "# ensure that sumKroneckerLT is actually called", "if", "isinstance", "(", "covar", ",", "LazyEvaluatedKernelTensor", ")", ":", "covar", "=", "covar", ".", "evaluate_kernel", "(", ")", "covar_kron_lt", "=", "self", ".", "_shaped_noise_covar", "(", "mean", ".", "shape", ",", "add_noise", "=", "self", ".", "has_global_noise", ")", "covar", "=", "covar", "+", "covar_kron_lt", "return", "function_dist", ".", "__class__", "(", "mean", ",", "covar", ")" ]
https://github.com/cornellius-gp/gpytorch/blob/61f643eb8b487aef332c818f661fbcdb1df576ca/gpytorch/likelihoods/multitask_gaussian_likelihood.py#L67-L99
square/pylink
a2d9fbd3add62ffd06ba737c5ea82b8491fdc425
pylink/jlink.py
python
JLink.__exit__
(self, exc_type, exc_val, exc_tb)
Closes the JLink connection on exit of the context manager. Stops the SWO if enabled and closes the J-Link connection if one exists. Args: self (JLink): the ``JLink`` instance exc_type (BaseExceptionType, None): the exception class, if any raised inside the context manager exc_val (BaseException, None): the exception object, if any raised inside the context manager exc_tb (TracebackType, None): the exception traceback, if any exception was raised inside the context manager. Returns: ``True`` if exception raised inside the context manager was handled and shall be suppressed (not propagated), ``None`` otherwise.
Closes the JLink connection on exit of the context manager.
[ "Closes", "the", "JLink", "connection", "on", "exit", "of", "the", "context", "manager", "." ]
def __exit__(self, exc_type, exc_val, exc_tb): """Closes the JLink connection on exit of the context manager. Stops the SWO if enabled and closes the J-Link connection if one exists. Args: self (JLink): the ``JLink`` instance exc_type (BaseExceptionType, None): the exception class, if any raised inside the context manager exc_val (BaseException, None): the exception object, if any raised inside the context manager exc_tb (TracebackType, None): the exception traceback, if any exception was raised inside the context manager. Returns: ``True`` if exception raised inside the context manager was handled and shall be suppressed (not propagated), ``None`` otherwise. """ self._finalize()
[ "def", "__exit__", "(", "self", ",", "exc_type", ",", "exc_val", ",", "exc_tb", ")", ":", "self", ".", "_finalize", "(", ")" ]
https://github.com/square/pylink/blob/a2d9fbd3add62ffd06ba737c5ea82b8491fdc425/pylink/jlink.py#L373-L392
FriedAppleTeam/FRAPL
89c14d57e0cc77b915fe1e95f60e9e1847699103
Framework/FridaLink/FridaLink/Core/HookEngine.py
python
HookEngineProtocol.showImportSymbolCPUForIdx
(self, import_idx)
[]
def showImportSymbolCPUForIdx(self, import_idx): if self.clientSocket is None: fl_log("FridaLink: Frida not connected\n"); return importEntry = self.imports[import_idx] if importEntry.address not in self.importHookMap: return address = importEntry.address; if self.cpuContextViews.hasView(address) == False: entry = self.importHookMap[address] newView = CPUContextView(self, entry.hook.id, entry.hook.symbol) self.cpuContextViews.addView("CPU Context", newView) self.cpuContextViews.setContent(entry.hook.id, {"arch":entry.arch, "context":entry.cpu_ctx}) self.cpuContextViews.showView(address)
[ "def", "showImportSymbolCPUForIdx", "(", "self", ",", "import_idx", ")", ":", "if", "self", ".", "clientSocket", "is", "None", ":", "fl_log", "(", "\"FridaLink: Frida not connected\\n\"", ")", "return", "importEntry", "=", "self", ".", "imports", "[", "import_idx", "]", "if", "importEntry", ".", "address", "not", "in", "self", ".", "importHookMap", ":", "return", "address", "=", "importEntry", ".", "address", "if", "self", ".", "cpuContextViews", ".", "hasView", "(", "address", ")", "==", "False", ":", "entry", "=", "self", ".", "importHookMap", "[", "address", "]", "newView", "=", "CPUContextView", "(", "self", ",", "entry", ".", "hook", ".", "id", ",", "entry", ".", "hook", ".", "symbol", ")", "self", ".", "cpuContextViews", ".", "addView", "(", "\"CPU Context\"", ",", "newView", ")", "self", ".", "cpuContextViews", ".", "setContent", "(", "entry", ".", "hook", ".", "id", ",", "{", "\"arch\"", ":", "entry", ".", "arch", ",", "\"context\"", ":", "entry", ".", "cpu_ctx", "}", ")", "self", ".", "cpuContextViews", ".", "showView", "(", "address", ")" ]
https://github.com/FriedAppleTeam/FRAPL/blob/89c14d57e0cc77b915fe1e95f60e9e1847699103/Framework/FridaLink/FridaLink/Core/HookEngine.py#L654-L670
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v9/services/services/google_ads_service/client.py
python
GoogleAdsServiceClient.parse_shopping_performance_view_path
(path: str)
return m.groupdict() if m else {}
Parse a shopping_performance_view path into its component segments.
Parse a shopping_performance_view path into its component segments.
[ "Parse", "a", "shopping_performance_view", "path", "into", "its", "component", "segments", "." ]
def parse_shopping_performance_view_path(path: str) -> Dict[str, str]: """Parse a shopping_performance_view path into its component segments.""" m = re.match( r"^customers/(?P<customer_id>.+?)/shoppingPerformanceView$", path ) return m.groupdict() if m else {}
[ "def", "parse_shopping_performance_view_path", "(", "path", ":", "str", ")", "->", "Dict", "[", "str", ",", "str", "]", ":", "m", "=", "re", ".", "match", "(", "r\"^customers/(?P<customer_id>.+?)/shoppingPerformanceView$\"", ",", "path", ")", "return", "m", ".", "groupdict", "(", ")", "if", "m", "else", "{", "}" ]
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v9/services/services/google_ads_service/client.py#L2740-L2745
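A sketch of the parser above; the import path mirrors this record, and note that a non-matching path yields an empty dict:

from google.ads.googleads.v9.services.services.google_ads_service import GoogleAdsServiceClient

path = 'customers/1234567890/shoppingPerformanceView'
print(GoogleAdsServiceClient.parse_shopping_performance_view_path(path))        # {'customer_id': '1234567890'}
print(GoogleAdsServiceClient.parse_shopping_performance_view_path('bad/path'))  # {} (no match)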
mithril-global/GoAgentX
788fbd5e1c824c75cf98a9aef8a6d4ec8df25e95
GoAgentX.app/Contents/PlugIns/shadowsocks.gxbundle/Contents/Resources/bin/python/M2Crypto/X509.py
python
X509_Store_Context.get_error
(self)
return m2.x509_store_ctx_get_error(self.ctx)
Get error code.
Get error code.
[ "Get", "error", "code", "." ]
def get_error(self): """ Get error code. """ return m2.x509_store_ctx_get_error(self.ctx)
[ "def", "get_error", "(", "self", ")", ":", "return", "m2", ".", "x509_store_ctx_get_error", "(", "self", ".", "ctx", ")" ]
https://github.com/mithril-global/GoAgentX/blob/788fbd5e1c824c75cf98a9aef8a6d4ec8df25e95/GoAgentX.app/Contents/PlugIns/shadowsocks.gxbundle/Contents/Resources/bin/python/M2Crypto/X509.py#L700-L704
nfvlabs/openmano
b09eabec0a168aeda8adc3ea99f734e45e810205
openvim/utils/RADclass.py
python
RADclass.set_os
(self,os)
return (True, "")
Sets the operating system. Returns (True,Warning) in case of success and ('False',<error description>) in case of error
Sets the operating system. Returns (True,Warning) in case of success and ('False',<error description>) in case of error
[ "Sets", "the", "operating", "system", ".", "Returns", "(", "True", "Warning", ")", "in", "case", "of", "success", "and", "(", "False", "<error", "description", ">", ")", "in", "case", "of", "error" ]
def set_os(self,os): """Sets the operating system. Returns (True,Warning) in case of success and ('False',<error description>) in case of error""" if not isinstance(os,OpSys): return (False, 'The variable \'os\' must be of class OpSys') self.os.assign(os) return (True, "")
[ "def", "set_os", "(", "self", ",", "os", ")", ":", "if", "not", "isinstance", "(", "os", ",", "OpSys", ")", ":", "return", "(", "False", ",", "'The variable \\'os\\' must be of class OpSys'", ")", "self", ".", "os", ".", "assign", "(", "os", ")", "return", "(", "True", ",", "\"\"", ")" ]
https://github.com/nfvlabs/openmano/blob/b09eabec0a168aeda8adc3ea99f734e45e810205/openvim/utils/RADclass.py#L300-L307
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/PIL/Image.py
python
effect_mandelbrot
(size, extent, quality)
return Image()._new(core.effect_mandelbrot(size, extent, quality))
Generate a Mandelbrot set covering the given extent. :param size: The requested size in pixels, as a 2-tuple: (width, height). :param extent: The extent to cover, as a 4-tuple: (x0, y0, x1, y2). :param quality: Quality.
Generate a Mandelbrot set covering the given extent.
[ "Generate", "a", "Mandelbrot", "set", "covering", "the", "given", "extent", "." ]
def effect_mandelbrot(size, extent, quality): """ Generate a Mandelbrot set covering the given extent. :param size: The requested size in pixels, as a 2-tuple: (width, height). :param extent: The extent to cover, as a 4-tuple: (x0, y0, x1, y2). :param quality: Quality. """ return Image()._new(core.effect_mandelbrot(size, extent, quality))
[ "def", "effect_mandelbrot", "(", "size", ",", "extent", ",", "quality", ")", ":", "return", "Image", "(", ")", ".", "_new", "(", "core", ".", "effect_mandelbrot", "(", "size", ",", "extent", ",", "quality", ")", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/PIL/Image.py#L2820-L2830
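A sketch of the factory above; the extent mirrors Pillow's own test coordinates, and the greyscale ("L") result mode is an assumption about core.effect_mandelbrot:

from PIL import Image

im = Image.effect_mandelbrot((512, 512), (-3, -2.5, 2, 2.5), 100)  # size, extent, quality
print(im.mode, im.size)    # expected: L (512, 512)
im.save('mandelbrot.png')  # hypothetical output path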
GetStream/stream-python
142b5b43c0a60a96c36f25b6fc5a224dd2e418cc
setup.py
python
PyTest.finalize_options
(self)
[]
def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True
[ "def", "finalize_options", "(", "self", ")", ":", "TestCommand", ".", "finalize_options", "(", "self", ")", "self", ".", "test_args", "=", "[", "]", "self", ".", "test_suite", "=", "True" ]
https://github.com/GetStream/stream-python/blob/142b5b43c0a60a96c36f25b6fc5a224dd2e418cc/setup.py#L23-L26
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/celery/schedules.py
python
crontab_parser._parse_part
(self, part)
return self._expand_range((part,))
[]
def _parse_part(self, part): for regex, handler in self.pats: m = regex.match(part) if m: return handler(m.groups()) return self._expand_range((part,))
[ "def", "_parse_part", "(", "self", ",", "part", ")", ":", "for", "regex", ",", "handler", "in", "self", ".", "pats", ":", "m", "=", "regex", ".", "match", "(", "part", ")", "if", "m", ":", "return", "handler", "(", "m", ".", "groups", "(", ")", ")", "return", "self", ".", "_expand_range", "(", "(", "part", ",", ")", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/celery/schedules.py#L263-L268
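_parse_part() is the per-field workhorse behind crontab_parser.parse(); a sketch using the public parser (maximum of 60 for a minute field), with results celery documents for these patterns:

from celery.schedules import crontab_parser

print(crontab_parser(60).parse('*/15'))   # {0, 15, 30, 45}
print(crontab_parser(60).parse('1-3'))    # {1, 2, 3}
print(crontab_parser(60).parse('30'))     # {30}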
AstroPrint/AstroBox
e7e3b8a7d33ea85fcb6b2696869c0d719ceb8b75
src/ext/makerbot_driver/GcodeProcessors/EmptyLayerProcessor.py
python
EmptyLayerProcessor.check_for_layer_end
(self, string)
[]
def check_for_layer_end(self, string): match = re.match(self.MG_layer_end, string) if match is not None: return 'mg' match = re.match(self.SF_layer_end, string) if match is not None: return 'sf' else: return None
[ "def", "check_for_layer_end", "(", "self", ",", "string", ")", ":", "match", "=", "re", ".", "match", "(", "self", ".", "MG_layer_end", ",", "string", ")", "if", "match", "is", "not", "None", ":", "return", "'mg'", "match", "=", "re", ".", "match", "(", "self", ".", "SF_layer_end", ",", "string", ")", "if", "match", "is", "not", "None", ":", "return", "'sf'", "else", ":", "return", "None" ]
https://github.com/AstroPrint/AstroBox/blob/e7e3b8a7d33ea85fcb6b2696869c0d719ceb8b75/src/ext/makerbot_driver/GcodeProcessors/EmptyLayerProcessor.py#L133-L141
cobbler/cobbler
eed8cdca3e970c8aa1d199e80b8c8f19b3f940cc
cobbler/remote.py
python
CobblerXMLRPCInterface.remove_autoinstall_snippet
(self, file_path: str, token: str)
return True
Remove an automated OS installation snippet file :param file_path: automated OS installation snippet file path :param token: Cobbler token, obtained from login() :return: bool if operation was successful
Remove an automated OS installation snippet file
[ "Remove", "an", "automated", "OS", "installation", "snippet", "file" ]
def remove_autoinstall_snippet(self, file_path: str, token: str): """ Remove an automated OS installation snippet file :param file_path: automated OS installation snippet file path :param token: Cobbler token, obtained from login() :return: bool if operation was successful """ what = "remove_autoinstall_snippet" self._log(what, name=file_path, token=token) self.check_access(token, what, file_path, True) self.autoinstall_mgr.remove_autoinstall_snippet(file_path) return True
[ "def", "remove_autoinstall_snippet", "(", "self", ",", "file_path", ":", "str", ",", "token", ":", "str", ")", ":", "what", "=", "\"remove_autoinstall_snippet\"", "self", ".", "_log", "(", "what", ",", "name", "=", "file_path", ",", "token", "=", "token", ")", "self", ".", "check_access", "(", "token", ",", "what", ",", "file_path", ",", "True", ")", "self", ".", "autoinstall_mgr", ".", "remove_autoinstall_snippet", "(", "file_path", ")", "return", "True" ]
https://github.com/cobbler/cobbler/blob/eed8cdca3e970c8aa1d199e80b8c8f19b3f940cc/cobbler/remote.py#L3517-L3532
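This method is served over Cobbler's XML-RPC API; a client-side sketch in which the host, credentials, and snippet path are all placeholders:

import xmlrpc.client

server = xmlrpc.client.ServerProxy("http://cobbler.example.com/cobbler_api")
token = server.login("cobbler", "password")  # placeholder credentials
server.remove_autoinstall_snippet("example.snippet", token)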
mchristopher/PokemonGo-DesktopMap
ec37575f2776ee7d64456e2a1f6b6b78830b4fe0
app/pywin/Lib/mailbox.py
python
MaildirMessage.get_subdir
(self)
return self._subdir
Return 'new' or 'cur'.
Return 'new' or 'cur'.
[ "Return", "new", "or", "cur", "." ]
def get_subdir(self): """Return 'new' or 'cur'.""" return self._subdir
[ "def", "get_subdir", "(", "self", ")", ":", "return", "self", ".", "_subdir" ]
https://github.com/mchristopher/PokemonGo-DesktopMap/blob/ec37575f2776ee7d64456e2a1f6b6b78830b4fe0/app/pywin/Lib/mailbox.py#L1491-L1493
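A usage sketch against the standard-library mailbox module; the Maildir path is a placeholder:

import mailbox

md = mailbox.Maildir("/home/user/Maildir", create=False)  # placeholder path
for key, msg in md.items():
    # msg is a MaildirMessage; 'new' marks fresh deliveries, 'cur' seen mail.
    print(key, msg.get_subdir())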
nameko/nameko
17ecee2bcfa90cb0f3a2f3328c5004f48e4e02a3
nameko/rpc.py
python
MethodProxy.__repr__
(self)
return '<proxy method: {}.{}>'.format(service_name, method_name)
[]
def __repr__(self): service_name = self.service_name method_name = self.method_name return '<proxy method: {}.{}>'.format(service_name, method_name)
[ "def", "__repr__", "(", "self", ")", ":", "service_name", "=", "self", ".", "service_name", "method_name", "=", "self", ".", "method_name", "return", "'<proxy method: {}.{}>'", ".", "format", "(", "service_name", ",", "method_name", ")" ]
https://github.com/nameko/nameko/blob/17ecee2bcfa90cb0f3a2f3328c5004f48e4e02a3/nameko/rpc.py#L463-L466
nipy/nipy
d16d268938dcd5c15748ca051532c21f57cf8a22
nipy/externals/configobj.py
python
ConfigObj._handle_value
(self, value)
return (the_list, comment)
Given a value string, unquote, remove comment, handle lists. (including empty and single member lists)
Given a value string, unquote, remove comment, handle lists. (including empty and single member lists)
[ "Given", "a", "value", "string", "unquote", "remove", "comment", "handle", "lists", ".", "(", "including", "empty", "and", "single", "member", "lists", ")" ]
def _handle_value(self, value): """ Given a value string, unquote, remove comment, handle lists. (including empty and single member lists) """ if self._inspec: # Parsing a configspec so don't handle comments return (value, '') # do we look for lists in values ? if not self.list_values: mat = self._nolistvalue.match(value) if mat is None: raise SyntaxError() # NOTE: we don't unquote here return mat.groups() # mat = self._valueexp.match(value) if mat is None: # the value is badly constructed, probably badly quoted, # or an invalid list raise SyntaxError() (list_values, single, empty_list, comment) = mat.groups() if (list_values == '') and (single is None): # change this if you want to accept empty values raise SyntaxError() # NOTE: note there is no error handling from here if the regex # is wrong: then incorrect values will slip through if empty_list is not None: # the single comma - meaning an empty list return ([], comment) if single is not None: # handle empty values if list_values and not single: # FIXME: the '' is a workaround because our regex now matches # '' at the end of a list if it has a trailing comma single = None else: single = single or '""' single = self._unquote(single) if list_values == '': # not a list value return (single, comment) the_list = self._listvalueexp.findall(list_values) the_list = [self._unquote(val) for val in the_list] if single is not None: the_list += [single] return (the_list, comment)
[ "def", "_handle_value", "(", "self", ",", "value", ")", ":", "if", "self", ".", "_inspec", ":", "# Parsing a configspec so don't handle comments", "return", "(", "value", ",", "''", ")", "# do we look for lists in values ?", "if", "not", "self", ".", "list_values", ":", "mat", "=", "self", ".", "_nolistvalue", ".", "match", "(", "value", ")", "if", "mat", "is", "None", ":", "raise", "SyntaxError", "(", ")", "# NOTE: we don't unquote here", "return", "mat", ".", "groups", "(", ")", "#", "mat", "=", "self", ".", "_valueexp", ".", "match", "(", "value", ")", "if", "mat", "is", "None", ":", "# the value is badly constructed, probably badly quoted,", "# or an invalid list", "raise", "SyntaxError", "(", ")", "(", "list_values", ",", "single", ",", "empty_list", ",", "comment", ")", "=", "mat", ".", "groups", "(", ")", "if", "(", "list_values", "==", "''", ")", "and", "(", "single", "is", "None", ")", ":", "# change this if you want to accept empty values", "raise", "SyntaxError", "(", ")", "# NOTE: note there is no error handling from here if the regex", "# is wrong: then incorrect values will slip through", "if", "empty_list", "is", "not", "None", ":", "# the single comma - meaning an empty list", "return", "(", "[", "]", ",", "comment", ")", "if", "single", "is", "not", "None", ":", "# handle empty values", "if", "list_values", "and", "not", "single", ":", "# FIXME: the '' is a workaround because our regex now matches", "# '' at the end of a list if it has a trailing comma", "single", "=", "None", "else", ":", "single", "=", "single", "or", "'\"\"'", "single", "=", "self", ".", "_unquote", "(", "single", ")", "if", "list_values", "==", "''", ":", "# not a list value", "return", "(", "single", ",", "comment", ")", "the_list", "=", "self", ".", "_listvalueexp", ".", "findall", "(", "list_values", ")", "the_list", "=", "[", "self", ".", "_unquote", "(", "val", ")", "for", "val", "in", "the_list", "]", "if", "single", "is", "not", "None", ":", "the_list", "+=", "[", "single", "]", "return", "(", "the_list", ",", "comment", ")" ]
https://github.com/nipy/nipy/blob/d16d268938dcd5c15748ca051532c21f57cf8a22/nipy/externals/configobj.py#L1851-L1897
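The list-handling rules implemented above are observable through ConfigObj's public interface; a sketch assuming the configobj package (this vendored copy behaves the same way):

from configobj import ConfigObj

cfg = ConfigObj(['a = 1, 2, 3', 'b = ,', 'c = "x, y"'])
print(cfg['a'])  # ['1', '2', '3'] - parsed as a list value
print(cfg['b'])  # []              - the single comma means an empty list
print(cfg['c'])  # 'x, y'          - quoting suppresses list splitting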
MrH0wl/Cloudmare
65e5bc9888f9d362ab2abfb103ea6c1e869d67aa
thirdparty/dns/opcode.py
python
is_update
(flags)
return from_flags(flags) == Opcode.UPDATE
Is the opcode in flags UPDATE? *flags*, an ``int``, the DNS message flags. Returns a ``bool``.
Is the opcode in flags UPDATE?
[ "Is", "the", "opcode", "in", "flags", "UPDATE?" ]
def is_update(flags): """Is the opcode in flags UPDATE? *flags*, an ``int``, the DNS message flags. Returns a ``bool``. """ return from_flags(flags) == Opcode.UPDATE
[ "def", "is_update", "(", "flags", ")", ":", "return", "from_flags", "(", "flags", ")", "==", "Opcode", ".", "UPDATE" ]
https://github.com/MrH0wl/Cloudmare/blob/65e5bc9888f9d362ab2abfb103ea6c1e869d67aa/thirdparty/dns/opcode.py#L99-L107
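A usage sketch with dnspython, which this vendored module mirrors; the query is illustrative:

import dns.message
import dns.opcode

msg = dns.message.make_query("example.com", "A")
print(dns.opcode.is_update(msg.flags))  # False - an ordinary QUERY, not UPDATE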
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/full/locale.py
python
_group
(s, monetary=False)
return ( left_spaces + thousands_sep.join(groups) + right_spaces, len(thousands_sep) * (len(groups) - 1) )
[]
def _group(s, monetary=False): conv = localeconv() thousands_sep = conv[monetary and 'mon_thousands_sep' or 'thousands_sep'] grouping = conv[monetary and 'mon_grouping' or 'grouping'] if not grouping: return (s, 0) if s[-1] == ' ': stripped = s.rstrip() right_spaces = s[len(stripped):] s = stripped else: right_spaces = '' left_spaces = '' groups = [] for interval in _grouping_intervals(grouping): if not s or s[-1] not in "0123456789": # only non-digit characters remain (sign, spaces) left_spaces = s s = '' break groups.append(s[-interval:]) s = s[:-interval] if s: groups.append(s) groups.reverse() return ( left_spaces + thousands_sep.join(groups) + right_spaces, len(thousands_sep) * (len(groups) - 1) )
[ "def", "_group", "(", "s", ",", "monetary", "=", "False", ")", ":", "conv", "=", "localeconv", "(", ")", "thousands_sep", "=", "conv", "[", "monetary", "and", "'mon_thousands_sep'", "or", "'thousands_sep'", "]", "grouping", "=", "conv", "[", "monetary", "and", "'mon_grouping'", "or", "'grouping'", "]", "if", "not", "grouping", ":", "return", "(", "s", ",", "0", ")", "if", "s", "[", "-", "1", "]", "==", "' '", ":", "stripped", "=", "s", ".", "rstrip", "(", ")", "right_spaces", "=", "s", "[", "len", "(", "stripped", ")", ":", "]", "s", "=", "stripped", "else", ":", "right_spaces", "=", "''", "left_spaces", "=", "''", "groups", "=", "[", "]", "for", "interval", "in", "_grouping_intervals", "(", "grouping", ")", ":", "if", "not", "s", "or", "s", "[", "-", "1", "]", "not", "in", "\"0123456789\"", ":", "# only non-digit characters remain (sign, spaces)", "left_spaces", "=", "s", "s", "=", "''", "break", "groups", ".", "append", "(", "s", "[", "-", "interval", ":", "]", ")", "s", "=", "s", "[", ":", "-", "interval", "]", "if", "s", ":", "groups", ".", "append", "(", "s", ")", "groups", ".", "reverse", "(", ")", "return", "(", "left_spaces", "+", "thousands_sep", ".", "join", "(", "groups", ")", "+", "right_spaces", ",", "len", "(", "thousands_sep", ")", "*", "(", "len", "(", "groups", ")", "-", "1", ")", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/locale.py#L138-L166
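_group is private, but its effect is visible through the public grouping API; a sketch that assumes an en_US locale is installed on the system:

import locale

locale.setlocale(locale.LC_ALL, "en_US.UTF-8")  # assumes this locale exists
print(locale.format_string("%d", 1234567, grouping=True))  # '1,234,567'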
DataDog/integrations-core
934674b29d94b70ccc008f76ea172d0cdae05e1e
citrix_hypervisor/datadog_checks/citrix_hypervisor/config_models/defaults.py
python
instance_kerberos_force_initiate
(field, value)
return False
[]
def instance_kerberos_force_initiate(field, value): return False
[ "def", "instance_kerberos_force_initiate", "(", "field", ",", "value", ")", ":", "return", "False" ]
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/citrix_hypervisor/datadog_checks/citrix_hypervisor/config_models/defaults.py#L85-L86
akfamily/akshare
590e50eece9ec067da3538c7059fd660b71f1339
akshare/stock/stock_zh_a_sina.py
python
_get_zh_a_page_count
()
Total number of pages for all stocks http://vip.stock.finance.sina.com.cn/mkt/#hs_a :return: the total number of stock pages to collect :rtype: int
Total number of pages for all stocks http://vip.stock.finance.sina.com.cn/mkt/#hs_a :return: the total number of stock pages to collect :rtype: int
[ "Total", "number", "of", "pages", "for", "all", "stocks", "http", ":", "//", "vip", ".", "stock", ".", "finance", ".", "sina", ".", "com", ".", "cn", "/", "mkt", "/", "#hs_a", ":", "return", ":", "the", "total", "number", "of", "stock", "pages", "to", "collect", ":", "rtype", ":", "int" ]
def _get_zh_a_page_count() -> int: """ Total number of pages for all stocks http://vip.stock.finance.sina.com.cn/mkt/#hs_a :return: the total number of stock pages to collect :rtype: int """ res = requests.get(zh_sina_a_stock_count_url) page_count = int(re.findall(re.compile(r"\d+"), res.text)[0]) / 80 if isinstance(page_count, int): return page_count else: return int(page_count) + 1
[ "def", "_get_zh_a_page_count", "(", ")", "->", "int", ":", "res", "=", "requests", ".", "get", "(", "zh_sina_a_stock_count_url", ")", "page_count", "=", "int", "(", "re", ".", "findall", "(", "re", ".", "compile", "(", "r\"\\d+\"", ")", ",", "res", ".", "text", ")", "[", "0", "]", ")", "/", "80", "if", "isinstance", "(", "page_count", ",", "int", ")", ":", "return", "page_count", "else", ":", "return", "int", "(", "page_count", ")", "+", "1" ]
https://github.com/akfamily/akshare/blob/590e50eece9ec067da3538c7059fd660b71f1339/akshare/stock/stock_zh_a_sina.py#L29-L41
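Note that under Python 3 the division above always yields a float, so the isinstance(page_count, int) branch never fires and the function returns floor(count / 80) + 1 (one extra page on exact multiples, which is harmless for scraping). A hedged sketch of the intended ceiling division:

import math

def page_count_for(total_stocks: int, per_page: int = 80) -> int:
    # Number of per_page-row pages needed to cover all rows.
    return math.ceil(total_stocks / per_page)

print(page_count_for(4000))  # 50
print(page_count_for(4001))  # 51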
dengdan/seglink
cc36732d78a637ac10587c11befe19944ec1c1ea
preprocessing/ssd_vgg_preprocessing.py
python
preprocess_for_eval
(image, labels, bboxes, xs, ys, out_shape=EVAL_SIZE, data_format='NHWC', difficults=None, resize=Resize.WARP_RESIZE, scope='ssd_preprocessing_train')
Preprocess an image for evaluation. Args: image: A `Tensor` representing an image of arbitrary size. out_shape: Output shape after pre-processing (if resize != None) resize: Resize strategy. Returns: A preprocessed image.
Preprocess an image for evaluation.
[ "Preprocess", "an", "image", "for", "evaluation", "." ]
def preprocess_for_eval(image, labels, bboxes, xs, ys, out_shape=EVAL_SIZE, data_format='NHWC', difficults=None, resize=Resize.WARP_RESIZE, scope='ssd_preprocessing_train'): """Preprocess an image for evaluation. Args: image: A `Tensor` representing an image of arbitrary size. out_shape: Output shape after pre-processing (if resize != None) resize: Resize strategy. Returns: A preprocessed image. """ with tf.name_scope(scope): if image.get_shape().ndims != 3: raise ValueError('Input must be of size [height, width, C>0]') image = tf.to_float(image) image = tf_image_whitened(image, [_R_MEAN, _G_MEAN, _B_MEAN]) if resize == Resize.NONE: pass else: image = tf_image.resize_image(image, out_shape, method=tf.image.ResizeMethod.BILINEAR, align_corners=False) # Image data format. if data_format == 'NCHW': image = tf.transpose(image, perm=(2, 0, 1)) return image, labels, bboxes, xs, ys
[ "def", "preprocess_for_eval", "(", "image", ",", "labels", ",", "bboxes", ",", "xs", ",", "ys", ",", "out_shape", "=", "EVAL_SIZE", ",", "data_format", "=", "'NHWC'", ",", "difficults", "=", "None", ",", "resize", "=", "Resize", ".", "WARP_RESIZE", ",", "scope", "=", "'ssd_preprocessing_train'", ")", ":", "with", "tf", ".", "name_scope", "(", "scope", ")", ":", "if", "image", ".", "get_shape", "(", ")", ".", "ndims", "!=", "3", ":", "raise", "ValueError", "(", "'Input must be of size [height, width, C>0]'", ")", "image", "=", "tf", ".", "to_float", "(", "image", ")", "image", "=", "tf_image_whitened", "(", "image", ",", "[", "_R_MEAN", ",", "_G_MEAN", ",", "_B_MEAN", "]", ")", "if", "resize", "==", "Resize", ".", "NONE", ":", "pass", "else", ":", "image", "=", "tf_image", ".", "resize_image", "(", "image", ",", "out_shape", ",", "method", "=", "tf", ".", "image", ".", "ResizeMethod", ".", "BILINEAR", ",", "align_corners", "=", "False", ")", "# Image data format.", "if", "data_format", "==", "'NCHW'", ":", "image", "=", "tf", ".", "transpose", "(", "image", ",", "perm", "=", "(", "2", ",", "0", ",", "1", ")", ")", "return", "image", ",", "labels", ",", "bboxes", ",", "xs", ",", "ys" ]
https://github.com/dengdan/seglink/blob/cc36732d78a637ac10587c11befe19944ec1c1ea/preprocessing/ssd_vgg_preprocessing.py#L298-L329
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/gdata/src/atom/core.py
python
XmlElement.get_elements
(self, tag=None, namespace=None, version=1)
return matches
Find all sub elements which match the tag and namespace. To find all elements in this object, call get_elements with the tag and namespace both set to None (the default). This method searches through the object's members and the elements stored in _other_elements which did not match any of the XML parsing rules for this class. Args: tag: str namespace: str version: int Specifies the version of the XML rules to be used when searching for matching elements. Returns: A list of the matching XmlElements.
Find all sub elements which match the tag and namespace.
[ "Find", "all", "sub", "elements", "which", "match", "the", "tag", "and", "namespace", "." ]
def get_elements(self, tag=None, namespace=None, version=1): """Find all sub elements which match the tag and namespace. To find all elements in this object, call get_elements with the tag and namespace both set to None (the default). This method searches through the object's members and the elements stored in _other_elements which did not match any of the XML parsing rules for this class. Args: tag: str namespace: str version: int Specifies the version of the XML rules to be used when searching for matching elements. Returns: A list of the matching XmlElements. """ matches = [] ignored1, elements, ignored2 = self.__class__._get_rules(version) if elements: for qname, element_def in elements.iteritems(): member = getattr(self, element_def[0]) if member: if _qname_matches(tag, namespace, qname): if element_def[2]: # If this is a repeating element, copy all instances into the # result list. matches.extend(member) else: matches.append(member) for element in self._other_elements: if _qname_matches(tag, namespace, element._qname): matches.append(element) return matches
[ "def", "get_elements", "(", "self", ",", "tag", "=", "None", ",", "namespace", "=", "None", ",", "version", "=", "1", ")", ":", "matches", "=", "[", "]", "ignored1", ",", "elements", ",", "ignored2", "=", "self", ".", "__class__", ".", "_get_rules", "(", "version", ")", "if", "elements", ":", "for", "qname", ",", "element_def", "in", "elements", ".", "iteritems", "(", ")", ":", "member", "=", "getattr", "(", "self", ",", "element_def", "[", "0", "]", ")", "if", "member", ":", "if", "_qname_matches", "(", "tag", ",", "namespace", ",", "qname", ")", ":", "if", "element_def", "[", "2", "]", ":", "# If this is a repeating element, copy all instances into the", "# result list.", "matches", ".", "extend", "(", "member", ")", "else", ":", "matches", ".", "append", "(", "member", ")", "for", "element", "in", "self", ".", "_other_elements", ":", "if", "_qname_matches", "(", "tag", ",", "namespace", ",", "element", ".", "_qname", ")", ":", "matches", ".", "append", "(", "element", ")", "return", "matches" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/gdata/src/atom/core.py#L190-L223
vivisect/vivisect
37b0b655d8dedfcf322e86b0f144b096e48d547e
cobra/__init__.py
python
SocketBuilder.setSslClientCert
(self, crtfile, keyfile)
Set the cert/key used by this client to negotiate SSL.
Set the cert/key used by this client to negotiate SSL.
[ "Set", "the", "cert", "/", "key", "used", "by", "this", "client", "to", "negotiate", "SSL", "." ]
def setSslClientCert(self, crtfile, keyfile): ''' Set the cert/key used by this client to negotiate SSL. ''' self.ssl = True self.sslcrt = crtfile self.sslkey = keyfile
[ "def", "setSslClientCert", "(", "self", ",", "crtfile", ",", "keyfile", ")", ":", "self", ".", "ssl", "=", "True", "self", ".", "sslcrt", "=", "crtfile", "self", ".", "sslkey", "=", "keyfile" ]
https://github.com/vivisect/vivisect/blob/37b0b655d8dedfcf322e86b0f144b096e48d547e/cobra/__init__.py#L299-L305
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/apps/userreports/models.py
python
ReportConfiguration.save
(self, *args, **kwargs)
[]
def save(self, *args, **kwargs): self.report_meta.last_modified = datetime.utcnow() super(ReportConfiguration, self).save(*args, **kwargs)
[ "def", "save", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "report_meta", ".", "last_modified", "=", "datetime", ".", "utcnow", "(", ")", "super", "(", "ReportConfiguration", ",", "self", ")", ".", "save", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/userreports/models.py#L744-L746
frictionlessdata/datapackage-py
52416bd5be2e146490ee91f51b80d9b2178e0070
datapackage/resource.py
python
Resource.schema
(self)
return self.__get_table().schema
Resource's schema > Only for tabular resources For tabular resources it returns `Schema` instance to interact with data schema. Read API documentation - [tableschema.Schema](https://github.com/frictionlessdata/tableschema-py#schema). # Returns tableschema.Schema: schema
Resource's schema
[ "Resource", "s", "schema" ]
def schema(self): """Resource's schema > Only for tabular resources For tabular resources it returns `Schema` instance to interact with data schema. Read API documentation - [tableschema.Schema](https://github.com/frictionlessdata/tableschema-py#schema). # Returns tableschema.Schema: schema """ if not self.tabular: return None return self.__get_table().schema
[ "def", "schema", "(", "self", ")", ":", "if", "not", "self", ".", "tabular", ":", "return", "None", "return", "self", ".", "__get_table", "(", ")", ".", "schema" ]
https://github.com/frictionlessdata/datapackage-py/blob/52416bd5be2e146490ee91f51b80d9b2178e0070/datapackage/resource.py#L252-L266
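A usage sketch with datapackage-py; the descriptor path and resource name are placeholders:

from datapackage import Package

pkg = Package("datapackage.json")      # placeholder descriptor path
res = pkg.get_resource("my-resource")  # placeholder resource name
if res is not None and res.tabular:
    print([f.name for f in res.schema.fields])  # tableschema field names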
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/polys/factortools.py
python
dup_zz_irreducible_p
(f, K)
Test irreducibility using Eisenstein's criterion.
Test irreducibility using Eisenstein's criterion.
[ "Test", "irreducibility", "using", "Eisenstein", "s", "criterion", "." ]
def dup_zz_irreducible_p(f, K): """Test irreducibility using Eisenstein's criterion. """ lc = dup_LC(f, K) tc = dup_TC(f, K) e_fc = dup_content(f[1:], K) if e_fc: e_ff = factorint(int(e_fc)) for p in e_ff.keys(): if (lc % p) and (tc % p**2): return True
[ "def", "dup_zz_irreducible_p", "(", "f", ",", "K", ")", ":", "lc", "=", "dup_LC", "(", "f", ",", "K", ")", "tc", "=", "dup_TC", "(", "f", ",", "K", ")", "e_fc", "=", "dup_content", "(", "f", "[", "1", ":", "]", ",", "K", ")", "if", "e_fc", ":", "e_ff", "=", "factorint", "(", "int", "(", "e_fc", ")", ")", "for", "p", "in", "e_ff", ".", "keys", "(", ")", ":", "if", "(", "lc", "%", "p", ")", "and", "(", "tc", "%", "p", "**", "2", ")", ":", "return", "True" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/polys/factortools.py#L364-L376
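A sketch at the same low level, assuming SymPy's dense polynomial representation (coefficients from the leading term down); x**2 + 2*x + 2 satisfies Eisenstein's criterion at p=2. Note the function returns None rather than False when the criterion does not apply:

from sympy.polys.domains import ZZ
from sympy.polys.factortools import dup_zz_irreducible_p

# [1, 2, 2] encodes x**2 + 2*x + 2: p=2 divides 2 and 2 but not the
# leading 1, and p**2 = 4 does not divide the trailing 2.
print(dup_zz_irreducible_p([ZZ(1), ZZ(2), ZZ(2)], ZZ))  # True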
HCIILAB/DeRPN
21e6738ee1f7d3f159ee48d435c543e773f8ce99
caffe/tools/extra/extract_seconds.py
python
extract_seconds
(input_file, output_file)
[]
def extract_seconds(input_file, output_file): with open(input_file, 'r') as f: lines = f.readlines() log_created_year = get_log_created_year(input_file) start_datetime = get_start_time(lines, log_created_year) assert start_datetime, 'Start time not found' out = open(output_file, 'w') for line in lines: line = line.strip() if line.find('Iteration') != -1: dt = extract_datetime_from_line(line, log_created_year) elapsed_seconds = (dt - start_datetime).total_seconds() out.write('%f\n' % elapsed_seconds) out.close()
[ "def", "extract_seconds", "(", "input_file", ",", "output_file", ")", ":", "with", "open", "(", "input_file", ",", "'r'", ")", "as", "f", ":", "lines", "=", "f", ".", "readlines", "(", ")", "log_created_year", "=", "get_log_created_year", "(", "input_file", ")", "start_datetime", "=", "get_start_time", "(", "lines", ",", "log_created_year", ")", "assert", "start_datetime", ",", "'Start time not found'", "out", "=", "open", "(", "output_file", ",", "'w'", ")", "for", "line", "in", "lines", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "line", ".", "find", "(", "'Iteration'", ")", "!=", "-", "1", ":", "dt", "=", "extract_datetime_from_line", "(", "line", ",", "log_created_year", ")", "elapsed_seconds", "=", "(", "dt", "-", "start_datetime", ")", ".", "total_seconds", "(", ")", "out", ".", "write", "(", "'%f\\n'", "%", "elapsed_seconds", ")", "out", ".", "close", "(", ")" ]
https://github.com/HCIILAB/DeRPN/blob/21e6738ee1f7d3f159ee48d435c543e773f8ce99/caffe/tools/extra/extract_seconds.py#L44-L58
hubblestack/hubble
763142474edcecdec5fd25591dc29c3536e8f969
hubblestack/modules/file.py
python
user_to_uid
(user)
Convert user name to a uid user user name to convert to its uid CLI Example: .. code-block:: bash salt '*' file.user_to_uid root
Convert user name to a uid
[ "Convert", "user", "name", "to", "a", "uid" ]
def user_to_uid(user): """ Convert user name to a uid user user name to convert to its uid CLI Example: .. code-block:: bash salt '*' file.user_to_uid root """ if user is None: user = hubblestack.utils.user.get_user() try: if isinstance(user, int): return user return pwd.getpwnam(user).pw_uid except KeyError: return ""
[ "def", "user_to_uid", "(", "user", ")", ":", "if", "user", "is", "None", ":", "user", "=", "hubblestack", ".", "utils", ".", "user", ".", "get_user", "(", ")", "try", ":", "if", "isinstance", "(", "user", ",", "int", ")", ":", "return", "user", "return", "pwd", ".", "getpwnam", "(", "user", ")", ".", "pw_uid", "except", "KeyError", ":", "return", "\"\"" ]
https://github.com/hubblestack/hubble/blob/763142474edcecdec5fd25591dc29c3536e8f969/hubblestack/modules/file.py#L60-L80
DataBiosphere/toil
2e148eee2114ece8dcc3ec8a83f36333266ece0d
src/toil/fileStores/abstractFileStore.py
python
AbstractFileStore.getLocalTempFile
(self)
return os.path.abspath(tmpFile)
Get a new local temporary file that will persist for the duration of the job. :return: The absolute path to a local temporary file. This file will exist for the duration of the job only, and is guaranteed to be deleted once the job terminates.
Get a new local temporary file that will persist for the duration of the job.
[ "Get", "a", "new", "local", "temporary", "file", "that", "will", "persist", "for", "the", "duration", "of", "the", "job", "." ]
def getLocalTempFile(self) -> str: """ Get a new local temporary file that will persist for the duration of the job. :return: The absolute path to a local temporary file. This file will exist for the duration of the job only, and is guaranteed to be deleted once the job terminates. """ handle, tmpFile = tempfile.mkstemp( prefix="tmp", suffix=".tmp", dir=self.localTempDir ) os.close(handle) return os.path.abspath(tmpFile)
[ "def", "getLocalTempFile", "(", "self", ")", "->", "str", ":", "handle", ",", "tmpFile", "=", "tempfile", ".", "mkstemp", "(", "prefix", "=", "\"tmp\"", ",", "suffix", "=", "\".tmp\"", ",", "dir", "=", "self", ".", "localTempDir", ")", "os", ".", "close", "(", "handle", ")", "return", "os", ".", "path", ".", "abspath", "(", "tmpFile", ")" ]
https://github.com/DataBiosphere/toil/blob/2e148eee2114ece8dcc3ec8a83f36333266ece0d/src/toil/fileStores/abstractFileStore.py#L213-L225
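A hedged sketch of reaching this from inside a Toil job function (the job body is illustrative):

from toil.job import Job

def my_job(job):
    # fileStore is available while the job runs; the temp file lives only
    # for this job's duration.
    path = job.fileStore.getLocalTempFile()
    with open(path, "w") as fh:
        fh.write("scratch data")

root = Job.wrapJobFn(my_job)  # wire into a workflow as usual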
facebookresearch/detectron2
cb92ae1763cd7d3777c243f07749574cdaec6cb8
detectron2/export/shared.py
python
_generic_status_identifier
( predict_net: caffe2_pb2.NetDef, status_updater: Callable, known_status: Dict[Tuple[str, int], Any], )
return _known_status
Statically infer the status of each blob, the status can be such as device type (CPU/GPU), layout (NCHW/NHWC), data type (float32/int8), etc. "Blob" here is versioned blob (Tuple[str, int]) in the format compatible with ssa. Inputs: predict_net: the caffe2 network status_updater: a callable, given an op and the status of its input/output, it returns the updated status of input/output. `None` is used for representing unknown status. known_status: a dict containing known status, used as initialization. Outputs: A dict mapping from versioned blob to its status
Statically infer the status of each blob, the status can be such as device type (CPU/GPU), layout (NCHW/NHWC), data type (float32/int8), etc. "Blob" here is versioned blob (Tuple[str, int]) in the format compatible with ssa. Inputs: predict_net: the caffe2 network status_updater: a callable, given an op and the status of its input/output, it returns the updated status of input/output. `None` is used for representing unknown status. known_status: a dict containing known status, used as initialization. Outputs: A dict mapping from versioned blob to its status
[ "Statically", "infer", "the", "status", "of", "each", "blob", "the", "status", "can", "be", "such", "as", "device", "type", "(", "CPU", "/", "GPU", ")", "layout", "(", "NCHW", "/", "NHWC", ")", "data", "type", "(", "float32", "/", "int8", ")", "etc", ".", "Blob", "here", "is", "versioned", "blob", "(", "Tuple", "[", "str", "int", "]", ")", "in", "the", "format", "compatible", "with", "ssa", ".", "Inputs", ":", "predict_net", ":", "the", "caffe2", "network", "status_updater", ":", "a", "callable", "given", "an", "op", "and", "the", "status", "of", "its", "input", "/", "output", "it", "returns", "the", "updated", "status", "of", "input", "/", "output", ".", "None", "is", "used", "for", "representing", "unknown", "status", ".", "known_status", ":", "a", "dict", "containing", "known", "status", "used", "as", "initialization", ".", "Outputs", ":", "A", "dict", "mapping", "from", "versioned", "blob", "to", "its", "status" ]
def _generic_status_identifier( predict_net: caffe2_pb2.NetDef, status_updater: Callable, known_status: Dict[Tuple[str, int], Any], ) -> Dict[Tuple[str, int], Any]: """ Statically infer the status of each blob, the status can be such as device type (CPU/GPU), layout (NCHW/NHWC), data type (float32/int8), etc. "Blob" here is versioned blob (Tuple[str, int]) in the format compatible with ssa. Inputs: predict_net: the caffe2 network status_updater: a callable, given an op and the status of its input/output, it returns the updated status of input/output. `None` is used for representing unknown status. known_status: a dict containing known status, used as initialization. Outputs: A dict mapping from versioned blob to its status """ ssa, versions = core.get_ssa(predict_net) versioned_ext_input = [(b, 0) for b in predict_net.external_input] versioned_ext_output = [(b, versions[b]) for b in predict_net.external_output] all_versioned_blobs = set().union(*[set(x[0] + x[1]) for x in ssa]) allowed_vbs = all_versioned_blobs.union(versioned_ext_input).union(versioned_ext_output) assert all(k in allowed_vbs for k in known_status) assert all(v is not None for v in known_status.values()) _known_status = copy.deepcopy(known_status) def _check_and_update(key, value): assert value is not None if key in _known_status: if not _known_status[key] == value: raise RuntimeError( "Conflict status for {}, existing status {}, new status {}".format( key, _known_status[key], value ) ) _known_status[key] = value def _update_i(op, ssa_i): versioned_inputs = ssa_i[0] versioned_outputs = ssa_i[1] inputs_status = [_known_status.get(b, None) for b in versioned_inputs] outputs_status = [_known_status.get(b, None) for b in versioned_outputs] new_inputs_status, new_outputs_status = status_updater(op, inputs_status, outputs_status) for versioned_blob, status in zip( versioned_inputs + versioned_outputs, new_inputs_status + new_outputs_status ): if status is not None: _check_and_update(versioned_blob, status) for op, ssa_i in zip(predict_net.op, ssa): _update_i(op, ssa_i) for op, ssa_i in zip(reversed(predict_net.op), reversed(ssa)): _update_i(op, ssa_i) # NOTE: This strictly checks all the blobs from predict_net must be assigned # a known status. However sometimes it's impossible (eg. having deadend op), # we may relax this constraint if needed. for k in all_versioned_blobs: if k not in _known_status: raise NotImplementedError( "Can not infer the status for {}. Currently only support the case where" " a single forward and backward pass can identify status for all blobs.".format(k) ) return _known_status
[ "def", "_generic_status_identifier", "(", "predict_net", ":", "caffe2_pb2", ".", "NetDef", ",", "status_updater", ":", "Callable", ",", "known_status", ":", "Dict", "[", "Tuple", "[", "str", ",", "int", "]", ",", "Any", "]", ",", ")", "->", "Dict", "[", "Tuple", "[", "str", ",", "int", "]", ",", "Any", "]", ":", "ssa", ",", "versions", "=", "core", ".", "get_ssa", "(", "predict_net", ")", "versioned_ext_input", "=", "[", "(", "b", ",", "0", ")", "for", "b", "in", "predict_net", ".", "external_input", "]", "versioned_ext_output", "=", "[", "(", "b", ",", "versions", "[", "b", "]", ")", "for", "b", "in", "predict_net", ".", "external_output", "]", "all_versioned_blobs", "=", "set", "(", ")", ".", "union", "(", "*", "[", "set", "(", "x", "[", "0", "]", "+", "x", "[", "1", "]", ")", "for", "x", "in", "ssa", "]", ")", "allowed_vbs", "=", "all_versioned_blobs", ".", "union", "(", "versioned_ext_input", ")", ".", "union", "(", "versioned_ext_output", ")", "assert", "all", "(", "k", "in", "allowed_vbs", "for", "k", "in", "known_status", ")", "assert", "all", "(", "v", "is", "not", "None", "for", "v", "in", "known_status", ".", "values", "(", ")", ")", "_known_status", "=", "copy", ".", "deepcopy", "(", "known_status", ")", "def", "_check_and_update", "(", "key", ",", "value", ")", ":", "assert", "value", "is", "not", "None", "if", "key", "in", "_known_status", ":", "if", "not", "_known_status", "[", "key", "]", "==", "value", ":", "raise", "RuntimeError", "(", "\"Conflict status for {}, existing status {}, new status {}\"", ".", "format", "(", "key", ",", "_known_status", "[", "key", "]", ",", "value", ")", ")", "_known_status", "[", "key", "]", "=", "value", "def", "_update_i", "(", "op", ",", "ssa_i", ")", ":", "versioned_inputs", "=", "ssa_i", "[", "0", "]", "versioned_outputs", "=", "ssa_i", "[", "1", "]", "inputs_status", "=", "[", "_known_status", ".", "get", "(", "b", ",", "None", ")", "for", "b", "in", "versioned_inputs", "]", "outputs_status", "=", "[", "_known_status", ".", "get", "(", "b", ",", "None", ")", "for", "b", "in", "versioned_outputs", "]", "new_inputs_status", ",", "new_outputs_status", "=", "status_updater", "(", "op", ",", "inputs_status", ",", "outputs_status", ")", "for", "versioned_blob", ",", "status", "in", "zip", "(", "versioned_inputs", "+", "versioned_outputs", ",", "new_inputs_status", "+", "new_outputs_status", ")", ":", "if", "status", "is", "not", "None", ":", "_check_and_update", "(", "versioned_blob", ",", "status", ")", "for", "op", ",", "ssa_i", "in", "zip", "(", "predict_net", ".", "op", ",", "ssa", ")", ":", "_update_i", "(", "op", ",", "ssa_i", ")", "for", "op", ",", "ssa_i", "in", "zip", "(", "reversed", "(", "predict_net", ".", "op", ")", ",", "reversed", "(", "ssa", ")", ")", ":", "_update_i", "(", "op", ",", "ssa_i", ")", "# NOTE: This strictly checks all the blobs from predict_net must be assigned", "# a known status. However sometimes it's impossible (eg. having deadend op),", "# we may relax this constraint if needed.", "for", "k", "in", "all_versioned_blobs", ":", "if", "k", "not", "in", "_known_status", ":", "raise", "NotImplementedError", "(", "\"Can not infer the status for {}. Currently only support the case where\"", "\" a single forward and backward pass can identify status for all blobs.\"", ".", "format", "(", "k", ")", ")", "return", "_known_status" ]
https://github.com/facebookresearch/detectron2/blob/cb92ae1763cd7d3777c243f07749574cdaec6cb8/detectron2/export/shared.py#L376-L445
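The shape of the status_updater callable is the contract that matters here; a minimal sketch that propagates a single shared status (say, a device type) from any known slot of an op to all its other slots:

def propagate_uniform_status(op, inputs_status, outputs_status):
    # Pick any known status among this op's inputs/outputs...
    known = [s for s in inputs_status + outputs_status if s is not None]
    status = known[0] if known else None
    # ...and propose it for every slot; None leaves a slot undetermined.
    return [status] * len(inputs_status), [status] * len(outputs_status)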
oracle/oci-python-sdk
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
src/oci/cloud_guard/cloud_guard_client_composite_operations.py
python
CloudGuardClientCompositeOperations.update_target_and_wait_for_state
(self, target_id, update_target_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={})
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_target` and waits for the :py:class:`~oci.cloud_guard.models.Target` acted upon to enter the given state(s). :param str target_id: (required) OCID of target :param oci.cloud_guard.models.UpdateTargetDetails update_target_details: (required) The information to be updated. :param list[str] wait_for_states: An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.Target.lifecycle_state` :param dict operation_kwargs: A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_target` :param dict waiter_kwargs: A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_target` and waits for the :py:class:`~oci.cloud_guard.models.Target` acted upon to enter the given state(s).
[ "Calls", ":", "py", ":", "func", ":", "~oci", ".", "cloud_guard", ".", "CloudGuardClient", ".", "update_target", "and", "waits", "for", "the", ":", "py", ":", "class", ":", "~oci", ".", "cloud_guard", ".", "models", ".", "Target", "acted", "upon", "to", "enter", "the", "given", "state", "(", "s", ")", "." ]
def update_target_and_wait_for_state(self, target_id, update_target_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}): """ Calls :py:func:`~oci.cloud_guard.CloudGuardClient.update_target` and waits for the :py:class:`~oci.cloud_guard.models.Target` acted upon to enter the given state(s). :param str target_id: (required) OCID of target :param oci.cloud_guard.models.UpdateTargetDetails update_target_details: (required) The information to be updated. :param list[str] wait_for_states: An array of states to wait on. These should be valid values for :py:attr:`~oci.cloud_guard.models.Target.lifecycle_state` :param dict operation_kwargs: A dictionary of keyword arguments to pass to :py:func:`~oci.cloud_guard.CloudGuardClient.update_target` :param dict waiter_kwargs: A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds`` as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait """ operation_result = self.client.update_target(target_id, update_target_details, **operation_kwargs) if not wait_for_states: return operation_result lowered_wait_for_states = [w.lower() for w in wait_for_states] wait_for_resource_id = operation_result.data.id try: waiter_result = oci.wait_until( self.client, self.client.get_target(wait_for_resource_id), evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states, **waiter_kwargs ) result_to_return = waiter_result return result_to_return except Exception as e: raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
[ "def", "update_target_and_wait_for_state", "(", "self", ",", "target_id", ",", "update_target_details", ",", "wait_for_states", "=", "[", "]", ",", "operation_kwargs", "=", "{", "}", ",", "waiter_kwargs", "=", "{", "}", ")", ":", "operation_result", "=", "self", ".", "client", ".", "update_target", "(", "target_id", ",", "update_target_details", ",", "*", "*", "operation_kwargs", ")", "if", "not", "wait_for_states", ":", "return", "operation_result", "lowered_wait_for_states", "=", "[", "w", ".", "lower", "(", ")", "for", "w", "in", "wait_for_states", "]", "wait_for_resource_id", "=", "operation_result", ".", "data", ".", "id", "try", ":", "waiter_result", "=", "oci", ".", "wait_until", "(", "self", ".", "client", ",", "self", ".", "client", ".", "get_target", "(", "wait_for_resource_id", ")", ",", "evaluate_response", "=", "lambda", "r", ":", "getattr", "(", "r", ".", "data", ",", "'lifecycle_state'", ")", "and", "getattr", "(", "r", ".", "data", ",", "'lifecycle_state'", ")", ".", "lower", "(", ")", "in", "lowered_wait_for_states", ",", "*", "*", "waiter_kwargs", ")", "result_to_return", "=", "waiter_result", "return", "result_to_return", "except", "Exception", "as", "e", ":", "raise", "oci", ".", "exceptions", ".", "CompositeOperationError", "(", "partial_results", "=", "[", "operation_result", "]", ",", "cause", "=", "e", ")" ]
https://github.com/oracle/oci-python-sdk/blob/3c1604e4e212008fb6718e2f68cdb5ef71fd5793/src/oci/cloud_guard/cloud_guard_client_composite_operations.py#L785-L824
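A client-side sketch with a placeholder OCID and display name; the composite wrapper polls get_target until the requested lifecycle state is reached:

import oci
from oci.cloud_guard import CloudGuardClient, CloudGuardClientCompositeOperations
from oci.cloud_guard.models import UpdateTargetDetails

config = oci.config.from_file()  # default ~/.oci/config profile
composite = CloudGuardClientCompositeOperations(CloudGuardClient(config))
result = composite.update_target_and_wait_for_state(
    target_id="ocid1.cloudguardtarget.oc1..example",  # placeholder OCID
    update_target_details=UpdateTargetDetails(display_name="new-name"),
    wait_for_states=["ACTIVE"],
)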
Alexey-T/CudaText
6a8b9a974c5d5029c6c273bde83198c83b3a5fb9
app/py/sys/chardet/cli/chardetect.py
python
description_of
(lines, name='stdin')
Return a string describing the probable encoding of a file or list of strings. :param lines: The lines to get the encoding of. :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str
Return a string describing the probable encoding of a file or list of strings.
[ "Return", "a", "string", "describing", "the", "probable", "encoding", "of", "a", "file", "or", "list", "of", "strings", "." ]
def description_of(lines, name='stdin'): """ Return a string describing the probable encoding of a file or list of strings. :param lines: The lines to get the encoding of. :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str """ u = UniversalDetector() for line in lines: line = bytearray(line) u.feed(line) # shortcut out of the loop to save reading further - particularly useful if we read a BOM. if u.done: break u.close() result = u.result if PY2: name = name.decode(sys.getfilesystemencoding(), 'ignore') if result['encoding']: return '{}: {} with confidence {}'.format(name, result['encoding'], result['confidence']) else: return '{}: no result'.format(name)
[ "def", "description_of", "(", "lines", ",", "name", "=", "'stdin'", ")", ":", "u", "=", "UniversalDetector", "(", ")", "for", "line", "in", "lines", ":", "line", "=", "bytearray", "(", "line", ")", "u", ".", "feed", "(", "line", ")", "# shortcut out of the loop to save reading further - particularly useful if we read a BOM.", "if", "u", ".", "done", ":", "break", "u", ".", "close", "(", ")", "result", "=", "u", ".", "result", "if", "PY2", ":", "name", "=", "name", ".", "decode", "(", "sys", ".", "getfilesystemencoding", "(", ")", ",", "'ignore'", ")", "if", "result", "[", "'encoding'", "]", ":", "return", "'{}: {} with confidence {}'", ".", "format", "(", "name", ",", "result", "[", "'encoding'", "]", ",", "result", "[", "'confidence'", "]", ")", "else", ":", "return", "'{}: no result'", ".", "format", "(", "name", ")" ]
https://github.com/Alexey-T/CudaText/blob/6a8b9a974c5d5029c6c273bde83198c83b3a5fb9/app/py/sys/chardet/cli/chardetect.py#L25-L50
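A sketch of calling this helper directly, which is what the chardetect CLI does per input file; it assumes the chardet package (where this helper lives at chardet.cli.chardetect), and the path and byte string are illustrative:

from chardet.cli.chardetect import description_of

with open("some_file.txt", "rb") as f:  # placeholder path
    print(description_of(f, f.name))

print(description_of([b"caf\xc3\xa9 au lait\n"], name="inline-sample"))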
Cue/scales
0aced26eb050ceb98ee9d5d6cdca8db448666986
src/greplin/scales/aggregation.py
python
MeterFormat.getCount
(self, data)
return data['count']
Get the count
Get the count
[ "Get", "the", "count" ]
def getCount(self, data): """Get the count""" assert data['type'] == "meter" return data['count']
[ "def", "getCount", "(", "self", ",", "data", ")", ":", "assert", "data", "[", "'type'", "]", "==", "\"meter\"", "return", "data", "[", "'count'", "]" ]
https://github.com/Cue/scales/blob/0aced26eb050ceb98ee9d5d6cdca8db448666986/src/greplin/scales/aggregation.py#L104-L107
ym2011/POC-EXP
206b22d3a6b2a172359678df33bbc5b2ad04b6c3
K8/Web-Exp/sqlmap/thirdparty/socks/socks.py
python
wrapmodule
(module)
wrapmodule(module) Attempts to replace a module's socket library with a SOCKS socket. Must set a default proxy using setdefaultproxy(...) first. This will only work on modules that import socket directly into the namespace; most of the Python Standard Library falls into this category.
wrapmodule(module) Attempts to replace a module's socket library with a SOCKS socket. Must set a default proxy using setdefaultproxy(...) first. This will only work on modules that import socket directly into the namespace; most of the Python Standard Library falls into this category.
[ "wrapmodule", "(", "module", ")", "Attempts", "to", "replace", "a", "module", "s", "socket", "library", "with", "a", "SOCKS", "socket", ".", "Must", "set", "a", "default", "proxy", "using", "setdefaultproxy", "(", "...", ")", "first", ".", "This", "will", "only", "work", "on", "modules", "that", "import", "socket", "directly", "into", "the", "namespace", ";", "most", "of", "the", "Python", "Standard", "Library", "falls", "into", "this", "category", "." ]
def wrapmodule(module): """wrapmodule(module) Attempts to replace a module's socket library with a SOCKS socket. Must set a default proxy using setdefaultproxy(...) first. This will only work on modules that import socket directly into the namespace; most of the Python Standard Library falls into this category. """ if _defaultproxy != None: module.socket.socket = socksocket module.socket.create_connection = create_connection else: raise GeneralProxyError((4, "no proxy specified"))
[ "def", "wrapmodule", "(", "module", ")", ":", "if", "_defaultproxy", "!=", "None", ":", "module", ".", "socket", ".", "socket", "=", "socksocket", "module", ".", "socket", ".", "create_connection", "=", "create_connection", "else", ":", "raise", "GeneralProxyError", "(", "(", "4", ",", "\"no proxy specified\"", ")", ")" ]
https://github.com/ym2011/POC-EXP/blob/206b22d3a6b2a172359678df33bbc5b2ad04b6c3/K8/Web-Exp/sqlmap/thirdparty/socks/socks.py#L103-L114
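A classic usage sketch for this module-level monkey-patch; the proxy host and port are placeholders, and setdefaultproxy(...) must run first, as the docstring says:

import ftplib
import socks

# Route everything ftplib does through a local SOCKS5 proxy (e.g. Tor).
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
socks.wrapmodule(ftplib)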
Azure/azure-cli
6c1b085a0910c6c2139006fcbd8ade44006eb6dd
src/azure-cli/azure/cli/command_modules/acs/_validators.py
python
validate_linux_host_name
(namespace)
Validates a string as a legal host name component. This validation will also occur server-side in the ARM API, but that may take a minute or two before the user sees it. So it's more user-friendly to validate in the CLI pre-flight.
Validates a string as a legal host name component.
[ "Validates", "a", "string", "as", "a", "legal", "host", "name", "component", "." ]
def validate_linux_host_name(namespace): """Validates a string as a legal host name component. This validation will also occur server-side in the ARM API, but that may take a minute or two before the user sees it. So it's more user-friendly to validate in the CLI pre-flight. """ # https://stackoverflow.com/questions/106179/regular-expression-to-match-dns-hostname-or-ip-address rfc1123_regex = re.compile(r'^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$') # pylint:disable=line-too-long found = rfc1123_regex.findall(namespace.name) if not found: raise CLIError('--name cannot exceed 63 characters and can only contain ' 'letters, numbers, or dashes (-).')
[ "def", "validate_linux_host_name", "(", "namespace", ")", ":", "# https://stackoverflow.com/questions/106179/regular-expression-to-match-dns-hostname-or-ip-address", "rfc1123_regex", "=", "re", ".", "compile", "(", "r'^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])(\\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9]))*$'", ")", "# pylint:disable=line-too-long", "found", "=", "rfc1123_regex", ".", "findall", "(", "namespace", ".", "name", ")", "if", "not", "found", ":", "raise", "CLIError", "(", "'--name cannot exceed 63 characters and can only contain '", "'letters, numbers, or dashes (-).'", ")" ]
https://github.com/Azure/azure-cli/blob/6c1b085a0910c6c2139006fcbd8ade44006eb6dd/src/azure-cli/azure/cli/command_modules/acs/_validators.py#L139-L151
biolab/orange2
db40a9449cb45b507d63dcd5739b223f9cffb8e6
distribute_setup.py
python
_patch_file
(path, content)
return True
Will backup the file then patch it
Will backup the file then patch it
[ "Will", "backup", "the", "file", "then", "patch", "it" ]
def _patch_file(path, content): """Will backup the file then patch it""" f = open(path) existing_content = f.read() f.close() if existing_content == content: # already patched log.warn('Already patched.') return False log.warn('Patching...') _rename_path(path) f = open(path, 'w') try: f.write(content) finally: f.close() return True
[ "def", "_patch_file", "(", "path", ",", "content", ")", ":", "f", "=", "open", "(", "path", ")", "existing_content", "=", "f", ".", "read", "(", ")", "f", ".", "close", "(", ")", "if", "existing_content", "==", "content", ":", "# already patched", "log", ".", "warn", "(", "'Already patched.'", ")", "return", "False", "log", ".", "warn", "(", "'Patching...'", ")", "_rename_path", "(", "path", ")", "f", "=", "open", "(", "path", ",", "'w'", ")", "try", ":", "f", ".", "write", "(", "content", ")", "finally", ":", "f", ".", "close", "(", ")", "return", "True" ]
https://github.com/biolab/orange2/blob/db40a9449cb45b507d63dcd5739b223f9cffb8e6/distribute_setup.py#L250-L266
amymcgovern/pyparrot
bf4775ec1199b282e4edde1e4a8e018dcc8725e0
pyparrot/utils/vlc.py
python
MediaList.unlock
(self)
return libvlc_media_list_unlock(self)
Release lock on media list items The L{lock} should be held upon entering this function.
Release lock on media list items The L{lock} should be held upon entering this function.
[ "Release", "lock", "on", "media", "list", "items", "The", "L", "{", "lock", "}", "should", "be", "held", "upon", "entering", "this", "function", "." ]
def unlock(self): '''Release lock on media list items The L{lock} should be held upon entering this function. ''' return libvlc_media_list_unlock(self)
[ "def", "unlock", "(", "self", ")", ":", "return", "libvlc_media_list_unlock", "(", "self", ")" ]
https://github.com/amymcgovern/pyparrot/blob/bf4775ec1199b282e4edde1e4a8e018dcc8725e0/pyparrot/utils/vlc.py#L2986-L2990
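A lock/unlock usage sketch with python-vlc; the media path is a placeholder:

import vlc

ml = vlc.MediaList()
ml.lock()                              # hold the list lock while mutating
ml.add_media(vlc.Media("sample.mp4"))  # placeholder media path
ml.unlock()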
aliyun/aliyun-oss-python-sdk
5f2afa0928a58c7c1cc6317ac147f3637481f6fd
oss2/models.py
python
BucketQosInfo.__init__
(self, total_upload_bw=None, intranet_upload_bw=None, extranet_upload_bw=None, total_download_bw=None, intranet_download_bw=None, extranet_download_bw=None, total_qps=None, intranet_qps=None, extranet_qps=None)
[]
def __init__(self, total_upload_bw=None, intranet_upload_bw=None, extranet_upload_bw=None, total_download_bw=None, intranet_download_bw=None, extranet_download_bw=None, total_qps=None, intranet_qps=None, extranet_qps=None): self.total_upload_bw = total_upload_bw self.intranet_upload_bw = intranet_upload_bw self.extranet_upload_bw = extranet_upload_bw self.total_download_bw = total_download_bw self.intranet_download_bw = intranet_download_bw self.extranet_download_bw = extranet_download_bw self.total_qps = total_qps self.intranet_qps = intranet_qps self.extranet_qps = extranet_qps
[ "def", "__init__", "(", "self", ",", "total_upload_bw", "=", "None", ",", "intranet_upload_bw", "=", "None", ",", "extranet_upload_bw", "=", "None", ",", "total_download_bw", "=", "None", ",", "intranet_download_bw", "=", "None", ",", "extranet_download_bw", "=", "None", ",", "total_qps", "=", "None", ",", "intranet_qps", "=", "None", ",", "extranet_qps", "=", "None", ")", ":", "self", ".", "total_upload_bw", "=", "total_upload_bw", "self", ".", "intranet_upload_bw", "=", "intranet_upload_bw", "self", ".", "extranet_upload_bw", "=", "extranet_upload_bw", "self", ".", "total_download_bw", "=", "total_download_bw", "self", ".", "intranet_download_bw", "=", "intranet_download_bw", "self", ".", "extranet_download_bw", "=", "extranet_download_bw", "self", ".", "total_qps", "=", "total_qps", "self", ".", "intranet_qps", "=", "intranet_qps", "self", ".", "extranet_qps", "=", "extranet_qps" ]
https://github.com/aliyun/aliyun-oss-python-sdk/blob/5f2afa0928a58c7c1cc6317ac147f3637481f6fd/oss2/models.py#L1616-L1635
nucleic/enaml
65c2a2a2d765e88f2e1103046680571894bb41ed
enaml/core/code_generator.py
python
CodeGenerator.add_map
(self)
Store the key/value pair on the TOS into the map at 3rd pos.
Store the key/value pair on the TOS into the map at 3rd pos.
[ "Store", "the", "key", "/", "value", "pair", "on", "the", "TOS", "into", "the", "map", "at", "3rd", "pos", "." ]
def add_map(self): """ Store the key/value pair on the TOS into the map at 3rd pos. """ # WARNING in Python 3.8 the order is # TOS -> map -> key -> value self.code_ops.append( # TOS -> map -> value -> key bc.Instr("MAP_ADD", 1), )
[ "def", "add_map", "(", "self", ")", ":", "# WARNING in Python 3.8 the order is # TOS -> map -> key -> value", "self", ".", "code_ops", ".", "append", "(", "# TOS -> map -> value -> key", "bc", ".", "Instr", "(", "\"MAP_ADD\"", ",", "1", ")", ",", ")" ]
https://github.com/nucleic/enaml/blob/65c2a2a2d765e88f2e1103046680571894bb41ed/enaml/core/code_generator.py#L286-L293
openrazer/openrazer
1615f8516e8014bad7f78c781c91e6529679718f
pylib/openrazer/client/devices/mousemat.py
python
RazerMousemat.trigger_reactive
(self)
return self._dbus_interfaces['device'].triggerReactive()
Trigger a reactive flash
Trigger a reactive flash
[ "Trigger", "a", "reactive", "flash" ]
def trigger_reactive(self) -> bool: """ Trigger a reactive flash """ return self._dbus_interfaces['device'].triggerReactive()
[ "def", "trigger_reactive", "(", "self", ")", "->", "bool", ":", "return", "self", ".", "_dbus_interfaces", "[", "'device'", "]", ".", "triggerReactive", "(", ")" ]
https://github.com/openrazer/openrazer/blob/1615f8516e8014bad7f78c781c91e6529679718f/pylib/openrazer/client/devices/mousemat.py#L5-L9
joshfriend/flask-restful-demo
4571c5dd3c38c1563894eb9558f44f8c4ddd8c7d
migrations/env.py
python
run_migrations_offline
()
Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output.
Run migrations in 'offline' mode.
[ "Run", "migrations", "in", "offline", "mode", "." ]
def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ url = config.get_main_option("sqlalchemy.url") context.configure(url=url) with context.begin_transaction(): context.run_migrations()
[ "def", "run_migrations_offline", "(", ")", ":", "url", "=", "config", ".", "get_main_option", "(", "\"sqlalchemy.url\"", ")", "context", ".", "configure", "(", "url", "=", "url", ")", "with", "context", ".", "begin_transaction", "(", ")", ":", "context", ".", "run_migrations", "(", ")" ]
https://github.com/joshfriend/flask-restful-demo/blob/4571c5dd3c38c1563894eb9558f44f8c4ddd8c7d/migrations/env.py#L27-L43
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit /tools/sqli/thirdparty/xdot/xdot.py
python
ZoomAction.drag
(self, deltax, deltay)
[]
def drag(self, deltax, deltay): self.dot_widget.zoom_ratio *= 1.005 ** (deltax + deltay) self.dot_widget.zoom_to_fit_on_resize = False self.dot_widget.queue_draw()
[ "def", "drag", "(", "self", ",", "deltax", ",", "deltay", ")", ":", "self", ".", "dot_widget", ".", "zoom_ratio", "*=", "1.005", "**", "(", "deltax", "+", "deltay", ")", "self", ".", "dot_widget", ".", "zoom_to_fit_on_resize", "=", "False", "self", ".", "dot_widget", ".", "queue_draw", "(", ")" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit /tools/sqli/thirdparty/xdot/xdot.py#L1335-L1338
heynemann/motorengine
5e1fea7cc15060f768a697fe4c3593d20f23c4ed
motorengine/query_builder/field_list.py
python
QueryFieldList.__add__
(self, f)
return self
[]
def __add__(self, f): if isinstance(f.value, dict): for field in f.fields: self.slice[field] = f.value if not self.fields: self.fields = f.fields elif not self.fields: self.fields = f.fields self.value = f.value self.slice = {} elif self.value is self.ONLY and f.value is self.ONLY: self._clean_slice() if self._only_called: self.fields = self.fields.union(f.fields) else: self.fields = f.fields elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: self.fields = self.fields.union(f.fields) self._clean_slice() elif self.value is self.ONLY and f.value is self.EXCLUDE: self.fields -= f.fields self._clean_slice() elif self.value is self.EXCLUDE and f.value is self.ONLY: self.value = self.ONLY self.fields = f.fields - self.fields self._clean_slice() # _id should be saved separately to avoid situations such as # exclude('_id').only('other') so the previous code of this method # removes _id from self.fields (it's a normal behavior for any field # except for _id because _id field cannot be removed with only) if '_id' in f.fields: self._id = f.value if self.always_include: if self.value is self.ONLY and self.fields: if sorted(self.slice.keys()) != sorted(self.fields): self.fields = self.fields.union(self.always_include) else: # if this is exclude - remove from fields values from # always included fields self.fields -= self.always_include if getattr(f, '_only_called', False): self._only_called = True return self
[ "def", "__add__", "(", "self", ",", "f", ")", ":", "if", "isinstance", "(", "f", ".", "value", ",", "dict", ")", ":", "for", "field", "in", "f", ".", "fields", ":", "self", ".", "slice", "[", "field", "]", "=", "f", ".", "value", "if", "not", "self", ".", "fields", ":", "self", ".", "fields", "=", "f", ".", "fields", "elif", "not", "self", ".", "fields", ":", "self", ".", "fields", "=", "f", ".", "fields", "self", ".", "value", "=", "f", ".", "value", "self", ".", "slice", "=", "{", "}", "elif", "self", ".", "value", "is", "self", ".", "ONLY", "and", "f", ".", "value", "is", "self", ".", "ONLY", ":", "self", ".", "_clean_slice", "(", ")", "if", "self", ".", "_only_called", ":", "self", ".", "fields", "=", "self", ".", "fields", ".", "union", "(", "f", ".", "fields", ")", "else", ":", "self", ".", "fields", "=", "f", ".", "fields", "elif", "self", ".", "value", "is", "self", ".", "EXCLUDE", "and", "f", ".", "value", "is", "self", ".", "EXCLUDE", ":", "self", ".", "fields", "=", "self", ".", "fields", ".", "union", "(", "f", ".", "fields", ")", "self", ".", "_clean_slice", "(", ")", "elif", "self", ".", "value", "is", "self", ".", "ONLY", "and", "f", ".", "value", "is", "self", ".", "EXCLUDE", ":", "self", ".", "fields", "-=", "f", ".", "fields", "self", ".", "_clean_slice", "(", ")", "elif", "self", ".", "value", "is", "self", ".", "EXCLUDE", "and", "f", ".", "value", "is", "self", ".", "ONLY", ":", "self", ".", "value", "=", "self", ".", "ONLY", "self", ".", "fields", "=", "f", ".", "fields", "-", "self", ".", "fields", "self", ".", "_clean_slice", "(", ")", "# _id should be saved separately to avoid situations such as", "# exclude('_id').only('other') so the previous code of this method", "# removes _id from self.fields (it's a normal behavior for any field", "# except for _id because _id field cannot be removed with only)", "if", "'_id'", "in", "f", ".", "fields", ":", "self", ".", "_id", "=", "f", ".", "value", "if", "self", ".", "always_include", ":", "if", "self", ".", "value", "is", "self", ".", "ONLY", "and", "self", ".", "fields", ":", "if", "sorted", "(", "self", ".", "slice", ".", "keys", "(", ")", ")", "!=", "sorted", "(", "self", ".", "fields", ")", ":", "self", ".", "fields", "=", "self", ".", "fields", ".", "union", "(", "self", ".", "always_include", ")", "else", ":", "# if this is exclude - remove from fields values from", "# always included fields", "self", ".", "fields", "-=", "self", ".", "always_include", "if", "getattr", "(", "f", ",", "'_only_called'", ",", "False", ")", ":", "self", ".", "_only_called", "=", "True", "return", "self" ]
https://github.com/heynemann/motorengine/blob/5e1fea7cc15060f768a697fe4c3593d20f23c4ed/motorengine/query_builder/field_list.py#L30-L75
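The __add__ above merges two field-projection specs with set algebra over only/exclude modes. A minimal standalone sketch of that core algebra (assumed semantics; the slice, _id, and always_include bookkeeping is deliberately left out):

ONLY, EXCLUDE = 1, 0

def merge(mode_a, fields_a, mode_b, fields_b):
    # Mirrors the branch structure of __add__ above, minus the extras.
    if mode_a is None:                      # no projection yet: adopt b
        return mode_b, set(fields_b)
    if mode_a == ONLY and mode_b == ONLY:   # only + only: replace
        return ONLY, set(fields_b)
    if mode_a == EXCLUDE and mode_b == EXCLUDE:
        return EXCLUDE, fields_a | fields_b
    if mode_a == ONLY and mode_b == EXCLUDE:
        return ONLY, fields_a - fields_b
    # exclude + only: keep only the fields not already excluded
    return ONLY, fields_b - fields_a

print(merge(EXCLUDE, {'a', 'b'}, ONLY, {'b', 'c'}))  # (1, {'c'})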
danmacnish/cartoonify
39ea84d96b3e93f0480e6d6158bea506d01278ca
cartoonify/app/object_detection/meta_architectures/faster_rcnn_meta_arch.py
python
FasterRCNNMetaArch._unpad_proposals_and_sample_box_classifier_batch
(self, proposal_boxes, proposal_scores, num_proposals, groundtruth_boxlists, groundtruth_classes_with_background_list)
return (tf.stack(single_image_proposal_box_sample), tf.stack(single_image_proposal_score_sample), tf.stack(single_image_num_proposals_sample))
Unpads proposals and samples a minibatch for second stage.

Args:
  proposal_boxes: A float tensor with shape
    [batch_size, num_proposals, 4] representing the (potentially zero
    padded) proposal boxes for all images in the batch.  These boxes are
    represented as normalized coordinates.
  proposal_scores: A float tensor with shape [batch_size, num_proposals]
    representing the (potentially zero padded) proposal objectness scores
    for all images in the batch.
  num_proposals: A Tensor of type `int32`. A 1-D tensor of shape [batch]
    representing the number of proposals predicted for each image in
    the batch.
  groundtruth_boxlists: A list of BoxLists containing (absolute)
    coordinates of the groundtruth boxes.
  groundtruth_classes_with_background_list: A list of 2-D one-hot
    (or k-hot) tensors of shape [num_boxes, num_classes+1] containing the
    class targets with the 0th index assumed to map to the background
    class.

Returns:
  proposal_boxes: A float tensor with shape
    [batch_size, second_stage_batch_size, 4] representing the (potentially
    zero padded) proposal boxes for all images in the batch.  These boxes
    are represented as normalized coordinates.
  proposal_scores: A float tensor with shape
    [batch_size, second_stage_batch_size] representing the (potentially
    zero padded) proposal objectness scores for all images in the batch.
  num_proposals: A Tensor of type `int32`. A 1-D tensor of shape [batch]
    representing the number of proposals predicted for each image in
    the batch.
Unpads proposals and samples a minibatch for second stage.
[ "Unpads", "proposals", "and", "samples", "a", "minibatch", "for", "second", "stage", "." ]
def _unpad_proposals_and_sample_box_classifier_batch(
    self, proposal_boxes, proposal_scores, num_proposals,
    groundtruth_boxlists, groundtruth_classes_with_background_list):
  """Unpads proposals and samples a minibatch for second stage.

  Args:
    proposal_boxes: A float tensor with shape
      [batch_size, num_proposals, 4] representing the (potentially zero
      padded) proposal boxes for all images in the batch.  These boxes are
      represented as normalized coordinates.
    proposal_scores: A float tensor with shape [batch_size, num_proposals]
      representing the (potentially zero padded) proposal objectness scores
      for all images in the batch.
    num_proposals: A Tensor of type `int32`. A 1-D tensor of shape [batch]
      representing the number of proposals predicted for each image in
      the batch.
    groundtruth_boxlists: A list of BoxLists containing (absolute)
      coordinates of the groundtruth boxes.
    groundtruth_classes_with_background_list: A list of 2-D one-hot
      (or k-hot) tensors of shape [num_boxes, num_classes+1] containing the
      class targets with the 0th index assumed to map to the background
      class.

  Returns:
    proposal_boxes: A float tensor with shape
      [batch_size, second_stage_batch_size, 4] representing the
      (potentially zero padded) proposal boxes for all images in the
      batch.  These boxes are represented as normalized coordinates.
    proposal_scores: A float tensor with shape
      [batch_size, second_stage_batch_size] representing the (potentially
      zero padded) proposal objectness scores for all images in the batch.
    num_proposals: A Tensor of type `int32`. A 1-D tensor of shape [batch]
      representing the number of proposals predicted for each image in
      the batch.
  """
  single_image_proposal_box_sample = []
  single_image_proposal_score_sample = []
  single_image_num_proposals_sample = []
  for (single_image_proposal_boxes,
       single_image_proposal_scores,
       single_image_num_proposals,
       single_image_groundtruth_boxlist,
       single_image_groundtruth_classes_with_background) in zip(
           tf.unstack(proposal_boxes),
           tf.unstack(proposal_scores),
           tf.unstack(num_proposals),
           groundtruth_boxlists,
           groundtruth_classes_with_background_list):
    static_shape = single_image_proposal_boxes.get_shape()
    sliced_static_shape = tf.TensorShape([tf.Dimension(None),
                                          static_shape.dims[-1]])
    single_image_proposal_boxes = tf.slice(
        single_image_proposal_boxes,
        [0, 0],
        [single_image_num_proposals, -1])
    single_image_proposal_boxes.set_shape(sliced_static_shape)
    single_image_proposal_scores = tf.slice(single_image_proposal_scores,
                                            [0],
                                            [single_image_num_proposals])
    single_image_boxlist = box_list.BoxList(single_image_proposal_boxes)
    single_image_boxlist.add_field(fields.BoxListFields.scores,
                                   single_image_proposal_scores)
    sampled_boxlist = self._sample_box_classifier_minibatch(
        single_image_boxlist,
        single_image_groundtruth_boxlist,
        single_image_groundtruth_classes_with_background)
    sampled_padded_boxlist = box_list_ops.pad_or_clip_box_list(
        sampled_boxlist,
        num_boxes=self._second_stage_batch_size)
    single_image_num_proposals_sample.append(tf.minimum(
        sampled_boxlist.num_boxes(),
        self._second_stage_batch_size))
    bb = sampled_padded_boxlist.get()
    single_image_proposal_box_sample.append(bb)
    single_image_proposal_score_sample.append(
        sampled_padded_boxlist.get_field(fields.BoxListFields.scores))
  return (tf.stack(single_image_proposal_box_sample),
          tf.stack(single_image_proposal_score_sample),
          tf.stack(single_image_num_proposals_sample))
[ "def", "_unpad_proposals_and_sample_box_classifier_batch", "(", "self", ",", "proposal_boxes", ",", "proposal_scores", ",", "num_proposals", ",", "groundtruth_boxlists", ",", "groundtruth_classes_with_background_list", ")", ":", "single_image_proposal_box_sample", "=", "[", "]", "single_image_proposal_score_sample", "=", "[", "]", "single_image_num_proposals_sample", "=", "[", "]", "for", "(", "single_image_proposal_boxes", ",", "single_image_proposal_scores", ",", "single_image_num_proposals", ",", "single_image_groundtruth_boxlist", ",", "single_image_groundtruth_classes_with_background", ")", "in", "zip", "(", "tf", ".", "unstack", "(", "proposal_boxes", ")", ",", "tf", ".", "unstack", "(", "proposal_scores", ")", ",", "tf", ".", "unstack", "(", "num_proposals", ")", ",", "groundtruth_boxlists", ",", "groundtruth_classes_with_background_list", ")", ":", "static_shape", "=", "single_image_proposal_boxes", ".", "get_shape", "(", ")", "sliced_static_shape", "=", "tf", ".", "TensorShape", "(", "[", "tf", ".", "Dimension", "(", "None", ")", ",", "static_shape", ".", "dims", "[", "-", "1", "]", "]", ")", "single_image_proposal_boxes", "=", "tf", ".", "slice", "(", "single_image_proposal_boxes", ",", "[", "0", ",", "0", "]", ",", "[", "single_image_num_proposals", ",", "-", "1", "]", ")", "single_image_proposal_boxes", ".", "set_shape", "(", "sliced_static_shape", ")", "single_image_proposal_scores", "=", "tf", ".", "slice", "(", "single_image_proposal_scores", ",", "[", "0", "]", ",", "[", "single_image_num_proposals", "]", ")", "single_image_boxlist", "=", "box_list", ".", "BoxList", "(", "single_image_proposal_boxes", ")", "single_image_boxlist", ".", "add_field", "(", "fields", ".", "BoxListFields", ".", "scores", ",", "single_image_proposal_scores", ")", "sampled_boxlist", "=", "self", ".", "_sample_box_classifier_minibatch", "(", "single_image_boxlist", ",", "single_image_groundtruth_boxlist", ",", "single_image_groundtruth_classes_with_background", ")", "sampled_padded_boxlist", "=", "box_list_ops", ".", "pad_or_clip_box_list", "(", "sampled_boxlist", ",", "num_boxes", "=", "self", ".", "_second_stage_batch_size", ")", "single_image_num_proposals_sample", ".", "append", "(", "tf", ".", "minimum", "(", "sampled_boxlist", ".", "num_boxes", "(", ")", ",", "self", ".", "_second_stage_batch_size", ")", ")", "bb", "=", "sampled_padded_boxlist", ".", "get", "(", ")", "single_image_proposal_box_sample", ".", "append", "(", "bb", ")", "single_image_proposal_score_sample", ".", "append", "(", "sampled_padded_boxlist", ".", "get_field", "(", "fields", ".", "BoxListFields", ".", "scores", ")", ")", "return", "(", "tf", ".", "stack", "(", "single_image_proposal_box_sample", ")", ",", "tf", ".", "stack", "(", "single_image_proposal_score_sample", ")", ",", "tf", ".", "stack", "(", "single_image_num_proposals_sample", ")", ")" ]
https://github.com/danmacnish/cartoonify/blob/39ea84d96b3e93f0480e6d6158bea506d01278ca/cartoonify/app/object_detection/meta_architectures/faster_rcnn_meta_arch.py#L941-L1023
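The unpad-then-repad pattern in the function above is easier to see in isolation. A small NumPy sketch of the same idea, with a hypothetical random "sampler" standing in for the groundtruth-driven minibatch sampler (the real code uses tf.slice and box_list_ops.pad_or_clip_box_list):

import numpy as np

def unpad_sample_repad(padded_boxes, num_valid, minibatch_size, rng):
    """Drop zero padding, sample up to minibatch_size rows, re-pad to a
    fixed shape. Mirrors the per-image loop above, minus TF and the
    groundtruth-driven sampler."""
    boxes = padded_boxes[:num_valid]                      # unpad
    k = min(len(boxes), minibatch_size)
    picked = boxes[rng.choice(len(boxes), size=k, replace=False)]
    out = np.zeros((minibatch_size, boxes.shape[1]), boxes.dtype)
    out[:k] = picked                                      # re-pad
    return out, k                                         # boxes, num_proposals

rng = np.random.default_rng(0)
padded = np.zeros((8, 4))
padded[:5] = rng.random((5, 4))
sampled, n = unpad_sample_repad(padded, num_valid=5, minibatch_size=3, rng=rng)
print(sampled.shape, n)  # (3, 4) 3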
ipython/ipython
c0abea7a6dfe52c1f74c9d0387d4accadba7cc14
IPython/core/interactiveshell.py
python
InteractiveShell.init_history
(self)
Sets up the command history, and starts regular autosaves.
Sets up the command history, and starts regular autosaves.
[ "Sets", "up", "the", "command", "history", "and", "starts", "regular", "autosaves", "." ]
def init_history(self):
    """Sets up the command history, and starts regular autosaves."""
    self.history_manager = HistoryManager(shell=self, parent=self)
    self.configurables.append(self.history_manager)
[ "def", "init_history", "(", "self", ")", ":", "self", ".", "history_manager", "=", "HistoryManager", "(", "shell", "=", "self", ",", "parent", "=", "self", ")", "self", ".", "configurables", ".", "append", "(", "self", ".", "history_manager", ")" ]
https://github.com/ipython/ipython/blob/c0abea7a6dfe52c1f74c9d0387d4accadba7cc14/IPython/core/interactiveshell.py#L1687-L1690
marionmari/pyGPs
792f3c6cb91126ade9f23a8e39d9cbcd30cfbc7b
pyGPs/GraphExtensions/nodeKernels.py
python
psInvLapKernel
(A)
return K
Pseudo inverse of the normalized Laplacian.

:param A: adjacency matrix
:return: kernel matrix
Pseudo inverse of the normalized Laplacian.
[ "Pseudo", "inverse", "of", "the", "normalized", "Laplacian", "." ]
def psInvLapKernel(A):
    '''
    Pseudo inverse of the normalized Laplacian.

    :param A: adjacency matrix
    :return: kernel matrix
    '''
    L = normLap(A)
    K = np.linalg.pinv(L)
    return K
[ "def", "psInvLapKernel", "(", "A", ")", ":", "L", "=", "normLap", "(", "A", ")", "K", "=", "np", ".", "linalg", ".", "pinv", "(", "L", ")", "return", "K" ]
https://github.com/marionmari/pyGPs/blob/792f3c6cb91126ade9f23a8e39d9cbcd30cfbc7b/pyGPs/GraphExtensions/nodeKernels.py#L51-L60
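psInvLapKernel depends on a normLap helper that is not shown in this record. Assuming the standard symmetric normalization L = I - D^{-1/2} A D^{-1/2} (an assumption about pyGPs' normLap, which is defined elsewhere in the module), the whole kernel can be sketched in a few lines of NumPy:

import numpy as np

def norm_lap(A):
    # Symmetric normalized Laplacian: L = I - D^{-1/2} A D^{-1/2}.
    d = A.sum(axis=1)
    d_inv_sqrt = np.where(d > 0, 1.0 / np.sqrt(d), 0.0)
    return np.eye(A.shape[0]) - d_inv_sqrt[:, None] * A * d_inv_sqrt[None, :]

def ps_inv_lap_kernel(A):
    return np.linalg.pinv(norm_lap(A))

# 3-node path graph
A = np.array([[0., 1., 0.], [1., 0., 1.], [0., 1., 0.]])
K = ps_inv_lap_kernel(A)
print(np.allclose(K, K.T))  # True: the kernel matrix is symmetric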
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/flaskbb/forum/views.py
python
NewPost.get
(self, topic_id, slug=None, post_id=None)
return render_template( 'forum/new_post.html', topic=topic, form=form )
[]
def get(self, topic_id, slug=None, post_id=None):
    topic = Topic.query.filter_by(id=topic_id).first_or_404()
    form = self.form()

    if post_id is not None:
        post = Post.query.filter_by(id=post_id).first_or_404()
        form.content.data = format_quote(post.username, post.content)

    return render_template(
        'forum/new_post.html', topic=topic, form=form
    )
[ "def", "get", "(", "self", ",", "topic_id", ",", "slug", "=", "None", ",", "post_id", "=", "None", ")", ":", "topic", "=", "Topic", ".", "query", ".", "filter_by", "(", "id", "=", "topic_id", ")", ".", "first_or_404", "(", ")", "form", "=", "self", ".", "form", "(", ")", "if", "post_id", "is", "not", "None", ":", "post", "=", "Post", ".", "query", ".", "filter_by", "(", "id", "=", "post_id", ")", ".", "first_or_404", "(", ")", "form", ".", "content", ".", "data", "=", "format_quote", "(", "post", ".", "username", ",", "post", ".", "content", ")", "return", "render_template", "(", "'forum/new_post.html'", ",", "topic", "=", "topic", ",", "form", "=", "form", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/flaskbb/forum/views.py#L431-L441
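format_quote is imported from elsewhere in FlaskBB and its exact markup is not shown in this record, but the role it plays in get — prefilling the reply form with a quote of an existing post — can be sketched with a hypothetical stand-in:

def format_quote(username, content):
    # Hypothetical stand-in for FlaskBB's helper: the real markup may
    # differ; the idea is a markdown-style quote with attribution.
    quoted = "\n".join("> " + line for line in content.splitlines())
    return "**{}** wrote:\n{}\n\n".format(username, quoted)

print(format_quote("alice", "first line\nsecond line"))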
cloudera/impyla
0c736af4cad2bade9b8e313badc08ec50e81c948
impala/_thrift_gen/hive_metastore/ttypes.py
python
GetPrincipalsInRoleRequest.__repr__
(self)
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
[]
def __repr__(self):
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
[ "def", "__repr__", "(", "self", ")", ":", "L", "=", "[", "'%s=%r'", "%", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "self", ".", "__dict__", ".", "items", "(", ")", "]", "return", "'%s(%s)'", "%", "(", "self", ".", "__class__", ".", "__name__", ",", "', '", ".", "join", "(", "L", ")", ")" ]
https://github.com/cloudera/impyla/blob/0c736af4cad2bade9b8e313badc08ec50e81c948/impala/_thrift_gen/hive_metastore/ttypes.py#L1876-L1879
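This __repr__ is the generic "reflect all instance attributes" idiom that Thrift-generated classes share; the same lines work for any plain data object, as in this small illustration:

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __repr__(self):
        # Same pattern as the generated Thrift code above.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

print(Point(1, 2))  # Point(x=1, y=2)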
microsoft/NimbusML
f6be39ce9359786976429bab0ccd837e849b4ba5
src/python/nimbusml/preprocessing/normalization/lpscaler.py
python
LpScaler._nodes_with_presteps
(self)
return [ TypeConverter( result_type='R4')._steal_io(self), self]
Inserts preprocessing before this one.
Inserts preprocessing before this one.
[ "Inserts", "preprocessing", "before", "this", "one", "." ]
def _nodes_with_presteps(self):
    """
    Inserts preprocessing before this one.
    """
    from ..schema import TypeConverter
    return [
        TypeConverter(
            result_type='R4')._steal_io(self),
        self]
[ "def", "_nodes_with_presteps", "(", "self", ")", ":", "from", ".", ".", "schema", "import", "TypeConverter", "return", "[", "TypeConverter", "(", "result_type", "=", "'R4'", ")", ".", "_steal_io", "(", "self", ")", ",", "self", "]" ]
https://github.com/microsoft/NimbusML/blob/f6be39ce9359786976429bab0ccd837e849b4ba5/src/python/nimbusml/preprocessing/normalization/lpscaler.py#L60-L68
timkpaine/tdameritrade
f877088d0c3aca9fef03e92b62074153c66194c1
tdameritrade/client.py
python
TDClient.instrument
(self, cusip)
return self._request(GET_INSTRUMENT.format(cusip=cusip)).json()
get instrument info from cusip

Args:
    cusip (str): the cusip to use, can find it by looking up in search
get instrument info from cusip
[ "get", "instrument", "info", "from", "cusip" ]
def instrument(self, cusip):
    """get instrument info from cusip

    Args:
        cusip (str): the cusip to use, can find it by looking up in search
    """
    return self._request(GET_INSTRUMENT.format(cusip=cusip)).json()
[ "def", "instrument", "(", "self", ",", "cusip", ")", ":", "return", "self", ".", "_request", "(", "GET_INSTRUMENT", ".", "format", "(", "cusip", "=", "cusip", ")", ")", ".", "json", "(", ")" ]
https://github.com/timkpaine/tdameritrade/blob/f877088d0c3aca9fef03e92b62074153c66194c1/tdameritrade/client.py#L216-L222
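Usage is a one-liner once a client exists. The snippet below is illustrative only: client construction and authentication are handled elsewhere in the library, and the CUSIP value is just an example string.

client = TDClient()  # real usage needs auth configuration; see the library docs
info = client.instrument('037833100')  # hypothetical CUSIP lookup
print(info)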
CvvT/dumpDex
92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1
python/idaapi.py
python
set_compiler_id
(*args)
return _idaapi.set_compiler_id(*args)
set_compiler_id(id) -> bool
set_compiler_id(id) -> bool
[ "set_compiler_id", "(", "id", ")", "-", ">", "bool" ]
def set_compiler_id(*args):
    """
    set_compiler_id(id) -> bool
    """
    return _idaapi.set_compiler_id(*args)
[ "def", "set_compiler_id", "(", "*", "args", ")", ":", "return", "_idaapi", ".", "set_compiler_id", "(", "*", "args", ")" ]
https://github.com/CvvT/dumpDex/blob/92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1/python/idaapi.py#L29472-L29476
samuelclay/NewsBlur
2c45209df01a1566ea105e04d499367f32ac9ad2
archive/fabfile.py
python
celery_reload
()
[]
def celery_reload():
    with virtualenv():
        run('sudo supervisorctl reload celery')
        run('tail logs/newsblur.log')
[ "def", "celery_reload", "(", ")", ":", "with", "virtualenv", "(", ")", ":", "run", "(", "'sudo supervisorctl reload celery'", ")", "run", "(", "'tail logs/newsblur.log'", ")" ]
https://github.com/samuelclay/NewsBlur/blob/2c45209df01a1566ea105e04d499367f32ac9ad2/archive/fabfile.py#L1880-L1883
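virtualenv() here is a context manager defined elsewhere in the fabfile. In Fabric 1.x that helper is conventionally built from cd and prefix, roughly like this (the paths are hypothetical; the real fabfile defines its own locations):

from contextlib import contextmanager
from fabric.api import cd, prefix, run

@contextmanager
def virtualenv():
    # Hypothetical paths -- activate the project's virtualenv for all
    # run() calls issued inside the block.
    with cd('/srv/newsblur'):
        with prefix('source venv/bin/activate'):
            yield

def celery_reload():
    with virtualenv():
        run('sudo supervisorctl reload celery')
        run('tail logs/newsblur.log')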