Dataset fields:
  partition: string (3 classes)
  func_name: string (1 to 134 chars)
  docstring: string (1 to 46.9k chars)
  path: string (4 to 223 chars)
  original_string: string (75 to 104k chars)
  code: string (75 to 104k chars)
  docstring_tokens: list (1 to 1.97k items)
  repo: string (7 to 55 chars)
  language: string (1 class)
  url: string (87 to 315 chars)
  code_tokens: list (19 to 28.4k items)
  sha: string (40 chars)
valid
EventEmitter._dispatch_coroutine
Schedule a coroutine for execution.

Args:
    event (str): The name of the event that triggered this call.
    listener (async def): The async def that needs to be executed.
    *args: Any number of positional arguments.
    **kwargs: Any number of keyword arguments.

The values of *args and **kwargs are passed, unaltered, to the async def
when generating the coro. If there is an exception generating the coro,
such as the wrong number of arguments, the emitter's error event is
triggered. If the triggering event _is_ the emitter's error event then
the exception is reraised. The reraised exception may show in debug mode
for the event loop but is otherwise silently dropped.
eventemitter/emitter.py
def _dispatch_coroutine(self, event, listener, *args, **kwargs):
    """Schedule a coroutine for execution.

    Args:
        event (str): The name of the event that triggered this call.
        listener (async def): The async def that needs to be executed.
        *args: Any number of positional arguments.
        **kwargs: Any number of keyword arguments.

    The values of *args and **kwargs are passed, unaltered, to the async
    def when generating the coro. If there is an exception generating the
    coro, such as the wrong number of arguments, the emitter's error event
    is triggered. If the triggering event _is_ the emitter's error event
    then the exception is reraised. The reraised exception may show in
    debug mode for the event loop but is otherwise silently dropped.
    """
    try:
        coro = listener(*args, **kwargs)
    except Exception as exc:
        if event == self.LISTENER_ERROR_EVENT:
            raise
        return self.emit(self.LISTENER_ERROR_EVENT, event, listener, exc)

    asyncio.ensure_future(
        _try_catch_coro(self, event, listener, coro),
        loop=self._loop,
    )
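A standalone illustration (hypothetical names, not part of this dataset row) of why "generating the coro" can itself raise before anything is awaited, which is exactly the case the except block above handles:

import asyncio

async def listener(a, b):
    return a + b

try:
    coro = listener(1, 2, 3)  # TypeError raised here, before any await
except TypeError as exc:
    print("failed to create coro:", exc)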
[ "Schedule", "a", "coroutine", "for", "execution", "." ]
asyncdef/eventemitter
python
https://github.com/asyncdef/eventemitter/blob/148b700c5846d8fdafc562d4326587da5447223f/eventemitter/emitter.py#L159-L190
[ "def", "_dispatch_coroutine", "(", "self", ",", "event", ",", "listener", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "coro", "=", "listener", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "exc", ":", "if", "event", "==", "self", ".", "LISTENER_ERROR_EVENT", ":", "raise", "return", "self", ".", "emit", "(", "self", ".", "LISTENER_ERROR_EVENT", ",", "event", ",", "listener", ",", "exc", ")", "asyncio", ".", "ensure_future", "(", "_try_catch_coro", "(", "self", ",", "event", ",", "listener", ",", "coro", ")", ",", "loop", "=", "self", ".", "_loop", ",", ")" ]
148b700c5846d8fdafc562d4326587da5447223f
valid
EventEmitter._dispatch_function
Execute a sync function.

Args:
    event (str): The name of the event that triggered this call.
    listener (def): The def that needs to be executed.
    *args: Any number of positional arguments.
    **kwargs: Any number of keyword arguments.

The values of *args and **kwargs are passed, unaltered, to the def when
executing. If there is an exception executing the def, such as the wrong
number of arguments, the emitter's error event is triggered. If the
triggering event _is_ the emitter's error event then the exception is
reraised. The reraised exception may show in debug mode for the event
loop but is otherwise silently dropped.
eventemitter/emitter.py
def _dispatch_function(self, event, listener, *args, **kwargs):
    """Execute a sync function.

    Args:
        event (str): The name of the event that triggered this call.
        listener (def): The def that needs to be executed.
        *args: Any number of positional arguments.
        **kwargs: Any number of keyword arguments.

    The values of *args and **kwargs are passed, unaltered, to the def
    when executing. If there is an exception executing the def, such as
    the wrong number of arguments, the emitter's error event is triggered.
    If the triggering event _is_ the emitter's error event then the
    exception is reraised. The reraised exception may show in debug mode
    for the event loop but is otherwise silently dropped.
    """
    try:
        return listener(*args, **kwargs)
    except Exception as exc:
        if event == self.LISTENER_ERROR_EVENT:
            raise
        return self.emit(self.LISTENER_ERROR_EVENT, event, listener, exc)
[ "Execute", "a", "sync", "function", "." ]
asyncdef/eventemitter
python
https://github.com/asyncdef/eventemitter/blob/148b700c5846d8fdafc562d4326587da5447223f/eventemitter/emitter.py#L192-L218
[ "def", "_dispatch_function", "(", "self", ",", "event", ",", "listener", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "listener", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "exc", ":", "if", "event", "==", "self", ".", "LISTENER_ERROR_EVENT", ":", "raise", "return", "self", ".", "emit", "(", "self", ".", "LISTENER_ERROR_EVENT", ",", "event", ",", "listener", ",", "exc", ")" ]
148b700c5846d8fdafc562d4326587da5447223f
valid
EventEmitter._dispatch
Dispatch an event to a listener.

Args:
    event (str): The name of the event that triggered this call.
    listener (def or async def): The listener to trigger.
    *args: Any number of positional arguments.
    **kwargs: Any number of keyword arguments.

This method inspects the listener. If it is a def it dispatches the
listener to a method that will execute that def. If it is an async def
it dispatches it to a method that will schedule the resulting coro with
the event loop.
eventemitter/emitter.py
def _dispatch(self, event, listener, *args, **kwargs):
    """Dispatch an event to a listener.

    Args:
        event (str): The name of the event that triggered this call.
        listener (def or async def): The listener to trigger.
        *args: Any number of positional arguments.
        **kwargs: Any number of keyword arguments.

    This method inspects the listener. If it is a def it dispatches the
    listener to a method that will execute that def. If it is an async
    def it dispatches it to a method that will schedule the resulting
    coro with the event loop.
    """
    if (
        asyncio.iscoroutinefunction(listener) or
        (isinstance(listener, functools.partial) and
         asyncio.iscoroutinefunction(listener.func))
    ):
        return self._dispatch_coroutine(event, listener, *args, **kwargs)

    return self._dispatch_function(event, listener, *args, **kwargs)
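The functools.partial branch matters because a partial wrapping an async def is not itself detected as a coroutine function on older interpreters. A minimal sketch (hypothetical names; the Python version note is an assumption based on CPython's changelog):

import asyncio
import functools

async def coro_listener(x):
    return x

wrapped = functools.partial(coro_listener, 1)
print(asyncio.iscoroutinefunction(coro_listener))  # True
print(asyncio.iscoroutinefunction(wrapped))        # False before ~3.8 (newer versions unwrap partials)
print(asyncio.iscoroutinefunction(wrapped.func))   # True: hence the explicit .func check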
[ "Dispatch", "an", "event", "to", "a", "listener", "." ]
asyncdef/eventemitter
python
https://github.com/asyncdef/eventemitter/blob/148b700c5846d8fdafc562d4326587da5447223f/eventemitter/emitter.py#L220-L242
[ "def", "_dispatch", "(", "self", ",", "event", ",", "listener", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "(", "asyncio", ".", "iscoroutinefunction", "(", "listener", ")", "or", "isinstance", "(", "listener", ",", "functools", ".", "partial", ")", "and", "asyncio", ".", "iscoroutinefunction", "(", "listener", ".", "func", ")", ")", ":", "return", "self", ".", "_dispatch_coroutine", "(", "event", ",", "listener", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "self", ".", "_dispatch_function", "(", "event", ",", "listener", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
148b700c5846d8fdafc562d4326587da5447223f
valid
EventEmitter.emit
Call each listener for the event with the given arguments.

Args:
    event (str): The event to trigger listeners on.
    *args: Any number of positional arguments.
    **kwargs: Any number of keyword arguments.

This method passes all arguments other than the event name directly to
the listeners. If a listener raises an exception for any reason the
'listener-error', or current value of LISTENER_ERROR_EVENT, is emitted.
Listeners to this event are given the event name, listener object, and
the exception raised. If an error listener fails it does so silently.

All event listeners are fired in a deferred way so this method returns
immediately. The calling coro must yield at some point for the event to
propagate to the listeners.
eventemitter/emitter.py
def emit(self, event, *args, **kwargs):
    """Call each listener for the event with the given arguments.

    Args:
        event (str): The event to trigger listeners on.
        *args: Any number of positional arguments.
        **kwargs: Any number of keyword arguments.

    This method passes all arguments other than the event name directly
    to the listeners. If a listener raises an exception for any reason
    the 'listener-error', or current value of LISTENER_ERROR_EVENT, is
    emitted. Listeners to this event are given the event name, listener
    object, and the exception raised. If an error listener fails it does
    so silently.

    All event listeners are fired in a deferred way so this method
    returns immediately. The calling coro must yield at some point for
    the event to propagate to the listeners.
    """
    listeners = self._listeners[event]
    listeners = itertools.chain(listeners, self._once[event])
    self._once[event] = []
    for listener in listeners:
        self._loop.call_soon(
            functools.partial(
                self._dispatch,
                event,
                listener,
                *args,
                **kwargs,
            )
        )

    return self
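A minimal usage sketch (assumed, not from this dataset row: the no-argument constructor and the 'on' method are inferred from the count docstring below and typical emitter APIs) showing that emit returns immediately and listeners only run once the caller yields to the loop:

import asyncio
from eventemitter import EventEmitter  # assumed import path

def on_data(value):
    print("got", value)

async def main():
    emitter = EventEmitter()
    emitter.on("data", on_data)  # 'on' per the count docstring below
    emitter.emit("data", 123)    # returns immediately; nothing printed yet
    await asyncio.sleep(0)       # yield so the call_soon callbacks actually fire

asyncio.run(main())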
[ "Call", "each", "listener", "for", "the", "event", "with", "the", "given", "arguments", "." ]
asyncdef/eventemitter
python
https://github.com/asyncdef/eventemitter/blob/148b700c5846d8fdafc562d4326587da5447223f/eventemitter/emitter.py#L244-L277
[ "def", "emit", "(", "self", ",", "event", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "listeners", "=", "self", ".", "_listeners", "[", "event", "]", "listeners", "=", "itertools", ".", "chain", "(", "listeners", ",", "self", ".", "_once", "[", "event", "]", ")", "self", ".", "_once", "[", "event", "]", "=", "[", "]", "for", "listener", "in", "listeners", ":", "self", ".", "_loop", ".", "call_soon", "(", "functools", ".", "partial", "(", "self", ".", "_dispatch", ",", "event", ",", "listener", ",", "*", "args", ",", "*", "*", "kwargs", ",", ")", ")", "return", "self" ]
148b700c5846d8fdafc562d4326587da5447223f
valid
EventEmitter.count
Get the number of listeners for the event.

Args:
    event (str): The event for which to count all listeners.

The resulting count is a combination of listeners added using
'on'/'add_listener' and 'once'.
eventemitter/emitter.py
def count(self, event):
    """Get the number of listeners for the event.

    Args:
        event (str): The event for which to count all listeners.

    The resulting count is a combination of listeners added using
    'on'/'add_listener' and 'once'.
    """
    return len(self._listeners[event]) + len(self._once[event])
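A short self-contained sketch (assumed import path and registration API, per the docstring above) of what count reports:

from eventemitter import EventEmitter  # assumed import path

emitter = EventEmitter()
emitter.on("data", print)    # persistent listener
emitter.once("data", print)  # one-shot listener, cleared on the next emit
print(emitter.count("data")) # 2: combines the _listeners and _once lists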
[ "Get", "the", "number", "of", "listeners", "for", "the", "event", "." ]
asyncdef/eventemitter
python
https://github.com/asyncdef/eventemitter/blob/148b700c5846d8fdafc562d4326587da5447223f/eventemitter/emitter.py#L279-L288
[ "def", "count", "(", "self", ",", "event", ")", ":", "return", "len", "(", "self", ".", "_listeners", "[", "event", "]", ")", "+", "len", "(", "self", ".", "_once", "[", "event", "]", ")" ]
148b700c5846d8fdafc562d4326587da5447223f
valid
ABF2.phasicTonic
let's keep the chunkMs as high as we reasonably can. 50ms is good. Things get flakey at lower numbers like 10ms. IMPORTANT! for this to work, prevent 0s from averaging in, so keep bin sizes well above the data resolution.
doc/uses/EPSCs-and-IPSCs/variance method/2016-12-17 02 graphTime2.py
def phasicTonic(self, m1=None, m2=None, chunkMs=50, quietPercentile=10,
                histResolution=.5, plotToo=False):
    """
    let's keep the chunkMs as high as we reasonably can. 50ms is good.
    Things get flakey at lower numbers like 10ms.

    IMPORTANT! for this to work, prevent 0s from averaging in, so keep
    bin sizes well above the data resolution.
    """
    # prepare sectioning values to be used later
    # (uses self throughout; the original mixed in a global "abf")
    m1 = 0 if m1 is None else m1 * self.pointsPerSec
    m2 = len(self.sweepY) if m2 is None else m2 * self.pointsPerSec
    m1, m2 = int(m1), int(m2)

    # prepare histogram values to be used later
    padding = 200  # pA or mV of maximum expected deviation
    chunkPoints = int(chunkMs * self.pointsPerMs)
    histBins = int((padding * 2) / histResolution)

    # center the data at 0 using peak histogram, not the mean
    Y = self.sweepY[m1:m2]
    hist, bins = np.histogram(Y, bins=2 * padding)
    Yoffset = bins[np.where(hist == max(hist))[0][0]]
    Y = Y - Yoffset  # we don't have to, but PDF math is easier

    # calculate all histogram
    nChunks = int(len(Y) / chunkPoints)
    hist, bins = np.histogram(Y, bins=histBins, range=(-padding, padding))
    hist = hist / len(Y)  # count as a fraction of total
    Xs = bins[1:]

    # get baseline data from chunks with smallest variance
    chunks = np.reshape(Y[:nChunks * chunkPoints], (nChunks, chunkPoints))
    variances = np.var(chunks, axis=1)
    percentiles = np.empty(len(variances))
    for i, variance in enumerate(variances):
        percentiles[i] = sorted(variances).index(variance) / len(variances) * 100
    blData = chunks[np.where(percentiles <= quietPercentile)[0]].flatten()

    # generate the standard curve and pull it to the histogram height
    # (mlab.normpdf was removed from modern matplotlib, so the Gaussian
    # PDF is computed explicitly here with the same result)
    sigma = np.sqrt(np.var(blData))
    center = np.average(blData) + histResolution / 2
    blCurve = np.exp(-0.5 * ((Xs - center) / sigma) ** 2) / (sigma * np.sqrt(2 * np.pi))
    blCurve = blCurve * max(hist) / max(blCurve)

    # determine the phasic current by subtracting-out the baseline
    #diff=hist-blCurve
    diff = hist
    IGNORE_DISTANCE = 5  # KEEP THIS FIXED, NOT A FUNCTION OF VARIANCE
    ignrCenter = len(Xs) / 2
    ignrPad = IGNORE_DISTANCE / histResolution
    ignr1, ignr2 = int(ignrCenter - ignrPad), int(ignrCenter + ignrPad)
    diff[ignr1:ignr2] = 0

    # optionally graph all this
    if plotToo:
        plt.figure(figsize=(15, 5))
        plt.plot(Y)
        plt.figure(figsize=(7, 7))
        ax1 = plt.subplot(211)
        plt.title(self.ID + " phasic analysis")
        plt.ylabel("fraction")
        plt.plot(Xs, hist, '-', alpha=.8, color='b', lw=3)
        plt.plot(Xs, blCurve, lw=3, alpha=.5, color='r')
        plt.margins(0, .1)
        plt.subplot(212, sharex=ax1)
        plt.title("baseline subtracted")
        plt.ylabel("fraction")
        plt.xlabel("data points (%s)" % self.units)
        plt.plot(Xs, diff, '-', alpha=.8, color='b', lw=3)
        plt.axhline(0, lw=3, alpha=.5, color='r')
        plt.axvline(0, lw=3, alpha=.5, color='k')
        plt.margins(0, .1)
        plt.axis([-50, 50, None, None])
        plt.tight_layout()
        plt.show()
        print(np.sum(np.split(diff, 2), 1))

    return diff / len(Y) * self.pointsPerSec
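The quiet-chunk baseline selection is the heart of the method; a standalone sketch on synthetic data (all names and parameter values hypothetical, using np.percentile in place of the slower sorted().index() ranking above):

import numpy as np

rng = np.random.default_rng(0)
Y = rng.normal(0, 2, 100_000)                  # synthetic baseline current
Y[20_000:25_000] += rng.exponential(5, 5_000)  # a stretch of phasic events

chunkPoints, quietPercentile = 1_000, 10
nChunks = len(Y) // chunkPoints
chunks = Y[:nChunks * chunkPoints].reshape(nChunks, chunkPoints)
variances = np.var(chunks, axis=1)

# "quiet" baseline = chunks whose variance falls in the lowest decile
cutoff = np.percentile(variances, quietPercentile)
blData = chunks[variances <= cutoff].flatten()
print(len(blData), "baseline points, sigma = %.2f" % blData.std())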
[ "let", "s", "keep", "the", "chunkMs", "as", "high", "as", "we", "reasonably", "can", ".", "50ms", "is", "good", ".", "Things", "get", "flakey", "at", "lower", "numbers", "like", "10ms", ".", "IMPORTANT!", "for", "this", "to", "work", "prevent", "0s", "from", "averaging", "in", "so", "keep", "bin", "sizes", "well", "above", "the", "data", "resolution", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/uses/EPSCs-and-IPSCs/variance method/2016-12-17 02 graphTime2.py#L10-L90
[ "def", "phasicTonic", "(", "self", ",", "m1", "=", "None", ",", "m2", "=", "None", ",", "chunkMs", "=", "50", ",", "quietPercentile", "=", "10", ",", "histResolution", "=", ".5", ",", "plotToo", "=", "False", ")", ":", "# prepare sectioning values to be used later", "m1", "=", "0", "if", "m1", "is", "None", "else", "m1", "*", "self", ".", "pointsPerSec", "m2", "=", "len", "(", "abf", ".", "sweepY", ")", "if", "m2", "is", "None", "else", "m2", "*", "self", ".", "pointsPerSec", "m1", ",", "m2", "=", "int", "(", "m1", ")", ",", "int", "(", "m2", ")", "# prepare histogram values to be used later", "padding", "=", "200", "# pA or mV of maximum expected deviation", "chunkPoints", "=", "int", "(", "chunkMs", "*", "self", ".", "pointsPerMs", ")", "histBins", "=", "int", "(", "(", "padding", "*", "2", ")", "/", "histResolution", ")", "# center the data at 0 using peak histogram, not the mean", "Y", "=", "self", ".", "sweepY", "[", "m1", ":", "m2", "]", "hist", ",", "bins", "=", "np", ".", "histogram", "(", "Y", ",", "bins", "=", "2", "*", "padding", ")", "Yoffset", "=", "bins", "[", "np", ".", "where", "(", "hist", "==", "max", "(", "hist", ")", ")", "[", "0", "]", "[", "0", "]", "]", "Y", "=", "Y", "-", "Yoffset", "# we don't have to, but PDF math is easier", "# calculate all histogram", "nChunks", "=", "int", "(", "len", "(", "Y", ")", "/", "chunkPoints", ")", "hist", ",", "bins", "=", "np", ".", "histogram", "(", "Y", ",", "bins", "=", "histBins", ",", "range", "=", "(", "-", "padding", ",", "padding", ")", ")", "hist", "=", "hist", "/", "len", "(", "Y", ")", "# count as a fraction of total", "Xs", "=", "bins", "[", "1", ":", "]", "# get baseline data from chunks with smallest variance", "chunks", "=", "np", ".", "reshape", "(", "Y", "[", ":", "nChunks", "*", "chunkPoints", "]", ",", "(", "nChunks", ",", "chunkPoints", ")", ")", "variances", "=", "np", ".", "var", "(", "chunks", ",", "axis", "=", "1", ")", "percentiles", "=", "np", ".", "empty", "(", "len", "(", "variances", ")", ")", "for", "i", ",", "variance", "in", "enumerate", "(", "variances", ")", ":", "percentiles", "[", "i", "]", "=", "sorted", "(", "variances", ")", ".", "index", "(", "variance", ")", "/", "len", "(", "variances", ")", "*", "100", "blData", "=", "chunks", "[", "np", ".", "where", "(", "percentiles", "<=", "quietPercentile", ")", "[", "0", "]", "]", ".", "flatten", "(", ")", "# generate the standard curve and pull it to the histogram height", "sigma", "=", "np", ".", "sqrt", "(", "np", ".", "var", "(", "blData", ")", ")", "center", "=", "np", ".", "average", "(", "blData", ")", "+", "histResolution", "/", "2", "blCurve", "=", "mlab", ".", "normpdf", "(", "Xs", ",", "center", ",", "sigma", ")", "blCurve", "=", "blCurve", "*", "max", "(", "hist", ")", "/", "max", "(", "blCurve", ")", "# determine the phasic current by subtracting-out the baseline", "#diff=hist-blCurve", "diff", "=", "hist", "IGNORE_DISTANCE", "=", "5", "# KEEP THIS FIXED, NOT A FUNCTION OF VARIANCE", "ignrCenter", "=", "len", "(", "Xs", ")", "/", "2", "ignrPad", "=", "IGNORE_DISTANCE", "/", "histResolution", "ignr1", ",", "ignt2", "=", "int", "(", "ignrCenter", "-", "ignrPad", ")", ",", "int", "(", "ignrCenter", "+", "ignrPad", ")", "diff", "[", "ignr1", ":", "ignt2", "]", "=", "0", "# optionally graph all this", "if", "plotToo", ":", "plt", ".", "figure", "(", "figsize", "=", "(", "15", ",", "5", ")", ")", "plt", ".", "plot", "(", "Y", ")", "plt", ".", "figure", "(", "figsize", "=", "(", "7", ",", "7", ")", ")", "ax1", "=", "plt", ".", "subplot", "(", 
"211", ")", "plt", ".", "title", "(", "abf", ".", "ID", "+", "\" phasic analysis\"", ")", "plt", ".", "ylabel", "(", "\"fraction\"", ")", "plt", ".", "plot", "(", "Xs", ",", "hist", ",", "'-'", ",", "alpha", "=", ".8", ",", "color", "=", "'b'", ",", "lw", "=", "3", ")", "plt", ".", "plot", "(", "Xs", ",", "blCurve", ",", "lw", "=", "3", ",", "alpha", "=", ".5", ",", "color", "=", "'r'", ")", "plt", ".", "margins", "(", "0", ",", ".1", ")", "plt", ".", "subplot", "(", "212", ",", "sharex", "=", "ax1", ")", "plt", ".", "title", "(", "\"baseline subtracted\"", ")", "plt", ".", "ylabel", "(", "\"fraction\"", ")", "plt", ".", "xlabel", "(", "\"data points (%s)\"", "%", "abf", ".", "units", ")", "plt", ".", "plot", "(", "Xs", ",", "diff", ",", "'-'", ",", "alpha", "=", ".8", ",", "color", "=", "'b'", ",", "lw", "=", "3", ")", "plt", ".", "axhline", "(", "0", ",", "lw", "=", "3", ",", "alpha", "=", ".5", ",", "color", "=", "'r'", ")", "plt", ".", "axvline", "(", "0", ",", "lw", "=", "3", ",", "alpha", "=", ".5", ",", "color", "=", "'k'", ")", "plt", ".", "margins", "(", "0", ",", ".1", ")", "plt", ".", "axis", "(", "[", "-", "50", ",", "50", ",", "None", ",", "None", "]", ")", "plt", ".", "tight_layout", "(", ")", "plt", ".", "show", "(", ")", "print", "(", "np", ".", "sum", "(", "np", ".", "split", "(", "diff", ",", "2", ")", ",", "1", ")", ")", "return", "diff", "/", "len", "(", "Y", ")", "*", "abf", ".", "pointsPerSec" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
genPNGs
Convert each TIF to PNG. Return filenames of new PNGs.
doc/oldcode/indexing/indexing.py
def genPNGs(folder, files=None):
    """Convert each TIF to PNG. Return filenames of new PNGs."""
    if files is None:
        files = glob.glob(folder + "/*.*")
    new = []
    for fname in files:
        ext = os.path.basename(fname).split(".")[-1].lower()
        if ext in ['tif', 'tiff']:
            if not os.path.exists(fname + ".png"):
                print(" -- converting %s to PNG..." % os.path.basename(fname))
                cm.image_convert(fname)
                new.append(fname)  # fancy burn-in of image data
            else:
                pass  # print(" -- already converted %s to PNG..."%os.path.basename(fname))
    return new
[ "Convert", "each", "TIF", "to", "PNG", ".", "Return", "filenames", "of", "new", "PNGs", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/oldcode/indexing/indexing.py#L23-L38
[ "def", "genPNGs", "(", "folder", ",", "files", "=", "None", ")", ":", "if", "files", "is", "None", ":", "files", "=", "glob", ".", "glob", "(", "folder", "+", "\"/*.*\"", ")", "new", "=", "[", "]", "for", "fname", "in", "files", ":", "ext", "=", "os", ".", "path", ".", "basename", "(", "fname", ")", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", ".", "lower", "(", ")", "if", "ext", "in", "[", "'tif'", ",", "'tiff'", "]", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "fname", "+", "\".png\"", ")", ":", "print", "(", "\" -- converting %s to PNG...\"", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", ")", "cm", ".", "image_convert", "(", "fname", ")", "new", ".", "append", "(", "fname", ")", "#fancy burn-in of image data", "else", ":", "pass", "#print(\" -- already converted %s to PNG...\"%os.path.basename(fname))", "return", "new" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
htmlABFcontent
generate text to go inside <body> for single ABF page.
doc/oldcode/indexing/indexing.py
def htmlABFcontent(ID, group, d):
    """generate text to go inside <body> for single ABF page."""
    html = ""
    files = []
    for abfID in group:
        files.extend(d[abfID])
    files = sorted(files)

    # start with root images
    html += "<hr>"
    for fname in files:
        if ".png" in fname.lower() and not "swhlab4" in fname:
            fname = "../" + os.path.basename(fname)
            html += '<a href="%s"><img src="%s" width="348"></a> ' % (fname, fname)

    # progress to /swhlab4/ images
    html += "<hr>"
    # ABFinfo
    lastID = ''
    for fname in sorted(files):
        if not "swhlab4" in fname:
            continue
        ID = os.path.basename(fname).split("_")[0]
        if not ID == lastID:
            lastID = ID
            html += "<h3>%s</h3>" % os.path.basename(fname).split("_")[0]
        if ".png" in fname.lower():
            fname = os.path.basename(fname)
            html += '<a href="%s"><img src="%s" height="300"></a> ' % (fname, fname)
            continue

    html += "<hr>"
    for fname in files:
        if not "swhlab4" in fname:
            continue
        if ".pkl" in fname:
            callit = os.path.basename(fname)
            thing = cm.getPkl(fname)
            if "_APs.pkl" in fname:
                callit += " (first AP)"
                thing = cm.dictFlat(thing)
                if len(thing):
                    thing = thing[0]
            elif "_MTs.pkl" in fname:
                if type(thing) == dict:
                    callit += " (from AVG of all sweeps)"
                else:
                    callit += " (first sweep)"
                    thing = thing[0]
            elif "_SAP.pkl" in fname:
                continue  # don't plot those, too complicated
            elif "_info.pkl" in fname or "_iv.pkl" in fname:
                pass  # no trouble, go for it
            else:
                print(" ?? not sure how to index [%s]" % os.path.basename(fname))
                continue
            if type(thing) is dict:
                thing = cm.msgDict(thing)
            if type(thing) is list:
                out = ''
                for item in thing:
                    out += str(item) + "\n"
                thing = out
            thing = str(thing)  # lol stringthing
            thing = "### %s ###\n" % os.path.basename(fname) + thing
            # putting it in a textbox is obnoxious. put it in the source instead.
            # html+='<br><br><textarea rows="%d" cols="70">%s</textarea>'%(str(thing).count("\n")+5,thing)
            html += "(view source for %s) <!--\n\n%s\n\n-->" % (os.path.basename(fname), thing)
    return html
[ "generate", "text", "to", "go", "inside", "<body", ">", "for", "single", "ABF", "page", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/oldcode/indexing/indexing.py#L40-L111
[ "def", "htmlABFcontent", "(", "ID", ",", "group", ",", "d", ")", ":", "html", "=", "\"\"", "files", "=", "[", "]", "for", "abfID", "in", "group", ":", "files", ".", "extend", "(", "d", "[", "abfID", "]", ")", "files", "=", "sorted", "(", "files", ")", "#start with root images", "html", "+=", "\"<hr>\"", "for", "fname", "in", "files", ":", "if", "\".png\"", "in", "fname", ".", "lower", "(", ")", "and", "not", "\"swhlab4\"", "in", "fname", ":", "fname", "=", "\"../\"", "+", "os", ".", "path", ".", "basename", "(", "fname", ")", "html", "+=", "'<a href=\"%s\"><img src=\"%s\" width=\"348\"></a> '", "%", "(", "fname", ",", "fname", ")", "#progress to /swhlab4/ images", "html", "+=", "\"<hr>\"", "#ABFinfo", "lastID", "=", "''", "for", "fname", "in", "sorted", "(", "files", ")", ":", "if", "not", "\"swhlab4\"", "in", "fname", ":", "continue", "ID", "=", "os", ".", "path", ".", "basename", "(", "fname", ")", ".", "split", "(", "\"_\"", ")", "[", "0", "]", "if", "not", "ID", "==", "lastID", ":", "lastID", "=", "ID", "html", "+=", "\"<h3>%s</h3>\"", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", ".", "split", "(", "\"_\"", ")", "[", "0", "]", "if", "\".png\"", "in", "fname", ".", "lower", "(", ")", ":", "fname", "=", "os", ".", "path", ".", "basename", "(", "fname", ")", "html", "+=", "'<a href=\"%s\"><img src=\"%s\" height=\"300\"></a> '", "%", "(", "fname", ",", "fname", ")", "continue", "html", "+=", "\"<hr>\"", "for", "fname", "in", "files", ":", "if", "not", "\"swhlab4\"", "in", "fname", ":", "continue", "if", "\".pkl\"", "in", "fname", ":", "callit", "=", "os", ".", "path", ".", "basename", "(", "fname", ")", "thing", "=", "cm", ".", "getPkl", "(", "fname", ")", "if", "\"_APs.pkl\"", "in", "fname", ":", "callit", "+=", "\" (first AP)\"", "thing", "=", "cm", ".", "dictFlat", "(", "thing", ")", "if", "len", "(", "thing", ")", ":", "thing", "=", "thing", "[", "0", "]", "elif", "\"_MTs.pkl\"", "in", "fname", ":", "if", "type", "(", "thing", ")", "==", "dict", ":", "callit", "+=", "\" (from AVG of all sweeps)\"", "else", ":", "callit", "+=", "\" (first sweep)\"", "thing", "=", "thing", "[", "0", "]", "elif", "\"_SAP.pkl\"", "in", "fname", ":", "continue", "#don't plot those, too complicated", "elif", "\"_info.pkl\"", "in", "fname", "or", "\"_iv.pkl\"", "in", "fname", ":", "pass", "#no trouble, go for it", "else", ":", "print", "(", "\" ?? not sure how to index [%s]\"", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", ")", "continue", "if", "type", "(", "thing", ")", "is", "dict", ":", "thing", "=", "cm", ".", "msgDict", "(", "thing", ")", "if", "type", "(", "thing", ")", "is", "list", ":", "out", "=", "''", "for", "item", "in", "thing", ":", "out", "+=", "str", "(", "item", ")", "+", "\"\\n\"", "thing", "=", "out", "thing", "=", "str", "(", "thing", ")", "#lol stringthing", "thing", "=", "\"### %s ###\\n\"", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", "+", "thing", "# putting it in a textbox is obnoxious. put it in the source instead.", "#html+='<br><br><textarea rows=\"%d\" cols=\"70\">%s</textarea>'%(str(thing).count(\"\\n\")+5,thing)", "html", "+=", "\"(view source for %s) <!--\\n\\n%s\\n\\n-->\"", "%", "(", "os", ".", "path", ".", "basename", "(", "fname", ")", ",", "thing", ")", "return", "html" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
htmlABF
given an ID and the dict of files, generate a static html for that abf.
doc/oldcode/indexing/indexing.py
def htmlABF(ID, group, d, folder, overwrite=False):
    """given an ID and the dict of files, generate a static html for that abf."""
    fname = folder + "/swhlab4/%s_index.html" % ID
    if overwrite is False and os.path.exists(fname):
        return
    html = TEMPLATES['abf']
    html = html.replace("~ID~", ID)
    html = html.replace("~CONTENT~", htmlABFcontent(ID, group, d))
    print(" <- writing [%s]" % os.path.basename(fname))
    with open(fname, 'w') as f:
        f.write(html)
    return
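The ~ID~/~CONTENT~ placeholders suggest a simple string-template scheme; a self-contained sketch (the template text and the ID here are hypothetical; TEMPLATES is loaded elsewhere in the real module):

TEMPLATES = {'abf': "<html><head><title>~ID~</title></head><body>~CONTENT~</body></html>"}

html = TEMPLATES['abf']
html = html.replace("~ID~", "16711011")            # hypothetical ABF ID
html = html.replace("~CONTENT~", "<p>demo body</p>")
print(html)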
[ "given", "an", "ID", "and", "the", "dict", "of", "files", "generate", "a", "static", "html", "for", "that", "abf", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/oldcode/indexing/indexing.py#L113-L124
[ "def", "htmlABF", "(", "ID", ",", "group", ",", "d", ",", "folder", ",", "overwrite", "=", "False", ")", ":", "fname", "=", "folder", "+", "\"/swhlab4/%s_index.html\"", "%", "ID", "if", "overwrite", "is", "False", "and", "os", ".", "path", ".", "exists", "(", "fname", ")", ":", "return", "html", "=", "TEMPLATES", "[", "'abf'", "]", "html", "=", "html", ".", "replace", "(", "\"~ID~\"", ",", "ID", ")", "html", "=", "html", ".", "replace", "(", "\"~CONTENT~\"", ",", "htmlABFcontent", "(", "ID", ",", "group", ",", "d", ")", ")", "print", "(", "\" <- writing [%s]\"", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", ")", "with", "open", "(", "fname", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "html", ")", "return" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
expMenu
read experiment.txt and return a list of menu dicts with [ID, color, symbol (star), comment].
doc/oldcode/indexing/indexing.py
def expMenu(groups, folder):
    """read experiment.txt and return a list of menu dicts with
    [ID, color, symbol (star), comment]."""
    ### GENERATE THE MENU DATA BASED ON EXPERIMENT FILE
    orphans = sorted(list(groups.keys()))
    menu = []
    if os.path.exists(folder + '/experiment.txt'):
        with open(folder + '/experiment.txt') as f:
            raw = f.read()
    else:
        raw = ""
    for line in raw.split("\n"):
        item = {}
        if len(line) == 0:
            continue
        if line.startswith("~"):
            line = line[1:].split(" ", 2)
            item["ID"] = line[0]
            item["symbol"] = ''
            if len(line) > 1:
                item["color"] = line[1]
            else:
                item["color"] = "white"
            if len(line) > 2 and len(line[2]):
                item["comment"] = line[2]
                if item["comment"][0] == "*":
                    item["symbol"] = '*'
            else:
                item["comment"] = ''
            if item["ID"] in orphans:
                orphans.remove(item["ID"])
        elif line.startswith("###"):
            line = line[3:].strip().split(" ", 1)
            item["title"] = line[0]
            item["comment"] = ''
            if len(line) > 1:
                if line[1].startswith("- "):
                    line[1] = line[1][2:]
                item["comment"] = line[1]
        else:
            item["unknown"] = line
        menu.append(item)
    menu.append({"title": "orphans", "comment": ""})
    for orphan in orphans:
        menu.append({"orphan": orphan, "ID": orphan, "color": '', "symbol": '', "comment": ''})
    return menu
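Reconstructed from the parsing logic above (not taken from the repo), a hypothetical experiment.txt showing the two line formats expMenu understands: "###" lines open a titled section (with an optional "- comment"), and "~" lines give an ABF as ID, color, and comment, where a leading "*" on the comment sets the star symbol:

### drugTrial - TGOT exposure cells
~16711011 blue *best cell of the day
~16711023 white
### controls
~16711045 yellow baseline only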
[ "read", "experiment", ".", "txt", "and", "return", "a", "dict", "with", "[", "firstOfNewExp", "color", "star", "comments", "]", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/oldcode/indexing/indexing.py#L184-L228
[ "def", "expMenu", "(", "groups", ",", "folder", ")", ":", "### GENERATE THE MENU DATA BASED ON EXPERIMENT FILE", "orphans", "=", "sorted", "(", "list", "(", "groups", ".", "keys", "(", ")", ")", ")", "menu", "=", "[", "]", "if", "os", ".", "path", ".", "exists", "(", "folder", "+", "'/experiment.txt'", ")", ":", "with", "open", "(", "folder", "+", "'/experiment.txt'", ")", "as", "f", ":", "raw", "=", "f", ".", "read", "(", ")", "else", ":", "raw", "=", "\"\"", "for", "line", "in", "raw", ".", "split", "(", "\"\\n\"", ")", ":", "item", "=", "{", "}", "if", "len", "(", "line", ")", "==", "0", ":", "continue", "if", "line", ".", "startswith", "(", "\"~\"", ")", ":", "line", "=", "line", "[", "1", ":", "]", ".", "split", "(", "\" \"", ",", "2", ")", "item", "[", "\"ID\"", "]", "=", "line", "[", "0", "]", "item", "[", "\"symbol\"", "]", "=", "''", "if", "len", "(", "line", ")", ">", "1", ":", "item", "[", "\"color\"", "]", "=", "line", "[", "1", "]", "else", ":", "item", "[", "\"color\"", "]", "=", "\"white\"", "if", "len", "(", "line", ")", ">", "2", "and", "len", "(", "line", "[", "2", "]", ")", ":", "item", "[", "\"comment\"", "]", "=", "line", "[", "2", "]", "if", "item", "[", "\"comment\"", "]", "[", "0", "]", "==", "\"*\"", ":", "item", "[", "\"symbol\"", "]", "=", "'*'", "else", ":", "item", "[", "\"comment\"", "]", "=", "''", "if", "item", "[", "\"ID\"", "]", "in", "orphans", ":", "orphans", ".", "remove", "(", "item", "[", "\"ID\"", "]", ")", "elif", "line", ".", "startswith", "(", "\"###\"", ")", ":", "line", "=", "line", "[", "3", ":", "]", ".", "strip", "(", ")", ".", "split", "(", "\" \"", ",", "1", ")", "item", "[", "\"title\"", "]", "=", "line", "[", "0", "]", "item", "[", "\"comment\"", "]", "=", "''", "if", "len", "(", "line", ")", ">", "1", ":", "if", "line", "[", "1", "]", ".", "startswith", "(", "\"- \"", ")", ":", "line", "[", "1", "]", "=", "line", "[", "1", "]", "[", "2", ":", "]", "item", "[", "\"comment\"", "]", "=", "line", "[", "1", "]", "else", ":", "item", "[", "\"unknown\"", "]", "=", "line", "menu", ".", "append", "(", "item", ")", "menu", ".", "append", "(", "{", "\"title\"", ":", "\"orphans\"", ",", "\"comment\"", ":", "\"\"", "}", ")", "for", "ophan", "in", "orphans", ":", "menu", ".", "append", "(", "{", "\"orphan\"", ":", "ophan", ",", "\"ID\"", ":", "ophan", ",", "\"color\"", ":", "''", ",", "\"symbol\"", ":", "''", ",", "\"comment\"", ":", "''", "}", ")", "return", "menu" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
genIndex
expects a folder of ABFs.
doc/oldcode/indexing/indexing.py
def genIndex(folder, forceIDs=[]):
    """expects a folder of ABFs."""
    if not os.path.exists(folder + "/swhlab4/"):
        print(" !! cannot index if no /swhlab4/")
        return
    timestart = cm.timethis()
    files = glob.glob(folder + "/*.*")  # ABF folder
    files.extend(glob.glob(folder + "/swhlab4/*.*"))
    print(" -- indexing glob took %.02f ms" % (cm.timethis(timestart) * 1000))
    files.extend(genPNGs(folder, files))
    files = sorted(files)
    timestart = cm.timethis()
    d = cm.getIDfileDict(files)  # TODO: this is really slow
    print(" -- filedict length:", len(d))
    print(" -- generating ID dict took %.02f ms" % (cm.timethis(timestart) * 1000))
    groups = cm.getABFgroups(files)
    print(" -- groups length:", len(groups))
    for ID in sorted(list(groups.keys())):
        overwrite = False
        for abfID in groups[ID]:
            if abfID in forceIDs:
                overwrite = True
        try:
            htmlABF(ID, groups[ID], d, folder, overwrite)
        except Exception:  # was a bare except, which also swallows KeyboardInterrupt
            print("~~ HTML GENERATION FAILED!!!")
    menu = expMenu(groups, folder)
    makeSplash(menu, folder)
    makeMenu(menu, folder)
    htmlFrames(d, folder)
    makeMenu(menu, folder)
    makeSplash(menu, folder)
[ "expects", "a", "folder", "of", "ABFs", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/oldcode/indexing/indexing.py#L279-L310
[ "def", "genIndex", "(", "folder", ",", "forceIDs", "=", "[", "]", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "folder", "+", "\"/swhlab4/\"", ")", ":", "print", "(", "\" !! cannot index if no /swhlab4/\"", ")", "return", "timestart", "=", "cm", ".", "timethis", "(", ")", "files", "=", "glob", ".", "glob", "(", "folder", "+", "\"/*.*\"", ")", "#ABF folder", "files", ".", "extend", "(", "glob", ".", "glob", "(", "folder", "+", "\"/swhlab4/*.*\"", ")", ")", "print", "(", "\" -- indexing glob took %.02f ms\"", "%", "(", "cm", ".", "timethis", "(", "timestart", ")", "*", "1000", ")", ")", "files", ".", "extend", "(", "genPNGs", "(", "folder", ",", "files", ")", ")", "files", "=", "sorted", "(", "files", ")", "timestart", "=", "cm", ".", "timethis", "(", ")", "d", "=", "cm", ".", "getIDfileDict", "(", "files", ")", "#TODO: this is really slow", "print", "(", "\" -- filedict length:\"", ",", "len", "(", "d", ")", ")", "print", "(", "\" -- generating ID dict took %.02f ms\"", "%", "(", "cm", ".", "timethis", "(", "timestart", ")", "*", "1000", ")", ")", "groups", "=", "cm", ".", "getABFgroups", "(", "files", ")", "print", "(", "\" -- groups length:\"", ",", "len", "(", "groups", ")", ")", "for", "ID", "in", "sorted", "(", "list", "(", "groups", ".", "keys", "(", ")", ")", ")", ":", "overwrite", "=", "False", "for", "abfID", "in", "groups", "[", "ID", "]", ":", "if", "abfID", "in", "forceIDs", ":", "overwrite", "=", "True", "try", ":", "htmlABF", "(", "ID", ",", "groups", "[", "ID", "]", ",", "d", ",", "folder", ",", "overwrite", ")", "except", ":", "print", "(", "\"~~ HTML GENERATION FAILED!!!\"", ")", "menu", "=", "expMenu", "(", "groups", ",", "folder", ")", "makeSplash", "(", "menu", ",", "folder", ")", "makeMenu", "(", "menu", ",", "folder", ")", "htmlFrames", "(", "d", ",", "folder", ")", "makeMenu", "(", "menu", ",", "folder", ")", "makeSplash", "(", "menu", ",", "folder", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
drawPhasePlot
Given an ABF object (SWHLab), draw its phase plot of the current sweep. m1 and m2 are optional marks (in seconds) for plotting only a range of data. Assume a matplotlib figure is already open and just draw on top of it.
doc/uses/phase_plot/phase.py
def drawPhasePlot(abf, m1=0, m2=None):
    """
    Given an ABF object (SWHLab), draw its phase plot of the current sweep.
    m1 and m2 are optional marks (in seconds) for plotting only a range of data.
    Assume a matplotlib figure is already open and just draw on top of it.
    """
    if not m2:
        m2 = abf.sweepLength

    cm = plt.get_cmap('CMRmap')
    #cm = plt.get_cmap('CMRmap_r')
    #cm = plt.get_cmap('spectral')
    #cm = plt.get_cmap('winter')

    # prepare Xs, Ys, and dYs
    Y = abf.sweepY
    Y = Y[int(abf.pointsPerSec * m1):int(abf.pointsPerSec * m2)]
    dY = (Y[1:] - Y[:-1]) * abf.rate / 1000.0  # mV/ms
    dY = np.append(dY, dY[-1])
    Xs = np.arange(len(dY)) / abf.pointsPerSec
    Xs = Xs + Xs[-1] * abf.sweep

    # plot the voltage
    plt.subplot(131)
    plt.grid(alpha=.5)
    plt.plot(Xs, Y, lw=.5, color=cm(abf.sweep / abf.sweeps))
    plt.title("membrane voltage")
    plt.ylabel("V (mV)")
    plt.xlabel("time (sec)")
    plt.margins(0, .1)

    # plot the first derivative of the voltage
    plt.subplot(132)
    plt.grid(alpha=.5)
    plt.plot(Xs, dY, lw=.5, color=cm(abf.sweep / abf.sweeps))
    plt.title("voltage velocity")
    plt.ylabel("dV (mV/ms)")
    plt.xlabel("time (sec)")
    plt.margins(0, .1)

    # make the phase plot
    plt.subplot(133)
    plt.grid(alpha=.5)
    plt.plot(Y, dY, alpha=.5, lw=.5, color=cm(abf.sweep / abf.sweeps))
    plt.title("phase plot")
    plt.ylabel("dV (mV/ms)")
    plt.xlabel("V (mV)")
    plt.margins(.1, .1)

    # tighten up the figure
    plt.tight_layout()
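The derivative step is the only math in the phase plot; a self-contained sketch (synthetic waveform, hypothetical sampling rate) of turning a voltage trace into mV/ms:

import numpy as np

rate = 20_000                                  # Hz, hypothetical sampling rate
t = np.arange(0, 1, 1 / rate)
V = -70 + 40 * np.exp(-((t - .5) * 200) ** 2)  # fake spike-shaped sweep (mV)

dV = np.diff(V) * rate / 1000.0                # forward difference -> mV/ms
dV = np.append(dV, dV[-1])                     # pad so len(dV) == len(V)
print(V.shape, dV.shape, dV.max())             # the phase plot is then plot(V, dV)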
[ "Given", "an", "ABF", "object", "(", "SWHLab", ")", "draw", "its", "phase", "plot", "of", "the", "current", "sweep", ".", "m1", "and", "m2", "are", "optional", "marks", "(", "in", "seconds", ")", "for", "plotting", "only", "a", "range", "of", "data", ".", "Assume", "a", "matplotlib", "figure", "is", "already", "open", "and", "just", "draw", "on", "top", "if", "it", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/uses/phase_plot/phase.py#L7-L58
[ "def", "drawPhasePlot", "(", "abf", ",", "m1", "=", "0", ",", "m2", "=", "None", ")", ":", "if", "not", "m2", ":", "m2", "=", "abf", ".", "sweepLength", "cm", "=", "plt", ".", "get_cmap", "(", "'CMRmap'", ")", "#cm = plt.get_cmap('CMRmap_r')", "#cm = plt.get_cmap('spectral')", "#cm = plt.get_cmap('winter')", "# prepare Xs, Ys, and dYs", "Y", "=", "abf", ".", "sweepY", "Y", "=", "Y", "[", "int", "(", "abf", ".", "pointsPerSec", "*", "m1", ")", ":", "int", "(", "abf", ".", "pointsPerSec", "*", "m2", ")", "]", "dY", "=", "(", "Y", "[", "1", ":", "]", "-", "Y", "[", ":", "-", "1", "]", ")", "*", "abf", ".", "rate", "/", "1000.0", "# mV/ms", "dY", "=", "np", ".", "append", "(", "dY", ",", "dY", "[", "-", "1", "]", ")", "Xs", "=", "np", ".", "arange", "(", "len", "(", "dY", ")", ")", "/", "abf", ".", "pointsPerSec", "Xs", "=", "Xs", "+", "Xs", "[", "-", "1", "]", "*", "abf", ".", "sweep", "# plot the voltage", "plt", ".", "subplot", "(", "131", ")", "plt", ".", "grid", "(", "alpha", "=", ".5", ")", "plt", ".", "plot", "(", "Xs", ",", "Y", ",", "lw", "=", ".5", ",", "color", "=", "cm", "(", "abf", ".", "sweep", "/", "abf", ".", "sweeps", ")", ")", "plt", ".", "title", "(", "\"membrane voltage\"", ")", "plt", ".", "ylabel", "(", "\"V (mV)\"", ")", "plt", ".", "xlabel", "(", "\"time (sec)\"", ")", "plt", ".", "margins", "(", "0", ",", ".1", ")", "# plot the first derivative of the voltage", "plt", ".", "subplot", "(", "132", ")", "plt", ".", "grid", "(", "alpha", "=", ".5", ")", "plt", ".", "plot", "(", "Xs", ",", "dY", ",", "lw", "=", ".5", ",", "color", "=", "cm", "(", "abf", ".", "sweep", "/", "abf", ".", "sweeps", ")", ")", "plt", ".", "title", "(", "\"voltage velocity\"", ")", "plt", ".", "ylabel", "(", "\"dV (mV/ms)\"", ")", "plt", ".", "xlabel", "(", "\"time (sec)\"", ")", "plt", ".", "margins", "(", "0", ",", ".1", ")", "# make the phase plot", "plt", ".", "subplot", "(", "133", ")", "plt", ".", "grid", "(", "alpha", "=", ".5", ")", "plt", ".", "plot", "(", "Y", ",", "dY", ",", "alpha", "=", ".5", ",", "lw", "=", ".5", ",", "color", "=", "cm", "(", "abf", ".", "sweep", "/", "abf", ".", "sweeps", ")", ")", "plt", ".", "title", "(", "\"phase plot\"", ")", "plt", ".", "ylabel", "(", "\"dV (mV/ms)\"", ")", "plt", ".", "xlabel", "(", "\"V (mV)\"", ")", "plt", ".", "margins", "(", ".1", ",", ".1", ")", "# tighten up the figure", "plt", ".", "tight_layout", "(", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
plotAllSweeps
simple example of how to load an ABF file and plot every sweep.
doc/misc/neo demo.py
def plotAllSweeps(abfFile):
    """simple example of how to load an ABF file and plot every sweep."""
    r = io.AxonIO(filename=abfFile)
    bl = r.read_block(lazy=False, cascade=True)
    print(abfFile + "\nplotting %d sweeps..." % len(bl.segments))
    plt.figure(figsize=(12, 10))
    plt.title(abfFile)
    for sweep in range(len(bl.segments)):
        trace = bl.segments[sweep].analogsignals[0]
        plt.plot(trace.times - trace.times[0], trace.magnitude, alpha=.5)
    plt.ylabel(trace.dimensionality)
    plt.xlabel("seconds")
    plt.show()
    plt.close()
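For newer neo releases the cascade keyword no longer exists (it was dropped around neo 0.5; treat the exact call as an assumption), so the equivalent load looks like:

from neo import io

r = io.AxonIO(filename="demo.abf")  # hypothetical file name
bl = r.read_block(lazy=False)       # no cascade kwarg on modern neo
for seg in bl.segments:
    trace = seg.analogsignals[0]
    print(trace.sampling_rate, trace.units)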
[ "simple", "example", "how", "to", "load", "an", "ABF", "file", "and", "plot", "every", "sweep", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/misc/neo demo.py#L9-L22
[ "def", "plotAllSweeps", "(", "abfFile", ")", ":", "r", "=", "io", ".", "AxonIO", "(", "filename", "=", "abfFile", ")", "bl", "=", "r", ".", "read_block", "(", "lazy", "=", "False", ",", "cascade", "=", "True", ")", "print", "(", "abfFile", "+", "\"\\nplotting %d sweeps...\"", "%", "len", "(", "bl", ".", "segments", ")", ")", "plt", ".", "figure", "(", "figsize", "=", "(", "12", ",", "10", ")", ")", "plt", ".", "title", "(", "abfFile", ")", "for", "sweep", "in", "range", "(", "len", "(", "bl", ".", "segments", ")", ")", ":", "trace", "=", "bl", ".", "segments", "[", "sweep", "]", ".", "analogsignals", "[", "0", "]", "plt", ".", "plot", "(", "trace", ".", "times", "-", "trace", ".", "times", "[", "0", "]", ",", "trace", ".", "magnitude", ",", "alpha", "=", ".5", ")", "plt", ".", "ylabel", "(", "trace", ".", "dimensionality", ")", "plt", ".", "xlabel", "(", "\"seconds\"", ")", "plt", ".", "show", "(", ")", "plt", ".", "close", "(", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
TIF_to_jpg
given a TIF taken by our cameras, make it a pretty labeled JPG. if the filename contains "f10" or "f20", add appropriate scale bars. automatic contrast adjustment is different depending on if it's a DIC image or fluorescent image (which is detected automatically).
swhlab/indexing/imaging.py
def TIF_to_jpg(fnameTiff, overwrite=False, saveAs=""): """ given a TIF taken by our cameras, make it a pretty labeled JPG. if the filename contains "f10" or "f20", add appropriate scale bars. automatic contrast adjustment is different depending on whether it's a DIC image or a fluorescent image (which is detected automatically). """ if saveAs == "": saveAs=fnameTiff+".jpg" if overwrite is False and os.path.exists(saveAs): print("file exists, not overwriting...") return # load the image img=pylab.imread(fnameTiff) img=img/np.max(img) # now the data is from 0 to 1 # determine the old histogram hist1,bins1=np.histogram(img.ravel(),bins=256, range=(0,1)) #pylab.plot(bins[:-1],hist) # detect darkfield by average: if np.average(img)<.2: vmin=None vmax=None msg=" | FLU" while np.average(img)<.5: img=np.sqrt(img) msg+="^(.5)" else: msg=" | DIC" percentile=.005 vmin=np.percentile(img.ravel(),percentile) vmax=np.percentile(img.ravel(),100-percentile) # determine the new histogram hist2,bins2=np.histogram(img.ravel(),bins=256, range=(0,1)) # plot it with resizing magic fig=pylab.figure(facecolor='r') fig.gca().imshow(img,cmap=pylab.gray(),vmin=vmin,vmax=vmax) pylab.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0) pylab.gca().xaxis.set_major_locator(pylab.NullLocator()) pylab.gca().yaxis.set_major_locator(pylab.NullLocator()) pylab.axis('off') # resize it to the original size fig.set_size_inches(img.shape[1]/100, img.shape[0]/100) # add text msg="%s | %s"%(os.path.basename(fnameTiff), datetime.datetime.fromtimestamp(os.path.getmtime(fnameTiff)))+msg center=10 pylab.text(center,center,"%s"%(msg),va="top",color='w',size='small', family='monospace',weight='bold', bbox=dict(facecolor='k', alpha=.5)) # add scale bar scaleWidthPx=False if "f10" in fnameTiff: scaleWidthPx,scaleBarText=39,"25 um" if "f20" in fnameTiff: scaleWidthPx,scaleBarText=31,"10 um" if scaleWidthPx: scaleBarPadding=10 x2,y2=img.shape[1]-scaleBarPadding,img.shape[0]-scaleBarPadding x1,y1=x2-scaleWidthPx,y2 for offset,color,alpha in [[2,'k',.5],[0,'w',1]]: pylab.plot([x1+offset,x2+offset],[y1+offset,y2+offset],'-', color=color,lw=4,alpha=alpha) pylab.text((x1+x2)/2+offset,y1-5+offset,scaleBarText,color=color, ha="center",weight="bold",alpha=alpha, size="small",va="bottom",family="monospace") # add histogram #pylab.plot(img.shape[1]-bins1[:-1][::-1]*200,-hist1/max(hist1)*100+110,color='g') #pylab.plot(img.shape[1]-bins2[:-1][::-1]*200,-hist2/max(hist2)*100+110,color='b') #pylab.show() # save it pylab.savefig(saveAs,dpi=100) # clean up pylab.close()
[ "given", "a", "TIF", "taken", "by", "our", "cameras", "make", "it", "a", "pretty", "labeled", "JPG", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/swhlab/indexing/imaging.py#L17-L105
[ "def", "TIF_to_jpg", "(", "fnameTiff", ",", "overwrite", "=", "False", ",", "saveAs", "=", "\"\"", ")", ":", "if", "saveAs", "==", "\"\"", ":", "saveAs", "=", "fnameTiff", "+", "\".jpg\"", "if", "overwrite", "is", "False", "and", "os", ".", "path", ".", "exists", "(", "saveAs", ")", ":", "print", "(", "\"file exists, not overwriting...\"", ")", "return", "# load the image", "img", "=", "pylab", ".", "imread", "(", "fnameTiff", ")", "img", "=", "img", "/", "np", ".", "max", "(", "img", ")", "# now the data is from 0 to 1", "# determine the old histogram", "hist1", ",", "bins1", "=", "np", ".", "histogram", "(", "img", ".", "ravel", "(", ")", ",", "bins", "=", "256", ",", "range", "=", "(", "0", ",", "1", ")", ")", "#pylab.plot(bins[:-1],hist)", "# detect darkfield by average:", "if", "np", ".", "average", "(", "img", ")", "<", ".2", ":", "vmin", "=", "None", "vmax", "=", "None", "msg", "=", "\" | FLU\"", "while", "np", ".", "average", "(", "img", ")", "<", ".5", ":", "img", "=", "np", ".", "sqrt", "(", "img", ")", "msg", "+=", "\"^(.5)\"", "else", ":", "msg", "=", "\" | DIC\"", "percentile", "=", ".005", "vmin", "=", "np", ".", "percentile", "(", "img", ".", "ravel", "(", ")", ",", "percentile", ")", "vmax", "=", "np", ".", "percentile", "(", "img", ".", "ravel", "(", ")", ",", "100", "-", "percentile", ")", "# determine the new histogram", "hist2", ",", "bins2", "=", "np", ".", "histogram", "(", "img", ".", "ravel", "(", ")", ",", "bins", "=", "256", ",", "range", "=", "(", "0", ",", "1", ")", ")", "# plot it with resizing magic", "fig", "=", "pylab", ".", "figure", "(", "facecolor", "=", "'r'", ")", "fig", ".", "gca", "(", ")", ".", "imshow", "(", "img", ",", "cmap", "=", "pylab", ".", "gray", "(", ")", ",", "vmin", "=", "vmin", ",", "vmax", "=", "vmax", ")", "pylab", ".", "subplots_adjust", "(", "top", "=", "1", ",", "bottom", "=", "0", ",", "right", "=", "1", ",", "left", "=", "0", ",", "hspace", "=", "0", ",", "wspace", "=", "0", ")", "pylab", ".", "gca", "(", ")", ".", "xaxis", ".", "set_major_locator", "(", "pylab", ".", "NullLocator", "(", ")", ")", "pylab", ".", "gca", "(", ")", ".", "yaxis", ".", "set_major_locator", "(", "pylab", ".", "NullLocator", "(", ")", ")", "pylab", ".", "axis", "(", "'off'", ")", "# resize it to the original size", "fig", ".", "set_size_inches", "(", "img", ".", "shape", "[", "1", "]", "/", "100", ",", "img", ".", "shape", "[", "0", "]", "/", "100", ")", "# add text", "msg", "=", "\"%s | %s\"", "%", "(", "os", ".", "path", ".", "basename", "(", "fnameTiff", ")", ",", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "os", ".", "path", ".", "getmtime", "(", "fnameTiff", ")", ")", ")", "+", "msg", "center", "=", "10", "pylab", ".", "text", "(", "center", ",", "center", ",", "\"%s\"", "%", "(", "msg", ")", ",", "va", "=", "\"top\"", ",", "color", "=", "'w'", ",", "size", "=", "'small'", ",", "family", "=", "'monospace'", ",", "weight", "=", "'bold'", ",", "bbox", "=", "dict", "(", "facecolor", "=", "'k'", ",", "alpha", "=", ".5", ")", ")", "# add scale bar", "scaleWidthPx", "=", "False", "if", "\"f10\"", "in", "fnameTiff", ":", "scaleWidthPx", ",", "scaleBarText", "=", "39", ",", "\"25 um\"", "if", "\"f20\"", "in", "fnameTiff", ":", "scaleWidthPx", ",", "scaleBarText", "=", "31", ",", "\"10 um\"", "if", "scaleWidthPx", ":", "scaleBarPadding", "=", "10", "x2", ",", "y2", "=", "img", ".", "shape", "[", "1", "]", "-", "scaleBarPadding", ",", "img", ".", "shape", "[", "0", "]", "-", "scaleBarPadding", "x1", ",", "y1", "=", "x2", "-", "scaleWidthPx", ",", 
"y2", "for", "offset", ",", "color", ",", "alpha", "in", "[", "[", "2", ",", "'k'", ",", ".5", "]", ",", "[", "0", ",", "'w'", ",", "1", "]", "]", ":", "pylab", ".", "plot", "(", "[", "x1", "+", "offset", ",", "x2", "+", "offset", "]", ",", "[", "y1", "+", "offset", ",", "y2", "+", "offset", "]", ",", "'-'", ",", "color", "=", "color", ",", "lw", "=", "4", ",", "alpha", "=", "alpha", ")", "pylab", ".", "text", "(", "(", "x1", "+", "x2", ")", "/", "2", "+", "offset", ",", "y1", "-", "5", "+", "offset", ",", "scaleBarText", ",", "color", "=", "color", ",", "ha", "=", "\"center\"", ",", "weight", "=", "\"bold\"", ",", "alpha", "=", "alpha", ",", "size", "=", "\"small\"", ",", "va", "=", "\"bottom\"", ",", "family", "=", "\"monospace\"", ")", "# add histogram", "#pylab.plot(img.shape[1]-bins1[:-1][::-1]*200,-hist1/max(hist1)*100+110,color='g')", "#pylab.plot(img.shape[1]-bins2[:-1][::-1]*200,-hist2/max(hist2)*100+110,color='b')", "#pylab.show()", "# save it", "pylab", ".", "savefig", "(", "saveAs", ",", "dpi", "=", "100", ")", "# clean up", "pylab", ".", "close", "(", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
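The contrast logic above can be demonstrated standalone: dark (fluorescent) frames are repeatedly sqrt-stretched until their average brightness is reasonable, while bright (DIC) frames are percentile-clipped. A minimal sketch with numpy only; the random array is a placeholder standing in for a camera TIF, not the SWHLab API:

import numpy as np

def auto_contrast(img):
    """Return (img, vmin, vmax) using the same rules as TIF_to_jpg():
    sqrt-stretch dark (fluorescent) frames, percentile-clip bright (DIC) ones."""
    img = img / np.max(img)                  # normalize to 0-1
    if np.average(img) < .2:                 # darkfield -> fluorescent image
        while np.average(img) < .5:          # brighten until the average is reasonable
            img = np.sqrt(img)
        return img, None, None               # let the colormap span the full range
    percentile = .005
    vmin = np.percentile(img.ravel(), percentile)        # clip extreme dark outliers
    vmax = np.percentile(img.ravel(), 100 - percentile)  # clip extreme bright outliers
    return img, vmin, vmax

demo = np.random.rand(64, 64)**6             # mostly-dark synthetic "fluorescent" frame
out, vmin, vmax = auto_contrast(demo)
print("average brightness: %.03f -> %.03f" % (np.average(demo), np.average(out)))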
valid
TIF_to_jpg_all
run TIF_to_jpg() on every TIF of a folder.
swhlab/indexing/imaging.py
def TIF_to_jpg_all(path): """run TIF_to_jpg() on every TIF of a folder.""" for fname in sorted(glob.glob(path+"/*.tif")): print(fname) TIF_to_jpg(fname)
def TIF_to_jpg_all(path): """run TIF_to_jpg() on every TIF of a folder.""" for fname in sorted(glob.glob(path+"/*.tif")): print(fname) TIF_to_jpg(fname)
[ "run", "TIF_to_jpg", "()", "on", "every", "TIF", "of", "a", "folder", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/swhlab/indexing/imaging.py#L107-L111
[ "def", "TIF_to_jpg_all", "(", "path", ")", ":", "for", "fname", "in", "sorted", "(", "glob", ".", "glob", "(", "path", "+", "\"/*.tif\"", ")", ")", ":", "print", "(", "fname", ")", "TIF_to_jpg", "(", "fname", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
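TIF_to_jpg_all() is the glob-and-loop batch pattern. A hedged sketch of the same pattern with a pluggable handler so it runs anywhere; the folder path and handler are placeholders:

import glob, os

def process_folder(path, handler=print):
    """Apply `handler` to every .tif in `path`, in sorted (chronological) order."""
    for fname in sorted(glob.glob(os.path.join(path, "*.tif"))):
        handler(fname)

process_folder(".")  # prints the name of any .tif files in the current folder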
valid
analyzeSweep
m1 and m2, if given, are in seconds. returns the peak (most common) pA deviation of the baseline-subtracted sweep.
doc/uses/EPSCs-and-IPSCs/smooth histogram method/02.py
def analyzeSweep(abf,sweep,m1=None,m2=None,plotToo=False): """ m1 and m2, if given, are in seconds. returns the peak (most common) pA deviation of the baseline-subtracted sweep. """ abf.setsweep(sweep) if m1 is None: m1=0 else: m1=m1*abf.pointsPerSec if m2 is None: m2=-1 else: m2=m2*abf.pointsPerSec # obtain X and Y Yorig=abf.sweepY[int(m1):int(m2)] X=np.arange(len(Yorig))/abf.pointsPerSec Ylpf=linear_gaussian(Yorig,sigmaSize=abf.pointsPerMs*300,forwardOnly=False) Yflat=Yorig-Ylpf EPSCs,IPSCs=[],[] if plotToo: plt.figure(figsize=(15,6)) ax1=plt.subplot(211) plt.title("%s sweep %d"%(abf.ID,sweep)) plt.grid() plt.plot(X,Yorig,alpha=.5) plt.plot(X,Ylpf,'k',alpha=.5,lw=2) plt.margins(0,.2) plt.subplot(212,sharex=ax1) plt.title("gaussian baseline subtraction") plt.grid() plt.plot(X,Yflat,alpha=.5) plt.axhline(0,color='k',lw=2,alpha=.5) plt.tight_layout() plt.show() # TEST GAUSS hist, bin_edges = np.histogram(Yflat, density=True, bins=200) peakPa=bin_edges[np.where(hist==max(hist))[0][0]+1] if plotToo: plt.figure() plt.grid() plt.plot(bin_edges[1:],hist,alpha=.5) plt.axvline(0,color='k') plt.axvline(peakPa,color='r',ls='--',lw=2,alpha=.5) plt.semilogy() plt.title("sweep data distribution") plt.ylabel("power") plt.xlabel("pA deviation") plt.show() return peakPa
def analyzeSweep(abf,sweep,m1=None,m2=None,plotToo=False): """ m1 and m2, if given, are in seconds. returns the peak (most common) pA deviation of the baseline-subtracted sweep. """ abf.setsweep(sweep) if m1 is None: m1=0 else: m1=m1*abf.pointsPerSec if m2 is None: m2=-1 else: m2=m2*abf.pointsPerSec # obtain X and Y Yorig=abf.sweepY[int(m1):int(m2)] X=np.arange(len(Yorig))/abf.pointsPerSec Ylpf=linear_gaussian(Yorig,sigmaSize=abf.pointsPerMs*300,forwardOnly=False) Yflat=Yorig-Ylpf EPSCs,IPSCs=[],[] if plotToo: plt.figure(figsize=(15,6)) ax1=plt.subplot(211) plt.title("%s sweep %d"%(abf.ID,sweep)) plt.grid() plt.plot(X,Yorig,alpha=.5) plt.plot(X,Ylpf,'k',alpha=.5,lw=2) plt.margins(0,.2) plt.subplot(212,sharex=ax1) plt.title("gaussian baseline subtraction") plt.grid() plt.plot(X,Yflat,alpha=.5) plt.axhline(0,color='k',lw=2,alpha=.5) plt.tight_layout() plt.show() # TEST GAUSS hist, bin_edges = np.histogram(Yflat, density=True, bins=200) peakPa=bin_edges[np.where(hist==max(hist))[0][0]+1] if plotToo: plt.figure() plt.grid() plt.plot(bin_edges[1:],hist,alpha=.5) plt.axvline(0,color='k') plt.axvline(peakPa,color='r',ls='--',lw=2,alpha=.5) plt.semilogy() plt.title("sweep data distribution") plt.ylabel("power") plt.xlabel("pA deviation") plt.show() return peakPa
[ "m1", "and", "m2", "if", "given", "are", "in", "seconds", ".", "returns", "[", "#", "EPSCs", "#", "IPSCs", "]" ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/uses/EPSCs-and-IPSCs/smooth histogram method/02.py#L136-L190
[ "def", "analyzeSweep", "(", "abf", ",", "sweep", ",", "m1", "=", "None", ",", "m2", "=", "None", ",", "plotToo", "=", "False", ")", ":", "abf", ".", "setsweep", "(", "sweep", ")", "if", "m1", "is", "None", ":", "m1", "=", "0", "else", ":", "m1", "=", "m1", "*", "abf", ".", "pointsPerSec", "if", "m2", "is", "None", ":", "m2", "=", "-", "1", "else", ":", "m2", "=", "m2", "*", "abf", ".", "pointsPerSec", "# obtain X and Y", "Yorig", "=", "abf", ".", "sweepY", "[", "int", "(", "m1", ")", ":", "int", "(", "m2", ")", "]", "X", "=", "np", ".", "arange", "(", "len", "(", "Yorig", ")", ")", "/", "abf", ".", "pointsPerSec", "Ylpf", "=", "linear_gaussian", "(", "Yorig", ",", "sigmaSize", "=", "abf", ".", "pointsPerMs", "*", "300", ",", "forwardOnly", "=", "False", ")", "Yflat", "=", "Yorig", "-", "Ylpf", "EPSCs", ",", "IPSCs", "=", "[", "]", ",", "[", "]", "if", "plotToo", ":", "plt", ".", "figure", "(", "figsize", "=", "(", "15", ",", "6", ")", ")", "ax1", "=", "plt", ".", "subplot", "(", "211", ")", "plt", ".", "title", "(", "\"%s sweep %d\"", "%", "(", "abf", ".", "ID", ",", "sweep", ")", ")", "plt", ".", "grid", "(", ")", "plt", ".", "plot", "(", "X", ",", "Yorig", ",", "alpha", "=", ".5", ")", "plt", ".", "plot", "(", "X", ",", "Ylpf", ",", "'k'", ",", "alpha", "=", ".5", ",", "lw", "=", "2", ")", "plt", ".", "margins", "(", "0", ",", ".2", ")", "plt", ".", "subplot", "(", "212", ",", "sharex", "=", "ax1", ")", "plt", ".", "title", "(", "\"gaussian baseline subtraction\"", ")", "plt", ".", "grid", "(", ")", "plt", ".", "plot", "(", "X", ",", "Yflat", ",", "alpha", "=", ".5", ")", "plt", ".", "axhline", "(", "0", ",", "color", "=", "'k'", ",", "lw", "=", "2", ",", "alpha", "=", ".5", ")", "plt", ".", "tight_layout", "(", ")", "plt", ".", "show", "(", ")", "# TEST GAUSS", "hist", ",", "bin_edges", "=", "np", ".", "histogram", "(", "Yflat", ",", "density", "=", "True", ",", "bins", "=", "200", ")", "peakPa", "=", "bin_edges", "[", "np", ".", "where", "(", "hist", "==", "max", "(", "hist", ")", ")", "[", "0", "]", "[", "0", "]", "+", "1", "]", "if", "plotToo", ":", "plt", ".", "figure", "(", ")", "plt", ".", "grid", "(", ")", "plt", ".", "plot", "(", "bin_edges", "[", "1", ":", "]", ",", "hist", ",", "alpha", "=", ".5", ")", "plt", ".", "axvline", "(", "0", ",", "color", "=", "'k'", ")", "plt", ".", "axvline", "(", "peakPa", ",", "color", "=", "'r'", ",", "ls", "=", "'--'", ",", "lw", "=", "2", ",", "alpha", "=", ".5", ")", "plt", ".", "semilogy", "(", ")", "plt", ".", "title", "(", "\"sweep data distribution\"", ")", "plt", ".", "ylabel", "(", "\"power\"", ")", "plt", ".", "xlabel", "(", "\"pA deviation\"", ")", "plt", ".", "show", "(", ")", "return", "peakPa" ]
a86c3c65323cec809a4bd4f81919644927094bf5
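The heart of analyzeSweep() is a gaussian low-pass baseline subtraction followed by locating the histogram peak of the flattened trace. linear_gaussian() is a swhlab helper not shown in this record; scipy's gaussian_filter1d is assumed below as a stand-in with a comparable sigma. A runnable sketch on synthetic data:

import numpy as np
from scipy.ndimage import gaussian_filter1d  # stand-in for swhlab's linear_gaussian()

points_per_sec = 2000
Y = np.random.normal(0, 5, 2*points_per_sec)          # 2 s of synthetic noise (pA)
Y += np.sin(np.linspace(0, 3, len(Y))) * 50           # slow baseline drift

Ylpf = gaussian_filter1d(Y, sigma=int(points_per_sec*.3))  # heavy low-pass = the baseline
Yflat = Y - Ylpf                                           # drift removed

hist, bin_edges = np.histogram(Yflat, density=True, bins=200)
peakPa = bin_edges[np.argmax(hist) + 1]               # most common pA deviation
print("most common deviation: %.02f pA" % peakPa)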
valid
convert
Convert weird TIF files into web-friendly versions. Auto contrast is applied (saturating lower and upper 0.01%). make saveAs True to save as .TIF.png make saveAs False and it won't save at all make saveAs "someFile.jpg" to save it as a different path/format
doc/oldcode/indexing/image.py
def convert(fname,saveAs=True,showToo=False): """ Convert weird TIF files into web-friendly versions. Auto contrast is applied (saturating lower and upper 0.01%). make saveAs True to save as .TIF.png make saveAs False and it won't save at all make saveAs "someFile.jpg" to save it as a different path/format """ # load the image #im = Image.open(fname) #PIL can't handle 12-bit TIFs well im=ndimage.imread(fname) #scipy does better with it im=np.array(im,dtype=float) # now it's a numpy array # do all image enhancement here cutoffLow=np.percentile(im,.01) cutoffHigh=np.percentile(im,99.99) im[np.where(im<cutoffLow)]=cutoffLow im[np.where(im>cutoffHigh)]=cutoffHigh # IMAGE FORMATTING im-=np.min(im) #auto contrast im/=np.max(im) #normalize im*=255 #stretch contrast (8-bit) im = Image.fromarray(im) # IMAGE DRAWING msg="%s\n"%os.path.basename(fname) msg+="%s\n"%cm.epochToString(os.path.getmtime(fname)) d = ImageDraw.Draw(im) fnt = ImageFont.truetype("arial.ttf", 20) d.text((6,6),msg,font=fnt,fill=0) d.text((4,4),msg,font=fnt,fill=255) if showToo: im.show() if saveAs is False: return if saveAs is True: saveAs=fname+".png" im.convert('RGB').save(saveAs) return saveAs
def convert(fname,saveAs=True,showToo=False): """ Convert weird TIF files into web-friendly versions. Auto contrast is applied (saturating lower and upper 0.01%). make saveAs True to save as .TIF.png make saveAs False and it won't save at all make saveAs "someFile.jpg" to save it as a different path/format """ # load the image #im = Image.open(fname) #PIL can't handle 12-bit TIFs well im=ndimage.imread(fname) #scipy does better with it im=np.array(im,dtype=float) # now it's a numpy array # do all image enhancement here cutoffLow=np.percentile(im,.01) cutoffHigh=np.percentile(im,99.99) im[np.where(im<cutoffLow)]=cutoffLow im[np.where(im>cutoffHigh)]=cutoffHigh # IMAGE FORMATTING im-=np.min(im) #auto contrast im/=np.max(im) #normalize im*=255 #stretch contrast (8-bit) im = Image.fromarray(im) # IMAGE DRAWING msg="%s\n"%os.path.basename(fname) msg+="%s\n"%cm.epochToString(os.path.getmtime(fname)) d = ImageDraw.Draw(im) fnt = ImageFont.truetype("arial.ttf", 20) d.text((6,6),msg,font=fnt,fill=0) d.text((4,4),msg,font=fnt,fill=255) if showToo: im.show() if saveAs is False: return if saveAs is True: saveAs=fname+".png" im.convert('RGB').save(saveAs) return saveAs
[ "Convert", "weird", "TIF", "files", "into", "web", "-", "friendly", "versions", ".", "Auto", "contrast", "is", "applied", "(", "saturating", "lower", "and", "upper", "0", ".", "1%", ")", ".", "make", "saveAs", "True", "to", "save", "as", ".", "TIF", ".", "png", "make", "saveAs", "False", "and", "it", "won", "t", "save", "at", "all", "make", "saveAs", "someFile", ".", "jpg", "to", "save", "it", "as", "a", "different", "path", "/", "format" ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/oldcode/indexing/image.py#L10-L51
[ "def", "convert", "(", "fname", ",", "saveAs", "=", "True", ",", "showToo", "=", "False", ")", ":", "# load the image", "#im = Image.open(fname) #PIL can't handle 12-bit TIFs well", "im", "=", "ndimage", ".", "imread", "(", "fname", ")", "#scipy does better with it", "im", "=", "np", ".", "array", "(", "im", ",", "dtype", "=", "float", ")", "# now it's a numpy array", "# do all image enhancement here", "cutoffLow", "=", "np", ".", "percentile", "(", "im", ",", ".01", ")", "cutoffHigh", "=", "np", ".", "percentile", "(", "im", ",", "99.99", ")", "im", "[", "np", ".", "where", "(", "im", "<", "cutoffLow", ")", "]", "=", "cutoffLow", "im", "[", "np", ".", "where", "(", "im", ">", "cutoffHigh", ")", "]", "=", "cutoffHigh", "# IMAGE FORMATTING", "im", "-=", "np", ".", "min", "(", "im", ")", "#auto contrast", "im", "/=", "np", ".", "max", "(", "im", ")", "#normalize", "im", "*=", "255", "#stretch contrast (8-bit)", "im", "=", "Image", ".", "fromarray", "(", "im", ")", "# IMAGE DRAWING", "msg", "=", "\"%s\\n\"", "%", "os", ".", "path", ".", "basename", "(", "fname", ")", "msg", "+=", "\"%s\\n\"", "%", "cm", ".", "epochToString", "(", "os", ".", "path", ".", "getmtime", "(", "fname", ")", ")", "d", "=", "ImageDraw", ".", "Draw", "(", "im", ")", "fnt", "=", "ImageFont", ".", "truetype", "(", "\"arial.ttf\"", ",", "20", ")", "d", ".", "text", "(", "(", "6", ",", "6", ")", ",", "msg", ",", "font", "=", "fnt", ",", "fill", "=", "0", ")", "d", ".", "text", "(", "(", "4", ",", "4", ")", ",", "msg", ",", "font", "=", "fnt", ",", "fill", "=", "255", ")", "if", "showToo", ":", "im", ".", "show", "(", ")", "if", "saveAs", "is", "False", ":", "return", "if", "saveAs", "is", "True", ":", "saveAs", "=", "fname", "+", "\".png\"", "im", ".", "convert", "(", "'RGB'", ")", ".", "save", "(", "saveAs", ")", "return", "saveAs" ]
a86c3c65323cec809a4bd4f81919644927094bf5
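One caveat when reusing convert(): modern Pillow versions raise on Image.fromarray() with a float64 array, so casting to uint8 after the 0-255 stretch is the safe form of the same pipeline. A self-contained sketch; the synthetic array stands in for a 12-bit camera frame and demo.png is a placeholder output name:

import numpy as np
from PIL import Image

im = np.random.rand(64, 64) * 4095            # stand-in for a 12-bit camera frame
lo, hi = np.percentile(im, .01), np.percentile(im, 99.99)
im = np.clip(im, lo, hi)                      # saturate the extreme 0.01% tails
im -= im.min(); im /= im.max(); im *= 255     # stretch to the 8-bit range
img = Image.fromarray(im.astype(np.uint8))    # cast first; float64 arrays raise in PIL
img.convert('RGB').save("demo.png")           # placeholder output name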
valid
plot_shaded_data
plot X and Y data, then shade its background by variance.
doc/uses/EPSCs-and-IPSCs/variance method/2016-12-15 noise sample.py
def plot_shaded_data(X,Y,variances,varianceX): """plot X and Y data, then shade its background by variance.""" plt.plot(X,Y,color='k',lw=2) nChunks=int(len(Y)/CHUNK_POINTS) for i in range(0,100,PERCENT_STEP): varLimitLow=np.percentile(variances,i) varLimitHigh=np.percentile(variances,i+PERCENT_STEP) varianceIsAboveMin=np.where(variances>=varLimitLow)[0] varianceIsBelowMax=np.where(variances<=varLimitHigh)[0] varianceIsRange=[chunkNumber for chunkNumber in range(nChunks) \ if chunkNumber in varianceIsAboveMin \ and chunkNumber in varianceIsBelowMax] for chunkNumber in varianceIsRange: t1=chunkNumber*CHUNK_POINTS/POINTS_PER_SEC t2=t1+CHUNK_POINTS/POINTS_PER_SEC plt.axvspan(t1,t2,alpha=.3,color=COLORMAP(i/100),lw=0)
def plot_shaded_data(X,Y,variances,varianceX): """plot X and Y data, then shade its background by variance.""" plt.plot(X,Y,color='k',lw=2) nChunks=int(len(Y)/CHUNK_POINTS) for i in range(0,100,PERCENT_STEP): varLimitLow=np.percentile(variances,i) varLimitHigh=np.percentile(variances,i+PERCENT_STEP) varianceIsAboveMin=np.where(variances>=varLimitLow)[0] varianceIsBelowMax=np.where(variances<=varLimitHigh)[0] varianceIsRange=[chunkNumber for chunkNumber in range(nChunks) \ if chunkNumber in varianceIsAboveMin \ and chunkNumber in varianceIsBelowMax] for chunkNumber in varianceIsRange: t1=chunkNumber*CHUNK_POINTS/POINTS_PER_SEC t2=t1+CHUNK_POINTS/POINTS_PER_SEC plt.axvspan(t1,t2,alpha=.3,color=COLORMAP(i/100),lw=0)
[ "plot", "X", "and", "Y", "data", "then", "shade", "its", "background", "by", "variance", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/uses/EPSCs-and-IPSCs/variance method/2016-12-15 noise sample.py#L21-L36
[ "def", "plot_shaded_data", "(", "X", ",", "Y", ",", "variances", ",", "varianceX", ")", ":", "plt", ".", "plot", "(", "X", ",", "Y", ",", "color", "=", "'k'", ",", "lw", "=", "2", ")", "nChunks", "=", "int", "(", "len", "(", "Y", ")", "/", "CHUNK_POINTS", ")", "for", "i", "in", "range", "(", "0", ",", "100", ",", "PERCENT_STEP", ")", ":", "varLimitLow", "=", "np", ".", "percentile", "(", "variances", ",", "i", ")", "varLimitHigh", "=", "np", ".", "percentile", "(", "variances", ",", "i", "+", "PERCENT_STEP", ")", "varianceIsAboveMin", "=", "np", ".", "where", "(", "variances", ">=", "varLimitLow", ")", "[", "0", "]", "varianceIsBelowMax", "=", "np", ".", "where", "(", "variances", "<=", "varLimitHigh", ")", "[", "0", "]", "varianceIsRange", "=", "[", "chunkNumber", "for", "chunkNumber", "in", "range", "(", "nChunks", ")", "if", "chunkNumber", "in", "varianceIsAboveMin", "and", "chunkNumber", "in", "varianceIsBelowMax", "]", "for", "chunkNumber", "in", "varianceIsRange", ":", "t1", "=", "chunkNumber", "*", "CHUNK_POINTS", "/", "POINTS_PER_SEC", "t2", "=", "t1", "+", "CHUNK_POINTS", "/", "POINTS_PER_SEC", "plt", ".", "axvspan", "(", "t1", ",", "t2", ",", "alpha", "=", ".3", ",", "color", "=", "COLORMAP", "(", "i", "/", "100", ")", ",", "lw", "=", "0", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
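plot_shaded_data() depends on module-level constants (CHUNK_POINTS, PERCENT_STEP, POINTS_PER_SEC, COLORMAP) defined elsewhere in the source script. A self-contained sketch of the same percentile-banded shading, with assumed placeholder values for those constants:

import numpy as np
import matplotlib.pyplot as plt

POINTS_PER_SEC, CHUNK_POINTS, PERCENT_STEP = 1000, 100, 10  # placeholder values
COLORMAP = plt.cm.viridis                                   # any matplotlib colormap works

Y = np.random.normal(0, 1, 5*POINTS_PER_SEC)                # 5 s of synthetic data
X = np.arange(len(Y))/POINTS_PER_SEC
nChunks = len(Y)//CHUNK_POINTS
variances = np.array([np.var(Y[i*CHUNK_POINTS:(i+1)*CHUNK_POINTS]) for i in range(nChunks)])

plt.plot(X, Y, color='k', lw=1)
for i in range(0, 100, PERCENT_STEP):
    lo = np.percentile(variances, i)
    hi = np.percentile(variances, i+PERCENT_STEP)
    # shade every chunk whose variance falls inside this percentile band
    for chunk in np.where((variances >= lo) & (variances <= hi))[0]:
        t1 = chunk*CHUNK_POINTS/POINTS_PER_SEC
        plt.axvspan(t1, t1+CHUNK_POINTS/POINTS_PER_SEC, alpha=.3, color=COLORMAP(i/100), lw=0)
plt.margins(0, .1)
plt.show()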
valid
show_variances
create some fancy graphs to show color-coded variances.
doc/uses/EPSCs-and-IPSCs/variance method/2016-12-15 noise sample.py
def show_variances(Y,variances,varianceX,logScale=False): """create some fancy graphs to show color-coded variances.""" plt.figure(1,figsize=(10,7)) plt.figure(2,figsize=(10,7)) varSorted=np.sort(variances) plt.figure(1) plt.subplot(211) plt.grid() plt.title("chronological variance") plt.ylabel("original data") plot_shaded_data(X,Y,variances,varianceX) plt.margins(0,.1) plt.subplot(212) plt.ylabel("variance (pA) (log%s)"%str(logScale)) plt.xlabel("time in sweep (sec)") plt.plot(varianceX,variances,'k-',lw=2) plt.figure(2) plt.ylabel("variance (pA) (log%s)"%str(logScale)) plt.xlabel("chunk number") plt.title("sorted variance") plt.plot(varSorted,'k-',lw=2) for i in range(0,100,PERCENT_STEP): varLimitLow=np.percentile(variances,i) varLimitHigh=np.percentile(variances,i+PERCENT_STEP) label="%2d-%d percentile"%(i,i+PERCENT_STEP) color=COLORMAP(i/100) print("%s: variance = %.02f - %.02f"%(label,varLimitLow,varLimitHigh)) plt.figure(1) plt.axhspan(varLimitLow,varLimitHigh,alpha=.5,lw=0,color=color,label=label) plt.figure(2) chunkLow=np.where(varSorted>=varLimitLow)[0][0] chunkHigh=np.where(varSorted>=varLimitHigh)[0][0] plt.axvspan(chunkLow,chunkHigh,alpha=.5,lw=0,color=color,label=label) for fignum in [1,2]: plt.figure(fignum) if logScale: plt.semilogy() plt.margins(0,0) plt.grid() if fignum==2: plt.legend(fontsize=10,loc='upper left',shadow=True) plt.tight_layout() plt.savefig('2016-12-15-variance-%d-log%s.png'%(fignum,str(logScale))) plt.show()
def show_variances(Y,variances,varianceX,logScale=False): """create some fancy graphs to show color-coded variances.""" plt.figure(1,figsize=(10,7)) plt.figure(2,figsize=(10,7)) varSorted=np.sort(variances) plt.figure(1) plt.subplot(211) plt.grid() plt.title("chronological variance") plt.ylabel("original data") plot_shaded_data(X,Y,variances,varianceX) plt.margins(0,.1) plt.subplot(212) plt.ylabel("variance (pA) (log%s)"%str(logScale)) plt.xlabel("time in sweep (sec)") plt.plot(varianceX,variances,'k-',lw=2) plt.figure(2) plt.ylabel("variance (pA) (log%s)"%str(logScale)) plt.xlabel("chunk number") plt.title("sorted variance") plt.plot(varSorted,'k-',lw=2) for i in range(0,100,PERCENT_STEP): varLimitLow=np.percentile(variances,i) varLimitHigh=np.percentile(variances,i+PERCENT_STEP) label="%2d-%d percentile"%(i,i+PERCENT_STEP) color=COLORMAP(i/100) print("%s: variance = %.02f - %.02f"%(label,varLimitLow,varLimitHigh)) plt.figure(1) plt.axhspan(varLimitLow,varLimitHigh,alpha=.5,lw=0,color=color,label=label) plt.figure(2) chunkLow=np.where(varSorted>=varLimitLow)[0][0] chunkHigh=np.where(varSorted>=varLimitHigh)[0][0] plt.axvspan(chunkLow,chunkHigh,alpha=.5,lw=0,color=color,label=label) for fignum in [1,2]: plt.figure(fignum) if logScale: plt.semilogy() plt.margins(0,0) plt.grid() if fignum==2: plt.legend(fontsize=10,loc='upper left',shadow=True) plt.tight_layout() plt.savefig('2016-12-15-variance-%d-log%s.png'%(fignum,str(logScale))) plt.show()
[ "create", "some", "fancy", "graphs", "to", "show", "color", "-", "coded", "variances", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/doc/uses/EPSCs-and-IPSCs/variance method/2016-12-15 noise sample.py#L39-L87
[ "def", "show_variances", "(", "Y", ",", "variances", ",", "varianceX", ",", "logScale", "=", "False", ")", ":", "plt", ".", "figure", "(", "1", ",", "figsize", "=", "(", "10", ",", "7", ")", ")", "plt", ".", "figure", "(", "2", ",", "figsize", "=", "(", "10", ",", "7", ")", ")", "varSorted", "=", "sorted", "(", "variances", ")", "plt", ".", "figure", "(", "1", ")", "plt", ".", "subplot", "(", "211", ")", "plt", ".", "grid", "(", ")", "plt", ".", "title", "(", "\"chronological variance\"", ")", "plt", ".", "ylabel", "(", "\"original data\"", ")", "plot_shaded_data", "(", "X", ",", "Y", ",", "variances", ",", "varianceX", ")", "plt", ".", "margins", "(", "0", ",", ".1", ")", "plt", ".", "subplot", "(", "212", ")", "plt", ".", "ylabel", "(", "\"variance (pA) (log%s)\"", "%", "str", "(", "logScale", ")", ")", "plt", ".", "xlabel", "(", "\"time in sweep (sec)\"", ")", "plt", ".", "plot", "(", "varianceX", ",", "variances", ",", "'k-'", ",", "lw", "=", "2", ")", "plt", ".", "figure", "(", "2", ")", "plt", ".", "ylabel", "(", "\"variance (pA) (log%s)\"", "%", "str", "(", "logScale", ")", ")", "plt", ".", "xlabel", "(", "\"chunk number\"", ")", "plt", ".", "title", "(", "\"sorted variance\"", ")", "plt", ".", "plot", "(", "varSorted", ",", "'k-'", ",", "lw", "=", "2", ")", "for", "i", "in", "range", "(", "0", ",", "100", ",", "PERCENT_STEP", ")", ":", "varLimitLow", "=", "np", ".", "percentile", "(", "variances", ",", "i", ")", "varLimitHigh", "=", "np", ".", "percentile", "(", "variances", ",", "i", "+", "PERCENT_STEP", ")", "label", "=", "\"%2d-%d percentile\"", "%", "(", "i", ",", "i", "+", "+", "PERCENT_STEP", ")", "color", "=", "COLORMAP", "(", "i", "/", "100", ")", "print", "(", "\"%s: variance = %.02f - %.02f\"", "%", "(", "label", ",", "varLimitLow", ",", "varLimitHigh", ")", ")", "plt", ".", "figure", "(", "1", ")", "plt", ".", "axhspan", "(", "varLimitLow", ",", "varLimitHigh", ",", "alpha", "=", ".5", ",", "lw", "=", "0", ",", "color", "=", "color", ",", "label", "=", "label", ")", "plt", ".", "figure", "(", "2", ")", "chunkLow", "=", "np", ".", "where", "(", "varSorted", ">=", "varLimitLow", ")", "[", "0", "]", "[", "0", "]", "chunkHigh", "=", "np", ".", "where", "(", "varSorted", ">=", "varLimitHigh", ")", "[", "0", "]", "[", "0", "]", "plt", ".", "axvspan", "(", "chunkLow", ",", "chunkHigh", ",", "alpha", "=", ".5", ",", "lw", "=", "0", ",", "color", "=", "color", ",", "label", "=", "label", ")", "for", "fignum", "in", "[", "1", ",", "2", "]", ":", "plt", ".", "figure", "(", "fignum", ")", "if", "logScale", ":", "plt", ".", "semilogy", "(", ")", "plt", ".", "margins", "(", "0", ",", "0", ")", "plt", ".", "grid", "(", ")", "if", "fignum", "is", "2", ":", "plt", ".", "legend", "(", "fontsize", "=", "10", ",", "loc", "=", "'upper left'", ",", "shadow", "=", "True", ")", "plt", ".", "tight_layout", "(", ")", "plt", ".", "savefig", "(", "'2016-12-15-variance-%d-log%s.png'", "%", "(", "fignum", ",", "str", "(", "logScale", ")", ")", ")", "plt", ".", "show", "(", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
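The sorted-variance view in show_variances() maps each percentile limit onto the sorted array with np.where(varSorted>=limit)[0][0]; np.searchsorted gives the same index more directly. A small demonstration, where the exponential samples are placeholders for real chunk variances:

import numpy as np

variances = np.random.exponential(1.0, 300)   # placeholder chunk variances
varSorted = np.sort(variances)
for i in range(0, 100, 10):
    lo, hi = np.percentile(variances, [i, i+10])
    # first sorted index at/above each limit -- same as np.where(varSorted>=lo)[0][0]
    chunkLow, chunkHigh = np.searchsorted(varSorted, [lo, hi])
    print("%2d-%3d%%: variance %.02f-%.02f (sorted chunks %d-%d)"
          % (i, i+10, lo, hi, chunkLow, chunkHigh))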
valid
AP.ensureDetection
run this before analysis. Checks if event detection occurred. If not, runs AP detection on all sweeps.
swhlab/analysis/ap.py
def ensureDetection(self): """ run this before analysis. Checks if event detection occurred. If not, runs AP detection on all sweeps. """ if self.APs is False: self.log.debug("analysis attempted before event detection...") self.detect()
def ensureDetection(self): """ run this before analysis. Checks if event detection occurred. If not, runs AP detection on all sweeps. """ if self.APs is False: self.log.debug("analysis attempted before event detection...") self.detect()
[ "run", "this", "before", "analysis", ".", "Checks", "if", "event", "detection", "occured", ".", "If", "not", "runs", "AP", "detection", "on", "all", "sweeps", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/swhlab/analysis/ap.py#L58-L65
[ "def", "ensureDetection", "(", "self", ")", ":", "if", "self", ".", "APs", "==", "False", ":", "self", ".", "log", ".", "debug", "(", "\"analysis attempted before event detection...\"", ")", "self", ".", "detect", "(", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
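ensureDetection() relies on a sentinel convention: APs is False until detection has ever run, and a list (possibly empty) afterwards, so an empty result is never re-detected. A minimal sketch of that guard pattern:

class LazyDetector:
    """Minimal sketch of the guard pattern in AP.ensureDetection():
    a False sentinel means "never ran"; an empty list means "ran, found nothing"."""
    def __init__(self):
        self.APs = False          # detection has not happened yet

    def detect(self):
        self.APs = []             # now a list, even if no events are found

    def ensureDetection(self):
        if self.APs is False:     # only triggers on the sentinel, not on []
            self.detect()

d = LazyDetector()
d.ensureDetection()               # first call runs detect()
print(d.APs)                      # [] -- a second call would be a no-op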
valid
AP.detect
runs AP detection on every sweep.
swhlab/analysis/ap.py
def detect(self): """runs AP detection on every sweep.""" self.log.info("initializing AP detection on all sweeps...") t1=cm.timeit() for sweep in range(self.abf.sweeps): self.detectSweep(sweep) self.log.info("AP analysis of %d sweeps found %d APs (completed in %s)", self.abf.sweeps,len(self.APs),cm.timeit(t1))
def detect(self): """runs AP detection on every sweep.""" self.log.info("initializing AP detection on all sweeps...") t1=cm.timeit() for sweep in range(self.abf.sweeps): self.detectSweep(sweep) self.log.info("AP analysis of %d sweeps found %d APs (completed in %s)", self.abf.sweeps,len(self.APs),cm.timeit(t1))
[ "runs", "AP", "detection", "on", "every", "sweep", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/swhlab/analysis/ap.py#L67-L74
[ "def", "detect", "(", "self", ")", ":", "self", ".", "log", ".", "info", "(", "\"initializing AP detection on all sweeps...\"", ")", "t1", "=", "cm", ".", "timeit", "(", ")", "for", "sweep", "in", "range", "(", "self", ".", "abf", ".", "sweeps", ")", ":", "self", ".", "detectSweep", "(", "sweep", ")", "self", ".", "log", ".", "info", "(", "\"AP analysis of %d sweeps found %d APs (completed in %s)\"", ",", "self", ".", "abf", ".", "sweeps", ",", "len", "(", "self", ".", "APs", ")", ",", "cm", ".", "timeit", "(", "t1", ")", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
AP.detectSweep
perform AP detection on current sweep.
swhlab/analysis/ap.py
def detectSweep(self,sweep=0): """perform AP detection on current sweep.""" if self.APs is False: # indicates detection never happened self.APs=[] # now indicates detection occurred # delete every AP from this sweep from the existing array for i,ap in enumerate(self.APs): if ap["sweep"]==sweep: self.APs[i]=None if self.APs.count(None): self.log.debug("deleting %d existing APs from memory",self.APs.count(None)) while None in self.APs: self.APs.remove(None) self.log.debug("initiating AP detection (%d already in memory)",len(self.APs)) self.abf.derivative=True self.abf.setsweep(sweep) # detect potential AP (Is) by a dV/dT threshold crossing Is = cm.where_cross(self.abf.sweepD,self.detect_over) self.log.debug("initial AP detection: %d APs"%len(Is)) # eliminate APs where dV/dT doesn't cross below -10 V/S within 2 ms for i,I in enumerate(Is): if np.min(self.abf.sweepD[I:I+2*self.abf.pointsPerMs])>-10: Is[i]=0 Is=Is[np.nonzero(Is)] self.log.debug("after lower threshold checking: %d APs"%len(Is)) # walk 1ms backwards and find point of +10 V/S threshold crossing for i,I in enumerate(Is): stepBack=0 while(self.abf.sweepD[I-stepBack])>10 and stepBack/self.abf.pointsPerMs<1: #1ms max stepBack+=1 Is[i]-=stepBack # analyze each AP sweepAPs=[] for i,I in enumerate(Is): try: timeInSweep=I/self.abf.pointsPerSec if timeInSweep<self.detect_time1 or timeInSweep>self.detect_time2: continue # skip because it's not within the marks ap={} # create the AP entry ap["sweep"]=sweep # number of the sweep containing this AP ap["I"]=I # index sweep point of start of AP (10 mV/ms threshold crossing) ap["Tsweep"]=I/self.abf.pointsPerSec # time in the sweep of index crossing (sec) ap["T"]=ap["Tsweep"]+self.abf.sweepInterval*sweep # time in the experiment ap["Vthreshold"]=self.abf.sweepY[I] # threshold at rate of -10mV/ms # determine how many points from the start dV/dt goes below -10 (from a 5ms chunk) chunk=self.abf.sweepD[I:I+5*self.abf.pointsPerMs] # give it 5ms to cross once I_toNegTen=np.where(chunk<-10)[0][0] chunk=self.abf.sweepD[I+I_toNegTen:I+I_toNegTen+10*self.abf.pointsPerMs] # give it 10ms to cross back if not max(chunk)>-10: self.log.debug("skipping unreal AP at T=%f"%ap["T"]) self.log.error("^^^ can you confirm this is legit?") continue # probably a pre-AP "bump" to be ignored I_recover=np.where(chunk>-10)[0][0]+I_toNegTen+I # point where trace returns to above -10 V/S ap["dVfastIs"]=[I,I_recover] # span of the fast component of the dV/dt trace ap["dVfastMS"]=(I_recover-I)/self.abf.pointsPerMs # time (in ms) of this fast AP component # determine derivative min/max from a 2ms chunk which we expect to capture the fast AP chunk=self.abf.sweepD[ap["dVfastIs"][0]:ap["dVfastIs"][1]] ap["dVmax"]=np.max(chunk) ap["dVmaxI"]=np.where(chunk==ap["dVmax"])[0][0]+I ap["dVmin"]=np.min(chunk) ap["dVminI"]=np.where(chunk==ap["dVmin"])[0][0]+I if ap["dVmax"]<10 or ap["dVmin"]>-10: self.log.debug("throwing out AP with dV/dt too low to be an AP") self.log.error("^^^ can you confirm this is legit?") continue # before determining AP shape stats, see where trace recovers to threshold chunkSize=self.abf.pointsPerMs*10 #AP shape may be 10ms if len(Is)-1>i and Is[i+1]<(I+chunkSize): # if slow AP runs into next AP chunkSize=Is[i+1]-I # chop it down if chunkSize<(self.abf.pointsPerMs*2): continue # next AP is so soon, it's >500 Hz. Can't be real. ap["VslowIs"]=[I,I+chunkSize] # time range of slow AP dynamics chunk=self.abf.sweepY[I:I+chunkSize] # determine AP peak and minimum ap["Vmax"]=np.max(chunk) ap["VmaxI"]=np.where(chunk==ap["Vmax"])[0][0]+I chunkForMin=np.copy(chunk) # so we can destroy it chunkForMin[:ap["VmaxI"]-I]=np.inf # minimum won't be before peak now ap["Vmin"]=np.min(chunkForMin) # supposedly the minimum is the AHP ap["VminI"]=np.where(chunkForMin==ap["Vmin"])[0][0]+I if ap["VminI"]<ap["VmaxI"]: self.log.error("-------------------------------") self.log.error("how is the AHP before the peak?") #TODO: start chunk at the peak self.log.error("-------------------------------") #print((I+len(chunk))-ap["VminI"],len(chunk)) if (len(chunk))-((I+len(chunk))-ap["VminI"])<10: self.log.error("-------------------------------") self.log.error("AHP too close for comfort!") self.log.error("-------------------------------") ap["msRiseTime"]=(ap["VmaxI"]-I)/self.abf.pointsPerMs # time from threshold to peak ap["msFallTime"]=(ap["VminI"]-ap["VmaxI"])/self.abf.pointsPerMs # time from peak to nadir # determine halfwidth ap["Vhalf"]=np.average([ap["Vmax"],ap["Vthreshold"]]) # half way from threshold to peak ap["VhalfI1"]=cm.where_cross(chunk,ap["Vhalf"])[0]+I # time it's first crossed ap["VhalfI2"]=cm.where_cross(-chunk,-ap["Vhalf"])[1]+I # time it's second crossed ap["msHalfwidth"]=(ap["VhalfI2"]-ap["VhalfI1"])/self.abf.pointsPerMs # time between crossings # AP error checking goes here # TODO: # if we got this far, add the AP to the list sweepAPs.extend([ap]) except Exception as e: self.log.error("crashed analyzing AP %d of %d",i,len(Is)) self.log.error(cm.exceptionToString(e)) #cm.pause() #cm.waitFor(30) #self.log.error("EXCEPTION!:\n%s"%str(sys.exc_info())) self.log.debug("finished analyzing sweep. Found %d APs",len(sweepAPs)) self.APs.extend(sweepAPs) self.abf.derivative=False
def detectSweep(self,sweep=0): """perform AP detection on current sweep.""" if self.APs is False: # indicates detection never happened self.APs=[] # now indicates detection occurred # delete every AP from this sweep from the existing array for i,ap in enumerate(self.APs): if ap["sweep"]==sweep: self.APs[i]=None if self.APs.count(None): self.log.debug("deleting %d existing APs from memory",self.APs.count(None)) while None in self.APs: self.APs.remove(None) self.log.debug("initiating AP detection (%d already in memory)",len(self.APs)) self.abf.derivative=True self.abf.setsweep(sweep) # detect potential AP (Is) by a dV/dT threshold crossing Is = cm.where_cross(self.abf.sweepD,self.detect_over) self.log.debug("initial AP detection: %d APs"%len(Is)) # eliminate APs where dV/dT doesn't cross below -10 V/S within 2 ms for i,I in enumerate(Is): if np.min(self.abf.sweepD[I:I+2*self.abf.pointsPerMs])>-10: Is[i]=0 Is=Is[np.nonzero(Is)] self.log.debug("after lower threshold checking: %d APs"%len(Is)) # walk 1ms backwards and find point of +10 V/S threshold crossing for i,I in enumerate(Is): stepBack=0 while(self.abf.sweepD[I-stepBack])>10 and stepBack/self.abf.pointsPerMs<1: #1ms max stepBack+=1 Is[i]-=stepBack # analyze each AP sweepAPs=[] for i,I in enumerate(Is): try: timeInSweep=I/self.abf.pointsPerSec if timeInSweep<self.detect_time1 or timeInSweep>self.detect_time2: continue # skip because it's not within the marks ap={} # create the AP entry ap["sweep"]=sweep # number of the sweep containing this AP ap["I"]=I # index sweep point of start of AP (10 mV/ms threshold crossing) ap["Tsweep"]=I/self.abf.pointsPerSec # time in the sweep of index crossing (sec) ap["T"]=ap["Tsweep"]+self.abf.sweepInterval*sweep # time in the experiment ap["Vthreshold"]=self.abf.sweepY[I] # threshold at rate of -10mV/ms # determine how many points from the start dV/dt goes below -10 (from a 5ms chunk) chunk=self.abf.sweepD[I:I+5*self.abf.pointsPerMs] # give it 5ms to cross once I_toNegTen=np.where(chunk<-10)[0][0] chunk=self.abf.sweepD[I+I_toNegTen:I+I_toNegTen+10*self.abf.pointsPerMs] # give it 10ms to cross back if not max(chunk)>-10: self.log.debug("skipping unreal AP at T=%f"%ap["T"]) self.log.error("^^^ can you confirm this is legit?") continue # probably a pre-AP "bump" to be ignored I_recover=np.where(chunk>-10)[0][0]+I_toNegTen+I # point where trace returns to above -10 V/S ap["dVfastIs"]=[I,I_recover] # span of the fast component of the dV/dt trace ap["dVfastMS"]=(I_recover-I)/self.abf.pointsPerMs # time (in ms) of this fast AP component # determine derivative min/max from a 2ms chunk which we expect to capture the fast AP chunk=self.abf.sweepD[ap["dVfastIs"][0]:ap["dVfastIs"][1]] ap["dVmax"]=np.max(chunk) ap["dVmaxI"]=np.where(chunk==ap["dVmax"])[0][0]+I ap["dVmin"]=np.min(chunk) ap["dVminI"]=np.where(chunk==ap["dVmin"])[0][0]+I if ap["dVmax"]<10 or ap["dVmin"]>-10: self.log.debug("throwing out AP with dV/dt too low to be an AP") self.log.error("^^^ can you confirm this is legit?") continue # before determining AP shape stats, see where trace recovers to threshold chunkSize=self.abf.pointsPerMs*10 #AP shape may be 10ms if len(Is)-1>i and Is[i+1]<(I+chunkSize): # if slow AP runs into next AP chunkSize=Is[i+1]-I # chop it down if chunkSize<(self.abf.pointsPerMs*2): continue # next AP is so soon, it's >500 Hz. Can't be real. ap["VslowIs"]=[I,I+chunkSize] # time range of slow AP dynamics chunk=self.abf.sweepY[I:I+chunkSize] # determine AP peak and minimum ap["Vmax"]=np.max(chunk) ap["VmaxI"]=np.where(chunk==ap["Vmax"])[0][0]+I chunkForMin=np.copy(chunk) # so we can destroy it chunkForMin[:ap["VmaxI"]-I]=np.inf # minimum won't be before peak now ap["Vmin"]=np.min(chunkForMin) # supposedly the minimum is the AHP ap["VminI"]=np.where(chunkForMin==ap["Vmin"])[0][0]+I if ap["VminI"]<ap["VmaxI"]: self.log.error("-------------------------------") self.log.error("how is the AHP before the peak?") #TODO: start chunk at the peak self.log.error("-------------------------------") #print((I+len(chunk))-ap["VminI"],len(chunk)) if (len(chunk))-((I+len(chunk))-ap["VminI"])<10: self.log.error("-------------------------------") self.log.error("AHP too close for comfort!") self.log.error("-------------------------------") ap["msRiseTime"]=(ap["VmaxI"]-I)/self.abf.pointsPerMs # time from threshold to peak ap["msFallTime"]=(ap["VminI"]-ap["VmaxI"])/self.abf.pointsPerMs # time from peak to nadir # determine halfwidth ap["Vhalf"]=np.average([ap["Vmax"],ap["Vthreshold"]]) # half way from threshold to peak ap["VhalfI1"]=cm.where_cross(chunk,ap["Vhalf"])[0]+I # time it's first crossed ap["VhalfI2"]=cm.where_cross(-chunk,-ap["Vhalf"])[1]+I # time it's second crossed ap["msHalfwidth"]=(ap["VhalfI2"]-ap["VhalfI1"])/self.abf.pointsPerMs # time between crossings # AP error checking goes here # TODO: # if we got this far, add the AP to the list sweepAPs.extend([ap]) except Exception as e: self.log.error("crashed analyzing AP %d of %d",i,len(Is)) self.log.error(cm.exceptionToString(e)) #cm.pause() #cm.waitFor(30) #self.log.error("EXCEPTION!:\n%s"%str(sys.exc_info())) self.log.debug("finished analyzing sweep. Found %d APs",len(sweepAPs)) self.APs.extend(sweepAPs) self.abf.derivative=False
[ "perform", "AP", "detection", "on", "current", "sweep", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/swhlab/analysis/ap.py#L76-L199
[ "def", "detectSweep", "(", "self", ",", "sweep", "=", "0", ")", ":", "if", "self", ".", "APs", "is", "False", ":", "# indicates detection never happened", "self", ".", "APs", "=", "[", "]", "# now indicates detection occured", "# delete every AP from this sweep from the existing array", "for", "i", ",", "ap", "in", "enumerate", "(", "self", ".", "APs", ")", ":", "if", "ap", "[", "\"sweep\"", "]", "==", "sweep", ":", "self", ".", "APs", "[", "i", "]", "=", "None", "if", "self", ".", "APs", ".", "count", "(", "None", ")", ":", "self", ".", "log", ".", "debug", "(", "\"deleting %d existing APs from memory\"", ",", "self", ".", "APs", ".", "count", "(", "None", ")", ")", "while", "None", "in", "self", ".", "APs", ":", "self", ".", "APs", ".", "remove", "(", "None", ")", "self", ".", "log", ".", "debug", "(", "\"initiating AP detection (%d already in memory)\"", ",", "len", "(", "self", ".", "APs", ")", ")", "self", ".", "abf", ".", "derivative", "=", "True", "self", ".", "abf", ".", "setsweep", "(", "sweep", ")", "# detect potential AP (Is) by a dV/dT threshold crossing", "Is", "=", "cm", ".", "where_cross", "(", "self", ".", "abf", ".", "sweepD", ",", "self", ".", "detect_over", ")", "self", ".", "log", ".", "debug", "(", "\"initial AP detection: %d APs\"", "%", "len", "(", "Is", ")", ")", "# eliminate APs where dV/dT doesn't cross below -10 V/S within 2 ms", "for", "i", ",", "I", "in", "enumerate", "(", "Is", ")", ":", "if", "np", ".", "min", "(", "self", ".", "abf", ".", "sweepD", "[", "I", ":", "I", "+", "2", "*", "self", ".", "abf", ".", "pointsPerMs", "]", ")", ">", "-", "10", ":", "Is", "[", "i", "]", "=", "0", "Is", "=", "Is", "[", "np", ".", "nonzero", "(", "Is", ")", "]", "self", ".", "log", ".", "debug", "(", "\"after lower threshold checking: %d APs\"", "%", "len", "(", "Is", ")", ")", "# walk 1ms backwards and find point of +10 V/S threshold crossing", "for", "i", ",", "I", "in", "enumerate", "(", "Is", ")", ":", "stepBack", "=", "0", "while", "(", "self", ".", "abf", ".", "sweepD", "[", "I", "-", "stepBack", "]", ")", ">", "10", "and", "stepBack", "/", "self", ".", "abf", ".", "pointsPerMs", "<", "1", ":", "#2ms max", "stepBack", "+=", "1", "Is", "[", "i", "]", "-=", "stepBack", "# analyze each AP", "sweepAPs", "=", "[", "]", "for", "i", ",", "I", "in", "enumerate", "(", "Is", ")", ":", "try", ":", "timeInSweep", "=", "I", "/", "self", ".", "abf", ".", "pointsPerSec", "if", "timeInSweep", "<", "self", ".", "detect_time1", "or", "timeInSweep", ">", "self", ".", "detect_time2", ":", "continue", "# skip because it's not within the marks", "ap", "=", "{", "}", "# create the AP entry", "ap", "[", "\"sweep\"", "]", "=", "sweep", "# number of the sweep containing this AP", "ap", "[", "\"I\"", "]", "=", "I", "# index sweep point of start of AP (10 mV/ms threshold crossing)", "ap", "[", "\"Tsweep\"", "]", "=", "I", "/", "self", ".", "abf", ".", "pointsPerSec", "# time in the sweep of index crossing (sec)", "ap", "[", "\"T\"", "]", "=", "ap", "[", "\"Tsweep\"", "]", "+", "self", ".", "abf", ".", "sweepInterval", "*", "sweep", "# time in the experiment", "ap", "[", "\"Vthreshold\"", "]", "=", "self", ".", "abf", ".", "sweepY", "[", "I", "]", "# threshold at rate of -10mV/ms", "# determine how many points from the start dV/dt goes below -10 (from a 5ms chunk)", "chunk", "=", "self", ".", "abf", ".", "sweepD", "[", "I", ":", "I", "+", "5", "*", "self", ".", "abf", ".", "pointsPerMs", "]", "# give it 5ms to cross once", "I_toNegTen", "=", "np", ".", "where", "(", "chunk", "<", "-", "10", 
")", "[", "0", "]", "[", "0", "]", "chunk", "=", "self", ".", "abf", ".", "sweepD", "[", "I", "+", "I_toNegTen", ":", "I", "+", "I_toNegTen", "+", "10", "*", "self", ".", "abf", ".", "pointsPerMs", "]", "# give it 30ms to cross back", "if", "not", "max", "(", "chunk", ")", ">", "-", "10", ":", "self", ".", "log", ".", "debug", "(", "\"skipping unreal AP at T=%f\"", "%", "ap", "[", "\"T\"", "]", ")", "self", ".", "log", ".", "error", "(", "\"^^^ can you confirm this is legit?\"", ")", "continue", "# probably a pre-AP \"bump\" to be ignored", "I_recover", "=", "np", ".", "where", "(", "chunk", ">", "-", "10", ")", "[", "0", "]", "[", "0", "]", "+", "I_toNegTen", "+", "I", "# point where trace returns to above -10 V/S", "ap", "[", "\"dVfastIs\"", "]", "=", "[", "I", ",", "I_recover", "]", "# span of the fast component of the dV/dt trace", "ap", "[", "\"dVfastMS\"", "]", "=", "(", "I_recover", "-", "I", ")", "/", "self", ".", "abf", ".", "pointsPerMs", "# time (in ms) of this fast AP component", "# determine derivative min/max from a 2ms chunk which we expect to capture the fast AP", "chunk", "=", "self", ".", "abf", ".", "sweepD", "[", "ap", "[", "\"dVfastIs\"", "]", "[", "0", "]", ":", "ap", "[", "\"dVfastIs\"", "]", "[", "1", "]", "]", "ap", "[", "\"dVmax\"", "]", "=", "np", ".", "max", "(", "chunk", ")", "ap", "[", "\"dVmaxI\"", "]", "=", "np", ".", "where", "(", "chunk", "==", "ap", "[", "\"dVmax\"", "]", ")", "[", "0", "]", "[", "0", "]", "+", "I", "ap", "[", "\"dVmin\"", "]", "=", "np", ".", "min", "(", "chunk", ")", "ap", "[", "\"dVminI\"", "]", "=", "np", ".", "where", "(", "chunk", "==", "ap", "[", "\"dVmin\"", "]", ")", "[", "0", "]", "[", "0", "]", "+", "I", "if", "ap", "[", "\"dVmax\"", "]", "<", "10", "or", "ap", "[", "\"dVmin\"", "]", ">", "-", "10", ":", "self", ".", "log", ".", "debug", "(", "\"throwing out AP with low dV/dt to be an AP\"", ")", "self", ".", "log", ".", "error", "(", "\"^^^ can you confirm this is legit?\"", ")", "continue", "# before determining AP shape stats, see where trace recovers to threshold", "chunkSize", "=", "self", ".", "abf", ".", "pointsPerMs", "*", "10", "#AP shape may be 10ms", "if", "len", "(", "Is", ")", "-", "1", ">", "i", "and", "Is", "[", "i", "+", "1", "]", "<", "(", "I", "+", "chunkSize", ")", ":", "# if slow AP runs into next AP", "chunkSize", "=", "Is", "[", "i", "+", "1", "]", "-", "I", "# chop it down", "if", "chunkSize", "<", "(", "self", ".", "abf", ".", "pointsPerMs", "*", "2", ")", ":", "continue", "# next AP is so soon, it's >500 Hz. 
Can't be real.", "ap", "[", "\"VslowIs\"", "]", "=", "[", "I", ",", "I", "+", "chunkSize", "]", "# time range of slow AP dynamics", "chunk", "=", "self", ".", "abf", ".", "sweepY", "[", "I", ":", "I", "+", "chunkSize", "]", "# determine AP peak and minimum", "ap", "[", "\"Vmax\"", "]", "=", "np", ".", "max", "(", "chunk", ")", "ap", "[", "\"VmaxI\"", "]", "=", "np", ".", "where", "(", "chunk", "==", "ap", "[", "\"Vmax\"", "]", ")", "[", "0", "]", "[", "0", "]", "+", "I", "chunkForMin", "=", "np", ".", "copy", "(", "chunk", ")", "# so we can destroy it", "chunkForMin", "[", ":", "ap", "[", "\"VmaxI\"", "]", "-", "I", "]", "=", "np", ".", "inf", "# minimum won't be before peak now", "ap", "[", "\"Vmin\"", "]", "=", "np", ".", "min", "(", "chunkForMin", ")", "# supposedly the minimum is the AHP", "ap", "[", "\"VminI\"", "]", "=", "np", ".", "where", "(", "chunkForMin", "==", "ap", "[", "\"Vmin\"", "]", ")", "[", "0", "]", "[", "0", "]", "+", "I", "if", "ap", "[", "\"VminI\"", "]", "<", "ap", "[", "\"VmaxI\"", "]", ":", "self", ".", "log", ".", "error", "(", "\"-------------------------------\"", ")", "self", ".", "log", ".", "error", "(", "\"how is the AHP before the peak?\"", ")", "#TODO: start chunk at the peak", "self", ".", "log", ".", "error", "(", "\"-------------------------------\"", ")", "#print((I+len(chunk))-ap[\"VminI\"],len(chunk))", "if", "(", "len", "(", "chunk", ")", ")", "-", "(", "(", "I", "+", "len", "(", "chunk", ")", ")", "-", "ap", "[", "\"VminI\"", "]", ")", "<", "10", ":", "self", ".", "log", ".", "error", "(", "\"-------------------------------\"", ")", "self", ".", "log", ".", "error", "(", "\"HP too close for comfort!\"", ")", "self", ".", "log", ".", "error", "(", "\"-------------------------------\"", ")", "ap", "[", "\"msRiseTime\"", "]", "=", "(", "ap", "[", "\"VmaxI\"", "]", "-", "I", ")", "/", "self", ".", "abf", ".", "pointsPerMs", "# time from threshold to peak", "ap", "[", "\"msFallTime\"", "]", "=", "(", "ap", "[", "\"VminI\"", "]", "-", "ap", "[", "\"VmaxI\"", "]", ")", "/", "self", ".", "abf", ".", "pointsPerMs", "# time from peak to nadir", "# determine halfwidth", "ap", "[", "\"Vhalf\"", "]", "=", "np", ".", "average", "(", "[", "ap", "[", "\"Vmax\"", "]", ",", "ap", "[", "\"Vthreshold\"", "]", "]", ")", "# half way from threshold to peak", "ap", "[", "\"VhalfI1\"", "]", "=", "cm", ".", "where_cross", "(", "chunk", ",", "ap", "[", "\"Vhalf\"", "]", ")", "[", "0", "]", "+", "I", "# time it's first crossed", "ap", "[", "\"VhalfI2\"", "]", "=", "cm", ".", "where_cross", "(", "-", "chunk", ",", "-", "ap", "[", "\"Vhalf\"", "]", ")", "[", "1", "]", "+", "I", "# time it's second crossed", "ap", "[", "\"msHalfwidth\"", "]", "=", "(", "ap", "[", "\"VhalfI2\"", "]", "-", "ap", "[", "\"VhalfI1\"", "]", ")", "/", "self", ".", "abf", ".", "pointsPerMs", "# time between crossings", "# AP error checking goes here", "# TODO:", "# if we got this far, add the AP to the list", "sweepAPs", ".", "extend", "(", "[", "ap", "]", ")", "except", "Exception", "as", "e", ":", "self", ".", "log", ".", "error", "(", "\"crashed analyzing AP %d of %d\"", ",", "i", ",", "len", "(", "Is", ")", ")", "self", ".", "log", ".", "error", "(", "cm", ".", "exceptionToString", "(", "e", ")", ")", "#cm.pause()", "#cm.waitFor(30)", "#self.log.error(\"EXCEPTION!:\\n%s\"%str(sys.exc_info()))", "self", ".", "log", ".", "debug", "(", "\"finished analyzing sweep. 
Found %d APs\"", ",", "len", "(", "sweepAPs", ")", ")", "self", ".", "APs", ".", "extend", "(", "sweepAPs", ")", "self", ".", "abf", ".", "derivative", "=", "False" ]
a86c3c65323cec809a4bd4f81919644927094bf5
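detectSweep() leans on cm.where_cross(), a swhlab.common helper; the sketch below assumes it returns the indices where a trace crosses up through a threshold, and pairs a minimal equivalent with the same reject-if-no-downward-crossing rule used above. Runnable on a synthetic dV/dt trace:

import numpy as np

def where_cross(data, threshold):
    """Indices where data crosses up through threshold -- a minimal stand-in
    for swhlab's cm.where_cross() used by detectSweep()."""
    above = data > threshold
    return np.where(~above[:-1] & above[1:])[0] + 1

points_per_ms = 20
V = np.zeros(10*points_per_ms)            # 10 ms of a synthetic dV/dt trace (V/s)
V[100:104] = [20, 80, -60, -20]           # one crude action-potential signature

Is = where_cross(V, 10)                   # candidates: dV/dt rises past +10 V/s
# reject candidates that never drop below -10 V/s within 2 ms, as detectSweep() does
Is = [I for I in Is if np.min(V[I:I+2*points_per_ms]) < -10]
print([int(I) for I in Is])               # [100]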
valid
AP.get_times
return an array of times (in sec) of all APs.
swhlab/analysis/ap.py
def get_times(self): """return an array of times (in sec) of all APs.""" self.ensureDetection() times=[] for ap in self.APs: times.append(ap["T"]) return np.array(sorted(times))
def get_times(self): """return an array of times (in sec) of all APs.""" self.ensureDetection() times=[] for ap in self.APs: times.append(ap["T"]) return np.array(sorted(times))
[ "return", "an", "array", "of", "times", "(", "in", "sec", ")", "of", "all", "APs", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/swhlab/analysis/ap.py#L203-L209
[ "def", "get_times", "(", "self", ")", ":", "self", ".", "ensureDetection", "(", ")", "times", "=", "[", "]", "for", "ap", "in", "self", ".", "APs", ":", "times", ".", "append", "(", "ap", "[", "\"T\"", "]", ")", "return", "np", ".", "array", "(", "sorted", "(", "times", ")", ")" ]
a86c3c65323cec809a4bd4f81919644927094bf5
valid
AP.get_bySweep
returns AP info by sweep arranged as a list (by sweep). feature: * "freqs" - list of instantaneous frequencies by sweep. * "firsts" - list of first instantaneous frequency by sweep. * "times" - list of times of each AP in the sweep. * "count" - number of APs per sweep. * "average" - average instantaneous frequency per sweep. * "median" - median instantaneous frequency per sweep.
swhlab/analysis/ap.py
def get_bySweep(self,feature="freqs"): """ returns AP info by sweep arranged as a list (by sweep). feature: * "freqs" - list of instantaneous frequencies by sweep. * "firsts" - list of first instantaneous frequency by sweep. * "times" - list of times of each AP in the sweep. * "count" - number of APs per sweep. * "average" - average instantaneous frequency per sweep. * "median" - median instantaneous frequency per sweep. """ self.ensureDetection() bySweepTimes=[[]]*self.abf.sweeps # determine AP spike times by sweep for sweep in range(self.abf.sweeps): sweepTimes=[] for ap in self.APs: if ap["sweep"]==sweep: sweepTimes.append(ap["Tsweep"]) bySweepTimes[sweep]=sweepTimes # determine instantaneous frequencies by sweep bySweepFreqs=[[]]*self.abf.sweeps for i,times in enumerate(bySweepTimes): if len(times)<2: continue diffs=np.array(times[1:])-np.array(times[:-1]) bySweepFreqs[i]=np.array(1/diffs).tolist() # give the user what they want if feature == "freqs": return bySweepFreqs elif feature == "firsts": result=np.zeros(self.abf.sweeps) # initialize to this for i,freqs in enumerate(bySweepFreqs): if len(freqs): result[i]=freqs[0] return result elif feature == "times": return bySweepTimes elif feature == "count": result=np.zeros(self.abf.sweeps) # initialize to this for i,times in enumerate(bySweepTimes): result[i]=len(bySweepTimes[i]) return result elif feature == "average": result=np.zeros(self.abf.sweeps) # initialize to this for i,freqs in enumerate(bySweepFreqs): if len(freqs): result[i]=np.nanmean(freqs) return result elif feature == "median": result=np.zeros(self.abf.sweeps) # initialize to this for i,freqs in enumerate(bySweepFreqs): if len(freqs): result[i]=np.nanmedian(freqs) return result else: self.log.error("get_bySweep() can't handle [%s]",feature) return None
def get_bySweep(self,feature="freqs"): """ returns AP info by sweep arranged as a list (by sweep). feature: * "freqs" - list of instantaneous frequencies by sweep. * "firsts" - list of first instantaneous frequency by sweep. * "times" - list of times of each AP in the sweep. * "count" - number of APs per sweep. * "average" - average instantaneous frequency per sweep. * "median" - median instantaneous frequency per sweep. """ self.ensureDetection() bySweepTimes=[[]]*self.abf.sweeps # determine AP spike times by sweep for sweep in range(self.abf.sweeps): sweepTimes=[] for ap in self.APs: if ap["sweep"]==sweep: sweepTimes.append(ap["Tsweep"]) bySweepTimes[sweep]=sweepTimes # determine instantaneous frequencies by sweep bySweepFreqs=[[]]*self.abf.sweeps for i,times in enumerate(bySweepTimes): if len(times)<2: continue diffs=np.array(times[1:])-np.array(times[:-1]) bySweepFreqs[i]=np.array(1/diffs).tolist() # give the user what they want if feature == "freqs": return bySweepFreqs elif feature == "firsts": result=np.zeros(self.abf.sweeps) # initialize to this for i,freqs in enumerate(bySweepFreqs): if len(freqs): result[i]=freqs[0] return result elif feature == "times": return bySweepTimes elif feature == "count": result=np.zeros(self.abf.sweeps) # initialize to this for i,times in enumerate(bySweepTimes): result[i]=len(bySweepTimes[i]) return result elif feature == "average": result=np.zeros(self.abf.sweeps) # initialize to this for i,freqs in enumerate(bySweepFreqs): if len(freqs): result[i]=np.nanmean(freqs) return result elif feature == "median": result=np.zeros(self.abf.sweeps) # initialize to this for i,freqs in enumerate(bySweepFreqs): if len(freqs): result[i]=np.nanmedian(freqs) return result else: self.log.error("get_bySweep() can't handle [%s]",feature) return None
[ "returns", "AP", "info", "by", "sweep", "arranged", "as", "a", "list", "(", "by", "sweep", ")", "." ]
swharden/SWHLab
python
https://github.com/swharden/SWHLab/blob/a86c3c65323cec809a4bd4f81919644927094bf5/swhlab/analysis/ap.py#L211-L278
[ "def", "get_bySweep", "(", "self", ",", "feature", "=", "\"freqs\"", ")", ":", "self", ".", "ensureDetection", "(", ")", "bySweepTimes", "=", "[", "[", "]", "]", "*", "self", ".", "abf", ".", "sweeps", "# determine AP spike times by sweep", "for", "sweep", "in", "range", "(", "self", ".", "abf", ".", "sweeps", ")", ":", "sweepTimes", "=", "[", "]", "for", "ap", "in", "self", ".", "APs", ":", "if", "ap", "[", "\"sweep\"", "]", "==", "sweep", ":", "sweepTimes", ".", "append", "(", "ap", "[", "\"Tsweep\"", "]", ")", "bySweepTimes", "[", "sweep", "]", "=", "sweepTimes", "# determine instantaneous frequencies by sweep", "bySweepFreqs", "=", "[", "[", "]", "]", "*", "self", ".", "abf", ".", "sweeps", "for", "i", ",", "times", "in", "enumerate", "(", "bySweepTimes", ")", ":", "if", "len", "(", "times", ")", "<", "2", ":", "continue", "diffs", "=", "np", ".", "array", "(", "times", "[", "1", ":", "]", ")", "-", "np", ".", "array", "(", "times", "[", ":", "-", "1", "]", ")", "bySweepFreqs", "[", "i", "]", "=", "np", ".", "array", "(", "1", "/", "diffs", ")", ".", "tolist", "(", ")", "# give the user what they want", "if", "feature", "==", "\"freqs\"", ":", "return", "bySweepFreqs", "elif", "feature", "==", "\"firsts\"", ":", "result", "=", "np", ".", "zeros", "(", "self", ".", "abf", ".", "sweeps", ")", "# initialize to this", "for", "i", ",", "freqs", "in", "enumerate", "(", "bySweepFreqs", ")", ":", "if", "len", "(", "freqs", ")", ":", "result", "[", "i", "]", "=", "freqs", "[", "0", "]", "return", "result", "elif", "feature", "==", "\"times\"", ":", "return", "bySweepTimes", "elif", "feature", "==", "\"count\"", ":", "result", "=", "np", ".", "zeros", "(", "self", ".", "abf", ".", "sweeps", ")", "# initialize to this", "for", "i", ",", "times", "in", "enumerate", "(", "bySweepTimes", ")", ":", "result", "[", "i", "]", "=", "len", "(", "bySweepTimes", "[", "i", "]", ")", "return", "result", "elif", "feature", "==", "\"average\"", ":", "result", "=", "np", ".", "zeros", "(", "self", ".", "abf", ".", "sweeps", ")", "# initialize to this", "for", "i", ",", "freqs", "in", "enumerate", "(", "bySweepFreqs", ")", ":", "if", "len", "(", "freqs", ")", ":", "result", "[", "i", "]", "=", "np", ".", "nanmean", "(", "freqs", ")", "return", "result", "elif", "feature", "==", "\"median\"", ":", "result", "=", "np", ".", "zeros", "(", "self", ".", "abf", ".", "sweeps", ")", "# initialize to this", "for", "i", ",", "freqs", "in", "enumerate", "(", "bySweepFreqs", ")", ":", "if", "len", "(", "freqs", ")", ":", "result", "[", "i", "]", "=", "np", ".", "nanmedian", "(", "freqs", ")", "return", "result", "else", ":", "self", ".", "log", ".", "error", "(", "\"get_bySweep() can't handle [%s]\"", ",", "feature", ")", "return", "None" ]
a86c3c65323cec809a4bd4f81919644927094bf5
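A minimal usage sketch for the get_bySweep record above. Only the method name and its feature strings come from the record; the AP class name and its constructor call are assumptions about the swhlab API.

from swhlab.analysis.ap import AP  # module path taken from the record's URL; class name assumed

ap = AP("demo.abf")                   # hypothetical constructor call; the real signature may differ
counts = ap.get_bySweep("count")      # numpy array with one AP count per sweep
firsts = ap.get_bySweep("firsts")     # first instantaneous frequency per sweep (0 where none)
freq_lists = ap.get_bySweep("freqs")  # list of per-sweep instantaneous-frequency lists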
valid
get_author_and_version
Return package author and version as listed in `init.py`.
setup.py
def get_author_and_version(package):
    """
    Return package author and version as listed in `init.py`.
    """
    init_py = open(os.path.join(package, '__init__.py')).read()
    author = re.search("__author__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
    version = re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
    return author, version
def get_author_and_version(package):
    """
    Return package author and version as listed in `init.py`.
    """
    init_py = open(os.path.join(package, '__init__.py')).read()
    author = re.search("__author__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
    version = re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
    return author, version
[ "Return", "package", "author", "and", "version", "as", "listed", "in", "init", ".", "py", "." ]
jazzband/django-discover-jenkins
python
https://github.com/jazzband/django-discover-jenkins/blob/c0c859dfdd571de6e8f63865dfc8ebac6bab1d07/setup.py#L12-L19
[ "def", "get_author_and_version", "(", "package", ")", ":", "init_py", "=", "open", "(", "os", ".", "path", ".", "join", "(", "package", ",", "'__init__.py'", ")", ")", ".", "read", "(", ")", "author", "=", "re", ".", "search", "(", "\"__author__ = ['\\\"]([^'\\\"]+)['\\\"]\"", ",", "init_py", ")", ".", "group", "(", "1", ")", "version", "=", "re", ".", "search", "(", "\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\"", ",", "init_py", ")", ".", "group", "(", "1", ")", "return", "author", ",", "version" ]
c0c859dfdd571de6e8f63865dfc8ebac6bab1d07
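get_author_and_version is self-contained, so it can be exercised directly; this sketch re-creates the record's helper verbatim and calls it on the repo's own package directory (the call site is illustrative and assumes the package directory is on disk).

import os
import re

def get_author_and_version(package):
    # Read the package's __init__.py and pull the dunder metadata via regex.
    init_py = open(os.path.join(package, '__init__.py')).read()
    author = re.search("__author__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
    version = re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
    return author, version

author, version = get_author_and_version('discover_jenkins')  # illustrative call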
valid
api_subclass_factory
Create an API subclass with fewer methods than its base class.

    Arguments:
      name (:py:class:`str`): The name of the new class.
      docstring (:py:class:`str`): The docstring for the new class.
      remove_methods (:py:class:`dict`): The methods to remove from the
        base class's :py:attr:`API_METHODS` for the subclass. The key is
        the name of the root method (e.g. ``'auth'`` for ``'auth.test'``),
        the value is either a tuple of child method names (e.g.
        ``('test',)``) or, if all children should be removed, the special
        value :py:const:`ALL`.
      base (:py:class:`type`, optional): The base class (defaults to
        :py:class:`SlackApi`).

    Returns:
      :py:class:`type`: The new subclass.

    Raises:
      :py:class:`KeyError`: If the method wasn't in the superclass.
aslack/slack_api.py
def api_subclass_factory(name, docstring, remove_methods, base=SlackApi):
    """Create an API subclass with fewer methods than its base class.

    Arguments:
      name (:py:class:`str`): The name of the new class.
      docstring (:py:class:`str`): The docstring for the new class.
      remove_methods (:py:class:`dict`): The methods to remove from the
        base class's :py:attr:`API_METHODS` for the subclass. The key is
        the name of the root method (e.g. ``'auth'`` for ``'auth.test'``),
        the value is either a tuple of child method names (e.g.
        ``('test',)``) or, if all children should be removed, the special
        value :py:const:`ALL`.
      base (:py:class:`type`, optional): The base class (defaults to
        :py:class:`SlackApi`).

    Returns:
      :py:class:`type`: The new subclass.

    Raises:
      :py:class:`KeyError`: If the method wasn't in the superclass.
    """
    methods = deepcopy(base.API_METHODS)
    for parent, to_remove in remove_methods.items():
        if to_remove is ALL:
            del methods[parent]
        else:
            for method in to_remove:
                del methods[parent][method]
    return type(name, (base,), dict(API_METHODS=methods, __doc__=docstring))
def api_subclass_factory(name, docstring, remove_methods, base=SlackApi):
    """Create an API subclass with fewer methods than its base class.

    Arguments:
      name (:py:class:`str`): The name of the new class.
      docstring (:py:class:`str`): The docstring for the new class.
      remove_methods (:py:class:`dict`): The methods to remove from the
        base class's :py:attr:`API_METHODS` for the subclass. The key is
        the name of the root method (e.g. ``'auth'`` for ``'auth.test'``),
        the value is either a tuple of child method names (e.g.
        ``('test',)``) or, if all children should be removed, the special
        value :py:const:`ALL`.
      base (:py:class:`type`, optional): The base class (defaults to
        :py:class:`SlackApi`).

    Returns:
      :py:class:`type`: The new subclass.

    Raises:
      :py:class:`KeyError`: If the method wasn't in the superclass.
    """
    methods = deepcopy(base.API_METHODS)
    for parent, to_remove in remove_methods.items():
        if to_remove is ALL:
            del methods[parent]
        else:
            for method in to_remove:
                del methods[parent][method]
    return type(name, (base,), dict(API_METHODS=methods, __doc__=docstring))
[ "Create", "an", "API", "subclass", "with", "fewer", "methods", "than", "its", "base", "class", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_api.py#L225-L254
[ "def", "api_subclass_factory", "(", "name", ",", "docstring", ",", "remove_methods", ",", "base", "=", "SlackApi", ")", ":", "methods", "=", "deepcopy", "(", "base", ".", "API_METHODS", ")", "for", "parent", ",", "to_remove", "in", "remove_methods", ".", "items", "(", ")", ":", "if", "to_remove", "is", "ALL", ":", "del", "methods", "[", "parent", "]", "else", ":", "for", "method", "in", "to_remove", ":", "del", "methods", "[", "parent", "]", "[", "method", "]", "return", "type", "(", "name", ",", "(", "base", ",", ")", ",", "dict", "(", "API_METHODS", "=", "methods", ",", "__doc__", "=", "docstring", ")", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
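A hedged sketch of calling api_subclass_factory. The ALL sentinel is used unqualified in the record, so it lives in the same module; the concrete method names passed to remove_methods are hypothetical and would raise KeyError if absent from API_METHODS, as the docstring warns.

from aslack.slack_api import api_subclass_factory, ALL

SlackReadOnlyApi = api_subclass_factory(
    'SlackReadOnlyApi',
    docstring='Illustrative subclass with some methods removed.',
    remove_methods={
        'admin': ALL,          # drop every child of a root method
        'chat': ('delete',),   # drop a single child method (hypothetical names)
    },
)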
valid
SlackApi.execute_method
Execute a specified Slack Web API method.

        Arguments:
          method (:py:class:`str`): The name of the method.
          **params (:py:class:`dict`): Any additional parameters required.

        Returns:
          :py:class:`dict`: The JSON data from the response.

        Raises:
          :py:class:`aiohttp.web_exceptions.HTTPException`: If the HTTP
            request returns a code other than 200 (OK).
          SlackApiError: If the Slack API is reached but the response
            contains an error message.
aslack/slack_api.py
async def execute_method(self, method, **params):
        """Execute a specified Slack Web API method.

        Arguments:
          method (:py:class:`str`): The name of the method.
          **params (:py:class:`dict`): Any additional parameters required.

        Returns:
          :py:class:`dict`: The JSON data from the response.

        Raises:
          :py:class:`aiohttp.web_exceptions.HTTPException`: If the HTTP
            request returns a code other than 200 (OK).
          SlackApiError: If the Slack API is reached but the response
            contains an error message.
        """
        url = self.url_builder(method, url_params=params)
        logger.info('Executing method %r', method)
        response = await aiohttp.get(url)
        logger.info('Status: %r', response.status)
        if response.status == 200:
            json = await response.json()
            logger.debug('...with JSON %r', json)
            if json.get('ok'):
                return json
            raise SlackApiError(json['error'])
        else:
            raise_for_status(response)
async def execute_method(self, method, **params):
        """Execute a specified Slack Web API method.

        Arguments:
          method (:py:class:`str`): The name of the method.
          **params (:py:class:`dict`): Any additional parameters required.

        Returns:
          :py:class:`dict`: The JSON data from the response.

        Raises:
          :py:class:`aiohttp.web_exceptions.HTTPException`: If the HTTP
            request returns a code other than 200 (OK).
          SlackApiError: If the Slack API is reached but the response
            contains an error message.
        """
        url = self.url_builder(method, url_params=params)
        logger.info('Executing method %r', method)
        response = await aiohttp.get(url)
        logger.info('Status: %r', response.status)
        if response.status == 200:
            json = await response.json()
            logger.debug('...with JSON %r', json)
            if json.get('ok'):
                return json
            raise SlackApiError(json['error'])
        else:
            raise_for_status(response)
[ "Execute", "a", "specified", "Slack", "Web", "API", "method", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_api.py#L172-L201
[ "async", "def", "execute_method", "(", "self", ",", "method", ",", "*", "*", "params", ")", ":", "url", "=", "self", ".", "url_builder", "(", "method", ",", "url_params", "=", "params", ")", "logger", ".", "info", "(", "'Executing method %r'", ",", "method", ")", "response", "=", "await", "aiohttp", ".", "get", "(", "url", ")", "logger", ".", "info", "(", "'Status: %r'", ",", "response", ".", "status", ")", "if", "response", ".", "status", "==", "200", ":", "json", "=", "await", "response", ".", "json", "(", ")", "logger", ".", "debug", "(", "'...with JSON %r'", ",", "json", ")", "if", "json", ".", "get", "(", "'ok'", ")", ":", "return", "json", "raise", "SlackApiError", "(", "json", "[", "'error'", "]", ")", "else", ":", "raise_for_status", "(", "response", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
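A minimal driver for execute_method. The api_token keyword is grounded in the from_api_token record further below; the token value is a placeholder, and 'auth.test' is the method name mentioned in the factory docstring above.

import asyncio

from aslack.slack_api import SlackApi, SlackApiError

async def main():
    api = SlackApi(api_token='xoxb-placeholder')      # placeholder token
    try:
        data = await api.execute_method('auth.test')  # parsed JSON when ok=True
        print(data)
    except SlackApiError as err:                      # raised when the API answers ok=False
        print('Slack rejected the call:', err)

asyncio.get_event_loop().run_until_complete(main())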
valid
SlackApi.method_exists
Whether a given method exists in the known API.

        Arguments:
          method (:py:class:`str`): The name of the method.

        Returns:
          :py:class:`bool`: Whether the method is in the known API.
aslack/slack_api.py
def method_exists(cls, method):
        """Whether a given method exists in the known API.

        Arguments:
          method (:py:class:`str`): The name of the method.

        Returns:
          :py:class:`bool`: Whether the method is in the known API.
        """
        methods = cls.API_METHODS
        for key in method.split('.'):
            methods = methods.get(key)
            if methods is None:
                break
        if isinstance(methods, str):
            logger.debug('%r: %r', method, methods)
            return True
        return False
def method_exists(cls, method):
        """Whether a given method exists in the known API.

        Arguments:
          method (:py:class:`str`): The name of the method.

        Returns:
          :py:class:`bool`: Whether the method is in the known API.
        """
        methods = cls.API_METHODS
        for key in method.split('.'):
            methods = methods.get(key)
            if methods is None:
                break
        if isinstance(methods, str):
            logger.debug('%r: %r', method, methods)
            return True
        return False
[ "Whether", "a", "given", "method", "exists", "in", "the", "known", "API", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_api.py#L204-L222
[ "def", "method_exists", "(", "cls", ",", "method", ")", ":", "methods", "=", "cls", ".", "API_METHODS", "for", "key", "in", "method", ".", "split", "(", "'.'", ")", ":", "methods", "=", "methods", ".", "get", "(", "key", ")", "if", "methods", "is", "None", ":", "break", "if", "isinstance", "(", "methods", ",", "str", ")", ":", "logger", ".", "debug", "(", "'%r: %r'", ",", "method", ",", "methods", ")", "return", "True", "return", "False" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
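method_exists walks the nested API_METHODS mapping, so it can be probed without a token; the concrete method names below are assumptions.

from aslack.slack_api import SlackApi

print(SlackApi.method_exists('auth.test'))     # True if the leaf maps to a docstring string
print(SlackApi.method_exists('no.such.call'))  # False: traversal stops at the first missing key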
valid
XPathSelectorHandler._add_parsley_ns
Extend XPath evaluation with Parsley extensions' namespace
parslepy/selectors.py
def _add_parsley_ns(cls, namespace_dict):
        """
        Extend XPath evaluation with Parsley extensions' namespace
        """
        namespace_dict.update({
            'parslepy' : cls.LOCAL_NAMESPACE,
            'parsley' : cls.LOCAL_NAMESPACE,
        })
        return namespace_dict
def _add_parsley_ns(cls, namespace_dict):
        """
        Extend XPath evaluation with Parsley extensions' namespace
        """
        namespace_dict.update({
            'parslepy' : cls.LOCAL_NAMESPACE,
            'parsley' : cls.LOCAL_NAMESPACE,
        })
        return namespace_dict
[ "Extend", "XPath", "evaluation", "with", "Parsley", "extensions", "namespace" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/selectors.py#L222-L231
[ "def", "_add_parsley_ns", "(", "cls", ",", "namespace_dict", ")", ":", "namespace_dict", ".", "update", "(", "{", "'parslepy'", ":", "cls", ".", "LOCAL_NAMESPACE", ",", "'parsley'", ":", "cls", ".", "LOCAL_NAMESPACE", ",", "}", ")", "return", "namespace_dict" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
XPathSelectorHandler.make
XPath expression can also use EXSLT functions (as long as they are understood by libxslt)
parslepy/selectors.py
def make(self, selection):
        """
        XPath expression can also use EXSLT functions (as long as they are
        understood by libxslt)
        """
        cached = self._selector_cache.get(selection)
        if cached:
            return cached

        try:
            selector = lxml.etree.XPath(selection,
                namespaces = self.namespaces,
                extensions = self.extensions,
                smart_strings=(self.SMART_STRINGS
                    or self._test_smart_strings_needed(selection)),
                )
        except lxml.etree.XPathSyntaxError as syntax_error:
            syntax_error.msg += ": %s" % selection
            raise syntax_error
        except Exception as e:
            if self.DEBUG:
                print(repr(e), selection)
            raise

        # wrap it/cache it
        self._selector_cache[selection] = Selector(selector)
        return self._selector_cache[selection]
def make(self, selection):
        """
        XPath expression can also use EXSLT functions (as long as they are
        understood by libxslt)
        """
        cached = self._selector_cache.get(selection)
        if cached:
            return cached

        try:
            selector = lxml.etree.XPath(selection,
                namespaces = self.namespaces,
                extensions = self.extensions,
                smart_strings=(self.SMART_STRINGS
                    or self._test_smart_strings_needed(selection)),
                )
        except lxml.etree.XPathSyntaxError as syntax_error:
            syntax_error.msg += ": %s" % selection
            raise syntax_error
        except Exception as e:
            if self.DEBUG:
                print(repr(e), selection)
            raise

        # wrap it/cache it
        self._selector_cache[selection] = Selector(selector)
        return self._selector_cache[selection]
[ "XPath", "expression", "can", "also", "use", "EXSLT", "functions", "(", "as", "long", "as", "they", "are", "understood", "by", "libxslt", ")" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/selectors.py#L233-L262
[ "def", "make", "(", "self", ",", "selection", ")", ":", "cached", "=", "self", ".", "_selector_cache", ".", "get", "(", "selection", ")", "if", "cached", ":", "return", "cached", "try", ":", "selector", "=", "lxml", ".", "etree", ".", "XPath", "(", "selection", ",", "namespaces", "=", "self", ".", "namespaces", ",", "extensions", "=", "self", ".", "extensions", ",", "smart_strings", "=", "(", "self", ".", "SMART_STRINGS", "or", "self", ".", "_test_smart_strings_needed", "(", "selection", ")", ")", ",", ")", "except", "lxml", ".", "etree", ".", "XPathSyntaxError", "as", "syntax_error", ":", "syntax_error", ".", "msg", "+=", "\": %s\"", "%", "selection", "raise", "syntax_error", "except", "Exception", "as", "e", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "repr", "(", "e", ")", ",", "selection", ")", "raise", "# wrap it/cache it", "self", ".", "_selector_cache", "[", "selection", "]", "=", "Selector", "(", "selector", ")", "return", "self", ".", "_selector_cache", "[", "selection", "]" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
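A sketch of the EXSLT support the make docstring mentions. re:test is a standard EXSLT regexp function; whether the handler registers the re prefix by default, and whether the constructor takes no arguments, are assumptions here.

from parslepy.selectors import XPathSelectorHandler

handler = XPathSelectorHandler()  # hypothetical no-arg construction
# EXSLT regular-expression test, assuming the 're' prefix is among the
# handler's registered namespaces:
pdf_links = handler.make("//a[re:test(@href, '\\.pdf$')]/@href")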
valid
XPathSelectorHandler.extract
Try and convert matching Elements to unicode strings. If this fails, the selector evaluation probably already returned some string(s) of some sort, or boolean value, or int/float, so return that instead.
parslepy/selectors.py
def extract(self, document, selector, debug_offset=''):
        """
        Try and convert matching Elements to unicode strings.

        If this fails, the selector evaluation probably already returned
        some string(s) of some sort, or boolean value, or int/float, so
        return that instead.
        """
        selected = self.select(document, selector)
        if selected is not None:
            if isinstance(selected, (list, tuple)):
                # FIXME: return None or return empty list?
                if not len(selected):
                    return
                return [self._extract_single(m) for m in selected]
            else:
                return self._extract_single(selected)
        # selector did not match anything
        else:
            if self.DEBUG:
                print(debug_offset, "selector did not match anything; return None")
            return None
def extract(self, document, selector, debug_offset=''):
        """
        Try and convert matching Elements to unicode strings.

        If this fails, the selector evaluation probably already returned
        some string(s) of some sort, or boolean value, or int/float, so
        return that instead.
        """
        selected = self.select(document, selector)
        if selected is not None:
            if isinstance(selected, (list, tuple)):
                # FIXME: return None or return empty list?
                if not len(selected):
                    return
                return [self._extract_single(m) for m in selected]
            else:
                return self._extract_single(selected)
        # selector did not match anything
        else:
            if self.DEBUG:
                print(debug_offset, "selector did not match anything; return None")
            return None
[ "Try", "and", "convert", "matching", "Elements", "to", "unicode", "strings", "." ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/selectors.py#L273-L299
[ "def", "extract", "(", "self", ",", "document", ",", "selector", ",", "debug_offset", "=", "''", ")", ":", "selected", "=", "self", ".", "select", "(", "document", ",", "selector", ")", "if", "selected", "is", "not", "None", ":", "if", "isinstance", "(", "selected", ",", "(", "list", ",", "tuple", ")", ")", ":", "# FIXME: return None or return empty list?", "if", "not", "len", "(", "selected", ")", ":", "return", "return", "[", "self", ".", "_extract_single", "(", "m", ")", "for", "m", "in", "selected", "]", "else", ":", "return", "self", ".", "_extract_single", "(", "selected", ")", "# selector did not match anything", "else", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "\"selector did not match anything; return None\"", ")", "return", "None" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
DefaultSelectorHandler.make
Scopes and selectors are tested in this order:

        * is this a CSS selector with an appended @something attribute?
        * is this a regular CSS selector?
        * is this an XPath expression?

        XPath expression can also use EXSLT functions (as long as they are
        understood by libxslt)
parslepy/selectors.py
def make(self, selection):
        """
        Scopes and selectors are tested in this order:

        * is this a CSS selector with an appended @something attribute?
        * is this a regular CSS selector?
        * is this an XPath expression?

        XPath expression can also use EXSLT functions (as long as they are
        understood by libxslt)
        """
        cached = self._selector_cache.get(selection)
        if cached:
            return cached

        namespaces = self.EXSLT_NAMESPACES
        self._add_parsley_ns(namespaces)
        try:
            # CSS with attribute? (non-standard but convenient)
            # CSS selector cannot select attributes
            # this "<css selector> @<attr>" syntax is a Parsley extension
            # construct CSS selector and append attribute to XPath expression
            m = self.REGEX_ENDING_ATTRIBUTE.match(selection)
            if m:
                # the selector should be a regular CSS selector
                cssxpath = css_to_xpath(m.group("expr"))
                # if "|" is used for namespace prefix reference,
                # convert it to XPath prefix syntax
                attribute = m.group("attr").replace('|', ':')
                cssxpath = "%s/%s" % (cssxpath, attribute)
            else:
                cssxpath = css_to_xpath(selection)
            selector = lxml.etree.XPath(
                cssxpath,
                namespaces = self.namespaces,
                extensions = self.extensions,
                smart_strings=(self.SMART_STRINGS
                    or self._test_smart_strings_needed(selection)),
                )
        except tuple(self.CSSSELECT_SYNTAXERROR_EXCEPTIONS) as syntax_error:
            if self.DEBUG:
                print(repr(syntax_error), selection)
                print("Try interpreting as XPath selector")
            try:
                selector = lxml.etree.XPath(selection,
                    namespaces = self.namespaces,
                    extensions = self.extensions,
                    smart_strings=(self.SMART_STRINGS
                        or self._test_smart_strings_needed(selection)),
                    )
            except lxml.etree.XPathSyntaxError as syntax_error:
                syntax_error.msg += ": %s" % selection
                raise syntax_error
            except Exception as e:
                if self.DEBUG:
                    print(repr(e), selection)
                raise
        # for exception when trying to convert <cssselector> @<attribute> syntax
        except lxml.etree.XPathSyntaxError as syntax_error:
            syntax_error.msg += ": %s" % selection
            raise syntax_error
        except Exception as e:
            if self.DEBUG:
                print(repr(e), selection)
            raise

        # wrap it/cache it
        self._selector_cache[selection] = Selector(selector)
        return self._selector_cache[selection]
def make(self, selection):
        """
        Scopes and selectors are tested in this order:

        * is this a CSS selector with an appended @something attribute?
        * is this a regular CSS selector?
        * is this an XPath expression?

        XPath expression can also use EXSLT functions (as long as they are
        understood by libxslt)
        """
        cached = self._selector_cache.get(selection)
        if cached:
            return cached

        namespaces = self.EXSLT_NAMESPACES
        self._add_parsley_ns(namespaces)
        try:
            # CSS with attribute? (non-standard but convenient)
            # CSS selector cannot select attributes
            # this "<css selector> @<attr>" syntax is a Parsley extension
            # construct CSS selector and append attribute to XPath expression
            m = self.REGEX_ENDING_ATTRIBUTE.match(selection)
            if m:
                # the selector should be a regular CSS selector
                cssxpath = css_to_xpath(m.group("expr"))
                # if "|" is used for namespace prefix reference,
                # convert it to XPath prefix syntax
                attribute = m.group("attr").replace('|', ':')
                cssxpath = "%s/%s" % (cssxpath, attribute)
            else:
                cssxpath = css_to_xpath(selection)
            selector = lxml.etree.XPath(
                cssxpath,
                namespaces = self.namespaces,
                extensions = self.extensions,
                smart_strings=(self.SMART_STRINGS
                    or self._test_smart_strings_needed(selection)),
                )
        except tuple(self.CSSSELECT_SYNTAXERROR_EXCEPTIONS) as syntax_error:
            if self.DEBUG:
                print(repr(syntax_error), selection)
                print("Try interpreting as XPath selector")
            try:
                selector = lxml.etree.XPath(selection,
                    namespaces = self.namespaces,
                    extensions = self.extensions,
                    smart_strings=(self.SMART_STRINGS
                        or self._test_smart_strings_needed(selection)),
                    )
            except lxml.etree.XPathSyntaxError as syntax_error:
                syntax_error.msg += ": %s" % selection
                raise syntax_error
            except Exception as e:
                if self.DEBUG:
                    print(repr(e), selection)
                raise
        # for exception when trying to convert <cssselector> @<attribute> syntax
        except lxml.etree.XPathSyntaxError as syntax_error:
            syntax_error.msg += ": %s" % selection
            raise syntax_error
        except Exception as e:
            if self.DEBUG:
                print(repr(e), selection)
            raise

        # wrap it/cache it
        self._selector_cache[selection] = Selector(selector)
        return self._selector_cache[selection]
[ "Scopes", "and", "selectors", "are", "tested", "in", "this", "order", ":", "*", "is", "this", "a", "CSS", "selector", "with", "an", "appended", "@something", "attribute?", "*", "is", "this", "a", "regular", "CSS", "selector?", "*", "is", "this", "an", "XPath", "expression?" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/selectors.py#L435-L510
[ "def", "make", "(", "self", ",", "selection", ")", ":", "cached", "=", "self", ".", "_selector_cache", ".", "get", "(", "selection", ")", "if", "cached", ":", "return", "cached", "namespaces", "=", "self", ".", "EXSLT_NAMESPACES", "self", ".", "_add_parsley_ns", "(", "namespaces", ")", "try", ":", "# CSS with attribute? (non-standard but convenient)", "# CSS selector cannot select attributes", "# this \"<css selector> @<attr>\" syntax is a Parsley extension", "# construct CSS selector and append attribute to XPath expression", "m", "=", "self", ".", "REGEX_ENDING_ATTRIBUTE", ".", "match", "(", "selection", ")", "if", "m", ":", "# the selector should be a regular CSS selector", "cssxpath", "=", "css_to_xpath", "(", "m", ".", "group", "(", "\"expr\"", ")", ")", "# if \"|\" is used for namespace prefix reference,", "# convert it to XPath prefix syntax", "attribute", "=", "m", ".", "group", "(", "\"attr\"", ")", ".", "replace", "(", "'|'", ",", "':'", ")", "cssxpath", "=", "\"%s/%s\"", "%", "(", "cssxpath", ",", "attribute", ")", "else", ":", "cssxpath", "=", "css_to_xpath", "(", "selection", ")", "selector", "=", "lxml", ".", "etree", ".", "XPath", "(", "cssxpath", ",", "namespaces", "=", "self", ".", "namespaces", ",", "extensions", "=", "self", ".", "extensions", ",", "smart_strings", "=", "(", "self", ".", "SMART_STRINGS", "or", "self", ".", "_test_smart_strings_needed", "(", "selection", ")", ")", ",", ")", "except", "tuple", "(", "self", ".", "CSSSELECT_SYNTAXERROR_EXCEPTIONS", ")", "as", "syntax_error", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "repr", "(", "syntax_error", ")", ",", "selection", ")", "print", "(", "\"Try interpreting as XPath selector\"", ")", "try", ":", "selector", "=", "lxml", ".", "etree", ".", "XPath", "(", "selection", ",", "namespaces", "=", "self", ".", "namespaces", ",", "extensions", "=", "self", ".", "extensions", ",", "smart_strings", "=", "(", "self", ".", "SMART_STRINGS", "or", "self", ".", "_test_smart_strings_needed", "(", "selection", ")", ")", ",", ")", "except", "lxml", ".", "etree", ".", "XPathSyntaxError", "as", "syntax_error", ":", "syntax_error", ".", "msg", "+=", "\": %s\"", "%", "selection", "raise", "syntax_error", "except", "Exception", "as", "e", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "repr", "(", "e", ")", ",", "selection", ")", "raise", "# for exception when trying to convert <cssselector> @<attribute> syntax", "except", "lxml", ".", "etree", ".", "XPathSyntaxError", "as", "syntax_error", ":", "syntax_error", ".", "msg", "+=", "\": %s\"", "%", "selection", "raise", "syntax_error", "except", "Exception", "as", "e", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "repr", "(", "e", ")", ",", "selection", ")", "raise", "# wrap it/cache it", "self", ".", "_selector_cache", "[", "selection", "]", "=", "Selector", "(", "selector", ")", "return", "self", ".", "_selector_cache", "[", "selection", "]" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
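A sketch of the three selector forms the DefaultSelectorHandler.make docstring lists; the no-arg construction is an assumption (the class clearly carries namespaces and extensions state).

from parslepy.selectors import DefaultSelectorHandler

handler = DefaultSelectorHandler()           # hypothetical no-arg construction
css = handler.make('div.content a')          # plain CSS, converted via css_to_xpath
attr = handler.make('div.content a @href')   # Parsley's non-standard "<css> @<attr>" extension
xpath = handler.make('//head/title/text()')  # falls through to XPath when CSS parsing fails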
valid
SlackBot.join_rtm
Join the real-time messaging service.

        Arguments:
          filters (:py:class:`dict`, optional): Dictionary mapping
            message filters to the functions they should dispatch to.
            Use a :py:class:`collections.OrderedDict` if precedence is
            important; only one filter, the first match, will be
            applied to each message.
aslack/slack_bot/bot.py
async def join_rtm(self, filters=None):
        """Join the real-time messaging service.

        Arguments:
          filters (:py:class:`dict`, optional): Dictionary mapping
            message filters to the functions they should dispatch to.
            Use a :py:class:`collections.OrderedDict` if precedence is
            important; only one filter, the first match, will be
            applied to each message.
        """
        if filters is None:
            filters = [cls(self) for cls in self.MESSAGE_FILTERS]
        url = await self._get_socket_url()
        logger.debug('Connecting to %r', url)
        async with ws_connect(url) as socket:
            first_msg = await socket.receive()
            self._validate_first_message(first_msg)
            self.socket = socket
            async for message in socket:
                if message.tp == MsgType.text:
                    await self.handle_message(message, filters)
                elif message.tp in (MsgType.closed, MsgType.error):
                    if not socket.closed:
                        await socket.close()
                    self.socket = None
                    break
        logger.info('Left real-time messaging.')
async def join_rtm(self, filters=None):
        """Join the real-time messaging service.

        Arguments:
          filters (:py:class:`dict`, optional): Dictionary mapping
            message filters to the functions they should dispatch to.
            Use a :py:class:`collections.OrderedDict` if precedence is
            important; only one filter, the first match, will be
            applied to each message.
        """
        if filters is None:
            filters = [cls(self) for cls in self.MESSAGE_FILTERS]
        url = await self._get_socket_url()
        logger.debug('Connecting to %r', url)
        async with ws_connect(url) as socket:
            first_msg = await socket.receive()
            self._validate_first_message(first_msg)
            self.socket = socket
            async for message in socket:
                if message.tp == MsgType.text:
                    await self.handle_message(message, filters)
                elif message.tp in (MsgType.closed, MsgType.error):
                    if not socket.closed:
                        await socket.close()
                    self.socket = None
                    break
        logger.info('Left real-time messaging.')
[ "Join", "the", "real", "-", "time", "messaging", "service", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L75-L102
[ "async", "def", "join_rtm", "(", "self", ",", "filters", "=", "None", ")", ":", "if", "filters", "is", "None", ":", "filters", "=", "[", "cls", "(", "self", ")", "for", "cls", "in", "self", ".", "MESSAGE_FILTERS", "]", "url", "=", "await", "self", ".", "_get_socket_url", "(", ")", "logger", ".", "debug", "(", "'Connecting to %r'", ",", "url", ")", "async", "with", "ws_connect", "(", "url", ")", "as", "socket", ":", "first_msg", "=", "await", "socket", ".", "receive", "(", ")", "self", ".", "_validate_first_message", "(", "first_msg", ")", "self", ".", "socket", "=", "socket", "async", "for", "message", "in", "socket", ":", "if", "message", ".", "tp", "==", "MsgType", ".", "text", ":", "await", "self", ".", "handle_message", "(", "message", ",", "filters", ")", "elif", "message", ".", "tp", "in", "(", "MsgType", ".", "closed", ",", "MsgType", ".", "error", ")", ":", "if", "not", "socket", ".", "closed", ":", "await", "socket", ".", "close", "(", ")", "self", ".", "socket", "=", "None", "break", "logger", ".", "info", "(", "'Left real-time messaging.'", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
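A minimal bot entry point combining join_rtm with from_api_token (defined in a record further below); reading the token from the environment is the documented default.

import asyncio

from aslack.slack_bot.bot import SlackBot  # import path taken from the record's file path

async def main():
    bot = await SlackBot.from_api_token()  # token resolved from the environment
    await bot.join_rtm()                   # default filters come from MESSAGE_FILTERS

asyncio.get_event_loop().run_until_complete(main())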
valid
SlackBot.handle_message
Handle an incoming message appropriately.

        Arguments:
          message (:py:class:`aiohttp.websocket.Message`): The incoming
            message to handle.
          filters (:py:class:`list`): The filters to apply to incoming
            messages.
aslack/slack_bot/bot.py
async def handle_message(self, message, filters):
        """Handle an incoming message appropriately.

        Arguments:
          message (:py:class:`aiohttp.websocket.Message`): The incoming
            message to handle.
          filters (:py:class:`list`): The filters to apply to incoming
            messages.
        """
        data = self._unpack_message(message)
        logger.debug(data)
        if data.get('type') == 'error':
            raise SlackApiError(
                data.get('error', {}).get('msg', str(data))
            )
        elif self.message_is_to_me(data):
            text = data['text'][len(self.address_as):].strip()
            if text == 'help':
                return self._respond(
                    channel=data['channel'],
                    text=self._instruction_list(filters),
                )
            elif text == 'version':
                return self._respond(
                    channel=data['channel'],
                    text=self.VERSION,
                )
        for _filter in filters:
            if _filter.matches(data):
                logger.debug('Response triggered')
                async for response in _filter:
                    self._respond(channel=data['channel'], text=response)
async def handle_message(self, message, filters):
        """Handle an incoming message appropriately.

        Arguments:
          message (:py:class:`aiohttp.websocket.Message`): The incoming
            message to handle.
          filters (:py:class:`list`): The filters to apply to incoming
            messages.
        """
        data = self._unpack_message(message)
        logger.debug(data)
        if data.get('type') == 'error':
            raise SlackApiError(
                data.get('error', {}).get('msg', str(data))
            )
        elif self.message_is_to_me(data):
            text = data['text'][len(self.address_as):].strip()
            if text == 'help':
                return self._respond(
                    channel=data['channel'],
                    text=self._instruction_list(filters),
                )
            elif text == 'version':
                return self._respond(
                    channel=data['channel'],
                    text=self.VERSION,
                )
        for _filter in filters:
            if _filter.matches(data):
                logger.debug('Response triggered')
                async for response in _filter:
                    self._respond(channel=data['channel'], text=response)
[ "Handle", "an", "incoming", "message", "appropriately", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L104-L136
[ "async", "def", "handle_message", "(", "self", ",", "message", ",", "filters", ")", ":", "data", "=", "self", ".", "_unpack_message", "(", "message", ")", "logger", ".", "debug", "(", "data", ")", "if", "data", ".", "get", "(", "'type'", ")", "==", "'error'", ":", "raise", "SlackApiError", "(", "data", ".", "get", "(", "'error'", ",", "{", "}", ")", ".", "get", "(", "'msg'", ",", "str", "(", "data", ")", ")", ")", "elif", "self", ".", "message_is_to_me", "(", "data", ")", ":", "text", "=", "data", "[", "'text'", "]", "[", "len", "(", "self", ".", "address_as", ")", ":", "]", ".", "strip", "(", ")", "if", "text", "==", "'help'", ":", "return", "self", ".", "_respond", "(", "channel", "=", "data", "[", "'channel'", "]", ",", "text", "=", "self", ".", "_instruction_list", "(", "filters", ")", ",", ")", "elif", "text", "==", "'version'", ":", "return", "self", ".", "_respond", "(", "channel", "=", "data", "[", "'channel'", "]", ",", "text", "=", "self", ".", "VERSION", ",", ")", "for", "_filter", "in", "filters", ":", "if", "_filter", ".", "matches", "(", "data", ")", ":", "logger", ".", "debug", "(", "'Response triggered'", ")", "async", "for", "response", "in", "_filter", ":", "self", ".", "_respond", "(", "channel", "=", "data", "[", "'channel'", "]", ",", "text", "=", "response", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
valid
SlackBot.message_is_to_me
If you send a message directly to me
aslack/slack_bot/bot.py
def message_is_to_me(self, data):
        """If you send a message directly to me"""
        return (data.get('type') == 'message' and
                data.get('text', '').startswith(self.address_as))
def message_is_to_me(self, data):
        """If you send a message directly to me"""
        return (data.get('type') == 'message' and
                data.get('text', '').startswith(self.address_as))
[ "If", "you", "send", "a", "message", "directly", "to", "me" ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L143-L146
[ "def", "message_is_to_me", "(", "self", ",", "data", ")", ":", "return", "(", "data", ".", "get", "(", "'type'", ")", "==", "'message'", "and", "data", ".", "get", "(", "'text'", ",", "''", ")", ".", "startswith", "(", "self", ".", "address_as", ")", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
valid
SlackBot.from_api_token
Create a new instance from the API token.

        Arguments:
          token (:py:class:`str`, optional): The bot's API token
            (defaults to ``None``, which means looking in the
            environment).
          api_cls (:py:class:`type`, optional): The class to create as
            the ``api`` argument for API access (defaults to
            :py:class:`aslack.slack_api.SlackBotApi`).

        Returns:
          :py:class:`SlackBot`: The new instance.
aslack/slack_bot/bot.py
async def from_api_token(cls, token=None, api_cls=SlackBotApi):
        """Create a new instance from the API token.

        Arguments:
          token (:py:class:`str`, optional): The bot's API token
            (defaults to ``None``, which means looking in the
            environment).
          api_cls (:py:class:`type`, optional): The class to create as
            the ``api`` argument for API access (defaults to
            :py:class:`aslack.slack_api.SlackBotApi`).

        Returns:
          :py:class:`SlackBot`: The new instance.
        """
        api = api_cls.from_env() if token is None else api_cls(api_token=token)
        data = await api.execute_method(cls.API_AUTH_ENDPOINT)
        return cls(data['user_id'], data['user'], api)
async def from_api_token(cls, token=None, api_cls=SlackBotApi):
        """Create a new instance from the API token.

        Arguments:
          token (:py:class:`str`, optional): The bot's API token
            (defaults to ``None``, which means looking in the
            environment).
          api_cls (:py:class:`type`, optional): The class to create as
            the ``api`` argument for API access (defaults to
            :py:class:`aslack.slack_api.SlackBotApi`).

        Returns:
          :py:class:`SlackBot`: The new instance.
        """
        api = api_cls.from_env() if token is None else api_cls(api_token=token)
        data = await api.execute_method(cls.API_AUTH_ENDPOINT)
        return cls(data['user_id'], data['user'], api)
[ "Create", "a", "new", "instance", "from", "the", "API", "token", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L149-L166
[ "async", "def", "from_api_token", "(", "cls", ",", "token", "=", "None", ",", "api_cls", "=", "SlackBotApi", ")", ":", "api", "=", "api_cls", ".", "from_env", "(", ")", "if", "token", "is", "None", "else", "api_cls", "(", "api_token", "=", "token", ")", "data", "=", "await", "api", ".", "execute_method", "(", "cls", ".", "API_AUTH_ENDPOINT", ")", "return", "cls", "(", "data", "[", "'user_id'", "]", ",", "data", "[", "'user'", "]", ",", "api", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
valid
SlackBot._format_message
Format an outgoing message for transmission.

        Note:
          Adds the message type (``'message'``) and incremental ID.

        Arguments:
          channel (:py:class:`str`): The channel to send to.
          text (:py:class:`str`): The message text to send.

        Returns:
          :py:class:`str`: The JSON string of the message.
aslack/slack_bot/bot.py
def _format_message(self, channel, text):
        """Format an outgoing message for transmission.

        Note:
          Adds the message type (``'message'``) and incremental ID.

        Arguments:
          channel (:py:class:`str`): The channel to send to.
          text (:py:class:`str`): The message text to send.

        Returns:
          :py:class:`str`: The JSON string of the message.
        """
        payload = {'type': 'message', 'id': next(self._msg_ids)}
        payload.update(channel=channel, text=text)
        return json.dumps(payload)
def _format_message(self, channel, text):
        """Format an outgoing message for transmission.

        Note:
          Adds the message type (``'message'``) and incremental ID.

        Arguments:
          channel (:py:class:`str`): The channel to send to.
          text (:py:class:`str`): The message text to send.

        Returns:
          :py:class:`str`: The JSON string of the message.
        """
        payload = {'type': 'message', 'id': next(self._msg_ids)}
        payload.update(channel=channel, text=text)
        return json.dumps(payload)
[ "Format", "an", "outgoing", "message", "for", "transmission", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L168-L184
[ "def", "_format_message", "(", "self", ",", "channel", ",", "text", ")", ":", "payload", "=", "{", "'type'", ":", "'message'", ",", "'id'", ":", "next", "(", "self", ".", "_msg_ids", ")", "}", "payload", ".", "update", "(", "channel", "=", "channel", ",", "text", "=", "text", ")", "return", "json", ".", "dumps", "(", "payload", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
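The wire format produced by _format_message, reconstructed standalone. That the class seeds its incremental id from an itertools counter is an assumption; the channel ID is a placeholder.

import itertools
import json

_msg_ids = itertools.count(1)  # assumption: an itertools.count backs the 'id' field

payload = {'type': 'message', 'id': next(_msg_ids)}
payload.update(channel='C0PLACEHOLDER', text='hello')
print(json.dumps(payload))
# -> {"type": "message", "id": 1, "channel": "C0PLACEHOLDER", "text": "hello"}
# (key order may vary on older Python versions)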
valid
SlackBot._get_socket_url
Get the WebSocket URL for the RTM session.

        Warning:
          The URL expires if the session is not joined within 30 seconds
          of the API call to the start endpoint.

        Returns:
          :py:class:`str`: The socket URL.
aslack/slack_bot/bot.py
async def _get_socket_url(self):
        """Get the WebSocket URL for the RTM session.

        Warning:
          The URL expires if the session is not joined within 30 seconds
          of the API call to the start endpoint.

        Returns:
          :py:class:`str`: The socket URL.
        """
        data = await self.api.execute_method(
            self.RTM_START_ENDPOINT,
            simple_latest=True,
            no_unreads=True,
        )
        return data['url']
async def _get_socket_url(self):
        """Get the WebSocket URL for the RTM session.

        Warning:
          The URL expires if the session is not joined within 30 seconds
          of the API call to the start endpoint.

        Returns:
          :py:class:`str`: The socket URL.
        """
        data = await self.api.execute_method(
            self.RTM_START_ENDPOINT,
            simple_latest=True,
            no_unreads=True,
        )
        return data['url']
[ "Get", "the", "WebSocket", "URL", "for", "the", "RTM", "session", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L186-L202
[ "async", "def", "_get_socket_url", "(", "self", ")", ":", "data", "=", "await", "self", ".", "api", ".", "execute_method", "(", "self", ".", "RTM_START_ENDPOINT", ",", "simple_latest", "=", "True", ",", "no_unreads", "=", "True", ",", ")", "return", "data", "[", "'url'", "]" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
valid
SlackBot._instruction_list
Generates the instructions for a bot and its filters.

        Note:
          The guidance for each filter is generated by combining the
          docstrings of the predicate filter and resulting dispatch
          function with a single space between. The class's
          :py:attr:`INSTRUCTIONS` and the default help command are added.

        Arguments:
          filters (:py:class:`list`): The filters to apply to incoming
            messages.

        Returns:
          :py:class:`str`: The bot's instructions.
aslack/slack_bot/bot.py
def _instruction_list(self, filters):
        """Generates the instructions for a bot and its filters.

        Note:
          The guidance for each filter is generated by combining the
          docstrings of the predicate filter and resulting dispatch
          function with a single space between. The class's
          :py:attr:`INSTRUCTIONS` and the default help command are added.

        Arguments:
          filters (:py:class:`list`): The filters to apply to incoming
            messages.

        Returns:
          :py:class:`str`: The bot's instructions.
        """
        return '\n\n'.join([
            self.INSTRUCTIONS.strip(),
            '*Supported methods:*',
            'If you send "@{}: help" to me I reply with these '
            'instructions.'.format(self.user),
            'If you send "@{}: version" to me I reply with my current '
            'version.'.format(self.user),
        ] + [filter.description() for filter in filters])
def _instruction_list(self, filters):
        """Generates the instructions for a bot and its filters.

        Note:
          The guidance for each filter is generated by combining the
          docstrings of the predicate filter and resulting dispatch
          function with a single space between. The class's
          :py:attr:`INSTRUCTIONS` and the default help command are added.

        Arguments:
          filters (:py:class:`list`): The filters to apply to incoming
            messages.

        Returns:
          :py:class:`str`: The bot's instructions.
        """
        return '\n\n'.join([
            self.INSTRUCTIONS.strip(),
            '*Supported methods:*',
            'If you send "@{}: help" to me I reply with these '
            'instructions.'.format(self.user),
            'If you send "@{}: version" to me I reply with my current '
            'version.'.format(self.user),
        ] + [filter.description() for filter in filters])
[ "Generates", "the", "instructions", "for", "a", "bot", "and", "its", "filters", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L204-L229
[ "def", "_instruction_list", "(", "self", ",", "filters", ")", ":", "return", "'\\n\\n'", ".", "join", "(", "[", "self", ".", "INSTRUCTIONS", ".", "strip", "(", ")", ",", "'*Supported methods:*'", ",", "'If you send \"@{}: help\" to me I reply with these '", "'instructions.'", ".", "format", "(", "self", ".", "user", ")", ",", "'If you send \"@{}: version\" to me I reply with my current '", "'version.'", ".", "format", "(", "self", ".", "user", ")", ",", "]", "+", "[", "filter", ".", "description", "(", ")", "for", "filter", "in", "filters", "]", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
valid
SlackBot._respond
Respond to a message on the current socket.

        Args:
          channel (:py:class:`str`): The channel to send to.
          text (:py:class:`str`): The message text to send.
aslack/slack_bot/bot.py
def _respond(self, channel, text):
        """Respond to a message on the current socket.

        Args:
          channel (:py:class:`str`): The channel to send to.
          text (:py:class:`str`): The message text to send.
        """
        result = self._format_message(channel, text)
        if result is not None:
            logger.info(
                'Sending message: %r',
                truncate(result, max_len=50),
            )
            self.socket.send_str(result)
def _respond(self, channel, text):
        """Respond to a message on the current socket.

        Args:
          channel (:py:class:`str`): The channel to send to.
          text (:py:class:`str`): The message text to send.
        """
        result = self._format_message(channel, text)
        if result is not None:
            logger.info(
                'Sending message: %r',
                truncate(result, max_len=50),
            )
            self.socket.send_str(result)
[ "Respond", "to", "a", "message", "on", "the", "current", "socket", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L231-L245
[ "def", "_respond", "(", "self", ",", "channel", ",", "text", ")", ":", "result", "=", "self", ".", "_format_message", "(", "channel", ",", "text", ")", "if", "result", "is", "not", "None", ":", "logger", ".", "info", "(", "'Sending message: %r'", ",", "truncate", "(", "result", ",", "max_len", "=", "50", ")", ",", ")", "self", ".", "socket", ".", "send_str", "(", "result", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
valid
SlackBot._validate_first_message
Check the first message matches the expected handshake.

        Note:
          The handshake is provided as :py:attr:`RTM_HANDSHAKE`.

        Arguments:
          msg (:py:class:`aiohttp.Message`): The message to validate.

        Raises:
          :py:class:`SlackApiError`: If the data doesn't match the
            expected handshake.
aslack/slack_bot/bot.py
def _validate_first_message(cls, msg):
        """Check the first message matches the expected handshake.

        Note:
          The handshake is provided as :py:attr:`RTM_HANDSHAKE`.

        Arguments:
          msg (:py:class:`aiohttp.Message`): The message to validate.

        Raises:
          :py:class:`SlackApiError`: If the data doesn't match the
            expected handshake.
        """
        data = cls._unpack_message(msg)
        logger.debug(data)
        if data != cls.RTM_HANDSHAKE:
            raise SlackApiError('Unexpected response: {!r}'.format(data))
        logger.info('Joined real-time messaging.')
def _validate_first_message(cls, msg):
        """Check the first message matches the expected handshake.

        Note:
          The handshake is provided as :py:attr:`RTM_HANDSHAKE`.

        Arguments:
          msg (:py:class:`aiohttp.Message`): The message to validate.

        Raises:
          :py:class:`SlackApiError`: If the data doesn't match the
            expected handshake.
        """
        data = cls._unpack_message(msg)
        logger.debug(data)
        if data != cls.RTM_HANDSHAKE:
            raise SlackApiError('Unexpected response: {!r}'.format(data))
        logger.info('Joined real-time messaging.')
[ "Check", "the", "first", "message", "matches", "the", "expected", "handshake", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/bot.py#L267-L285
[ "def", "_validate_first_message", "(", "cls", ",", "msg", ")", ":", "data", "=", "cls", ".", "_unpack_message", "(", "msg", ")", "logger", ".", "debug", "(", "data", ")", "if", "data", "!=", "cls", ".", "RTM_HANDSHAKE", ":", "raise", "SlackApiError", "(", "'Unexpected response: {!r}'", ".", "format", "(", "data", ")", ")", "logger", ".", "info", "(", "'Joined real-time messaging.'", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
valid
find_first_existing_executable
Accepts list of [('executable_file_path', 'options')], Returns first working executable_file_path
discover_jenkins/utils.py
def find_first_existing_executable(exe_list):
    """
    Accepts list of [('executable_file_path', 'options')],
    Returns first working executable_file_path
    """
    for filepath, opts in exe_list:
        try:
            proc = subprocess.Popen([filepath, opts],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            proc.communicate()
        except OSError:
            pass
        else:
            return filepath
def find_first_existing_executable(exe_list):
    """
    Accepts list of [('executable_file_path', 'options')],
    Returns first working executable_file_path
    """
    for filepath, opts in exe_list:
        try:
            proc = subprocess.Popen([filepath, opts],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            proc.communicate()
        except OSError:
            pass
        else:
            return filepath
[ "Accepts", "list", "of", "[", "(", "executable_file_path", "options", ")", "]", "Returns", "first", "working", "executable_file_path" ]
jazzband/django-discover-jenkins
python
https://github.com/jazzband/django-discover-jenkins/blob/c0c859dfdd571de6e8f63865dfc8ebac6bab1d07/discover_jenkins/utils.py#L18-L32
[ "def", "find_first_existing_executable", "(", "exe_list", ")", ":", "for", "filepath", ",", "opts", "in", "exe_list", ":", "try", ":", "proc", "=", "subprocess", ".", "Popen", "(", "[", "filepath", ",", "opts", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "proc", ".", "communicate", "(", ")", "except", "OSError", ":", "pass", "else", ":", "return", "filepath" ]
c0c859dfdd571de6e8f63865dfc8ebac6bab1d07
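A usage sketch for find_first_existing_executable; the candidate binaries are illustrative.

from discover_jenkins.utils import find_first_existing_executable

# Probe a preference-ordered list; each entry is (binary, probe flag):
sass_bin = find_first_existing_executable([
    ('sass', '--version'),   # hypothetical candidates, probed in order
    ('scss', '--version'),
])
# Returns the first path whose probe call does not raise OSError,
# or None implicitly if every candidate fails.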
valid
get_app_locations
Returns list of paths to tested apps
discover_jenkins/utils.py
def get_app_locations():
    """
    Returns list of paths to tested apps
    """
    return [os.path.dirname(os.path.normpath(import_module(app_name).__file__))
            for app_name in PROJECT_APPS]
def get_app_locations():
    """
    Returns list of paths to tested apps
    """
    return [os.path.dirname(os.path.normpath(import_module(app_name).__file__))
            for app_name in PROJECT_APPS]
[ "Returns", "list", "of", "paths", "to", "tested", "apps" ]
jazzband/django-discover-jenkins
python
https://github.com/jazzband/django-discover-jenkins/blob/c0c859dfdd571de6e8f63865dfc8ebac6bab1d07/discover_jenkins/utils.py#L35-L40
[ "def", "get_app_locations", "(", ")", ":", "return", "[", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "normpath", "(", "import_module", "(", "app_name", ")", ".", "__file__", ")", ")", "for", "app_name", "in", "PROJECT_APPS", "]" ]
c0c859dfdd571de6e8f63865dfc8ebac6bab1d07
valid
get_tasks
Get the imported task classes for each task that will be run
discover_jenkins/runner.py
def get_tasks():
    """Get the imported task classes for each task that will be run"""
    task_classes = []
    for task_path in TASKS:
        try:
            module, classname = task_path.rsplit('.', 1)
        except ValueError:
            raise ImproperlyConfigured('%s isn\'t a task module' % task_path)
        try:
            mod = import_module(module)
        except ImportError as e:
            raise ImproperlyConfigured('Error importing task %s: "%s"' % (module, e))
        try:
            task_class = getattr(mod, classname)
        except AttributeError:
            raise ImproperlyConfigured('Task module "%s" does not define a '
                                       '"%s" class' % (module, classname))
        task_classes.append(task_class)
    return task_classes
def get_tasks():
    """Get the imported task classes for each task that will be run"""
    task_classes = []
    for task_path in TASKS:
        try:
            module, classname = task_path.rsplit('.', 1)
        except ValueError:
            raise ImproperlyConfigured('%s isn\'t a task module' % task_path)
        try:
            mod = import_module(module)
        except ImportError as e:
            raise ImproperlyConfigured('Error importing task %s: "%s"' % (module, e))
        try:
            task_class = getattr(mod, classname)
        except AttributeError:
            raise ImproperlyConfigured('Task module "%s" does not define a '
                                       '"%s" class' % (module, classname))
        task_classes.append(task_class)
    return task_classes
[ "Get", "the", "imported", "task", "classes", "for", "each", "task", "that", "will", "be", "run" ]
jazzband/django-discover-jenkins
python
https://github.com/jazzband/django-discover-jenkins/blob/c0c859dfdd571de6e8f63865dfc8ebac6bab1d07/discover_jenkins/runner.py#L13-L32
[ "def", "get_tasks", "(", ")", ":", "task_classes", "=", "[", "]", "for", "task_path", "in", "TASKS", ":", "try", ":", "module", ",", "classname", "=", "task_path", ".", "rsplit", "(", "'.'", ",", "1", ")", "except", "ValueError", ":", "raise", "ImproperlyConfigured", "(", "'%s isn\\'t a task module'", "%", "task_path", ")", "try", ":", "mod", "=", "import_module", "(", "module", ")", "except", "ImportError", "as", "e", ":", "raise", "ImproperlyConfigured", "(", "'Error importing task %s: \"%s\"'", "%", "(", "module", ",", "e", ")", ")", "try", ":", "task_class", "=", "getattr", "(", "mod", ",", "classname", ")", "except", "AttributeError", ":", "raise", "ImproperlyConfigured", "(", "'Task module \"%s\" does not define a '", "'\"%s\" class'", "%", "(", "module", ",", "classname", ")", ")", "task_classes", ".", "append", "(", "task_class", ")", "return", "task_classes" ]
c0c859dfdd571de6e8f63865dfc8ebac6bab1d07
valid
get_task_options
Get the options for each task that will be run
discover_jenkins/runner.py
def get_task_options():
    """Get the options for each task that will be run"""
    options = ()
    task_classes = get_tasks()
    for cls in task_classes:
        options += cls.option_list
    return options
def get_task_options():
    """Get the options for each task that will be run"""
    options = ()
    task_classes = get_tasks()
    for cls in task_classes:
        options += cls.option_list
    return options
[ "Get", "the", "options", "for", "each", "task", "that", "will", "be", "run" ]
jazzband/django-discover-jenkins
python
https://github.com/jazzband/django-discover-jenkins/blob/c0c859dfdd571de6e8f63865dfc8ebac6bab1d07/discover_jenkins/runner.py#L35-L43
[ "def", "get_task_options", "(", ")", ":", "options", "=", "(", ")", "task_classes", "=", "get_tasks", "(", ")", "for", "cls", "in", "task_classes", ":", "options", "+=", "cls", ".", "option_list", "return", "options" ]
c0c859dfdd571de6e8f63865dfc8ebac6bab1d07
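A sketch exercising the two records above; get_tasks resolves each dotted path in TASKS to a class, and get_task_options concatenates each task's option_list. Running it requires a configured Django settings module, and the example task path in the comment is hypothetical.

from discover_jenkins.runner import get_tasks, get_task_options

# Each TASKS entry must be a dotted path ending in a class name,
# e.g. 'myproject.tasks.PyLintTask' (hypothetical); anything else
# raises ImproperlyConfigured during resolution.
task_classes = get_tasks()
options = get_task_options()  # tuple of every task class's option_list entries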
valid
Database.to_cldf
Write the data from the db to a CLDF dataset according to the metadata
    in `self.dataset`.

    :param dest: destination directory for the CLDF dataset.
    :param mdname: file name of the metadata file to be written.
    :return: path of the metadata file
src/pycldf/db.py
def to_cldf(self, dest, mdname='cldf-metadata.json'):
        """
        Write the data from the db to a CLDF dataset according to the metadata
        in `self.dataset`.

        :param dest: destination directory for the CLDF dataset.
        :param mdname: file name of the metadata file to be written.
        :return: path of the metadata file
        """
        dest = Path(dest)
        if not dest.exists():
            dest.mkdir()

        data = self.read()

        if data[self.source_table_name]:
            sources = Sources()
            for src in data[self.source_table_name]:
                sources.add(Source(
                    src['genre'],
                    src['id'],
                    **{k: v for k, v in src.items() if k not in ['id', 'genre']}))
            sources.write(dest / self.dataset.properties.get('dc:source', 'sources.bib'))

        for table_type, items in data.items():
            try:
                table = self.dataset[table_type]
                table.common_props['dc:extent'] = table.write(
                    [self.retranslate(table, item) for item in items],
                    base=dest)
            except KeyError:
                assert table_type == self.source_table_name, table_type
        return self.dataset.write_metadata(dest / mdname)
def to_cldf(self, dest, mdname='cldf-metadata.json'):
        """
        Write the data from the db to a CLDF dataset according to the metadata
        in `self.dataset`.

        :param dest: destination directory for the CLDF dataset.
        :param mdname: file name of the metadata file to be written.
        :return: path of the metadata file
        """
        dest = Path(dest)
        if not dest.exists():
            dest.mkdir()

        data = self.read()

        if data[self.source_table_name]:
            sources = Sources()
            for src in data[self.source_table_name]:
                sources.add(Source(
                    src['genre'],
                    src['id'],
                    **{k: v for k, v in src.items() if k not in ['id', 'genre']}))
            sources.write(dest / self.dataset.properties.get('dc:source', 'sources.bib'))

        for table_type, items in data.items():
            try:
                table = self.dataset[table_type]
                table.common_props['dc:extent'] = table.write(
                    [self.retranslate(table, item) for item in items],
                    base=dest)
            except KeyError:
                assert table_type == self.source_table_name, table_type
        return self.dataset.write_metadata(dest / mdname)
[ "Write", "the", "data", "from", "the", "db", "to", "a", "CLDF", "dataset", "according", "to", "the", "metadata", "in", "self", ".", "dataset", "." ]
cldf/pycldf
python
https://github.com/cldf/pycldf/blob/636f1eb3ea769394e14ad9e42a83b6096efa9728/src/pycldf/db.py#L181-L212
[ "def", "to_cldf", "(", "self", ",", "dest", ",", "mdname", "=", "'cldf-metadata.json'", ")", ":", "dest", "=", "Path", "(", "dest", ")", "if", "not", "dest", ".", "exists", "(", ")", ":", "dest", ".", "mkdir", "(", ")", "data", "=", "self", ".", "read", "(", ")", "if", "data", "[", "self", ".", "source_table_name", "]", ":", "sources", "=", "Sources", "(", ")", "for", "src", "in", "data", "[", "self", ".", "source_table_name", "]", ":", "sources", ".", "add", "(", "Source", "(", "src", "[", "'genre'", "]", ",", "src", "[", "'id'", "]", ",", "*", "*", "{", "k", ":", "v", "for", "k", ",", "v", "in", "src", ".", "items", "(", ")", "if", "k", "not", "in", "[", "'id'", ",", "'genre'", "]", "}", ")", ")", "sources", ".", "write", "(", "dest", "/", "self", ".", "dataset", ".", "properties", ".", "get", "(", "'dc:source'", ",", "'sources.bib'", ")", ")", "for", "table_type", ",", "items", "in", "data", ".", "items", "(", ")", ":", "try", ":", "table", "=", "self", ".", "dataset", "[", "table_type", "]", "table", ".", "common_props", "[", "'dc:extent'", "]", "=", "table", ".", "write", "(", "[", "self", ".", "retranslate", "(", "table", ",", "item", ")", "for", "item", "in", "items", "]", ",", "base", "=", "dest", ")", "except", "KeyError", ":", "assert", "table_type", "==", "self", ".", "source_table_name", ",", "table_type", "return", "self", ".", "dataset", ".", "write_metadata", "(", "dest", "/", "mdname", ")" ]
636f1eb3ea769394e14ad9e42a83b6096efa9728
valid
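A minimal round-trip sketch for the record above, assuming the pycldf `Dataset` API and the `Database` class from src/pycldf/db.py; the file and directory names are illustrative:

# Dump a CLDF dataset into SQLite, then write it back out as CLDF.
from pycldf import Dataset
from pycldf.db import Database

ds = Dataset.from_metadata('cldf-metadata.json')  # hypothetical input path
db = Database(ds, fname='db.sqlite')
db.write_from_tg()            # load the dataset's tables into SQLite
print(db.to_cldf('out'))      # writes tables + sources, returns metadata path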
validate
cldf validate <DATASET> Validate a dataset against the CLDF specification, i.e. check - whether required tables and columns are present - whether values for required columns are present - the referential integrity of the dataset
src/pycldf/__main__.py
def validate(args): """ cldf validate <DATASET> Validate a dataset against the CLDF specification, i.e. check - whether required tables and columns are present - whether values for required columns are present - the referential integrity of the dataset """ ds = _get_dataset(args) ds.validate(log=args.log)
def validate(args): """ cldf validate <DATASET> Validate a dataset against the CLDF specification, i.e. check - whether required tables and columns are present - whether values for required columns are present - the referential integrity of the dataset """ ds = _get_dataset(args) ds.validate(log=args.log)
[ "cldf", "validate", "<DATASET", ">" ]
cldf/pycldf
python
https://github.com/cldf/pycldf/blob/636f1eb3ea769394e14ad9e42a83b6096efa9728/src/pycldf/__main__.py#L37-L47
[ "def", "validate", "(", "args", ")", ":", "ds", "=", "_get_dataset", "(", "args", ")", "ds", ".", "validate", "(", "log", "=", "args", ".", "log", ")" ]
636f1eb3ea769394e14ad9e42a83b6096efa9728
valid
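A programmatic counterpart to the `cldf validate` command above, using the same `ds.validate()` call as the record; the metadata path is illustrative:

from pycldf import Dataset

ds = Dataset.from_metadata('cldf-metadata.json')  # hypothetical path
ds.validate()  # checks required tables/columns, required values, referential integrity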
stats
cldf stats <DATASET> Print basic stats for CLDF dataset <DATASET>, where <DATASET> may be the path to - a CLDF metadata file - a CLDF core data file
src/pycldf/__main__.py
def stats(args): """ cldf stats <DATASET> Print basic stats for CLDF dataset <DATASET>, where <DATASET> may be the path to - a CLDF metadata file - a CLDF core data file """ ds = _get_dataset(args) print(ds) md = Table('key', 'value') md.extend(ds.properties.items()) print(md.render(condensed=False, tablefmt=None)) print() t = Table('Path', 'Type', 'Rows') for p, type_, r in ds.stats(): t.append([p, type_, r]) print(t.render(condensed=False, tablefmt=None))
def stats(args): """ cldf stats <DATASET> Print basic stats for CLDF dataset <DATASET>, where <DATASET> may be the path to - a CLDF metadata file - a CLDF core data file """ ds = _get_dataset(args) print(ds) md = Table('key', 'value') md.extend(ds.properties.items()) print(md.render(condensed=False, tablefmt=None)) print() t = Table('Path', 'Type', 'Rows') for p, type_, r in ds.stats(): t.append([p, type_, r]) print(t.render(condensed=False, tablefmt=None))
[ "cldf", "stats", "<DATASET", ">" ]
cldf/pycldf
python
https://github.com/cldf/pycldf/blob/636f1eb3ea769394e14ad9e42a83b6096efa9728/src/pycldf/__main__.py#L50-L67
[ "def", "stats", "(", "args", ")", ":", "ds", "=", "_get_dataset", "(", "args", ")", "print", "(", "ds", ")", "md", "=", "Table", "(", "'key'", ",", "'value'", ")", "md", ".", "extend", "(", "ds", ".", "properties", ".", "items", "(", ")", ")", "print", "(", "md", ".", "render", "(", "condensed", "=", "False", ",", "tablefmt", "=", "None", ")", ")", "print", "(", ")", "t", "=", "Table", "(", "'Path'", ",", "'Type'", ",", "'Rows'", ")", "for", "p", ",", "type_", ",", "r", "in", "ds", ".", "stats", "(", ")", ":", "t", ".", "append", "(", "[", "p", ",", "type_", ",", "r", "]", ")", "print", "(", "t", ".", "render", "(", "condensed", "=", "False", ",", "tablefmt", "=", "None", ")", ")" ]
636f1eb3ea769394e14ad9e42a83b6096efa9728
valid
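The `ds.stats()` call above yields (path, type, rows) triples which the command renders as a table; a small sketch consuming them directly, with an assumed metadata path:

from pycldf import Dataset

ds = Dataset.from_metadata('cldf-metadata.json')  # hypothetical path
for path, type_, rows in ds.stats():
    print(path, type_, rows)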
createdb
cldf createdb <DATASET> <SQLITE_DB_PATH> Load CLDF dataset <DATASET> into a SQLite DB, where <DATASET> may be the path to - a CLDF metadata file - a CLDF core data file
src/pycldf/__main__.py
def createdb(args): """ cldf createdb <DATASET> <SQLITE_DB_PATH> Load CLDF dataset <DATASET> into a SQLite DB, where <DATASET> may be the path to - a CLDF metadata file - a CLDF core data file """ if len(args.args) < 2: raise ParserError('not enough arguments') ds = _get_dataset(args) db = Database(ds, fname=args.args[1]) db.write_from_tg() args.log.info('{0} loaded in {1}'.format(ds, db.fname))
def createdb(args): """ cldf createdb <DATASET> <SQLITE_DB_PATH> Load CLDF dataset <DATASET> into a SQLite DB, where <DATASET> may be the path to - a CLDF metadata file - a CLDF core data file """ if len(args.args) < 2: raise ParserError('not enough arguments') ds = _get_dataset(args) db = Database(ds, fname=args.args[1]) db.write_from_tg() args.log.info('{0} loaded in {1}'.format(ds, db.fname))
[ "cldf", "createdb", "<DATASET", ">", "<SQLITE_DB_PATH", ">" ]
cldf/pycldf
python
https://github.com/cldf/pycldf/blob/636f1eb3ea769394e14ad9e42a83b6096efa9728/src/pycldf/__main__.py#L70-L83
[ "def", "createdb", "(", "args", ")", ":", "if", "len", "(", "args", ".", "args", ")", "<", "2", ":", "raise", "ParserError", "(", "'not enough arguments'", ")", "ds", "=", "_get_dataset", "(", "args", ")", "db", "=", "Database", "(", "ds", ",", "fname", "=", "args", ".", "args", "[", "1", "]", ")", "db", ".", "write_from_tg", "(", ")", "args", ".", "log", ".", "info", "(", "'{0} loaded in {1}'", ".", "format", "(", "ds", ",", "db", ".", "fname", ")", ")" ]
636f1eb3ea769394e14ad9e42a83b6096efa9728
valid
dumpdb
cldf dumpdb <DATASET> <SQLITE_DB_PATH> [<METADATA_PATH>]
src/pycldf/__main__.py
def dumpdb(args): """ cldf dumpdb <DATASET> <SQLITE_DB_PATH> [<METADATA_PATH>] """ if len(args.args) < 2: raise ParserError('not enough arguments') # pragma: no cover ds = _get_dataset(args) db = Database(ds, fname=args.args[1]) mdpath = Path(args.args[2]) if len(args.args) > 2 else ds.tablegroup._fname args.log.info('dumped db to {0}'.format(db.to_cldf(mdpath.parent, mdname=mdpath.name)))
def dumpdb(args): """ cldf dumpdb <DATASET> <SQLITE_DB_PATH> [<METADATA_PATH>] """ if len(args.args) < 2: raise ParserError('not enough arguments') # pragma: no cover ds = _get_dataset(args) db = Database(ds, fname=args.args[1]) mdpath = Path(args.args[2]) if len(args.args) > 2 else ds.tablegroup._fname args.log.info('dumped db to {0}'.format(db.to_cldf(mdpath.parent, mdname=mdpath.name)))
[ "cldf", "dumpdb", "<DATASET", ">", "<SQLITE_DB_PATH", ">", "[", "<METADATA_PATH", ">", "]" ]
cldf/pycldf
python
https://github.com/cldf/pycldf/blob/636f1eb3ea769394e14ad9e42a83b6096efa9728/src/pycldf/__main__.py#L86-L95
[ "def", "dumpdb", "(", "args", ")", ":", "if", "len", "(", "args", ".", "args", ")", "<", "2", ":", "raise", "ParserError", "(", "'not enough arguments'", ")", "# pragma: no cover", "ds", "=", "_get_dataset", "(", "args", ")", "db", "=", "Database", "(", "ds", ",", "fname", "=", "args", ".", "args", "[", "1", "]", ")", "mdpath", "=", "Path", "(", "args", ".", "args", "[", "2", "]", ")", "if", "len", "(", "args", ".", "args", ")", ">", "2", "else", "ds", ".", "tablegroup", ".", "_fname", "args", ".", "log", ".", "info", "(", "'dumped db to {0}'", ".", "format", "(", "db", ".", "to_cldf", "(", "mdpath", ".", "parent", ",", "mdname", "=", "mdpath", ".", "name", ")", ")", ")" ]
636f1eb3ea769394e14ad9e42a83b6096efa9728
valid
MessageHandler.description
A user-friendly description of the handler. Returns: :py:class:`str`: The handler's description.
aslack/slack_bot/handler.py
def description(self): """A user-friendly description of the handler. Returns: :py:class:`str`: The handler's description. """ if self._description is None: text = '\n'.join(self.__doc__.splitlines()[1:]).strip() lines = [] for line in map(str.strip, text.splitlines()): if line and lines: lines[-1] = ' '.join((lines[-1], line)) elif line: lines.append(line) else: lines.append('') self._description = '\n'.join(lines) return self._description
def description(self): """A user-friendly description of the handler. Returns: :py:class:`str`: The handler's description. """ if self._description is None: text = '\n'.join(self.__doc__.splitlines()[1:]).strip() lines = [] for line in map(str.strip, text.splitlines()): if line and lines: lines[-1] = ' '.join((lines[-1], line)) elif line: lines.append(line) else: lines.append('') self._description = '\n'.join(lines) return self._description
[ "A", "user", "-", "friendly", "description", "of", "the", "handler", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/slack_bot/handler.py#L22-L40
[ "def", "description", "(", "self", ")", ":", "if", "self", ".", "_description", "is", "None", ":", "text", "=", "'\\n'", ".", "join", "(", "self", ".", "__doc__", ".", "splitlines", "(", ")", "[", "1", ":", "]", ")", ".", "strip", "(", ")", "lines", "=", "[", "]", "for", "line", "in", "map", "(", "str", ".", "strip", ",", "text", ".", "splitlines", "(", ")", ")", ":", "if", "line", "and", "lines", ":", "lines", "[", "-", "1", "]", "=", "' '", ".", "join", "(", "(", "lines", "[", "-", "1", "]", ",", "line", ")", ")", "elif", "line", ":", "lines", ".", "append", "(", "line", ")", "else", ":", "lines", ".", "append", "(", "''", ")", "self", ".", "_description", "=", "'\\n'", ".", "join", "(", "lines", ")", "return", "self", ".", "_description" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
valid
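The property above reflows a docstring by joining wrapped lines with spaces and keeping blank lines as paragraph breaks; a standalone sketch of that logic (the helper name is made up):

def reflow(doc):
    # Skip the summary line, then join continuation lines with spaces;
    # blank lines are preserved as paragraph separators.
    text = '\n'.join(doc.splitlines()[1:]).strip()
    lines = []
    for line in map(str.strip, text.splitlines()):
        if line and lines:
            lines[-1] = ' '.join((lines[-1], line))
        elif line:
            lines.append(line)
        else:
            lines.append('')
    return '\n'.join(lines)

print(reflow('''Summary line (skipped).

    This long description is wrapped
    across source lines but joined
    into one line for display.
    '''))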
Parselet.from_jsonfile
Create a Parselet instance from a file containing the Parsley script as a JSON object >>> import parslepy >>> with open('parselet.json') as fp: ... parslepy.Parselet.from_jsonfile(fp) ... <parslepy.base.Parselet object at 0x2014e50> :param file fp: an open file-like pointer containing the Parsley script :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor
parslepy/base.py
def from_jsonfile(cls, fp, selector_handler=None, strict=False, debug=False): """ Create a Parselet instance from a file containing the Parsley script as a JSON object >>> import parslepy >>> with open('parselet.json') as fp: ... parslepy.Parselet.from_jsonfile(fp) ... <parslepy.base.Parselet object at 0x2014e50> :param file fp: an open file-like pointer containing the Parsley script :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor """ return cls._from_jsonlines(fp, selector_handler=selector_handler, strict=strict, debug=debug)
def from_jsonfile(cls, fp, selector_handler=None, strict=False, debug=False): """ Create a Parselet instance from a file containing the Parsley script as a JSON object >>> import parslepy >>> with open('parselet.json') as fp: ... parslepy.Parselet.from_jsonfile(fp) ... <parslepy.base.Parselet object at 0x2014e50> :param file fp: an open file-like pointer containing the Parsley script :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor """ return cls._from_jsonlines(fp, selector_handler=selector_handler, strict=strict, debug=debug)
[ "Create", "a", "Parselet", "instance", "from", "a", "file", "containing", "the", "Parsley", "script", "as", "a", "JSON", "object" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L182-L200
[ "def", "from_jsonfile", "(", "cls", ",", "fp", ",", "selector_handler", "=", "None", ",", "strict", "=", "False", ",", "debug", "=", "False", ")", ":", "return", "cls", ".", "_from_jsonlines", "(", "fp", ",", "selector_handler", "=", "selector_handler", ",", "strict", "=", "strict", ",", "debug", "=", "debug", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
Parselet.from_yamlfile
Create a Parselet instance from a file containing the Parsley script as a YAML object >>> import parslepy >>> with open('parselet.yml') as fp: ... parslepy.Parselet.from_yamlfile(fp) ... <parslepy.base.Parselet object at 0x2014e50> :param file fp: an open file-like pointer containing the Parsley script :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor
parslepy/base.py
def from_yamlfile(cls, fp, selector_handler=None, strict=False, debug=False): """ Create a Parselet instance from a file containing the Parsley script as a YAML object >>> import parslepy >>> with open('parselet.yml') as fp: ... parslepy.Parselet.from_yamlfile(fp) ... <parslepy.base.Parselet object at 0x2014e50> :param file fp: an open file-like pointer containing the Parsley script :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor """ return cls.from_yamlstring(fp.read(), selector_handler=selector_handler, strict=strict, debug=debug)
def from_yamlfile(cls, fp, selector_handler=None, strict=False, debug=False): """ Create a Parselet instance from a file containing the Parsley script as a YAML object >>> import parslepy >>> with open('parselet.yml') as fp: ... parslepy.Parselet.from_yamlfile(fp) ... <parslepy.base.Parselet object at 0x2014e50> :param file fp: an open file-like pointer containing the Parsley script :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor """ return cls.from_yamlstring(fp.read(), selector_handler=selector_handler, strict=strict, debug=debug)
[ "Create", "a", "Parselet", "instance", "from", "a", "file", "containing", "the", "Parsley", "script", "as", "a", "YAML", "object" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L203-L220
[ "def", "from_yamlfile", "(", "cls", ",", "fp", ",", "selector_handler", "=", "None", ",", "strict", "=", "False", ",", "debug", "=", "False", ")", ":", "return", "cls", ".", "from_yamlstring", "(", "fp", ".", "read", "(", ")", ",", "selector_handler", "=", "selector_handler", ",", "strict", "=", "strict", ",", "debug", "=", "debug", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
Parselet.from_yamlstring
Create a Parselet instance from s (str) containing the Parsley script as YAML >>> import parslepy >>> parsley_string = '''--- title: h1 link: a @href ''' >>> p = parslepy.Parselet.from_yamlstring(parsley_string) >>> type(p) <class 'parslepy.base.Parselet'> >>> :param string s: a Parsley script as a YAML string :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor
parslepy/base.py
def from_yamlstring(cls, s, selector_handler=None, strict=False, debug=False): """ Create a Parselet instance from s (str) containing the Parsley script as YAML >>> import parslepy >>> parsley_string = '''--- title: h1 link: a @href ''' >>> p = parslepy.Parselet.from_yamlstring(parsley_string) >>> type(p) <class 'parslepy.base.Parselet'> >>> :param string s: a Parsley script as a YAML string :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor """ import yaml return cls(yaml.load(s), selector_handler=selector_handler, strict=strict, debug=debug)
def from_yamlstring(cls, s, selector_handler=None, strict=False, debug=False): """ Create a Parselet instance from s (str) containing the Parsley script as YAML >>> import parslepy >>> parsley_string = '''--- title: h1 link: a @href ''' >>> p = parslepy.Parselet.from_yamlstring(parsley_string) >>> type(p) <class 'parslepy.base.Parselet'> >>> :param string s: a Parsley script as a YAML string :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor """ import yaml return cls(yaml.load(s), selector_handler=selector_handler, strict=strict, debug=debug)
[ "Create", "a", "Parselet", "instance", "from", "s", "(", "str", ")", "containing", "the", "Parsley", "script", "as", "YAML" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L223-L245
[ "def", "from_yamlstring", "(", "cls", ",", "s", ",", "selector_handler", "=", "None", ",", "strict", "=", "False", ",", "debug", "=", "False", ")", ":", "import", "yaml", "return", "cls", "(", "yaml", ".", "load", "(", "s", ")", ",", "selector_handler", "=", "selector_handler", ",", "strict", "=", "strict", ",", "debug", "=", "debug", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
Parselet.from_jsonstring
Create a Parselet instance from s (str) containing the Parsley script as JSON >>> import parslepy >>> parsley_string = '{ "title": "h1", "link": "a @href"}' >>> p = parslepy.Parselet.from_jsonstring(parsley_string) >>> type(p) <class 'parslepy.base.Parselet'> >>> :param string s: a Parsley script as a JSON string :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor
parslepy/base.py
def from_jsonstring(cls, s, selector_handler=None, strict=False, debug=False): """ Create a Parselet instance from s (str) containing the Parsley script as JSON >>> import parslepy >>> parsley_string = '{ "title": "h1", "link": "a @href"}' >>> p = parslepy.Parselet.from_jsonstring(parsley_string) >>> type(p) <class 'parslepy.base.Parselet'> >>> :param string s: a Parsley script as a JSON string :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor """ return cls._from_jsonlines(s.split("\n"), selector_handler=selector_handler, strict=strict, debug=debug)
def from_jsonstring(cls, s, selector_handler=None, strict=False, debug=False): """ Create a Parselet instance from s (str) containing the Parsley script as JSON >>> import parslepy >>> parsley_string = '{ "title": "h1", "link": "a @href"}' >>> p = parslepy.Parselet.from_jsonstring(parsley_string) >>> type(p) <class 'parslepy.base.Parselet'> >>> :param string s: a Parsley script as a JSON string :rtype: :class:`.Parselet` Other arguments: same as for :class:`.Parselet` constructor """ return cls._from_jsonlines(s.split("\n"), selector_handler=selector_handler, strict=strict, debug=debug)
[ "Create", "a", "Parselet", "instance", "from", "s", "(", "str", ")", "containing", "the", "Parsley", "script", "as", "JSON" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L248-L267
[ "def", "from_jsonstring", "(", "cls", ",", "s", ",", "selector_handler", "=", "None", ",", "strict", "=", "False", ",", "debug", "=", "False", ")", ":", "return", "cls", ".", "_from_jsonlines", "(", "s", ".", "split", "(", "\"\\n\"", ")", ",", "selector_handler", "=", "selector_handler", ",", "strict", "=", "strict", ",", "debug", "=", "debug", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
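A quick sketch exercising the string constructors from the four records above; the YAML variant requires PyYAML to be installed:

import parslepy

p_json = parslepy.Parselet.from_jsonstring('{"title": "h1", "link": "a @href"}')
p_yaml = parslepy.Parselet.from_yamlstring('---\ntitle: h1\nlink: a @href\n')
# Both produce equivalent Parselet objects; the file-based variants
# (from_jsonfile/from_yamlfile) take an open file object instead.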
Parselet._from_jsonlines
Interpret input lines as a JSON Parsley script. Python-style comment lines are skipped.
parslepy/base.py
def _from_jsonlines(cls, lines, selector_handler=None, strict=False, debug=False): """ Interpret input lines as a JSON Parsley script. Python-style comment lines are skipped. """ return cls(json.loads( "\n".join([l for l in lines if not cls.REGEX_COMMENT_LINE.match(l)]) ), selector_handler=selector_handler, strict=strict, debug=debug)
def _from_jsonlines(cls, lines, selector_handler=None, strict=False, debug=False): """ Interpret input lines as a JSON Parsley script. Python-style comment lines are skipped. """ return cls(json.loads( "\n".join([l for l in lines if not cls.REGEX_COMMENT_LINE.match(l)]) ), selector_handler=selector_handler, strict=strict, debug=debug)
[ "Interpret", "input", "lines", "as", "a", "JSON", "Parsley", "script", ".", "Python", "-", "style", "comment", "lines", "are", "skipped", "." ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L270-L278
[ "def", "_from_jsonlines", "(", "cls", ",", "lines", ",", "selector_handler", "=", "None", ",", "strict", "=", "False", ",", "debug", "=", "False", ")", ":", "return", "cls", "(", "json", ".", "loads", "(", "\"\\n\"", ".", "join", "(", "[", "l", "for", "l", "in", "lines", "if", "not", "cls", ".", "REGEX_COMMENT_LINE", ".", "match", "(", "l", ")", "]", ")", ")", ",", "selector_handler", "=", "selector_handler", ",", "strict", "=", "strict", ",", "debug", "=", "debug", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
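Because `_from_jsonlines` filters lines matching `REGEX_COMMENT_LINE` before calling json.loads, a JSON parselet may carry Python-style comments; a sketch assuming that regex matches lines starting with '#':

import parslepy

script = '''# grab the main heading
{"title": "h1"}'''
p = parslepy.Parselet.from_jsonstring(script)  # the comment line is dropped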
Parselet.parse
Parse an HTML or XML document and return the extracted object following the Parsley rules given at instantiation. :param fp: file-like object containing an HTML or XML document, or URL or filename :param parser: *lxml.etree._FeedParser* instance (optional); defaults to lxml.etree.HTMLParser() :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python :class:`dict` object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey` To parse from a string, use the :meth:`~base.Parselet.parse_fromstring` method instead. Note that the fp parameter is passed directly to `lxml.etree.parse <http://lxml.de/api/lxml.etree-module.html#parse>`_, so you can also give it a URL, and lxml will download it for you. (Also see `<http://lxml.de/tutorial.html#the-parse-function>`_.)
parslepy/base.py
def parse(self, fp, parser=None, context=None): """ Parse an HTML or XML document and return the extracted object following the Parsley rules given at instantiation. :param fp: file-like object containing an HTML or XML document, or URL or filename :param parser: *lxml.etree._FeedParser* instance (optional); defaults to lxml.etree.HTMLParser() :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python :class:`dict` object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey` To parse from a string, use the :meth:`~base.Parselet.parse_fromstring` method instead. Note that the fp parameter is passed directly to `lxml.etree.parse <http://lxml.de/api/lxml.etree-module.html#parse>`_, so you can also give it a URL, and lxml will download it for you. (Also see `<http://lxml.de/tutorial.html#the-parse-function>`_.) """ if parser is None: parser = lxml.etree.HTMLParser() doc = lxml.etree.parse(fp, parser=parser).getroot() return self.extract(doc, context=context)
def parse(self, fp, parser=None, context=None): """ Parse an HTML or XML document and return the extracted object following the Parsley rules given at instantiation. :param fp: file-like object containing an HTML or XML document, or URL or filename :param parser: *lxml.etree._FeedParser* instance (optional); defaults to lxml.etree.HTMLParser() :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python :class:`dict` object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey` To parse from a string, use the :meth:`~base.Parselet.parse_fromstring` method instead. Note that the fp parameter is passed directly to `lxml.etree.parse <http://lxml.de/api/lxml.etree-module.html#parse>`_, so you can also give it a URL, and lxml will download it for you. (Also see `<http://lxml.de/tutorial.html#the-parse-function>`_.) """ if parser is None: parser = lxml.etree.HTMLParser() doc = lxml.etree.parse(fp, parser=parser).getroot() return self.extract(doc, context=context)
[ "Parse", "an", "HTML", "or", "XML", "document", "and", "return", "the", "extacted", "object", "following", "the", "Parsley", "rules", "give", "at", "instantiation", "." ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L280-L302
[ "def", "parse", "(", "self", ",", "fp", ",", "parser", "=", "None", ",", "context", "=", "None", ")", ":", "if", "parser", "is", "None", ":", "parser", "=", "lxml", ".", "etree", ".", "HTMLParser", "(", ")", "doc", "=", "lxml", ".", "etree", ".", "parse", "(", "fp", ",", "parser", "=", "parser", ")", ".", "getroot", "(", ")", "return", "self", ".", "extract", "(", "doc", ",", "context", "=", "context", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
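Since `parse()` hands `fp` straight to lxml.etree.parse, a file object, filename, or URL all work; a sketch with illustrative inputs:

import parslepy

p = parslepy.Parselet({'title': 'h1'})
with open('page.html') as fp:              # hypothetical local file
    print(p.parse(fp))
print(p.parse('http://example.com/'))      # lxml downloads the URL itself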
Parselet.parse_fromstring
Parse an HTML or XML document and return the extracted object following the Parsley rules given at instantiation. :param string s: an HTML or XML document as a string :param parser: *lxml.etree._FeedParser* instance (optional); defaults to lxml.etree.HTMLParser() :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python :class:`dict` object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey`
parslepy/base.py
def parse_fromstring(self, s, parser=None, context=None): """ Parse an HTML or XML document and return the extracted object following the Parsley rules given at instantiation. :param string s: an HTML or XML document as a string :param parser: *lxml.etree._FeedParser* instance (optional); defaults to lxml.etree.HTMLParser() :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python :class:`dict` object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey` """ if parser is None: parser = lxml.etree.HTMLParser() doc = lxml.etree.fromstring(s, parser=parser) return self.extract(doc, context=context)
def parse_fromstring(self, s, parser=None, context=None): """ Parse an HTML or XML document and return the extracted object following the Parsley rules given at instantiation. :param string s: an HTML or XML document as a string :param parser: *lxml.etree._FeedParser* instance (optional); defaults to lxml.etree.HTMLParser() :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python :class:`dict` object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey` """ if parser is None: parser = lxml.etree.HTMLParser() doc = lxml.etree.fromstring(s, parser=parser) return self.extract(doc, context=context)
[ "Parse", "an", "HTML", "or", "XML", "document", "and", "return", "the", "extacted", "object", "following", "the", "Parsley", "rules", "give", "at", "instantiation", "." ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L304-L319
[ "def", "parse_fromstring", "(", "self", ",", "s", ",", "parser", "=", "None", ",", "context", "=", "None", ")", ":", "if", "parser", "is", "None", ":", "parser", "=", "lxml", ".", "etree", ".", "HTMLParser", "(", ")", "doc", "=", "lxml", ".", "etree", ".", "fromstring", "(", "s", ",", "parser", "=", "parser", ")", "return", "self", ".", "extract", "(", "doc", ",", "context", "=", "context", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
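A sketch of overriding the default HTML parser when the input is XML; the exact output shape depends on the selector handler in use:

import lxml.etree
import parslepy

p = parslepy.Parselet({'name': 'item'})
print(p.parse_fromstring('<root><item>x</item></root>',
                         parser=lxml.etree.XMLParser()))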
Parselet.compile
Build the abstract Parsley tree starting from the root node (recursive)
parslepy/base.py
def compile(self): """ Build the abstract Parsley tree starting from the root node (recursive) """ if not isinstance(self.parselet, dict): raise ValueError("Parselet must be a dict of some sort. Or use .from_jsonstring(), " \ ".from_jsonfile(), .from_yamlstring(), or .from_yamlfile()") self.parselet_tree = self._compile(self.parselet)
def compile(self): """ Build the abstract Parsley tree starting from the root node (recursive) """ if not isinstance(self.parselet, dict): raise ValueError("Parselet must be a dict of some sort. Or use .from_jsonstring(), " \ ".from_jsonfile(), .from_yamlstring(), or .from_yamlfile()") self.parselet_tree = self._compile(self.parselet)
[ "Build", "the", "abstract", "Parsley", "tree", "starting", "from", "the", "root", "node", "(", "recursive", ")" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L321-L329
[ "def", "compile", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "parselet", ",", "dict", ")", ":", "raise", "ValueError", "(", "\"Parselet must be a dict of some sort. Or use .from_jsonstring(), \"", "\".from_jsonfile(), .from_yamlstring(), or .from_yamlfile()\"", ")", "self", ".", "parselet_tree", "=", "self", ".", "_compile", "(", "self", ".", "parselet", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
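A sketch of the guard above, assuming the constructor compiles the parselet immediately:

import parslepy

try:
    parslepy.Parselet('h1')   # not a dict -> rejected at compile time
except ValueError as err:
    print(err)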
Parselet._compile
Build part of the abstract Parsley extraction tree Arguments: parselet_node (dict) -- part of the Parsley tree to compile (can be the root dict/node) level (int) -- current recursion depth (used for debug)
parslepy/base.py
def _compile(self, parselet_node, level=0): """ Build part of the abstract Parsley extraction tree Arguments: parselet_node (dict) -- part of the Parsley tree to compile (can be the root dict/node) level (int) -- current recursion depth (used for debug) """ if self.DEBUG: debug_offset = "".join([" " for x in range(level)]) if self.DEBUG: print(debug_offset, "%s::compile(%s)" % ( self.__class__.__name__, parselet_node)) if isinstance(parselet_node, dict): parselet_tree = ParsleyNode() for k, v in list(parselet_node.items()): # we parse the key raw elements but without much # interpretation (which is done by the SelectorHandler) try: m = self.REGEX_PARSELET_KEY.match(k) if not m: if self.DEBUG: print(debug_offset, "could not parse key", k) raise InvalidKeySyntax(k) except: raise InvalidKeySyntax("Key %s is not valid" % k) key = m.group('key') # by default, fields are required key_required = True operator = m.group('operator') if operator == '?': key_required = False # FIXME: "!" operator not supported (complete array) scope = m.group('scope') # example: get list of H3 tags # { "titles": ["h3"] } # FIXME: should we support multiple selectors in list? # e.g. { "titles": ["h1", "h2", "h3", "h4"] } if isinstance(v, (list, tuple)): v = v[0] iterate = True else: iterate = False # keys in the abstract Parsley trees are of type `ParsleyContext` try: parsley_context = ParsleyContext( key, operator=operator, required=key_required, scope=self.selector_handler.make(scope) if scope else None, iterate=iterate) except SyntaxError: if self.DEBUG: print("Invalid scope:", k, scope) raise if self.DEBUG: print(debug_offset, "current context:", parsley_context) # go deeper in the Parsley tree... try: child_tree = self._compile(v, level=level+1) except SyntaxError: if self.DEBUG: print("Invalid value: ", v) raise except: raise if self.DEBUG: print(debug_offset, "child tree:", child_tree) parselet_tree[parsley_context] = child_tree return parselet_tree # a string leaf should match some kind of selector, # let the selector handler deal with it elif isstr(parselet_node): return self.selector_handler.make(parselet_node) else: raise ValueError( "Unsupported type(%s) for Parselet node <%s>" % ( type(parselet_node), parselet_node))
def _compile(self, parselet_node, level=0): """ Build part of the abstract Parsley extraction tree Arguments: parselet_node (dict) -- part of the Parsley tree to compile (can be the root dict/node) level (int) -- current recursion depth (used for debug) """ if self.DEBUG: debug_offset = "".join([" " for x in range(level)]) if self.DEBUG: print(debug_offset, "%s::compile(%s)" % ( self.__class__.__name__, parselet_node)) if isinstance(parselet_node, dict): parselet_tree = ParsleyNode() for k, v in list(parselet_node.items()): # we parse the key raw elements but without much # interpretation (which is done by the SelectorHandler) try: m = self.REGEX_PARSELET_KEY.match(k) if not m: if self.DEBUG: print(debug_offset, "could not parse key", k) raise InvalidKeySyntax(k) except: raise InvalidKeySyntax("Key %s is not valid" % k) key = m.group('key') # by default, fields are required key_required = True operator = m.group('operator') if operator == '?': key_required = False # FIXME: "!" operator not supported (complete array) scope = m.group('scope') # example: get list of H3 tags # { "titles": ["h3"] } # FIXME: should we support multiple selectors in list? # e.g. { "titles": ["h1", "h2", "h3", "h4"] } if isinstance(v, (list, tuple)): v = v[0] iterate = True else: iterate = False # keys in the abstract Parsley trees are of type `ParsleyContext` try: parsley_context = ParsleyContext( key, operator=operator, required=key_required, scope=self.selector_handler.make(scope) if scope else None, iterate=iterate) except SyntaxError: if self.DEBUG: print("Invalid scope:", k, scope) raise if self.DEBUG: print(debug_offset, "current context:", parsley_context) # go deeper in the Parsley tree... try: child_tree = self._compile(v, level=level+1) except SyntaxError: if self.DEBUG: print("Invalid value: ", v) raise except: raise if self.DEBUG: print(debug_offset, "child tree:", child_tree) parselet_tree[parsley_context] = child_tree return parselet_tree # a string leaf should match some kind of selector, # let the selector handler deal with it elif isstr(parselet_node): return self.selector_handler.make(parselet_node) else: raise ValueError( "Unsupported type(%s) for Parselet node <%s>" % ( type(parselet_node), parselet_node))
[ "Build", "part", "of", "the", "abstract", "Parsley", "extraction", "tree" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L338-L429
[ "def", "_compile", "(", "self", ",", "parselet_node", ",", "level", "=", "0", ")", ":", "if", "self", ".", "DEBUG", ":", "debug_offset", "=", "\"\"", ".", "join", "(", "[", "\" \"", "for", "x", "in", "range", "(", "level", ")", "]", ")", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "\"%s::compile(%s)\"", "%", "(", "self", ".", "__class__", ".", "__name__", ",", "parselet_node", ")", ")", "if", "isinstance", "(", "parselet_node", ",", "dict", ")", ":", "parselet_tree", "=", "ParsleyNode", "(", ")", "for", "k", ",", "v", "in", "list", "(", "parselet_node", ".", "items", "(", ")", ")", ":", "# we parse the key raw elements but without much", "# interpretation (which is done by the SelectorHandler)", "try", ":", "m", "=", "self", ".", "REGEX_PARSELET_KEY", ".", "match", "(", "k", ")", "if", "not", "m", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "\"could not parse key\"", ",", "k", ")", "raise", "InvalidKeySyntax", "(", "k", ")", "except", ":", "raise", "InvalidKeySyntax", "(", "\"Key %s is not valid\"", "%", "k", ")", "key", "=", "m", ".", "group", "(", "'key'", ")", "# by default, fields are required", "key_required", "=", "True", "operator", "=", "m", ".", "group", "(", "'operator'", ")", "if", "operator", "==", "'?'", ":", "key_required", "=", "False", "# FIXME: \"!\" operator not supported (complete array)", "scope", "=", "m", ".", "group", "(", "'scope'", ")", "# example: get list of H3 tags", "# { \"titles\": [\"h3\"] }", "# FIXME: should we support multiple selectors in list?", "# e.g. { \"titles\": [\"h1\", \"h2\", \"h3\", \"h4\"] }", "if", "isinstance", "(", "v", ",", "(", "list", ",", "tuple", ")", ")", ":", "v", "=", "v", "[", "0", "]", "iterate", "=", "True", "else", ":", "iterate", "=", "False", "# keys in the abstract Parsley trees are of type `ParsleyContext`", "try", ":", "parsley_context", "=", "ParsleyContext", "(", "key", ",", "operator", "=", "operator", ",", "required", "=", "key_required", ",", "scope", "=", "self", ".", "selector_handler", ".", "make", "(", "scope", ")", "if", "scope", "else", "None", ",", "iterate", "=", "iterate", ")", "except", "SyntaxError", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "\"Invalid scope:\"", ",", "k", ",", "scope", ")", "raise", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "\"current context:\"", ",", "parsley_context", ")", "# go deeper in the Parsley tree...", "try", ":", "child_tree", "=", "self", ".", "_compile", "(", "v", ",", "level", "=", "level", "+", "1", ")", "except", "SyntaxError", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "\"Invalid value: \"", ",", "v", ")", "raise", "except", ":", "raise", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "\"child tree:\"", ",", "child_tree", ")", "parselet_tree", "[", "parsley_context", "]", "=", "child_tree", "return", "parselet_tree", "# a string leaf should match some kind of selector,", "# let the selector handler deal with it", "elif", "isstr", "(", "parselet_node", ")", ":", "return", "self", ".", "selector_handler", ".", "make", "(", "parselet_node", ")", "else", ":", "raise", "ValueError", "(", "\"Unsupported type(%s) for Parselet node <%s>\"", "%", "(", "type", "(", "parselet_node", ")", ",", "parselet_node", ")", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
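A sketch of the key grammar `_compile` parses: a one-element list requests iteration, 'key(scope)' narrows extraction to a sub-tree, and a trailing '?' makes a key optional:

import parslepy

rules = {
    'titles': ['h3'],              # iterate: collect every h3
    'first(ul li.newsitem)': {     # scoped: extract inside the first match
        'text': 'a',
        'url?': 'a @href',         # optional: no error when absent
    },
}
p = parslepy.Parselet(rules)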
Parselet.extract
Extract values as a dict object following the structure of the Parsley script (recursive) :param document: lxml-parsed document :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python *dict* object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey` >>> import lxml.etree >>> import parslepy >>> html = ''' ... <!DOCTYPE html> ... <html> ... <head> ... <title>Sample document to test parslepy</title> ... <meta http-equiv="content-type" content="text/html;charset=utf-8" /> ... </head> ... <body> ... <h1 id="main">What&rsquo;s new</h1> ... <ul> ... <li class="newsitem"><a href="/article-001.html">This is the first article</a></li> ... <li class="newsitem"><a href="/article-002.html">A second report on something</a></li> ... <li class="newsitem"><a href="/article-003.html">Python is great!</a> <span class="fresh">New!</span></li> ... </ul> ... </body> ... </html> ... ''' >>> html_parser = lxml.etree.HTMLParser() >>> doc = lxml.etree.fromstring(html, parser=html_parser) >>> doc <Element html at 0x7f5fb1fce9b0> >>> rules = { ... "headingcss": "#main", ... "headingxpath": "//h1[@id='main']" ... } >>> p = parslepy.Parselet(rules) >>> p.extract(doc) {'headingcss': u'What\u2019s new', 'headingxpath': u'What\u2019s new'}
parslepy/base.py
def extract(self, document, context=None): """ Extract values as a dict object following the structure of the Parsley script (recursive) :param document: lxml-parsed document :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python *dict* object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey` >>> import lxml.etree >>> import parslepy >>> html = ''' ... <!DOCTYPE html> ... <html> ... <head> ... <title>Sample document to test parslepy</title> ... <meta http-equiv="content-type" content="text/html;charset=utf-8" /> ... </head> ... <body> ... <h1 id="main">What&rsquo;s new</h1> ... <ul> ... <li class="newsitem"><a href="/article-001.html">This is the first article</a></li> ... <li class="newsitem"><a href="/article-002.html">A second report on something</a></li> ... <li class="newsitem"><a href="/article-003.html">Python is great!</a> <span class="fresh">New!</span></li> ... </ul> ... </body> ... </html> ... ''' >>> html_parser = lxml.etree.HTMLParser() >>> doc = lxml.etree.fromstring(html, parser=html_parser) >>> doc <Element html at 0x7f5fb1fce9b0> >>> rules = { ... "headingcss": "#main", ... "headingxpath": "//h1[@id='main']" ... } >>> p = parslepy.Parselet(rules) >>> p.extract(doc) {'headingcss': u'What\u2019s new', 'headingxpath': u'What\u2019s new'} """ if context: self.selector_handler.context = context return self._extract(self.parselet_tree, document)
def extract(self, document, context=None): """ Extract values as a dict object following the structure of the Parsley script (recursive) :param document: lxml-parsed document :param context: user-supplied context that will be passed to custom XPath extensions (as first argument) :rtype: Python *dict* object with mapped extracted content :raises: :class:`.NonMatchingNonOptionalKey` >>> import lxml.etree >>> import parslepy >>> html = ''' ... <!DOCTYPE html> ... <html> ... <head> ... <title>Sample document to test parslepy</title> ... <meta http-equiv="content-type" content="text/html;charset=utf-8" /> ... </head> ... <body> ... <h1 id="main">What&rsquo;s new</h1> ... <ul> ... <li class="newsitem"><a href="/article-001.html">This is the first article</a></li> ... <li class="newsitem"><a href="/article-002.html">A second report on something</a></li> ... <li class="newsitem"><a href="/article-003.html">Python is great!</a> <span class="fresh">New!</span></li> ... </ul> ... </body> ... </html> ... ''' >>> html_parser = lxml.etree.HTMLParser() >>> doc = lxml.etree.fromstring(html, parser=html_parser) >>> doc <Element html at 0x7f5fb1fce9b0> >>> rules = { ... "headingcss": "#main", ... "headingxpath": "//h1[@id='main']" ... } >>> p = parslepy.Parselet(rules) >>> p.extract(doc) {'headingcss': u'What\u2019s new', 'headingxpath': u'What\u2019s new'} """ if context: self.selector_handler.context = context return self._extract(self.parselet_tree, document)
[ "Extract", "values", "as", "a", "dict", "object", "following", "the", "structure", "of", "the", "Parsley", "script", "(", "recursive", ")" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L431-L475
[ "def", "extract", "(", "self", ",", "document", ",", "context", "=", "None", ")", ":", "if", "context", ":", "self", ".", "selector_handler", ".", "context", "=", "context", "return", "self", ".", "_extract", "(", "self", ".", "parselet_tree", ",", "document", ")" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
Parselet._extract
Extract values at this document node level using the parselet_node instructions: - go deeper in tree - or call selector handler in case of a terminal selector leaf
parslepy/base.py
def _extract(self, parselet_node, document, level=0): """ Extract values at this document node level using the parselet_node instructions: - go deeper in tree - or call selector handler in case of a terminal selector leaf """ if self.DEBUG: debug_offset = "".join([" " for x in range(level)]) # we must go deeper in the Parsley tree if isinstance(parselet_node, ParsleyNode): # default output output = {} # process all children for ctx, v in list(parselet_node.items()): if self.DEBUG: print(debug_offset, "context:", ctx, v) extracted=None try: # scoped-extraction: # extraction should be done deeper in the document tree if ctx.scope: extracted = [] selected = self.selector_handler.select(document, ctx.scope) if selected: for i, elem in enumerate(selected, start=1): parse_result = self._extract(v, elem, level=level+1) if isinstance(parse_result, (list, tuple)): extracted.extend(parse_result) else: extracted.append(parse_result) # if we're not in an array, # we only care about the first iteration if not ctx.iterate: break if self.DEBUG: print(debug_offset, "parsed %d elements in scope (%s)" % (i, ctx.scope)) # local extraction else: extracted = self._extract(v, document, level=level+1) except NonMatchingNonOptionalKey as e: if self.DEBUG: print(debug_offset, str(e)) if not ctx.required or not self.STRICT_MODE: output[ctx.key] = {} else: raise except Exception as e: if self.DEBUG: print(str(e)) raise # replace empty-list result when not looping by empty dict if ( isinstance(extracted, list) and not extracted and not ctx.iterate): extracted = {} # keep only the first element if we're not in an array if self.KEEP_ONLY_FIRST_ELEMENT_IF_LIST: try: if ( isinstance(extracted, list) and extracted and not ctx.iterate): if self.DEBUG: print(debug_offset, "keep only 1st element") extracted = extracted[0] except Exception as e: if self.DEBUG: print(str(e)) print(debug_offset, "error getting first element") # extraction for a required key gave nothing if ( self.STRICT_MODE and ctx.required and extracted is None): raise NonMatchingNonOptionalKey( 'key "%s" is required but yield nothing\nCurrent path: %s/(%s)\n' % ( ctx.key, document.getroottree().getpath(document),v ) ) # special key to extract a selector-defined level deeper # but still output at same level # this can be useful for breaking up long selectors # or when you need to mix XPath and CSS selectors # e.g. # { # "something(#content div.main)": { # "--(.//div[re:test(@class, 'style\d{3,6}')])": { # "title": "h1", # "subtitle": "h2" # } # } # } # if ctx.key == self.SPECIAL_LEVEL_KEY: if isinstance(extracted, dict): output.update(extracted) elif isinstance(extracted, list): if extracted: raise RuntimeError( "could not merge non-empty list at higher level") else: #empty list, dont bother? pass else: # required keys are handled above if extracted is not None: output[ctx.key] = extracted else: # do not add this optional key/value pair in the output pass return output # a leaf/Selector node elif isinstance(parselet_node, Selector): return self.selector_handler.extract(document, parselet_node) else: # FIXME: can this happen? # if selector handler returned None at compile time, # probably yes pass
def _extract(self, parselet_node, document, level=0): """ Extract values at this document node level using the parselet_node instructions: - go deeper in tree - or call selector handler in case of a terminal selector leaf """ if self.DEBUG: debug_offset = "".join([" " for x in range(level)]) # we must go deeper in the Parsley tree if isinstance(parselet_node, ParsleyNode): # default output output = {} # process all children for ctx, v in list(parselet_node.items()): if self.DEBUG: print(debug_offset, "context:", ctx, v) extracted=None try: # scoped-extraction: # extraction should be done deeper in the document tree if ctx.scope: extracted = [] selected = self.selector_handler.select(document, ctx.scope) if selected: for i, elem in enumerate(selected, start=1): parse_result = self._extract(v, elem, level=level+1) if isinstance(parse_result, (list, tuple)): extracted.extend(parse_result) else: extracted.append(parse_result) # if we're not in an array, # we only care about the first iteration if not ctx.iterate: break if self.DEBUG: print(debug_offset, "parsed %d elements in scope (%s)" % (i, ctx.scope)) # local extraction else: extracted = self._extract(v, document, level=level+1) except NonMatchingNonOptionalKey as e: if self.DEBUG: print(debug_offset, str(e)) if not ctx.required or not self.STRICT_MODE: output[ctx.key] = {} else: raise except Exception as e: if self.DEBUG: print(str(e)) raise # replace empty-list result when not looping by empty dict if ( isinstance(extracted, list) and not extracted and not ctx.iterate): extracted = {} # keep only the first element if we're not in an array if self.KEEP_ONLY_FIRST_ELEMENT_IF_LIST: try: if ( isinstance(extracted, list) and extracted and not ctx.iterate): if self.DEBUG: print(debug_offset, "keep only 1st element") extracted = extracted[0] except Exception as e: if self.DEBUG: print(str(e)) print(debug_offset, "error getting first element") # extraction for a required key gave nothing if ( self.STRICT_MODE and ctx.required and extracted is None): raise NonMatchingNonOptionalKey( 'key "%s" is required but yield nothing\nCurrent path: %s/(%s)\n' % ( ctx.key, document.getroottree().getpath(document),v ) ) # special key to extract a selector-defined level deeper # but still output at same level # this can be useful for breaking up long selectors # or when you need to mix XPath and CSS selectors # e.g. # { # "something(#content div.main)": { # "--(.//div[re:test(@class, 'style\d{3,6}')])": { # "title": "h1", # "subtitle": "h2" # } # } # } # if ctx.key == self.SPECIAL_LEVEL_KEY: if isinstance(extracted, dict): output.update(extracted) elif isinstance(extracted, list): if extracted: raise RuntimeError( "could not merge non-empty list at higher level") else: #empty list, dont bother? pass else: # required keys are handled above if extracted is not None: output[ctx.key] = extracted else: # do not add this optional key/value pair in the output pass return output # a leaf/Selector node elif isinstance(parselet_node, Selector): return self.selector_handler.extract(document, parselet_node) else: # FIXME: can this happen? # if selector handler returned None at compile time, # probably yes pass
[ "Extract", "values", "at", "this", "document", "node", "level", "using", "the", "parselet_node", "instructions", ":", "-", "go", "deeper", "in", "tree", "-", "or", "call", "selector", "handler", "in", "case", "of", "a", "terminal", "selector", "leaf" ]
redapple/parslepy
python
https://github.com/redapple/parslepy/blob/a8bc4c0592824459629018c8f4c6ae3dad6cc3cc/parslepy/base.py#L477-L614
[ "def", "_extract", "(", "self", ",", "parselet_node", ",", "document", ",", "level", "=", "0", ")", ":", "if", "self", ".", "DEBUG", ":", "debug_offset", "=", "\"\"", ".", "join", "(", "[", "\" \"", "for", "x", "in", "range", "(", "level", ")", "]", ")", "# we must go deeper in the Parsley tree", "if", "isinstance", "(", "parselet_node", ",", "ParsleyNode", ")", ":", "# default output", "output", "=", "{", "}", "# process all children", "for", "ctx", ",", "v", "in", "list", "(", "parselet_node", ".", "items", "(", ")", ")", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "\"context:\"", ",", "ctx", ",", "v", ")", "extracted", "=", "None", "try", ":", "# scoped-extraction:", "# extraction should be done deeper in the document tree", "if", "ctx", ".", "scope", ":", "extracted", "=", "[", "]", "selected", "=", "self", ".", "selector_handler", ".", "select", "(", "document", ",", "ctx", ".", "scope", ")", "if", "selected", ":", "for", "i", ",", "elem", "in", "enumerate", "(", "selected", ",", "start", "=", "1", ")", ":", "parse_result", "=", "self", ".", "_extract", "(", "v", ",", "elem", ",", "level", "=", "level", "+", "1", ")", "if", "isinstance", "(", "parse_result", ",", "(", "list", ",", "tuple", ")", ")", ":", "extracted", ".", "extend", "(", "parse_result", ")", "else", ":", "extracted", ".", "append", "(", "parse_result", ")", "# if we're not in an array,", "# we only care about the first iteration", "if", "not", "ctx", ".", "iterate", ":", "break", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "\"parsed %d elements in scope (%s)\"", "%", "(", "i", ",", "ctx", ".", "scope", ")", ")", "# local extraction", "else", ":", "extracted", "=", "self", ".", "_extract", "(", "v", ",", "document", ",", "level", "=", "level", "+", "1", ")", "except", "NonMatchingNonOptionalKey", "as", "e", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "str", "(", "e", ")", ")", "if", "not", "ctx", ".", "required", "or", "not", "self", ".", "STRICT_MODE", ":", "output", "[", "ctx", ".", "key", "]", "=", "{", "}", "else", ":", "raise", "except", "Exception", "as", "e", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "str", "(", "e", ")", ")", "raise", "# replace empty-list result when not looping by empty dict", "if", "(", "isinstance", "(", "extracted", ",", "list", ")", "and", "not", "extracted", "and", "not", "ctx", ".", "iterate", ")", ":", "extracted", "=", "{", "}", "# keep only the first element if we're not in an array", "if", "self", ".", "KEEP_ONLY_FIRST_ELEMENT_IF_LIST", ":", "try", ":", "if", "(", "isinstance", "(", "extracted", ",", "list", ")", "and", "extracted", "and", "not", "ctx", ".", "iterate", ")", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "debug_offset", ",", "\"keep only 1st element\"", ")", "extracted", "=", "extracted", "[", "0", "]", "except", "Exception", "as", "e", ":", "if", "self", ".", "DEBUG", ":", "print", "(", "str", "(", "e", ")", ")", "print", "(", "debug_offset", ",", "\"error getting first element\"", ")", "# extraction for a required key gave nothing", "if", "(", "self", ".", "STRICT_MODE", "and", "ctx", ".", "required", "and", "extracted", "is", "None", ")", ":", "raise", "NonMatchingNonOptionalKey", "(", "'key \"%s\" is required but yield nothing\\nCurrent path: %s/(%s)\\n'", "%", "(", "ctx", ".", "key", ",", "document", ".", "getroottree", "(", ")", ".", "getpath", "(", "document", ")", ",", "v", ")", ")", "# special key to extract a selector-defined level deeper", "# but still output 
at same level", "# this can be useful for breaking up long selectors", "# or when you need to mix XPath and CSS selectors", "# e.g.", "# {", "# \"something(#content div.main)\": {", "# \"--(.//div[re:test(@class, 'style\\d{3,6}')])\": {", "# \"title\": \"h1\",", "# \"subtitle\": \"h2\"", "# }", "# }", "# }", "#", "if", "ctx", ".", "key", "==", "self", ".", "SPECIAL_LEVEL_KEY", ":", "if", "isinstance", "(", "extracted", ",", "dict", ")", ":", "output", ".", "update", "(", "extracted", ")", "elif", "isinstance", "(", "extracted", ",", "list", ")", ":", "if", "extracted", ":", "raise", "RuntimeError", "(", "\"could not merge non-empty list at higher level\"", ")", "else", ":", "#empty list, dont bother?", "pass", "else", ":", "# required keys are handled above", "if", "extracted", "is", "not", "None", ":", "output", "[", "ctx", ".", "key", "]", "=", "extracted", "else", ":", "# do not add this optional key/value pair in the output", "pass", "return", "output", "# a leaf/Selector node", "elif", "isinstance", "(", "parselet_node", ",", "Selector", ")", ":", "return", "self", ".", "selector_handler", ".", "extract", "(", "document", ",", "parselet_node", ")", "else", ":", "# FIXME: can this happen?", "# if selector handler returned None at compile time,", "# probably yes", "pass" ]
a8bc4c0592824459629018c8f4c6ae3dad6cc3cc
valid
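The comments above describe a special merge key (SPECIAL_LEVEL_KEY, shown as a '--' prefix) whose sub-results are hoisted into the parent dict; a sketch of such a parselet, assuming the re:test EXSLT extension is available to the default handler:

import parslepy

rules = {
    'something(#content div.main)': {
        "--(.//div[re:test(@class, 'style\\d{3,6}')])": {
            'title': 'h1',
            'subtitle': 'h2',
        },
    },
}
p = parslepy.Parselet(rules)   # 'title'/'subtitle' land directly under 'something'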
Dataset.auto_constraints
Use CLDF reference properties to implicitly create foreign key constraints. :param component: A Table object or `None`.
src/pycldf/dataset.py
def auto_constraints(self, component=None): """ Use CLDF reference properties to implicitly create foreign key constraints. :param component: A Table object or `None`. """ if not component: for table in self.tables: self.auto_constraints(table) return if not component.tableSchema.primaryKey: idcol = component.get_column(term_uri('id')) if idcol: component.tableSchema.primaryKey = [idcol.name] self._auto_foreign_keys(component) try: table_type = self.get_tabletype(component) except ValueError: # New component is not a known CLDF term, so cannot add components # automatically. TODO: We might be able to infer some based on # `xxxReference` column properties? return # auto-add foreign keys targeting the new component: for table in self.tables: self._auto_foreign_keys(table, component=component, table_type=table_type)
def auto_constraints(self, component=None): """ Use CLDF reference properties to implicitly create foreign key constraints. :param component: A Table object or `None`. """ if not component: for table in self.tables: self.auto_constraints(table) return if not component.tableSchema.primaryKey: idcol = component.get_column(term_uri('id')) if idcol: component.tableSchema.primaryKey = [idcol.name] self._auto_foreign_keys(component) try: table_type = self.get_tabletype(component) except ValueError: # New component is not a known CLDF term, so cannot add components # automatically. TODO: We might be able to infer some based on # `xxxReference` column properties? return # auto-add foreign keys targeting the new component: for table in self.tables: self._auto_foreign_keys(table, component=component, table_type=table_type)
[ "Use", "CLDF", "reference", "properties", "to", "implicitely", "create", "foreign", "key", "constraints", "." ]
cldf/pycldf
python
https://github.com/cldf/pycldf/blob/636f1eb3ea769394e14ad9e42a83b6096efa9728/src/pycldf/dataset.py#L161-L189
[ "def", "auto_constraints", "(", "self", ",", "component", "=", "None", ")", ":", "if", "not", "component", ":", "for", "table", "in", "self", ".", "tables", ":", "self", ".", "auto_constraints", "(", "table", ")", "return", "if", "not", "component", ".", "tableSchema", ".", "primaryKey", ":", "idcol", "=", "component", ".", "get_column", "(", "term_uri", "(", "'id'", ")", ")", "if", "idcol", ":", "component", ".", "tableSchema", ".", "primaryKey", "=", "[", "idcol", ".", "name", "]", "self", ".", "_auto_foreign_keys", "(", "component", ")", "try", ":", "table_type", "=", "self", ".", "get_tabletype", "(", "component", ")", "except", "ValueError", ":", "# New component is not a known CLDF term, so cannot add components", "# automatically. TODO: We might me able to infer some based on", "# `xxxReference` column properties?", "return", "# auto-add foreign keys targetting the new component:", "for", "table", "in", "self", ".", "tables", ":", "self", ".", "_auto_foreign_keys", "(", "table", ",", "component", "=", "component", ",", "table_type", "=", "table_type", ")" ]
636f1eb3ea769394e14ad9e42a83b6096efa9728
valid
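A sketch of when `auto_constraints` fires, assuming the usual pycldf workflow where `add_component` triggers it; the directory name is made up:

from pycldf import Wordlist

ds = Wordlist.in_dir('mydataset')   # hypothetical output directory
ds.add_component('CognateTable')    # primary key set from the id column;
                                    # the formReference column gains a
                                    # foreign key into the FormTable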
Service.url_builder
Create a URL for the specified endpoint. Arguments: endpoint (:py:class:`str`): The API endpoint to access. root: (:py:class:`str`, optional): The root URL for the service API. params: (:py:class:`dict`, optional): The values to format into the created URL (defaults to ``None``). url_params: (:py:class:`dict`, optional): Parameters to add to the end of the URL (defaults to ``None``). Returns: :py:class:`str`: The resulting URL.
aslack/core.py
def url_builder(self, endpoint, *, root=None, params=None, url_params=None): """Create a URL for the specified endpoint. Arguments: endpoint (:py:class:`str`): The API endpoint to access. root: (:py:class:`str`, optional): The root URL for the service API. params: (:py:class:`dict`, optional): The values to format into the created URL (defaults to ``None``). url_params: (:py:class:`dict`, optional): Parameters to add to the end of the URL (defaults to ``None``). Returns: :py:class:`str`: The resulting URL. """ if root is None: root = self.ROOT scheme, netloc, path, _, _ = urlsplit(root) return urlunsplit(( scheme, netloc, urljoin(path, endpoint), urlencode(url_params or {}), '', )).format(**params or {})
def url_builder(self, endpoint, *, root=None, params=None, url_params=None): """Create a URL for the specified endpoint. Arguments: endpoint (:py:class:`str`): The API endpoint to access. root: (:py:class:`str`, optional): The root URL for the service API. params: (:py:class:`dict`, optional): The values to format into the created URL (defaults to ``None``). url_params: (:py:class:`dict`, optional): Parameters to add to the end of the URL (defaults to ``None``). Returns: :py:class:`str`: The resulting URL. """ if root is None: root = self.ROOT scheme, netloc, path, _, _ = urlsplit(root) return urlunsplit(( scheme, netloc, urljoin(path, endpoint), urlencode(url_params or {}), '', )).format(**params or {})
[ "Create", "a", "URL", "for", "the", "specified", "endpoint", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/core.py#L37-L62
[ "def", "url_builder", "(", "self", ",", "endpoint", ",", "*", ",", "root", "=", "None", ",", "params", "=", "None", ",", "url_params", "=", "None", ")", ":", "if", "root", "is", "None", ":", "root", "=", "self", ".", "ROOT", "scheme", ",", "netloc", ",", "path", ",", "_", ",", "_", "=", "urlsplit", "(", "root", ")", "return", "urlunsplit", "(", "(", "scheme", ",", "netloc", ",", "urljoin", "(", "path", ",", "endpoint", ")", ",", "urlencode", "(", "url_params", "or", "{", "}", ")", ",", "''", ",", ")", ")", ".", "format", "(", "*", "*", "params", "or", "{", "}", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
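The URL assembly in url_builder uses only the standard library, so the logic is easy to reproduce and run outside aslack; the root, endpoint, and parameter values below are illustrative only.

    from urllib.parse import urlsplit, urlunsplit, urljoin, urlencode

    def build_url(endpoint, root, params=None, url_params=None):
        # Split the root so the endpoint can be joined onto its path part.
        scheme, netloc, path, _, _ = urlsplit(root)
        return urlunsplit((
            scheme,
            netloc,
            urljoin(path, endpoint),
            urlencode(url_params or {}),
            '',
        )).format(**params or {})

    print(build_url('users/{id}',
                    root='https://api.example.com/v1/',
                    params={'id': 42},
                    url_params={'format': 'json'}))
    # https://api.example.com/v1/users/42?format=json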
valid
raise_for_status
Raise an appropriate error for a given response. Arguments: response (:py:class:`aiohttp.ClientResponse`): The API response. Raises: :py:class:`aiohttp.web_exceptions.HTTPException`: The appropriate error for the response's status.
aslack/utils.py
def raise_for_status(response): """Raise an appropriate error for a given response. Arguments: response (:py:class:`aiohttp.ClientResponse`): The API response. Raises: :py:class:`aiohttp.web_exceptions.HTTPException`: The appropriate error for the response's status. """ for err_name in web_exceptions.__all__: err = getattr(web_exceptions, err_name) if err.status_code == response.status: payload = dict( headers=response.headers, reason=response.reason, ) if issubclass(err, web_exceptions._HTTPMove): # pylint: disable=protected-access raise err(response.headers['Location'], **payload) raise err(**payload)
def raise_for_status(response): """Raise an appropriate error for a given response. Arguments: response (:py:class:`aiohttp.ClientResponse`): The API response. Raises: :py:class:`aiohttp.web_exceptions.HTTPException`: The appropriate error for the response's status. """ for err_name in web_exceptions.__all__: err = getattr(web_exceptions, err_name) if err.status_code == response.status: payload = dict( headers=response.headers, reason=response.reason, ) if issubclass(err, web_exceptions._HTTPMove): # pylint: disable=protected-access raise err(response.headers['Location'], **payload) raise err(**payload)
[ "Raise", "an", "appropriate", "error", "for", "a", "given", "response", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/utils.py#L27-L47
[ "def", "raise_for_status", "(", "response", ")", ":", "for", "err_name", "in", "web_exceptions", ".", "__all__", ":", "err", "=", "getattr", "(", "web_exceptions", ",", "err_name", ")", "if", "err", ".", "status_code", "==", "response", ".", "status", ":", "payload", "=", "dict", "(", "headers", "=", "response", ".", "headers", ",", "reason", "=", "response", ".", "reason", ",", ")", "if", "issubclass", "(", "err", ",", "web_exceptions", ".", "_HTTPMove", ")", ":", "# pylint: disable=protected-access", "raise", "err", "(", "response", ".", "headers", "[", "'Location'", "]", ",", "*", "*", "payload", ")", "raise", "err", "(", "*", "*", "payload", ")" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
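raise_for_status scans aiohttp's exported exception classes for one whose status_code matches the response. A stdlib-only analogue of that lookup, with a hand-rolled class table standing in for web_exceptions.__all__ (all names here are illustrative):

    class HTTPError(Exception):
        status_code = None

    class HTTPForbidden(HTTPError):
        status_code = 403

    class HTTPNotFound(HTTPError):
        status_code = 404

    def raise_for_status(status):
        # Scan the known error classes for one matching the status,
        # mirroring the loop over web_exceptions.__all__ above.
        for err in (HTTPForbidden, HTTPNotFound):
            if err.status_code == status:
                raise err()

    try:
        raise_for_status(404)
    except HTTPError as exc:
        print(type(exc).__name__)  # HTTPNotFound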
valid
truncate
Truncate the supplied text for display. Arguments: text (:py:class:`str`): The text to truncate. max_len (:py:class:`int`, optional): The maximum length of the text before truncation (defaults to 350 characters). end (:py:class:`str`, optional): The ending to use to show that the text was truncated (defaults to ``'...'``). Returns: :py:class:`str`: The truncated text.
aslack/utils.py
def truncate(text, max_len=350, end='...'): """Truncate the supplied text for display. Arguments: text (:py:class:`str`): The text to truncate. max_len (:py:class:`int`, optional): The maximum length of the text before truncation (defaults to 350 characters). end (:py:class:`str`, optional): The ending to use to show that the text was truncated (defaults to ``'...'``). Returns: :py:class:`str`: The truncated text. """ if len(text) <= max_len: return text return text[:max_len].rsplit(' ', maxsplit=1)[0] + end
def truncate(text, max_len=350, end='...'): """Truncate the supplied text for display. Arguments: text (:py:class:`str`): The text to truncate. max_len (:py:class:`int`, optional): The maximum length of the text before truncation (defaults to 350 characters). end (:py:class:`str`, optional): The ending to use to show that the text was truncated (defaults to ``'...'``). Returns: :py:class:`str`: The truncated text. """ if len(text) <= max_len: return text return text[:max_len].rsplit(' ', maxsplit=1)[0] + end
[ "Truncate", "the", "supplied", "text", "for", "display", "." ]
textbook/aslack
python
https://github.com/textbook/aslack/blob/9ac6a44e4464180109fa4be130ad7a980a9d1acc/aslack/utils.py#L50-L66
[ "def", "truncate", "(", "text", ",", "max_len", "=", "350", ",", "end", "=", "'...'", ")", ":", "if", "len", "(", "text", ")", "<=", "max_len", ":", "return", "text", "return", "text", "[", ":", "max_len", "]", ".", "rsplit", "(", "' '", ",", "maxsplit", "=", "1", ")", "[", "0", "]", "+", "end" ]
9ac6a44e4464180109fa4be130ad7a980a9d1acc
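truncate is self-contained, so its behaviour can be checked directly; note that it cuts back to the last space before max_len, so words are never split mid-way:

    def truncate(text, max_len=350, end='...'):
        if len(text) <= max_len:
            return text
        return text[:max_len].rsplit(' ', maxsplit=1)[0] + end

    print(truncate('the quick brown fox jumps', max_len=14))
    # 'the quick...' (cut back to the last whole word, then the ellipsis)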
valid
Sources.add
Add a source, either specified by glottolog reference id, or as a bibtex record.
src/pycldf/sources.py
def add(self, *entries):
    """
    Add a source, either specified by glottolog reference id, or as a bibtex record.
    """
    for entry in entries:
        if isinstance(entry, string_types):
            self._add_entries(database.parse_string(entry, bib_format='bibtex'))
        else:
            self._add_entries(entry)
def add(self, *entries):
    """
    Add a source, either specified by glottolog reference id, or as a bibtex record.
    """
    for entry in entries:
        if isinstance(entry, string_types):
            self._add_entries(database.parse_string(entry, bib_format='bibtex'))
        else:
            self._add_entries(entry)
[ "Add", "a", "source", "either", "specified", "by", "glottolog", "reference", "id", "or", "as", "bibtex", "record", "." ]
cldf/pycldf
python
https://github.com/cldf/pycldf/blob/636f1eb3ea769394e14ad9e42a83b6096efa9728/src/pycldf/sources.py#L185-L193
[ "def", "add", "(", "self", ",", "*", "entries", ")", ":", "for", "entry", "in", "entries", ":", "if", "isinstance", "(", "entry", ",", "string_types", ")", ":", "self", ".", "_add_entries", "(", "database", ".", "parse_string", "(", "entry", ",", "bib_format", "=", "'bibtex'", ")", ")", "else", ":", "self", ".", "_add_entries", "(", "entry", ")" ]
636f1eb3ea769394e14ad9e42a83b6096efa9728
valid
primary_avatar
This tag tries to get the default avatar for a user without doing any db
requests. It achieves this by linking to a special view that will do all
the work for us. If that special view is then cached by a CDN, for
instance, we will avoid many db calls.
avatar/templatetags/avatar_tags.py
def primary_avatar(user, size=AVATAR_DEFAULT_SIZE):
    """
    This tag tries to get the default avatar for a user without doing any db
    requests. It achieves this by linking to a special view that will do all
    the work for us. If that special view is then cached by a CDN, for
    instance, we will avoid many db calls.
    """
    alt = unicode(user)
    url = reverse('avatar_render_primary', kwargs={'user' : user, 'size' : size})
    return """<img src="%s" alt="%s" />""" % (url, alt, )
def primary_avatar(user, size=AVATAR_DEFAULT_SIZE):
    """
    This tag tries to get the default avatar for a user without doing any db
    requests. It achieves this by linking to a special view that will do all
    the work for us. If that special view is then cached by a CDN, for
    instance, we will avoid many db calls.
    """
    alt = unicode(user)
    url = reverse('avatar_render_primary', kwargs={'user' : user, 'size' : size})
    return """<img src="%s" alt="%s" />""" % (url, alt, )
[ "This", "tag", "tries", "to", "get", "the", "default", "avatar", "for", "a", "user", "without", "doing", "any", "db", "requests", ".", "It", "achieve", "this", "by", "linking", "to", "a", "special", "view", "that", "will", "do", "all", "the", "work", "for", "us", ".", "If", "that", "special", "view", "is", "then", "cached", "by", "a", "CDN", "for", "instance", "we", "will", "avoid", "many", "db", "calls", "." ]
GeoNode/geonode-avatar
python
https://github.com/GeoNode/geonode-avatar/blob/45f33be4e623d0e7a2b31a83eb68af48dfbb959b/avatar/templatetags/avatar_tags.py#L58-L68
[ "def", "primary_avatar", "(", "user", ",", "size", "=", "AVATAR_DEFAULT_SIZE", ")", ":", "alt", "=", "unicode", "(", "user", ")", "url", "=", "reverse", "(", "'avatar_render_primary'", ",", "kwargs", "=", "{", "'user'", ":", "user", ",", "'size'", ":", "size", "}", ")", "return", "\"\"\"<img src=\"%s\" alt=\"%s\" />\"\"\"", "%", "(", "url", ",", "alt", ",", ")" ]
45f33be4e623d0e7a2b31a83eb68af48dfbb959b
valid
get_cache_key
Returns a cache key consisting of a username and image size.
avatar/util.py
def get_cache_key(user_or_username, size, prefix):
    """
    Returns a cache key consisting of a username and image size.
    """
    if isinstance(user_or_username, get_user_model()):
        user_or_username = user_or_username.username
    return '%s_%s_%s' % (prefix, user_or_username, size)
def get_cache_key(user_or_username, size, prefix):
    """
    Returns a cache key consisting of a username and image size.
    """
    if isinstance(user_or_username, get_user_model()):
        user_or_username = user_or_username.username
    return '%s_%s_%s' % (prefix, user_or_username, size)
[ "Returns", "a", "cache", "key", "consisten", "of", "a", "username", "and", "image", "size", "." ]
GeoNode/geonode-avatar
python
https://github.com/GeoNode/geonode-avatar/blob/45f33be4e623d0e7a2b31a83eb68af48dfbb959b/avatar/util.py#L11-L17
[ "def", "get_cache_key", "(", "user_or_username", ",", "size", ",", "prefix", ")", ":", "if", "isinstance", "(", "user_or_username", ",", "get_user_model", "(", ")", ")", ":", "user_or_username", "=", "user_or_username", ".", "username", "return", "'%s_%s_%s'", "%", "(", "prefix", ",", "user_or_username", ",", "size", ")" ]
45f33be4e623d0e7a2b31a83eb68af48dfbb959b
valid
cache_result
Decorator to cache the result of functions that take a ``user`` and a ``size`` value.
avatar/util.py
def cache_result(func): """ Decorator to cache the result of functions that take a ``user`` and a ``size`` value. """ def cache_set(key, value): cache.set(key, value, AVATAR_CACHE_TIMEOUT) return value def cached_func(user, size): prefix = func.__name__ cached_funcs.add(prefix) key = get_cache_key(user, size, prefix=prefix) return cache.get(key) or cache_set(key, func(user, size)) return cached_func
def cache_result(func): """ Decorator to cache the result of functions that take a ``user`` and a ``size`` value. """ def cache_set(key, value): cache.set(key, value, AVATAR_CACHE_TIMEOUT) return value def cached_func(user, size): prefix = func.__name__ cached_funcs.add(prefix) key = get_cache_key(user, size, prefix=prefix) return cache.get(key) or cache_set(key, func(user, size)) return cached_func
[ "Decorator", "to", "cache", "the", "result", "of", "functions", "that", "take", "a", "user", "and", "a", "size", "value", "." ]
GeoNode/geonode-avatar
python
https://github.com/GeoNode/geonode-avatar/blob/45f33be4e623d0e7a2b31a83eb68af48dfbb959b/avatar/util.py#L19-L33
[ "def", "cache_result", "(", "func", ")", ":", "def", "cache_set", "(", "key", ",", "value", ")", ":", "cache", ".", "set", "(", "key", ",", "value", ",", "AVATAR_CACHE_TIMEOUT", ")", "return", "value", "def", "cached_func", "(", "user", ",", "size", ")", ":", "prefix", "=", "func", ".", "__name__", "cached_funcs", ".", "add", "(", "prefix", ")", "key", "=", "get_cache_key", "(", "user", ",", "size", ",", "prefix", "=", "prefix", ")", "return", "cache", ".", "get", "(", "key", ")", "or", "cache_set", "(", "key", ",", "func", "(", "user", ",", "size", ")", ")", "return", "cached_func" ]
45f33be4e623d0e7a2b31a83eb68af48dfbb959b
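The decorator pattern above (build a per-function cache key, then return the cached value or compute and store it) runs fine without Django if a dict stands in for the cache backend; everything below besides the pattern itself is illustrative:

    _cache = {}
    cached_funcs = set()

    def get_cache_key(username, size, prefix):
        return '%s_%s_%s' % (prefix, username, size)

    def cache_result(func):
        def cache_set(key, value):
            _cache[key] = value
            return value

        def cached_func(user, size):
            prefix = func.__name__
            cached_funcs.add(prefix)
            key = get_cache_key(user, size, prefix=prefix)
            # Note: as in the original, falsy cached values are recomputed,
            # because `get(...) or ...` cannot tell a miss from a falsy hit.
            return _cache.get(key) or cache_set(key, func(user, size))
        return cached_func

    @cache_result
    def avatar_url(user, size):
        print('computing...')
        return '/avatars/%s/%s.png' % (user, size)

    avatar_url('alice', 80)  # prints 'computing...', fills the cache
    avatar_url('alice', 80)  # served from _cache, no recomputation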
valid
invalidate_cache
Function to be called when saving or changing a user's avatars.
avatar/util.py
def invalidate_cache(user, size=None):
    """
    Function to be called when saving or changing a user's avatars.
    """
    sizes = set(AUTO_GENERATE_AVATAR_SIZES)
    if size is not None:
        sizes.add(size)
    for prefix in cached_funcs:
        for size in sizes:
            cache.delete(get_cache_key(user, size, prefix))
def invalidate_cache(user, size=None):
    """
    Function to be called when saving or changing a user's avatars.
    """
    sizes = set(AUTO_GENERATE_AVATAR_SIZES)
    if size is not None:
        sizes.add(size)
    for prefix in cached_funcs:
        for size in sizes:
            cache.delete(get_cache_key(user, size, prefix))
[ "Function", "to", "be", "called", "when", "saving", "or", "changing", "an", "user", "s", "avatars", "." ]
GeoNode/geonode-avatar
python
https://github.com/GeoNode/geonode-avatar/blob/45f33be4e623d0e7a2b31a83eb68af48dfbb959b/avatar/util.py#L35-L44
[ "def", "invalidate_cache", "(", "user", ",", "size", "=", "None", ")", ":", "sizes", "=", "set", "(", "AUTO_GENERATE_AVATAR_SIZES", ")", "if", "size", "is", "not", "None", ":", "sizes", ".", "add", "(", "size", ")", "for", "prefix", "in", "cached_funcs", ":", "for", "size", "in", "sizes", ":", "cache", ".", "delete", "(", "get_cache_key", "(", "user", ",", "size", ",", "prefix", ")", ")" ]
45f33be4e623d0e7a2b31a83eb68af48dfbb959b
valid
get_field_for_proxy
Returns a field object instance for a given PrefProxy object. :param PrefProxy pref_proxy: :rtype: models.Field
siteprefs/utils.py
def get_field_for_proxy(pref_proxy): """Returns a field object instance for a given PrefProxy object. :param PrefProxy pref_proxy: :rtype: models.Field """ field = { bool: models.BooleanField, int: models.IntegerField, float: models.FloatField, datetime: models.DateTimeField, }.get(type(pref_proxy.default), models.TextField)() update_field_from_proxy(field, pref_proxy) return field
def get_field_for_proxy(pref_proxy): """Returns a field object instance for a given PrefProxy object. :param PrefProxy pref_proxy: :rtype: models.Field """ field = { bool: models.BooleanField, int: models.IntegerField, float: models.FloatField, datetime: models.DateTimeField, }.get(type(pref_proxy.default), models.TextField)() update_field_from_proxy(field, pref_proxy) return field
[ "Returns", "a", "field", "object", "instance", "for", "a", "given", "PrefProxy", "object", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/utils.py#L227-L245
[ "def", "get_field_for_proxy", "(", "pref_proxy", ")", ":", "field", "=", "{", "bool", ":", "models", ".", "BooleanField", ",", "int", ":", "models", ".", "IntegerField", ",", "float", ":", "models", ".", "FloatField", ",", "datetime", ":", "models", ".", "DateTimeField", ",", "}", ".", "get", "(", "type", "(", "pref_proxy", ".", "default", ")", ",", "models", ".", "TextField", ")", "(", ")", "update_field_from_proxy", "(", "field", ",", "pref_proxy", ")", "return", "field" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
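The heart of get_field_for_proxy is a type-to-class dispatch via dict.get with a fallback default. The same selection logic, runnable without Django (the field classes below are stand-ins for the django.db.models ones):

    from datetime import datetime

    class BooleanField: pass
    class IntegerField: pass
    class FloatField: pass
    class DateTimeField: pass
    class TextField: pass

    def field_class_for(default):
        # Unknown default types fall through to TextField, as in the original.
        return {
            bool: BooleanField,
            int: IntegerField,
            float: FloatField,
            datetime: DateTimeField,
        }.get(type(default), TextField)

    print(field_class_for(True).__name__)         # BooleanField
    print(field_class_for('some text').__name__)  # TextField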
valid
update_field_from_proxy
Updates field object with data from a PrefProxy object. :param models.Field field_obj: :param PrefProxy pref_proxy:
siteprefs/utils.py
def update_field_from_proxy(field_obj, pref_proxy): """Updates field object with data from a PrefProxy object. :param models.Field field_obj: :param PrefProxy pref_proxy: """ attr_names = ('verbose_name', 'help_text', 'default') for attr_name in attr_names: setattr(field_obj, attr_name, getattr(pref_proxy, attr_name))
def update_field_from_proxy(field_obj, pref_proxy): """Updates field object with data from a PrefProxy object. :param models.Field field_obj: :param PrefProxy pref_proxy: """ attr_names = ('verbose_name', 'help_text', 'default') for attr_name in attr_names: setattr(field_obj, attr_name, getattr(pref_proxy, attr_name))
[ "Updates", "field", "object", "with", "data", "from", "a", "PrefProxy", "object", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/utils.py#L248-L259
[ "def", "update_field_from_proxy", "(", "field_obj", ",", "pref_proxy", ")", ":", "attr_names", "=", "(", "'verbose_name'", ",", "'help_text'", ",", "'default'", ")", "for", "attr_name", "in", "attr_names", ":", "setattr", "(", "field_obj", ",", "attr_name", ",", "getattr", "(", "pref_proxy", ",", "attr_name", ")", ")" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
valid
get_pref_model_class
Returns preferences model class dynamically created for a given app or None on conflict.
siteprefs/utils.py
def get_pref_model_class(app, prefs, get_prefs_func):
    """Returns preferences model class dynamically created for a given app or None on conflict."""
    module = '%s.%s' % (app, PREFS_MODULE_NAME)

    model_dict = {
        '_prefs_app': app,
        '_get_prefs': staticmethod(get_prefs_func),
        '__module__': module,
        'Meta': type('Meta', (models.options.Options,), {
            'verbose_name': _('Preference'),
            'verbose_name_plural': _('Preferences'),
            'app_label': app,
            'managed': False,
        })
    }

    for field_name, val_proxy in prefs.items():
        model_dict[field_name] = val_proxy.field

    model = type('Preferences', (models.Model,), model_dict)

    def fake_save_base(self, *args, **kwargs):

        updated_prefs = {
            f.name: getattr(self, f.name) for f in self._meta.fields
            if not isinstance(f, models.fields.AutoField)
        }

        app_prefs = self._get_prefs(self._prefs_app)

        for pref in app_prefs.keys():
            if pref in updated_prefs:
                app_prefs[pref].db_value = updated_prefs[pref]

        self.pk = self._prefs_app  # Make Django 1.7 happy.
        prefs_save.send(sender=self, app=self._prefs_app, updated_prefs=updated_prefs)

        return True

    model.save_base = fake_save_base

    return model
def get_pref_model_class(app, prefs, get_prefs_func):
    """Returns preferences model class dynamically created for a given app or None on conflict."""
    module = '%s.%s' % (app, PREFS_MODULE_NAME)

    model_dict = {
        '_prefs_app': app,
        '_get_prefs': staticmethod(get_prefs_func),
        '__module__': module,
        'Meta': type('Meta', (models.options.Options,), {
            'verbose_name': _('Preference'),
            'verbose_name_plural': _('Preferences'),
            'app_label': app,
            'managed': False,
        })
    }

    for field_name, val_proxy in prefs.items():
        model_dict[field_name] = val_proxy.field

    model = type('Preferences', (models.Model,), model_dict)

    def fake_save_base(self, *args, **kwargs):

        updated_prefs = {
            f.name: getattr(self, f.name) for f in self._meta.fields
            if not isinstance(f, models.fields.AutoField)
        }

        app_prefs = self._get_prefs(self._prefs_app)

        for pref in app_prefs.keys():
            if pref in updated_prefs:
                app_prefs[pref].db_value = updated_prefs[pref]

        self.pk = self._prefs_app  # Make Django 1.7 happy.
        prefs_save.send(sender=self, app=self._prefs_app, updated_prefs=updated_prefs)

        return True

    model.save_base = fake_save_base

    return model
[ "Returns", "preferences", "model", "class", "dynamically", "crated", "for", "a", "given", "app", "or", "None", "on", "conflict", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/utils.py#L262-L303
[ "def", "get_pref_model_class", "(", "app", ",", "prefs", ",", "get_prefs_func", ")", ":", "module", "=", "'%s.%s'", "%", "(", "app", ",", "PREFS_MODULE_NAME", ")", "model_dict", "=", "{", "'_prefs_app'", ":", "app", ",", "'_get_prefs'", ":", "staticmethod", "(", "get_prefs_func", ")", ",", "'__module__'", ":", "module", ",", "'Meta'", ":", "type", "(", "'Meta'", ",", "(", "models", ".", "options", ".", "Options", ",", ")", ",", "{", "'verbose_name'", ":", "_", "(", "'Preference'", ")", ",", "'verbose_name_plural'", ":", "_", "(", "'Preferences'", ")", ",", "'app_label'", ":", "app", ",", "'managed'", ":", "False", ",", "}", ")", "}", "for", "field_name", ",", "val_proxy", "in", "prefs", ".", "items", "(", ")", ":", "model_dict", "[", "field_name", "]", "=", "val_proxy", ".", "field", "model", "=", "type", "(", "'Preferences'", ",", "(", "models", ".", "Model", ",", ")", ",", "model_dict", ")", "def", "fake_save_base", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "updated_prefs", "=", "{", "f", ".", "name", ":", "getattr", "(", "self", ",", "f", ".", "name", ")", "for", "f", "in", "self", ".", "_meta", ".", "fields", "if", "not", "isinstance", "(", "f", ",", "models", ".", "fields", ".", "AutoField", ")", "}", "app_prefs", "=", "self", ".", "_get_prefs", "(", "self", ".", "_prefs_app", ")", "for", "pref", "in", "app_prefs", ".", "keys", "(", ")", ":", "if", "pref", "in", "updated_prefs", ":", "app_prefs", "[", "pref", "]", ".", "db_value", "=", "updated_prefs", "[", "pref", "]", "self", ".", "pk", "=", "self", ".", "_prefs_app", "# Make Django 1.7 happy.", "prefs_save", ".", "send", "(", "sender", "=", "self", ",", "app", "=", "self", ".", "_prefs_app", ",", "updated_prefs", "=", "updated_prefs", ")", "return", "True", "model", ".", "save_base", "=", "fake_save_base", "return", "model" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
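The key mechanism in get_pref_model_class is three-argument type(), which builds a class from a namespace dict at runtime; save_base is then patched onto it. The class-construction step in isolation, with all names illustrative:

    def make_prefs_class(app, fields):
        namespace = {'_prefs_app': app, '__module__': '%s.prefs' % app}
        namespace.update(fields)
        # type(name, bases, namespace) creates a new class dynamically,
        # just as the record above does for its Preferences model.
        return type('Preferences', (object,), namespace)

    Prefs = make_prefs_class('myapp', {'GREETING': 'hello', 'RETRIES': 3})
    p = Prefs()
    print(p._prefs_app, p.GREETING, p.RETRIES)  # myapp hello 3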
valid
get_frame_locals
Returns locals dictionary from a given frame. :param int stepback: :rtype: dict
siteprefs/utils.py
def get_frame_locals(stepback=0): """Returns locals dictionary from a given frame. :param int stepback: :rtype: dict """ with Frame(stepback=stepback) as frame: locals_dict = frame.f_locals return locals_dict
def get_frame_locals(stepback=0): """Returns locals dictionary from a given frame. :param int stepback: :rtype: dict """ with Frame(stepback=stepback) as frame: locals_dict = frame.f_locals return locals_dict
[ "Returns", "locals", "dictionary", "from", "a", "given", "frame", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/utils.py#L353-L364
[ "def", "get_frame_locals", "(", "stepback", "=", "0", ")", ":", "with", "Frame", "(", "stepback", "=", "stepback", ")", "as", "frame", ":", "locals_dict", "=", "frame", ".", "f_locals", "return", "locals_dict" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
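Frame is a siteprefs helper; the underlying capability is plain frame introspection from the standard library. A runnable sketch of reading a caller's locals (stepback=0 meaning the immediate caller, as above):

    import inspect

    def get_frame_locals(stepback=0):
        # f_back walks up the call stack; with stepback=0 we stop at
        # the immediate caller's frame.
        frame = inspect.currentframe().f_back
        for _ in range(stepback):
            frame = frame.f_back
        try:
            return dict(frame.f_locals)
        finally:
            del frame  # break the reference cycle frames can create

    def caller():
        secret = 42
        return get_frame_locals()

    print(caller()['secret'])  # 42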
valid
traverse_local_prefs
Generator to walk through variables considered as preferences in locals dict of a given frame. :param int stepback: :rtype: tuple
siteprefs/utils.py
def traverse_local_prefs(stepback=0): """Generator to walk through variables considered as preferences in locals dict of a given frame. :param int stepback: :rtype: tuple """ locals_dict = get_frame_locals(stepback+1) for k in locals_dict: if not k.startswith('_') and k.upper() == k: yield k, locals_dict
def traverse_local_prefs(stepback=0): """Generator to walk through variables considered as preferences in locals dict of a given frame. :param int stepback: :rtype: tuple """ locals_dict = get_frame_locals(stepback+1) for k in locals_dict: if not k.startswith('_') and k.upper() == k: yield k, locals_dict
[ "Generator", "to", "walk", "through", "variables", "considered", "as", "preferences", "in", "locals", "dict", "of", "a", "given", "frame", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/utils.py#L367-L379
[ "def", "traverse_local_prefs", "(", "stepback", "=", "0", ")", ":", "locals_dict", "=", "get_frame_locals", "(", "stepback", "+", "1", ")", "for", "k", "in", "locals_dict", ":", "if", "not", "k", ".", "startswith", "(", "'_'", ")", "and", "k", ".", "upper", "(", ")", "==", "k", ":", "yield", "k", ",", "locals_dict" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
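The generator's filter is purely name-based: a local counts as a preference when its name is fully upper-case and not underscore-prefixed. That convention is easy to verify on its own:

    def looks_like_pref(name):
        return not name.startswith('_') and name.upper() == name

    namespace = {'MY_OPTION': 1, 'helper': 2, '_PRIVATE': 3, 'DEBUG': True}
    print(sorted(k for k in namespace if looks_like_pref(k)))
    # ['DEBUG', 'MY_OPTION']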
valid
import_prefs
Imports preferences modules from packages (apps) and project root.
siteprefs/utils.py
def import_prefs(): """Imports preferences modules from packages (apps) and project root.""" # settings.py locals if autodiscover_siteprefs() is in urls.py settings_locals = get_frame_locals(3) if 'self' not in settings_locals: # If not SiteprefsConfig.ready() # Try to import project-wide prefs. project_package = settings_locals['__package__'] # Expected project layout introduced in Django 1.4 if not project_package: # Fallback to old layout. project_package = os.path.split(os.path.dirname(settings_locals['__file__']))[-1] import_module(project_package, PREFS_MODULE_NAME) import_project_modules(PREFS_MODULE_NAME)
def import_prefs(): """Imports preferences modules from packages (apps) and project root.""" # settings.py locals if autodiscover_siteprefs() is in urls.py settings_locals = get_frame_locals(3) if 'self' not in settings_locals: # If not SiteprefsConfig.ready() # Try to import project-wide prefs. project_package = settings_locals['__package__'] # Expected project layout introduced in Django 1.4 if not project_package: # Fallback to old layout. project_package = os.path.split(os.path.dirname(settings_locals['__file__']))[-1] import_module(project_package, PREFS_MODULE_NAME) import_project_modules(PREFS_MODULE_NAME)
[ "Imports", "preferences", "modules", "from", "packages", "(", "apps", ")", "and", "project", "root", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/utils.py#L392-L408
[ "def", "import_prefs", "(", ")", ":", "# settings.py locals if autodiscover_siteprefs() is in urls.py", "settings_locals", "=", "get_frame_locals", "(", "3", ")", "if", "'self'", "not", "in", "settings_locals", ":", "# If not SiteprefsConfig.ready()", "# Try to import project-wide prefs.", "project_package", "=", "settings_locals", "[", "'__package__'", "]", "# Expected project layout introduced in Django 1.4", "if", "not", "project_package", ":", "# Fallback to old layout.", "project_package", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "dirname", "(", "settings_locals", "[", "'__file__'", "]", ")", ")", "[", "-", "1", "]", "import_module", "(", "project_package", ",", "PREFS_MODULE_NAME", ")", "import_project_modules", "(", "PREFS_MODULE_NAME", ")" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
valid
print_file_info
Prints file details in the current directory
examples.py
def print_file_info(): """Prints file details in the current directory""" tpl = TableLogger(columns='file,created,modified,size') for f in os.listdir('.'): size = os.stat(f).st_size date_created = datetime.fromtimestamp(os.path.getctime(f)) date_modified = datetime.fromtimestamp(os.path.getmtime(f)) tpl(f, date_created, date_modified, size)
def print_file_info(): """Prints file details in the current directory""" tpl = TableLogger(columns='file,created,modified,size') for f in os.listdir('.'): size = os.stat(f).st_size date_created = datetime.fromtimestamp(os.path.getctime(f)) date_modified = datetime.fromtimestamp(os.path.getmtime(f)) tpl(f, date_created, date_modified, size)
[ "Prints", "file", "details", "in", "the", "current", "directory" ]
AleksTk/table-logger
python
https://github.com/AleksTk/table-logger/blob/d2326e053fb972ed7ae4950d0e8c6e7c9f4399b8/examples.py#L27-L34
[ "def", "print_file_info", "(", ")", ":", "tpl", "=", "TableLogger", "(", "columns", "=", "'file,created,modified,size'", ")", "for", "f", "in", "os", ".", "listdir", "(", "'.'", ")", ":", "size", "=", "os", ".", "stat", "(", "f", ")", ".", "st_size", "date_created", "=", "datetime", ".", "fromtimestamp", "(", "os", ".", "path", ".", "getctime", "(", "f", ")", ")", "date_modified", "=", "datetime", ".", "fromtimestamp", "(", "os", ".", "path", ".", "getmtime", "(", "f", ")", ")", "tpl", "(", "f", ",", "date_created", ",", "date_modified", ",", "size", ")" ]
d2326e053fb972ed7ae4950d0e8c6e7c9f4399b8
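A smaller TableLogger call in the same style as the example above, assuming the table-logger package is installed; the column names and values are made up:

    from table_logger import TableLogger  # assumes table-logger is installed

    tpl = TableLogger(columns='name,score')
    tpl('alice', 0.91)
    tpl('bob', 0.87)
    # Each call appends one formatted row; the header and per-column
    # formatters are set up lazily from the first row (see the
    # TableLogger.setup records that follow).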
valid
DispatchGroup._bind_args
Attempt to bind the args to the type signature. First try to just bind to the signature, then ensure that all arguments match the parameter types.
dispatching.py
def _bind_args(sig, param_matchers, args, kwargs): ''' Attempt to bind the args to the type signature. First try to just bind to the signature, then ensure that all arguments match the parameter types. ''' #Bind to signature. May throw its own TypeError bound = sig.bind(*args, **kwargs) if not all(param_matcher(bound.arguments[param_name]) for param_name, param_matcher in param_matchers): raise TypeError return bound
def _bind_args(sig, param_matchers, args, kwargs): ''' Attempt to bind the args to the type signature. First try to just bind to the signature, then ensure that all arguments match the parameter types. ''' #Bind to signature. May throw its own TypeError bound = sig.bind(*args, **kwargs) if not all(param_matcher(bound.arguments[param_name]) for param_name, param_matcher in param_matchers): raise TypeError return bound
[ "Attempt", "to", "bind", "the", "args", "to", "the", "type", "signature", ".", "First", "try", "to", "just", "bind", "to", "the", "signature", "then", "ensure", "that", "all", "arguments", "match", "the", "parameter", "types", "." ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L34-L47
[ "def", "_bind_args", "(", "sig", ",", "param_matchers", ",", "args", ",", "kwargs", ")", ":", "#Bind to signature. May throw its own TypeError", "bound", "=", "sig", ".", "bind", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "not", "all", "(", "param_matcher", "(", "bound", ".", "arguments", "[", "param_name", "]", ")", "for", "param_name", ",", "param_matcher", "in", "param_matchers", ")", ":", "raise", "TypeError", "return", "bound" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
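The first half of _bind_args is Signature.bind, which already raises TypeError when the call shape is wrong; a quick runnable check of that behaviour:

    from inspect import signature

    def f(a, b, c=0):
        pass

    sig = signature(f)
    bound = sig.bind(1, 2)   # fine: c falls back to its default
    print(bound.arguments)   # {'a': 1, 'b': 2} (an OrderedDict on older Pythons)

    try:
        sig.bind(1)          # too few arguments
    except TypeError as exc:
        print('bind failed:', exc)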
valid
DispatchGroup._make_param_matcher
For a given annotation, return a function which, when called on a function argument, returns true if that argument matches the annotation. If the annotation is a type, it calls isinstance; if it's a callable, it calls it on the object; otherwise, it performs a value comparison. If the parameter is variadic (*args) and the annotation is a type, the matcher will attempt to match each of the arguments in args
dispatching.py
def _make_param_matcher(annotation, kind=None): ''' For a given annotation, return a function which, when called on a function argument, returns true if that argument matches the annotation. If the annotation is a type, it calls isinstance; if it's a callable, it calls it on the object; otherwise, it performs a value comparison. If the parameter is variadic (*args) and the annotation is a type, the matcher will attempt to match each of the arguments in args ''' if isinstance(annotation, type) or ( isinstance(annotation, tuple) and all(isinstance(a, type) for a in annotation)): if kind is Parameter.VAR_POSITIONAL: return (lambda args: all(isinstance(x, annotation) for x in args)) else: return (lambda x: isinstance(x, annotation)) elif callable(annotation): return annotation else: return (lambda x: x == annotation)
def _make_param_matcher(annotation, kind=None): ''' For a given annotation, return a function which, when called on a function argument, returns true if that argument matches the annotation. If the annotation is a type, it calls isinstance; if it's a callable, it calls it on the object; otherwise, it performs a value comparison. If the parameter is variadic (*args) and the annotation is a type, the matcher will attempt to match each of the arguments in args ''' if isinstance(annotation, type) or ( isinstance(annotation, tuple) and all(isinstance(a, type) for a in annotation)): if kind is Parameter.VAR_POSITIONAL: return (lambda args: all(isinstance(x, annotation) for x in args)) else: return (lambda x: isinstance(x, annotation)) elif callable(annotation): return annotation else: return (lambda x: x == annotation)
[ "For", "a", "given", "annotation", "return", "a", "function", "which", "when", "called", "on", "a", "function", "argument", "returns", "true", "if", "that", "argument", "matches", "the", "annotation", ".", "If", "the", "annotation", "is", "a", "type", "it", "calls", "isinstance", ";", "if", "it", "s", "a", "callable", "it", "calls", "it", "on", "the", "object", ";", "otherwise", "it", "performs", "a", "value", "comparison", ".", "If", "the", "parameter", "is", "variadic", "(", "*", "args", ")", "and", "the", "annotation", "is", "a", "type", "the", "matcher", "will", "attempt", "to", "match", "each", "of", "the", "arguments", "in", "args" ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L50-L69
[ "def", "_make_param_matcher", "(", "annotation", ",", "kind", "=", "None", ")", ":", "if", "isinstance", "(", "annotation", ",", "type", ")", "or", "(", "isinstance", "(", "annotation", ",", "tuple", ")", "and", "all", "(", "isinstance", "(", "a", ",", "type", ")", "for", "a", "in", "annotation", ")", ")", ":", "if", "kind", "is", "Parameter", ".", "VAR_POSITIONAL", ":", "return", "(", "lambda", "args", ":", "all", "(", "isinstance", "(", "x", ",", "annotation", ")", "for", "x", "in", "args", ")", ")", "else", ":", "return", "(", "lambda", "x", ":", "isinstance", "(", "x", ",", "annotation", ")", ")", "elif", "callable", "(", "annotation", ")", ":", "return", "annotation", "else", ":", "return", "(", "lambda", "x", ":", "x", "==", "annotation", ")" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
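The three simple matcher shapes (type, arbitrary predicate, literal value) can be demonstrated directly; this sketch mirrors those branches of _make_param_matcher, omitting the variadic and tuple-of-types cases:

    def make_matcher(annotation):
        if isinstance(annotation, type):
            return lambda x: isinstance(x, annotation)
        elif callable(annotation):
            return annotation
        else:
            return lambda x: x == annotation

    print(make_matcher(int)(5))              # True  (type -> isinstance)
    print(make_matcher(lambda x: x > 0)(5))  # True  (callable -> predicate)
    print(make_matcher('yes')('yes'))        # True  (anything else -> equality)
    print(make_matcher(int)('5'))            # False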
valid
DispatchGroup._make_all_matchers
For every parameter, create a matcher if the parameter has an annotation.
dispatching.py
def _make_all_matchers(cls, parameters): ''' For every parameter, create a matcher if the parameter has an annotation. ''' for name, param in parameters: annotation = param.annotation if annotation is not Parameter.empty: yield name, cls._make_param_matcher(annotation, param.kind)
def _make_all_matchers(cls, parameters): ''' For every parameter, create a matcher if the parameter has an annotation. ''' for name, param in parameters: annotation = param.annotation if annotation is not Parameter.empty: yield name, cls._make_param_matcher(annotation, param.kind)
[ "For", "every", "parameter", "create", "a", "matcher", "if", "the", "parameter", "has", "an", "annotation", "." ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L72-L80
[ "def", "_make_all_matchers", "(", "cls", ",", "parameters", ")", ":", "for", "name", ",", "param", "in", "parameters", ":", "annotation", "=", "param", ".", "annotation", "if", "annotation", "is", "not", "Parameter", ".", "empty", ":", "yield", "name", ",", "cls", ".", "_make_param_matcher", "(", "annotation", ",", "param", ".", "kind", ")" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
valid
DispatchGroup._make_dispatch
Create a dispatch pair for func- a tuple of (bind_args, func), where bind_args is a function that, when called with (args, kwargs), attempts to bind those args to the type signature of func, or else raise a TypeError
dispatching.py
def _make_dispatch(cls, func): ''' Create a dispatch pair for func- a tuple of (bind_args, func), where bind_args is a function that, when called with (args, kwargs), attempts to bind those args to the type signature of func, or else raise a TypeError ''' sig = signature(func) matchers = tuple(cls._make_all_matchers(sig.parameters.items())) return (partial(cls._bind_args, sig, matchers), func)
def _make_dispatch(cls, func): ''' Create a dispatch pair for func- a tuple of (bind_args, func), where bind_args is a function that, when called with (args, kwargs), attempts to bind those args to the type signature of func, or else raise a TypeError ''' sig = signature(func) matchers = tuple(cls._make_all_matchers(sig.parameters.items())) return (partial(cls._bind_args, sig, matchers), func)
[ "Create", "a", "dispatch", "pair", "for", "func", "-", "a", "tuple", "of", "(", "bind_args", "func", ")", "where", "bind_args", "is", "a", "function", "that", "when", "called", "with", "(", "args", "kwargs", ")", "attempts", "to", "bind", "those", "args", "to", "the", "type", "signature", "of", "func", "or", "else", "raise", "a", "TypeError" ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L83-L92
[ "def", "_make_dispatch", "(", "cls", ",", "func", ")", ":", "sig", "=", "signature", "(", "func", ")", "matchers", "=", "tuple", "(", "cls", ".", "_make_all_matchers", "(", "sig", ".", "parameters", ".", "items", "(", ")", ")", ")", "return", "(", "partial", "(", "cls", ".", "_bind_args", ",", "sig", ",", "matchers", ")", ",", "func", ")" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
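Putting the two halves together, a compact runnable sketch of the dispatch-pair idea: bind the call to the signature, then run each annotated parameter through its matcher (simplified here to isinstance-only matchers):

    from functools import partial
    from inspect import signature, Parameter

    def bind_args(sig, matchers, args, kwargs):
        bound = sig.bind(*args, **kwargs)
        if not all(match(bound.arguments[name]) for name, match in matchers):
            raise TypeError
        return bound

    def make_dispatch(func):
        sig = signature(func)
        matchers = tuple(
            (name, (lambda t: lambda x: isinstance(x, t))(p.annotation))
            for name, p in sig.parameters.items()
            if p.annotation is not Parameter.empty
        )
        return (partial(bind_args, sig, matchers), func)

    def greet(name: str, times: int):
        return ' '.join(['hi %s' % name] * times)

    binder, target = make_dispatch(greet)
    binder(('world', 2), {})   # binds cleanly; raises TypeError otherwise
    print(target('world', 2))  # hi world hi world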
valid
DispatchGroup._make_wrapper
Makes a wrapper function that executes a dispatch call for func. The wrapper has the dispatch and dispatch_first attributes, so that additional overloads can be added to the group.
dispatching.py
def _make_wrapper(self, func): ''' Makes a wrapper function that executes a dispatch call for func. The wrapper has the dispatch and dispatch_first attributes, so that additional overloads can be added to the group. ''' #TODO: consider using a class to make attribute forwarding easier. #TODO: consider using simply another DispatchGroup, with self.callees # assigned by reference to the original callees. @wraps(func) def executor(*args, **kwargs): return self.execute(args, kwargs) executor.dispatch = self.dispatch executor.dispatch_first = self.dispatch_first executor.func = func executor.lookup = self.lookup return executor
def _make_wrapper(self, func): ''' Makes a wrapper function that executes a dispatch call for func. The wrapper has the dispatch and dispatch_first attributes, so that additional overloads can be added to the group. ''' #TODO: consider using a class to make attribute forwarding easier. #TODO: consider using simply another DispatchGroup, with self.callees # assigned by reference to the original callees. @wraps(func) def executor(*args, **kwargs): return self.execute(args, kwargs) executor.dispatch = self.dispatch executor.dispatch_first = self.dispatch_first executor.func = func executor.lookup = self.lookup return executor
[ "Makes", "a", "wrapper", "function", "that", "executes", "a", "dispatch", "call", "for", "func", ".", "The", "wrapper", "has", "the", "dispatch", "and", "dispatch_first", "attributes", "so", "that", "additional", "overloads", "can", "be", "added", "to", "the", "group", "." ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L94-L111
[ "def", "_make_wrapper", "(", "self", ",", "func", ")", ":", "#TODO: consider using a class to make attribute forwarding easier.", "#TODO: consider using simply another DispatchGroup, with self.callees", "# assigned by reference to the original callees.", "@", "wraps", "(", "func", ")", "def", "executor", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "execute", "(", "args", ",", "kwargs", ")", "executor", ".", "dispatch", "=", "self", ".", "dispatch", "executor", ".", "dispatch_first", "=", "self", ".", "dispatch_first", "executor", ".", "func", "=", "func", "executor", ".", "lookup", "=", "self", ".", "lookup", "return", "executor" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
valid
DispatchGroup.dispatch
Adds the decorated function to this dispatch.
dispatching.py
def dispatch(self, func): ''' Adds the decorated function to this dispatch. ''' self.callees.append(self._make_dispatch(func)) return self._make_wrapper(func)
def dispatch(self, func): ''' Adds the decorated function to this dispatch. ''' self.callees.append(self._make_dispatch(func)) return self._make_wrapper(func)
[ "Adds", "the", "decorated", "function", "to", "this", "dispatch", "." ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L113-L118
[ "def", "dispatch", "(", "self", ",", "func", ")", ":", "self", ".", "callees", ".", "append", "(", "self", ".", "_make_dispatch", "(", "func", ")", ")", "return", "self", ".", "_make_wrapper", "(", "func", ")" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
valid
DispatchGroup.dispatch_first
Adds the decorated function to this dispatch, at the FRONT of the order. Useful for allowing third parties to add overloaded functionality to be executed before default functionality.
dispatching.py
def dispatch_first(self, func): ''' Adds the decorated function to this dispatch, at the FRONT of the order. Useful for allowing third parties to add overloaded functionality to be executed before default functionality. ''' self.callees.appendleft(self._make_dispatch(func)) return self._make_wrapper(func)
def dispatch_first(self, func): ''' Adds the decorated function to this dispatch, at the FRONT of the order. Useful for allowing third parties to add overloaded functionality to be executed before default functionality. ''' self.callees.appendleft(self._make_dispatch(func)) return self._make_wrapper(func)
[ "Adds", "the", "decorated", "function", "to", "this", "dispatch", "at", "the", "FRONT", "of", "the", "order", ".", "Useful", "for", "allowing", "third", "parties", "to", "add", "overloaded", "functionality", "to", "be", "executed", "before", "default", "functionality", "." ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L120-L127
[ "def", "dispatch_first", "(", "self", ",", "func", ")", ":", "self", ".", "callees", ".", "appendleft", "(", "self", ".", "_make_dispatch", "(", "func", ")", ")", "return", "self", ".", "_make_wrapper", "(", "func", ")" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
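Typical use of the group, assuming the dispatching module from this repo is importable; the overload bodies themselves are made up:

    from dispatching import DispatchGroup  # assumes Lucretiel/Dispatch is installed

    group = DispatchGroup()

    @group.dispatch
    def describe(x: int):
        return 'int: %d' % x

    @group.dispatch
    def describe(x: str):
        return 'str: %s' % x

    print(describe(3))        # int: 3
    print(describe('three'))  # str: three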
valid
DispatchGroup.lookup_explicit
Lookup the function that will be called with a given set of arguments, or raise DispatchError. Requires explicit tuple/dict grouping of arguments (see DispatchGroup.lookup for a function-like interface).
dispatching.py
def lookup_explicit(self, args, kwargs): ''' Lookup the function that will be called with a given set of arguments, or raise DispatchError. Requires explicit tuple/dict grouping of arguments (see DispatchGroup.lookup for a function-like interface). ''' for bind_args, callee in self.callees: try: #bind to the signature and types. Raises TypeError on failure bind_args(args, kwargs) except TypeError: #TypeError: failed to bind arguments. Try the next dispatch continue #All the parameters matched. Return the function and args return callee else: #Nothing was able to bind. Error. raise DispatchError(args, kwargs, self)
def lookup_explicit(self, args, kwargs): ''' Lookup the function that will be called with a given set of arguments, or raise DispatchError. Requires explicit tuple/dict grouping of arguments (see DispatchGroup.lookup for a function-like interface). ''' for bind_args, callee in self.callees: try: #bind to the signature and types. Raises TypeError on failure bind_args(args, kwargs) except TypeError: #TypeError: failed to bind arguments. Try the next dispatch continue #All the parameters matched. Return the function and args return callee else: #Nothing was able to bind. Error. raise DispatchError(args, kwargs, self)
[ "Lookup", "the", "function", "that", "will", "be", "called", "with", "a", "given", "set", "of", "arguments", "or", "raise", "DispatchError", ".", "Requires", "explicit", "tuple", "/", "dict", "grouping", "of", "arguments", "(", "see", "DispatchGroup", ".", "lookup", "for", "a", "function", "-", "like", "interface", ")", "." ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L129-L148
[ "def", "lookup_explicit", "(", "self", ",", "args", ",", "kwargs", ")", ":", "for", "bind_args", ",", "callee", "in", "self", ".", "callees", ":", "try", ":", "#bind to the signature and types. Raises TypeError on failure", "bind_args", "(", "args", ",", "kwargs", ")", "except", "TypeError", ":", "#TypeError: failed to bind arguments. Try the next dispatch", "continue", "#All the parameters matched. Return the function and args", "return", "callee", "else", ":", "#Nothing was able to bind. Error.", "raise", "DispatchError", "(", "args", ",", "kwargs", ",", "self", ")" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
valid
DispatchGroup.execute
Dispatch a call. Call the first function whose type signature matches the arguments.
dispatching.py
def execute(self, args, kwargs):
    '''
    Dispatch a call. Call the first function whose type signature matches
    the arguments.
    '''
    return self.lookup_explicit(args, kwargs)(*args, **kwargs)
def execute(self, args, kwargs):
    '''
    Dispatch a call. Call the first function whose type signature matches
    the arguments.
    '''
    return self.lookup_explicit(args, kwargs)(*args, **kwargs)
[ "Dispatch", "a", "call", ".", "Call", "the", "first", "function", "whose", "type", "signature", "matches", "the", "arguemts", "." ]
Lucretiel/Dispatch
python
https://github.com/Lucretiel/Dispatch/blob/dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4/dispatching.py#L157-L162
[ "def", "execute", "(", "self", ",", "args", ",", "kwargs", ")", ":", "return", "self", ".", "lookup_explicit", "(", "args", ",", "kwargs", ")", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
dffbce6bacb4370c4ecd11652e5ba8a6aaf2b5b4
valid
TableLogger.setup_formatters
Setup formatters by observing the first row. Args: *args: row cells
table_logger/table_logger.py
def setup_formatters(self, *args): """Setup formatters by observing the first row. Args: *args: row cells """ formatters = [] col_offset = 0 # initialize formatters for row-id, timestamp and time-diff columns if self.rownum: formatters.append(fmt.RowNumberFormatter.setup(0)) col_offset += 1 if self.timestamp: formatters.append(fmt.DatetimeFormatter.setup( datetime.datetime.now(), fmt='{:%Y-%m-%d %H:%M:%S.%f}'.format, col_width=26)) col_offset += 1 if self.time_diff: formatters.append(fmt.TimeDeltaFormatter.setup(0)) col_offset += 1 # initialize formatters for user-defined columns for coli, value in enumerate(args): fmt_class = type2fmt.get(type(value), fmt.GenericFormatter) kwargs = {} # set column width if self.default_colwidth is not None: kwargs['col_width'] = self.default_colwidth if coli in self.column_widths: kwargs['col_width'] = self.column_widths[coli] elif self.columns and self.columns[coli + col_offset] in self.column_widths: kwargs['col_width'] = self.column_widths[self.columns[coli + col_offset]] # set formatter function if fmt_class == fmt.FloatFormatter and self.float_format is not None: kwargs['fmt'] = self.float_format if coli in self.column_formatters: kwargs['fmt'] = self.column_formatters[coli] elif self.columns and self.columns[coli + col_offset] in self.column_formatters: kwargs['fmt'] = self.column_formatters[self.columns[coli + col_offset]] formatter = fmt_class.setup(value, **kwargs) formatters.append(formatter) self.formatters = formatters
def setup_formatters(self, *args): """Setup formatters by observing the first row. Args: *args: row cells """ formatters = [] col_offset = 0 # initialize formatters for row-id, timestamp and time-diff columns if self.rownum: formatters.append(fmt.RowNumberFormatter.setup(0)) col_offset += 1 if self.timestamp: formatters.append(fmt.DatetimeFormatter.setup( datetime.datetime.now(), fmt='{:%Y-%m-%d %H:%M:%S.%f}'.format, col_width=26)) col_offset += 1 if self.time_diff: formatters.append(fmt.TimeDeltaFormatter.setup(0)) col_offset += 1 # initialize formatters for user-defined columns for coli, value in enumerate(args): fmt_class = type2fmt.get(type(value), fmt.GenericFormatter) kwargs = {} # set column width if self.default_colwidth is not None: kwargs['col_width'] = self.default_colwidth if coli in self.column_widths: kwargs['col_width'] = self.column_widths[coli] elif self.columns and self.columns[coli + col_offset] in self.column_widths: kwargs['col_width'] = self.column_widths[self.columns[coli + col_offset]] # set formatter function if fmt_class == fmt.FloatFormatter and self.float_format is not None: kwargs['fmt'] = self.float_format if coli in self.column_formatters: kwargs['fmt'] = self.column_formatters[coli] elif self.columns and self.columns[coli + col_offset] in self.column_formatters: kwargs['fmt'] = self.column_formatters[self.columns[coli + col_offset]] formatter = fmt_class.setup(value, **kwargs) formatters.append(formatter) self.formatters = formatters
[ "Setup", "formatters", "by", "observing", "the", "first", "row", ".", "Args", ":", "*", "args", ":", "row", "cells" ]
AleksTk/table-logger
python
https://github.com/AleksTk/table-logger/blob/d2326e053fb972ed7ae4950d0e8c6e7c9f4399b8/table_logger/table_logger.py#L208-L254
[ "def", "setup_formatters", "(", "self", ",", "*", "args", ")", ":", "formatters", "=", "[", "]", "col_offset", "=", "0", "# initialize formatters for row-id, timestamp and time-diff columns", "if", "self", ".", "rownum", ":", "formatters", ".", "append", "(", "fmt", ".", "RowNumberFormatter", ".", "setup", "(", "0", ")", ")", "col_offset", "+=", "1", "if", "self", ".", "timestamp", ":", "formatters", ".", "append", "(", "fmt", ".", "DatetimeFormatter", ".", "setup", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ",", "fmt", "=", "'{:%Y-%m-%d %H:%M:%S.%f}'", ".", "format", ",", "col_width", "=", "26", ")", ")", "col_offset", "+=", "1", "if", "self", ".", "time_diff", ":", "formatters", ".", "append", "(", "fmt", ".", "TimeDeltaFormatter", ".", "setup", "(", "0", ")", ")", "col_offset", "+=", "1", "# initialize formatters for user-defined columns", "for", "coli", ",", "value", "in", "enumerate", "(", "args", ")", ":", "fmt_class", "=", "type2fmt", ".", "get", "(", "type", "(", "value", ")", ",", "fmt", ".", "GenericFormatter", ")", "kwargs", "=", "{", "}", "# set column width", "if", "self", ".", "default_colwidth", "is", "not", "None", ":", "kwargs", "[", "'col_width'", "]", "=", "self", ".", "default_colwidth", "if", "coli", "in", "self", ".", "column_widths", ":", "kwargs", "[", "'col_width'", "]", "=", "self", ".", "column_widths", "[", "coli", "]", "elif", "self", ".", "columns", "and", "self", ".", "columns", "[", "coli", "+", "col_offset", "]", "in", "self", ".", "column_widths", ":", "kwargs", "[", "'col_width'", "]", "=", "self", ".", "column_widths", "[", "self", ".", "columns", "[", "coli", "+", "col_offset", "]", "]", "# set formatter function", "if", "fmt_class", "==", "fmt", ".", "FloatFormatter", "and", "self", ".", "float_format", "is", "not", "None", ":", "kwargs", "[", "'fmt'", "]", "=", "self", ".", "float_format", "if", "coli", "in", "self", ".", "column_formatters", ":", "kwargs", "[", "'fmt'", "]", "=", "self", ".", "column_formatters", "[", "coli", "]", "elif", "self", ".", "columns", "and", "self", ".", "columns", "[", "coli", "+", "col_offset", "]", "in", "self", ".", "column_formatters", ":", "kwargs", "[", "'fmt'", "]", "=", "self", ".", "column_formatters", "[", "self", ".", "columns", "[", "coli", "+", "col_offset", "]", "]", "formatter", "=", "fmt_class", ".", "setup", "(", "value", ",", "*", "*", "kwargs", ")", "formatters", ".", "append", "(", "formatter", ")", "self", ".", "formatters", "=", "formatters" ]
d2326e053fb972ed7ae4950d0e8c6e7c9f4399b8
valid
TableLogger.setup
Do preparations before printing the first row Args: *args: first row cells
table_logger/table_logger.py
def setup(self, *args): """Do preparations before printing the first row Args: *args: first row cells """ self.setup_formatters(*args) if self.columns: self.print_header() elif self.border and not self.csv: self.print_line(self.make_horizontal_border())
def setup(self, *args): """Do preparations before printing the first row Args: *args: first row cells """ self.setup_formatters(*args) if self.columns: self.print_header() elif self.border and not self.csv: self.print_line(self.make_horizontal_border())
[ "Do", "preparations", "before", "printing", "the", "first", "row", "Args", ":", "*", "args", ":", "first", "row", "cells" ]
AleksTk/table-logger
python
https://github.com/AleksTk/table-logger/blob/d2326e053fb972ed7ae4950d0e8c6e7c9f4399b8/table_logger/table_logger.py#L256-L266
[ "def", "setup", "(", "self", ",", "*", "args", ")", ":", "self", ".", "setup_formatters", "(", "*", "args", ")", "if", "self", ".", "columns", ":", "self", ".", "print_header", "(", ")", "elif", "self", ".", "border", "and", "not", "self", ".", "csv", ":", "self", ".", "print_line", "(", "self", ".", "make_horizontal_border", "(", ")", ")" ]
d2326e053fb972ed7ae4950d0e8c6e7c9f4399b8
valid
TableLogger.csv_format
Converts row values into a csv line Args: row: a list of row cells as unicode Returns: csv_line (unicode)
table_logger/table_logger.py
def csv_format(self, row): """Converts row values into a csv line Args: row: a list of row cells as unicode Returns: csv_line (unicode) """ if PY2: buf = io.BytesIO() csvwriter = csv.writer(buf) csvwriter.writerow([c.strip().encode(self.encoding) for c in row]) csv_line = buf.getvalue().decode(self.encoding).rstrip() else: buf = io.StringIO() csvwriter = csv.writer(buf) csvwriter.writerow([c.strip() for c in row]) csv_line = buf.getvalue().rstrip() return csv_line
def csv_format(self, row): """Converts row values into a csv line Args: row: a list of row cells as unicode Returns: csv_line (unicode) """ if PY2: buf = io.BytesIO() csvwriter = csv.writer(buf) csvwriter.writerow([c.strip().encode(self.encoding) for c in row]) csv_line = buf.getvalue().decode(self.encoding).rstrip() else: buf = io.StringIO() csvwriter = csv.writer(buf) csvwriter.writerow([c.strip() for c in row]) csv_line = buf.getvalue().rstrip() return csv_line
[ "Converts", "row", "values", "into", "a", "csv", "line", "Args", ":", "row", ":", "a", "list", "of", "row", "cells", "as", "unicode", "Returns", ":", "csv_line", "(", "unicode", ")" ]
AleksTk/table-logger
python
https://github.com/AleksTk/table-logger/blob/d2326e053fb972ed7ae4950d0e8c6e7c9f4399b8/table_logger/table_logger.py#L306-L324
[ "def", "csv_format", "(", "self", ",", "row", ")", ":", "if", "PY2", ":", "buf", "=", "io", ".", "BytesIO", "(", ")", "csvwriter", "=", "csv", ".", "writer", "(", "buf", ")", "csvwriter", ".", "writerow", "(", "[", "c", ".", "strip", "(", ")", ".", "encode", "(", "self", ".", "encoding", ")", "for", "c", "in", "row", "]", ")", "csv_line", "=", "buf", ".", "getvalue", "(", ")", ".", "decode", "(", "self", ".", "encoding", ")", ".", "rstrip", "(", ")", "else", ":", "buf", "=", "io", ".", "StringIO", "(", ")", "csvwriter", "=", "csv", ".", "writer", "(", "buf", ")", "csvwriter", ".", "writerow", "(", "[", "c", ".", "strip", "(", ")", "for", "c", "in", "row", "]", ")", "csv_line", "=", "buf", ".", "getvalue", "(", ")", ".", "rstrip", "(", ")", "return", "csv_line" ]
d2326e053fb972ed7ae4950d0e8c6e7c9f4399b8
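The Python 3 branch writes the row through the csv module into an in-memory buffer and strips the terminator; that round-trip is easy to see in isolation:

    import csv
    import io

    def csv_line(row):
        buf = io.StringIO()
        csv.writer(buf).writerow([c.strip() for c in row])
        # csv adds a line terminator; strip it to get a bare line.
        return buf.getvalue().rstrip()

    print(csv_line(['  alice ', 'says, "hi"']))
    # alice,"says, ""hi"""   (quoting and quote-doubling handled by csv)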
valid
convertShpToExtend
Reprojects to WGS84 and retrieves the extent.
python/utils.py
def convertShpToExtend(pathToShp):
    """
    Reprojects to WGS84 and retrieves the extent.
    """
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(pathToShp)
    if dataset is not None:
        # from Layer
        layer = dataset.GetLayer()
        spatialRef = layer.GetSpatialRef()

        # from Geometry
        feature = layer.GetNextFeature()
        geom = feature.GetGeometryRef()
        spatialRef = geom.GetSpatialReference()

        #WGS84
        outSpatialRef = osr.SpatialReference()
        outSpatialRef.ImportFromEPSG(4326)

        coordTrans = osr.CoordinateTransformation(spatialRef, outSpatialRef)

        env = geom.GetEnvelope()

        pointMAX = ogr.Geometry(ogr.wkbPoint)
        pointMAX.AddPoint(env[1], env[3])
        pointMAX.Transform(coordTrans)

        pointMIN = ogr.Geometry(ogr.wkbPoint)
        pointMIN.AddPoint(env[0], env[2])
        pointMIN.Transform(coordTrans)

        return [pointMAX.GetPoint()[1],pointMIN.GetPoint()[0],pointMIN.GetPoint()[1],pointMAX.GetPoint()[0]]
    else:
        exit(" shapefile not found. Please verify your path to the shapefile")
def convertShpToExtend(pathToShp):
    """
    Reprojects to WGS84 and retrieves the extent.
    """
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(pathToShp)
    if dataset is not None:
        # from Layer
        layer = dataset.GetLayer()
        spatialRef = layer.GetSpatialRef()

        # from Geometry
        feature = layer.GetNextFeature()
        geom = feature.GetGeometryRef()
        spatialRef = geom.GetSpatialReference()

        #WGS84
        outSpatialRef = osr.SpatialReference()
        outSpatialRef.ImportFromEPSG(4326)

        coordTrans = osr.CoordinateTransformation(spatialRef, outSpatialRef)

        env = geom.GetEnvelope()

        pointMAX = ogr.Geometry(ogr.wkbPoint)
        pointMAX.AddPoint(env[1], env[3])
        pointMAX.Transform(coordTrans)

        pointMIN = ogr.Geometry(ogr.wkbPoint)
        pointMIN.AddPoint(env[0], env[2])
        pointMIN.Transform(coordTrans)

        return [pointMAX.GetPoint()[1],pointMIN.GetPoint()[0],pointMIN.GetPoint()[1],pointMAX.GetPoint()[0]]
    else:
        exit(" shapefile not found. Please verify your path to the shapefile")
[ "reprojette", "en", "WGS84", "et", "recupere", "l", "extend" ]
yoannMoreau/gfsDownload
python
https://github.com/yoannMoreau/gfsDownload/blob/56e91a5dffb536596a80d2d614ebe858d9ab5ab7/python/utils.py#L75-L110
[ "def", "convertShpToExtend", "(", "pathToShp", ")", ":", "driver", "=", "ogr", ".", "GetDriverByName", "(", "'ESRI Shapefile'", ")", "dataset", "=", "driver", ".", "Open", "(", "pathToShp", ")", "if", "dataset", "is", "not", "None", ":", "# from Layer", "layer", "=", "dataset", ".", "GetLayer", "(", ")", "spatialRef", "=", "layer", ".", "GetSpatialRef", "(", ")", "# from Geometry", "feature", "=", "layer", ".", "GetNextFeature", "(", ")", "geom", "=", "feature", ".", "GetGeometryRef", "(", ")", "spatialRef", "=", "geom", ".", "GetSpatialReference", "(", ")", "#WGS84", "outSpatialRef", "=", "osr", ".", "SpatialReference", "(", ")", "outSpatialRef", ".", "ImportFromEPSG", "(", "4326", ")", "coordTrans", "=", "osr", ".", "CoordinateTransformation", "(", "spatialRef", ",", "outSpatialRef", ")", "env", "=", "geom", ".", "GetEnvelope", "(", ")", "pointMAX", "=", "ogr", ".", "Geometry", "(", "ogr", ".", "wkbPoint", ")", "pointMAX", ".", "AddPoint", "(", "env", "[", "1", "]", ",", "env", "[", "3", "]", ")", "pointMAX", ".", "Transform", "(", "coordTrans", ")", "pointMIN", "=", "ogr", ".", "Geometry", "(", "ogr", ".", "wkbPoint", ")", "pointMIN", ".", "AddPoint", "(", "env", "[", "0", "]", ",", "env", "[", "2", "]", ")", "pointMIN", ".", "Transform", "(", "coordTrans", ")", "return", "[", "pointMAX", ".", "GetPoint", "(", ")", "[", "1", "]", ",", "pointMIN", ".", "GetPoint", "(", ")", "[", "0", "]", ",", "pointMIN", ".", "GetPoint", "(", ")", "[", "1", "]", ",", "pointMAX", ".", "GetPoint", "(", ")", "[", "0", "]", "]", "else", ":", "exit", "(", "\" shapefile not found. Please verify your path to the shapefile\"", ")" ]
56e91a5dffb536596a80d2d614ebe858d9ab5ab7
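A minimal usage sketch for the record above (not part of the dataset): the shapefile path is a placeholder, and the import assumes the repo's python/utils.py is importable as utils with GDAL's Python bindings installed.

from utils import convertShpToExtend  # assumed import of the repo's python/utils.py

# 'data/zone.shp' is a hypothetical path.
extent = convertShpToExtend('data/zone.shp')
# extent comes back as [N, W, S, E] in WGS84 degrees -- the coord format
# expected by create_request_gfs in the next record.
print(extent)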
valid
create_request_gfs
Generates the request structure for downloading GFS data
INPUTS:\n
-dateStart / dateEnd : date range, at most 14 days back from today\n
-stepList : list of GFS run hours\n
-levelList : list of level names\n
-grid : grid size in degrees\n
-extent : coordinates as [N,W,S,E]\n
-paramList : list of GRIB parameter names\n
-typeData : analyse, forecast or cycleforecast\n
python/utils.py
def create_request_gfs(dateStart, dateEnd, stepList, levelList, grid, extent, paramList, typeData):
    """
        Generates the request structure for downloading GFS data
        INPUTS:\n
        -dateStart / dateEnd : date range, at most 14 days back from today\n
        -stepList : list of GFS run hours\n
        -levelList : list of level names\n
        -grid : grid size in degrees\n
        -extent : coordinates as [N,W,S,E]\n
        -paramList : list of GRIB parameter names\n
        -typeData : analyse, forecast or cycleforecast\n
    """

    URLlist = []

    #Control data type
    listForcastSurface = ['GUST', 'HINDEX', 'PRES', 'HGT', 'TMP', 'WEASD', 'SNOD', 'CPOFP', 'WILT', 'FLDCP', 'SUNSD', 'LFTX', 'CAPE', 'CIN', '4LFTX', 'HPBL', 'LAND']
    if (0 not in [int(x) for x in stepList]):
        listForcastSurface = listForcastSurface + ['PEVPR', 'CPRAT', 'PRATE', 'APCP', 'ACPCP', 'WATR', 'CSNOW', 'CICEP', 'CFPER', 'CRAIN', 'LHTFL', 'SHTFL', 'GFLUX', 'UFLX', 'VFLX', 'U-GWD', 'V-GWD', 'DSWRF', 'DLWRF', 'ULWRF', 'USWRF', 'ALBDO']
    listAnalyseSurface = ['HGT', 'PRES', 'LFTX', 'CAPE', 'CIN', '4LFTX']

    if typeData == 'analyse' and all([x in listAnalyseSurface for x in paramList]):
        typeData = 'analyse'
        validChoice = None
        prbParameters = None
    else:
        if all([x in listForcastSurface for x in paramList]) and typeData != 'cycleforecast':
            if typeData == 'analyse':
                typeData = 'forecast'
                validChoice = typeData
            else:
                validChoice = None
            indexParameters = [i for i, elem in enumerate([x in listAnalyseSurface for x in paramList], 1) if not elem]
            prbParameters = []
            for i in indexParameters:
                prbParameters.append(paramList[i - 1])
        else:
            if typeData != 'cycleforecast':
                typeData = 'cycleforecast'
                validChoice = typeData
            else:
                validChoice = None
            indexParameters = [i for i, elem in enumerate([x in listAnalyseSurface for x in paramList], 1) if not elem]
            prbParameters = []
            for i in indexParameters:
                prbParameters.append(paramList[i - 1])

    #Check that date/timeList are available
    today = date.today()
    lastData = today - timedelta(days=14)
    if dateStart < lastData or dateEnd > today:
        exit('dates are not within the 14-day range from today')
    else:
        #For each requested day
        nbDays = (dateEnd - dateStart).days + 1
        for i in range(0, nbDays):
            #filter the timeList for today
            if dateStart + timedelta(days=i) == today:
                maxT = datetime.now().hour - 5
                timeListCorr = [x for x in stepList if x < maxT]
            else:
                timeListCorr = stepList

            for t in timeListCorr:
                URL = 'http://nomads.ncep.noaa.gov/cgi-bin/filter_gfs_'
                #grid
                URL = URL + "{:.2f}".format(grid).replace('.', 'p') + '.pl?file=gfs.'
                #run hour (mind the publication delay for today)
                URL = URL + 't' + str(t).zfill(2) + 'z.'
                if (grid == 0.5):
                    URL = URL + 'pgrb2full.'
                else:
                    URL = URL + 'pgrb2.'
                URL = URL + "{:.2f}".format(grid).replace('.', 'p') + '.'
                if typeData == 'cycleforecast':
                    URL = URL + 'f006&lev_'
                elif typeData == 'forecast':
                    URL = URL + 'f000&lev_'
                else:
                    URL = URL + 'anl&lev_'
                URL = URL + "=on&lev_".join(levelList) + "=on&var_"
                URL = URL + "=on&var_".join(paramList) + "=on&subregion=&"
                URL = URL + "leftlon=" + str(round(float(extent[1]) - 0.05, 1)) + "&rightlon=" + str(round(float(extent[3]) + 0.05, 1)) + "&toplat=" + str(round(float(extent[0]) + 0.5, 1)) + "&bottomlat=" + str(round(float(extent[2]) - 0.5, 1))
                URL = URL + "&dir=%2Fgfs." + "{:%Y%m%d}".format(dateStart + timedelta(days=i)) + str(t).zfill(2)
                URLlist.append(URL)

        return (URLlist, validChoice, prbParameters)

def create_request_gfs(dateStart, dateEnd, stepList, levelList, grid, extent, paramList, typeData):
    """
        Generates the request structure for downloading GFS data
        INPUTS:\n
        -dateStart / dateEnd : date range, at most 14 days back from today\n
        -stepList : list of GFS run hours\n
        -levelList : list of level names\n
        -grid : grid size in degrees\n
        -extent : coordinates as [N,W,S,E]\n
        -paramList : list of GRIB parameter names\n
        -typeData : analyse, forecast or cycleforecast\n
    """

    URLlist = []

    #Control data type
    listForcastSurface = ['GUST', 'HINDEX', 'PRES', 'HGT', 'TMP', 'WEASD', 'SNOD', 'CPOFP', 'WILT', 'FLDCP', 'SUNSD', 'LFTX', 'CAPE', 'CIN', '4LFTX', 'HPBL', 'LAND']
    if (0 not in [int(x) for x in stepList]):
        listForcastSurface = listForcastSurface + ['PEVPR', 'CPRAT', 'PRATE', 'APCP', 'ACPCP', 'WATR', 'CSNOW', 'CICEP', 'CFPER', 'CRAIN', 'LHTFL', 'SHTFL', 'GFLUX', 'UFLX', 'VFLX', 'U-GWD', 'V-GWD', 'DSWRF', 'DLWRF', 'ULWRF', 'USWRF', 'ALBDO']
    listAnalyseSurface = ['HGT', 'PRES', 'LFTX', 'CAPE', 'CIN', '4LFTX']

    if typeData == 'analyse' and all([x in listAnalyseSurface for x in paramList]):
        typeData = 'analyse'
        validChoice = None
        prbParameters = None
    else:
        if all([x in listForcastSurface for x in paramList]) and typeData != 'cycleforecast':
            if typeData == 'analyse':
                typeData = 'forecast'
                validChoice = typeData
            else:
                validChoice = None
            indexParameters = [i for i, elem in enumerate([x in listAnalyseSurface for x in paramList], 1) if not elem]
            prbParameters = []
            for i in indexParameters:
                prbParameters.append(paramList[i - 1])
        else:
            if typeData != 'cycleforecast':
                typeData = 'cycleforecast'
                validChoice = typeData
            else:
                validChoice = None
            indexParameters = [i for i, elem in enumerate([x in listAnalyseSurface for x in paramList], 1) if not elem]
            prbParameters = []
            for i in indexParameters:
                prbParameters.append(paramList[i - 1])

    #Check that date/timeList are available
    today = date.today()
    lastData = today - timedelta(days=14)
    if dateStart < lastData or dateEnd > today:
        exit('dates are not within the 14-day range from today')
    else:
        #For each requested day
        nbDays = (dateEnd - dateStart).days + 1
        for i in range(0, nbDays):
            #filter the timeList for today
            if dateStart + timedelta(days=i) == today:
                maxT = datetime.now().hour - 5
                timeListCorr = [x for x in stepList if x < maxT]
            else:
                timeListCorr = stepList

            for t in timeListCorr:
                URL = 'http://nomads.ncep.noaa.gov/cgi-bin/filter_gfs_'
                #grid
                URL = URL + "{:.2f}".format(grid).replace('.', 'p') + '.pl?file=gfs.'
                #run hour (mind the publication delay for today)
                URL = URL + 't' + str(t).zfill(2) + 'z.'
                if (grid == 0.5):
                    URL = URL + 'pgrb2full.'
                else:
                    URL = URL + 'pgrb2.'
                URL = URL + "{:.2f}".format(grid).replace('.', 'p') + '.'
                if typeData == 'cycleforecast':
                    URL = URL + 'f006&lev_'
                elif typeData == 'forecast':
                    URL = URL + 'f000&lev_'
                else:
                    URL = URL + 'anl&lev_'
                URL = URL + "=on&lev_".join(levelList) + "=on&var_"
                URL = URL + "=on&var_".join(paramList) + "=on&subregion=&"
                URL = URL + "leftlon=" + str(round(float(extent[1]) - 0.05, 1)) + "&rightlon=" + str(round(float(extent[3]) + 0.05, 1)) + "&toplat=" + str(round(float(extent[0]) + 0.5, 1)) + "&bottomlat=" + str(round(float(extent[2]) - 0.5, 1))
                URL = URL + "&dir=%2Fgfs." + "{:%Y%m%d}".format(dateStart + timedelta(days=i)) + str(t).zfill(2)
                URLlist.append(URL)

        return (URLlist, validChoice, prbParameters)
[ "Genere", "la", "structure", "de", "requete", "pour", "le", "téléchargement", "de", "données", "GFS", "INPUTS", ":", "\\", "n", "-", "date", ":", "au", "format", "annee", "-", "mois", "-", "jour", "\\", "n", "-", "heure", ":", "au", "format", "heure", ":", "minute", ":", "seconde", "\\", "n", "-", "coord", ":", "une", "liste", "des", "coordonnees", "au", "format", "[", "N", "W", "S", "E", "]", "\\", "n", "-", "dim_grille", ":", "taille", "de", "la", "grille", "en", "degree", "\\", "n" ]
yoannMoreau/gfsDownload
python
https://github.com/yoannMoreau/gfsDownload/blob/56e91a5dffb536596a80d2d614ebe858d9ab5ab7/python/utils.py#L179-L264
[ "def", "create_request_gfs", "(", "dateStart", ",", "dateEnd", ",", "stepList", ",", "levelList", ",", "grid", ",", "extent", ",", "paramList", ",", "typeData", ")", ":", "URLlist", "=", "[", "]", "#Control datetype", "listForcastSurface", "=", "[", "'GUST'", ",", "'HINDEX'", ",", "'PRES'", ",", "'HGT'", ",", "'TMP'", ",", "'WEASD'", ",", "'SNOD'", ",", "'CPOFP'", ",", "'WILT'", ",", "'FLDCP'", ",", "'SUNSD'", ",", "'LFTX'", ",", "'CAPE'", ",", "'CIN'", ",", "'4LFTX'", ",", "'HPBL'", ",", "'LAND'", "]", "if", "(", "0", "not", "in", "[", "int", "(", "x", ")", "for", "x", "in", "stepList", "]", ")", ":", "listForcastSurface", "=", "listForcastSurface", "+", "[", "'PEVPR'", ",", "'CPRAT'", ",", "'PRATE'", ",", "'APCP'", ",", "'ACPCP'", ",", "'WATR'", ",", "'CSNOW'", ",", "'CICEP'", ",", "'CFPER'", ",", "'CRAIN'", ",", "'LHTFL'", ",", "'SHTFL'", ",", "'SHTFL'", ",", "'GFLUX'", ",", "'UFLX'", ",", "'VFLX'", ",", "'U-GWD'", ",", "'V-GWD'", ",", "'DSWRF'", ",", "'DLWRF'", ",", "'ULWRF'", ",", "'USWRF'", ",", "'ALBDO'", "]", "listAnalyseSurface", "=", "[", "'HGT'", ",", "'PRES'", ",", "'LFTX'", ",", "'CAPE'", ",", "'CIN'", ",", "'4LFTX'", "]", "if", "typeData", "==", "'analyse'", "and", "all", "(", "[", "x", "in", "listAnalyseSurface", "for", "x", "in", "paramList", "]", ")", ":", "typeData", "=", "'analyse'", "validChoice", "=", "None", "prbParameters", "=", "None", "else", ":", "if", "all", "(", "[", "x", "in", "listForcastSurface", "for", "x", "in", "paramList", "]", ")", "and", "typeData", "!=", "'cycleforecast'", ":", "if", "typeData", "==", "'analyse'", ":", "typeData", "=", "'forecast'", "validChoice", "=", "typeData", "else", ":", "validChoice", "=", "None", "indexParameters", "=", "[", "i", "for", "i", ",", "elem", "in", "enumerate", "(", "[", "x", "in", "listAnalyseSurface", "for", "x", "in", "paramList", "]", ",", "1", ")", "if", "not", "elem", "]", "prbParameters", "=", "[", "]", "for", "i", "in", "indexParameters", ":", "prbParameters", ".", "append", "(", "paramList", "[", "i", "-", "1", "]", ")", "else", ":", "if", "typeData", "!=", "'cycleforecast'", ":", "typeData", "=", "'cycleforecast'", "validChoice", "=", "typeData", "else", ":", "validChoice", "=", "None", "indexParameters", "=", "[", "i", "for", "i", ",", "elem", "in", "enumerate", "(", "[", "x", "in", "listAnalyseSurface", "for", "x", "in", "paramList", "]", ",", "1", ")", "if", "not", "elem", "]", "prbParameters", "=", "[", "]", "for", "i", "in", "indexParameters", ":", "prbParameters", ".", "append", "(", "paramList", "[", "i", "-", "1", "]", ")", "#Control si date/timeList disponible", "today", "=", "date", ".", "today", "(", ")", "lastData", "=", "today", "-", "timedelta", "(", "days", "=", "14", ")", "if", "dateStart", "<", "lastData", "or", "dateEnd", ">", "today", ":", "exit", "(", "'date are not in 14 days range from today'", ")", "else", ":", "#Pour chaque jour souhaité", "nbDays", "=", "(", "dateEnd", "-", "dateStart", ")", ".", "days", "+", "1", "for", "i", "in", "range", "(", "0", ",", "nbDays", ")", ":", "#on crontrole pour les timeList", "if", "dateStart", "+", "timedelta", "(", "days", "=", "i", ")", "==", "today", ":", "maxT", "=", "datetime", ".", "now", "(", ")", ".", "hour", "-", "5", "timeListCorr", "=", "[", "x", "for", "x", "in", "stepList", "if", "x", "<", "maxT", "]", "else", ":", "timeListCorr", "=", "stepList", "for", "t", "in", "timeListCorr", ":", "URL", "=", "'http://nomads.ncep.noaa.gov/cgi-bin/filter_gfs_'", "#grid", "URL", "=", "URL", "+", "\"{:.2f}\"", ".", "format", "(", "grid", ")", ".", 
"replace", "(", "'.'", ",", "'p'", ")", "+", "'.pl?file=gfs.'", "#time ( attention limiter avec décalage horaire for today", "URL", "=", "URL", "+", "'t'", "+", "str", "(", "t", ")", ".", "zfill", "(", "2", ")", "+", "'z.'", "if", "(", "grid", "==", "0.5", ")", ":", "URL", "=", "URL", "+", "'pgrb2full.'", "else", ":", "URL", "=", "URL", "+", "'pgrb2.'", "URL", "=", "URL", "+", "\"{:.2f}\"", ".", "format", "(", "grid", ")", ".", "replace", "(", "'.'", ",", "'p'", ")", "+", "'.'", "if", "typeData", "==", "'cycleforecast'", ":", "URL", "=", "URL", "+", "'f006&lev_'", "elif", "typeData", "==", "'forecast'", ":", "URL", "=", "URL", "+", "'f000&lev_'", "else", ":", "URL", "=", "URL", "+", "'anl&lev_'", "URL", "=", "URL", "+", "\"=on&lev_\"", ".", "join", "(", "levelList", ")", "+", "\"=on&var_\"", "URL", "=", "URL", "+", "\"=on&var_\"", ".", "join", "(", "paramList", ")", "+", "\"=on&subregion=&\"", "URL", "=", "URL", "+", "\"leftlon=\"", "+", "str", "(", "round", "(", "float", "(", "extent", "[", "1", "]", ")", "-", "0.05", ",", "1", ")", ")", "+", "\"&rightlon=\"", "+", "str", "(", "round", "(", "float", "(", "extent", "[", "3", "]", ")", "+", "0.05", ",", "1", ")", ")", "+", "\"&toplat=\"", "+", "str", "(", "round", "(", "float", "(", "extent", "[", "0", "]", ")", "+", "0.5", ",", "1", ")", ")", "+", "\"&bottomlat=\"", "+", "str", "(", "round", "(", "float", "(", "extent", "[", "2", "]", ")", "-", "0.5", ",", "1", ")", ")", "URL", "=", "URL", "+", "\"&dir=%2Fgfs.\"", "+", "\"{:%Y%m%d}\"", ".", "format", "(", "dateStart", "+", "timedelta", "(", "days", "=", "i", ")", ")", "+", "str", "(", "t", ")", ".", "zfill", "(", "2", ")", "URLlist", ".", "append", "(", "URL", ")", "return", "(", "URLlist", ",", "validChoice", ",", "prbParameters", ")" ]
56e91a5dffb536596a80d2d614ebe858d9ab5ab7
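A hedged call sketch for the record above; all parameter values are illustrative placeholders, only the 14-day window constraint comes from the function itself.

from datetime import date, timedelta

from utils import create_request_gfs  # assumed import path

today = date.today()
urls, valid_choice, prb_params = create_request_gfs(
    today - timedelta(days=2),   # dateStart, within the enforced 14-day window
    today - timedelta(days=1),   # dateEnd
    [0, 6, 12, 18],              # stepList: GFS run hours
    ['surface'],                 # levelList
    0.5,                         # grid in degrees (0.5 selects the pgrb2full files)
    [49.0, -5.0, 41.0, 9.0],     # extent as [N, W, S, E], e.g. from convertShpToExtend
    ['TMP', 'PRES'],             # paramList
    'forecast',                  # typeData
)
for url in urls:
    print(url)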
valid
convertGribToTiff
Convert GRIB to Tif
python/utils.py
def convertGribToTiff(listeFile, listParam, listLevel, liststep, grid, startDate, endDate, outFolder):
    """ Convert GRIB to Tif"""

    dicoValues = {}

    for f in listeFile:
        grbs = pygrib.open(f)
        grbs.seek(0)
        index = 1
        for j in range(len(listLevel), 0, -1):
            for i in range(len(listParam) - 1, -1, -1):
                grb = grbs[index]
                p = grb.name.replace(' ', '_')
                if grb.level != 0:
                    lev = str(grb.level) + '_' + grb.typeOfLevel
                else:
                    lev = grb.typeOfLevel
                if p + '_' + lev not in dicoValues.keys():
                    dicoValues[p + '_' + lev] = []
                dicoValues[p + '_' + lev].append(grb.values)
                shape = grb.values.shape
                lat, lon = grb.latlons()
                geoparam = (lon.min(), lat.max(), grid, grid)
                index += 1

    nbJour = (endDate - startDate).days + 1
    #append NaN arrays when some files are missing
    for s in range(0, (len(liststep) * nbJour - len(listeFile))):
        for k in dicoValues.keys():
            dicoValues[k].append(np.full(shape, np.nan))

    #write each variable to its own file
    for key in list(dicoValues.keys()):
        dictParam = dict((k, dicoValues[key][k]) for k in range(0, len(dicoValues[key])))
        outputImg = outFolder + '/' + key + '_' + startDate.strftime('%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.tif'
        writeTiffFromDicoArray(dictParam, outputImg, shape, geoparam)

    for f in listeFile:
        os.remove(f)

def convertGribToTiff(listeFile, listParam, listLevel, liststep, grid, startDate, endDate, outFolder):
    """ Convert GRIB to Tif"""

    dicoValues = {}

    for f in listeFile:
        grbs = pygrib.open(f)
        grbs.seek(0)
        index = 1
        for j in range(len(listLevel), 0, -1):
            for i in range(len(listParam) - 1, -1, -1):
                grb = grbs[index]
                p = grb.name.replace(' ', '_')
                if grb.level != 0:
                    lev = str(grb.level) + '_' + grb.typeOfLevel
                else:
                    lev = grb.typeOfLevel
                if p + '_' + lev not in dicoValues.keys():
                    dicoValues[p + '_' + lev] = []
                dicoValues[p + '_' + lev].append(grb.values)
                shape = grb.values.shape
                lat, lon = grb.latlons()
                geoparam = (lon.min(), lat.max(), grid, grid)
                index += 1

    nbJour = (endDate - startDate).days + 1
    #append NaN arrays when some files are missing
    for s in range(0, (len(liststep) * nbJour - len(listeFile))):
        for k in dicoValues.keys():
            dicoValues[k].append(np.full(shape, np.nan))

    #write each variable to its own file
    for key in list(dicoValues.keys()):
        dictParam = dict((k, dicoValues[key][k]) for k in range(0, len(dicoValues[key])))
        outputImg = outFolder + '/' + key + '_' + startDate.strftime('%Y%m%d') + '_' + endDate.strftime('%Y%m%d') + '.tif'
        writeTiffFromDicoArray(dictParam, outputImg, shape, geoparam)

    for f in listeFile:
        os.remove(f)
[ "Convert", "GRIB", "to", "Tif" ]
yoannMoreau/gfsDownload
python
https://github.com/yoannMoreau/gfsDownload/blob/56e91a5dffb536596a80d2d614ebe858d9ab5ab7/python/utils.py#L331-L370
[ "def", "convertGribToTiff", "(", "listeFile", ",", "listParam", ",", "listLevel", ",", "liststep", ",", "grid", ",", "startDate", ",", "endDate", ",", "outFolder", ")", ":", "dicoValues", "=", "{", "}", "for", "l", "in", "listeFile", ":", "grbs", "=", "pygrib", ".", "open", "(", "l", ")", "grbs", ".", "seek", "(", "0", ")", "index", "=", "1", "for", "j", "in", "range", "(", "len", "(", "listLevel", ")", ",", "0", ",", "-", "1", ")", ":", "for", "i", "in", "range", "(", "len", "(", "listParam", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "grb", "=", "grbs", "[", "index", "]", "p", "=", "grb", ".", "name", ".", "replace", "(", "' '", ",", "'_'", ")", "if", "grb", ".", "level", "!=", "0", ":", "l", "=", "str", "(", "grb", ".", "level", ")", "+", "'_'", "+", "grb", ".", "typeOfLevel", "else", ":", "l", "=", "grb", ".", "typeOfLevel", "if", "p", "+", "'_'", "+", "l", "not", "in", "dicoValues", ".", "keys", "(", ")", ":", "dicoValues", "[", "p", "+", "'_'", "+", "l", "]", "=", "[", "]", "dicoValues", "[", "p", "+", "'_'", "+", "l", "]", ".", "append", "(", "grb", ".", "values", ")", "shape", "=", "grb", ".", "values", ".", "shape", "lat", ",", "lon", "=", "grb", ".", "latlons", "(", ")", "geoparam", "=", "(", "lon", ".", "min", "(", ")", ",", "lat", ".", "max", "(", ")", ",", "grid", ",", "grid", ")", "index", "+=", "1", "nbJour", "=", "(", "endDate", "-", "startDate", ")", ".", "days", "+", "1", "#on joute des arrayNan si il manque des fichiers", "for", "s", "in", "range", "(", "0", ",", "(", "len", "(", "liststep", ")", "*", "nbJour", "-", "len", "(", "listeFile", ")", ")", ")", ":", "for", "k", "in", "dicoValues", ".", "keys", "(", ")", ":", "dicoValues", "[", "k", "]", ".", "append", "(", "np", ".", "full", "(", "shape", ",", "np", ".", "nan", ")", ")", "#On écrit pour chacune des variables dans un fichier", "for", "i", "in", "range", "(", "len", "(", "dicoValues", ".", "keys", "(", ")", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "dictParam", "=", "dict", "(", "(", "k", ",", "dicoValues", "[", "dicoValues", ".", "keys", "(", ")", "[", "i", "]", "]", "[", "k", "]", ")", "for", "k", "in", "range", "(", "0", ",", "len", "(", "dicoValues", "[", "dicoValues", ".", "keys", "(", ")", "[", "i", "]", "]", ")", ")", ")", "sorted", "(", "dictParam", ".", "items", "(", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", "outputImg", "=", "outFolder", "+", "'/'", "+", "dicoValues", ".", "keys", "(", ")", "[", "i", "]", "+", "'_'", "+", "startDate", ".", "strftime", "(", "'%Y%M%d'", ")", "+", "'_'", "+", "endDate", ".", "strftime", "(", "'%Y%M%d'", ")", "+", "'.tif'", "writeTiffFromDicoArray", "(", "dictParam", ",", "outputImg", ",", "shape", ",", "geoparam", ")", "for", "f", "in", "listeFile", ":", "os", ".", "remove", "(", "f", ")" ]
56e91a5dffb536596a80d2d614ebe858d9ab5ab7
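A sketch tying the record above to the previous two; file names are placeholders, and writeTiffFromDicoArray is assumed to live in the same utils module.

from datetime import date

from utils import convertGribToTiff  # assumed import path

# Hypothetical GRIB file already downloaded from a URL built by create_request_gfs.
grib_files = ['gfs_20240101_t00.grb2']
convertGribToTiff(grib_files, ['TMP', 'PRES'], ['surface'], [0], 0.5,
                  date(2024, 1, 1), date(2024, 1, 1), 'out')
# Writes one multi-band GeoTIFF per parameter/level pair into out/
# and deletes the source GRIB files afterwards.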
valid
on_pref_update
Triggered on dynamic preferences model save. Issues DB save and reread.
siteprefs/toolbox.py
def on_pref_update(*args, **kwargs):
    """Triggered on dynamic preferences model save.

    Issues DB save and reread.

    """
    Preference.update_prefs(*args, **kwargs)
    Preference.read_prefs(get_prefs())

def on_pref_update(*args, **kwargs):
    """Triggered on dynamic preferences model save.

    Issues DB save and reread.

    """
    Preference.update_prefs(*args, **kwargs)
    Preference.read_prefs(get_prefs())
[ "Triggered", "on", "dynamic", "preferences", "model", "save", ".", "Issues", "DB", "save", "and", "reread", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L21-L27
[ "def", "on_pref_update", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "Preference", ".", "update_prefs", "(", "*", "args", ",", "*", "*", "kwargs", ")", "Preference", ".", "read_prefs", "(", "get_prefs", "(", ")", ")" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
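A sketch of how the handler above could be wired to a model save signal; django-siteprefs does this itself for the dynamically created preferences model, so treat this as illustration only, and MyPrefsModel as a hypothetical placeholder.

from django.db.models.signals import post_save

from siteprefs.toolbox import on_pref_update

# on_pref_update(*args, **kwargs) is signal-handler compatible.
post_save.connect(on_pref_update, sender=MyPrefsModel)  # MyPrefsModel is hypothetical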
valid
get_app_prefs
Returns a dictionary with preferences for a certain app/module.

:param str|unicode app:

:rtype: dict
siteprefs/toolbox.py
def get_app_prefs(app=None):
    """Returns a dictionary with preferences for a certain app/module.

    :param str|unicode app:

    :rtype: dict

    """
    if app is None:

        with Frame(stepback=1) as frame:
            app = frame.f_globals['__name__'].split('.')[0]

    prefs = get_prefs()

    if app not in prefs:
        return {}

    return prefs[app]

def get_app_prefs(app=None):
    """Returns a dictionary with preferences for a certain app/module.

    :param str|unicode app:

    :rtype: dict

    """
    if app is None:

        with Frame(stepback=1) as frame:
            app = frame.f_globals['__name__'].split('.')[0]

    prefs = get_prefs()

    if app not in prefs:
        return {}

    return prefs[app]
[ "Returns", "a", "dictionary", "with", "preferences", "for", "a", "certain", "app", "/", "module", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L42-L60
[ "def", "get_app_prefs", "(", "app", "=", "None", ")", ":", "if", "app", "is", "None", ":", "with", "Frame", "(", "stepback", "=", "1", ")", "as", "frame", ":", "app", "=", "frame", ".", "f_globals", "[", "'__name__'", "]", ".", "split", "(", "'.'", ")", "[", "0", "]", "prefs", "=", "get_prefs", "(", ")", "if", "app", "not", "in", "prefs", ":", "return", "{", "}", "return", "prefs", "[", "app", "]" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
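A reading sketch for the record above; 'myapp' and 'timeout' are placeholder names for preferences registered elsewhere.

from siteprefs.toolbox import get_app_prefs

prefs = get_app_prefs('myapp')   # or get_app_prefs() to infer the calling app
timeout = prefs.get('timeout')   # keys are lowercased names; values are PrefProxy objects
print(timeout)                   # an unregistered app simply yields {}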
valid
bind_proxy
Binds PrefProxy objects to module variables used by apps as preferences.

:param list|tuple values: Preference values.

:param str|unicode category: Category name the preference belongs to.

:param Field field: Django model field to represent this preference.

:param str|unicode verbose_name: Field verbose name.

:param str|unicode help_text: Field help text.

:param bool static: Leave this preference static (do not store in DB).

:param bool readonly: Make this field read only.

:rtype: list
siteprefs/toolbox.py
def bind_proxy(values, category=None, field=None, verbose_name=None, help_text='', static=True, readonly=False):
    """Binds PrefProxy objects to module variables used by apps as preferences.

    :param list|tuple values: Preference values.

    :param str|unicode category: Category name the preference belongs to.

    :param Field field: Django model field to represent this preference.

    :param str|unicode verbose_name: Field verbose name.

    :param str|unicode help_text: Field help text.

    :param bool static: Leave this preference static (do not store in DB).

    :param bool readonly: Make this field read only.

    :rtype: list

    """
    addrs = OrderedDict()

    depth = 3

    for local_name, locals_dict in traverse_local_prefs(depth):
        addrs[id(locals_dict[local_name])] = local_name

    proxies = []
    locals_dict = get_frame_locals(depth)

    for value in values:  # Try to preserve fields order.

        id_val = id(value)

        if id_val in addrs:

            local_name = addrs[id_val]
            local_val = locals_dict[local_name]

            if isinstance(local_val, PatchedLocal) and not isinstance(local_val, PrefProxy):

                proxy = PrefProxy(
                    local_name, value.val,
                    category=category,
                    field=field,
                    verbose_name=verbose_name,
                    help_text=help_text,
                    static=static,
                    readonly=readonly,
                )

                app_name = locals_dict['__name__'].split('.')[-2]  # x.y.settings -> y
                prefs = get_prefs()

                if app_name not in prefs:
                    prefs[app_name] = OrderedDict()

                prefs[app_name][local_name.lower()] = proxy

                # Replace original pref variable with a proxy.
                locals_dict[local_name] = proxy

                proxies.append(proxy)

    return proxies

def bind_proxy(values, category=None, field=None, verbose_name=None, help_text='', static=True, readonly=False):
    """Binds PrefProxy objects to module variables used by apps as preferences.

    :param list|tuple values: Preference values.

    :param str|unicode category: Category name the preference belongs to.

    :param Field field: Django model field to represent this preference.

    :param str|unicode verbose_name: Field verbose name.

    :param str|unicode help_text: Field help text.

    :param bool static: Leave this preference static (do not store in DB).

    :param bool readonly: Make this field read only.

    :rtype: list

    """
    addrs = OrderedDict()

    depth = 3

    for local_name, locals_dict in traverse_local_prefs(depth):
        addrs[id(locals_dict[local_name])] = local_name

    proxies = []
    locals_dict = get_frame_locals(depth)

    for value in values:  # Try to preserve fields order.

        id_val = id(value)

        if id_val in addrs:

            local_name = addrs[id_val]
            local_val = locals_dict[local_name]

            if isinstance(local_val, PatchedLocal) and not isinstance(local_val, PrefProxy):

                proxy = PrefProxy(
                    local_name, value.val,
                    category=category,
                    field=field,
                    verbose_name=verbose_name,
                    help_text=help_text,
                    static=static,
                    readonly=readonly,
                )

                app_name = locals_dict['__name__'].split('.')[-2]  # x.y.settings -> y
                prefs = get_prefs()

                if app_name not in prefs:
                    prefs[app_name] = OrderedDict()

                prefs[app_name][local_name.lower()] = proxy

                # Replace original pref variable with a proxy.
                locals_dict[local_name] = proxy

                proxies.append(proxy)

    return proxies
[ "Binds", "PrefProxy", "objects", "to", "module", "variables", "used", "by", "apps", "as", "preferences", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L71-L133
[ "def", "bind_proxy", "(", "values", ",", "category", "=", "None", ",", "field", "=", "None", ",", "verbose_name", "=", "None", ",", "help_text", "=", "''", ",", "static", "=", "True", ",", "readonly", "=", "False", ")", ":", "addrs", "=", "OrderedDict", "(", ")", "depth", "=", "3", "for", "local_name", ",", "locals_dict", "in", "traverse_local_prefs", "(", "depth", ")", ":", "addrs", "[", "id", "(", "locals_dict", "[", "local_name", "]", ")", "]", "=", "local_name", "proxies", "=", "[", "]", "locals_dict", "=", "get_frame_locals", "(", "depth", ")", "for", "value", "in", "values", ":", "# Try to preserve fields order.", "id_val", "=", "id", "(", "value", ")", "if", "id_val", "in", "addrs", ":", "local_name", "=", "addrs", "[", "id_val", "]", "local_val", "=", "locals_dict", "[", "local_name", "]", "if", "isinstance", "(", "local_val", ",", "PatchedLocal", ")", "and", "not", "isinstance", "(", "local_val", ",", "PrefProxy", ")", ":", "proxy", "=", "PrefProxy", "(", "local_name", ",", "value", ".", "val", ",", "category", "=", "category", ",", "field", "=", "field", ",", "verbose_name", "=", "verbose_name", ",", "help_text", "=", "help_text", ",", "static", "=", "static", ",", "readonly", "=", "readonly", ",", ")", "app_name", "=", "locals_dict", "[", "'__name__'", "]", ".", "split", "(", "'.'", ")", "[", "-", "2", "]", "# x.y.settings -> y", "prefs", "=", "get_prefs", "(", ")", "if", "app_name", "not", "in", "prefs", ":", "prefs", "[", "app_name", "]", "=", "OrderedDict", "(", ")", "prefs", "[", "app_name", "]", "[", "local_name", ".", "lower", "(", ")", "]", "=", "proxy", "# Replace original pref variable with a proxy.", "locals_dict", "[", "local_name", "]", "=", "proxy", "proxies", ".", "append", "(", "proxy", ")", "return", "proxies" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
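bind_proxy() expects module globals already wrapped by patch_locals(); a sketch of the public wrappers that drive it (register_prefs and pref come from this package's documented usage, but treat the exact call shape as an assumption rather than a guarantee).

# myapp/settings.py -- illustrative only.
from siteprefs.toolbox import patch_locals, register_prefs, pref

MY_OPTION = 42  # a plain module-level setting to expose as a preference

patch_locals()  # wrap globals in PatchedLocal so each id() is unique
register_prefs(pref(MY_OPTION, verbose_name='My option', static=False))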
valid
register_admin_models
Registers dynamically created preferences models for Admin interface.

:param admin.AdminSite admin_site: AdminSite object.
siteprefs/toolbox.py
def register_admin_models(admin_site):
    """Registers dynamically created preferences models for Admin interface.

    :param admin.AdminSite admin_site: AdminSite object.

    """
    global __MODELS_REGISTRY

    prefs = get_prefs()

    for app_label, prefs_items in prefs.items():

        model_class = get_pref_model_class(app_label, prefs_items, get_app_prefs)

        if model_class is not None:
            __MODELS_REGISTRY[app_label] = model_class
            admin_site.register(model_class, get_pref_model_admin_class(prefs_items))

def register_admin_models(admin_site):
    """Registers dynamically created preferences models for Admin interface.

    :param admin.AdminSite admin_site: AdminSite object.

    """
    global __MODELS_REGISTRY

    prefs = get_prefs()

    for app_label, prefs_items in prefs.items():

        model_class = get_pref_model_class(app_label, prefs_items, get_app_prefs)

        if model_class is not None:
            __MODELS_REGISTRY[app_label] = model_class
            admin_site.register(model_class, get_pref_model_admin_class(prefs_items))
[ "Registers", "dynamically", "created", "preferences", "models", "for", "Admin", "interface", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L136-L152
[ "def", "register_admin_models", "(", "admin_site", ")", ":", "global", "__MODELS_REGISTRY", "prefs", "=", "get_prefs", "(", ")", "for", "app_label", ",", "prefs_items", "in", "prefs", ".", "items", "(", ")", ":", "model_class", "=", "get_pref_model_class", "(", "app_label", ",", "prefs_items", ",", "get_app_prefs", ")", "if", "model_class", "is", "not", "None", ":", "__MODELS_REGISTRY", "[", "app_label", "]", "=", "model_class", "admin_site", ".", "register", "(", "model_class", ",", "get_pref_model_admin_class", "(", "prefs_items", ")", ")" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
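A sketch of registering the dynamic models on a custom admin site; it assumes preferences were already imported and read beforehand (see autodiscover_siteprefs in the next record, which does all three steps).

from django.contrib import admin

custom_site = admin.AdminSite(name='custom_admin')  # hypothetical custom site
register_admin_models(custom_site)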
valid
autodiscover_siteprefs
Automatically discovers and registers all preferences available in all apps.

:param admin.AdminSite admin_site: Custom AdminSite object.
siteprefs/toolbox.py
def autodiscover_siteprefs(admin_site=None):
    """Automatically discovers and registers all preferences available in all apps.

    :param admin.AdminSite admin_site: Custom AdminSite object.

    """
    if admin_site is None:
        admin_site = admin.site

    # Do not discover anything if called from manage.py (e.g. executing commands from cli).
    if 'manage' not in sys.argv[0] or (len(sys.argv) > 1 and sys.argv[1] in MANAGE_SAFE_COMMANDS):
        import_prefs()
        Preference.read_prefs(get_prefs())
        register_admin_models(admin_site)

def autodiscover_siteprefs(admin_site=None):
    """Automatically discovers and registers all preferences available in all apps.

    :param admin.AdminSite admin_site: Custom AdminSite object.

    """
    if admin_site is None:
        admin_site = admin.site

    # Do not discover anything if called from manage.py (e.g. executing commands from cli).
    if 'manage' not in sys.argv[0] or (len(sys.argv) > 1 and sys.argv[1] in MANAGE_SAFE_COMMANDS):
        import_prefs()
        Preference.read_prefs(get_prefs())
        register_admin_models(admin_site)
[ "Automatically", "discovers", "and", "registers", "all", "preferences", "available", "in", "all", "apps", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L155-L168
[ "def", "autodiscover_siteprefs", "(", "admin_site", "=", "None", ")", ":", "if", "admin_site", "is", "None", ":", "admin_site", "=", "admin", ".", "site", "# Do not discover anything if called from manage.py (e.g. executing commands from cli).", "if", "'manage'", "not", "in", "sys", ".", "argv", "[", "0", "]", "or", "(", "len", "(", "sys", ".", "argv", ")", ">", "1", "and", "sys", ".", "argv", "[", "1", "]", "in", "MANAGE_SAFE_COMMANDS", ")", ":", "import_prefs", "(", ")", "Preference", ".", "read_prefs", "(", "get_prefs", "(", ")", ")", "register_admin_models", "(", "admin_site", ")" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
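A common calling sketch for the record above; placing the call in the project's urls.py is a project convention assumption, not a package requirement.

# project/urls.py
from siteprefs.toolbox import autodiscover_siteprefs

autodiscover_siteprefs()  # falls back to django.contrib.admin.site when no site is passed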
valid
patch_locals
Temporarily (see unpatch_locals()) replaces all module variables considered preferences
with PatchedLocal objects, so that every variable has a unique value returned by id().
siteprefs/toolbox.py
def patch_locals(depth=2):
    """Temporarily (see unpatch_locals()) replaces all module variables
    considered preferences with PatchedLocal objects, so that every
    variable has a unique value returned by id().

    """
    for name, locals_dict in traverse_local_prefs(depth):
        locals_dict[name] = PatchedLocal(name, locals_dict[name])

    get_frame_locals(depth)[__PATCHED_LOCALS_SENTINEL] = True

def patch_locals(depth=2):
    """Temporarily (see unpatch_locals()) replaces all module variables
    considered preferences with PatchedLocal objects, so that every
    variable has a unique value returned by id().

    """
    for name, locals_dict in traverse_local_prefs(depth):
        locals_dict[name] = PatchedLocal(name, locals_dict[name])

    get_frame_locals(depth)[__PATCHED_LOCALS_SENTINEL] = True
[ "Temporarily", "(", "see", "unpatch_locals", "()", ")", "replaces", "all", "module", "variables", "considered", "preferences", "with", "PatchedLocal", "objects", "so", "that", "every", "variable", "has", "different", "hash", "returned", "by", "id", "()", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L171-L180
[ "def", "patch_locals", "(", "depth", "=", "2", ")", ":", "for", "name", ",", "locals_dict", "in", "traverse_local_prefs", "(", "depth", ")", ":", "locals_dict", "[", "name", "]", "=", "PatchedLocal", "(", "name", ",", "locals_dict", "[", "name", "]", ")", "get_frame_locals", "(", "depth", ")", "[", "__PATCHED_LOCALS_SENTINEL", "]", "=", "True" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
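Why the wrapping above matters: on CPython, equal small ints (and interned strings) alias the same object, so two unwrapped preferences holding the same value would collide in bind_proxy()'s id()-keyed address map. A toy demonstration:

A = 1
B = 1
print(id(A) == id(B))  # True on CPython: both names alias the cached int object
# After patch_locals(), each name holds a distinct PatchedLocal instance,
# so id() maps back to exactly one variable name.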
valid
unpatch_locals
Restores the original values of module variables considered preferences if they are still PatchedLocal and not PrefProxy.
siteprefs/toolbox.py
def unpatch_locals(depth=3):
    """Restores the original values of module variables
    considered preferences if they are still PatchedLocal
    and not PrefProxy.

    """
    for name, locals_dict in traverse_local_prefs(depth):
        if isinstance(locals_dict[name], PatchedLocal):
            locals_dict[name] = locals_dict[name].val

    del get_frame_locals(depth)[__PATCHED_LOCALS_SENTINEL]

def unpatch_locals(depth=3):
    """Restores the original values of module variables
    considered preferences if they are still PatchedLocal
    and not PrefProxy.

    """
    for name, locals_dict in traverse_local_prefs(depth):
        if isinstance(locals_dict[name], PatchedLocal):
            locals_dict[name] = locals_dict[name].val

    del get_frame_locals(depth)[__PATCHED_LOCALS_SENTINEL]
[ "Restores", "the", "original", "values", "of", "module", "variables", "considered", "preferences", "if", "they", "are", "still", "PatchedLocal", "and", "not", "PrefProxy", "." ]
idlesign/django-siteprefs
python
https://github.com/idlesign/django-siteprefs/blob/3d6bf5e64220fe921468a36fce68e15d7947cf92/siteprefs/toolbox.py#L183-L193
[ "def", "unpatch_locals", "(", "depth", "=", "3", ")", ":", "for", "name", ",", "locals_dict", "in", "traverse_local_prefs", "(", "depth", ")", ":", "if", "isinstance", "(", "locals_dict", "[", "name", "]", ",", "PatchedLocal", ")", ":", "locals_dict", "[", "name", "]", "=", "locals_dict", "[", "name", "]", ".", "val", "del", "get_frame_locals", "(", "depth", ")", "[", "__PATCHED_LOCALS_SENTINEL", "]" ]
3d6bf5e64220fe921468a36fce68e15d7947cf92
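A toy illustration of the restore step above; this stand-in class only mimics the (name, val) shape of the real PatchedLocal.

class PatchedLocal:  # stand-in for siteprefs' internal class
    def __init__(self, name, val):
        self.name = name
        self.val = val

locals_dict = {'NOT_A_PREF': PatchedLocal('NOT_A_PREF', 'raw value')}
for name, val in list(locals_dict.items()):
    if isinstance(val, PatchedLocal):
        locals_dict[name] = val.val  # unwrap back to the original value

print(locals_dict)  # {'NOT_A_PREF': 'raw value'}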