repo_name
stringlengths 5
92
| path
stringlengths 4
232
| copies
stringclasses 19
values | size
stringlengths 4
7
| content
stringlengths 721
1.04M
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 15
997
| alpha_frac
float64 0.25
0.97
| autogenerated
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|
SciTools/conda-testenv
|
conda_testenv/tests/integration/test.py
|
1
|
2011
|
import os
import shutil
import subprocess
import sys
import tempfile
import unittest
class Test_cli(unittest.TestCase):
    """Integration test: build test recipes, create an env from them, and
    check the output of ``conda testenv`` against that environment."""

    def setUp(self):
        # Use the conda that lives next to the running interpreter.
        conda = os.path.join(os.path.dirname(sys.executable), 'conda')
        self.environ = os.environ.copy()
        self.tmpdir = tempfile.mkdtemp('conda_setup')
        # Point conda at a private condarc so the test never touches the
        # user's real configuration or build root.
        condarc = os.path.join(self.tmpdir, 'condarc')
        self.environ['CONDARC'] = condarc
        build_root = os.path.join(self.tmpdir, 'build-root')
        with open(condarc, 'w') as fh:
            fh.writelines(['add_pip_as_python_dependency: false\n',
                           'conda-build:\n',
                           ' root-dir: {}'.format(build_root)])
        recipes_location = os.path.join(os.path.dirname(__file__),
                                        'test_recipes')
        # CONDA_NPY is needed only for building the recipes, so use a
        # separate environment for the build step.
        build_environ = dict(self.environ, CONDA_NPY='110')
        recipe_paths = [os.path.join(recipes_location, name)
                        for name in ('a', 'b', 'c')]
        subprocess.check_call([conda, 'build'] + recipe_paths,
                              env=build_environ)
        self.test_prefix = os.path.join(self.tmpdir, 'test_prefix')
        subprocess.check_call([conda, 'create', '-p', self.test_prefix,
                               'a', 'b', 'c', '--use-local', '--yes'],
                              env=self.environ)

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def test(self):
        cmd = ['conda', 'testenv', '-p', self.test_prefix]
        # The build-time variable must not leak into the test environment.
        self.assertNotIn('CONDA_NPY', self.environ)
        output = subprocess.check_output(cmd, env=self.environ).decode('ascii')
        for expected in ('Success recipe a',
                         'Success recipe b',
                         'hello from b',
                         'Success recipe c',
                         'hello from c'):
            self.assertIn(expected, output)
|
bsd-3-clause
| -7,006,892,278,387,370,000
| 38.431373
| 75
| 0.537046
| false
|
googleapis/googleapis-gen
|
google/cloud/domains/v1beta1/domains-v1beta1-py/noxfile.py
|
1
|
3585
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import pathlib
import shutil
import subprocess
import sys
import nox # type: ignore
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"

# ``setup.py --name`` prints the distribution name followed by a newline;
# strip it so the value is safe to pass as a CLI argument below.
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8").strip()

# Sessions run by default when ``nox`` is invoked without ``-s``.
# NOTE(review): upstream templates usually assign ``nox.options.sessions``;
# this file sets ``nox.sessions`` — confirm which attribute local tooling reads.
nox.sessions = [
    "unit",
    "cover",
    "mypy",
    # BUG FIX: a missing comma here previously concatenated this entry with
    # "docs" into the nonexistent session "check_lower_boundsdocs".
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
]
@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
    """Run the unit test suite."""
    test_deps = ('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
    session.install(*test_deps)
    session.install('-e', '.')
    # Run pytest with coverage over the generated package; positional nox
    # args (if any) narrow the test selection under tests/unit.
    pytest_argv = [
        'py.test',
        '--quiet',
        '--cov=google/cloud/domains_v1beta1/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs)),
    ]
    session.run(*pytest_argv)
@nox.session(python='3.7')
def cover(session):
    """Run the final coverage report.
    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    # Report (failing below 100% aggregate coverage), then reset the data.
    for coverage_args in (("report", "--show-missing", "--fail-under=100"),
                          ("erase",)):
        session.run("coverage", *coverage_args)
@nox.session(python=['3.6', '3.7'])
def mypy(session):
    """Run the type checker."""
    # Install the checker plus stubs, then the package itself.
    for install_args in (('mypy', 'types-pkg_resources'), ('.',)):
        session.install(*install_args)
    session.run('mypy', '--explicit-package-bases', 'google')
@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py"""
    session.install('google-cloud-testutils')
    session.install('.')
    checker_argv = [
        'lower-bound-checker', 'update',
        '--package-name', PACKAGE_NAME,
        '--constraints-file', str(LOWER_BOUND_CONSTRAINTS_FILE),
    ]
    session.run(*checker_argv)
@nox.session
def check_lower_bounds(session):
    """Check lower bounds in setup.py are reflected in constraints file"""
    session.install('google-cloud-testutils')
    session.install('.')
    checker_argv = [
        'lower-bound-checker', 'check',
        '--package-name', PACKAGE_NAME,
        '--constraints-file', str(LOWER_BOUND_CONSTRAINTS_FILE),
    ]
    session.run(*checker_argv)
@nox.session(python='3.6')
def docs(session):
    """Build the docs for this library."""
    session.install("-e", ".")
    session.install("sphinx<3.0.0", "alabaster", "recommonmark")
    build_dir = os.path.join("docs", "_build")
    # Start from a clean build tree.
    shutil.rmtree(build_dir, ignore_errors=True)
    sphinx_argv = [
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b", "html",
        "-d", os.path.join(build_dir, "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join(build_dir, "html", ""),
    ]
    session.run(*sphinx_argv)
|
apache-2.0
| -6,592,495,365,631,380,000
| 26.159091
| 96
| 0.618131
| false
|
popazerty/dvbapp2-gui
|
lib/python/Screens/InfoBarGenerics.py
|
1
|
171496
|
# -*- coding: utf-8 -*-
from Screens.ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager, findMountPoint
from Components.Input import Input
from Components.Label import Label
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.Sources.List import List
from Components.config import config, configfile, ConfigBoolean, ConfigClock
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, preferredTimerPath, ConfigSelection
from Components.Task import Task, Job, job_manager as JobManager
from Components.Pixmap import MovingPixmap, MultiPixmap
from Components.Sources.StaticText import StaticText
from Components.ScrollLabel import ScrollLabel
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.EpgSelection import EPGSelection
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
from Screens.PVRState import PVRState, TimeshiftState
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.TimerEdit import TimerEditList
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from RecordTimer import RecordTimerEntry, parseEvent, AFTEREVENT, findSafeRecordPath
from Screens.TimerEntry import TimerEntry as TimerEntry_TimerEntry
from timer import TimerEntry
from Tools import Directories, ASCIItranslit, Notifications
from enigma import getBoxType, eBackgroundFileEraser, eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, iPlayableService, eServiceReference, eEPGCache, eActionMap, getBoxType
from time import time, localtime, strftime
from bisect import insort
from random import randint
from sys import maxint
import os, cPickle
# hack alert!
from Screens.Menu import MainMenu, Menu, mdom
from Screens.Setup import Setup
import Screens.Standby
def isStandardInfoBar(self):
    """Return True if *self* is the main InfoBar screen, matched by its repr."""
    # repr() replaces the Python-2-only backtick syntax; identical behaviour.
    return ".InfoBar'>" in repr(self)
def isMoviePlayerInfoBar(self):
    """Return True if *self* is a MoviePlayer infobar, matched by its repr."""
    # repr() replaces the Python-2-only backtick syntax; identical behaviour.
    return ".MoviePlayer" in repr(self)
def setResumePoint(session):
    # Record the current playback position of the running service in the
    # module-level resume-point cache, prune stale entries, and persist.
    global resumePointCache, resumePointCacheLast
    service = session.nav.getCurrentService()
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if (service is not None) and (ref is not None): # and (ref.type != 1):
        # ref type 1 has its own memory...
        seek = service.seek()
        if seek:
            pos = seek.getPlayPosition()
            if not pos[0]:
                # pos is (error, position); only store on a clean read.
                key = ref.toString()
                lru = int(time())
                l = seek.getLength()
                if l:
                    l = l[1]
                else:
                    l = None
                # Cache entry layout: [last-use timestamp, position, length].
                resumePointCache[key] = [lru, pos[1], l]
                # Prune entries older than the one just written whose backing
                # file has disappeared (e.g. recording deleted).
                # NOTE(review): deleting during iteration is safe here only
                # because Python 2's .items() returns a list — confirm before
                # any Python 3 migration.
                for k, v in resumePointCache.items():
                    if v[0] < lru:
                        candidate = k
                        filepath = os.path.realpath(candidate.split(':')[-1])
                        mountpoint = findMountPoint(filepath)
                        if os.path.ismount(mountpoint) and not os.path.exists(filepath):
                            del resumePointCache[candidate]
                saveResumePoints()
def delResumePoint(ref):
    """Drop the stored resume position for *ref* (no-op if absent) and persist."""
    global resumePointCache, resumePointCacheLast
    # pop() with a default is the EAFP-free equivalent of try/del/except KeyError.
    resumePointCache.pop(ref.toString(), None)
    saveResumePoints()
def getResumePoint(session):
    """Return the saved playback position for the current service, or None."""
    global resumePointCache
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    # Guard clauses: no service playing, or a type-1 reference (which keeps
    # its own resume memory) — nothing to look up.
    if ref is None or ref.type == 1:
        return None
    entry = resumePointCache.get(ref.toString())
    if entry is None:
        return None
    entry[0] = int(time())  # refresh the LRU timestamp
    return entry[1]
def saveResumePoints():
    # Persist the in-memory resume-point cache to disk (best effort: a write
    # failure is logged, not raised) and remember when we last saved.
    global resumePointCache, resumePointCacheLast
    try:
        f = open('/etc/enigma2/resumepoints.pkl', 'wb')
        cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
        f.close()
    except Exception, ex:
        print "[InfoBar] Failed to write resumepoints:", ex
    resumePointCacheLast = int(time())
def loadResumePoints():
    # Load the pickled resume-point cache from disk; a missing or unreadable
    # file simply yields an empty cache (best effort).
    try:
        file = open('/etc/enigma2/resumepoints.pkl', 'rb')
        PickleFile = cPickle.load(file)
        file.close()
        return PickleFile
    except Exception, ex:
        print "[InfoBar] Failed to load resumepoints:", ex
        return {}
def updateresumePointCache():
    """Re-read the resume-point cache from disk, replacing the in-memory copy."""
    # BUG FIX: loadResumePoints() was called twice in a row; one call suffices.
    # BUG FIX: resumePointCacheLast was assigned without a global declaration,
    # so the timestamp update only ever set a discarded local variable.
    global resumePointCache, resumePointCacheLast
    resumePointCache = loadResumePoints()
    resumePointCacheLast = int(time())
class InfoBarDish:
    # Mixin: instantiates the dish/rotor-movement dialog for the infobar.
    def __init__(self):
        self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarUnhandledKey:
    # Mixin: briefly shows an "unhandled key" symbol when a keypress is not
    # consumed by any action map. actionA is bound at the highest priority
    # (sees every key first), actionB at the lowest (only keys nothing else
    # handled); if both saw the same key flags, the key was unused.
    def __init__(self):
        self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
        self.hideUnhandledKeySymbolTimer = eTimer()
        self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
        self.checkUnusedTimer = eTimer()
        self.checkUnusedTimer.callback.append(self.checkUnused)
        self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
        eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
        eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
        # flags/uflags are bitmasks of key-event types seen (bit per flag
        # value); bit 1 set initially marks "start of a new key sequence".
        self.flags = (1<<1)
        self.uflags = 0

    #this function is called on every keypress!
    def actionA(self, key, flag):
        self.unhandledKeyDialog.hide()
        if flag != 4:
            if self.flags & (1<<1):
                # Previous sequence ended (break seen) — start a new one.
                self.flags = self.uflags = 0
            self.flags |= (1<<flag)
            if flag == 1: # break
                self.checkUnusedTimer.start(0, True)
        return 0

    #this function is only called when no other action has handled this key
    def actionB(self, key, flag):
        if flag != 4:
            self.uflags |= (1<<flag)

    def checkUnused(self):
        # All events of the sequence went unhandled -> show the symbol for 2s.
        if self.flags == self.uflags:
            self.unhandledKeyDialog.show()
            self.hideUnhandledKeySymbolTimer.start(2000, True)
class SecondInfoBar(Screen):
    # Minimal "second infobar" screen; its skin is resolved by name at runtime.
    def __init__(self, session):
        Screen.__init__(self, session)
        self.skin = None
class InfoBarShowHide:
    """ InfoBar show/hide control, accepts toggleShow and hide actions, might start
    fancy animations. """
    STATE_HIDDEN = 0
    STATE_HIDING = 1
    STATE_SHOWING = 2
    STATE_SHOWN = 3

    def __init__(self):
        self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
            {
                "toggleShow": self.toggleShow,
                "hide": self.keyHide,
            }, 1) # lower prio to make it possible to override ok and cancel..
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.serviceStarted,
            })
        self.__state = self.STATE_SHOWN
        self.__locked = 0
        self.hideTimer = eTimer()
        self.hideTimer.callback.append(self.doTimerHide)
        self.hideTimer.start(5000, True)
        self.onShow.append(self.__onShow)
        self.onHide.append(self.__onHide)
        self.onShowHideNotifiers = []
        self.standardInfoBar = False
        # Only the main InfoBar owns a second infobar dialog; otherwise the
        # attribute stays a falsy placeholder string.
        self.secondInfoBarScreen = ""
        if isStandardInfoBar(self):
            self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar)
            self.secondInfoBarScreen.hide()
            self.standardInfoBar = True
        self.secondInfoBarWasShown = False

    def keyHide(self):
        if self.__state == self.STATE_HIDDEN:
            self.hide()
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                self.secondInfoBarScreen.hide()
                self.secondInfoBarWasShown = False
            # When already hidden, the hide key toggles picture-in-picture.
            if self.session.pipshown:
                self.showPiP()
        else:
            self.hide()
            if self.pvrStateDialog:
                self.pvrStateDialog.hide()

    def connectShowHideNotifier(self, fnc):
        # Register a callback fired with True/False on show/hide.
        if not fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.append(fnc)

    def disconnectShowHideNotifier(self, fnc):
        if fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.remove(fnc)

    def serviceStarted(self):
        if self.execing:
            if config.usage.show_infobar_on_zap.value:
                self.doShow()

    def __onShow(self):
        self.__state = self.STATE_SHOWN
        for x in self.onShowHideNotifiers:
            x(True)
        self.startHideTimer()

    def startHideTimer(self):
        # (Re)arm the auto-hide timer; a timeout index of 0 means "never hide".
        if self.__state == self.STATE_SHOWN and not self.__locked:
            self.hideTimer.stop()
            idx = config.usage.infobar_timeout.index
            if idx:
                self.hideTimer.start(idx*1000, True)
        elif (self.secondInfoBarScreen and self.secondInfoBarScreen.shown) or ((not config.usage.show_second_infobar.getValue() or isMoviePlayerInfoBar(self))):
            self.hideTimer.stop()
            idx = config.usage.second_infobar_timeout.index
            if idx:
                self.hideTimer.start(idx*1000, True)
        elif self.pvrStateDialog:
            self.hideTimer.stop()
            idx = config.usage.infobar_timeout.index
            if idx:
                self.hideTimer.start(idx*1000, True)

    def __onHide(self):
        self.__state = self.STATE_HIDDEN
        for x in self.onShowHideNotifiers:
            x(False)

    def doShow(self):
        self.show()
        self.startHideTimer()

    def doTimerHide(self):
        # Auto-hide fired: hide whichever layer is currently on screen.
        self.hideTimer.stop()
        if self.__state == self.STATE_SHOWN:
            self.hide()
            if self.pvrStateDialog:
                self.pvrStateDialog.hide()
        elif self.__state == self.STATE_HIDDEN and self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        elif self.__state == self.STATE_HIDDEN:
            # Best effort: an event view may or may not be open.
            try:
                self.eventView.close()
            except:
                pass
            self.EventViewIsShown = False
        elif self.pvrStateDialog:
            self.pvrStateDialog.hide()

    def toggleShow(self):
        # Cycle: hidden -> infobar -> second infobar (if enabled) -> hidden.
        if self.__state == self.STATE_HIDDEN:
            if not self.secondInfoBarWasShown:
                self.show()
            if self.secondInfoBarScreen:
                self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        elif self.secondInfoBarScreen and config.usage.show_second_infobar.value and not self.secondInfoBarScreen.shown:
            self.hide()
            self.secondInfoBarScreen.show()
            self.secondInfoBarWasShown = True
            self.startHideTimer()
        elif (not config.usage.show_second_infobar.value or isMoviePlayerInfoBar(self)):
            self.hide()
            self.startHideTimer()
        else:
            self.hide()
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                self.secondInfoBarScreen.hide()

    def lockShow(self):
        # Nestable lock: keep the infobar visible until unlockShow balances.
        self.__locked = self.__locked + 1
        if self.execing:
            self.show()
            self.hideTimer.stop()

    def unlockShow(self):
        self.__locked = self.__locked - 1
        if self.__locked <0:
            self.__locked = 0
        if self.execing:
            self.startHideTimer()
class NumberZap(Screen):
    # Digit-entry dialog: collects a channel number and closes (zapping via
    # its callback) on OK, on a 5 s timeout, or once four digits are entered.
    def quit(self):
        self.Timer.stop()
        self.close()

    def keyOK(self):
        self.Timer.stop()
        self.close(self.service, self.bouquet)

    def handleServiceName(self):
        # Resolve the currently typed number to a service and show its name.
        if self.searchNumber:
            self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
            self ["servicename"].text = ServiceReference(self.service).getServiceName()
            if not self.startBouquet:
                self.startBouquet = self.bouquet

    def keyBlue(self):
        # Blue re-searches; restricted to the first bouquet while still in
        # the bouquet the search started from.
        self.Timer.start(5000, True)
        if self.searchNumber:
            if self.startBouquet == self.bouquet:
                self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
            else:
                self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
            self ["servicename"].text = ServiceReference(self.service).getServiceName()

    def keyNumberGlobal(self, number):
        self.Timer.start(5000, True)
        self.field = self.field + str(number)
        self["number"].setText(self.field)
        self["number_summary"].setText(self.field)
        self.handleServiceName()
        # Channel numbers are at most four digits: auto-confirm.
        if len(self.field) >= 4:
            self.keyOK()

    def __init__(self, session, number, searchNumberFunction = None):
        Screen.__init__(self, session)
        self.onChangedEntry = [ ]
        self.field = str(number)
        self.searchNumber = searchNumberFunction
        self.startBouquet = None
        self["channel"] = Label(_("Channel:"))
        self["channel_summary"] = StaticText(_("Channel:"))
        self["number"] = Label(self.field)
        self["number_summary"] = StaticText(self.field)
        self["servicename"] = Label()
        self.handleServiceName()
        self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
            {
                "cancel": self.quit,
                "ok": self.keyOK,
                "blue": self.keyBlue,
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal
            })
        # Single-shot timeout confirming whatever is typed after 5 s idle.
        self.Timer = eTimer()
        self.Timer.callback.append(self.keyOK)
        self.Timer.start(5000, True)
class InfoBarNumberZap:
    """ Handles an initial number for NumberZapping """
    def __init__(self):
        self["NumberActions"] = NumberActionMap( [ "NumberActions"],
            {
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal,
            })

    def keyNumberGlobal(self, number):
        # While seekable timeshift is active, "0" centres the seek pointer
        # instead of starting a number zap.
        if self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState" and self.timeshift_enabled and self.isSeekable() and number == 0:
            InfoBarTimeshiftState._mayShow(self)
            self.pvrStateDialog["PTSSeekPointer"].setPosition((self.pvrStateDialog["PTSSeekBack"].instance.size().width()-4)/2, self.pvrStateDialog["PTSSeekPointer"].position[1])
            if self.seekstate != self.SEEK_STATE_PLAY:
                self.setSeekState(self.SEEK_STATE_PLAY)
            self.ptsSeekPointerOK()
            return
        if self.pts_blockZap_timer.isActive():
            return
        # An unsaved timeshift must be dealt with before zapping away.
        if self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self)
            return
        if number == 0:
            if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
                self.pipDoHandle0Action()
            else:
                if config.usage.panicbutton.getValue():
                    # Panic button: wipe history and jump to channel 1 of the
                    # first bouquet.
                    self.servicelist.history = [ ]
                    self.servicelist.history_pos = 0
                    if config.usage.multibouquet.getValue():
                        bqrootstr = '1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "bouquets.tv" ORDER BY bouquet'
                    else:
                        bqrootstr = '%s FROM BOUQUET "userbouquet.favourites.tv" ORDER BY bouquet'%(self.service_types)
                    serviceHandler = eServiceCenter.getInstance()
                    rootbouquet = eServiceReference(bqrootstr)
                    bouquet = eServiceReference(bqrootstr)
                    bouquetlist = serviceHandler.list(bouquet)
                    if not bouquetlist is None:
                        while True:
                            bouquet = bouquetlist.getNext()
                            if bouquet.flags & eServiceReference.isDirectory:
                                self.servicelist.clearPath()
                                self.servicelist.setRoot(bouquet)
                                servicelist = serviceHandler.list(bouquet)
                                if not servicelist is None:
                                    serviceIterator = servicelist.getNext()
                                    while serviceIterator.valid():
                                        # Look for the service numbered 1.
                                        service, bouquet2 = self.searchNumber(1)
                                        if service == serviceIterator: break
                                        serviceIterator = servicelist.getNext()
                                    if serviceIterator.valid() and service == serviceIterator: break
                        self.servicelist.enterPath(rootbouquet)
                        self.servicelist.enterPath(bouquet)
                        self.servicelist.saveRoot()
                    self.selectAndStartService(service, bouquet)
                else:
                    self.servicelist.recallPrevService()
        else:
            if self.has_key("TimeshiftActions") and self.timeshift_enabled:
                ts = self.getTimeshift()
                if ts and ts.isTimeshiftActive():
                    return
            self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)

    def numberEntered(self, service = None, bouquet = None):
        # Callback from the NumberZap dialog.
        if service:
            self.selectAndStartService(service, bouquet)

    def searchNumberHelper(self, serviceHandler, num, bouquet):
        # Linear scan of one bouquet for the service with channel number num.
        servicelist = serviceHandler.list(bouquet)
        if servicelist:
            serviceIterator = servicelist.getNext()
            while serviceIterator.valid():
                if num == serviceIterator.getChannelNum():
                    return serviceIterator
                serviceIterator = servicelist.getNext()
        return None

    def searchNumber(self, number, firstBouquetOnly = False):
        # Find (service, bouquet) for a channel number: current bouquet first,
        # then — in multibouquet mode — the remaining bouquets.
        bouquet = self.servicelist.getRoot()
        service = None
        serviceHandler = eServiceCenter.getInstance()
        if not firstBouquetOnly:
            service = self.searchNumberHelper(serviceHandler, number, bouquet)
        if config.usage.multibouquet.value and not service:
            bouquet = self.servicelist.bouquet_root
            bouquetlist = serviceHandler.list(bouquet)
            if bouquetlist:
                bouquet = bouquetlist.getNext()
                while bouquet.valid():
                    if bouquet.flags & eServiceReference.isDirectory:
                        service = self.searchNumberHelper(serviceHandler, number, bouquet)
                        if service:
                            # Plain markers/directories are not zappable
                            # (numbered markers are).
                            playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
                            if not playable:
                                service = None
                            break
                        if config.usage.alternative_number_mode.getValue() or firstBouquetOnly:
                            break
                    bouquet = bouquetlist.getNext()
        return service, bouquet

    def selectAndStartService(self, service, bouquet):
        if service:
            if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
                self.servicelist.clearPath()
                if self.servicelist.bouquet_root != bouquet:
                    self.servicelist.enterPath(self.servicelist.bouquet_root)
                self.servicelist.enterPath(bouquet)
            self.servicelist.setCurrentSelection(service) #select the service in servicelist
            self.servicelist.zap(enable_pipzap = True)

    def zapToNumber(self, number):
        service, bouquet = self.searchNumber(number)
        self.selectAndStartService(service, bouquet)
# Module-level flag: stays True until the very first channel selection after
# installation has been completed.
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
    """ ChannelSelection - handles the channelSelection dialog and the initial
    channelChange actions which open the channelSelection dialog """
    def __init__(self):
        #instantiate forever
        self.servicelist = self.session.instantiateDialog(ChannelSelection)
        self.tscallback = None
        self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
            {
                "switchChannelUp": (self.switchChannelUp, _("open servicelist(up)")),
                "switchChannelDown": (self.switchChannelDown, _("open servicelist(down)")),
                "ChannelPlusPressed": self.ChannelPlusPressed,
                "ChannelMinusPressed": self.ChannelMinusPressed,
                "zapUp": (self.zapUp, _("previous channel")),
                "zapDown": (self.zapDown, _("next channel")),
                "historyBack": (self.historyBack, _("previous channel in history")),
                "historyNext": (self.historyNext, _("next channel in history")),
                "openServiceList": (self.openServiceList, _("open servicelist")),
                "openSatellites": (self.openSatellites, _("open Satellites")),
            })

    def ChannelPlusPressed(self):
        # CH+ behaviour is configurable: "0" zap, "1" open list, "2" favourites.
        if config.usage.channelbutton_mode.value == "0":
            self.zapDown()
        elif config.usage.channelbutton_mode.value == "1":
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                self.secondInfoBarScreen.hide()
                self.secondInfoBarWasShown = False
            self.openServiceList()
        elif config.usage.channelbutton_mode.value == "2":
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                self.secondInfoBarScreen.hide()
                self.secondInfoBarWasShown = False
            self.serviceListType = "Norm"
            self.servicelist.showFavourites()
            self.session.execDialog(self.servicelist)

    def ChannelMinusPressed(self):
        # Mirror of ChannelPlusPressed for the CH- key.
        if config.usage.channelbutton_mode.value == "0":
            self.zapUp()
        elif config.usage.channelbutton_mode.value == "1":
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                self.secondInfoBarScreen.hide()
                self.secondInfoBarWasShown = False
            self.openServiceList()
        elif config.usage.channelbutton_mode.value == "2":
            if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                self.secondInfoBarScreen.hide()
                self.secondInfoBarWasShown = False
            self.serviceListType = "Norm"
            self.servicelist.showFavourites()
            self.session.execDialog(self.servicelist)

    def showTvChannelList(self, zap=False):
        if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        self.servicelist.setModeTv()
        if zap:
            self.servicelist.zap()
        if config.usage.show_servicelist.value:
            self.session.execDialog(self.servicelist)

    def showRadioChannelList(self, zap=False):
        if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        # An unsaved timeshift must be handled before switching modes.
        if self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="showRadioChannelList")
        else:
            self.servicelist.setModeRadio()
            if zap:
                self.servicelist.zap()
            if config.usage.show_servicelist.value:
                self.session.execDialog(self.servicelist)

    def historyBack(self):
        if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        # In seekable timeshift the history keys move the seek pointer instead.
        if self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState" and self.timeshift_enabled and self.isSeekable():
            InfoBarTimeshiftState._mayShow(self)
            self.pvrStateDialog["PTSSeekPointer"].setPosition(int(self.pvrStateDialog["PTSSeekBack"].instance.position().x())+8, self.pvrStateDialog["PTSSeekPointer"].position[1])
            if self.seekstate != self.SEEK_STATE_PLAY:
                self.setSeekState(self.SEEK_STATE_PLAY)
            self.ptsSeekPointerOK()
        elif self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="historyBack")
        elif config.usage.historymode.getValue() == "0":
            self.servicelist.historyBack()
        else:
            self.historyZap(-1)

    def historyNext(self):
        if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        if self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState" and self.timeshift_enabled and self.isSeekable():
            InfoBarTimeshiftState._mayShow(self)
            self.pvrStateDialog["PTSSeekPointer"].setPosition((self.pvrStateDialog["PTSSeekBack"].instance.size().width()-4), self.pvrStateDialog["PTSSeekPointer"].position[1])
            if self.seekstate != self.SEEK_STATE_PLAY:
                self.setSeekState(self.SEEK_STATE_PLAY)
            self.ptsSeekPointerOK()
        elif self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="historyNext")
        elif config.usage.historymode.getValue() == "0":
            self.servicelist.historyNext()
        else:
            self.historyZap(+1)

    def historyClear(self):
        # Trim the zap history down to just its newest entry.
        if self and self.servicelist:
            for i in range(0, len(self.servicelist.history)-1):
                del self.servicelist.history[0]
            self.servicelist.history_pos = len(self.servicelist.history)-1
            return True
        return False

    def historyZap(self, direction):
        # Open a selector dialog over the zap history, pre-selected one step
        # in *direction* from the current position.
        hlen = len(self.servicelist.history)
        if hlen < 1: return
        mark = self.servicelist.history_pos
        selpos = self.servicelist.history_pos + direction
        if selpos < 0: selpos = 0
        if selpos > hlen-1: selpos = hlen-1
        serviceHandler = eServiceCenter.getInstance()
        historylist = [ ]
        for x in self.servicelist.history:
            info = serviceHandler.info(x[-1])
            if info: historylist.append((info.getName(x[-1]), x[-1]))
        self.session.openWithCallback(self.historyMenuClosed, HistoryZapSelector, historylist, selpos, mark, invert_items=True, redirect_buttons=True, wrap_around=True)

    def historyMenuClosed(self, retval):
        # Move the chosen entry to the end of the history and zap to it.
        if not retval: return
        hlen = len(self.servicelist.history)
        pos = 0
        for x in self.servicelist.history:
            if x[-1] == retval: break
            pos += 1
        if pos < hlen and pos != self.servicelist.history_pos:
            tmp = self.servicelist.history[pos]
            self.servicelist.history.append(tmp)
            del self.servicelist.history[pos]
            self.servicelist.history_pos = hlen-1
            self.servicelist.setHistoryPath()

    def switchChannelUp(self):
        if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        if self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="switchChannelUp")
        else:
            if not config.usage.show_bouquetalways.getValue():
                # self.servicelist.moveUp()
                self.session.execDialog(self.servicelist)
            else:
                self.servicelist.showFavourites()
                self.session.execDialog(self.servicelist)

    def switchChannelDown(self):
        if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        if self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="switchChannelDown")
        else:
            if not config.usage.show_bouquetalways.getValue():
                # self.servicelist.moveDown()
                self.session.execDialog(self.servicelist)
            else:
                self.servicelist.showFavourites()
                self.session.execDialog(self.servicelist)

    def openServiceList(self):
        if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        if self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="openServiceList")
        else:
            self.session.execDialog(self.servicelist)

    def openSatellites(self):
        if self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="openSatellites")
        else:
            self.servicelist.showSatellites()
            self.session.execDialog(self.servicelist)

    def zapUp(self):
        if self.pts_blockZap_timer.isActive():
            return
        if self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="zapUp")
        else:
            if self.servicelist.inBouquet():
                prev = self.servicelist.getCurrentSelection()
                if prev:
                    prev = prev.toString()
                    # Step up until a playable service (or the start entry
                    # again) is reached; optionally crossing bouquets.
                    while True:
                        if config.usage.quickzap_bouquet_change.value:
                            if self.servicelist.atBegin():
                                self.servicelist.prevBouquet()
                        self.servicelist.moveUp()
                        cur = self.servicelist.getCurrentSelection()
                        if cur and (cur.toString() == prev or isPlayableForCur(cur)):
                            break
            else:
                self.servicelist.moveUp()
            self.servicelist.zap(enable_pipzap = True)

    def zapDown(self):
        if self.pts_blockZap_timer.isActive():
            return
        if self.save_current_timeshift and self.timeshift_enabled:
            InfoBarTimeshift.saveTimeshiftActions(self, postaction="zapDown")
        else:
            if self.servicelist.inBouquet():
                prev = self.servicelist.getCurrentSelection()
                if prev:
                    prev = prev.toString()
                    while True:
                        if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
                            self.servicelist.nextBouquet()
                        else:
                            self.servicelist.moveDown()
                        cur = self.servicelist.getCurrentSelection()
                        if cur and (cur.toString() == prev or isPlayableForCur(cur)):
                            break
            else:
                self.servicelist.moveDown()
            self.servicelist.zap(enable_pipzap = True)
class InfoBarMenu:
""" Handles a menu action, to open the (main) menu """
def __init__(self):
self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
{
"mainMenu": (self.mainMenu, _("Enter main menu...")),
"showNetworkSetup": (self.showNetworkMounts, _("Show network mounts ...")),
"showRFmod": (self.showRFSetup, _("Show RFmod setup...")),
"toggleAspectRatio": (self.toggleAspectRatio, _("Toggle aspect ratio...")),
})
self.session.infobar = None
def mainMenu(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
print "loading mainmenu XML..."
menu = mdom.getroot()
assert menu.tag == "menu", "root element in menu must be 'menu'!"
self.session.infobar = self
# so we can access the currently active infobar from screens opened from within the mainmenu
# at the moment used from the SubserviceSelection
self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)
def mainMenuClosed(self, *val):
self.session.infobar = None
def toggleAspectRatio(self):
ASPECT = [ "auto", "16_9", "4_3" ]
ASPECT_MSG = { "auto":"Auto", "16_9":"16:9", "4_3":"4:3" }
if config.av.aspect.getValue() in ASPECT:
index = ASPECT.index(config.av.aspect.getValue())
config.av.aspect.value = ASPECT[(index+1)%3]
else:
config.av.aspect.value = "auto"
config.av.aspect.save()
self.session.open(MessageBox, _("AV aspect is %s." % ASPECT_MSG[config.av.aspect.getValue()]), MessageBox.TYPE_INFO, timeout=5)
def showNetworkMounts(self):
menulist = mdom.getroot().findall('menu')
for item in menulist:
if item.attrib['entryID'] == 'setup_selection':
menulist = item.findall('menu')
for item in menulist:
if item.attrib['entryID'] == 'system_selection':
menulist = item.findall('menu')
for item in menulist:
if item.attrib['entryID'] == 'network_menu':
menu = item
assert menu.tag == "menu", "root element in menu must be 'menu'!"
self.session.openWithCallback(self.mainMenuClosed, Menu, menu)
def showRFSetup(self):
self.session.openWithCallback(self.mainMenuClosed, Setup, 'RFmod')
def mainMenuClosed(self, *val):
self.session.infobar = None
class SimpleServicelist:
    """A minimal wrap-around cursor over a list of services.

    Each entry is expected to expose a ``.ref`` attribute (a service
    reference) that ``selectService`` compares against.
    """

    def __init__(self, services):
        self.services = services
        self.length = len(services)
        self.current = 0

    def selectService(self, service):
        """Position the cursor on *service*; return True when found."""
        if not self.length:
            self.current = -1
            return False
        pos = 0
        while self.services[pos].ref != service:
            pos += 1
            if pos >= self.length:
                # not found: cursor parks one past the end
                self.current = pos
                return False
        self.current = pos
        return True

    def nextService(self):
        """Advance the cursor, wrapping to the first entry at the end."""
        if not self.length:
            return
        nxt = self.current + 1
        self.current = nxt if nxt < self.length else 0

    def prevService(self):
        """Step the cursor back, wrapping to the last entry at the front."""
        if not self.length:
            return
        prv = self.current - 1
        self.current = prv if prv > -1 else self.length - 1

    def currentService(self):
        """Return the entry under the cursor, or None if out of range."""
        if self.length and self.current < self.length:
            return self.services[self.current]
        return None
class InfoBarEPG:
""" EPG - Opens an EPG list when the showEPGList action fires """
def __init__(self):
self.is_now_next = False
self.dlg_stack = []
self.bouquetSel = None
self.eventView = None
self.epglist = []
self.defaultEPGType = self.getDefaultEPGtype()
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
})
self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"RedPressed": (self.RedPressed, _("Show epg")),
"IPressed": (self.IPressed, _("show program information...")),
"InfoPressed": (self.InfoPressed, _("show program information...")),
"showEventInfoPlugin": (self.showEventInfoPlugins, _("List EPG functions...")),
"EPGPressed": (self.showDefaultEPG, _("show EPG...")),
"showSingleEPG": (self.openSingleServiceEPG, _("show single EPG...")),
"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
})
def getDefaultEPGtype(self):
pluginlist = self.getEPGPluginList()
config.usage.defaultEPGType=ConfigSelection(default = _("Multi EPG"), choices = pluginlist)
for plugin in pluginlist:
if plugin[0] == config.usage.defaultEPGType.getValue():
return plugin[1]
return None
def showEventInfoPlugins(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
if isMoviePlayerInfoBar(self):
self.openEventView()
else:
pluginlist = self.getEPGPluginList()
if pluginlist:
pluginlist.append((_("Select default EPG type..."), self.SelectDefaultInfoPlugin))
self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list = pluginlist, skin_name = "EPGExtensionsList")
else:
self.openSingleServiceEPG()
def getEPGPluginList(self):
pluginlist = [(p.name, boundFunction(self.runPlugin, p)) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO)]
if pluginlist:
pluginlist.append((_("Event Info"), self.openEventView))
pluginlist.append((_("Graphical EPG"), self.openGraphEPG))
pluginlist.append((_("Infobar EPG"), self.openInfoBarEPG))
pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG))
pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG))
return pluginlist
def SelectDefaultInfoPlugin(self):
self.session.openWithCallback(self.DefaultInfoPluginChosen, ChoiceBox, title=_("Please select a default EPG type..."), list = self.getEPGPluginList(), skin_name = "EPGExtensionsList")
def DefaultInfoPluginChosen(self, answer):
if answer is not None:
self.defaultEPGType = answer[1]
config.usage.defaultEPGType.value = answer[0]
config.usage.defaultEPGType.save()
configfile.save()
def runPlugin(self, plugin):
plugin(session = self.session, servicelist=self.servicelist)
def EventInfoPluginChosen(self, answer):
if answer is not None:
answer[1]()
def RedPressed(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
if config.usage.defaultEPGType.getValue() != _("Graphical EPG") and config.usage.defaultEPGType.getValue() != _("None"):
self.openGraphEPG()
else:
self.openSingleServiceEPG()
def InfoPressed(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
if getBoxType().startswith('et') or getBoxType().startswith('odin') or getBoxType().startswith('venton') or getBoxType().startswith('tm') or getBoxType().startswith('gb') or getBoxType().startswith('xp1000'):
self.openEventView()
else:
self.showDefaultEPG()
def IPressed(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
self.openEventView()
def EPGPressed(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
if ".DreamPlex" in `self`:
return
self.openMultiServiceEPG
#if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
# self.openGraphEPG()
def showEventInfoWhenNotVisible(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
if self.shown:
self.openEventView()
else:
self.toggleShow()
return 1
def zapToService(self, service, preview = False, zapback = False):
if self.servicelist.startServiceRef is None:
self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.servicelist.currentServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if service is not None:
# if bouquet:
# self.epg_bouquet = bouquet
if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
self.servicelist.clearPath()
if self.servicelist.bouquet_root != self.epg_bouquet:
self.servicelist.enterPath(self.servicelist.bouquet_root)
self.servicelist.enterPath(self.epg_bouquet)
self.servicelist.setCurrentSelection(service) #select the service in servicelist
if not zapback or preview:
self.servicelist.zap(preview_zap = preview)
if (self.servicelist.dopipzap or zapback) and not preview:
self.servicelist.zapBack()
if not preview:
self.servicelist.startServiceRef = None
self.servicelist.startRoot = None
def getBouquetServices(self, bouquet):
services = []
servicelist = eServiceCenter.getInstance().list(bouquet)
if not servicelist is None:
while True:
service = servicelist.getNext()
if not service.valid(): #check if end of list
break
if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
continue
services.append(ServiceReference(service))
return services
def openBouquetEPG(self, bouquet, withCallback=True):
services = self.getBouquetServices(bouquet)
if services:
self.epg_bouquet = bouquet
if withCallback:
self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, zapFunc=self.zapToService, bouquetChangeCB=self.changeBouquetCB, EPGtype=self.EPGtype, StartBouquet=self.StartBouquet, StartRef=self.StartRef, bouquetname=ServiceReference(self.epg_bouquet).getServiceName()))
else:
self.session.open(EPGSelection, services, zapFunc=self.zapToService, bouquetChangeCB=self.changeBouquetCB, EPGtype=self.EPGtype, StartBouquet=self.StartBouquet, StartRef=self.StartRef, bouquetname=ServiceReference(self.epg_bouquet).getServiceName())
def changeBouquetCB(self, direction, epgcall):
if self.bouquetSel:
if direction > 0:
self.bouquetSel.down()
else:
self.bouquetSel.up()
bouquet = self.bouquetSel.getCurrent()
services = self.getBouquetServices(bouquet)
if len(services):
self.epg_bouquet = bouquet
epgcall.setServices(services)
epgcall.setTitle(ServiceReference(bouquet).getServiceName())
def onBouquetSelectorClose(self, bouquet):
if bouquet:
services = self.getBouquetServices(bouquet)
if len(services):
self.epg_bouquet = bouquet
self.epg.setServices(services)
self.epg.setTitle(ServiceReference(self.epg_bouquet).getServiceName())
def closed(self, ret=False):
if not self.dlg_stack:
return
closedScreen = self.dlg_stack.pop()
if self.bouquetSel and closedScreen == self.bouquetSel:
self.bouquetSel = None
elif self.eventView and closedScreen == self.eventView:
self.eventView = None
if ret == True or ret == 'close':
dlgs=len(self.dlg_stack)
if dlgs > 0:
self.dlg_stack[dlgs-1].close(dlgs > 1)
self.reopen(ret)
def MultiServiceEPG(self, withCallback=True):
self.bouquets = self.servicelist.getBouquetList()
if self.bouquets is None:
cnt = 0
else:
cnt = len(self.bouquets)
if (self.EPGtype == "multi" and config.epgselection.multi_showbouquet.getValue()) or (self.EPGtype == "graph" and config.epgselection.graph_showbouquet.getValue()):
if cnt > 1: # show bouquet list
if withCallback:
self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, self.bouquets, self.openBouquetEPG, enableWrapAround=True)
else:
self.bouquetSel = self.session.open(BouquetSelector, self.bouquets, self.openBouquetEPG, enableWrapAround=True)
self.dlg_stack.append(self.bouquetSel)
elif cnt == 1:
self.openBouquetEPG(self.bouquets[0][1], withCallback)
else:
root = self.servicelist.getRoot()
if cnt > 1:
current = 0
rootstr = root.toCompareString()
for bouquet in self.bouquets:
if bouquet[1].toCompareString() == rootstr:
break
current += 1
if current >= cnt:
current = 0
self.bouquetSel = SilentBouquetSelector(self.bouquets, True, current)
if cnt >= 1:
self.openBouquetEPG(root, withCallback)
def openMultiServiceEPG(self):
if self.servicelist:
self.EPGtype = "multi"
self.StartBouquet = self.servicelist.getRoot()
if isMoviePlayerInfoBar(self):
self.StartRef = self.lastservice
else:
self.StartRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.MultiServiceEPG()
def openGraphEPG(self, reopen=False):
self.EPGtype = "graph"
if self.servicelist:
if not reopen:
self.StartBouquet = self.servicelist.getRoot()
self.StartRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.MultiServiceEPG()
def SingleServiceEPG(self):
try:
self.StartBouquet = self.servicelist.getRoot()
self.StartRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if isMoviePlayerInfoBar(self):
ref = self.lastservice
else:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref:
if self.servicelist.getMutableList() is not None: # bouquet in channellist
current_path = self.servicelist.getRoot()
services = self.getBouquetServices(current_path)
self.serviceSel = SimpleServicelist(services)
if self.serviceSel.selectService(ref):
self.epg_bouquet = current_path
self.session.openWithCallback(self.SingleServiceEPGClosed,EPGSelection, self.servicelist, zapFunc=self.zapToService, serviceChangeCB = self.changeServiceCB, EPGtype=self.EPGtype, StartBouquet=self.StartBouquet, StartRef=self.StartRef, bouquetname=ServiceReference(self.servicelist.getRoot()).getServiceName())
else:
self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
else:
self.session.open(EPGSelection, ref)
except:
pass
def changeServiceCB(self, direction, epg):
if self.serviceSel:
if direction > 0:
self.serviceSel.nextService()
else:
self.serviceSel.prevService()
epg.setService(self.serviceSel.currentService())
def SingleServiceEPGClosed(self, ret=False):
self.serviceSel = None
self.reopen(ret)
def openSingleServiceEPG(self, reopen=False):
if self.servicelist:
self.EPGtype = "enhanced"
self.SingleServiceEPG()
def openInfoBarEPG(self, reopen=False):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
if self.servicelist:
if not reopen:
self.StartBouquet = self.servicelist.getRoot()
self.StartRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if config.epgselection.infobar_type_mode.getValue() == 'single':
self.EPGtype = "infobar"
self.SingleServiceEPG()
else:
self.EPGtype = "infobargraph"
self.MultiServiceEPG()
def reopen(self, answer):
if answer == 'reopengraph':
self.openGraphEPG(True)
elif answer == 'reopeninfobargraph' or answer == 'reopeninfobar':
self.openInfoBarEPG(True)
elif answer == 'close' and isMoviePlayerInfoBar(self):
self.lastservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.close()
def showCoolTVGuide(self):
if Directories.fileExists("/usr/lib/enigma2/python/Plugins/Extensions/CoolTVGuide/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Cool TV Guide"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Cool TV Guide plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openSimilarList(self, eventid, refstr):
self.session.open(EPGSelection, refstr, eventid=eventid)
def getNowNext(self):
epglist = [ ]
service = self.session.nav.getCurrentService()
info = service and service.info()
ptr = info and info.getEvent(0)
if ptr:
epglist.append(ptr)
ptr = info and info.getEvent(1)
if ptr:
epglist.append(ptr)
self.epglist = epglist
def __evEventInfoChanged(self):
if self.is_now_next and len(self.dlg_stack) == 1:
self.getNowNext()
if self.eventView and self.epglist:
self.eventView.setEvent(self.epglist[0])
def showDefaultEPG(self):
if self.defaultEPGType is not None:
self.defaultEPGType()
return
self.EPGPressed()
def openEventView(self, simple=False):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.getNowNext()
epglist = self.epglist
if not epglist:
self.is_now_next = False
epg = eEPGCache.getInstance()
ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
if ptr:
epglist.append(ptr)
ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
if ptr:
epglist.append(ptr)
else:
self.is_now_next = True
if epglist:
if not simple:
self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
else:
self.eventView = self.session.openWithCallback(self.closed, EventViewSimple, epglist[0], ServiceReference(ref))
self.dlg_stack.append(self.eventView)
def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
epglist = self.epglist
if len(epglist) > 1:
tmp = epglist[0]
epglist[0]=epglist[1]
epglist[1]=tmp
setEvent(epglist[0])
class InfoBarRdsDecoder:
    """provides RDS and Rass support/display"""
    def __init__(self):
        # On-screen RDS info display, also mirrored to the LCD summary.
        self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
        self.session.instantiateSummaryDialog(self.rds_display)
        # Currently open interactive Rass screen, or None.
        self.rass_interactive = None
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evEnd: self.__serviceStopped,
                iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
            })
        self["RdsActions"] = ActionMap(["InfobarRdsActions"],
        {
            "startRassInteractive": self.startRassInteractive
        },-1)
        # Action stays disabled until the decoder reports Rass is available.
        self["RdsActions"].setEnabled(False)
        self.onLayoutFinish.append(self.rds_display.show)
        self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
    def RassInteractivePossibilityChanged(self, state):
        # Enable/disable the Rass key according to decoder capability.
        self["RdsActions"].setEnabled(state)
    def RassSlidePicChanged(self):
        # A new Rass slide arrived; show it unless the interactive screen is open
        # (which renders slides itself).
        if not self.rass_interactive:
            service = self.session.nav.getCurrentService()
            decoder = service and service.rdsDecoder()
            if decoder:
                decoder.showRassSlidePicture()
    def __serviceStopped(self):
        # Close any interactive Rass screen when the service ends.
        if self.rass_interactive is not None:
            rass_interactive = self.rass_interactive
            self.rass_interactive = None
            rass_interactive.close()
    def startRassInteractive(self):
        # Hide the passive display while the interactive screen is up.
        self.rds_display.hide()
        self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
    def RassInteractiveClosed(self, *val):
        # Restore the passive display and re-show the latest slide.
        if self.rass_interactive is not None:
            self.rass_interactive = None
            self.RassSlidePicChanged()
        self.rds_display.show()
class Seekbar(Screen):
    """Graphical seek bar: left/right adjust a percentage, OK seeks there."""
    def __init__(self, session, fwd):
        Screen.__init__(self, session)
        self.setTitle(_("Seek"))
        self.session = session
        self.fwd = fwd                 # direction hint passed by the caller (unused here)
        self.percent = 0.0             # current cursor position in percent of length
        self.length = None             # (error_flag, pts) tuple from the seek interface
        service = session.nav.getCurrentService()
        if service:
            self.seek = service.seek()
            if self.seek:
                self.length = self.seek.getLength()
                position = self.seek.getPlayPosition()
                if self.length and position and int(self.length[1]) > 0:
                    if int(position[1]) > 0:
                        self.percent = float(position[1]) * 100.0 / float(self.length[1])
                else:
                    # no usable length: abort immediately
                    # NOTE(review): self.close() is called during __init__ here —
                    # presumably safe for this framework; verify before restructuring.
                    self.close()
        self["cursor"] = MovingPixmap()
        self["time"] = Label()
        self["actions"] = ActionMap(["WizardActions", "DirectionActions"], {"back": self.exit, "ok": self.keyOK, "left": self.keyLeft, "right": self.keyRight}, -1)
        # Periodic repaint of the cursor/time display.
        self.cursorTimer = eTimer()
        self.cursorTimer.callback.append(self.updateCursor)
        self.cursorTimer.start(200, False)
    def updateCursor(self):
        # Map percent to pixel x (145..415) and render the time at that position.
        if self.length:
            x = 145 + int(2.7 * self.percent)
            self["cursor"].moveTo(x, 15, 1)
            self["cursor"].startMoving()
            pts = int(float(self.length[1]) / 100.0 * self.percent)
            self["time"].setText("%d:%02d" % ((pts/60/90000), ((pts/90000)%60)))
    def exit(self):
        self.cursorTimer.stop()
        self.close()
    def keyOK(self):
        # Seek to the selected percentage of the total length.
        if self.length:
            self.seek.seekTo(int(float(self.length[1]) / 100.0 * self.percent))
            self.exit()
    def keyLeft(self):
        self.percent -= float(config.seek.sensibility.getValue()) / 10.0
        if self.percent < 0.0:
            self.percent = 0.0
    def keyRight(self):
        self.percent += float(config.seek.sensibility.getValue()) / 10.0
        if self.percent > 100.0:
            self.percent = 100.0
    def keyNumberGlobal(self, number):
        # NOTE(review): references self.positionEntry and ConfigListScreen, neither of
        # which this class defines/inherits — looks vestigial; confirm it is ever bound
        # to a number action before relying on it.
        sel = self["config"].getCurrent()[1]
        if sel == self.positionEntry:
            self.percent = float(number) * 10.0
        else:
            ConfigListScreen.keyNumberGlobal(self, number)
from enigma import eDVBVolumecontrol
class InfoBarSeek:
"""handles actions like seeking, pause"""
SEEK_STATE_PLAY = (0, 0, 0, ">")
SEEK_STATE_PAUSE = (1, 0, 0, "||")
SEEK_STATE_EOF = (1, 0, 0, "END")
def __init__(self, actionmap = "InfobarSeekActions"):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evEOF: self.__evEOF,
iPlayableService.evSOF: self.__evSOF,
})
self.fast_winding_hint_message_showed = False
class InfoBarSeekActionMap(HelpableActionMap):
def __init__(self, screen, *args, **kwargs):
HelpableActionMap.__init__(self, screen, *args, **kwargs)
self.screen = screen
def action(self, contexts, action):
print "action:", action
if action[:5] == "seek:":
time = int(action[5:])
self.screen.doSeekRelative(time * 90000)
return 1
elif action[:8] == "seekdef:":
key = int(action[8:])
time = (-config.seek.selfdefined_13.getValue(), False, config.seek.selfdefined_13.getValue(),
-config.seek.selfdefined_46.getValue(), False, config.seek.selfdefined_46.getValue(),
-config.seek.selfdefined_79.getValue(), False, config.seek.selfdefined_79.getValue())[key-1]
self.screen.doSeekRelative(time * 90000)
return 1
else:
return HelpableActionMap.action(self, contexts, action)
self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
{
"playpauseService": self.playpauseService,
"pauseService": (self.pauseService, _("Pause playback")),
"unPauseService": (self.unPauseService, _("Continue playback")),
"seekFwd": (self.seekFwd, _("Seek forward")),
"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
"seekBack": (self.seekBack, _("Seek backward")),
"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
"SeekbarFwd": self.seekFwdSeekbar,
"SeekbarBack": self.seekBackSeekbar
}, prio=-1)
# give them a little more priority to win over color buttons
self["SeekActionsPTS"] = InfoBarSeekActionMap(self, "InfobarSeekActionsPTS",
{
"playpauseService": self.playpauseService,
"pauseService": (self.pauseService, _("pause")),
"unPauseService": (self.unPauseService, _("continue")),
"seekFwd": (self.seekFwd, _("skip forward")),
"seekFwdManual": (self.seekFwdManual, _("skip forward (enter time)")),
"seekBack": (self.seekBack, _("skip backward")),
"seekBackManual": (self.seekBackManual, _("skip backward (enter time)")),
"SeekbarFwd": self.seekFwdSeekbar,
"SeekbarBack": self.seekBackSeekbar
}, prio=-1)
# give them a little more priority to win over color buttons
self["SeekActions"].setEnabled(False)
self["SeekActionsPTS"].setEnabled(False)
self.activity = 0
self.activityTimer = eTimer()
self.activityTimer.callback.append(self.doActivityTimer)
self.seekstate = self.SEEK_STATE_PLAY
self.lastseekstate = self.SEEK_STATE_PLAY
self.onPlayStateChanged = [ ]
self.lockedBecauseOfSkipping = False
self.__seekableStatusChanged()
def makeStateForward(self, n):
return (0, n, 0, ">> %dx" % n)
def makeStateBackward(self, n):
return (0, -n, 0, "<< %dx" % n)
def makeStateSlowMotion(self, n):
return (0, 0, n, "/%d" % n)
def isStateForward(self, state):
return state[1] > 1
def isStateBackward(self, state):
return state[1] < 0
def isStateSlowMotion(self, state):
return state[1] == 0 and state[2] > 1
def getHigher(self, n, lst):
for x in lst:
if x > n:
return x
return False
def getLower(self, n, lst):
lst = lst[:]
lst.reverse()
for x in lst:
if x < n:
return x
return False
def showAfterSeek(self):
if isinstance(self, InfoBarShowHide):
self.doShow()
def up(self):
pass
def down(self):
pass
def getSeek(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
seek = service.seek()
if seek is None or not seek.isCurrentlySeekable():
return None
return seek
def isSeekable(self):
if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshift_enabled):
return False
return True
def __seekableStatusChanged(self):
# print "seekable status changed!"
if not self.isSeekable():
# print "not seekable, return to play"
self["SeekActions"].setEnabled(False)
self.setSeekState(self.SEEK_STATE_PLAY)
else:
# print "seekable"
self["SeekActions"].setEnabled(True)
self.activityTimer.start(200, False)
for c in self.onPlayStateChanged:
c(self.seekstate)
def doActivityTimer(self):
if self.isSeekable():
self.activity += 16
hdd = 1
if self.activity >= 100:
self.activity = 0
else:
self.activityTimer.stop()
self.activity = 0
hdd = 0
if os.path.exists("/proc/stb/lcd/symbol_hdd"):
file = open("/proc/stb/lcd/symbol_hdd", "w")
file.write('%d' % int(hdd))
file.close()
if os.path.exists("/proc/stb/lcd/symbol_hddprogress"):
file = open("/proc/stb/lcd/symbol_hddprogress", "w")
file.write('%d' % int(self.activity))
file.close()
def __serviceStarted(self):
self.fast_winding_hint_message_showed = False
self.seekstate = self.SEEK_STATE_PLAY
self.__seekableStatusChanged()
def setMute(self):
if (eDVBVolumecontrol.getInstance().isMuted()):
print "mute already active"
else:
print "NO mute so turrning ON"
eDVBVolumecontrol.getInstance().volumeToggleMute()
def leaveMute(self):
if (eDVBVolumecontrol.getInstance().isMuted()):
eDVBVolumecontrol.getInstance().volumeToggleMute()
def setSeekState(self, state):
service = self.session.nav.getCurrentService()
if service is None:
return False
if not self.isSeekable():
if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
state = self.SEEK_STATE_PLAY
pauseable = service.pause()
if pauseable is None:
# print "not pauseable."
state = self.SEEK_STATE_PLAY
self.seekstate = state
if pauseable is not None:
if self.seekstate[0] and self.seekstate[3] == '||':
print "resolved to PAUSE"
self.leaveMute()
self.activityTimer.stop()
pauseable.pause()
elif self.seekstate[0] and self.seekstate[3] == 'END':
print "resolved to STOP"
self.leaveMute()
self.activityTimer.stop()
service.stop()
elif self.seekstate[1]:
print "resolved to FAST FORWARD"
self.setMute()
pauseable.setFastForward(self.seekstate[1])
elif self.seekstate[2]:
print "resolved to SLOW MOTION"
self.setMute()
pauseable.setSlowMotion(self.seekstate[2])
else:
print "resolved to PLAY"
self.leaveMute()
self.activityTimer.start(200, False)
pauseable.unpause()
for c in self.onPlayStateChanged:
c(self.seekstate)
self.checkSkipShowHideLock()
return True
def playpauseService(self):
if self.seekstate == self.SEEK_STATE_PLAY:
self.pauseService()
else:
if self.seekstate == self.SEEK_STATE_PAUSE:
if config.seek.on_pause.getValue() == "play":
self.unPauseService()
elif config.seek.on_pause.getValue() == "step":
self.doSeekRelative(1)
elif config.seek.on_pause.getValue() == "last":
self.setSeekState(self.lastseekstate)
self.lastseekstate = self.SEEK_STATE_PLAY
else:
self.unPauseService()
def pauseService(self):
if self.seekstate != self.SEEK_STATE_EOF:
self.lastseekstate = self.seekstate
self.setSeekState(self.SEEK_STATE_PAUSE)
def unPauseService(self):
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
self.setSeekState(self.SEEK_STATE_PLAY)
def doSeek(self, pts):
seekable = self.getSeek()
if seekable is None:
return
seekable.seekTo(pts)
def doSeekRelative(self, pts):
seekable = self.getSeek()
if seekable is None and int(self.seek.getLength()[1]) < 1:
return
prevstate = self.seekstate
if self.seekstate == self.SEEK_STATE_EOF:
if prevstate == self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_PAUSE)
else:
self.setSeekState(self.SEEK_STATE_PLAY)
seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
if abs(pts) > 100 and config.usage.show_infobar_on_skip.getValue():
self.showAfterSeek()
def seekFwd(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
return 0 # trade as unhandled action
if self.seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.getValue())))
elif self.seekstate == self.SEEK_STATE_PAUSE:
if len(config.seek.speeds_slowmotion.getValue()):
self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.getValue()[-1]))
else:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.getValue())))
elif self.seekstate == self.SEEK_STATE_EOF:
pass
elif self.isStateForward(self.seekstate):
speed = self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_forward.getValue()) or config.seek.speeds_forward.getValue()[-1]
self.setSeekState(self.makeStateForward(speed))
elif self.isStateBackward(self.seekstate):
speed = -self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getLower(speed, config.seek.speeds_backward.getValue())
if speed:
self.setSeekState(self.makeStateBackward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateSlowMotion(self.seekstate):
speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.getValue()) or config.seek.speeds_slowmotion.getValue()[0]
self.setSeekState(self.makeStateSlowMotion(speed))
def seekBack(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
return 0 # trade as unhandled action
seekstate = self.seekstate
if seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.getValue())))
elif seekstate == self.SEEK_STATE_EOF:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.getValue())))
self.doSeekRelative(-6)
elif seekstate == self.SEEK_STATE_PAUSE:
self.doSeekRelative(-1)
elif self.isStateForward(seekstate):
speed = seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getLower(speed, config.seek.speeds_forward.getValue())
if speed:
self.setSeekState(self.makeStateForward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateBackward(seekstate):
speed = -seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_backward.getValue()) or config.seek.speeds_backward.getValue()[-1]
self.setSeekState(self.makeStateBackward(speed))
elif self.isStateSlowMotion(seekstate):
speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.getValue())
if speed:
self.setSeekState(self.makeStateSlowMotion(speed))
else:
self.setSeekState(self.SEEK_STATE_PAUSE)
self.pts_lastseekspeed = self.seekstate[1]
def seekFwdManual(self, fwd=True):
if config.seek.baractivation.getValue() == "leftright":
self.session.open(Seekbar, fwd)
else:
self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
def seekBackManual(self, fwd=False):
if config.seek.baractivation.getValue() == "leftright":
self.session.open(Seekbar, fwd)
else:
self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
def seekFwdSeekbar(self, fwd=True):
if not config.seek.baractivation.getValue() == "leftright":
self.session.open(Seekbar, fwd)
else:
self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
def fwdSeekTo(self, minutes):
self.doSeekRelative(minutes * 60 * 90000)
def seekBackSeekbar(self, fwd=False):
if not config.seek.baractivation.getValue() == "leftright":
self.session.open(Seekbar, fwd)
else:
self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
def rwdSeekTo(self, minutes):
# print "rwdSeekTo"
self.doSeekRelative(-minutes * 60 * 90000)
def checkSkipShowHideLock(self):
if self.seekstate == self.SEEK_STATE_PLAY or self.seekstate == self.SEEK_STATE_EOF:
self.lockedBecauseOfSkipping = False
self.unlockShow()
else:
wantlock = self.seekstate != self.SEEK_STATE_PLAY
if config.usage.show_infobar_on_skip.getValue():
if self.lockedBecauseOfSkipping and not wantlock:
self.unlockShow()
self.lockedBecauseOfSkipping = False
if wantlock and not self.lockedBecauseOfSkipping:
self.lockShow()
self.lockedBecauseOfSkipping = True
def calcRemainingTime(self):
seekable = self.getSeek()
if seekable is not None:
len = seekable.getLength()
try:
tmp = self.cueGetEndCutPosition()
if tmp:
len = (False, tmp)
except:
pass
pos = seekable.getPlayPosition()
speednom = self.seekstate[1] or 1
speedden = self.seekstate[2] or 1
if not len[0] and not pos[0]:
if len[1] <= pos[1]:
return 0
time = (len[1] - pos[1])*speedden/(90*speednom)
return time
return False
def __evEOF(self):
if self.seekstate == self.SEEK_STATE_EOF:
return
# if we are seeking forward, we try to end up ~1s before the end, and pause there.
seekstate = self.seekstate
if self.seekstate != self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_EOF)
if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-1)
self.doEofInternal(True)
if seekstate == self.SEEK_STATE_PLAY: # regular EOF
self.doEofInternal(True)
else:
self.doEofInternal(False)
def doEofInternal(self, playing):
pass # Defined in subclasses
def __evSOF(self):
self.setSeekState(self.SEEK_STATE_PLAY)
self.doSeek(0)
class InfoBarPVRState:
	"""Mixin showing a PVR state dialog (play/pause/ff/rew icon + speed)
	and keeping the attached LCD summary in sync with playback state."""

	def __init__(self, screen=PVRState, force_show = False):
		self.onChangedEntry = [ ]
		self.onPlayStateChanged.append(self.__playStateChanged)
		self.pvrStateDialog = self.session.instantiateDialog(screen)
		self.onShow.append(self._mayShow)
		self.onHide.append(self.pvrStateDialog.hide)
		# When True the dialog stays visible even without the infobar.
		self.force_show = force_show

	def createSummary(self):
		return InfoBarMoviePlayerSummary

	def _mayShow(self):
		# The MoviePlayer embeds its own state widgets; blank them when the
		# embedded display is disabled in the configuration.
		if "MoviePlayer'>" in str(self) and not config.usage.movieplayer_pvrstate.getValue():
			self["state"].setText("")
			self["statusicon"].setPixmapNum(6)
			self["speed"].setText("")
		if self.execing and self.seekstate != self.SEEK_STATE_EOF and not config.usage.movieplayer_pvrstate.getValue():
			self.pvrStateDialog.show()
			self.startHideTimer()

	def __playStateChanged(self, state):
		playstateString = state[3]
		state_summary = playstateString
		self.pvrStateDialog["state"].setText(playstateString)
		# Map the textual play state onto (status icon index, speed label).
		if playstateString == '>':
			statusicon_summary, speed_text = 0, ""
		elif playstateString == '||':
			statusicon_summary, speed_text = 1, ""
		elif playstateString == 'END':
			statusicon_summary, speed_text = 2, ""
		elif playstateString.startswith('>>'):
			# e.g. ">> 4x" -> speed label "4x"
			statusicon_summary, speed_text = 3, playstateString.split()[1]
		elif playstateString.startswith('<<'):
			statusicon_summary, speed_text = 4, playstateString.split()[1]
		elif playstateString.startswith('/'):
			# slow motion: the whole state string is the speed label
			statusicon_summary, speed_text = 5, playstateString
		else:
			# Previously an unknown state string left the summary variables
			# unbound, raising NameError in the callback loop below.
			statusicon_summary, speed_text = 0, ""
		self.pvrStateDialog["statusicon"].setPixmapNum(statusicon_summary)
		self.pvrStateDialog["speed"].setText(speed_text)
		speed_summary = self.pvrStateDialog["speed"].text
		# Mirror the state into the MoviePlayer's own widgets if enabled.
		if "MoviePlayer'>" in str(self) and config.usage.movieplayer_pvrstate.getValue():
			self["state"].setText(playstateString)
			self["statusicon"].setPixmapNum(statusicon_summary)
			self["speed"].setText(speed_text)
		for cb in self.onChangedEntry:
			cb(state_summary, speed_summary, statusicon_summary)
		# if we return into "PLAY" state, ensure that the dialog gets hidden if there will be no infobar displayed
		if not config.usage.show_infobar_on_skip.getValue() and self.seekstate == self.SEEK_STATE_PLAY and not self.force_show:
			self.pvrStateDialog.hide()
		else:
			self._mayShow()
class InfoBarTimeshiftState(InfoBarPVRState):
	"""PVR state dialog specialised for timeshift: force-shows the
	TimeshiftState screen and toggles the timeshift/seek action maps
	according to whether the timeshift buffer is currently seekable."""

	def __init__(self):
		InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show = True)
		self.onHide.append(self.__hideTimeshiftState)

	def _mayShow(self):
		"""Enable/disable action maps and show/hide the state dialog.

		The permanent-timeshift (config.timeshift.enabled) and classic
		timeshift modes use opposite activate/action map states.
		"""
		if config.timeshift.enabled.getValue():
			self["TimeshiftActivateActions"].setEnabled(True)
			self["TimeshiftActions"].setEnabled(False)
		else:
			self["TimeshiftActivateActions"].setEnabled(False)
			self["TimeshiftActions"].setEnabled(True)
		if self.execing and self.timeshift_enabled and self.isSeekable():
			if config.timeshift.enabled.getValue():
				self["TimeshiftActivateActions"].setEnabled(False)
				self["TimeshiftActions"].setEnabled(True)
				self["SeekActions"].setEnabled(True)
				# Sync the seek pointer before swapping to the PTS seek map.
				InfoBarTimeshift.ptsSeekPointerSetCurrentPos(self)
				self["SeekActions"].setEnabled(False)
				self["SeekActionsPTS"].setEnabled(True)
				if config.timeshift.showinfobar.getValue():
					self["TimeshiftSeekPointerActions"].setEnabled(True)
			else:
				self["SeekActions"].setEnabled(True)
			self.pvrStateDialog.show()
			self.startHideTimer()
		elif self.execing and self.timeshift_enabled and not self.isSeekable():
			if config.timeshift.enabled.getValue():
				self["SeekActions"].setEnabled(False)
				self["SeekActionsPTS"].setEnabled(False)
				self["TimeshiftSeekPointerActions"].setEnabled(False)
			else:
				self["TimeshiftActivateActions"].setEnabled(True)
				self["SeekActions"].setEnabled(False)
			self.pvrStateDialog.hide()

	def __hideTimeshiftState(self):
		"""Restore the action map states when the dialog is hidden."""
		if config.timeshift.enabled.getValue():
			self["TimeshiftActivateActions"].setEnabled(True)
			self["TimeshiftActions"].setEnabled(False)
			self["SeekActionsPTS"].setEnabled(False)
			self["TimeshiftSeekPointerActions"].setEnabled(False)
			if self.timeshift_enabled and self.isSeekable():
				self["TimeshiftActivateActions"].setEnabled(False)
				self["TimeshiftActions"].setEnabled(True)
				self["SeekActions"].setEnabled(True)
			elif self.timeshift_enabled and not self.isSeekable():
				self["SeekActions"].setEnabled(False)
		else:
			self["TimeshiftActivateActions"].setEnabled(False)
			self["TimeshiftActions"].setEnabled(True)
			if self.timeshift_enabled and self.isSeekable():
				self["SeekActions"].setEnabled(True)
			elif self.timeshift_enabled and not self.isSeekable():
				self["TimeshiftActivateActions"].setEnabled(True)
				self["SeekActions"].setEnabled(False)
		self.pvrStateDialog.hide()
class InfoBarShowMovies:
	"""Binds the movie-list related keys.

	Little more than an action map: movieList/up/down all open the movie
	list; the actual behaviour comes from the screen using this mixin.
	"""
	def __init__(self):
		open_hint = _("Open the movie list")
		movie_actions = {
			"movieList": (self.showMovies, open_hint),
			"up": (self.up, open_hint),
			"down": (self.down, open_hint),
		}
		self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions", movie_actions)
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presses pause again       FILE   record      PLAY               enable                  disable              enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
#       the service loses its "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
class InfoBarTimeshift:
	def __init__(self):
		"""Wire up permanent timeshift (PTS): key maps, service event hooks,
		the helper timers and the bookkeeping for the rotating buffer files."""
		self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
			{
				"timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key"
				"timeshiftStop": (self.stopTimeshift, _("Stop timeshift")), # currently undefined :), probably 'TV'
				"instantRecord": self.instantRecord,
				"restartTimeshift": self.restartTimeshift
			}, prio=1)
		self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
			{
				"timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
				"timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
			}, prio=-1) # priority over record
		self["TimeshiftSeekPointerActions"] = ActionMap(["InfobarTimeshiftSeekPointerActions"],
			{
				"SeekPointerOK": self.ptsSeekPointerOK,
				"SeekPointerLeft": self.ptsSeekPointerLeft,
				"SeekPointerRight": self.ptsSeekPointerRight
			},-2)
		# All maps start disabled; __seekableStatusChanged() toggles them.
		self["TimeshiftActions"].setEnabled(False)
		self["TimeshiftActivateActions"].setEnabled(False)
		self["TimeshiftSeekPointerActions"].setEnabled(False)
		self.timeshift_enabled = False
		self.check_timeshift = True
		self.ts_rewind_timer = eTimer()
		self.ts_rewind_timer.callback.append(self.rewindService)
		self.__event_tracker = ServiceEventTracker(screen = self, eventmap =
			{
				iPlayableService.evStart: self.__evStart,
				iPlayableService.evEnd: self.__evEnd,
				iPlayableService.evSOF: self.__evSOF,
				iPlayableService.evUpdatedInfo: self.__evInfoChanged,
				iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
				iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
				iPlayableService.evUser+1: self.ptsTimeshiftFileChanged
			})
		# State of the rotating pts_livebuffer.<n> files and playback position.
		self.pts_begintime = 0
		self.pts_pathchecked = False
		self.pts_pvrStateDialog = "TimeshiftState"
		self.pts_seektoprevfile = False
		self.pts_switchtolive = False
		self.pts_currplaying = 1
		self.pts_lastseekspeed = 0
		self.pts_service_changed = False
		self.pts_record_running = self.session.nav.RecordTimer.isRecording()
		self.save_current_timeshift = False
		self.save_timeshift_postaction = None
		self.save_timeshift_filename = None
		self.service_changed = 0
		# Init Global Variables
		self.session.ptsmainloopvalue = 0
		config.timeshift.isRecording.value = False
		# Init eBackgroundFileEraser
		self.BgFileEraser = eBackgroundFileEraser.getInstance()
		# Init PTS Delay-Timer (delayed automatic timeshift activation)
		self.pts_delay_timer = eTimer()
		self.pts_delay_timer.callback.append(self.activatePermanentTimeshift)
		# Init PTS LengthCheck-Timer
		self.pts_LengthCheck_timer = eTimer()
		self.pts_LengthCheck_timer.callback.append(self.ptsLengthCheck)
		# Init PTS MergeRecords-Timer
		self.pts_mergeRecords_timer = eTimer()
		self.pts_mergeRecords_timer.callback.append(self.ptsMergeRecords)
		# Init PTS Merge Cleanup-Timer
		self.pts_mergeCleanUp_timer = eTimer()
		self.pts_mergeCleanUp_timer.callback.append(self.ptsMergePostCleanUp)
		# Init PTS QuitMainloop-Timer
		self.pts_QuitMainloop_timer = eTimer()
		self.pts_QuitMainloop_timer.callback.append(self.ptsTryQuitMainloop)
		# Init PTS CleanUp-Timer (sweeps stranded buffers shortly after start)
		self.pts_cleanUp_timer = eTimer()
		self.pts_cleanUp_timer.callback.append(self.ptsCleanTimeshiftFolder)
		self.pts_cleanUp_timer.start(30000, True)
		# Init PTS SeekBack-Timer
		self.pts_SeekBack_timer = eTimer()
		self.pts_SeekBack_timer.callback.append(self.ptsSeekBackTimer)
		# Init Block-Zap Timer
		self.pts_blockZap_timer = eTimer()
		# Record Event Tracker
		self.session.nav.RecordTimer.on_state_change.append(self.ptsTimerEntryStateChange)
		# Keep Current Event Info for recordings
		self.pts_eventcount = 1
		self.pts_curevent_begin = int(time())
		self.pts_curevent_end = 0
		self.pts_curevent_name = _("Timeshift")
		self.pts_curevent_description = ""
		self.pts_curevent_servicerefname = ""
		self.pts_curevent_station = ""
		self.pts_curevent_eventid = None
		# Init PTS Infobar
def getTimeshift(self):
service = self.session.nav.getCurrentService()
return service and service.timeshift()
def __evStart(self):
self.service_changed = 1
self.pts_delay_timer.stop()
self.pts_service_changed = True
# self.pvrStateDialog.hide()
# self.timeshift_enabled = False
# self.__seekableStatusChanged()
def __evEnd(self):
self.service_changed = 0
if not config.timeshift.isRecording.getValue():
self.timeshift_enabled = False
self.__seekableStatusChanged()
	def __evSOF(self):
		"""Start of the current buffer file reached while timeshifting:
		hop to the previous pts_livebuffer file (the buffers rotate, so
		index 1 wraps to maxevents)."""
		if not config.timeshift.enabled.getValue() or not self.timeshift_enabled:
			return
		if self.pts_currplaying == 1:
			preptsfile = config.timeshift.maxevents.getValue()
		else:
			preptsfile = self.pts_currplaying-1
		# Switch to previous TS file by seeking forward to next one
		if Directories.fileExists("%spts_livebuffer.%s" % (config.usage.timeshift_path.getValue(), preptsfile), 'r') and preptsfile != self.pts_eventcount:
			self.pts_seektoprevfile = True
			self.ptsSetNextPlaybackFile("pts_livebuffer.%s" % (preptsfile))
			if self.seekstate[3].startswith('<<'):
				# rewinding: keep rewinding in the previous file
				# self.setSeekState(self.SEEK_STATE_PAUSE)
				# if self.seekstate != self.SEEK_STATE_PLAY:
				# 	self.setSeekState(self.SEEK_STATE_PLAY)
				self.doSeek(-10)
				self.seekBack()
			else:
				# bounce through pause/play so the seek lands in the new file
				self.setSeekState(self.SEEK_STATE_PAUSE)
				if self.seekstate != self.SEEK_STATE_PLAY:
					self.setSeekState(self.SEEK_STATE_PLAY)
				self.doSeek(-1)
				self.seekFwd()
def __evInfoChanged(self):
if self.service_changed:
self.service_changed = 0
# We zapped away before saving the file, save it now!
if self.save_current_timeshift:
self.SaveTimeshift("pts_livebuffer.%s" % (self.pts_eventcount))
# Delete Timeshift Records on zap
self.pts_eventcount = 0
self.pts_cleanUp_timer.start(3000, True)
	def __evEventInfoChanged(self):
		"""EPG event changed: finish a pending save (with record margin if
		configured) and (re)schedule automatic timeshift activation."""
		if not config.timeshift.enabled.getValue():
			return
		# Get Current Event Info
		service = self.session.nav.getCurrentService()
		old_begin_time = self.pts_begintime
		info = service and service.info()
		ptr = info and info.getEvent(0)
		self.pts_begintime = ptr and ptr.getBeginTime() or 0
		# Save current TimeShift permanently now ...
		if info.getInfo(iServiceInformation.sVideoPID) != -1:
			# Take care of Record Margin Time ...
			if self.save_current_timeshift and self.timeshift_enabled:
				if config.recording.margin_after.getValue() > 0 and len(self.recording) == 0:
					# Event ended: save the buffer and record the margin
					# separately; both parts get merged later.
					self.SaveTimeshift(mergelater=True)
					recording = RecordTimerEntry(ServiceReference(self.session.nav.getCurrentlyPlayingServiceOrGroup()), time(), time()+(config.recording.margin_after.getValue() * 60), self.pts_curevent_name, self.pts_curevent_description, self.pts_curevent_eventid, dirname = config.usage.default_path.getValue())
					recording.dontSave = True
					self.session.nav.RecordTimer.record(recording)
					self.recording.append(recording)
				else:
					self.SaveTimeshift()
			# Restarting active timers after zap ...
			if self.pts_delay_timer.isActive() and not self.timeshift_enabled:
				self.pts_delay_timer.start(config.timeshift.startdelay.getValue() * 1000, True)
			if self.pts_cleanUp_timer.isActive() and not self.timeshift_enabled:
				self.pts_cleanUp_timer.start(3000, True)
			# (Re)Start TimeShift
			if not self.pts_delay_timer.isActive():
				if not self.timeshift_enabled or old_begin_time != self.pts_begintime or old_begin_time == 0:
					if self.pts_service_changed:
						self.pts_service_changed = False
						self.pts_delay_timer.start(config.timeshift.startdelay.getValue() * 1000, True)
					else:
						self.pts_delay_timer.start(1000, True)
	def __seekableStatusChanged(self):
		"""Seekable state of the service changed: rewire the timeshift and
		seek action maps and reset the seek pointer when dropping back to
		live TV."""
		self["SeekActionsPTS"].setEnabled(False)
		self["TimeshiftSeekPointerActions"].setEnabled(False)
		if config.timeshift.enabled.getValue():
			self["TimeshiftActivateActions"].setEnabled(True)
			self["TimeshiftActions"].setEnabled(False)
			if self.timeshift_enabled and self.isSeekable():
				self["TimeshiftActivateActions"].setEnabled(False)
				self["TimeshiftActions"].setEnabled(True)
				self["SeekActions"].setEnabled(True)
			elif self.timeshift_enabled and not self.isSeekable():
				self["SeekActions"].setEnabled(False)
		else:
			# classic (non-permanent) timeshift handling
			self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshift_enabled)
			state = self.getSeek() is not None and self.timeshift_enabled
			self["SeekActions"].setEnabled(state)
			if not state:
				self.setSeekState(self.SEEK_STATE_PLAY)
		# Reset Seek Pointer And Eventname in InfoBar
		if config.timeshift.enabled.getValue() and self.timeshift_enabled and not self.isSeekable():
			if self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState":
				self.pvrStateDialog["eventname"].setText("")
			self.ptsSeekPointerReset()
		# setNextPlaybackFile() when switching back to live tv
		if config.timeshift.enabled.getValue() and self.timeshift_enabled and not self.isSeekable():
			# NOTE(review): pts_starttime is only set once timeshift has
			# actually started -- presumably guaranteed by timeshift_enabled.
			if self.pts_starttime <= (time()-5):
				self.pts_blockZap_timer.start(3000, True)
			self.pts_currplaying = self.pts_eventcount
			self.ptsSetNextPlaybackFile("pts_livebuffer.%s" % (self.pts_eventcount))
def eraseTimeshiftFile(self):
for filename in os.listdir(config.usage.timeshift_path.getValue()):
if filename.startswith("timeshift.") and not filename.endswith(".del") and not filename.endswith(".copy"):
self.BgFileEraser.erase("%s%s" % (config.usage.timeshift_path.getValue(),filename))
	def activatePermanentTimeshift(self):
		"""(Re)start the permanent timeshift buffer for the current event.

		Bails out when the timeshift path is unusable, the box is in
		standby, no live TV is playing, or a recording should suppress
		timeshift.  On success a new pts_livebuffer slot is allocated.
		"""
		self.createTimeshiftFolder()
		if self.ptsCheckTimeshiftPath() is False or self.session.screen["Standby"].boolean is True or self.ptsLiveTVStatus() is False or (config.timeshift.stopwhilerecording.getValue() and self.pts_record_running):
			return
		# Replace PVR Timeshift State Icon
		if self.pts_pvrStateDialog != "Screens.PVRState.PTSTimeshiftState":
			self.pts_pvrStateDialog = "Screens.PVRState.PTSTimeshiftState"
			self.pvrStateDialog = self.session.instantiateDialog(Screens.PVRState.PTSTimeshiftState)
		# Set next-file on event change only when watching latest timeshift ...
		if self.isSeekable() and self.pts_eventcount == self.pts_currplaying:
			pts_setnextfile = True
		else:
			pts_setnextfile = False
		# Update internal Event Counter (buffer slots rotate at maxevents)
		if self.pts_eventcount >= config.timeshift.maxevents.getValue():
			self.pts_eventcount = 0
		self.pts_eventcount += 1
		# setNextPlaybackFile() on event change while timeshifting
		if self.pts_eventcount > 1 and self.isSeekable() and pts_setnextfile:
			self.ptsSetNextPlaybackFile("pts_livebuffer.%s" % (self.pts_eventcount))
		# Do not switch back to LiveTV while timeshifting
		if self.isSeekable():
			switchToLive = False
		else:
			switchToLive = True
		# (Re)start Timeshift now
		self.stopTimeshift(True, switchToLive)
		ts = self.getTimeshift()
		if ts and not ts.startTimeshift():
			# NOTE(review): on VU boxes this clears the front panel timeshift
			# symbol while a recording runs -- confirm intended semantics.
			if (getBoxType() == 'vuuno' or getBoxType() == 'vuduo') and os.path.exists("/proc/stb/lcd/symbol_timeshift"):
				if self.session.nav.RecordTimer.isRecording():
					f = open("/proc/stb/lcd/symbol_timeshift", "w")
					f.write("0")
					f.close()
			self.pts_starttime = time()
			self.pts_LengthCheck_timer.start(120000)
			self.timeshift_enabled = True
			self.save_timeshift_postaction = None
			self.ptsGetEventInfo()
			self.ptsCreateHardlink()
			self.__seekableStatusChanged()
		else:
			self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, timeout=5)
			self.pts_eventcount = 0
def createTimeshiftFolder(self):
timeshiftdir = Directories.resolveFilename(Directories.SCOPE_TIMESHIFT)
if not Directories.pathExists(timeshiftdir):
try:
os.makedirs(timeshiftdir)
except:
os.makedirs("/media/hdd/")
os.makedirs(timeshiftdir)
print "[TimeShift] Failed to create %s !!" %timeshiftdir
	def startTimeshift(self):
		"""Key handler for the timeshift ("yellow") button.

		With permanent timeshift enabled the buffer is (re)activated and
		playback pauses at the live position; otherwise classic timeshift
		is started on demand.
		"""
		self.createTimeshiftFolder()
		if config.timeshift.enabled.getValue():
			self.pts_delay_timer.stop()
			self.activatePermanentTimeshift()
			self.activateTimeshiftEndAndPause()
		else:
			# print "enable timeshift"
			ts = self.getTimeshift()
			if ts is None:
				self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, timeout=5)
				# print "no ts interface"
				return 0
			if self.timeshift_enabled:
				print "hu, timeshift already enabled?"
			else:
				# startTimeshift() returns 0/falsy on success
				if not ts.startTimeshift():
					self.timeshift_enabled = True
					# we remove the "relative time" for now.
					#self.pvrStateDialog["timeshift"].setRelative(time.time())
					self.activateTimeshiftEnd(False)
					# enable the "TimeshiftEnableActions", which will override
					# the startTimeshift actions
					self.__seekableStatusChanged()
				else:
					print "timeshift failed"
	def stopTimeshift(self, answer=True, switchToLive=True):
		"""Stop timeshift and optionally jump back to live TV.

		answer -- False when called back from a confirmation dialog that
		was declined.
		switchToLive -- when True, a seekable permanent timeshift first
		seeks to the live position instead of tearing down the buffer.
		"""
		if switchToLive:
			if not answer or self.checkTimeshiftRunning(self.stopTimeshift):
				return
			# Jump Back to Live TV
			if config.timeshift.enabled.getValue() and self.timeshift_enabled:
				if self.isSeekable():
					self.pts_switchtolive = True
					self.ptsSetNextPlaybackFile("")
					# pause/play bounce so the following seek takes effect
					self.setSeekState(self.SEEK_STATE_PAUSE)
					if self.seekstate != self.SEEK_STATE_PLAY:
						self.setSeekState(self.SEEK_STATE_PLAY)
					self.doSeek(-1) # seek 1 gop before end
					self.seekFwd() # seekFwd to switch to live TV
					return 1
		was_enabled = self.timeshift_enabled
		ts = self.getTimeshift()
		if ts is None:
			return
		try:
			# newer drivers accept the switchToLive flag
			ts.stopTimeshift(switchToLive)
		except:
			ts.stopTimeshift()
		self.timeshift_enabled = False
		# self.pvrStateDialog.hide()
		# disable actions
		self.__seekableStatusChanged()
		if was_enabled and not self.timeshift_enabled:
			self.timeshift_enabled = False
			self.pts_LengthCheck_timer.stop()
	def restartTimeshift(self):
		"""Force a fresh permanent timeshift buffer and notify the user."""
		self.activatePermanentTimeshift()
		Notifications.AddNotification(MessageBox, _("[TimeShift] Restarting Timeshift!"), MessageBox.TYPE_INFO, timeout=5)
def saveTimeshiftPopup(self):
self.session.openWithCallback(self.saveTimeshiftPopupCallback, ChoiceBox, \
title=_("The Timeshift record was not saved yet!\nWhat do you want to do now with the timeshift file?"), \
list=((_("Save Timeshift as Movie and stop recording"), "savetimeshift"), \
(_("Save Timeshift as Movie and continue recording"), "savetimeshiftandrecord"), \
(_("Don't save Timeshift as Movie"), "noSave")))
def saveTimeshiftPopupCallback(self, answer):
if answer is None:
return
if answer[1] == "savetimeshift":
self.saveTimeshiftActions("savetimeshift", self.save_timeshift_postaction)
elif answer[1] == "savetimeshiftandrecord":
self.saveTimeshiftActions("savetimeshiftandrecord", self.save_timeshift_postaction)
elif answer[1] == "noSave":
self.save_current_timeshift = False
self.saveTimeshiftActions("noSave", self.save_timeshift_postaction)
def saveTimeshiftEventPopup(self):
filecount = 0
entrylist = []
entrylist.append((_("Current Event:")+" %s" % (self.pts_curevent_name), "savetimeshift"))
filelist = os.listdir(config.usage.timeshift_path.getValue())
if filelist is not None:
filelist.sort()
for filename in filelist:
if (filename.startswith("pts_livebuffer.") is True) and (filename.endswith(".del") is False and filename.endswith(".meta") is False and filename.endswith(".eit") is False and filename.endswith(".copy") is False):
statinfo = os.stat("%s%s" % (config.usage.timeshift_path.getValue(),filename))
if statinfo.st_mtime < (time()-5.0):
# Get Event Info from meta file
readmetafile = open("%s%s.meta" % (config.usage.timeshift_path.getValue(),filename), "r")
servicerefname = readmetafile.readline()[0:-1]
eventname = readmetafile.readline()[0:-1]
description = readmetafile.readline()[0:-1]
begintime = readmetafile.readline()[0:-1]
readmetafile.close()
# Add Event to list
filecount += 1
entrylist.append((_("Record") + " #%s (%s): %s" % (filecount,strftime("%H:%M",localtime(int(begintime))),eventname), "%s" % filename))
self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=_("Which event do you want to save permanently?"), list=entrylist)
def saveTimeshiftActions(self, action=None, postaction=None):
self.save_timeshift_postaction = postaction
if action is None:
if config.timeshift.favoriteSaveAction.getValue() == "askuser":
self.saveTimeshiftPopup()
return
elif config.timeshift.favoriteSaveAction.getValue() == "savetimeshift":
self.SaveTimeshift()
elif config.timeshift.favoriteSaveAction.getValue() == "savetimeshiftandrecord":
if self.pts_curevent_end > time():
self.SaveTimeshift(mergelater=True)
self.ptsRecordCurrentEvent()
else:
self.SaveTimeshift()
elif config.timeshift.favoriteSaveAction.getValue() == "noSave":
config.timeshift.isRecording.value = False
self.save_current_timeshift = False
elif action == "savetimeshift":
self.SaveTimeshift()
elif action == "savetimeshiftandrecord":
if self.pts_curevent_end > time():
self.SaveTimeshift(mergelater=True)
self.ptsRecordCurrentEvent()
else:
self.SaveTimeshift()
elif action == "noSave":
config.timeshift.isRecording.value = False
self.save_current_timeshift = False
# Get rid of old timeshift file before E2 truncates its filesize
if self.save_timeshift_postaction is not None:
self.eraseTimeshiftFile()
# Post PTS Actions like ZAP or whatever the user requested
if self.save_timeshift_postaction == "zapUp":
InfoBarChannelSelection.zapUp(self)
elif self.save_timeshift_postaction == "zapDown":
InfoBarChannelSelection.zapDown(self)
elif self.save_timeshift_postaction == "historyBack":
InfoBarChannelSelection.historyBack(self)
elif self.save_timeshift_postaction == "historyNext":
InfoBarChannelSelection.historyNext(self)
elif self.save_timeshift_postaction == "switchChannelUp":
InfoBarChannelSelection.switchChannelUp(self)
elif self.save_timeshift_postaction == "switchChannelDown":
InfoBarChannelSelection.switchChannelDown(self)
elif self.save_timeshift_postaction == "openServiceList":
InfoBarChannelSelection.openServiceList(self)
elif self.save_timeshift_postaction == "showRadioChannelList":
InfoBarChannelSelection.showRadioChannelList(self, zap=True)
elif self.save_timeshift_postaction == "standby":
Notifications.AddNotification(Screens.Standby.Standby2)
def SaveTimeshift(self, timeshiftfile=None, mergelater=False):
self.save_current_timeshift = False
savefilename = None
if timeshiftfile is not None:
savefilename = timeshiftfile
if savefilename is None:
for filename in os.listdir(config.usage.timeshift_path.getValue()):
if filename.startswith("timeshift.") and not filename.endswith(".del") and not filename.endswith(".copy"):
try:
statinfo = os.stat("%s%s" % (config.usage.timeshift_path.getValue(),filename))
if statinfo.st_mtime > (time()-5.0):
savefilename=filename
except Exception, errormsg:
Notifications.AddNotification(MessageBox, _("PTS Plugin Error: %s" % (errormsg)), MessageBox.TYPE_ERROR)
if savefilename is None:
Notifications.AddNotification(MessageBox, _("No Timeshift found to save as recording!"), MessageBox.TYPE_ERROR)
else:
timeshift_saved = True
timeshift_saveerror1 = ""
timeshift_saveerror2 = ""
metamergestring = ""
config.timeshift.isRecording.value = True
if mergelater:
self.pts_mergeRecords_timer.start(120000, True)
metamergestring = "pts_merge\n"
try:
if timeshiftfile is None:
# Save Current Event by creating hardlink to ts file
if self.pts_starttime >= (time()-60):
self.pts_starttime -= 60
ptsfilename = "%s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(self.pts_starttime)),self.pts_curevent_station,self.pts_curevent_name)
try:
if config.usage.setup_level.index >= 2:
if config.recording.filename_composition.getValue() == "long" and self.pts_curevent_name != pts_curevent_description:
ptsfilename = "%s - %s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(self.pts_starttime)),self.pts_curevent_station,self.pts_curevent_name,self.pts_curevent_description)
elif config.recording.filename_composition.getValue() == "short":
ptsfilename = "%s - %s" % (strftime("%Y%m%d",localtime(self.pts_starttime)),self.pts_curevent_name)
except Exception, errormsg:
print "[TimeShift] Using default filename"
if config.recording.ascii_filenames.getValue():
ptsfilename = ASCIItranslit.legacyEncode(ptsfilename)
fullname = Directories.getRecordingFilename(ptsfilename,config.usage.default_path.getValue())
os.link("%s%s" % (config.usage.timeshift_path.getValue(),savefilename), "%s.ts" % (fullname))
metafile = open("%s.ts.meta" % (fullname), "w")
metafile.write("%s\n%s\n%s\n%i\n%s" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime),metamergestring))
metafile.close()
self.ptsCreateEITFile(fullname)
elif timeshiftfile.startswith("pts_livebuffer"):
# Save stored timeshift by creating hardlink to ts file
readmetafile = open("%s%s.meta" % (config.usage.timeshift_path.getValue(),timeshiftfile), "r")
servicerefname = readmetafile.readline()[0:-1]
eventname = readmetafile.readline()[0:-1]
description = readmetafile.readline()[0:-1]
begintime = readmetafile.readline()[0:-1]
readmetafile.close()
ptsfilename = "%s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(int(begintime))),self.pts_curevent_station,eventname)
try:
if config.usage.setup_level.index >= 2:
if config.recording.filename_composition.getValue() == "long" and eventname != description:
ptsfilename = "%s - %s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(int(begintime))),self.pts_curevent_station,eventname,description)
elif config.recording.filename_composition.getValue() == "short":
ptsfilename = "%s - %s" % (strftime("%Y%m%d",localtime(int(begintime))),eventname)
except Exception, errormsg:
print "[TimeShift] Using default filename"
if config.recording.ascii_filenames.getValue():
ptsfilename = ASCIItranslit.legacyEncode(ptsfilename)
fullname=Directories.getRecordingFilename(ptsfilename,config.usage.default_path.getValue())
os.link("%s%s" % (config.usage.timeshift_path.getValue(),timeshiftfile),"%s.ts" % (fullname))
os.link("%s%s.meta" % (config.usage.timeshift_path.getValue(),timeshiftfile),"%s.ts.meta" % (fullname))
if os.path.exists("%s%s.eit" % (config.usage.timeshift_path.getValue(),timeshiftfile)):
os.link("%s%s.eit" % (config.usage.timeshift_path.getValue(),timeshiftfile),"%s.eit" % (fullname))
# Add merge-tag to metafile
if mergelater:
metafile = open("%s.ts.meta" % (fullname), "a")
metafile.write("%s\n" % (metamergestring))
metafile.close()
# Create AP and SC Files when not merging
if not mergelater:
self.ptsCreateAPSCFiles(fullname+".ts")
except Exception, errormsg:
timeshift_saved = False
timeshift_saveerror1 = errormsg
# Hmpppf! Saving Timeshift via Hardlink-Method failed. Probably other device?
# Let's try to copy the file in background now! This might take a while ...
if not timeshift_saved:
try:
stat = os.statvfs(config.usage.default_path.getValue())
freespace = stat.f_bfree / 1000 * stat.f_bsize / 1000
randomint = randint(1, 999)
if timeshiftfile is None:
# Get Filesize for Free Space Check
filesize = int(os.path.getsize("%s%s" % (config.usage.timeshift_path.getValue(),savefilename)) / (1024*1024))
# Save Current Event by copying it to the other device
if filesize <= freespace:
os.link("%s%s" % (config.usage.timeshift_path.getValue(),savefilename), "%s%s.%s.copy" % (config.usage.timeshift_path.getValue(),savefilename,randomint))
copy_file = savefilename
metafile = open("%s.ts.meta" % (fullname), "w")
metafile.write("%s\n%s\n%s\n%i\n%s" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime),metamergestring))
metafile.close()
self.ptsCreateEITFile(fullname)
elif timeshiftfile.startswith("pts_livebuffer"):
# Get Filesize for Free Space Check
filesize = int(os.path.getsize("%s%s" % (config.usage.timeshift_path.getValue(), timeshiftfile)) / (1024*1024))
# Save stored timeshift by copying it to the other device
if filesize <= freespace:
os.link("%s%s" % (config.usage.timeshift_path.getValue(),timeshiftfile), "%s%s.%s.copy" % (config.usage.timeshift_path.getValue(),timeshiftfile,randomint))
Directories.copyfile("%s%s.meta" % (config.usage.timeshift_path.getValue(),timeshiftfile),"%s.ts.meta" % (fullname))
if os.path.exists("%s%s.eit" % (config.usage.timeshift_path.getValue(),timeshiftfile)):
Directories.copyfile("%s%s.eit" % (config.usage.timeshift_path.getValue(),timeshiftfile),"%s.eit" % (fullname))
copy_file = timeshiftfile
# Add merge-tag to metafile
if mergelater:
metafile = open("%s.ts.meta" % (fullname), "a")
metafile.write("%s\n" % (metamergestring))
metafile.close()
# Only copy file when enough disk-space available!
if filesize <= freespace:
timeshift_saved = True
copy_file = copy_file+"."+str(randomint)
# Get Event Info from meta file
if os.path.exists("%s.ts.meta" % (fullname)):
readmetafile = open("%s.ts.meta" % (fullname), "r")
servicerefname = readmetafile.readline()[0:-1]
eventname = readmetafile.readline()[0:-1]
else:
eventname = ""
JobManager.AddJob(CopyTimeshiftJob(self, "mv \"%s%s.copy\" \"%s.ts\"" % (config.usage.timeshift_path.getValue(),copy_file,fullname), copy_file, fullname, eventname))
if not Screens.Standby.inTryQuitMainloop and not Screens.Standby.inStandby and not mergelater and self.save_timeshift_postaction != "standby":
Notifications.AddNotification(MessageBox, _("Saving timeshift as movie now. This might take a while!"), MessageBox.TYPE_INFO, timeout=5)
else:
timeshift_saved = False
timeshift_saveerror1 = ""
timeshift_saveerror2 = _("Not enough free Diskspace!\n\nFilesize: %sMB\nFree Space: %sMB\nPath: %s" % (filesize,freespace,config.usage.default_path.getValue()))
except Exception, errormsg:
timeshift_saved = False
timeshift_saveerror2 = errormsg
if not timeshift_saved:
config.timeshift.isRecording.value = False
self.save_timeshift_postaction = None
errormessage = str(timeshift_saveerror1) + "\n" + str(timeshift_saveerror2)
Notifications.AddNotification(MessageBox, _("Timeshift save failed!")+"\n\n%s" % errormessage, MessageBox.TYPE_ERROR)
def ptsCleanTimeshiftFolder(self):
if not config.timeshift.enabled.getValue() or self.ptsCheckTimeshiftPath() is False or self.session.screen["Standby"].boolean is True:
return
try:
for filename in os.listdir(config.usage.timeshift_path.getValue()):
if (filename.startswith("timeshift.") or filename.startswith("pts_livebuffer.")) and (filename.endswith(".del") is False and filename.endswith(".copy") is False and filename.endswith(".meta") is False and filename.endswith(".eit") is False):
statinfo = os.stat("%s%s" % (config.usage.timeshift_path.getValue(),filename))
# if no write for 5 sec = stranded timeshift
if statinfo.st_mtime < (time()-5.0):
print "[TimeShift] Erasing stranded timeshift %s" % filename
self.BgFileEraser.erase("%s%s" % (config.usage.timeshift_path.getValue(),filename))
# Delete Meta and EIT File too
if filename.startswith("pts_livebuffer.") is True:
os.remove("%s%s.meta" % (config.usage.timeshift_path.getValue(),filename))
os.remove("%s%s.eit" % (config.usage.timeshift_path.getValue(),filename))
except:
print "PTS: IO-Error while cleaning Timeshift Folder ..."
def ptsGetEventInfo(self):
    """Cache service and EPG data of the currently playing event.

    Fills the ``pts_curevent_*`` attributes (service reference, station
    name, begin/end time, name, description, event id) that the later
    save/merge steps read.  Shows an error notification when the lookup
    fails.
    """
    event = None
    try:
        serviceref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        serviceHandler = eServiceCenter.getInstance()
        info = serviceHandler.info(serviceref)
        self.pts_curevent_servicerefname = serviceref.toString()
        self.pts_curevent_station = info.getName(serviceref)
        # Re-resolve via the running service to obtain the current event.
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        event = info and info.getEvent(0)
    except Exception, errormsg:
        Notifications.AddNotification(MessageBox, _("Getting Event Info failed!")+"\n\n%s" % errormsg, MessageBox.TYPE_ERROR, timeout=10)
    if event is not None:
        # Indices 0..4 of parseEvent() are used here as begin, end, name,
        # description and event id.
        curEvent = parseEvent(event)
        self.pts_curevent_begin = int(curEvent[0])
        self.pts_curevent_end = int(curEvent[1])
        self.pts_curevent_name = curEvent[2]
        self.pts_curevent_description = curEvent[3]
        self.pts_curevent_eventid = curEvent[4]
def ptsFrontpanelActions(self, action=None):
if self.session.nav.RecordTimer.isRecording() or SystemInfo.get("NumFrontpanelLEDs", 0) == 0:
return
try:
if action == "start":
if os.path.exists("/proc/stb/fp/led_set_pattern"):
f = open("/proc/stb/fp/led_set_pattern", "w")
f.write("0xa7fccf7a")
f.close()
elif os.path.exists("/proc/stb/fp/led0_pattern"):
f = open("/proc/stb/fp/led0_pattern", "w")
f.write("0x55555555")
f.close()
if os.path.exists("/proc/stb/fp/led_pattern_speed"):
f = open("/proc/stb/fp/led_pattern_speed", "w")
f.write("20")
f.close()
elif os.path.exists("/proc/stb/fp/led_set_speed"):
f = open("/proc/stb/fp/led_set_speed", "w")
f.write("20")
f.close()
elif action == "stop":
if os.path.exists("/proc/stb/fp/led_set_pattern"):
f = open("/proc/stb/fp/led_set_pattern", "w")
f.write("0")
f.close()
elif os.path.exists("/proc/stb/fp/led0_pattern"):
f = open("/proc/stb/fp/led0_pattern", "w")
f.write("0")
f.close()
except Exception, errormsg:
print "[Timeshift] %s" % (errormsg)
def ptsCreateHardlink(self):
    """Link the freshly started timeshift buffer to a pts_livebuffer slot.

    For the current (still nearly empty) timeshift.* buffer file a
    hardlink named ``pts_livebuffer.<eventcount>`` plus a ``.meta`` and an
    ``.eit`` file are created so the buffer can survive later zaps.  Stale
    files occupying the same slot number are erased first.  With
    permanent recording enabled, the buffer is additionally linked into
    the movie directory as a regular recording.  Hardlinks require the
    timeshift filesystem to support them (e.g. EXT2/EXT3).
    """
    timeshiftlist = []  # NOTE(review): never filled or read - appears unused
    for filename in os.listdir(config.usage.timeshift_path.getValue()):
        if filename.startswith("timeshift.") and not filename.endswith(".del") and not filename.endswith(".copy"):
            try:
                statinfo = os.stat("%s%s" % (config.usage.timeshift_path.getValue(),filename))
                # A buffer below 10 bytes was just created -> claim this slot.
                if statinfo.st_size < 10:
                    # Erase whatever still occupies the slot.
                    try:
                        if os.path.exists("%spts_livebuffer.%s.eit" % (config.usage.timeshift_path.getValue(),self.pts_eventcount)):
                            self.BgFileEraser.erase("%spts_livebuffer.%s.eit" % (config.usage.timeshift_path.getValue(),self.pts_eventcount))
                        if os.path.exists("%spts_livebuffer.%s.meta" % (config.usage.timeshift_path.getValue(),self.pts_eventcount)):
                            self.BgFileEraser.erase("%spts_livebuffer.%s.meta" % (config.usage.timeshift_path.getValue(),self.pts_eventcount))
                        if os.path.exists("%spts_livebuffer.%s" % (config.usage.timeshift_path.getValue(),self.pts_eventcount)):
                            self.BgFileEraser.erase("%spts_livebuffer.%s" % (config.usage.timeshift_path.getValue(),self.pts_eventcount))
                    except Exception, errormsg:
                        Notifications.AddNotification(MessageBox, _("Failed to remove old files.")+"\n\n%s" % errormsg, MessageBox.TYPE_ERROR)
                        print "[Timeshift] %s" % (errormsg)
                    try:
                        # Create link to pts_livebuffer file
                        os.link("%s%s" % (config.usage.timeshift_path.getValue(),filename), "%spts_livebuffer.%s" % (config.usage.timeshift_path.getValue(),self.pts_eventcount))
                        # Create a Meta File (serviceref, name, description, start time)
                        metafile = open("%spts_livebuffer.%s.meta" % (config.usage.timeshift_path.getValue(),self.pts_eventcount), "w")
                        metafile.write("%s\n%s\n%s\n%i\n" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime)))
                        metafile.close()
                    except Exception, errormsg:
                        Notifications.AddNotification(MessageBox, _("Creating Hardlink to Timeshift file failed!")+"\n"+_("The Filesystem on your Timeshift-Device does not support hardlinks.\nMake sure it is formatted in EXT2 or EXT3!")+"\n\n%s" % errormsg, MessageBox.TYPE_ERROR)
                    # Create EIT File
                    self.ptsCreateEITFile("%spts_livebuffer.%s" % (config.usage.timeshift_path.getValue(),self.pts_eventcount))
                    # Permanent Recording Hack: also link the buffer into the
                    # movie directory as an auto-saved recording.
                    if config.timeshift.permanentrecording.getValue():
                        try:
                            fullname = Directories.getRecordingFilename("%s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(self.pts_starttime)),self.pts_curevent_station,self.pts_curevent_name),config.usage.default_path.getValue())
                            os.link("%s%s" % (config.usage.timeshift_path.getValue(),filename), "%s.ts" % (fullname))
                            # Create a Meta File (tagged "autosaved")
                            metafile = open("%s.ts.meta" % (fullname), "w")
                            metafile.write("%s\n%s\n%s\n%i\nautosaved\n" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime)))
                            metafile.close()
                        except Exception, errormsg:
                            print "[Timeshift] %s" % (errormsg)
            except Exception, errormsg:
                errormsg = str(errormsg)
                # An I/O error here usually points at a broken filesystem.
                if errormsg.find('Input/output error') != -1:
                    errormsg += _("\nAn Input/output error usually indicates a corrupted filesystem! Please check the filesystem of your timeshift-device!")
                Notifications.AddNotification(MessageBox, _("Creating Hardlink to Timeshift file failed!")+"\n%s" % (errormsg), MessageBox.TYPE_ERROR)
def ptsRecordCurrentEvent(self):
    """Start an instant record of the event currently playing."""
    playing = ServiceReference(self.session.nav.getCurrentlyPlayingServiceOrGroup())
    entry = RecordTimerEntry(playing, time(), self.pts_curevent_end,
        self.pts_curevent_name, self.pts_curevent_description,
        self.pts_curevent_eventid, dirname = config.usage.default_path.getValue())
    # Instant records are not persisted across reboots.
    entry.dontSave = True
    self.session.nav.RecordTimer.record(entry)
    self.recording.append(entry)
def ptsMergeRecords(self):
    """Merge saved timeshift parts tagged "pts_merge" into one recording.

    Scans the movie directory's ``.meta`` files in sorted order: an entry
    tagged ``pts_merge`` becomes the merge destination; the following
    entry with the same (legacy-encoded) event name becomes the source
    that is appended via a background cat job.  Retries later while a
    record runs or the destination is still being written.
    """
    if self.session.nav.RecordTimer.isRecording():
        # A record is running - postpone the merge for two minutes.
        self.pts_mergeRecords_timer.start(120000, True)
        return
    ptsmergeSRC = ""
    ptsmergeDEST = ""
    ptsmergeeventname = ""
    ptsgetnextfile = False
    ptsfilemerged = False
    filelist = os.listdir(config.usage.default_path.getValue())
    if filelist is not None:
        filelist.sort()
    for filename in filelist:
        if filename.endswith(".meta"):
            # Get Event Info from meta file
            readmetafile = open("%s%s" % (config.usage.default_path.getValue(),filename), "r")
            servicerefname = readmetafile.readline()[0:-1]
            eventname = readmetafile.readline()[0:-1]
            eventtitle = readmetafile.readline()[0:-1]
            eventtime = readmetafile.readline()[0:-1]
            eventtag = readmetafile.readline()[0:-1]
            readmetafile.close()
            if ptsgetnextfile:
                # This entry is the merge source candidate for the
                # previously found destination.
                ptsgetnextfile = False
                ptsmergeSRC = filename[0:-5]
                if ASCIItranslit.legacyEncode(eventname) == ASCIItranslit.legacyEncode(ptsmergeeventname):
                    # Copy EIT File
                    if Directories.fileExists("%s%s.eit" % (config.usage.default_path.getValue(), ptsmergeSRC[0:-3])):
                        Directories.copyfile("%s%s.eit" % (config.usage.default_path.getValue(), ptsmergeSRC[0:-3]),"%s%s.eit" % (config.usage.default_path.getValue(), ptsmergeDEST[0:-3]))
                    # Delete AP and SC Files (stale after the append)
                    if os.path.exists("%s%s.ap" % (config.usage.default_path.getValue(), ptsmergeDEST)):
                        self.BgFileEraser.erase("%s%s.ap" % (config.usage.default_path.getValue(), ptsmergeDEST))
                    if os.path.exists("%s%s.sc" % (config.usage.default_path.getValue(), ptsmergeDEST)):
                        self.BgFileEraser.erase("%s%s.sc" % (config.usage.default_path.getValue(), ptsmergeDEST))
                    # Add Merge Job to JobManager
                    JobManager.AddJob(MergeTimeshiftJob(self, "cat \"%s%s\" >> \"%s%s\"" % (config.usage.default_path.getValue(),ptsmergeSRC,config.usage.default_path.getValue(),ptsmergeDEST), ptsmergeSRC, ptsmergeDEST, eventname))
                    config.timeshift.isRecording.value = True
                    ptsfilemerged = True
                else:
                    # Names differ - keep looking for a matching source.
                    ptsgetnextfile = True
            if eventtag == "pts_merge" and not ptsgetnextfile:
                # Found a merge destination; the next matching entry is
                # its source.
                ptsgetnextfile = True
                ptsmergeDEST = filename[0:-5]
                ptsmergeeventname = eventname
                ptsfilemerged = False
                # If still recording or transfering, try again later ...
                if Directories.fileExists("%s%s" % (config.usage.default_path.getValue(),ptsmergeDEST)):
                    statinfo = os.stat("%s%s" % (config.usage.default_path.getValue(),ptsmergeDEST))
                    if statinfo.st_mtime > (time()-10.0):
                        self.pts_mergeRecords_timer.start(120000, True)
                        return
                # Rewrite Meta File to get rid of pts_merge tag
                metafile = open("%s%s.meta" % (config.usage.default_path.getValue(),ptsmergeDEST), "w")
                metafile.write("%s\n%s\n%s\n%i\n" % (servicerefname,eventname.replace("\n", ""),eventtitle.replace("\n", ""),int(eventtime)))
                metafile.close()
    # Merging failed :(
    if not ptsfilemerged and ptsgetnextfile:
        Notifications.AddNotification(MessageBox,_("[Timeshift] Merging records failed!"), MessageBox.TYPE_ERROR)
def ptsCreateAPSCFiles(self, filename):
    """Queue background creation of .ap/.sc index files for a recording.

    filename: full path of the .ts file.  The event name shown for the
    job is read from the companion .meta file (second line) when present.
    When the source file is missing, the save sequence is finished
    immediately instead.
    """
    if Directories.fileExists(filename, 'r'):
        eventname = ""
        if Directories.fileExists(filename+".meta", 'r'):
            # Get Event Info from meta file
            readmetafile = open(filename+".meta", "r")
            servicerefname = readmetafile.readline()[0:-1]
            eventname = readmetafile.readline()[0:-1]
            readmetafile.close()  # fix: handle was previously leaked
        JobManager.AddJob(CreateAPSCFilesJob(self, "/usr/lib/enigma2/python/Components/createapscfiles \"%s\"" % (filename), eventname))
    else:
        self.ptsSaveTimeshiftFinished()
def ptsCreateEITFile(self, filename):
    """Write an <filename>.eit file for the cached current event.

    Uses Components.eitsave to serialize the EPG data of
    self.pts_curevent_eventid for the currently playing service.  No-op
    when no event id is cached; failures are only logged.
    """
    if self.pts_curevent_eventid is not None:
        try:
            import Components.eitsave
            serviceref = ServiceReference(self.session.nav.getCurrentlyPlayingServiceOrGroup()).ref.toString()
            Components.eitsave.SaveEIT(serviceref, filename+".eit", self.pts_curevent_eventid, -1, -1)
        except Exception, errormsg:
            print "[Timeshift] %s" % (errormsg)
def ptsCopyFilefinished(self, srcfile, destfile):
    """Callback run when a background timeshift copy job has finished."""
    # Drop the source copy now that the destination exists.
    if Directories.fileExists(srcfile):
        self.BgFileEraser.erase(srcfile)
    if self.pts_mergeRecords_timer.isActive():
        # A merge is pending: give it a fresh 15 second head start.
        self.pts_mergeRecords_timer.stop()
        self.pts_mergeRecords_timer.start(15000, True)
        return
    # No merge pending: build the .ap/.sc index files right away.
    self.ptsCreateAPSCFiles(destfile)
def ptsMergeFilefinished(self, srcfile, destfile):
    """Callback run when a background merge (append) job has finished.

    Either deletes the merged source recording immediately or - while a
    record or other jobs are still running - marks it with a ``.pts.del``
    file for later removal by ptsMergePostCleanUp().  Afterwards the
    index files for the destination are rebuilt and the merge process is
    re-triggered to pick up further parts.
    """
    if self.session.nav.RecordTimer.isRecording() or len(JobManager.getPendingJobs()) >= 1:
        # Rename files and delete them later ...
        self.pts_mergeCleanUp_timer.start(120000, True)
        # Create an empty <name>.pts.del marker file.
        os.system("echo \"\" > \"%s.pts.del\"" % (srcfile[0:-3]))
    else:
        # Delete Instant Record permanently now ... R.I.P.
        self.BgFileEraser.erase("%s" % (srcfile))
        self.BgFileEraser.erase("%s.ap" % (srcfile))
        self.BgFileEraser.erase("%s.sc" % (srcfile))
        self.BgFileEraser.erase("%s.meta" % (srcfile))
        self.BgFileEraser.erase("%s.cuts" % (srcfile))
        self.BgFileEraser.erase("%s.eit" % (srcfile[0:-3]))
    # Create AP and SC Files
    self.ptsCreateAPSCFiles(destfile)
    # Run Merge-Process one more time to check if there are more records to merge
    self.pts_mergeRecords_timer.start(10000, True)
def ptsSaveTimeshiftFinished(self):
    """Finalize a 'save timeshift' run once no merge clean-up is pending."""
    if self.pts_mergeCleanUp_timer.isActive():
        return
    self.ptsFrontpanelActions("stop")
    config.timeshift.isRecording.value = False
    if Screens.Standby.inTryQuitMainloop:
        # A shutdown/restart is waiting on us - retry it in 30 seconds.
        self.pts_QuitMainloop_timer.start(30000, True)
    else:
        Notifications.AddNotification(MessageBox, _("Timeshift saved to your harddisk!"), MessageBox.TYPE_INFO, timeout = 5)
def ptsMergePostCleanUp(self):
    """Remove source recordings that were marked ``.pts.del`` by a merge.

    Defers itself while a record or background jobs are running.  For
    every marker file in the movie directory the matching .ts recording
    and all its companion files are queued for background erasure.
    """
    if self.session.nav.RecordTimer.isRecording() or len(JobManager.getPendingJobs()) >= 1:
        # Still busy - keep the 'recording' flag and retry in 2 minutes.
        config.timeshift.isRecording.value = True
        self.pts_mergeCleanUp_timer.start(120000, True)
        return
    self.ptsFrontpanelActions("stop")
    config.timeshift.isRecording.value = False
    filelist = os.listdir(config.usage.default_path.getValue())
    for filename in filelist:
        if filename.endswith(".pts.del"):
            # Derive the recording's .ts path from the marker name.
            srcfile = config.usage.default_path.getValue() + "/" + filename[0:-8] + ".ts"
            self.BgFileEraser.erase("%s" % (srcfile))
            self.BgFileEraser.erase("%s.ap" % (srcfile))
            self.BgFileEraser.erase("%s.sc" % (srcfile))
            self.BgFileEraser.erase("%s.meta" % (srcfile))
            self.BgFileEraser.erase("%s.cuts" % (srcfile))
            self.BgFileEraser.erase("%s.eit" % (srcfile[0:-3]))
            self.BgFileEraser.erase("%s.pts.del" % (srcfile[0:-3]))
            # Restart QuitMainloop Timer to give BgFileEraser enough time
            if Screens.Standby.inTryQuitMainloop and self.pts_QuitMainloop_timer.isActive():
                self.pts_QuitMainloop_timer.start(60000, True)
def ptsTryQuitMainloop(self):
    """Retry a deferred shutdown/restart once background PTS work is done."""
    if Screens.Standby.inTryQuitMainloop:
        if len(JobManager.getPendingJobs()) >= 1 or self.pts_mergeCleanUp_timer.isActive():
            # Still working - check again in a minute.
            self.pts_QuitMainloop_timer.start(60000, True)
            return
        if self.session.ptsmainloopvalue:
            # Clear the dialog stacks and fire the postponed quit request.
            self.session.dialog_stack = []
            self.session.summary_stack = [None]
            self.session.open(Screens.Standby.TryQuitMainloop, self.session.ptsmainloopvalue)
def ptsGetSeekInfo(self):
    """Return the seek interface of the current service, if any."""
    service = self.session.nav.getCurrentService()
    if not service:
        return service
    return service.seek()
def ptsGetPosition(self):
    """Return the current play position in PTS ticks.

    Returns None when no seek interface exists and 0 when the position
    query reports an error.
    """
    seek = self.ptsGetSeekInfo()
    if seek is None:
        return None
    result = seek.getPlayPosition()
    # result[0] is the error flag, result[1] the position value.
    return 0 if result[0] else result[1]
def ptsGetLength(self):
    """Return the current buffer length in PTS ticks.

    Returns None when no seek interface exists and 0 when the length
    query reports an error.
    """
    seek = self.ptsGetSeekInfo()
    if seek is None:
        return None
    result = seek.getLength()
    # result[0] is the error flag, result[1] the length value.
    return 0 if result[0] else result[1]
def ptsGetSaveTimeshiftStatus(self):
    """Return whether the current timeshift is flagged to be saved."""
    status = self.save_current_timeshift
    return status
def ptsSeekPointerOK(self):
    """OK press on the PTS seek bar: jump playback to the pointer position.

    If the PVR state dialog is hidden, the press only (re)shows it and
    resumes playback when paused.  Otherwise the pointer's pixel offset on
    the seek bar is converted into a percentage of the buffer length and
    a relative seek is issued.
    """
    if self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState" and self.timeshift_enabled and self.isSeekable():
        if not self.pvrStateDialog.shown:
            # NOTE(review): the second clause is redundant - PAUSE already
            # implies "not PLAY"; condition is equivalent to "!= PLAY".
            if self.seekstate != self.SEEK_STATE_PLAY or self.seekstate == self.SEEK_STATE_PAUSE:
                self.setSeekState(self.SEEK_STATE_PLAY)
            self.doShow()
            return
        length = self.ptsGetLength()
        position = self.ptsGetPosition()
        if length is None or position is None:
            return
        # Pointer offset (pixels) relative to the seek bar's left edge +8.
        cur_pos = self.pvrStateDialog["PTSSeekPointer"].position
        jumptox = int(cur_pos[0]) - (int(self.pvrStateDialog["PTSSeekBack"].instance.position().x())+8)
        # Convert pixels -> percent of bar width -> target time in the buffer.
        jumptoperc = round((jumptox / float(self.pvrStateDialog["PTSSeekBack"].instance.size().width())) * 100, 0)
        jumptotime = int((length / 100) * jumptoperc)
        jumptodiff = position - jumptotime
        self.doSeekRelative(-jumptodiff)
    else:
        return
def ptsSeekPointerLeft(self):
    """Move the PTS seek pointer one step to the left (when applicable)."""
    if self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState" and self.timeshift_enabled and self.isSeekable():
        self.ptsMoveSeekPointer(direction="left")
def ptsSeekPointerRight(self):
    """Move the PTS seek pointer one step to the right (when applicable)."""
    if self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState" and self.timeshift_enabled and self.isSeekable():
        self.ptsMoveSeekPointer(direction="right")
def ptsSeekPointerReset(self):
    """Snap the PTS seek pointer back to the left edge of the seek bar."""
    if self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState" and self.timeshift_enabled:
        left_edge = int(self.pvrStateDialog["PTSSeekBack"].instance.position().x()) + 8
        current_y = self.pvrStateDialog["PTSSeekPointer"].position[1]
        self.pvrStateDialog["PTSSeekPointer"].setPosition(left_edge, current_y)
def ptsSeekPointerSetCurrentPos(self):
    """Place the PTS seek pointer at the current playback position.

    Translates position/length into a pixel offset on the seek bar; does
    nothing outside the PTS timeshift state or when not seekable.
    """
    if not self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState" or not self.timeshift_enabled or not self.isSeekable():
        return
    position = self.ptsGetPosition()
    length = self.ptsGetLength()
    if length >= 1:
        # percent of buffer played -> pixels from the bar's left edge (+8).
        tpixels = int((float(int((position*100)/length))/100)*self.pvrStateDialog["PTSSeekBack"].instance.size().width())
        self.pvrStateDialog["PTSSeekPointer"].setPosition(int(self.pvrStateDialog["PTSSeekBack"].instance.position().x())+8+tpixels, self.pvrStateDialog["PTSSeekPointer"].position[1])
def ptsMoveSeekPointer(self, direction=None):
    """Nudge the PTS seek pointer 15 pixels left or right.

    direction: "left" or "right".  The pointer is clamped to the seek
    bar: its left edge +8 pixels, and 96% of the bar width on the right.
    """
    if direction is None or self.pts_pvrStateDialog != "Screens.PVRState.PTSTimeshiftState":
        return
    isvalidjump = False
    cur_pos = self.pvrStateDialog["PTSSeekPointer"].position
    self.doShow()
    if direction == "left":
        # Left limit: bar's left edge plus 8 pixels.
        minmaxval = int(self.pvrStateDialog["PTSSeekBack"].instance.position().x())+8
        movepixels = -15
        if cur_pos[0]+movepixels > minmaxval:
            isvalidjump = True
    elif direction == "right":
        # Right limit: 96% of the bar width.
        minmaxval = int(self.pvrStateDialog["PTSSeekBack"].instance.size().width()*0.96)
        movepixels = 15
        if cur_pos[0]+movepixels < minmaxval:
            isvalidjump = True
    else:
        return 0
    if isvalidjump:
        self.pvrStateDialog["PTSSeekPointer"].setPosition(cur_pos[0]+movepixels,cur_pos[1])
    else:
        # Would overshoot - clamp the pointer to the limit instead.
        self.pvrStateDialog["PTSSeekPointer"].setPosition(minmaxval,cur_pos[1])
def ptsTimeshiftFileChanged(self):
    """React to playback rolling over into another timeshift buffer file.

    Keeps self.pts_currplaying (the live buffer slot being played, with
    wrap-around at config.timeshift.maxevents) in sync, updates the event
    name shown in the PTS infobar, and pre-announces the next buffer file
    to the player - or switches back to live when the end is reached.
    """
    # Reset Seek Pointer
    if config.timeshift.enabled.getValue():
        self.ptsSeekPointerReset()
    if self.pts_switchtolive:
        # This change was the deliberate switch back to live - consume it.
        self.pts_switchtolive = False
        return
    # Advance/rewind the currently playing slot, wrapping at maxevents.
    if self.pts_seektoprevfile:
        if self.pts_currplaying == 1:
            self.pts_currplaying = config.timeshift.maxevents.getValue()
        else:
            self.pts_currplaying -= 1
    else:
        if self.pts_currplaying == config.timeshift.maxevents.getValue():
            self.pts_currplaying = 1
        else:
            self.pts_currplaying += 1
    if not Directories.fileExists("%spts_livebuffer.%s" % (config.usage.timeshift_path.getValue(),self.pts_currplaying), 'r'):
        # Slot has no buffer file - fall back to the newest event.
        self.pts_currplaying = self.pts_eventcount
    # Set Eventname in PTS InfoBar
    if config.timeshift.enabled.getValue() and self.pts_pvrStateDialog == "Screens.PVRState.PTSTimeshiftState":
        try:
            if self.pts_eventcount != self.pts_currplaying:
                # Event name is the second line of the slot's meta file.
                readmetafile = open("%spts_livebuffer.%s.meta" % (config.usage.timeshift_path.getValue(),self.pts_currplaying), "r")
                servicerefname = readmetafile.readline()[0:-1]
                eventname = readmetafile.readline()[0:-1]
                readmetafile.close()
                self.pvrStateDialog["eventname"].setText(eventname)
            else:
                self.pvrStateDialog["eventname"].setText("")
        except Exception, errormsg:
            self.pvrStateDialog["eventname"].setText("")
    # Get next pts file ...
    if self.pts_currplaying+1 > config.timeshift.maxevents.getValue():
        nextptsfile = 1
    else:
        nextptsfile = self.pts_currplaying+1
    # Seek to previous file
    if self.pts_seektoprevfile:
        self.pts_seektoprevfile = False
        if Directories.fileExists("%spts_livebuffer.%s" % (config.usage.timeshift_path.getValue(),nextptsfile), 'r'):
            self.ptsSetNextPlaybackFile("pts_livebuffer.%s" % (nextptsfile))
        self.ptsSeekBackHack()
    else:
        if Directories.fileExists("%spts_livebuffer.%s" % (config.usage.timeshift_path.getValue(),nextptsfile), 'r') and nextptsfile <= self.pts_eventcount:
            self.ptsSetNextPlaybackFile("pts_livebuffer.%s" % (nextptsfile))
        if nextptsfile == self.pts_currplaying:
            # Reached the live buffer - switch playback back to live TV.
            self.pts_switchtolive = True
            self.ptsSetNextPlaybackFile("")
def ptsSetNextPlaybackFile(self, nexttsfile):
    """Announce the buffer file the player should continue with.

    nexttsfile: file name relative to the timeshift path; an empty string
    switches back to live.  Silently logs when the running Enigma2 build
    does not offer setNextPlaybackFile().
    """
    ts = self.getTimeshift()
    if ts is None:
        return
    try:
        ts.setNextPlaybackFile("%s%s" % (config.usage.timeshift_path.getValue(),nexttsfile))
    except:
        print "[TimeShift] setNextPlaybackFile() not supported by OE. Enigma2 too old !?"
def ptsSeekBackHack(self):
    """Pause near the end of the buffer and arm the delayed rewind timer."""
    if not (config.timeshift.enabled.getValue() and self.timeshift_enabled):
        return
    self.setSeekState(self.SEEK_STATE_PAUSE)
    # 90000 PTS ticks per second -> land roughly 4 seconds before the end.
    self.doSeek(-90000 * 4)
    self.pts_SeekBack_timer.start(1000, True)
def ptsSeekBackTimer(self):
    """Resume rewinding after ptsSeekBackHack(), restoring the last speed."""
    if self.pts_lastseekspeed == 0:
        # No previous speed remembered - use the configured entry speed.
        speed = int(config.seek.enter_backward.getValue())
    else:
        speed = int(-self.pts_lastseekspeed)
    self.setSeekState(self.makeStateBackward(speed))
def ptsCheckTimeshiftPath(self):
    """Return True when the configured timeshift path is writable.

    A successful check is cached in self.pts_pathchecked.  On failure a
    warning is shown and the pending timeshift timers are stopped.
    """
    if self.pts_pathchecked:
        return True
    if Directories.fileExists(config.usage.timeshift_path.getValue(), 'w'):
        self.pts_pathchecked = True
        return True
    Notifications.AddNotification(MessageBox, _("Could not activate Permanent-Timeshift!\nTimeshift-Path does not exist"), MessageBox.TYPE_ERROR, timeout=15)
    if self.pts_delay_timer.isActive():
        self.pts_delay_timer.stop()
    if self.pts_cleanUp_timer.isActive():
        self.pts_cleanUp_timer.stop()
    return False
def ptsTimerEntryStateChange(self, timer):
    """RecordTimer state hook: stop timeshift while records run.

    Only active with timeshift enabled and the 'stop while recording'
    option set.  Stops timeshift when a record starts, restarts it (and
    the merge timer / front panel LED) when all records have ended.
    """
    if not config.timeshift.enabled.getValue() or not config.timeshift.stopwhilerecording.getValue():
        return
    self.pts_record_running = self.session.nav.RecordTimer.isRecording()
    # Abort here when box is in standby mode
    if self.session.screen["Standby"].boolean is True:
        return
    # Stop Timeshift when Record started ...
    if timer.state == TimerEntry.StateRunning and self.timeshift_enabled and self.pts_record_running:
        if self.ptsLiveTVStatus() is False:
            # Not on a live TV service - just disable timeshift bookkeeping.
            self.timeshift_enabled = False
            self.pts_LengthCheck_timer.stop()
            return
        if self.seekstate != self.SEEK_STATE_PLAY:
            self.setSeekState(self.SEEK_STATE_PLAY)
        if self.isSeekable():
            Notifications.AddNotification(MessageBox,_("Record started! Stopping timeshift now ..."), MessageBox.TYPE_INFO, timeout=5)
        self.stopTimeshift(True, False)
    # Restart Timeshift when all records stopped
    if timer.state == TimerEntry.StateEnded and not self.timeshift_enabled and not self.pts_record_running:
        self.activatePermanentTimeshift()
    # Restart Merge-Timer when all records stopped
    if timer.state == TimerEntry.StateEnded and self.pts_mergeRecords_timer.isActive():
        self.pts_mergeRecords_timer.stop()
        self.pts_mergeRecords_timer.start(15000, True)
    # Restart FrontPanel LED when still copying or merging files
    # ToDo: Only do this on PTS Events and not events from other jobs
    if timer.state == TimerEntry.StateEnded and (len(JobManager.getPendingJobs()) >= 1 or self.pts_mergeRecords_timer.isActive()):
        self.ptsFrontpanelActions("start")
        config.timeshift.isRecording.value = True
def ptsLiveTVStatus(self):
    """Return True when a live TV broadcast (with a valid TSID) is playing."""
    service = self.session.nav.getCurrentService()
    info = service and service.info()
    # Falls back to -1 when there is no service, no info or no TSID.
    tsid = info and info.getInfo(iServiceInformation.sTSID) or -1
    return tsid is not None and tsid != -1
def ptsLengthCheck(self):
    """Periodic check that restarts timeshift when it exceeds max length.

    Disables timeshift bookkeeping when no live TV service is playing.
    When the buffer age passes config.timeshift.maxlength (minutes),
    the current timeshift is optionally saved and then restarted.
    """
    # Check if we are in TV Mode ...
    if self.ptsLiveTVStatus() is False:
        self.timeshift_enabled = False
        self.pts_LengthCheck_timer.stop()
        return
    if config.timeshift.stopwhilerecording.getValue() and self.pts_record_running:
        # A record is running and timeshift is configured to yield to it.
        return
    # Length Check
    if config.timeshift.enabled.getValue() and self.session.screen["Standby"].boolean is not True and self.timeshift_enabled and (time() - self.pts_starttime) >= (config.timeshift.maxlength.getValue() * 60):
        if self.save_current_timeshift:
            # Save the old buffer, restart, and keep the save flag for the
            # new buffer too.
            self.saveTimeshiftActions("savetimeshift")
            self.activatePermanentTimeshift()
            self.save_current_timeshift = True
        else:
            self.activatePermanentTimeshift()
        Notifications.AddNotification(MessageBox,_("Maximum Timeshift length per Event reached!\nRestarting Timeshift now ..."), MessageBox.TYPE_INFO, timeout=5)
# activates timeshift, and seeks to (almost) the end
def activateTimeshiftEnd(self, back = True):
    """Enter timeshift playback positioned (almost) at the live end.

    back: when True, additionally arm the delayed rewind timer so
    playback starts rewinding (box-type dependent delay).
    """
    ts = self.getTimeshift()
    # print "activateTimeshiftEnd"
    if ts is None:
        return
    if ts.isTimeshiftActive():
        # print "!! activate timeshift called - but shouldn't this be a normal pause?"
        self.pauseService()
    else:
        # print "play, ..."
        ts.activateTimeshift() # activate timeshift will automatically pause
        self.setSeekState(self.SEEK_STATE_PAUSE)
        seekable = self.getSeek()
        if seekable is not None:
            seekable.seekTo(-90000) # seek approx. 1 sec before end
    if back:
        # 'et' boxes need a longer delay before the rewind kicks in.
        if getBoxType().startswith('et'):
            self.ts_rewind_timer.start(1000, 1)
        else:
            self.ts_rewind_timer.start(100, 1)
def rewindService(self):
    """Kick off backward seeking; some box types need a PLAY state first."""
    if getBoxType().startswith(('gb', 'xp1000')):
        self.setSeekState(self.SEEK_STATE_PLAY)
    self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.getValue())))
# same as activateTimeshiftEnd, but pauses afterwards.
def activateTimeshiftEndAndPause(self):
    """Jump to the live end of the timeshift buffer and stay paused."""
    self.activateTimeshiftEnd(False)
def checkTimeshiftRunning(self, returnFunction, answer = None):
    """Ask the user to confirm leaving a running timeshift.

    First call (answer is None): when in seekable timeshift and the
    confirmation option is enabled, opens a MessageBox that re-enters this
    method with the user's answer and returns True ("blocked for now").
    Second call: invokes returnFunction(True/False) with the decision.
    """
    if answer is None:
        if self.isSeekable() and self.timeshift_enabled and self.check_timeshift and config.usage.check_timeshift.getValue():
            self.session.openWithCallback(boundFunction(self.checkTimeshiftRunning, returnFunction), MessageBox, _("You seem to be in timeshift, Do you want to leave timeshift ?"), simple = True)
            return True
        else:
            self.check_timeshift = True
            return False
    elif answer:
        # Skip the question on the immediately following action.
        self.check_timeshift = False
        boundFunction(returnFunction, True)()
    else:
        boundFunction(returnFunction, False)()
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
EXTENSION_SINGLE = 0
EXTENSION_LIST = 1
def __init__(self):
self.list = []
self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
{
"extensions": (self.showExtensionSelection, _("view extensions...")),
"showPluginBrowser": (self.showPluginBrowser, _("Show the plugin browser..")),
"showMediaPlayer": (self.showMediaPlayer, _("Show the media player...")),
}, 1) # lower priority
self.addExtension(extension = self.getOsd3DSetup, type = InfoBarExtensions.EXTENSION_LIST)
self.addExtension(extension = self.getCCcamInfo, type = InfoBarExtensions.EXTENSION_LIST)
self.addExtension(extension = self.getOScamInfo, type = InfoBarExtensions.EXTENSION_LIST)
def get3DSetupname(self):
return _("OSD 3D Setup")
def getOsd3DSetup(self):
if config.osd.show3dextensions .getValue():
return [((boundFunction(self.get3DSetupname), boundFunction(self.open3DSetup), lambda: True), None)]
else:
return []
def getCCname(self):
return _("CCcam Info")
def getCCcamInfo(self):
if Directories.pathExists('/usr/emu_scripts/'):
softcams = os.listdir('/usr/emu_scripts/')
for softcam in softcams:
if softcam.lower().startswith('cccam') and config.cccaminfo.showInExtensions.value:
return [((boundFunction(self.getCCname), boundFunction(self.openCCcamInfo), lambda: True), None)] or []
else:
return []
def getOSname(self):
return _("OScam Info")
def getOScamInfo(self):
if Directories.pathExists('/usr/emu_scripts/'):
softcams = os.listdir('/usr/emu_scripts/')
for softcam in softcams:
if softcam.lower().startswith('oscam') and config.oscaminfo.showInExtensions.value:
return [((boundFunction(self.getOSname), boundFunction(self.openOScamInfo), lambda: True), None)] or []
else:
return []
def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
self.list.append((type, extension, key))
def updateExtension(self, extension, key = None):
self.extensionsList.append(extension)
if key is not None:
if self.extensionKeys.has_key(key):
key = None
if key is None:
for x in self.availableKeys:
if not self.extensionKeys.has_key(x):
key = x
break
if key is not None:
self.extensionKeys[key] = len(self.extensionsList) - 1
def updateExtensions(self):
self.extensionsList = []
self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
self.extensionKeys = {}
for x in self.list:
if x[0] == self.EXTENSION_SINGLE:
self.updateExtension(x[1], x[2])
else:
for y in x[1]():
self.updateExtension(y[0], y[1])
def showExtensionSelection(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
self.updateExtensions()
extensionsList = self.extensionsList[:]
keys = []
list = []
for x in self.availableKeys:
if self.extensionKeys.has_key(x):
entry = self.extensionKeys[x]
extension = self.extensionsList[entry]
if extension[2]():
name = str(extension[0]())
list.append((extension[0](), extension))
keys.append(x)
extensionsList.remove(extension)
else:
extensionsList.remove(extension)
list.extend([(x[0](), x) for x in extensionsList])
keys += [""] * len(extensionsList)
self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list = list, keys = keys, skin_name = "ExtensionsList")
def extensionCallback(self, answer):
if answer is not None:
answer[1][1]()
def showPluginBrowser(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
from Screens.PluginBrowser import PluginBrowser
self.session.open(PluginBrowser)
def openCCcamInfo(self):
from Screens.CCcamInfo import CCcamInfoMain
self.session.open(CCcamInfoMain)
def openOScamInfo(self):
from Screens.OScamInfo import OscamInfoMenu
self.session.open(OscamInfoMenu)
def open3DSetup(self):
from Screens.UserInterfacePositioner import OSD3DSetupScreen
self.session.open(OSD3DSetupScreen)
def editCallback(self, session):
global autotimer
global autopoller
# XXX: canceling of GUI (Overview) won't affect config values which might have been changed - is this intended?
# Don't parse EPG if editing was canceled
if session is not None:
# Save xml
autotimer.writeXml()
# Poll EPGCache
autotimer.parseEPG()
# Start autopoller again if wanted
if config.plugins.autotimer.autopoll.value:
if autopoller is None:
from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
autopoller = AutoPoller()
autopoller.start()
# Remove instance if not running in background
else:
autopoller = None
autotimer = None
def showMediaPlayer(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
if isinstance(self, InfoBarExtensions):
if isinstance(self, InfoBar):
try: # falls es nicht installiert ist
from Plugins.Extensions.MediaPlayer.plugin import MediaPlayer
self.session.open(MediaPlayer)
no_plugin = False
except Exception, e:
self.session.open(MessageBox, _("The MediaPlayer plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
    """Lists installed extension-menu plugins as extension entries."""
    def __init__(self):
        self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)

    def getPluginName(self, name):
        return name

    def getPluginList(self):
        """Collect all EXTENSIONSMENU plugins, sorted by plugin name."""
        entries = []
        for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
            arg_names = inspect.getargspec(p.__call__)[0]
            # Plugins taking (session) always qualify; plugins taking
            # (session, servicelist) only when a channel selection exists.
            if len(arg_names) == 1 or (len(arg_names) == 2 and isinstance(self, InfoBarChannelSelection)):
                entries.append(((boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True), None, p.name))
        entries.sort(key = lambda e: e[2]) # sort by name
        return entries

    def runPlugin(self, plugin):
        if isinstance(self, InfoBarChannelSelection):
            plugin(session = self.session, servicelist = self.servicelist)
        else:
            plugin(session = self.session)
from Components.Task import job_manager
class InfoBarJobman:
    """Exposes pending background jobs (copies, merges, ...) in the
    extensions menu and opens the task view for a selected one."""
    def __init__(self):
        self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)

    def getJobList(self):
        if not config.usage.jobtaksextensions.getValue():
            return []
        entries = []
        for job in job_manager.getPendingJobs():
            entries.append(((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None))
        return entries

    def getJobName(self, job):
        """Format one job as 'status: name (percent%)'."""
        percentage = int(100 * job.progress / float(job.end))
        return "%s: %s (%d%%)" % (job.getStatustext(), job.name, percentage)

    def showJobView(self, job):
        from Screens.TaskView import JobView
        # Bring the job manager into the foreground while the view is open.
        job_manager.in_background = False
        self.session.openWithCallback(self.JobViewCB, JobView, job)

    def JobViewCB(self, in_background):
        job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
    """Picture-in-Picture handling: show/hide, swap, move and pipzap.

    Only wired up on boxes with more than one video decoder.  The PiP
    dialog itself lives on the session (session.pip / session.pipshown)
    so it survives across infobar instances.
    """
    def __init__(self):
        try:
            self.session.pipshown
        except:
            # First infobar on this session - initialize the flag.
            self.session.pipshown = False
        if SystemInfo.get("NumVideoDecoders", 1) > 1 and isinstance(self, InfoBarEPG):
            self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
                {
                    "activatePiP": (self.showPiP, _("Activate PiP")),
                })
            if (self.allowPiP):
                self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
                self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
                self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
                self.addExtension((self.getTogglePipzapName, self.togglePipzap, self.pipShown), "red")
            else:
                # Restricted mode: only show/hide and move.
                self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
                self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")

    def pipShown(self):
        # Visibility predicate for the extension entries above.
        return self.session.pipshown

    def pipHandles0Action(self):
        # True when the '0' key is remapped to a PiP action.
        return self.pipShown() and config.usage.pip_zero_button.getValue() != "standard"

    def getShowHideName(self):
        if self.session.pipshown:
            return _("Disable Picture in Picture")
        else:
            return _("Activate Picture in Picture")

    def getSwapName(self):
        return _("Swap services")

    def getMoveName(self):
        return _("Move Picture in Picture")

    def getTogglePipzapName(self):
        slist = self.servicelist
        if slist and slist.dopipzap:
            return _("Zap focus to main screen")
        return _("Zap focus to Picture in Picture")

    def togglePipzap(self):
        # Pipzap needs a visible PiP; open one on demand.
        if not self.session.pipshown:
            self.showPiP()
        slist = self.servicelist
        if slist:
            slist.togglePipzap()

    def showPiP(self):
        """Toggle the PiP window on/off."""
        if self.session.pipshown:
            slist = self.servicelist
            if slist and slist.dopipzap:
                # Return the zap focus to the main screen first.
                slist.togglePipzap()
            del self.session.pip
            self.session.pipshown = False
        else:
            self.session.pip = self.session.instantiateDialog(PictureInPicture)
            self.session.pip.show()
            newservice = self.servicelist.servicelist.getCurrent()
            if self.session.pip.playService(newservice):
                self.session.pipshown = True
                self.session.pip.servicePath = self.servicelist.getCurrentServicePath()
            else:
                # Service refused to play in PiP - tear the dialog down again.
                self.session.pipshown = False
                del self.session.pip

    def swapPiP(self):
        """Exchange the main-screen service with the PiP service."""
        swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        pipref = self.session.pip.getCurrentService()
        if swapservice and pipref and pipref.toString() != swapservice.toString():
            currentServicePath = self.servicelist.getCurrentServicePath()
            #self.servicelist.setCurrentServicePath(self.session.pip.servicePath)
            self.session.pip.playService(swapservice)
            self.session.nav.stopService() # stop portal
            self.session.nav.playService(pipref) # start subservice
            self.session.pip.servicePath = currentServicePath
            #if self.servicelist.dopipzap:
            # This unfortunately won't work with subservices
            #	self.servicelist.setCurrentSelection(self.session.pip.getCurrentService())

    def movePiP(self):
        self.session.open(PiPSetup, pip = self.session.pip)

    def pipDoHandle0Action(self):
        # Dispatch the remapped '0' key according to configuration.
        use = config.usage.pip_zero_button.getValue()
        if "swap" == use:
            self.swapPiP()
        elif "swapstop" == use:
            self.swapPiP()
            self.showPiP()
        elif "stop" == use:
            self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
	"""Instant Record - handles the instantRecord action in order to
	start/stop instant records"""
	def __init__(self):
		self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
			{
				"instantRecord": (self.instantRecord, _("Instant Record...")),
			})
		# RecordTimerEntry objects created by this screen (dontSave entries).
		self.recording = []
	def stopCurrentRecording(self, entry = -1):
		"""Remove the instant recording selected in the TimerSelection dialog."""
		if entry is not None and entry != -1:
			self.session.nav.RecordTimer.removeEntry(self.recording[entry])
			self.recording.remove(self.recording[entry])
	def startInstantRecording(self, limitEvent = False):
		"""Create and register an instant RecordTimerEntry for the current service.

		limitEvent: stop at the end of the current EPG event instead of
		recording indefinitely (the entry then auto-increases its end time).
		"""
		serviceref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		# try to get event info
		event = None
		try:
			service = self.session.nav.getCurrentService()
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(serviceref, -1, 0)
			if event is None:
				info = service.info()
				ev = info.getEvent(0)
				event = ev
		except:
			pass
		begin = int(time())
		end = begin + 3600	# dummy
		name = "instant record"
		description = ""
		eventid = None
		if event is not None:
			# parseEvent returns (begin, end, name, description, eventid).
			curEvent = parseEvent(event)
			name = curEvent[2]
			description = curEvent[3]
			eventid = curEvent[4]
			if limitEvent:
				end = curEvent[1]
		else:
			if limitEvent:
				self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
		if isinstance(serviceref, eServiceReference):
			serviceref = ServiceReference(serviceref)
		recording = RecordTimerEntry(serviceref, begin, end, name, description, eventid, dirname = preferredInstantRecordPath())
		recording.dontSave = True
		if event is None or limitEvent == False:
			recording.autoincrease = True
			recording.setAutoincreaseEnd()
		simulTimerList = self.session.nav.RecordTimer.record(recording)
		if simulTimerList is None:	# no conflict
			self.recording.append(recording)
		else:
			if len(simulTimerList) > 1: # with other recording
				name = simulTimerList[1].name
				name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
				print "[TIMER] conflicts with", name_date
				recording.autoincrease = True	# start with max available length, then increment
				if recording.setAutoincreaseEnd():
					self.session.nav.RecordTimer.record(recording)
					self.recording.append(recording)
					self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
				else:
					self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
			else:
				self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
			recording.autoincrease = False
	def isInstantRecordRunning(self):
		"""Return True when at least one of our instant recordings is active."""
		# print "self.recording:", self.recording
		if self.recording:
			for x in self.recording:
				if x.isRunning():
					return True
		return False
	def recordQuestionCallback(self, answer):
		"""ChoiceBox callback from instantRecord(); answer is (label, key) or None."""
		# print "pre:\n", self.recording
		if answer is None or answer[1] == "no":
			return
		list = []
		# Iterate over a copy; entries may be removed from self.recording.
		recording = self.recording[:]
		for x in recording:
			if not x in self.session.nav.RecordTimer.timer_list:
				self.recording.remove(x)
			elif x.dontSave and x.isRunning():
				list.append((x, False))
		if answer[1] == "changeduration":
			if len(self.recording) == 1:
				self.changeDuration(0)
			else:
				self.session.openWithCallback(self.changeDuration, TimerSelection, list)
		elif answer[1] == "changeendtime":
			if len(self.recording) == 1:
				self.setEndtime(0)
			else:
				self.session.openWithCallback(self.setEndtime, TimerSelection, list)
		elif answer[1] == "stop":
			self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
			self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
			if answer[1] == "manualduration":
				self.changeDuration(len(self.recording)-1)
			elif answer[1] == "manualendtime":
				self.setEndtime(len(self.recording)-1)
		# print "after:\n", self.recording
		# Timeshift-save answers are handled separately from timer answers.
		if config.timeshift.enabled.getValue():
			if answer is not None and answer[1] == "savetimeshift":
				if InfoBarSeek.isSeekable(self) and self.pts_eventcount != self.pts_currplaying:
					InfoBarTimeshift.SaveTimeshift(self, timeshiftfile="pts_livebuffer.%s" % self.pts_currplaying)
					#InfoBarTimeshift.saveTimeshiftEventPopup(self)
				else:
					Notifications.AddNotification(MessageBox,_("Timeshift will get saved at end of event!"), MessageBox.TYPE_INFO, timeout=5)
					self.save_current_timeshift = True
					config.timeshift.isRecording.value = True
			if answer is not None and answer[1] == "savetimeshiftEvent":
				InfoBarTimeshift.saveTimeshiftEventPopup(self)
			if answer is not None and answer[1].startswith("pts_livebuffer") is True:
				InfoBarTimeshift.SaveTimeshift(self, timeshiftfile=answer[1])
	def setEndtime(self, entry):
		"""Ask for a new end time for recording *entry* (index into self.recording)."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
			dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
			dlg.setTitle(_("Please change recording endtime"))
	def TimeDateInputClosed(self, ret):
		"""TimeDateInput callback; ret is (confirmed, timestamp)."""
		if len(ret) > 1:
			if ret[0]:
				# print "stopping recording at", strftime("%F %T", localtime(ret[1]))
				if self.recording[self.selectedEntry].end != ret[1]:
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = ret[1]
			else:
				# Cancelled: stop the recording now.
				if self.recording[self.selectedEntry].end != int(time()):
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = int(time())
			self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
	def changeDuration(self, entry):
		"""Ask for a recording duration (minutes) for *entry*."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5", maxSize=False, type=Input.NUMBER)
	def inputCallback(self, value):
		"""InputBox callback: set the recording end to now + value minutes (0 stops now)."""
		# print "stopping recording after", int(value), "minutes."
		entry = self.recording[self.selectedEntry]
		if value is not None:
			if int(value) != 0:
				entry.autoincrease = False
				entry.end = int(time()) + 60 * int(value)
			else:
				if entry.end != int(time()):
					entry.autoincrease = False
				entry.end = int(time())
			self.session.nav.RecordTimer.timeChanged(entry)
	def instantRecord(self):
		"""Entry point for the record key: show the instant-record choice box."""
		if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
			self.secondInfoBarScreen.hide()
			self.secondInfoBarWasShown = False
		pirr = preferredInstantRecordPath()
		# Refuse to offer recording when no writable record path exists.
		if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
			if not pirr:
				pirr = ""
			self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
						 "\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
			return
		common =((_("Add recording (stop after current event)"), "event"),
			(_("Add recording (indefinitely)"), "indefinitely"),
			(_("Add recording (enter recording duration)"), "manualduration"),
			(_("Add recording (enter recording endtime)"), "manualendtime"),)
		timeshiftcommon = ((_("Timeshift save recording (stop after current event)"), "savetimeshift"),
			(_("Timeshift save recording (Select event)"), "savetimeshiftEvent"),)
		if self.isInstantRecordRunning():
			title =_("A recording is currently running.\nWhat do you want to do?")
			list = ((_("Stop recording"), "stop"),) + common + \
				((_("Change recording (duration)"), "changeduration"),
				(_("Change recording (endtime)"), "changeendtime"),)
		else:
			title=_("Start recording?")
			list = common
		if config.timeshift.enabled.getValue() or self.timeshift_enabled:
			list = list + timeshiftcommon
		list = list + ((_("Do not record"), "no"),)
		self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox,title=title,list=list)
		return
class InfoBarAudioSelection:
	"""Bind the audio key to the audio-track/options selection dialog."""
	def __init__(self):
		self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
			{
				"audioSelection": (self.audioSelection, _("Audio options...")),
			})
	def audioSelection(self):
		"""Hide the second infobar (if shown) and open the audio selection screen."""
		if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
			self.secondInfoBarScreen.hide()
			self.secondInfoBarWasShown = False
		from Screens.AudioSelection import AudioSelection
		self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
	def audioSelected(self, ret=None):
		# Close callback; ret is whatever AudioSelection returned (debug only).
		print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
	"""Selection and quick-zap between the subservices of the current service
	(e.g. regional variants / multi-feed events)."""
	def __init__(self):
		self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
			{
				"GreenPressed": (self.GreenPressed),
			})
		self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
			{
				"nextSubservice": (self.nextSubservice, _("Switch to next subservice")),
				"prevSubservice": (self.prevSubservice, _("Switch to previous subservice"))
			}, -1)
		# Quickzap keys stay disabled until a subservice is actually selected.
		self["SubserviceQuickzapAction"].setEnabled(False)
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
			})
		self.onClose.append(self.__removeNotifications)
		# Bouquet selector dialog, kept so addSubserviceToBouquet can close it.
		self.bsel = None
	def GreenPressed(self):
		if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
			self.secondInfoBarScreen.hide()
			self.secondInfoBarWasShown = False
		self.subserviceSelection()
	def __removeNotifications(self):
		self.session.nav.event.remove(self.checkSubservicesAvail)
	def checkSubservicesAvail(self):
		# Disable quickzap again when the new event carries no subservices.
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		if not subservices or subservices.getNumberOfSubservices() == 0:
			self["SubserviceQuickzapAction"].setEnabled(False)
	def nextSubservice(self):
		self.changeSubservice(+1)
	def prevSubservice(self):
		self.changeSubservice(-1)
	def changeSubservice(self, direction):
		"""Zap to the next/previous subservice relative to the one playing now."""
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		n = subservices and subservices.getNumberOfSubservices()
		if n and n > 0:
			selection = -1
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			idx = 0
			# Locate the currently playing subservice in the list.
			while idx < n:
				if subservices.getSubservice(idx).toString() == ref.toString():
					selection = idx
					break
				idx += 1
			if selection != -1:
				selection += direction
				# Wrap around at both ends.
				if selection >= n:
					selection=0
				elif selection < 0:
					selection=n-1
				newservice = subservices.getSubservice(selection)
				if newservice.valid():
					del subservices
					del service
					self.session.nav.playService(newservice, False)
	def subserviceSelection(self):
		"""Open a ChoiceBox listing all subservices plus quickzap/bouquet actions."""
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		self.bouquets = self.servicelist.getBouquetList()
		n = subservices and subservices.getNumberOfSubservices()
		selection = 0
		if n and n > 0:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			tlist = []
			idx = 0
			while idx < n:
				i = subservices.getSubservice(idx)
				if i.toString() == ref.toString():
					selection = idx
				tlist.append((i.getName(), i))
				idx += 1
			if self.bouquets and len(self.bouquets):
				keys = ["red", "blue", "",  "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
				if config.usage.multibouquet.getValue():
					tlist = [(_("Quickzap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
				else:
					tlist = [(_("Quickzap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
				# Offset the preselection past the action rows prepended above.
				selection += 3
			else:
				tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
				keys = ["red", "",  "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
				selection += 2
			self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a subservice..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
	def subserviceSelected(self, service):
		"""ChoiceBox callback: zap to the chosen subservice or start quickzap."""
		del self.bouquets
		if not service is None:
			if isinstance(service[1], str):
				if service[1] == "quickzap":
					from Screens.SubservicesQuickzap import SubservicesQuickzap
					self.session.open(SubservicesQuickzap, service[2])
			else:
				self["SubserviceQuickzapAction"].setEnabled(True)
				self.session.nav.playService(service[1], False)
	def addSubserviceToBouquetCallback(self, service):
		"""CALLFUNC entry: remember the subservice, then pick a target bouquet."""
		if len(service) > 1 and isinstance(service[1], eServiceReference):
			self.selectedSubservice = service
			if self.bouquets is None:
				cnt = 0
			else:
				cnt = len(self.bouquets)
			if cnt > 1: # show bouquet list
				self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
			elif cnt == 1: # add to only one existing bouquet
				self.addSubserviceToBouquet(self.bouquets[0][1])
				self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO)
	def bouquetSelClosed(self, confirmed):
		self.bsel = None
		del self.selectedSubservice
		if confirmed:
			self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO)
	def addSubserviceToBouquet(self, dest):
		"""Add the remembered subservice to bouquet *dest*."""
		self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
		if self.bsel:
			self.bsel.close(True)
		else:
			del self.selectedSubservice
	def openTimerList(self):
		self.session.open(TimerEditList)
class InfoBarRedButton:
	"""Route the red remote key to HbbTV (other red-button services are TODO)."""
	def __init__(self):
		actions = {
			"activateRedButton": (self.activateRedButton, _("Red button...")),
		}
		self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions", actions)
		# Callback lists that HbbTV/red-button plugins append to.
		self.onHBBTVActivation = []
		self.onRedButtonActivation = []
	def activateRedButton(self):
		"""Hide the second infobar, then fire the HbbTV callbacks if the
		current service advertises an HbbTV application URL."""
		if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
			self.secondInfoBarScreen.hide()
			self.secondInfoBarWasShown = False
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
			for callback in self.onHBBTVActivation:
				callback()
		elif False: # TODO: other red button services
			for callback in self.onRedButtonActivation:
				callback()
class InfoBarTimerButton:
	"""Bind the timer key to the timer-list editor screen."""
	def __init__(self):
		mapping = {
			"timerSelection": (self.timerSelection, _("Timer selection...")),
		}
		self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions", mapping)
	def timerSelection(self):
		"""Open the timer overview/editor."""
		from Screens.TimerEdit import TimerEditList
		self.session.open(TimerEditList)
class InfoBarAdditionalInfo:
	"""Expose static capability flags to the skin (record/timeshift possible,
	which features sit on which colour keys)."""
	def __init__(self):
		# Recording (and therefore timeshift) requires at least one hard disk.
		self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
		self["TimeshiftPossible"] = self["RecordingPossible"]
		self["ExtensionsAvailable"] = Boolean(fixed=1)
		# TODO: these properties should be queried from the input device keymap
		self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
		self["ShowAudioOnYellow"] = Boolean(fixed=0)
		self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
	"""Pump queued Notifications into dialogs whenever this screen is executing."""
	def __init__(self):
		self.onExecBegin.append(self.checkNotifications)
		Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
		self.onClose.append(self.__removeNotification)
	def __removeNotification(self):
		# Detach from the global hook so a closed screen is not called back.
		Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)
	def checkNotificationsIfExecing(self):
		# Only pop notifications while this screen is actually on the stack.
		if self.execing:
			self.checkNotifications()
	def checkNotifications(self):
		"""Pop the oldest pending notification and open it as a dialog.

		Each queue entry is (callback, screen_class, args, kwargs, id).
		"""
		notifications = Notifications.notifications
		if notifications:
			n = notifications[0]
			del notifications[0]
			cb = n[0]
			# "in" replaces the removed dict.has_key(); same semantics,
			# valid on both Python 2 and 3.
			if "onSessionOpenCallback" in n[3]:
				n[3]["onSessionOpenCallback"]()
				del n[3]["onSessionOpenCallback"]
			if cb is not None:
				dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
			else:
				dlg = self.session.open(n[1], *n[2], **n[3])
			# remember that this notification is currently active
			d = (n[4], dlg)
			Notifications.current_notifications.append(d)
			dlg.onClose.append(boundFunction(self.__notificationClosed, d))
	def __notificationClosed(self, d):
		Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
	"""React to service playback events: when the service ends, drop any
	trick-play mode back to normal play speed."""
	def __init__(self):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.serviceHasEnded
			})
	def serviceHasEnded(self):
		# Not every screen mixing this in also has InfoBarSeek, so setSeekState
		# may be missing. Catch Exception only (the old bare "except:" also
		# swallowed SystemExit/KeyboardInterrupt).
		try:
			self.setSeekState(self.SEEK_STATE_PLAY)
		except Exception:
			pass
class InfoBarCueSheetSupport:
	"""Cut-list (cue sheet) support: jump between marks, toggle marks, and
	resume playback from the last stored position.

	Cut-list entries are (pts, type) pairs; 90000 pts == 1 second.
	"""
	CUT_TYPE_IN = 0
	CUT_TYPE_OUT = 1
	CUT_TYPE_MARK = 2
	CUT_TYPE_LAST = 3
	# Subclasses that want "resume from last position" set this to True.
	ENABLE_RESUME_SUPPORT = False
	def __init__(self, actionmap = "InfobarCueSheetActions"):
		self["CueSheetActions"] = HelpableActionMap(self, actionmap,
			{
				"jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
				"jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
				"toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
			}, prio=1)
		self.cut_list = [ ]
		self.is_closing = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceStarted,
			})
	def __serviceStarted(self):
		"""On service start: load the cut list and optionally offer to resume."""
		if self.is_closing:
			return
		# print "new service started! trying to download cuts!"
		self.downloadCuesheet()
		if self.ENABLE_RESUME_SUPPORT:
			# Prefer the LAST mark from the cut list; fall back to the
			# session-stored resume point.
			for (pts, what) in self.cut_list:
				if what == self.CUT_TYPE_LAST:
					last = pts
					break
			else:
				last = getResumePoint(self.session)
			if last is None:
				return
			# only resume if at least 10 seconds ahead, or <10 seconds before the end.
			seekable = self.__getSeekable()
			if seekable is None:
				return # Should not happen?
			length = seekable.getLength() or (None,0)
			# print "seekable.getLength() returns:", length
			# Hmm, this implies we don't resume if the length is unknown...
			if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
				self.resume_point = last
				l = last / 90000
				if config.usage.on_movie_start.getValue() == "ask" or not length[1]:
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10)
				elif config.usage.on_movie_start.getValue() == "resume":
# TRANSLATORS: The string "Resuming playback" flashes for a moment
# TRANSLATORS: at the start of a movie, when the user has selected
# TRANSLATORS: "Resume from last position" as start behavior.
# TRANSLATORS: The purpose is to notify the user that the movie starts
# TRANSLATORS: in the middle somewhere and not from the beginning.
# TRANSLATORS: (Some translators seem to have interpreted it as a
# TRANSLATORS: question or a choice, but it is a statement.)
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)
	def playLastCB(self, answer):
		# MessageBox callback for the resume question above.
		if answer == True and self.resume_point:
			self.doSeek(self.resume_point)
		self.hideAfterResume()
	def hideAfterResume(self):
		if isinstance(self, InfoBarShowHide):
			self.hide()
	def __getSeekable(self):
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.seek()
	def cueGetCurrentPosition(self):
		"""Return the current play position in pts, or None if not seekable."""
		seek = self.__getSeekable()
		if seek is None:
			return None
		r = seek.getPlayPosition()
		if r[0]:
			return None
		return long(r[1])
	def cueGetEndCutPosition(self):
		"""Return the pts of the final OUT cut, or False if playback ends in an IN span."""
		ret = False
		isin = True
		for cp in self.cut_list:
			if cp[1] == self.CUT_TYPE_OUT:
				if isin:
					isin = False
					ret = cp[0]
			elif cp[1] == self.CUT_TYPE_IN:
				isin = True
		return ret
	def jumpPreviousNextMark(self, cmp, start=False):
		"""Seek to the nearest mark as chosen by *cmp*; return True on success."""
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			return False
		mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
		if mark is not None:
			pts = mark[0]
		else:
			return False
		self.doSeek(pts)
		return True
	def jumpPreviousMark(self):
		# we add 5 seconds, so if the play position is <5s after
		# the mark, the mark before will be used
		self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)
	def jumpNextMark(self):
		# No next mark: jump (almost) to the end of the recording.
		if not self.jumpPreviousNextMark(lambda x: x-90000):
			self.doSeek(-1)
	def getNearestCutPoint(self, pts, cmp=abs, start=False):
		"""Find the cut-list entry nearest *pts* under metric *cmp*.

		start=True also considers position 0 and lets an IN cut reset the
		search (marks before the effective start are disregarded).
		"""
		# can be optimized
		beforecut = True
		nearest = None
		bestdiff = -1
		instate = True
		if start:
			bestdiff = cmp(0 - pts)
			if bestdiff >= 0:
				nearest = [0, False]
		for cp in self.cut_list:
			if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
				beforecut = False
				if cp[1] == self.CUT_TYPE_IN:  # Start is here, disregard previous marks
					diff = cmp(cp[0] - pts)
					if start and diff >= 0:
						nearest = cp
						bestdiff = diff
					else:
						nearest = None
						bestdiff = -1
			if cp[1] == self.CUT_TYPE_IN:
				instate = True
			elif cp[1] == self.CUT_TYPE_OUT:
				instate = False
			elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
				diff = cmp(cp[0] - pts)
				if instate and diff >= 0 and (nearest is None or bestdiff > diff):
					nearest = cp
					bestdiff = diff
		return nearest
	def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
		"""Add/remove a mark at the current position.

		A mark within *tolerance* pts (default 5s) of the position is removed
		(or returned when onlyreturn is set); otherwise a new MARK is added.
		"""
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			# print "not seekable"
			return
		nearest_cutpoint = self.getNearestCutPoint(current_pos)
		if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
			if onlyreturn:
				return nearest_cutpoint
			if not onlyadd:
				self.removeMark(nearest_cutpoint)
		elif not onlyremove and not onlyreturn:
			self.addMark((current_pos, self.CUT_TYPE_MARK))
		if onlyreturn:
			return None
	def addMark(self, point):
		# Keep cut_list sorted by pts.
		insort(self.cut_list, point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()
	def removeMark(self, point):
		self.cut_list.remove(point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()
	def showAfterCuesheetOperation(self):
		if isinstance(self, InfoBarShowHide):
			self.doShow()
	def __getCuesheet(self):
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.cueSheet()
	def uploadCuesheet(self):
		"""Write self.cut_list back to the service's cue sheet."""
		cue = self.__getCuesheet()
		if cue is None:
			# print "upload failed, no cuesheet interface"
			return
		cue.setCutList(self.cut_list)
	def downloadCuesheet(self):
		"""Load self.cut_list from the service's cue sheet (empty if unsupported)."""
		cue = self.__getCuesheet()
		if cue is None:
			# print "download failed, no cuesheet interface"
			self.cut_list = [ ]
		else:
			self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
	"""Front-display (LCD) summary for the TV infobar: service name, clock
	(blinking while recording, if configured) and current-event progress."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
			<convert type="EventTime">Progress</convert>
		</widget>
	</screen>"""
# for picon: (path="piconlcd" will use LCD picons)
# <widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
# <convert type="ServiceName">Reference</convert>
# </widget>
class InfoBarSummarySupport:
	"""Provide the LCD summary screen class for the regular infobar."""
	def __init__(self):
		pass
	def createSummary(self):
		"""Return the Screen class used for the front-display summary."""
		return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
	"""Front-display (LCD) summary for the movie player: service name, clock
	and playback position, plus state/speed text fed by the parent screen."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
			<convert type="ServicePosition">Position</convert>
		</widget>
	</screen>"""
	def __init__(self, session, parent):
		Screen.__init__(self, session, parent = parent)
		self["state_summary"] = StaticText("")
		self["speed_summary"] = StaticText("")
		self["statusicon_summary"] = MultiPixmap()
		# Track the parent's playback state only while we are visible.
		self.onShow.append(self.addWatcher)
		self.onHide.append(self.removeWatcher)
	def addWatcher(self):
		self.parent.onChangedEntry.append(self.selectionChanged)
	def removeWatcher(self):
		self.parent.onChangedEntry.remove(self.selectionChanged)
	def selectionChanged(self, state_summary, speed_summary, statusicon_summary):
		"""Mirror the parent's play state, speed text and status icon index."""
		self["state_summary"].setText(state_summary)
		self["speed_summary"].setText(speed_summary)
		self["statusicon_summary"].setPixmapNum(int(statusicon_summary))
class InfoBarMoviePlayerSummarySupport:
	"""Provide the LCD summary screen class for the movie player."""
	def __init__(self):
		pass
	def createSummary(self):
		"""Return the Screen class used for the front-display summary."""
		return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
	"""Hook the teletext key up to an installed teletext plugin (if any)."""
	def __init__(self):
		self.teletext_plugin = None
		# If several teletext plugins are installed, the last one wins.
		for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
			self.teletext_plugin = p
		if self.teletext_plugin is not None:
			self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
				{
					"startTeletext": (self.startTeletext, _("View teletext..."))
				})
		else:
			print "no teletext plugin found!"
	def startTeletext(self):
		"""Hide the second infobar and launch the teletext plugin for the current service."""
		if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
			self.secondInfoBarScreen.hide()
			self.secondInfoBarWasShown = False
		self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
	"""Subtitle selection and display: track the service's subtitle interface,
	auto-enable a cached subtitle track, and expose enable/select as properties."""
	def __init__(self):
		object.__init__(self)
		self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
			{
				"subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
			})
		self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
		self.__subtitles_enabled = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.__serviceStopped,
				iPlayableService.evUpdatedInfo: self.__updatedInfo
			})
		self.__selected_subtitle = None
	def subtitleSelection(self):
		"""Open the subtitle selection screen when tracks exist or subtitles are on."""
		if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
			self.secondInfoBarScreen.hide()
			self.secondInfoBarWasShown = False
		service = self.session.nav.getCurrentService()
		subtitle = service and service.subtitle()
		subtitlelist = subtitle and subtitle.getSubtitleList()
		if self.__subtitles_enabled or subtitlelist and len(subtitlelist)>0:
			from Screens.AudioSelection import SubtitleSelection
			self.session.open(SubtitleSelection, self)
	def __serviceStopped(self):
		# Service ended: hide the window and forget the selection.
		if self.__subtitles_enabled:
			self.subtitle_window.hide()
			self.__subtitles_enabled = False
			self.__selected_subtitle = None
	def __updatedInfo(self):
		"""Re-apply a cached subtitle track when the service info changes."""
		subtitle = self.getCurrentServiceSubtitle()
		# BUGFIX: the service may not expose a subtitle interface at all;
		# dereferencing before the None-check raised AttributeError here.
		cachedsubtitle = subtitle and subtitle.getCachedSubtitle()
		if subtitle and cachedsubtitle:
			if self.__selected_subtitle and self.__subtitles_enabled and cachedsubtitle != self.__selected_subtitle:
				subtitle.disableSubtitles(self.subtitle_window.instance)
				self.subtitle_window.hide()
				self.__subtitles_enabled = False
			self.setSelectedSubtitle(cachedsubtitle)
			self.setSubtitlesEnable(True)
	def getCurrentServiceSubtitle(self):
		"""Return the current service's subtitle interface, or None."""
		service = self.session.nav.getCurrentService()
		return service and service.subtitle()
	def setSubtitlesEnable(self, enable=True):
		"""Turn subtitle rendering on/off for the currently selected track."""
		subtitle = self.getCurrentServiceSubtitle()
		if enable:
			# Enabling requires a selected track; silently ignore otherwise.
			if self.__selected_subtitle:
				if subtitle and not self.__subtitles_enabled:
					subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
					self.subtitle_window.show()
					self.__subtitles_enabled = True
		else:
			if subtitle and self.__subtitles_enabled:
				subtitle.disableSubtitles(self.subtitle_window.instance)
			self.__selected_subtitle = False
			self.__subtitles_enabled = False
			self.subtitle_window.hide()
	def setSelectedSubtitle(self, subtitle):
		self.__selected_subtitle = subtitle
	# Property facade used by the SubtitleSelection screen.
	subtitles_enabled = property(lambda self: self.__subtitles_enabled, setSubtitlesEnable)
	selected_subtitle = property(lambda self: self.__selected_subtitle, setSelectedSubtitle)
from EGAMI.EGAMI_infobar_setup import *
class InfoBarServiceErrorPopupSupport:
	"""Show a popup describing why a zap/tune failed, honouring the per-error
	enable switches in config.EGDecoding; clear it on the next service start."""
	def __init__(self):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evTuneFailed: self.__tuneFailed,
				iPlayableService.evStart: self.__serviceStarted
			})
		self.__serviceStarted()
	def __serviceStarted(self):
		# New service: reset error de-duplication and remove any stale popup.
		self.last_error = None
		Notifications.RemovePopup(id = "ZapError")
	def __tuneFailed(self):
		"""Map the DVB state to a user message and raise/clear the ZapError popup."""
		service = self.session.nav.getCurrentService()
		info = (service and service.info ())
		error = (info and info.getInfo(iServiceInformation.sDVBState))
		# Suppress repeats of the same error for the same service.
		if (error == self.last_error):
			error = None
		else:
			self.last_error = error
		errors = { eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
				eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
				eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
				eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
				eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
				eDVBServicePMTHandler.eventNewProgramInfo: None,
				eDVBServicePMTHandler.eventTuned: None,
				eDVBServicePMTHandler.eventSOF: None,
				eDVBServicePMTHandler.eventEOF: None}
		if (error is not None):
			# Per-error user switches: bail out silently when disabled.
			if ((error == eDVBServicePMTHandler.eventNoResources) and (config.EGDecoding.messageNoResources.value == False)):
				return
			elif ((error == eDVBServicePMTHandler.eventTuneFailed ) and (config.EGDecoding.messageTuneFailed.value == False)):
				return
			elif ((error == eDVBServicePMTHandler.eventNoPAT) and (config.EGDecoding.messageNoPAT.value == False)):
				return
			elif ((error == eDVBServicePMTHandler.eventNoPATEntry) and (config.EGDecoding.messageNoPATEntry.value == False)):
				return
			elif ((error == eDVBServicePMTHandler.eventNoPMT) and (config.EGDecoding.messageNoPMT.value == False)):
				return
			error = errors.get(error)
			if (error is not None):
				Notifications.AddPopup (text=error, type=MessageBox.TYPE_ERROR, timeout=5, id="ZapError")
			else:
				Notifications.RemovePopup(id="ZapError")
class InfoBarZoom:
def __init__(self):
self.zoomrate=0
self.zoomin=1
self["ZoomActions"] = HelpableActionMap(self, "InfobarZoomActions",
{
"ZoomInOut":(self.ZoomInOut, _("Zoom In/Out TV...")),
"ZoomOff":(self.ZoomOff, _("Zoom Off...")),
}, prio=2)
def ZoomInOut(self):
zoomval=0
if self.zoomrate > 3:
self.zoomin = 0
elif self.zoomrate < -9:
self.zoomin = 1
if self.zoomin == 1:
self.zoomrate += 1
else:
self.zoomrate -= 1
if self.zoomrate < 0:
zoomval=abs(self.zoomrate)+10
else:
zoomval=self.zoomrate
print "zoomRate:", self.zoomrate
print "zoomval:", zoomval
try:
file = open("/proc/stb/vmpeg/0/zoomrate", "w")
file.write('%d' % int(zoomval))
file.close()
except:
pass
def ZoomOff(self):
self.zoomrate = 0
self.zoomin = 1
try:
f = open("/proc/stb/vmpeg/0/zoomrate", "w")
f.write(str(0))
f.close()
except:
pass
###################################
### PTS CopyTimeshift Task ###
###################################
class CopyTimeshiftJob(Job):
	"""Background job that saves a timeshift buffer to a recording file."""
	def __init__(self, toolbox, cmdline, srcfile, destfile, eventname):
		self.toolbox = toolbox
		Job.__init__(self, _("Saving Timeshift files"))
		AddCopyTimeshiftTask(self, cmdline, srcfile, destfile, eventname)
class AddCopyTimeshiftTask(Task):
	"""Task that copies a timeshift buffer to its final .ts file, reporting
	progress by comparing the destination size against the source size."""
	def __init__(self, job, cmdline, srcfile, destfile, eventname):
		Task.__init__(self, job, eventname)
		self.toolbox = job.toolbox
		self.setCmdline(cmdline)
		self.srcfile = config.usage.timeshift_path.getValue() + srcfile + ".copy"
		self.destfile = destfile + ".ts"
		# Real size is learned in prepare(); start at 0 so ProgressUpdate()
		# can never hit an AttributeError if it fires without prepare()
		# having found the source file.
		self.srcsize = 0
		self.ProgressTimer = eTimer()
		self.ProgressTimer.callback.append(self.ProgressUpdate)
	def ProgressUpdate(self):
		# No source size yet, or destination not created: nothing to report.
		if self.srcsize <= 0 or not Directories.fileExists(self.destfile, 'r'):
			return
		self.setProgress(int((os.path.getsize(self.destfile)/float(self.srcsize))*100))
		# Re-arm as a single-shot poll.
		self.ProgressTimer.start(15000, True)
	def prepare(self):
		if Directories.fileExists(self.srcfile, 'r'):
			self.srcsize = os.path.getsize(self.srcfile)
			self.ProgressTimer.start(15000, True)
		self.toolbox.ptsFrontpanelActions("start")
	def afterRun(self):
		self.setProgress(100)
		self.ProgressTimer.stop()
		self.toolbox.ptsCopyFilefinished(self.srcfile, self.destfile)
		config.timeshift.isRecording.value = True
###################################
### PTS MergeTimeshift Task ###
###################################
class MergeTimeshiftJob(Job):
    """Job that merges saved timeshift data with an existing recording;
    the actual work is done by AddMergeTimeshiftTask."""
    def __init__(self, toolbox, cmdline, srcfile, destfile, eventname):
        Job.__init__(self, _("Merging Timeshift files"))
        # toolbox is the infobar instance providing the pts* callbacks.
        self.toolbox = toolbox
        AddMergeTimeshiftTask(self, cmdline, srcfile, destfile, eventname)
class AddMergeTimeshiftTask(Task):
    """Task that runs the external merge command joining two recording
    files in the default recording directory.

    Progress is polled every 7.5 seconds from the destination file size
    relative to the combined size of both input files.
    """
    def __init__(self, job, cmdline, srcfile, destfile, eventname):
        Task.__init__(self, job, eventname)
        self.toolbox = job.toolbox
        self.setCmdline(cmdline)
        # Both files live in the configured default recording path.
        self.srcfile = config.usage.default_path.getValue() + srcfile
        self.destfile = config.usage.default_path.getValue() + destfile
        self.ProgressTimer = eTimer()
        self.ProgressTimer.callback.append(self.ProgressUpdate)
    def ProgressUpdate(self):
        # Nothing sensible to report until the destination file exists.
        if self.srcsize <= 0 or not Directories.fileExists(self.destfile, 'r'):
            return
        self.setProgress(int((os.path.getsize(self.destfile)/float(self.srcsize))*100))
        # Re-arm the single-shot timer for the next poll.
        self.ProgressTimer.start(7500, True)
    def prepare(self):
        # The 100% reference is the sum of both input sizes.
        if Directories.fileExists(self.srcfile, 'r') and Directories.fileExists(self.destfile, 'r'):
            fsize1 = os.path.getsize(self.srcfile)
            fsize2 = os.path.getsize(self.destfile)
            self.srcsize = fsize1 + fsize2
            self.ProgressTimer.start(7500, True)
        # NOTE(review): presumably front-panel feedback -- confirm.
        self.toolbox.ptsFrontpanelActions("start")
    def afterRun(self):
        self.setProgress(100)
        self.ProgressTimer.stop()
        config.timeshift.isRecording.value = True
        self.toolbox.ptsMergeFilefinished(self.srcfile, self.destfile)
##################################
### Create APSC Files Task ###
##################################
class CreateAPSCFilesJob(Job):
    """Job that builds the .ap and .sc companion files for a recording
    (presumably the access-point/structure index files -- confirm);
    the actual work is done by CreateAPSCFilesTask."""
    def __init__(self, toolbox, cmdline, eventname):
        Job.__init__(self, _("Creating AP and SC Files"))
        # toolbox is the infobar instance receiving the completion callback.
        self.toolbox = toolbox
        CreateAPSCFilesTask(self, cmdline, eventname)
class CreateAPSCFilesTask(Task):
    """Task that runs the external command generating the AP/SC files."""
    def __init__(self, job, cmdline, eventname):
        Task.__init__(self, job, eventname)
        self.toolbox = job.toolbox
        self.setCmdline(cmdline)
    def prepare(self):
        # NOTE(review): presumably front-panel feedback -- confirm
        # against toolbox.ptsFrontpanelActions.
        self.toolbox.ptsFrontpanelActions("start")
        config.timeshift.isRecording.value = True
    def afterRun(self):
        self.setProgress(100)
        # Notify the infobar that the whole save pipeline finished.
        self.toolbox.ptsSaveTimeshiftFinished()
class HistoryZapSelector(Screen):
    """Chooser screen for the service (zap) history.

    Note: the mutable default ``items=[]`` is kept for interface
    compatibility; the list is only read here, never mutated.
    """

    def __init__(self, session, items=[], sel_item=0, mark_item=0, invert_items=False, redirect_buttons=False, wrap_around=True):
        Screen.__init__(self, session)
        self.redirectButton = redirect_buttons
        self.invertItems = invert_items
        # With an inverted list the selection index counts from the end.
        self.currentPos = (len(items) - sel_item - 1) if self.invertItems else sel_item
        self["actions"] = ActionMap(["OkCancelActions", "InfobarCueSheetActions"],
            {
                "ok": self.okbuttonClick,
                "cancel": self.cancelClick,
                "jumpPreviousMark": self.prev,
                "jumpNextMark": self.next,
                "toggleMark": self.okbuttonClick,
            })
        self.setTitle(_("History zap..."))
        self.list = []
        for cnt, x in enumerate(items):
            # Flag the entry at mark_item with a "»" pointer column.
            entry = (x[1], cnt == mark_item and "»" or "", x[0])
            if self.invertItems:
                self.list.insert(0, entry)
            else:
                self.list.append(entry)
        self["menu"] = List(self.list, enableWrapAround=wrap_around)
        self.onShown.append(self.__onShown)

    def __onShown(self):
        # Apply the precomputed selection once the widget is realised.
        self["menu"].index = self.currentPos

    def prev(self):
        # Optionally swap the jump keys so they match the list direction.
        if self.redirectButton:
            self.down()
        else:
            self.up()

    def next(self):
        if self.redirectButton:
            self.up()
        else:
            self.down()

    def up(self):
        self["menu"].selectPrevious()

    def down(self):
        self["menu"].selectNext()

    def getCurrent(self):
        cur = self["menu"].current
        return cur and cur[0]

    def okbuttonClick(self):
        # Hand the selected entry (or None) back to the caller.
        self.close(self.getCurrent())

    def cancelClick(self):
        self.close(None)
|
gpl-2.0
| 8,780,597,409,504,721,000
| 35.550298
| 320
| 0.717599
| false
|
HiSPARC/station-software
|
user/python/Lib/lib-tk/Tkinter.py
|
1
|
160467
|
"""Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import Tkinter
from Tkconstants import *
tk = Tkinter.Tk()
frame = Tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = Tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = Tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
__version__ = "$Revision: 81008 $"
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
import FixTk
import _tkinter # If this fails your Python may not be configured for Tk
tkinter = _tkinter # b/w compat for export
TclError = _tkinter.TclError
from types import *
from Tkconstants import *
import re
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
# These are not always defined, e.g. not on Win32 with Tk 8.0 :-(
try: _tkinter.createfilehandler
except AttributeError: _tkinter.createfilehandler = None
try: _tkinter.deletefilehandler
except AttributeError: _tkinter.deletefilehandler = None
_magic_re = re.compile(r'([\\{}])')
_space_re = re.compile(r'([\s])')
def _join(value):
    """Internal function.  Tcl-quote each element and join with spaces."""
    return ' '.join([_stringify(item) for item in value])
def _stringify(value):
    """Internal function.

    Convert *value* to the string form Tcl expects: sequences become
    brace-grouped Tcl lists and special characters are backslash-escaped.

    NOTE: Python 2 code -- byte strings are decoded as UTF-8 and
    ``unicode`` is the text type.
    """
    if isinstance(value, (list, tuple)):
        if len(value) == 1:
            # Single-element sequence: stringify the element itself, but
            # re-brace it if the result already looks like a Tcl group.
            value = _stringify(value[0])
            if value[0] == '{':
                value = '{%s}' % value
        else:
            value = '{%s}' % _join(value)
    else:
        if isinstance(value, str):
            value = unicode(value, 'utf-8')
        elif not isinstance(value, unicode):
            value = str(value)
        if not value:
            # An empty string must become an explicit empty Tcl group.
            value = '{}'
        elif _magic_re.search(value):
            # add '\' before special characters and spaces
            value = _magic_re.sub(r'\\\1', value)
            value = _space_re.sub(r'\\\1', value)
        elif value[0] == '"' or _space_re.search(value):
            # Quote-initial or space-containing strings get brace-grouped.
            value = '{%s}' % value
    return value
def _flatten(tuple):
"""Internal function."""
res = ()
for item in tuple:
if type(item) in (TupleType, ListType):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
    """Internal function.

    Merge configuration arguments into one dict.  A dict, str or None is
    returned unchanged; any (possibly nested) sequence of dicts is merged
    left-to-right into a fresh dict.

    Changes from the original: ``except E, name`` (Python-2-only syntax,
    removed in Python 3) became ``except E as name`` (valid since 2.6),
    and the Python-2-only ``DictionaryType``/``NoneType``/``StringType``
    aliases were replaced by the identical builtin checks.
    """
    if type(cnfs) is dict:
        return cnfs
    elif cnfs is None or type(cnfs) is str:
        return cnfs
    else:
        cnf = {}
        for c in _flatten(cnfs):
            try:
                cnf.update(c)
            except (AttributeError, TypeError) as msg:
                # Fall back to manual copying for mappings that
                # dict.update() rejects.
                print("_cnfmerge: fallback due to: %s" % msg)
                for k, v in c.items():
                    cnf[k] = v
        return cnf
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
def _splitdict(tk, v, cut_minus=True, conv=None):
"""Return a properly formatted dict built from Tcl list pairs.
If cut_minus is True, the supposed '-' prefix will be removed from
keys. If conv is specified, it is used to convert values.
Tcl list is expected to contain an even number of elements.
"""
t = tk.splitlist(v)
if len(t) % 2:
raise RuntimeError('Tcl list representing a dict is expected '
'to contain an even number of elements')
it = iter(t)
dict = {}
for key, value in zip(it, it):
key = str(key)
if cut_minus and key[0] == '-':
key = key[1:]
if conv:
value = conv(value)
dict[key] = value
return dict
class Event:
    """Container for the properties of an event.

    Instances of this type are generated if one of the following events occurs:

    KeyPress, KeyRelease - for keyboard events
    ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
    Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
    Colormap, Gravity, Reparent, Property, Destroy, Activate,
    Deactivate - for window events.

    If a callback function for one of these events is registered
    using bind, bind_all, bind_class, or tag_bind, the callback is
    called with an Event as first argument. It will have the
    following attributes (in braces are the event types for which
    the attribute is valid):

    serial - serial number of event
    num - mouse button pressed (ButtonPress, ButtonRelease)
    focus - whether the window has the focus (Enter, Leave)
    height - height of the exposed window (Configure, Expose)
    width - width of the exposed window (Configure, Expose)
    keycode - keycode of the pressed key (KeyPress, KeyRelease)
    state - state of the event as a number (ButtonPress, ButtonRelease,
                            Enter, KeyPress, KeyRelease,
                            Leave, Motion)
    state - state as a string (Visibility)
    time - when the event occurred
    x - x-position of the mouse
    y - y-position of the mouse
    x_root - x-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    y_root - y-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    char - pressed character (KeyPress, KeyRelease)
    send_event - see X/Windows documentation
    keysym - keysym of the event as a string (KeyPress, KeyRelease)
    keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
    type - type of the event as a number
    widget - widget in which the event occurred
    delta - delta of wheel movement (MouseWheel)
    """
    # Intentionally empty: attribute values are attached dynamically by
    # the event dispatch machinery when an event fires (presumably in
    # Misc's callback substitution -- confirm in the full module).
    pass
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
    """Inhibit setting of default root window.

    Call this function to inhibit that the first instance of
    Tk is used for windows without an explicit parent window.
    """
    global _support_default_root, _default_root
    _support_default_root = 0
    # Drop any existing default root and remove the module-level name
    # entirely, so later implicit uses raise instead of silently reusing it.
    _default_root = None
    del _default_root
def _tkerror(err):
"""Internal function."""
pass
def _exit(code=0):
"""Internal function. Calling it will raise the exception SystemExit."""
try:
code = int(code)
except ValueError:
pass
raise SystemExit, code
_varnum = 0
class Variable:
    """Class to define value holders for e.g. buttons.

    Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
    that constrain the type of the value returned from get()."""
    # Fallback assigned to a brand-new Tcl variable (overridden per subclass).
    _default = ""
    # Names of Tcl trace-callback commands owned by this variable;
    # cleaned up again in __del__.
    _tclCommands = None
    def __init__(self, master=None, value=None, name=None):
        """Construct a variable

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        global _varnum
        if not master:
            master = _default_root
        self._root = master._root()
        self._tk = master.tk
        if name:
            self._name = name
        else:
            # Anonymous variables get a unique generated Tcl name.
            self._name = 'PY_VAR' + repr(_varnum)
            _varnum += 1
        if value is not None:
            self.set(value)
        elif not self._tk.getboolean(self._tk.call("info", "exists", self._name)):
            # Only seed the default when the Tcl variable does not exist
            # yet, so reusing an explicit NAME keeps its current value.
            self.set(self._default)
    def __del__(self):
        """Unset the variable in Tcl."""
        if self._tk is None:
            return
        if self._tk.getboolean(self._tk.call("info", "exists", self._name)):
            self._tk.globalunsetvar(self._name)
        if self._tclCommands is not None:
            # Remove all trace-callback commands registered on our behalf.
            for name in self._tclCommands:
                #print '- Tkinter: deleted command', name
                self._tk.deletecommand(name)
            self._tclCommands = None
    def __str__(self):
        """Return the name of the variable in Tcl."""
        return self._name
    def set(self, value):
        """Set the variable to VALUE."""
        return self._tk.globalsetvar(self._name, value)
    def get(self):
        """Return value of variable."""
        return self._tk.globalgetvar(self._name)
    def trace_variable(self, mode, callback):
        """Define a trace callback for the variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CALLBACK must be a function which is called when
        the variable is read, written or undefined.

        Return the name of the callback.
        """
        f = CallWrapper(callback, None, self._root).__call__
        cbname = repr(id(f))
        try:
            # Unwrap bound methods so the readable suffix below is the
            # underlying function's name.
            callback = callback.im_func
        except AttributeError:
            pass
        try:
            # Append the function name to make the Tcl command name
            # recognisable when debugging.
            cbname = cbname + callback.__name__
        except AttributeError:
            pass
        self._tk.createcommand(cbname, f)
        if self._tclCommands is None:
            self._tclCommands = []
        self._tclCommands.append(cbname)
        self._tk.call("trace", "variable", self._name, mode, cbname)
        return cbname
    trace = trace_variable
    def trace_vdelete(self, mode, cbname):
        """Delete the trace callback for a variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CBNAME is the name of the callback returned from trace_variable or trace.
        """
        self._tk.call("trace", "vdelete", self._name, mode, cbname)
        cbname = self._tk.splitlist(cbname)[0]
        # Only drop the Tcl command when no remaining trace still uses
        # it: the for/else fires when the loop finds no match.
        for m, ca in self.trace_vinfo():
            if self._tk.splitlist(ca)[0] == cbname:
                break
        else:
            self._tk.deletecommand(cbname)
            try:
                self._tclCommands.remove(cbname)
            except ValueError:
                pass
    def trace_vinfo(self):
        """Return all trace callback information."""
        return map(self._tk.splitlist, self._tk.splitlist(
            self._tk.call("trace", "vinfo", self._name)))
    def __eq__(self, other):
        """Comparison for equality (==).

        Note: if the Variable's master matters to behavior
        also compare self._master == other._master
        """
        return self.__class__.__name__ == other.__class__.__name__ \
            and self._name == other._name
class StringVar(Variable):
    """Value holder for strings variables."""
    _default = ""

    def __init__(self, master=None, value=None, name=None):
        """Construct a string variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return value of variable as string."""
        value = self._tk.globalgetvar(self._name)
        # Tcl may hand back a non-string object; coerce unless it is
        # already a (byte or unicode) string.
        return value if isinstance(value, basestring) else str(value)
class IntVar(Variable):
    """Value holder for integer variables."""
    _default = 0

    def __init__(self, master=None, value=None, name=None):
        """Construct an integer variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to 0)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def set(self, value):
        """Set the variable to value, converting booleans to integers."""
        # Tcl has no separate bool type; store True/False as 1/0.
        coerced = int(value) if isinstance(value, bool) else value
        return Variable.set(self, coerced)

    def get(self):
        """Return the value of the variable as an integer."""
        raw = self._tk.globalgetvar(self._name)
        return getint(raw)
class DoubleVar(Variable):
    """Value holder for float variables."""
    _default = 0.0

    def __init__(self, master=None, value=None, name=None):
        """Construct a float variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to 0.0)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as a float."""
        raw = self._tk.globalgetvar(self._name)
        return getdouble(raw)
class BooleanVar(Variable):
    """Value holder for boolean variables."""
    _default = False

    def __init__(self, master=None, value=None, name=None):
        """Construct a boolean variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to False)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def set(self, value):
        """Set the variable to VALUE."""
        # Route through Tcl's boolean parser so forms like "yes"/"0"
        # are normalised before being stored.
        normalised = self._tk.getboolean(value)
        return self._tk.globalsetvar(self._name, normalised)

    def get(self):
        """Return the value of the variable as a bool."""
        raw = self._tk.globalgetvar(self._name)
        return self._tk.getboolean(raw)
def mainloop(n=0):
    """Run the main loop of Tcl."""
    # Delegates to the implicit default root's interpreter.
    root = _default_root
    root.tk.mainloop(n)
getint = int
getdouble = float
def getboolean(s):
    """Convert true and false to integer values 1 and 0."""
    # Uses the default root's Tcl interpreter for the conversion.
    interp = _default_root.tk
    return interp.getboolean(s)
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# XXX font command?
_tclCommands = None
def destroy(self):
"""Internal function.
Delete all Tcl commands created for
this widget in the Tcl interpreter."""
if self._tclCommands is not None:
for name in self._tclCommands:
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
self._tclCommands = None
def deletecommand(self, name):
"""Internal function.
Delete the Tcl command provided in NAME."""
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
try:
self._tclCommands.remove(name)
except ValueError:
pass
def tk_strictMotif(self, boolean=None):
"""Set Tcl internal variable, whether the look and feel
should adhere to Motif.
A parameter of 1 means adhere to Motif (e.g. no color
change if mouse passes over slider).
Returns the set value."""
return self.tk.getboolean(self.tk.call(
'set', 'tk_strictMotif', boolean))
def tk_bisque(self):
"""Change the color scheme to light brown as used in Tk 3.6 and before."""
self.tk.call('tk_bisque')
def tk_setPalette(self, *args, **kw):
"""Set a new color scheme for all widget elements.
A single color as argument will cause that all colors of Tk
widget elements are derived from this.
Alternatively several keyword parameters and its associated
colors can be given. The following keywords are valid:
activeBackground, foreground, selectColor,
activeForeground, highlightBackground, selectBackground,
background, highlightColor, selectForeground,
disabledForeground, insertBackground, troughColor."""
self.tk.call(('tk_setPalette',)
+ _flatten(args) + _flatten(kw.items()))
def tk_menuBar(self, *args):
"""Do not use. Needed in Tk 3.6 and earlier."""
# obsolete since Tk 4.0
import warnings
warnings.warn('tk_menuBar() does nothing and will be removed in 3.6',
DeprecationWarning, stacklevel=2)
def wait_variable(self, name='PY_VAR'):
"""Wait until the variable is modified.
A parameter of type IntVar, StringVar, DoubleVar or
BooleanVar must be given."""
self.tk.call('tkwait', 'variable', name)
waitvar = wait_variable # XXX b/w compat
def wait_window(self, window=None):
"""Wait until a WIDGET is destroyed.
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'window', window._w)
def wait_visibility(self, window=None):
"""Wait until the visibility of a WIDGET changes
(e.g. it appears).
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'visibility', window._w)
def setvar(self, name='PY_VAR', value='1'):
"""Set Tcl variable NAME to VALUE."""
self.tk.setvar(name, value)
def getvar(self, name='PY_VAR'):
"""Return value of Tcl variable NAME."""
return self.tk.getvar(name)
getint = int
getdouble = float
def getboolean(self, s):
"""Return a boolean value for Tcl boolean values true and false given as parameter."""
return self.tk.getboolean(s)
def focus_set(self):
"""Direct input focus to this widget.
If the application currently does not have the focus
this widget will get the focus if the application gets
the focus through the window manager."""
self.tk.call('focus', self._w)
focus = focus_set # XXX b/w compat?
def focus_force(self):
"""Direct input focus to this widget even if the
application does not have the focus. Use with
caution!"""
self.tk.call('focus', '-force', self._w)
def focus_get(self):
"""Return the widget which has currently the focus in the
application.
Use focus_displayof to allow working with several
displays. Return None if application does not have
the focus."""
name = self.tk.call('focus')
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_displayof(self):
"""Return the widget which has currently the focus on the
display where this widget is located.
Return None if the application does not have the focus."""
name = self.tk.call('focus', '-displayof', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_lastfor(self):
"""Return the widget which would have the focus if top level
for this widget gets the focus from the window manager."""
name = self.tk.call('focus', '-lastfor', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def tk_focusFollowsMouse(self):
"""The widget under mouse will get automatically focus. Can not
be disabled easily."""
self.tk.call('tk_focusFollowsMouse')
def tk_focusNext(self):
"""Return the next widget in the focus order which follows
widget which has currently the focus.
The focus order first goes to the next child, then to
the children of the child recursively and then to the
next sibling which is higher in the stacking order. A
widget is omitted if it has the takefocus resource set
to 0."""
name = self.tk.call('tk_focusNext', self._w)
if not name: return None
return self._nametowidget(name)
def tk_focusPrev(self):
"""Return previous widget in the focus order. See tk_focusNext for details."""
name = self.tk.call('tk_focusPrev', self._w)
if not name: return None
return self._nametowidget(name)
def after(self, ms, func=None, *args):
"""Call function once after given time.
MS specifies the time in milliseconds. FUNC gives the
function which shall be called. Additional parameters
are given as parameters to the function call. Return
identifier to cancel scheduling with after_cancel."""
if not func:
# I'd rather use time.sleep(ms*0.001)
self.tk.call('after', ms)
else:
def callit():
try:
func(*args)
finally:
try:
self.deletecommand(name)
except TclError:
pass
callit.__name__ = func.__name__
name = self._register(callit)
return self.tk.call('after', ms, name)
def after_idle(self, func, *args):
"""Call FUNC once if the Tcl main loop has no event to
process.
Return an identifier to cancel the scheduling with
after_cancel."""
return self.after('idle', func, *args)
def after_cancel(self, id):
"""Cancel scheduling of function identified with ID.
Identifier returned by after or after_idle must be
given as first parameter."""
try:
data = self.tk.call('after', 'info', id)
# In Tk 8.3, splitlist returns: (script, type)
# In Tk 8.4, splitlist may return (script, type) or (script,)
script = self.tk.splitlist(data)[0]
self.deletecommand(script)
except TclError:
pass
self.tk.call('after', 'cancel', id)
def bell(self, displayof=0):
"""Ring a display's bell."""
self.tk.call(('bell',) + self._displayof(displayof))
# Clipboard handling:
def clipboard_get(self, **kw):
"""Retrieve data from the clipboard on window's display.
The window keyword defaults to the root window of the Tkinter
application.
The type keyword specifies the form in which the data is
to be returned and should be an atom name such as STRING
or FILE_NAME. Type defaults to STRING, except on X11, where the default
is to try UTF8_STRING and fall back to STRING.
This command is equivalent to:
selection_get(CLIPBOARD)
"""
if 'type' not in kw and self._windowingsystem == 'x11':
try:
kw['type'] = 'UTF8_STRING'
return self.tk.call(('clipboard', 'get') + self._options(kw))
except TclError:
del kw['type']
return self.tk.call(('clipboard', 'get') + self._options(kw))
def clipboard_clear(self, **kw):
"""Clear the data in the Tk clipboard.
A widget specified for the optional displayof keyword
argument specifies the target display."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'clear') + self._options(kw))
def clipboard_append(self, string, **kw):
"""Append STRING to the Tk clipboard.
A widget specified at the optional displayof keyword
argument specifies the target display. The clipboard
can be retrieved with selection_get."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'append') + self._options(kw)
+ ('--', string))
# XXX grab current w/o window argument
def grab_current(self):
"""Return widget which has currently the grab in this application
or None."""
name = self.tk.call('grab', 'current', self._w)
if not name: return None
return self._nametowidget(name)
def grab_release(self):
"""Release grab for this widget if currently set."""
self.tk.call('grab', 'release', self._w)
def grab_set(self):
"""Set grab for this widget.
A grab directs all events to this and descendant
widgets in the application."""
self.tk.call('grab', 'set', self._w)
def grab_set_global(self):
"""Set global grab for this widget.
A global grab directs all events to this and
descendant widgets on the display. Use with caution -
other applications do not get events anymore."""
self.tk.call('grab', 'set', '-global', self._w)
def grab_status(self):
"""Return None, "local" or "global" if this widget has
no, a local or a global grab."""
status = self.tk.call('grab', 'status', self._w)
if status == 'none': status = None
return status
def option_add(self, pattern, value, priority = None):
"""Set a VALUE (second parameter) for an option
PATTERN (first parameter).
An optional third parameter gives the numeric priority
(defaults to 80)."""
self.tk.call('option', 'add', pattern, value, priority)
def option_clear(self):
"""Clear the option database.
It will be reloaded if option_add is called."""
self.tk.call('option', 'clear')
def option_get(self, name, className):
"""Return the value for an option NAME for this widget
with CLASSNAME.
Values with higher priority override lower values."""
return self.tk.call('option', 'get', self._w, name, className)
def option_readfile(self, fileName, priority = None):
"""Read file FILENAME into the option database.
An optional second parameter gives the numeric
priority."""
self.tk.call('option', 'readfile', fileName, priority)
def selection_clear(self, **kw):
"""Clear the current X selection."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('selection', 'clear') + self._options(kw))
def selection_get(self, **kw):
"""Return the contents of the current X selection.
A keyword parameter selection specifies the name of
the selection and defaults to PRIMARY. A keyword
parameter displayof specifies a widget on the display
to use. A keyword parameter type specifies the form of data to be
fetched, defaulting to STRING except on X11, where UTF8_STRING is tried
before STRING."""
if 'displayof' not in kw: kw['displayof'] = self._w
if 'type' not in kw and self._windowingsystem == 'x11':
try:
kw['type'] = 'UTF8_STRING'
return self.tk.call(('selection', 'get') + self._options(kw))
except TclError:
del kw['type']
return self.tk.call(('selection', 'get') + self._options(kw))
def selection_handle(self, command, **kw):
"""Specify a function COMMAND to call if the X
selection owned by this widget is queried by another
application.
This function must return the contents of the
selection. The function will be called with the
arguments OFFSET and LENGTH which allows the chunking
of very long selections. The following keyword
parameters can be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
name = self._register(command)
self.tk.call(('selection', 'handle') + self._options(kw)
+ (self._w, name))
def selection_own(self, **kw):
"""Become owner of X selection.
A keyword parameter selection specifies the name of
the selection (default PRIMARY)."""
self.tk.call(('selection', 'own') +
self._options(kw) + (self._w,))
def selection_own_get(self, **kw):
"""Return owner of X selection.
The following keyword parameter can
be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
if 'displayof' not in kw: kw['displayof'] = self._w
name = self.tk.call(('selection', 'own') + self._options(kw))
if not name: return None
return self._nametowidget(name)
def send(self, interp, cmd, *args):
"""Send Tcl command CMD to different interpreter INTERP to be executed."""
return self.tk.call(('send', interp, cmd) + args)
def lower(self, belowThis=None):
"""Lower this widget in the stacking order."""
self.tk.call('lower', self._w, belowThis)
def tkraise(self, aboveThis=None):
"""Raise this widget in the stacking order."""
self.tk.call('raise', self._w, aboveThis)
lift = tkraise
def colormodel(self, value=None):
"""Useless. Not implemented in Tk."""
return self.tk.call('tk', 'colormodel', self._w, value)
def winfo_atom(self, name, displayof=0):
"""Return integer which represents atom NAME."""
args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
return getint(self.tk.call(args))
def winfo_atomname(self, id, displayof=0):
"""Return name of atom with identifier ID."""
args = ('winfo', 'atomname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_cells(self):
"""Return number of cells in the colormap for this widget."""
return getint(
self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
def winfo_class(self):
"""Return window class name of this widget."""
return self.tk.call('winfo', 'class', self._w)
    # --- "winfo" window-information queries -------------------------------
    # Each method below is a thin wrapper around Tk's "winfo" command; the
    # Tcl result is converted with getint/getdouble/_getints as appropriate.
    def winfo_colormapfull(self):
        """Return true if at the last color request the colormap was full."""
        return self.tk.getboolean(
            self.tk.call('winfo', 'colormapfull', self._w))
    def winfo_containing(self, rootX, rootY, displayof=0):
        """Return the widget which is at the root coordinates ROOTX, ROOTY."""
        args = ('winfo', 'containing') \
               + self._displayof(displayof) + (rootX, rootY)
        name = self.tk.call(args)
        if not name: return None
        return self._nametowidget(name)
    def winfo_depth(self):
        """Return the number of bits per pixel."""
        return getint(self.tk.call('winfo', 'depth', self._w))
    def winfo_exists(self):
        """Return true if this widget exists."""
        return getint(
            self.tk.call('winfo', 'exists', self._w))
    def winfo_fpixels(self, number):
        """Return the number of pixels for the given distance NUMBER
        (e.g. "3c") as float."""
        return getdouble(self.tk.call(
            'winfo', 'fpixels', self._w, number))
    def winfo_geometry(self):
        """Return geometry string for this widget in the form "widthxheight+X+Y"."""
        return self.tk.call('winfo', 'geometry', self._w)
    def winfo_height(self):
        """Return height of this widget."""
        return getint(
            self.tk.call('winfo', 'height', self._w))
    def winfo_id(self):
        """Return identifier ID for this widget."""
        # base 0: Tk may return the id as hex (e.g. "0x1a00003")
        return int(self.tk.call('winfo', 'id', self._w), 0)
    def winfo_interps(self, displayof=0):
        """Return the name of all Tcl interpreters for this display."""
        args = ('winfo', 'interps') + self._displayof(displayof)
        return self.tk.splitlist(self.tk.call(args))
    def winfo_ismapped(self):
        """Return true if this widget is mapped."""
        return getint(
            self.tk.call('winfo', 'ismapped', self._w))
    def winfo_manager(self):
        """Return the window manager name for this widget."""
        return self.tk.call('winfo', 'manager', self._w)
    def winfo_name(self):
        """Return the name of this widget."""
        return self.tk.call('winfo', 'name', self._w)
    def winfo_parent(self):
        """Return the name of the parent of this widget."""
        return self.tk.call('winfo', 'parent', self._w)
    def winfo_pathname(self, id, displayof=0):
        """Return the pathname of the widget given by ID."""
        args = ('winfo', 'pathname') \
               + self._displayof(displayof) + (id,)
        return self.tk.call(args)
    def winfo_pixels(self, number):
        """Rounded integer value of winfo_fpixels."""
        return getint(
            self.tk.call('winfo', 'pixels', self._w, number))
    def winfo_pointerx(self):
        """Return the x coordinate of the pointer on the root window."""
        return getint(
            self.tk.call('winfo', 'pointerx', self._w))
    def winfo_pointerxy(self):
        """Return a tuple of x and y coordinates of the pointer on the root window."""
        return self._getints(
            self.tk.call('winfo', 'pointerxy', self._w))
    def winfo_pointery(self):
        """Return the y coordinate of the pointer on the root window."""
        return getint(
            self.tk.call('winfo', 'pointery', self._w))
    def winfo_reqheight(self):
        """Return requested height of this widget."""
        return getint(
            self.tk.call('winfo', 'reqheight', self._w))
    def winfo_reqwidth(self):
        """Return requested width of this widget."""
        return getint(
            self.tk.call('winfo', 'reqwidth', self._w))
    def winfo_rgb(self, color):
        """Return tuple of decimal values for red, green, blue for
        COLOR in this widget."""
        return self._getints(
            self.tk.call('winfo', 'rgb', self._w, color))
    def winfo_rootx(self):
        """Return x coordinate of upper left corner of this widget on the
        root window."""
        return getint(
            self.tk.call('winfo', 'rootx', self._w))
    def winfo_rooty(self):
        """Return y coordinate of upper left corner of this widget on the
        root window."""
        return getint(
            self.tk.call('winfo', 'rooty', self._w))
    def winfo_screen(self):
        """Return the screen name of this widget."""
        return self.tk.call('winfo', 'screen', self._w)
    def winfo_screencells(self):
        """Return the number of the cells in the colormap of the screen
        of this widget."""
        return getint(
            self.tk.call('winfo', 'screencells', self._w))
    def winfo_screendepth(self):
        """Return the number of bits per pixel of the root window of the
        screen of this widget."""
        return getint(
            self.tk.call('winfo', 'screendepth', self._w))
    def winfo_screenheight(self):
        """Return the number of pixels of the height of the screen of this widget
        in pixel."""
        return getint(
            self.tk.call('winfo', 'screenheight', self._w))
    def winfo_screenmmheight(self):
        """Return the number of pixels of the height of the screen of
        this widget in mm."""
        return getint(
            self.tk.call('winfo', 'screenmmheight', self._w))
    def winfo_screenmmwidth(self):
        """Return the number of pixels of the width of the screen of
        this widget in mm."""
        return getint(
            self.tk.call('winfo', 'screenmmwidth', self._w))
    def winfo_screenvisual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the default
        colormodel of this screen."""
        return self.tk.call('winfo', 'screenvisual', self._w)
    def winfo_screenwidth(self):
        """Return the number of pixels of the width of the screen of
        this widget in pixel."""
        return getint(
            self.tk.call('winfo', 'screenwidth', self._w))
    def winfo_server(self):
        """Return information of the X-Server of the screen of this widget in
        the form "XmajorRminor vendor vendorVersion"."""
        return self.tk.call('winfo', 'server', self._w)
    def winfo_toplevel(self):
        """Return the toplevel widget of this widget."""
        return self._nametowidget(self.tk.call(
            'winfo', 'toplevel', self._w))
    def winfo_viewable(self):
        """Return true if the widget and all its higher ancestors are mapped."""
        return getint(
            self.tk.call('winfo', 'viewable', self._w))
    def winfo_visual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the
        colormodel of this widget."""
        return self.tk.call('winfo', 'visual', self._w)
    def winfo_visualid(self):
        """Return the X identifier for the visual for this widget."""
        return self.tk.call('winfo', 'visualid', self._w)
    def winfo_visualsavailable(self, includeids=0):
        """Return a list of all visuals available for the screen
        of this widget.
        Each item in the list consists of a visual name (see winfo_visual), a
        depth and if INCLUDEIDS=1 is given also the X identifier."""
        data = self.tk.split(
            self.tk.call('winfo', 'visualsavailable', self._w,
                         includeids and 'includeids' or None))
        # tk.split returns a bare string when only one visual is available;
        # normalize to a list of tuples before parsing.
        if type(data) is StringType:
            data = [self.tk.split(data)]
        return map(self.__winfo_parseitem, data)
    def __winfo_parseitem(self, t):
        """Internal function.  Convert one (name, depth[, id]) entry,
        keeping the visual name and parsing the numeric fields."""
        return t[:1] + tuple(map(self.__winfo_getint, t[1:]))
    def __winfo_getint(self, x):
        """Internal function.  Parse an int, accepting a 0x prefix."""
        return int(x, 0)
    def winfo_vrootheight(self):
        """Return the height of the virtual root window associated with this
        widget in pixels. If there is no virtual root window return the
        height of the screen."""
        return getint(
            self.tk.call('winfo', 'vrootheight', self._w))
    def winfo_vrootwidth(self):
        """Return the width of the virtual root window associated with this
        widget in pixel. If there is no virtual root window return the
        width of the screen."""
        return getint(
            self.tk.call('winfo', 'vrootwidth', self._w))
    def winfo_vrootx(self):
        """Return the x offset of the virtual root relative to the root
        window of the screen of this widget."""
        return getint(
            self.tk.call('winfo', 'vrootx', self._w))
    def winfo_vrooty(self):
        """Return the y offset of the virtual root relative to the root
        window of the screen of this widget."""
        return getint(
            self.tk.call('winfo', 'vrooty', self._w))
    def winfo_width(self):
        """Return the width of this widget."""
        return getint(
            self.tk.call('winfo', 'width', self._w))
    def winfo_x(self):
        """Return the x coordinate of the upper left corner of this widget
        in the parent."""
        return getint(
            self.tk.call('winfo', 'x', self._w))
    def winfo_y(self):
        """Return the y coordinate of the upper left corner of this widget
        in the parent."""
        return getint(
            self.tk.call('winfo', 'y', self._w))
    def update(self):
        """Enter event loop until all pending events have been processed by Tcl."""
        self.tk.call('update')
    def update_idletasks(self):
        """Enter event loop until all idle callbacks have been called. This
        will update the display of windows but not process events caused by
        the user."""
        self.tk.call('update', 'idletasks')
    # --- event binding ----------------------------------------------------
    def bindtags(self, tagList=None):
        """Set or get the list of bindtags for this widget.

        With no argument return the list of all bindtags associated with
        this widget. With a list of strings as argument the bindtags are
        set to this list. The bindtags determine in which order events are
        processed (see bind)."""
        if tagList is None:
            return self.tk.splitlist(
                self.tk.call('bindtags', self._w))
        else:
            self.tk.call('bindtags', self._w, tagList)
    def _bind(self, what, sequence, func, add, needcleanup=1):
        """Internal function.  Shared implementation for bind, bind_all and
        bind_class; WHAT is the leading part of the Tcl command as a tuple."""
        if type(func) is StringType:
            # func is already a Tcl script; install it verbatim
            self.tk.call(what + (sequence, func))
        elif func:
            # register the Python callback and wrap it in a Tcl script that
            # translates a "break" return value into the Tcl break command
            funcid = self._register(func, self._substitute,
                                    needcleanup)
            cmd = ('%sif {"[%s %s]" == "break"} break\n'
                   %
                   (add and '+' or '',
                    funcid, self._subst_format_str))
            self.tk.call(what + (sequence, cmd))
            return funcid
        elif sequence:
            # query: return the script bound to this sequence
            return self.tk.call(what + (sequence,))
        else:
            # query: return all bound sequences
            return self.tk.splitlist(self.tk.call(what))
    def bind(self, sequence=None, func=None, add=None):
        """Bind to this widget at event SEQUENCE a call to function FUNC.

        SEQUENCE is a string of concatenated event
        patterns. An event pattern is of the form
        <MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
        of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
        Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
        B3, Alt, Button4, B4, Double, Button5, B5 Triple,
        Mod1, M1. TYPE is one of Activate, Enter, Map,
        ButtonPress, Button, Expose, Motion, ButtonRelease
        FocusIn, MouseWheel, Circulate, FocusOut, Property,
        Colormap, Gravity Reparent, Configure, KeyPress, Key,
        Unmap, Deactivate, KeyRelease Visibility, Destroy,
        Leave and DETAIL is the button number for ButtonPress,
        ButtonRelease and DETAIL is the Keysym for KeyPress and
        KeyRelease. Examples are
        <Control-Button-1> for pressing Control and mouse button 1 or
        <Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
        An event pattern can also be a virtual event of the form
        <<AString>> where AString can be arbitrary. This
        event can be generated by event_generate.
        If events are concatenated they must appear shortly
        after each other.

        FUNC will be called if the event sequence occurs with an
        instance of Event as argument. If the return value of FUNC is
        "break" no further bound function is invoked.

        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function.

        Bind will return an identifier to allow deletion of the bound function with
        unbind without memory leak.

        If FUNC or SEQUENCE is omitted the bound function or list
        of bound events are returned."""
        return self._bind(('bind', self._w), sequence, func, add)
    def unbind(self, sequence, funcid=None):
        """Unbind for this widget for event SEQUENCE  the
        function identified with FUNCID."""
        self.tk.call('bind', self._w, sequence, '')
        if funcid:
            self.deletecommand(funcid)
    def bind_all(self, sequence=None, func=None, add=None):
        """Bind to all widgets at an event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function. See bind for the return value."""
        return self._bind(('bind', 'all'), sequence, func, add, 0)
    def unbind_all(self, sequence):
        """Unbind for all widgets for event SEQUENCE all functions."""
        self.tk.call('bind', 'all' , sequence, '')
    def bind_class(self, className, sequence=None, func=None, add=None):
        """Bind to widgets with bindtag CLASSNAME at event
        SEQUENCE a call of function FUNC. An additional
        boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or
        whether it will replace the previous function. See bind for
        the return value."""
        return self._bind(('bind', className), sequence, func, add, 0)
    def unbind_class(self, className, sequence):
        """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE
        all functions."""
        self.tk.call('bind', className , sequence, '')
    def mainloop(self, n=0):
        """Call the mainloop of Tk."""
        self.tk.mainloop(n)
    def quit(self):
        """Quit the Tcl interpreter. All widgets will be destroyed."""
        self.tk.quit()
    # --- Tcl result conversion helpers ------------------------------------
    # Each returns None when the Tcl result string is empty.
    def _getints(self, string):
        """Internal function.  Convert a Tcl list string to a tuple of ints."""
        if string:
            return tuple(map(getint, self.tk.splitlist(string)))
    def _getdoubles(self, string):
        """Internal function.  Convert a Tcl list string to a tuple of floats."""
        if string:
            return tuple(map(getdouble, self.tk.splitlist(string)))
    def _getboolean(self, string):
        """Internal function.  Convert a Tcl boolean string to a Python bool."""
        if string:
            return self.tk.getboolean(string)
    def _displayof(self, displayof):
        """Internal function.  Build the optional ('-displayof', window)
        argument pair; displayof=None means use this widget's display."""
        if displayof:
            return ('-displayof', displayof)
        if displayof is None:
            return ('-displayof', self._w)
        return ()
    @property
    def _windowingsystem(self):
        """Internal function.  Name of the windowing system ('x11', 'win32'
        or 'aqua'), cached on the root widget after the first query."""
        try:
            return self._root()._windowingsystem_cached
        except AttributeError:
            ws = self._root()._windowingsystem_cached = \
                        self.tk.call('tk', 'windowingsystem')
            return ws
    def _options(self, cnf, kw = None):
        """Internal function.  Convert a configuration dictionary (and
        optional keyword dict) into a flat tuple of Tcl option arguments
        of the form ('-key', value, '-key', value, ...)."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        else:
            cnf = _cnfmerge(cnf)
        res = ()
        for k, v in cnf.items():
            if v is not None:
                # trailing underscore lets callers pass reserved words
                # (e.g. class_) as keyword arguments
                if k[-1] == '_': k = k[:-1]
                if hasattr(v, '__call__'):
                    # callables are registered as Tcl commands by name
                    v = self._register(v)
                elif isinstance(v, (tuple, list)):
                    nv = []
                    for item in v:
                        if not isinstance(item, (basestring, int, long)):
                            # non-scalar element: pass the sequence through
                            # unchanged and let Tkinter quote it
                            break
                        elif isinstance(item, (int, long)):
                            nv.append('%d' % item)
                        else:
                            # format it to proper Tcl code if it contains space
                            nv.append(_stringify(item))
                    else:
                        v = ' '.join(nv)
                res = res + ('-'+k, v)
        return res
def nametowidget(self, name):
"""Return the Tkinter instance of a widget identified by
its Tcl name NAME."""
name = str(name).split('.')
w = self
if not name[0]:
w = w._root()
name = name[1:]
for n in name:
if not n:
break
w = w.children[n]
return w
_nametowidget = nametowidget
    def _register(self, func, subst=None, needcleanup=1):
        """Return a newly created Tcl function. If this
        function is called, the Python function FUNC will
        be executed. An optional function SUBST can
        be given which will be executed before FUNC.

        The command name is derived from id() of the wrapper plus the
        function name when available, to keep names unique yet readable.
        If NEEDCLEANUP is true the name is remembered in _tclCommands so
        destroy() can delete the Tcl command later."""
        f = CallWrapper(func, subst, self).__call__
        name = repr(id(f))
        try:
            # bound method: document using the underlying function's name
            func = func.im_func
        except AttributeError:
            pass
        try:
            name = name + func.__name__
        except AttributeError:
            pass
        self.tk.createcommand(name, f)
        if needcleanup:
            if self._tclCommands is None:
                self._tclCommands = []
            self._tclCommands.append(name)
        return name
    register = _register
    def _root(self):
        """Internal function.  Return the root window of this widget's
        hierarchy by following the chain of masters."""
        w = self
        while w.master: w = w.master
        return w
    # Percent substitutions passed to the Tcl bind script; _substitute
    # decodes them positionally, so the two must stay in sync.
    _subst_format = ('%#', '%b', '%f', '%h', '%k',
             '%s', '%t', '%w', '%x', '%y',
             '%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
    _subst_format_str = " ".join(_subst_format)
    def _substitute(self, *args):
        """Internal function.  Decode the positional percent-substitution
        strings produced by the Tcl bind script (see _subst_format) into
        a single Event instance, returned as a 1-tuple of callback args."""
        if len(args) != len(self._subst_format): return args
        getboolean = self.tk.getboolean

        getint = int
        def getint_event(s):
            """Tk changed behavior in 8.4.2, returning "??" rather more often."""
            try:
                return int(s)
            except ValueError:
                return s

        # order must match _subst_format exactly
        nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
        # Missing: (a, c, d, m, o, v, B, R)
        e = Event()
        # serial field: valid for all events
        # number of button: ButtonPress and ButtonRelease events only
        # height field: Configure, ConfigureRequest, Create,
        # ResizeRequest, and Expose events only
        # keycode field: KeyPress and KeyRelease events only
        # time field: "valid for events that contain a time field"
        # width field: Configure, ConfigureRequest, Create, ResizeRequest,
        # and Expose events only
        # x field: "valid for events that contain an x field"
        # y field: "valid for events that contain a y field"
        # keysym as decimal: KeyPress and KeyRelease events only
        # x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
        # KeyRelease, and Motion events
        e.serial = getint(nsign)
        e.num = getint_event(b)
        try: e.focus = getboolean(f)
        except TclError: pass
        e.height = getint_event(h)
        e.keycode = getint_event(k)
        e.state = getint_event(s)
        e.time = getint_event(t)
        e.width = getint_event(w)
        e.x = getint_event(x)
        e.y = getint_event(y)
        e.char = A
        try: e.send_event = getboolean(E)
        except TclError: pass
        e.keysym = K
        e.keysym_num = getint_event(N)
        e.type = T
        try:
            e.widget = self._nametowidget(W)
        except KeyError:
            # widget may already be destroyed or unknown; keep the raw name
            e.widget = W
        e.x_root = getint_event(X)
        e.y_root = getint_event(Y)
        try:
            e.delta = getint(D)
        except ValueError:
            e.delta = 0
        return (e,)
    def _report_exception(self):
        """Internal function.  Forward the exception currently being
        handled to the root window's report_callback_exception hook."""
        import sys
        # NOTE: sys.exc_type/exc_value/exc_traceback are deprecated
        # Python 2 globals equivalent to sys.exc_info()
        exc, val, tb = sys.exc_type, sys.exc_value, sys.exc_traceback
        root = self._root()
        root.report_callback_exception(exc, val, tb)
    def _getconfigure(self, *args):
        """Call Tcl configure command and return the result as a dict.

        Keys are option names without the leading dash; values are the
        full option tuples (name, dbName, dbClass, default, current)."""
        cnf = {}
        for x in self.tk.splitlist(self.tk.call(*args)):
            x = self.tk.splitlist(x)
            cnf[x[0][1:]] = (x[0][1:],) + x[1:]
        return cnf
    def _getconfigure1(self, *args):
        # single-option variant: return one option tuple, dash stripped
        x = self.tk.splitlist(self.tk.call(*args))
        return (x[0][1:],) + x[1:]
    def _configure(self, cmd, cnf, kw):
        """Internal function.  Dispatch the three call styles of configure:
        no args -> full option dict, a string -> that option's tuple,
        a dict/kwargs -> apply the settings (returns None)."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        elif cnf:
            cnf = _cnfmerge(cnf)
        if cnf is None:
            return self._getconfigure(_flatten((self._w, cmd)))
        if type(cnf) is StringType:
            return self._getconfigure1(_flatten((self._w, cmd, '-'+cnf)))
        self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
    # These used to be defined in Widget:
    def configure(self, cnf=None, **kw):
        """Configure resources of a widget.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method keys.
        """
        return self._configure('configure', cnf, kw)
    config = configure
    def cget(self, key):
        """Return the resource value for a KEY given as string."""
        return self.tk.call(self._w, 'cget', '-' + key)
    __getitem__ = cget
    def __setitem__(self, key, value):
        # widget[key] = value delegates to configure
        self.configure({key: value})
    def __contains__(self, key):
        # explicit: otherwise __getitem__ would make "in" iterate forever
        raise TypeError("Tkinter objects don't support 'in' tests.")
    def keys(self):
        """Return a list of all resource names of this widget."""
        splitlist = self.tk.splitlist
        return [splitlist(x)[0][1:] for x in
                splitlist(self.tk.call(self._w, 'configure'))]
    def __str__(self):
        """Return the window path name of this widget."""
        return self._w
    # Pack methods that apply to the master
    _noarg_ = ['_noarg_']  # sentinel: distinguishes "no argument" from None/0
    def pack_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.

        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given the current setting will be returned.
        """
        if flag is Misc._noarg_:
            return self._getboolean(self.tk.call(
                'pack', 'propagate', self._w))
        else:
            self.tk.call('pack', 'propagate', self._w, flag)
    propagate = pack_propagate
    def pack_slaves(self):
        """Return a list of all slaves of this widget
        in its packing order."""
        return map(self._nametowidget,
                   self.tk.splitlist(
                       self.tk.call('pack', 'slaves', self._w)))
    slaves = pack_slaves
    # Place method that applies to the master
    def place_slaves(self):
        """Return a list of all slaves of this widget
        in its packing order."""
        return map(self._nametowidget,
                   self.tk.splitlist(
                       self.tk.call(
                           'place', 'slaves', self._w)))
    # Grid methods that apply to the master
    def grid_bbox(self, column=None, row=None, col2=None, row2=None):
        """Return a tuple of integer coordinates for the bounding
        box of this widget controlled by the geometry manager grid.

        If COLUMN, ROW is given the bounding box applies from
        the cell with row and column 0 to the specified
        cell. If COL2 and ROW2 are given the bounding box
        starts at that cell.

        The returned integers specify the offset of the upper left
        corner in the master widget and the width and height.
        """
        args = ('grid', 'bbox', self._w)
        # column/row (and col2/row2) are only forwarded as pairs
        if column is not None and row is not None:
            args = args + (column, row)
        if col2 is not None and row2 is not None:
            args = args + (col2, row2)
        return self._getints(self.tk.call(*args)) or None
    bbox = grid_bbox
    def _gridconvvalue(self, value):
        """Internal function.  Convert a Tcl grid option value to a Python
        number where possible: '' -> None, '1.5' -> float, '3' -> int;
        anything unparsable is returned unchanged."""
        if isinstance(value, (str, _tkinter.Tcl_Obj)):
            try:
                svalue = str(value)
                if not svalue:
                    return None
                elif '.' in svalue:
                    return getdouble(svalue)
                else:
                    return getint(svalue)
            except ValueError:
                pass
        return value
    def _grid_configure(self, command, index, cnf, kw):
        """Internal function.  Shared implementation for
        grid_columnconfigure/grid_rowconfigure; COMMAND is
        'columnconfigure' or 'rowconfigure'."""
        if type(cnf) is StringType and not kw:
            # single option name given as a string: normalize to '-name'
            if cnf[-1:] == '_':
                cnf = cnf[:-1]
            if cnf[:1] != '-':
                cnf = '-'+cnf
            options = (cnf,)
        else:
            options = self._options(cnf, kw)
        if not options:
            # query form: return all options as a dict
            return _splitdict(
                self.tk,
                self.tk.call('grid', command, self._w, index),
                conv=self._gridconvvalue)
        res = self.tk.call(
                  ('grid', command, self._w, index)
                  + options)
        if len(options) == 1:
            # single-option query: return the converted value
            return self._gridconvvalue(res)
    def grid_columnconfigure(self, index, cnf={}, **kw):
        """Configure column INDEX of a grid.

        Valid resources are minsize (minimum size of the column),
        weight (how much does additional space propagate to this column)
        and pad (how much space to let additionally)."""
        return self._grid_configure('columnconfigure', index, cnf, kw)
    columnconfigure = grid_columnconfigure
    def grid_location(self, x, y):
        """Return a tuple of column and row which identify the cell
        at which the pixel at position X and Y inside the master
        widget is located."""
        return self._getints(
            self.tk.call(
                'grid', 'location', self._w, x, y)) or None
    def grid_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.

        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given, the current setting will be returned.
        """
        if flag is Misc._noarg_:
            return self._getboolean(self.tk.call(
                'grid', 'propagate', self._w))
        else:
            self.tk.call('grid', 'propagate', self._w, flag)
    def grid_rowconfigure(self, index, cnf={}, **kw):
        """Configure row INDEX of a grid.

        Valid resources are minsize (minimum size of the row),
        weight (how much does additional space propagate to this row)
        and pad (how much space to let additionally)."""
        return self._grid_configure('rowconfigure', index, cnf, kw)
    rowconfigure = grid_rowconfigure
    def grid_size(self):
        """Return a tuple of the number of column and rows in the grid."""
        return self._getints(
            self.tk.call('grid', 'size', self._w)) or None
    size = grid_size
    def grid_slaves(self, row=None, column=None):
        """Return a list of all slaves of this widget
        in its packing order.  Optionally restrict to one ROW or COLUMN."""
        args = ()
        if row is not None:
            args = args + ('-row', row)
        if column is not None:
            args = args + ('-column', column)
        return map(self._nametowidget,
                   self.tk.splitlist(self.tk.call(
                       ('grid', 'slaves', self._w) + args)))
    # Support for the "event" command, new in Tk 4.2.
    # By Case Roole.
    def event_add(self, virtual, *sequences):
        """Bind a virtual event VIRTUAL (of the form <<Name>>)
        to an event SEQUENCE such that the virtual event is triggered
        whenever SEQUENCE occurs."""
        args = ('event', 'add', virtual) + sequences
        self.tk.call(args)
    def event_delete(self, virtual, *sequences):
        """Unbind a virtual event VIRTUAL from SEQUENCE."""
        args = ('event', 'delete', virtual) + sequences
        self.tk.call(args)
    def event_generate(self, sequence, **kw):
        """Generate an event SEQUENCE. Additional
        keyword arguments specify parameter of the event
        (e.g. x, y, rootx, rooty)."""
        args = ('event', 'generate', self._w, sequence)
        for k, v in kw.items():
            # keyword name becomes the Tcl option name: x=5 -> -x 5
            args = args + ('-%s' % k, str(v))
        self.tk.call(args)
    def event_info(self, virtual=None):
        """Return a list of all virtual events or the information
        about the SEQUENCE bound to the virtual event VIRTUAL."""
        return self.tk.splitlist(
            self.tk.call('event', 'info', virtual))
    # Image related commands
    def image_names(self):
        """Return a list of all existing image names."""
        return self.tk.splitlist(self.tk.call('image', 'names'))
    def image_types(self):
        """Return a list of all available image types (e.g. photo bitmap)."""
        return self.tk.splitlist(self.tk.call('image', 'types'))
class CallWrapper:
    """Internal class. Stores function to call when some user
    defined Tcl function is called e.g. after an event occurred."""
    def __init__(self, func, subst, widget):
        """Store FUNC, SUBST and WIDGET as members."""
        self.func = func
        self.subst = subst
        self.widget = widget
    def __call__(self, *args):
        """Apply first function SUBST to arguments, than FUNC."""
        try:
            if self.subst:
                args = self.subst(*args)
            return self.func(*args)
        except SystemExit, msg:
            # let a deliberate exit propagate out of the Tcl callback
            raise SystemExit, msg
        except:
            # any other exception is routed to report_callback_exception
            # on the root window instead of crossing the Tcl boundary
            self.widget._report_exception()
class XView:
    """Mix-in class for querying and changing the horizontal position
    of a widget's window."""

    def xview(self, *args):
        """Query and change the horizontal position of the view."""
        result = self.tk.call(self._w, 'xview', *args)
        if args:
            return None
        return self._getdoubles(result)

    def xview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total width of the canvas is off-screen to the left."""
        self.tk.call(self._w, 'xview', 'moveto', fraction)

    def xview_scroll(self, number, what):
        """Shift the x-view according to NUMBER which is measured in "units"
        or "pages" (WHAT)."""
        self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
    """Mix-in class for querying and changing the vertical position
    of a widget's window."""

    def yview(self, *args):
        """Query and change the vertical position of the view."""
        result = self.tk.call(self._w, 'yview', *args)
        if args:
            return None
        return self._getdoubles(result)

    def yview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total height of the canvas is off-screen to the top."""
        self.tk.call(self._w, 'yview', 'moveto', fraction)

    def yview_scroll(self, number, what):
        """Shift the y-view according to NUMBER which is measured in
        "units" or "pages" (WHAT)."""
        self.tk.call(self._w, 'yview', 'scroll', number, what)
class Wm:
    """Provides functions for the communication with the window manager.

    All methods are thin wrappers around Tk's "wm" command; each wm_*
    method also has a short alias without the wm_ prefix."""
    def wm_aspect(self,
              minNumer=None, minDenom=None,
              maxNumer=None, maxDenom=None):
        """Instruct the window manager to set the aspect ratio (width/height)
        of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
        of the actual values if no argument is given."""
        return self._getints(
            self.tk.call('wm', 'aspect', self._w,
                         minNumer, minDenom,
                         maxNumer, maxDenom))
    aspect = wm_aspect
    def wm_attributes(self, *args):
        """This subcommand returns or sets platform specific attributes

        The first form returns a list of the platform specific flags and
        their values. The second form returns the value for the specific
        option. The third form sets one or more of the values. The values
        are as follows:

        On Windows, -disabled gets or sets whether the window is in a
        disabled state. -toolwindow gets or sets the style of the window
        to toolwindow (as defined in the MSDN). -topmost gets or sets
        whether this is a topmost window (displays above all other
        windows).

        On Macintosh, XXXXX

        On Unix, there are currently no special attribute values.
        """
        args = ('wm', 'attributes', self._w) + args
        return self.tk.call(args)
    attributes=wm_attributes
    def wm_client(self, name=None):
        """Store NAME in WM_CLIENT_MACHINE property of this widget. Return
        current value."""
        return self.tk.call('wm', 'client', self._w, name)
    client = wm_client
    def wm_colormapwindows(self, *wlist):
        """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
        of this widget. This list contains windows whose colormaps differ from their
        parents. Return current list of widgets if WLIST is empty."""
        if len(wlist) > 1:
            wlist = (wlist,) # Tk needs a list of windows here
        args = ('wm', 'colormapwindows', self._w) + wlist
        if wlist:
            self.tk.call(args)
        else:
            return map(self._nametowidget, self.tk.splitlist(self.tk.call(args)))
    colormapwindows = wm_colormapwindows
    def wm_command(self, value=None):
        """Store VALUE in WM_COMMAND property. It is the command
        which shall be used to invoke the application. Return current
        command if VALUE is None."""
        return self.tk.call('wm', 'command', self._w, value)
    command = wm_command
    def wm_deiconify(self):
        """Deiconify this widget. If it was never mapped it will not be mapped.
        On Windows it will raise this widget and give it the focus."""
        return self.tk.call('wm', 'deiconify', self._w)
    deiconify = wm_deiconify
    def wm_focusmodel(self, model=None):
        """Set focus model to MODEL. "active" means that this widget will claim
        the focus itself, "passive" means that the window manager shall give
        the focus. Return current focus model if MODEL is None."""
        return self.tk.call('wm', 'focusmodel', self._w, model)
    focusmodel = wm_focusmodel
    def wm_frame(self):
        """Return identifier for decorative frame of this widget if present."""
        return self.tk.call('wm', 'frame', self._w)
    frame = wm_frame
    def wm_geometry(self, newGeometry=None):
        """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
        current value if None is given."""
        return self.tk.call('wm', 'geometry', self._w, newGeometry)
    geometry = wm_geometry
    def wm_grid(self,
         baseWidth=None, baseHeight=None,
         widthInc=None, heightInc=None):
        """Instruct the window manager that this widget shall only be
        resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
        height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
        number of grid units requested in Tk_GeometryRequest."""
        return self._getints(self.tk.call(
            'wm', 'grid', self._w,
            baseWidth, baseHeight, widthInc, heightInc))
    grid = wm_grid
    def wm_group(self, pathName=None):
        """Set the group leader widgets for related widgets to PATHNAME. Return
        the group leader of this widget if None is given."""
        return self.tk.call('wm', 'group', self._w, pathName)
    group = wm_group
    def wm_iconbitmap(self, bitmap=None, default=None):
        """Set bitmap for the iconified widget to BITMAP. Return
        the bitmap if None is given.

        Under Windows, the DEFAULT parameter can be used to set the icon
        for the widget and any descendents that don't have an icon set
        explicitly.  DEFAULT can be the relative path to a .ico file
        (example: root.iconbitmap(default='myicon.ico') ).  See Tk
        documentation for more information."""
        if default:
            return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
        else:
            return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
    iconbitmap = wm_iconbitmap
    def wm_iconify(self):
        """Display widget as icon."""
        return self.tk.call('wm', 'iconify', self._w)
    iconify = wm_iconify
    def wm_iconmask(self, bitmap=None):
        """Set mask for the icon bitmap of this widget. Return the
        mask if None is given."""
        return self.tk.call('wm', 'iconmask', self._w, bitmap)
    iconmask = wm_iconmask
    def wm_iconname(self, newName=None):
        """Set the name of the icon for this widget. Return the name if
        None is given."""
        return self.tk.call('wm', 'iconname', self._w, newName)
    iconname = wm_iconname
    def wm_iconposition(self, x=None, y=None):
        """Set the position of the icon of this widget to X and Y. Return
        a tuple of the current values of X and X if None is given."""
        return self._getints(self.tk.call(
            'wm', 'iconposition', self._w, x, y))
    iconposition = wm_iconposition
    def wm_iconwindow(self, pathName=None):
        """Set widget PATHNAME to be displayed instead of icon. Return the current
        value if None is given."""
        return self.tk.call('wm', 'iconwindow', self._w, pathName)
    iconwindow = wm_iconwindow
    def wm_maxsize(self, width=None, height=None):
        """Set max WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'maxsize', self._w, width, height))
    maxsize = wm_maxsize
    def wm_minsize(self, width=None, height=None):
        """Set min WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'minsize', self._w, width, height))
    minsize = wm_minsize
    def wm_overrideredirect(self, boolean=None):
        """Instruct the window manager to ignore this widget
        if BOOLEAN is given with 1. Return the current value if None
        is given."""
        return self._getboolean(self.tk.call(
            'wm', 'overrideredirect', self._w, boolean))
    overrideredirect = wm_overrideredirect
    def wm_positionfrom(self, who=None):
        """Instruct the window manager that the position of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'positionfrom', self._w, who)
    positionfrom = wm_positionfrom
    def wm_protocol(self, name=None, func=None):
        """Bind function FUNC to command NAME for this widget.
        Return the function bound to NAME if None is given. NAME could be
        e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"."""
        if hasattr(func, '__call__'):
            # register the Python callback as a Tcl command name
            command = self._register(func)
        else:
            command = func
        return self.tk.call(
            'wm', 'protocol', self._w, name, command)
    protocol = wm_protocol
    def wm_resizable(self, width=None, height=None):
        """Instruct the window manager whether this width can be resized
        in WIDTH or HEIGHT. Both values are boolean values."""
        return self.tk.call('wm', 'resizable', self._w, width, height)
    resizable = wm_resizable
    def wm_sizefrom(self, who=None):
        """Instruct the window manager that the size of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'sizefrom', self._w, who)
    sizefrom = wm_sizefrom
    def wm_state(self, newstate=None):
        """Query or set the state of this widget as one of normal, icon,
        iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
        return self.tk.call('wm', 'state', self._w, newstate)
    state = wm_state
    def wm_title(self, string=None):
        """Set the title of this widget."""
        return self.tk.call('wm', 'title', self._w, string)
    title = wm_title
    def wm_transient(self, master=None):
        """Instruct the window manager that this widget is transient
        with regard to widget MASTER."""
        return self.tk.call('wm', 'transient', self._w, master)
    transient = wm_transient
    def wm_withdraw(self):
        """Withdraw this widget from the screen such that it is unmapped
        and forgotten by the window manager. Re-draw it with wm_deiconify."""
        return self.tk.call('wm', 'withdraw', self._w)
    withdraw = wm_withdraw
class Tk(Misc, Wm):
    """Toplevel widget of Tk which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""
    # Tcl path name of the root window; children extend this with '.name'.
    _w = '.'
    def __init__(self, screenName=None, baseName=None, className='Tk',
                 useTk=1, sync=0, use=None):
        """Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
        be created. BASENAME will be used for the identification of the profile file (see
        readprofile).
        It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
        is the name of the widget class."""
        self.master = None
        self.children = {}
        # Stays 0 until _loadtk() runs; lets loadtk() be called lazily
        # for interpreters created with useTk=0 (see the Tcl() factory).
        self._tkloaded = 0
        # to avoid recursions in the getattr code in case of failure, we
        # ensure that self.tk is always _something_.
        self.tk = None
        if baseName is None:
            import os
            baseName = os.path.basename(sys.argv[0])
            baseName, ext = os.path.splitext(baseName)
            if ext not in ('.py', '.pyc', '.pyo'):
                baseName = baseName + ext
        interactive = 0
        # wantobjects is a module-level flag controlling whether _tkinter
        # returns Tcl objects or strings.
        self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
        if useTk:
            self._loadtk()
        if not sys.flags.ignore_environment:
            # Issue #16248: Honor the -E flag to avoid code injection.
            self.readprofile(baseName, className)
    def loadtk(self):
        # Load the Tk GUI subsystem on demand; no-op if already loaded.
        if not self._tkloaded:
            self.tk.loadtk()
            self._loadtk()
    def _loadtk(self):
        """Internal function. Finish Tk initialization: sanity-check the
        Tcl/Tk versions against the compiled _tkinter, register the
        'tkerror' and 'exit' commands, and install this instance as the
        default root if none exists yet."""
        self._tkloaded = 1
        global _default_root
        # Version sanity checks
        tk_version = self.tk.getvar('tk_version')
        if tk_version != _tkinter.TK_VERSION:
            raise RuntimeError, \
            "tk.h version (%s) doesn't match libtk.a version (%s)" \
            % (_tkinter.TK_VERSION, tk_version)
        # Under unknown circumstances, tcl_version gets coerced to float
        tcl_version = str(self.tk.getvar('tcl_version'))
        if tcl_version != _tkinter.TCL_VERSION:
            raise RuntimeError, \
            "tcl.h version (%s) doesn't match libtcl.a version (%s)" \
            % (_tkinter.TCL_VERSION, tcl_version)
        if TkVersion < 4.0:
            raise RuntimeError, \
            "Tk 4.0 or higher is required; found Tk %s" \
            % str(TkVersion)
        # Create and register the tkerror and exit commands
        # We need to inline parts of _register here, _register
        # would register differently-named commands.
        if self._tclCommands is None:
            self._tclCommands = []
        self.tk.createcommand('tkerror', _tkerror)
        self.tk.createcommand('exit', _exit)
        self._tclCommands.append('tkerror')
        self._tclCommands.append('exit')
        if _support_default_root and not _default_root:
            _default_root = self
        self.protocol("WM_DELETE_WINDOW", self.destroy)
    def destroy(self):
        """Destroy this and all descendants widgets. This will
        end the application of this Tcl interpreter."""
        for c in self.children.values(): c.destroy()
        self.tk.call('destroy', self._w)
        Misc.destroy(self)
        # Drop the module-level default root so a new Tk() can take over.
        global _default_root
        if _support_default_root and _default_root is self:
            _default_root = None
    def readprofile(self, baseName, className):
        """Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
        the Tcl Interpreter and calls execfile on BASENAME.py and CLASSNAME.py if
        such a file exists in the home directory."""
        import os
        if 'HOME' in os.environ: home = os.environ['HOME']
        else: home = os.curdir
        class_tcl = os.path.join(home, '.%s.tcl' % className)
        class_py = os.path.join(home, '.%s.py' % className)
        base_tcl = os.path.join(home, '.%s.tcl' % baseName)
        base_py = os.path.join(home, '.%s.py' % baseName)
        # NOTE(review): the local name 'dir' shadows the builtin; it is the
        # globals namespace handed to the profile scripts below.
        dir = {'self': self}
        exec 'from Tkinter import *' in dir
        # Class-wide profile files are sourced/executed before the
        # application-specific (baseName) ones.
        if os.path.isfile(class_tcl):
            self.tk.call('source', class_tcl)
        if os.path.isfile(class_py):
            execfile(class_py, dir)
        if os.path.isfile(base_tcl):
            self.tk.call('source', base_tcl)
        if os.path.isfile(base_py):
            execfile(base_py, dir)
    def report_callback_exception(self, exc, val, tb):
        """Report callback exception on sys.stderr.
        Applications may want to override this internal function, and
        should when sys.stderr is None."""
        import traceback, sys
        print >>sys.stderr, "Exception in Tkinter callback"
        # Mirror what the interactive interpreter does so that
        # pdb.pm()/traceback tools can inspect the failure afterwards.
        sys.last_type = exc
        sys.last_value = val
        sys.last_traceback = tb
        traceback.print_exception(exc, val, tb)
    def __getattr__(self, attr):
        "Delegate attribute access to the interpreter object"
        return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_columnconfigure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
    """Return a Tk instance that by default (useTk=0) does not initialize
    the Tk GUI subsystem, providing access to a plain Tcl interpreter.
    Call loadtk() on the result to bring up Tk later if needed."""
    return Tk(screenName, baseName, className, useTk)
class Pack:
    """Geometry manager Pack.
    Base class to use the methods pack_* in every widget."""
    def pack_configure(self, cnf={}, **kw):
        """Pack a widget in the parent widget. Use as options:
        after=widget - pack it after you have packed widget
        anchor=NSEW (or subset) - position widget according to
                                  given direction
        before=widget - pack it before you will pack widget
        expand=bool - expand widget if parent size grows
        fill=NONE or X or Y or BOTH - fill widget if widget grows
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        side=TOP or BOTTOM or LEFT or RIGHT -  where to add this widget.
        """
        self.tk.call(
              ('pack', 'configure', self._w)
              + self._options(cnf, kw))
    # Short aliases kept for backwards compatibility.
    pack = configure = config = pack_configure
    def pack_forget(self):
        """Unmap this widget and do not use it for the packing order."""
        self.tk.call('pack', 'forget', self._w)
    forget = pack_forget
    def pack_info(self):
        """Return information about the packing options
        for this widget."""
        d = _splitdict(self.tk, self.tk.call('pack', 'info', self._w))
        # Tcl reports the 'in' option as a window path name; translate it
        # back to the actual widget object.
        if 'in' in d:
            d['in'] = self.nametowidget(d['in'])
        return d
    info = pack_info
    # Master-side methods live on Misc (common to Tk and interior widgets);
    # re-exported here for compatibility.
    propagate = pack_propagate = Misc.pack_propagate
    slaves = pack_slaves = Misc.pack_slaves
class Place:
    """Geometry manager Place.
    Base class to use the methods place_* in every widget."""
    def place_configure(self, cnf={}, **kw):
        """Place a widget in the parent widget. Use as options:
        in=master - master relative to which the widget is placed
        in_=master - see 'in' option description
        x=amount - locate anchor of this widget at position x of master
        y=amount - locate anchor of this widget at position y of master
        relx=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to width of master (1.0 is right edge)
        rely=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to height of master (1.0 is bottom edge)
        anchor=NSEW (or subset) - position anchor according to given direction
        width=amount - width of this widget in pixel
        height=amount - height of this widget in pixel
        relwidth=amount - width of this widget between 0.0 and 1.0
                          relative to width of master (1.0 is the same width
                          as the master)
        relheight=amount - height of this widget between 0.0 and 1.0
                           relative to height of master (1.0 is the same
                           height as the master)
        bordermode="inside" or "outside" - whether to take border width of
                                           master widget into account
        """
        self.tk.call(
              ('place', 'configure', self._w)
              + self._options(cnf, kw))
    # Short aliases kept for backwards compatibility.
    place = configure = config = place_configure
    def place_forget(self):
        """Unmap this widget."""
        self.tk.call('place', 'forget', self._w)
    forget = place_forget
    def place_info(self):
        """Return information about the placing options
        for this widget."""
        d = _splitdict(self.tk, self.tk.call('place', 'info', self._w))
        # Translate the 'in' window path name back to a widget object.
        if 'in' in d:
            d['in'] = self.nametowidget(d['in'])
        return d
    info = place_info
    # Master-side method lives on Misc; re-exported here for compatibility.
    slaves = place_slaves = Misc.place_slaves
class Grid:
    """Geometry manager Grid.
    Base class to use the methods grid_* in every widget."""
    # Thanks to Masazumi Yoshikawa (yosikawa@isi.edu)
    def grid_configure(self, cnf={}, **kw):
        """Position a widget in the parent widget in a grid. Use as options:
        column=number - use cell identified with given column (starting with 0)
        columnspan=number - this widget will span several columns
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        row=number - use cell identified with given row (starting with 0)
        rowspan=number - this widget will span several rows
        sticky=NSEW - if cell is larger on which sides will this
                      widget stick to the cell boundary
        """
        self.tk.call(
              ('grid', 'configure', self._w)
              + self._options(cnf, kw))
    # Short aliases kept for backwards compatibility.
    grid = configure = config = grid_configure
    bbox = grid_bbox = Misc.grid_bbox
    columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure
    def grid_forget(self):
        """Unmap this widget."""
        self.tk.call('grid', 'forget', self._w)
    forget = grid_forget
    def grid_remove(self):
        """Unmap this widget but remember the grid options."""
        self.tk.call('grid', 'remove', self._w)
    def grid_info(self):
        """Return information about the options
        for positioning this widget in a grid."""
        d = _splitdict(self.tk, self.tk.call('grid', 'info', self._w))
        # Translate the 'in' window path name back to a widget object.
        if 'in' in d:
            d['in'] = self.nametowidget(d['in'])
        return d
    info = grid_info
    # Master-side methods live on Misc; re-exported here for compatibility.
    location = grid_location = Misc.grid_location
    propagate = grid_propagate = Misc.grid_propagate
    rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
    size = grid_size = Misc.grid_size
    slaves = grid_slaves = Misc.grid_slaves
class BaseWidget(Misc):
    """Internal class. Shared creation/destruction machinery for all
    widgets: resolves the master, assigns a Tcl window path name, and
    maintains the master's children registry."""
    def _setup(self, master, cnf):
        """Internal function. Sets up information about children."""
        # A missing master falls back to the process-wide default root,
        # creating one on first use if allowed.
        if _support_default_root:
            global _default_root
            if not master:
                if not _default_root:
                    _default_root = Tk()
                master = _default_root
        self.master = master
        self.tk = master.tk
        name = None
        if 'name' in cnf:
            name = cnf['name']
            del cnf['name']
        if not name:
            # Default widget name: the id() of the instance, guaranteed
            # unique for the widget's lifetime.
            name = repr(id(self))
        self._name = name
        # Build the Tcl path name: root is '.', children are dot-joined.
        if master._w=='.':
            self._w = '.' + name
        else:
            self._w = master._w + '.' + name
        self.children = {}
        # A same-named sibling is destroyed first, mirroring Tcl's
        # one-window-per-path-name rule.
        if self._name in self.master.children:
            self.master.children[self._name].destroy()
        self.master.children[self._name] = self
    def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
        """Construct a widget with the parent widget MASTER, a name WIDGETNAME
        and appropriate options."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        self.widgetName = widgetName
        BaseWidget._setup(self, master, cnf)
        if self._tclCommands is None:
            self._tclCommands = []
        # Class-typed keys in cnf (old-style option-group classes) are
        # pulled out and applied via their own configure() after creation.
        classes = []
        for k in cnf.keys():
            if type(k) is ClassType:
                classes.append((k, cnf[k]))
                del cnf[k]
        self.tk.call(
            (widgetName, self._w) + extra + self._options(cnf))
        for k, v in classes:
            k.configure(self, v)
    def destroy(self):
        """Destroy this and all descendants widgets."""
        for c in self.children.values(): c.destroy()
        self.tk.call('destroy', self._w)
        # Unregister from the master so the name can be reused.
        if self._name in self.master.children:
            del self.master.children[self._name]
        Misc.destroy(self)
    def _do(self, name, args=()):
        # XXX Obsolete -- better use self.tk.call directly!
        return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
    """Internal class.
    Base class for a widget which can be positioned with the geometry managers
    Pack, Place or Grid."""
    # Pure mixin composition; all behavior comes from the base classes.
    pass
class Toplevel(BaseWidget, Wm):
    """Toplevel widget, e.g. for dialogs."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a toplevel widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, menu, relief, screen, takefocus,
        use, visual, width."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        # These options are only legal at creation time, so they are
        # extracted from cnf and passed as extra creation arguments.
        extra = ()
        for key in ['screen', 'class_', 'class', 'visual',
                    'colormap']:
            if key not in cnf:
                continue
            # A trailing '_' marks the keyword-argument-safe spelling of
            # an option whose real name is a Python reserved word.
            option = '-' + (key[:-1] if key.endswith('_') else key)
            extra = extra + (option, cnf[key])
            del cnf[key]
        BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
        # New toplevels inherit icon name and title from the root window
        # and close themselves when the window manager asks.
        root = self._root()
        self.iconname(root.iconname())
        self.title(root.title())
        self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
    """Button widget."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a button widget with the parent MASTER.
        STANDARD OPTIONS
            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, repeatdelay,
            repeatinterval, takefocus, text,
            textvariable, underline, wraplength
        WIDGET-SPECIFIC OPTIONS
            command, compound, default, height,
            overrelief, state, width
        """
        Widget.__init__(self, master, 'button', cnf, kw)
    # The tkButton* methods below invoke legacy Tcl library procedures
    # that simulate pointer interaction with the button.
    def tkButtonEnter(self, *dummy):
        self.tk.call('tkButtonEnter', self._w)
    def tkButtonLeave(self, *dummy):
        self.tk.call('tkButtonLeave', self._w)
    def tkButtonDown(self, *dummy):
        self.tk.call('tkButtonDown', self._w)
    def tkButtonUp(self, *dummy):
        self.tk.call('tkButtonUp', self._w)
    def tkButtonInvoke(self, *dummy):
        self.tk.call('tkButtonInvoke', self._w)
    def flash(self):
        """Flash the button.
        This is accomplished by redisplaying
        the button several times, alternating between active and
        normal colors. At the end of the flash the button is left
        in the same normal/active state as when the command was
        invoked. This command is ignored if the button's state is
        disabled.
        """
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Invoke the command associated with the button.
        The return value is the return value from the command,
        or an empty string if there is no command associated with
        the button. This command is ignored if the button's state
        is disabled.
        """
        return self.tk.call(self._w, 'invoke')
# Indices:
# XXX I don't like these -- take them away
def AtEnd():
    """Return the Tk index string 'end'. (Deprecated helper.)"""
    return 'end'
def AtInsert(*args):
    """Return the Tk index string 'insert', followed by any truthy ARGS
    joined with single spaces. (Deprecated helper.)"""
    return ' '.join(('insert',) + tuple(a for a in args if a))
def AtSelFirst():
    """Return the Tk index string 'sel.first'. (Deprecated helper.)"""
    return 'sel.first'
def AtSelLast():
    """Return the Tk index string 'sel.last'. (Deprecated helper.)"""
    return 'sel.last'
def At(x, y=None):
    """Return a Tk @-coordinate index string: '@x' for a single
    coordinate, '@x,y' for a pair. (Deprecated helper.)"""
    return '@%r' % (x,) if y is None else '@%r,%r' % (x, y)
class Canvas(Widget, XView, YView):
    """Canvas widget to display graphical elements like lines or text.

    Items are addressed throughout by TAGORID: either an integer item id
    returned by a create_* method, or a string tag shared by several items."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a canvas widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, closeenough,
        confine, cursor, height, highlightbackground, highlightcolor,
        highlightthickness, insertbackground, insertborderwidth,
        insertofftime, insertontime, insertwidth, offset, relief,
        scrollregion, selectbackground, selectborderwidth, selectforeground,
        state, takefocus, width, xscrollcommand, xscrollincrement,
        yscrollcommand, yscrollincrement."""
        Widget.__init__(self, master, 'canvas', cnf, kw)
    def addtag(self, *args):
        """Internal function. Dispatch to the Tcl 'addtag' subcommands."""
        self.tk.call((self._w, 'addtag') + args)
    def addtag_above(self, newtag, tagOrId):
        """Add tag NEWTAG to all items above TAGORID."""
        self.addtag(newtag, 'above', tagOrId)
    def addtag_all(self, newtag):
        """Add tag NEWTAG to all items."""
        self.addtag(newtag, 'all')
    def addtag_below(self, newtag, tagOrId):
        """Add tag NEWTAG to all items below TAGORID."""
        self.addtag(newtag, 'below', tagOrId)
    def addtag_closest(self, newtag, x, y, halo=None, start=None):
        """Add tag NEWTAG to item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
        self.addtag(newtag, 'closest', x, y, halo, start)
    def addtag_enclosed(self, newtag, x1, y1, x2, y2):
        """Add tag NEWTAG to all items in the rectangle defined
        by X1,Y1,X2,Y2."""
        self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
    def addtag_overlapping(self, newtag, x1, y1, x2, y2):
        """Add tag NEWTAG to all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
    def addtag_withtag(self, newtag, tagOrId):
        """Add tag NEWTAG to all items with TAGORID."""
        self.addtag(newtag, 'withtag', tagOrId)
    def bbox(self, *args):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses all items with tags specified as arguments,
        or None if no such items exist."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def tag_unbind(self, tagOrId, sequence, funcid=None):
        """Unbind for all items with TAGORID for event SEQUENCE the
        function identified with FUNCID."""
        self.tk.call(self._w, 'bind', tagOrId, sequence, '')
        if funcid:
            self.deletecommand(funcid)
    def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
        """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or whether it will
        replace the previous function. See bind for the return value."""
        return self._bind((self._w, 'bind', tagOrId),
                          sequence, func, add)
    def canvasx(self, screenx, gridspacing=None):
        """Return the canvas x coordinate of pixel position SCREENX rounded
        to nearest multiple of GRIDSPACING units."""
        return getdouble(self.tk.call(
            self._w, 'canvasx', screenx, gridspacing))
    def canvasy(self, screeny, gridspacing=None):
        """Return the canvas y coordinate of pixel position SCREENY rounded
        to nearest multiple of GRIDSPACING units."""
        return getdouble(self.tk.call(
            self._w, 'canvasy', screeny, gridspacing))
    def coords(self, *args):
        """Return a list of float coordinates for the item given in ARGS."""
        # XXX Should use _flatten on args
        return map(getdouble,
                   self.tk.splitlist(
                       self.tk.call((self._w, 'coords') + args)))
    def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
        """Internal function. Create an item of ITEMTYPE; an optional
        trailing dict/tuple in ARGS supplies configuration options.
        Returns the new item's integer id."""
        args = _flatten(args)
        cnf = args[-1]
        if type(cnf) in (DictionaryType, TupleType):
            args = args[:-1]
        else:
            cnf = {}
        return getint(self.tk.call(
            self._w, 'create', itemType,
            *(args + self._options(cnf, kw))))
    def create_arc(self, *args, **kw):
        """Create arc shaped region with coordinates x1,y1,x2,y2."""
        return self._create('arc', args, kw)
    def create_bitmap(self, *args, **kw):
        """Create bitmap with coordinates x1,y1."""
        return self._create('bitmap', args, kw)
    def create_image(self, *args, **kw):
        """Create image item with coordinates x1,y1."""
        return self._create('image', args, kw)
    def create_line(self, *args, **kw):
        """Create line with coordinates x1,y1,...,xn,yn."""
        return self._create('line', args, kw)
    def create_oval(self, *args, **kw):
        """Create oval with coordinates x1,y1,x2,y2."""
        return self._create('oval', args, kw)
    def create_polygon(self, *args, **kw):
        """Create polygon with coordinates x1,y1,...,xn,yn."""
        return self._create('polygon', args, kw)
    def create_rectangle(self, *args, **kw):
        """Create rectangle with coordinates x1,y1,x2,y2."""
        return self._create('rectangle', args, kw)
    def create_text(self, *args, **kw):
        """Create text with coordinates x1,y1."""
        return self._create('text', args, kw)
    def create_window(self, *args, **kw):
        """Create window with coordinates x1,y1,x2,y2."""
        return self._create('window', args, kw)
    def dchars(self, *args):
        """Delete characters of text items identified by tag or id in ARGS (possibly
        several times) from FIRST to LAST character (including)."""
        self.tk.call((self._w, 'dchars') + args)
    def delete(self, *args):
        """Delete items identified by all tag or ids contained in ARGS."""
        self.tk.call((self._w, 'delete') + args)
    def dtag(self, *args):
        """Delete tag or id given as last arguments in ARGS from items
        identified by first argument in ARGS."""
        self.tk.call((self._w, 'dtag') + args)
    def find(self, *args):
        """Internal function. Dispatch to the Tcl 'find' subcommands;
        returns a (possibly empty) tuple of item ids."""
        return self._getints(
            self.tk.call((self._w, 'find') + args)) or ()
    def find_above(self, tagOrId):
        """Return items above TAGORID."""
        return self.find('above', tagOrId)
    def find_all(self):
        """Return all items."""
        return self.find('all')
    def find_below(self, tagOrId):
        """Return all items below TAGORID."""
        return self.find('below', tagOrId)
    def find_closest(self, x, y, halo=None, start=None):
        """Return item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
        return self.find('closest', x, y, halo, start)
    def find_enclosed(self, x1, y1, x2, y2):
        """Return all items in rectangle defined
        by X1,Y1,X2,Y2."""
        return self.find('enclosed', x1, y1, x2, y2)
    def find_overlapping(self, x1, y1, x2, y2):
        """Return all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        return self.find('overlapping', x1, y1, x2, y2)
    def find_withtag(self, tagOrId):
        """Return all items with TAGORID."""
        return self.find('withtag', tagOrId)
    def focus(self, *args):
        """Set focus to the first item specified in ARGS."""
        return self.tk.call((self._w, 'focus') + args)
    def gettags(self, *args):
        """Return tags associated with the first item specified in ARGS."""
        return self.tk.splitlist(
            self.tk.call((self._w, 'gettags') + args))
    def icursor(self, *args):
        """Set cursor at position POS in the item identified by TAGORID.
        In ARGS TAGORID must be first."""
        self.tk.call((self._w, 'icursor') + args)
    def index(self, *args):
        """Return position of cursor as integer in item specified in ARGS."""
        return getint(self.tk.call((self._w, 'index') + args))
    def insert(self, *args):
        """Insert TEXT in item TAGORID at position POS. ARGS must
        be TAGORID POS TEXT."""
        self.tk.call((self._w, 'insert') + args)
    def itemcget(self, tagOrId, option):
        """Return the resource value for an OPTION for item TAGORID."""
        return self.tk.call(
            (self._w, 'itemcget') + (tagOrId, '-'+option))
    def itemconfigure(self, tagOrId, cnf=None, **kw):
        """Configure resources of an item TAGORID.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method without arguments.
        """
        return self._configure(('itemconfigure', tagOrId), cnf, kw)
    itemconfig = itemconfigure
    # lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
    # so the preferred name for them is tag_lower, tag_raise
    # (similar to tag_bind, and similar to the Text widget);
    # unfortunately can't delete the old ones yet (maybe in 1.6)
    def tag_lower(self, *args):
        """Lower an item TAGORID given in ARGS
        (optional below another item)."""
        self.tk.call((self._w, 'lower') + args)
    lower = tag_lower
    def move(self, *args):
        """Move an item TAGORID given in ARGS."""
        self.tk.call((self._w, 'move') + args)
    def postscript(self, cnf={}, **kw):
        """Print the contents of the canvas to a postscript
        file. Valid options: colormap, colormode, file, fontmap,
        height, pageanchor, pageheight, pagewidth, pagex, pagey,
        rotate, width, x, y."""
        return self.tk.call((self._w, 'postscript') +
                            self._options(cnf, kw))
    def tag_raise(self, *args):
        """Raise an item TAGORID given in ARGS
        (optional above another item)."""
        self.tk.call((self._w, 'raise') + args)
    lift = tkraise = tag_raise
    def scale(self, *args):
        """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
        self.tk.call((self._w, 'scale') + args)
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y, gain=10):
        """Adjust the view of the canvas to GAIN times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
    def select_adjust(self, tagOrId, index):
        """Adjust the end of the selection near the cursor of an item TAGORID to index."""
        self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
    def select_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'select', 'clear')
    def select_from(self, tagOrId, index):
        """Set the fixed end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'from', tagOrId, index)
    def select_item(self):
        """Return the item which has the selection, or None."""
        return self.tk.call(self._w, 'select', 'item') or None
    def select_to(self, tagOrId, index):
        """Set the variable end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'to', tagOrId, index)
    def type(self, tagOrId):
        """Return the type of the item TAGORID, or None."""
        return self.tk.call(self._w, 'type', tagOrId) or None
class Checkbutton(Widget):
    """Checkbutton widget which is either in on- or off-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a checkbutton widget with the parent MASTER.
        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, offvalue, onvalue, padx, pady, relief,
        selectcolor, selectimage, state, takefocus, text, textvariable,
        underline, variable, width, wraplength."""
        Widget.__init__(self, master, 'checkbutton', cnf, kw)
    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and invoke a command if given as resource.
        Returns the command's result."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
    def toggle(self):
        """Toggle the button."""
        self.tk.call(self._w, 'toggle')
class Entry(Widget, XView):
    """Entry widget which allows displaying simple text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct an entry widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, highlightbackground,
        highlightcolor, highlightthickness, insertbackground,
        insertborderwidth, insertofftime, insertontime, insertwidth,
        invalidcommand, invcmd, justify, relief, selectbackground,
        selectborderwidth, selectforeground, show, state, takefocus,
        textvariable, validate, validatecommand, vcmd, width,
        xscrollcommand."""
        Widget.__init__(self, master, 'entry', cnf, kw)
    def delete(self, first, last=None):
        """Delete text from FIRST to LAST (not included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self):
        """Return the text."""
        return self.tk.call(self._w, 'get')
    def icursor(self, index):
        """Insert cursor at INDEX."""
        self.tk.call(self._w, 'icursor', index)
    def index(self, index):
        """Return position of cursor."""
        return getint(self.tk.call(
            self._w, 'index', index))
    def insert(self, index, string):
        """Insert STRING at INDEX."""
        self.tk.call(self._w, 'insert', index, string)
    def scan_mark(self, x):
        """Remember the current X coordinate (an entry scrolls only
        horizontally)."""
        self.tk.call(self._w, 'scan', 'mark', x)
    def scan_dragto(self, x):
        """Adjust the view of the entry to 10 times the
        difference between X and the coordinate given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x)
    def selection_adjust(self, index):
        """Adjust the end of the selection near the cursor to INDEX."""
        self.tk.call(self._w, 'selection', 'adjust', index)
    select_adjust = selection_adjust
    def selection_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'selection', 'clear')
    select_clear = selection_clear
    def selection_from(self, index):
        """Set the fixed end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'from', index)
    select_from = selection_from
    def selection_present(self):
        """Return True if there are characters selected in the entry, False
        otherwise."""
        return self.tk.getboolean(
            self.tk.call(self._w, 'selection', 'present'))
    select_present = selection_present
    def selection_range(self, start, end):
        """Set the selection from START to END (not included)."""
        self.tk.call(self._w, 'selection', 'range', start, end)
    select_range = selection_range
    def selection_to(self, index):
        """Set the variable end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'to', index)
    select_to = selection_to
class Frame(Widget):
    """Frame widget which may contain other widgets and can have a 3D border."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a frame widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, relief, takefocus, visual, width."""
        cnf = _cnfmerge((cnf, kw))
        # The widget class may only be set at creation time; 'class_' is
        # the keyword-safe spelling and takes precedence over 'class'.
        extra = ()
        for key in ('class_', 'class'):
            if key in cnf:
                extra = ('-class', cnf[key])
                del cnf[key]
                break
        Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
    """Label widget which can display text and bitmaps."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a label widget with the parent MASTER.
        STANDARD OPTIONS
            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, takefocus, text,
            textvariable, underline, wraplength
        WIDGET-SPECIFIC OPTIONS
            height, state, width
        """
        Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
"""Listbox widget which can display a list of strings."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a listbox widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, cursor,
exportselection, fg, font, foreground, height, highlightbackground,
highlightcolor, highlightthickness, relief, selectbackground,
selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
width, xscrollcommand, yscrollcommand, listvariable."""
Widget.__init__(self, master, 'listbox', cnf, kw)
def activate(self, index):
"""Activate item identified by INDEX."""
self.tk.call(self._w, 'activate', index)
def bbox(self, index):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
which encloses the item identified by the given index."""
return self._getints(self.tk.call(self._w, 'bbox', index)) or None
def curselection(self):
"""Return the indices of currently selected item."""
return self._getints(self.tk.call(self._w, 'curselection')) or ()
def delete(self, first, last=None):
"""Delete items from FIRST to LAST (included)."""
self.tk.call(self._w, 'delete', first, last)
def get(self, first, last=None):
"""Get list of items from FIRST to LAST (included)."""
if last is not None:
return self.tk.splitlist(self.tk.call(
self._w, 'get', first, last))
else:
return self.tk.call(self._w, 'get', first)
def index(self, index):
"""Return index of item identified with INDEX."""
i = self.tk.call(self._w, 'index', index)
if i == 'none': return None
return getint(i)
def insert(self, index, *elements):
"""Insert ELEMENTS at INDEX."""
self.tk.call((self._w, 'insert', index) + elements)
def nearest(self, y):
"""Get index of item which is nearest to y coordinate Y."""
return getint(self.tk.call(
self._w, 'nearest', y))
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the listbox to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
def see(self, index):
"""Scroll such that INDEX is visible."""
self.tk.call(self._w, 'see', index)
def selection_anchor(self, index):
"""Set the fixed end oft the selection to INDEX."""
self.tk.call(self._w, 'selection', 'anchor', index)
select_anchor = selection_anchor
def selection_clear(self, first, last=None):
"""Clear the selection from FIRST to LAST (included)."""
self.tk.call(self._w,
'selection', 'clear', first, last)
select_clear = selection_clear
def selection_includes(self, index):
"""Return 1 if INDEX is part of the selection."""
return self.tk.getboolean(self.tk.call(
self._w, 'selection', 'includes', index))
select_includes = selection_includes
def selection_set(self, first, last=None):
"""Set the selection from FIRST to LAST (included) without
changing the currently selected elements."""
self.tk.call(self._w, 'selection', 'set', first, last)
select_set = selection_set
def size(self):
"""Return the number of elements in the listbox."""
return getint(self.tk.call(self._w, 'size'))
def itemcget(self, index, option):
"""Return the resource value for an ITEM and an OPTION."""
return self.tk.call(
(self._w, 'itemcget') + (index, '-'+option))
    def itemconfigure(self, index, cnf=None, **kw):
        """Configure resources of an ITEM.

        The values for resources are specified as keyword arguments.
        To get an overview about the allowed keyword arguments
        call the method without arguments.
        Valid resource names: background, bg, foreground, fg,
        selectbackground, selectforeground."""
        return self._configure(('itemconfigure', index), cnf, kw)

    # Backwards-compatible short alias.
    itemconfig = itemconfigure
class Menu(Widget):
    """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct menu widget with the parent MASTER.

        Valid resource names: activebackground, activeborderwidth,
        activeforeground, background, bd, bg, borderwidth, cursor,
        disabledforeground, fg, font, foreground, postcommand, relief,
        selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
        Widget.__init__(self, master, 'menu', cnf, kw)

    def tk_bindForTraversal(self):
        # obsolete since Tk 4.0
        import warnings
        warnings.warn('tk_bindForTraversal() does nothing and '
                      'will be removed in 3.6',
                      DeprecationWarning, stacklevel=2)

    # The following tk_* methods forward to legacy Tk library procedures
    # for menu posting and keyboard traversal.
    def tk_mbPost(self):
        self.tk.call('tk_mbPost', self._w)

    def tk_mbUnpost(self):
        self.tk.call('tk_mbUnpost')

    def tk_traverseToMenu(self, char):
        self.tk.call('tk_traverseToMenu', self._w, char)

    def tk_traverseWithinMenu(self, char):
        self.tk.call('tk_traverseWithinMenu', self._w, char)

    def tk_getMenuButtons(self):
        return self.tk.call('tk_getMenuButtons', self._w)

    def tk_nextMenu(self, count):
        self.tk.call('tk_nextMenu', count)

    def tk_nextMenuEntry(self, count):
        self.tk.call('tk_nextMenuEntry', count)

    def tk_invokeMenu(self):
        self.tk.call('tk_invokeMenu', self._w)

    def tk_firstMenu(self):
        self.tk.call('tk_firstMenu', self._w)

    def tk_mbButtonDown(self):
        self.tk.call('tk_mbButtonDown', self._w)

    def tk_popup(self, x, y, entry=""):
        """Post the menu at position X,Y with entry ENTRY."""
        self.tk.call('tk_popup', self._w, x, y, entry)

    def activate(self, index):
        """Activate entry at INDEX."""
        self.tk.call(self._w, 'activate', index)

    def add(self, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'add', itemType) +
                     self._options(cnf, kw))

    def add_cascade(self, cnf={}, **kw):
        """Add hierarchical menu item."""
        self.add('cascade', cnf or kw)

    def add_checkbutton(self, cnf={}, **kw):
        """Add checkbutton menu item."""
        self.add('checkbutton', cnf or kw)

    def add_command(self, cnf={}, **kw):
        """Add command menu item."""
        self.add('command', cnf or kw)

    def add_radiobutton(self, cnf={}, **kw):
        """Add radio menu item."""
        self.add('radiobutton', cnf or kw)

    def add_separator(self, cnf={}, **kw):
        """Add separator."""
        self.add('separator', cnf or kw)

    def insert(self, index, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'insert', index, itemType) +
                     self._options(cnf, kw))

    def insert_cascade(self, index, cnf={}, **kw):
        """Add hierarchical menu item at INDEX."""
        self.insert(index, 'cascade', cnf or kw)

    def insert_checkbutton(self, index, cnf={}, **kw):
        """Add checkbutton menu item at INDEX."""
        self.insert(index, 'checkbutton', cnf or kw)

    def insert_command(self, index, cnf={}, **kw):
        """Add command menu item at INDEX."""
        self.insert(index, 'command', cnf or kw)

    def insert_radiobutton(self, index, cnf={}, **kw):
        """Add radio menu item at INDEX."""
        self.insert(index, 'radiobutton', cnf or kw)

    def insert_separator(self, index, cnf={}, **kw):
        """Add separator at INDEX."""
        self.insert(index, 'separator', cnf or kw)

    def delete(self, index1, index2=None):
        """Delete menu items between INDEX1 and INDEX2 (included)."""
        if index2 is None:
            index2 = index1

        # Resolve the symbolic indices to numbers so the range of
        # entries to be removed can be walked; an unresolvable index
        # yields an empty range (0, -1) and only the Tcl-side delete
        # below is performed.
        num_index1, num_index2 = self.index(index1), self.index(index2)
        if (num_index1 is None) or (num_index2 is None):
            num_index1, num_index2 = 0, -1

        # Deregister the Tcl command callbacks attached to the deleted
        # entries so they are not leaked.
        for i in range(num_index1, num_index2 + 1):
            if 'command' in self.entryconfig(i):
                c = str(self.entrycget(i, 'command'))
                if c:
                    self.deletecommand(c)
        self.tk.call(self._w, 'delete', index1, index2)

    def entrycget(self, index, option):
        """Return the resource value of a menu item for OPTION at INDEX."""
        return self.tk.call(self._w, 'entrycget', index, '-' + option)

    def entryconfigure(self, index, cnf=None, **kw):
        """Configure a menu item at INDEX."""
        return self._configure(('entryconfigure', index), cnf, kw)

    # Backwards-compatible short alias.
    entryconfig = entryconfigure

    def index(self, index):
        """Return the index of a menu item identified by INDEX.

        Returns None when Tk reports no such entry ('none')."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)

    def invoke(self, index):
        """Invoke a menu item identified by INDEX and execute
        the associated command."""
        return self.tk.call(self._w, 'invoke', index)

    def post(self, x, y):
        """Display a menu at position X,Y."""
        self.tk.call(self._w, 'post', x, y)

    def type(self, index):
        """Return the type of the menu item at INDEX."""
        return self.tk.call(self._w, 'type', index)

    def unpost(self):
        """Unmap a menu."""
        self.tk.call(self._w, 'unpost')

    def yposition(self, index):
        """Return the y-position of the topmost pixel of the menu item at INDEX."""
        return getint(self.tk.call(
            self._w, 'yposition', index))
class Menubutton(Widget):
    """Menubutton widget, obsolete since Tk8.0."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a menubutton widget with the parent MASTER."""
        Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
    """Message widget to display multiline text. Obsolete since Label does it too."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a message widget with the parent MASTER."""
        Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
    """Radiobutton widget which shows only one of several buttons in on-state."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a radiobutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
        state, takefocus, text, textvariable, underline, value, variable,
        width, wraplength."""
        Widget.__init__(self, master, 'radiobutton', cnf, kw)

    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')

    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')

    def invoke(self):
        """Toggle the button and invoke a command if given as resource."""
        return self.tk.call(self._w, 'invoke')

    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
class Scale(Widget):
    """Scale widget which can display a numerical scale."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scale widget with the parent MASTER.

        Valid resource names: activebackground, background, bigincrement, bd,
        bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
        highlightbackground, highlightcolor, highlightthickness, label,
        length, orient, relief, repeatdelay, repeatinterval, resolution,
        showvalue, sliderlength, sliderrelief, state, takefocus,
        tickinterval, to, troughcolor, variable, width."""
        Widget.__init__(self, master, 'scale', cnf, kw)

    def get(self):
        """Get the current value as integer or float."""
        value = self.tk.call(self._w, 'get')
        # Prefer an int when the Tcl string parses as one; fall back to
        # a float otherwise.
        try:
            return getint(value)
        except ValueError:
            return getdouble(value)

    def set(self, value):
        """Set the value to VALUE."""
        self.tk.call(self._w, 'set', value)

    def coords(self, value=None):
        """Return a tuple (X,Y) of the point along the centerline of the
        trough that corresponds to VALUE or the current value if None is
        given."""
        return self._getints(self.tk.call(self._w, 'coords', value))

    def identify(self, x, y):
        """Return where the point X,Y lies. Valid return values are "slider",
        "trough1" and "trough2"."""
        return self.tk.call(self._w, 'identify', x, y)
class Scrollbar(Widget):
    """Scrollbar widget which displays a slider at a certain position."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scrollbar widget with the parent MASTER.

        Valid resource names: activebackground, activerelief,
        background, bd, bg, borderwidth, command, cursor,
        elementborderwidth, highlightbackground,
        highlightcolor, highlightthickness, jump, orient,
        relief, repeatdelay, repeatinterval, takefocus,
        troughcolor, width."""
        Widget.__init__(self, master, 'scrollbar', cnf, kw)

    def activate(self, index):
        """Display the element at INDEX with activebackground and activerelief.
        INDEX can be "arrow1","slider" or "arrow2"."""
        self.tk.call(self._w, 'activate', index)

    def delta(self, deltax, deltay):
        """Return the fractional change of the scrollbar setting if it
        would be moved by DELTAX or DELTAY pixels."""
        return getdouble(
            self.tk.call(self._w, 'delta', deltax, deltay))

    def fraction(self, x, y):
        """Return the fractional value which corresponds to a slider
        position of X,Y."""
        return getdouble(self.tk.call(self._w, 'fraction', x, y))

    def identify(self, x, y):
        """Return the element under position X,Y as one of
        "arrow1","slider","arrow2" or ""."""
        return self.tk.call(self._w, 'identify', x, y)

    def get(self):
        """Return the current fractional values (upper and lower end)
        of the slider position."""
        return self._getdoubles(self.tk.call(self._w, 'get'))

    def set(self, *args):
        """Set the fractional values of the slider position (upper and
        lower ends as value between 0 and 1)."""
        self.tk.call((self._w, 'set') + args)
class Text(Widget, XView, YView):
    """Text widget which can display text in various forms."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a text widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor,
            exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, padx, pady,
            relief, selectbackground,
            selectborderwidth, selectforeground,
            setgrid, takefocus,
            xscrollcommand, yscrollcommand,

        WIDGET-SPECIFIC OPTIONS

            autoseparators, height, maxundo,
            spacing1, spacing2, spacing3,
            state, tabs, undo, width, wrap,
        """
        Widget.__init__(self, master, 'text', cnf, kw)

    def bbox(self, *args):
        """Return a tuple of (x,y,width,height) which gives the bounding
        box of the visible part of the character at the index in ARGS."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None

    # The following tk_text* methods forward to legacy Tk library
    # procedures for selection and editing support.
    def tk_textSelectTo(self, index):
        self.tk.call('tk_textSelectTo', self._w, index)

    def tk_textBackspace(self):
        self.tk.call('tk_textBackspace', self._w)

    def tk_textIndexCloser(self, a, b, c):
        self.tk.call('tk_textIndexCloser', self._w, a, b, c)

    def tk_textResetAnchor(self, index):
        self.tk.call('tk_textResetAnchor', self._w, index)

    def compare(self, index1, op, index2):
        """Return whether between index INDEX1 and index INDEX2 the
        relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'compare', index1, op, index2))

    def debug(self, boolean=None):
        """Turn on the internal consistency checks of the B-Tree inside the text
        widget according to BOOLEAN."""
        if boolean is None:
            return self.tk.getboolean(self.tk.call(self._w, 'debug'))
        self.tk.call(self._w, 'debug', boolean)

    def delete(self, index1, index2=None):
        """Delete the characters between INDEX1 and INDEX2 (not included)."""
        self.tk.call(self._w, 'delete', index1, index2)

    def dlineinfo(self, index):
        """Return tuple (x,y,width,height,baseline) giving the bounding box
        and baseline position of the visible part of the line containing
        the character at INDEX."""
        return self._getints(self.tk.call(self._w, 'dlineinfo', index))

    def dump(self, index1, index2=None, command=None, **kw):
        """Return the contents of the widget between index1 and index2.

        The type of contents returned in filtered based on the keyword
        parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
        given and true, then the corresponding items are returned. The result
        is a list of triples of the form (key, value, index). If none of the
        keywords are true then 'all' is used by default.

        If the 'command' argument is given, it is called once for each element
        of the list of triples, with the values of each triple serving as the
        arguments to the function. In this case the list is not returned."""
        args = []
        func_name = None
        result = None
        if not command:
            # Never call the dump command without the -command flag, since the
            # output could involve Tcl quoting and would be a pain to parse
            # right. Instead just set the command to build a list of triples
            # as if we had done the parsing.
            result = []
            def append_triple(key, value, index, result=result):
                result.append((key, value, index))
            command = append_triple
        try:
            if not isinstance(command, str):
                func_name = command = self._register(command)
            args += ["-command", command]
            for key in kw:
                if kw[key]: args.append("-" + key)
            args.append(index1)
            if index2:
                args.append(index2)
            self.tk.call(self._w, "dump", *args)
            return result
        finally:
            # Always deregister a command we registered ourselves above.
            if func_name:
                self.deletecommand(func_name)

    ## new in tk8.4
    def edit(self, *args):
        """Internal method

        This method controls the undo mechanism and
        the modified flag. The exact behavior of the
        command depends on the option argument that
        follows the edit argument. The following forms
        of the command are currently supported:

        edit_modified, edit_redo, edit_reset, edit_separator
        and edit_undo
        """
        return self.tk.call(self._w, 'edit', *args)

    def edit_modified(self, arg=None):
        """Get or Set the modified flag

        If arg is not specified, returns the modified
        flag of the widget. The insert, delete, edit undo and
        edit redo commands or the user can set or clear the
        modified flag. If boolean is specified, sets the
        modified flag of the widget to arg.
        """
        return self.edit("modified", arg)

    def edit_redo(self):
        """Redo the last undone edit

        When the undo option is true, reapplies the last
        undone edits provided no other edits were done since
        then. Generates an error when the redo stack is empty.
        Does nothing when the undo option is false.
        """
        return self.edit("redo")

    def edit_reset(self):
        """Clears the undo and redo stacks
        """
        return self.edit("reset")

    def edit_separator(self):
        """Inserts a separator (boundary) on the undo stack.

        Does nothing when the undo option is false
        """
        return self.edit("separator")

    def edit_undo(self):
        """Undoes the last edit action

        If the undo option is true. An edit action is defined
        as all the insert and delete commands that are recorded
        on the undo stack in between two separators. Generates
        an error when the undo stack is empty. Does nothing
        when the undo option is false
        """
        return self.edit("undo")

    def get(self, index1, index2=None):
        """Return the text from INDEX1 to INDEX2 (not included)."""
        return self.tk.call(self._w, 'get', index1, index2)

    # (Image commands are new in 8.0)
    def image_cget(self, index, option):
        """Return the value of OPTION of an embedded image at INDEX."""
        # Normalize the option name: ensure a leading dash, strip a
        # trailing underscore (used to dodge Python keywords).
        if option[:1] != "-":
            option = "-" + option
        if option[-1:] == "_":
            option = option[:-1]
        return self.tk.call(self._w, "image", "cget", index, option)

    def image_configure(self, index, cnf=None, **kw):
        """Configure an embedded image at INDEX."""
        return self._configure(('image', 'configure', index), cnf, kw)

    def image_create(self, index, cnf={}, **kw):
        """Create an embedded image at INDEX."""
        return self.tk.call(
                 self._w, "image", "create", index,
                 *self._options(cnf, kw))

    def image_names(self):
        """Return all names of embedded images in this widget."""
        return self.tk.call(self._w, "image", "names")

    def index(self, index):
        """Return the index in the form line.char for INDEX."""
        return str(self.tk.call(self._w, 'index', index))

    def insert(self, index, chars, *args):
        """Insert CHARS before the characters at INDEX. An additional
        tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
        self.tk.call((self._w, 'insert', index, chars) + args)

    def mark_gravity(self, markName, direction=None):
        """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
        Return the current value if None is given for DIRECTION."""
        return self.tk.call(
            (self._w, 'mark', 'gravity', markName, direction))

    def mark_names(self):
        """Return all mark names."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'mark', 'names'))

    def mark_set(self, markName, index):
        """Set mark MARKNAME before the character at INDEX."""
        self.tk.call(self._w, 'mark', 'set', markName, index)

    def mark_unset(self, *markNames):
        """Delete all marks in MARKNAMES."""
        self.tk.call((self._w, 'mark', 'unset') + markNames)

    def mark_next(self, index):
        """Return the name of the next mark after INDEX."""
        return self.tk.call(self._w, 'mark', 'next', index) or None

    def mark_previous(self, index):
        """Return the name of the previous mark before INDEX."""
        return self.tk.call(self._w, 'mark', 'previous', index) or None

    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)

    def scan_dragto(self, x, y):
        """Adjust the view of the text to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)

    def search(self, pattern, index, stopindex=None,
               forwards=None, backwards=None, exact=None,
               regexp=None, nocase=None, count=None, elide=None):
        """Search PATTERN beginning from INDEX until STOPINDEX.
        Return the index of the first character of a match or an
        empty string."""
        # Build the Tcl argument list from the truthy flags.
        args = [self._w, 'search']
        if forwards: args.append('-forwards')
        if backwards: args.append('-backwards')
        if exact: args.append('-exact')
        if regexp: args.append('-regexp')
        if nocase: args.append('-nocase')
        if elide: args.append('-elide')
        if count: args.append('-count'); args.append(count)
        # A pattern starting with '-' would otherwise be parsed as a
        # switch; '--' terminates option processing.
        if pattern and pattern[0] == '-': args.append('--')
        args.append(pattern)
        args.append(index)
        if stopindex: args.append(stopindex)
        return str(self.tk.call(tuple(args)))

    def see(self, index):
        """Scroll such that the character at INDEX is visible."""
        self.tk.call(self._w, 'see', index)

    def tag_add(self, tagName, index1, *args):
        """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
        Additional pairs of indices may follow in ARGS."""
        self.tk.call(
            (self._w, 'tag', 'add', tagName, index1) + args)

    def tag_unbind(self, tagName, sequence, funcid=None):
        """Unbind for all characters with TAGNAME for event SEQUENCE the
        function identified with FUNCID."""
        self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
        if funcid:
            self.deletecommand(funcid)

    def tag_bind(self, tagName, sequence, func, add=None):
        """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.

        An additional boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or whether it will
        replace the previous function. See bind for the return value."""
        return self._bind((self._w, 'tag', 'bind', tagName),
                  sequence, func, add)

    def tag_cget(self, tagName, option):
        """Return the value of OPTION for tag TAGNAME."""
        # Normalize the option name as in image_cget above.
        if option[:1] != '-':
            option = '-' + option
        if option[-1:] == '_':
            option = option[:-1]
        return self.tk.call(self._w, 'tag', 'cget', tagName, option)

    def tag_configure(self, tagName, cnf=None, **kw):
        """Configure a tag TAGNAME."""
        return self._configure(('tag', 'configure', tagName), cnf, kw)

    # Backwards-compatible short alias.
    tag_config = tag_configure

    def tag_delete(self, *tagNames):
        """Delete all tags in TAGNAMES."""
        self.tk.call((self._w, 'tag', 'delete') + tagNames)

    def tag_lower(self, tagName, belowThis=None):
        """Change the priority of tag TAGNAME such that it is lower
        than the priority of BELOWTHIS."""
        self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)

    def tag_names(self, index=None):
        """Return a list of all tag names."""
        return self.tk.splitlist(
            self.tk.call(self._w, 'tag', 'names', index))

    def tag_nextrange(self, tagName, index1, index2=None):
        """Return a list of start and end index for the first sequence of
        characters between INDEX1 and INDEX2 which all have tag TAGNAME.
        The text is searched forward from INDEX1."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'nextrange', tagName, index1, index2))

    def tag_prevrange(self, tagName, index1, index2=None):
        """Return a list of start and end index for the first sequence of
        characters between INDEX1 and INDEX2 which all have tag TAGNAME.
        The text is searched backwards from INDEX1."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'prevrange', tagName, index1, index2))

    def tag_raise(self, tagName, aboveThis=None):
        """Change the priority of tag TAGNAME such that it is higher
        than the priority of ABOVETHIS."""
        self.tk.call(
            self._w, 'tag', 'raise', tagName, aboveThis)

    def tag_ranges(self, tagName):
        """Return a list of ranges of text which have tag TAGNAME."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'ranges', tagName))

    def tag_remove(self, tagName, index1, index2=None):
        """Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
        self.tk.call(
            self._w, 'tag', 'remove', tagName, index1, index2)

    def window_cget(self, index, option):
        """Return the value of OPTION of an embedded window at INDEX."""
        # Normalize the option name as in image_cget above.
        if option[:1] != '-':
            option = '-' + option
        if option[-1:] == '_':
            option = option[:-1]
        return self.tk.call(self._w, 'window', 'cget', index, option)

    def window_configure(self, index, cnf=None, **kw):
        """Configure an embedded window at INDEX."""
        return self._configure(('window', 'configure', index), cnf, kw)

    # Backwards-compatible short alias.
    window_config = window_configure

    def window_create(self, index, cnf={}, **kw):
        """Create a window at INDEX."""
        self.tk.call(
              (self._w, 'window', 'create', index)
              + self._options(cnf, kw))

    def window_names(self):
        """Return all names of embedded windows in this widget."""
        return self.tk.splitlist(
            self.tk.call(self._w, 'window', 'names'))

    def yview_pickplace(self, *what):
        """Obsolete function, use see."""
        self.tk.call((self._w, 'yview', '-pickplace') + what)
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, var, value, callback=None):
self.__value = value
self.__var = var
self.__callback = callback
def __call__(self, *args):
self.__var.set(self.__value)
if self.__callback:
self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
    """OptionMenu which allows the user to select a value from a menu."""

    def __init__(self, master, variable, value, *values, **kwargs):
        """Construct an optionmenu widget with the parent MASTER, with
        the resource textvariable set to VARIABLE, the initially selected
        value VALUE, the other menu values VALUES and an additional
        keyword argument command."""
        kw = {"borderwidth": 2, "textvariable": variable,
              "indicatoron": 1, "relief": RAISED, "anchor": "c",
              "highlightthickness": 2}
        Widget.__init__(self, master, "menubutton", kw)
        self.widgetName = 'tk_optionMenu'
        # The menubutton owns an associated dropdown Menu named "menu".
        menu = self.__menu = Menu(self, name="menu", tearoff=0)
        self.menuname = menu._w
        # 'command' is the only supported keyword
        callback = kwargs.get('command')
        if 'command' in kwargs:
            del kwargs['command']
        if kwargs:
            raise TclError, 'unknown option -'+kwargs.keys()[0]
        # One menu entry per value; _setit stores the value into
        # VARIABLE and forwards to the optional callback.
        menu.add_command(label=value,
                 command=_setit(variable, value, callback))
        for v in values:
            menu.add_command(label=v,
                     command=_setit(variable, v, callback))
        self["menu"] = menu

    def __getitem__(self, name):
        # Intercept the 'menu' resource to return the Menu instance
        # instead of the Tcl widget path.
        if name == 'menu':
            return self.__menu
        return Widget.__getitem__(self, name)

    def destroy(self):
        """Destroy this widget and the associated menu."""
        Menubutton.destroy(self)
        self.__menu = None
class Image:
    """Base class for images."""

    # Counter used to generate unique default image names.
    _last_id = 0

    def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
        self.name = None
        if not master:
            master = _default_root
            if not master:
                raise RuntimeError, 'Too early to create image'
        self.tk = getattr(master, 'tk', master)
        if not name:
            Image._last_id += 1
            name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
            # The following is needed for systems where id(x)
            # can return a negative number, such as Linux/m68k:
            if name[0] == '-': name = '_' + name[1:]
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            # NOTE(review): Image does not define _register itself;
            # callable option values appear to rely on a helper that is
            # not visible in this class -- confirm before relying on it.
            if hasattr(v, '__call__'):
                v = self._register(v)
            elif k in ('data', 'maskdata'):
                v = self.tk._createbytearray(v)
            options = options + ('-'+k, v)
        self.tk.call(('image', 'create', imgtype, name,) + options)
        self.name = name

    def __str__(self): return self.name

    def __del__(self):
        if self.name:
            try:
                self.tk.call('image', 'delete', self.name)
            except TclError:
                # May happen if the root was destroyed
                pass

    def __setitem__(self, key, value):
        self.tk.call(self.name, 'configure', '-'+key, value)

    def __getitem__(self, key):
        return self.tk.call(self.name, 'configure', '-'+key)

    def configure(self, **kw):
        """Configure the image."""
        res = ()
        for k, v in _cnfmerge(kw).items():
            if v is not None:
                # Strip a trailing underscore used to dodge keywords.
                if k[-1] == '_': k = k[:-1]
                if hasattr(v, '__call__'):
                    v = self._register(v)
                elif k in ('data', 'maskdata'):
                    v = self.tk._createbytearray(v)
                res = res + ('-'+k, v)
        self.tk.call((self.name, 'config') + res)

    # Backwards-compatible short alias.
    config = configure

    def height(self):
        """Return the height of the image."""
        return getint(
            self.tk.call('image', 'height', self.name))

    def type(self):
        """Return the type of the image, e.g. "photo" or "bitmap"."""
        return self.tk.call('image', 'type', self.name)

    def width(self):
        """Return the width of the image."""
        return getint(
            self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
    """Widget which can display colored images in GIF, PPM/PGM format."""

    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create an image with NAME.

        Valid resource names: data, format, file, gamma, height, palette,
        width."""
        Image.__init__(self, 'photo', name, cnf, master, **kw)

    def blank(self):
        """Display a transparent image."""
        self.tk.call(self.name, 'blank')

    def cget(self, option):
        """Return the value of OPTION."""
        return self.tk.call(self.name, 'cget', '-' + option)

    # XXX config
    def __getitem__(self, key):
        return self.tk.call(self.name, 'cget', '-' + key)

    # XXX copy -from, -to, ...?
    def copy(self):
        """Return a new PhotoImage with the same image as this widget."""
        destImage = PhotoImage(master=self.tk)
        self.tk.call(destImage, 'copy', self.name)
        return destImage

    def zoom(self, x, y=''):
        """Return a new PhotoImage with the same image as this widget
        but zoom it with a factor of x in the X direction and y in the Y
        direction.  If y is not given, the default value is the same as x.
        """
        destImage = PhotoImage(master=self.tk)
        if y=='': y=x
        self.tk.call(destImage, 'copy', self.name, '-zoom',x,y)
        return destImage

    def subsample(self, x, y=''):
        """Return a new PhotoImage based on the same image as this widget
        but use only every Xth or Yth pixel.  If y is not given, the
        default value is the same as x.
        """
        destImage = PhotoImage(master=self.tk)
        if y=='': y=x
        self.tk.call(destImage, 'copy', self.name, '-subsample',x,y)
        return destImage

    def get(self, x, y):
        """Return the color (red, green, blue) of the pixel at X,Y."""
        return self.tk.call(self.name, 'get', x, y)

    def put(self, data, to=None):
        """Put row formatted colors to image starting from
        position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
        args = (self.name, 'put', data)
        if to:
            # Accept a TO tuple that already carries a leading '-to'.
            if to[0] == '-to':
                to = to[1:]
            args = args + ('-to',) + tuple(to)
        self.tk.call(args)

    # XXX read
    def write(self, filename, format=None, from_coords=None):
        """Write image to file FILENAME in FORMAT starting from
        position FROM_COORDS."""
        args = (self.name, 'write', filename)
        if format:
            args = args + ('-format', format)
        if from_coords:
            args = args + ('-from',) + tuple(from_coords)
        self.tk.call(args)
class BitmapImage(Image):
    """Widget which can display a bitmap."""

    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create a bitmap with NAME.

        Valid resource names: background, data, file, foreground, maskdata, maskfile."""
        Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names():
    """Return a tuple with the names of all existing images."""
    tk = _default_root.tk
    return tk.splitlist(tk.call('image', 'names'))
def image_types():
    """Return a tuple with the names of all available image types."""
    tk = _default_root.tk
    return tk.splitlist(tk.call('image', 'types'))
class Spinbox(Widget, XView):
"""spinbox widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a spinbox widget with the parent MASTER.
STANDARD OPTIONS
activebackground, background, borderwidth,
cursor, exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, justify, relief,
repeatdelay, repeatinterval,
selectbackground, selectborderwidth
selectforeground, takefocus, textvariable
xscrollcommand.
WIDGET-SPECIFIC OPTIONS
buttonbackground, buttoncursor,
buttondownrelief, buttonuprelief,
command, disabledbackground,
disabledforeground, format, from,
invalidcommand, increment,
readonlybackground, state, to,
validate, validatecommand values,
width, wrap,
"""
Widget.__init__(self, master, 'spinbox', cnf, kw)
def bbox(self, index):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a
rectangle which encloses the character given by index.
The first two elements of the list give the x and y
coordinates of the upper-left corner of the screen
area covered by the character (in pixels relative
to the widget) and the last two elements give the
width and height of the character, in pixels. The
bounding box may refer to a region outside the
visible area of the window.
"""
return self._getints(self.tk.call(self._w, 'bbox', index)) or None
def delete(self, first, last=None):
"""Delete one or more elements of the spinbox.
First is the index of the first character to delete,
and last is the index of the character just after
the last one to delete. If last isn't specified it
defaults to first+1, i.e. a single character is
deleted. This command returns an empty string.
"""
return self.tk.call(self._w, 'delete', first, last)
def get(self):
"""Returns the spinbox's string"""
return self.tk.call(self._w, 'get')
def icursor(self, index):
"""Alter the position of the insertion cursor.
The insertion cursor will be displayed just before
the character given by index. Returns an empty string
"""
return self.tk.call(self._w, 'icursor', index)
def identify(self, x, y):
"""Returns the name of the widget at position x, y
Return value is one of: none, buttondown, buttonup, entry
"""
return self.tk.call(self._w, 'identify', x, y)
def index(self, index):
"""Returns the numerical index corresponding to index
"""
return self.tk.call(self._w, 'index', index)
def insert(self, index, s):
"""Insert string s at index
Returns an empty string.
"""
return self.tk.call(self._w, 'insert', index, s)
def invoke(self, element):
"""Causes the specified element to be invoked
The element could be buttondown or buttonup
triggering the action associated with it.
"""
return self.tk.call(self._w, 'invoke', element)
def scan(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'scan') + args)) or ()
def scan_mark(self, x):
"""Records x and the current view in the spinbox window;
used in conjunction with later scan dragto commands.
Typically this command is associated with a mouse button
press in the widget. It returns an empty string.
"""
return self.scan("mark", x)
def scan_dragto(self, x):
"""Compute the difference between the given x argument
and the x argument to the last scan mark command
It then adjusts the view left or right by 10 times the
difference in x-coordinates. This command is typically
associated with mouse motion events in the widget, to
produce the effect of dragging the spinbox at high speed
through the window. The return value is an empty string.
"""
return self.scan("dragto", x)
def selection(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'selection') + args)) or ()
def selection_adjust(self, index):
"""Locate the end of the selection nearest to the character
given by index,
Then adjust that end of the selection to be at index
(i.e including but not going beyond index). The other
end of the selection is made the anchor point for future
select to commands. If the selection isn't currently in
the spinbox, then a new selection is created to include
the characters between index and the most recent selection
anchor point, inclusive. Returns an empty string.
"""
return self.selection("adjust", index)
    def selection_clear(self):
        """Clear the selection.

        If the selection is not in this widget the command has no
        effect.  Returns an empty string.
        """
        return self.selection("clear")
    def selection_element(self, element=None):
        """Set or get the currently selected element.

        If a spinbutton element is specified it will be displayed
        depressed.
        """
        # NOTE(review): when element is None it is still passed through to
        # the Tcl interpreter; presumably Tk treats the empty value as a
        # query -- confirm against the Tk spinbox man page.
        return self.selection("element", element)
###########################################################################
class LabelFrame(Widget):
    """labelframe widget: a frame with an optional label drawn around it."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a labelframe widget with the parent MASTER.

        STANDARD OPTIONS

            borderwidth, cursor, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, padx, pady, relief,
            takefocus, text

        WIDGET-SPECIFIC OPTIONS

            background, class, colormap, container,
            height, labelanchor, labelwidget,
            visual, width
        """
        Widget.__init__(self, master, 'labelframe', cnf, kw)
########################################################################
class PanedWindow(Widget):
    """panedwindow widget: a container that arranges child panes
    horizontally or vertically, separated by movable sashes."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a panedwindow widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor, height,
            orient, relief, width

        WIDGET-SPECIFIC OPTIONS

            handlepad, handlesize, opaqueresize,
            sashcursor, sashpad, sashrelief,
            sashwidth, showhandle,
        """
        Widget.__init__(self, master, 'panedwindow', cnf, kw)

    def add(self, child, **kw):
        """Add a child widget to the panedwindow in a new pane.

        The child argument is the name of the child widget
        followed by pairs of arguments that specify how to
        manage the windows. The possible options and values
        are the ones accepted by the paneconfigure method.
        """
        self.tk.call((self._w, 'add', child) + self._options(kw))

    def remove(self, child):
        """Remove the pane containing child from the panedwindow.

        All geometry management options for child will be forgotten.
        """
        # Tk's subcommand for this operation is named 'forget'.
        self.tk.call(self._w, 'forget', child)

    # Alias matching the Tk subcommand name.
    forget = remove

    def identify(self, x, y):
        """Identify the panedwindow component at point x, y.

        If the point is over a sash or a sash handle, the result
        is a two element list containing the index of the sash or
        handle, and a word indicating whether it is over a sash
        or a handle, such as {0 sash} or {2 handle}. If the point
        is over any other part of the panedwindow, the result is
        an empty list.
        """
        return self.tk.call(self._w, 'identify', x, y)

    def proxy(self, *args):
        """Internal function: forward a 'proxy' subcommand to Tk."""
        return self._getints(
            self.tk.call((self._w, 'proxy') + args)) or ()

    def proxy_coord(self):
        """Return the x and y pair of the most recent proxy location.
        """
        return self.proxy("coord")

    def proxy_forget(self):
        """Remove the proxy from the display.
        """
        return self.proxy("forget")

    def proxy_place(self, x, y):
        """Place the proxy at the given x and y coordinates.
        """
        return self.proxy("place", x, y)

    def sash(self, *args):
        """Internal function: forward a 'sash' subcommand to Tk."""
        return self._getints(
            self.tk.call((self._w, 'sash') + args)) or ()

    def sash_coord(self, index):
        """Return the current x and y pair for the sash given by index.

        Index must be an integer between 0 and 1 less than the
        number of panes in the panedwindow. The coordinates given are
        those of the top left corner of the region containing the sash.
        pathName sash dragto index x y This command computes the
        difference between the given coordinates and the coordinates
        given to the last sash coord command for the given sash. It then
        moves that sash the computed difference. The return value is the
        empty string.
        """
        return self.sash("coord", index)

    def sash_mark(self, index):
        """Record x and y for the sash given by index;

        Used in conjunction with later dragto commands to move the sash.
        """
        return self.sash("mark", index)

    def sash_place(self, index, x, y):
        """Place the sash given by index at the given coordinates.
        """
        return self.sash("place", index, x, y)

    def panecget(self, child, option):
        """Query a management option for window.

        Option may be any value allowed by the paneconfigure subcommand.
        """
        return self.tk.call(
            (self._w, 'panecget') + (child, '-'+option))

    def paneconfigure(self, tagOrId, cnf=None, **kw):
        """Query or modify the management options for window.

        If no option is specified, returns a list describing all
        of the available options for pathName. If option is
        specified with no value, then the command returns a list
        describing the one named option (this list will be identical
        to the corresponding sublist of the value returned if no
        option is specified). If one or more option-value pairs are
        specified, then the command modifies the given widget
        option(s) to have the given value(s); in this case the
        command returns an empty string. The following options
        are supported:

        after window
            Insert the window after the window specified. window
            should be the name of a window already managed by pathName.
        before window
            Insert the window before the window specified. window
            should be the name of a window already managed by pathName.
        height size
            Specify a height for the window. The height will be the
            outer dimension of the window including its border, if
            any. If size is an empty string, or if -height is not
            specified, then the height requested internally by the
            window will be used initially; the height may later be
            adjusted by the movement of sashes in the panedwindow.
            Size may be any value accepted by Tk_GetPixels.
        minsize n
            Specifies that the size of the window cannot be made
            less than n. This constraint only affects the size of
            the widget in the paned dimension -- the x dimension
            for horizontal panedwindows, the y dimension for
            vertical panedwindows. May be any value accepted by
            Tk_GetPixels.
        padx n
            Specifies a non-negative value indicating how much
            extra space to leave on each side of the window in
            the X-direction. The value may have any of the forms
            accepted by Tk_GetPixels.
        pady n
            Specifies a non-negative value indicating how much
            extra space to leave on each side of the window in
            the Y-direction. The value may have any of the forms
            accepted by Tk_GetPixels.
        sticky style
            If a window's pane is larger than the requested
            dimensions of the window, this option may be used
            to position (or stretch) the window within its pane.
            Style is a string that contains zero or more of the
            characters n, s, e or w. The string can optionally
            contains spaces or commas, but they are ignored. Each
            letter refers to a side (north, south, east, or west)
            that the window will "stick" to. If both n and s
            (or e and w) are specified, the window will be
            stretched to fill the entire height (or width) of
            its cavity.
        width size
            Specify a width for the window. The width will be
            the outer dimension of the window including its
            border, if any. If size is an empty string, or
            if -width is not specified, then the width requested
            internally by the window will be used initially; the
            width may later be adjusted by the movement of sashes
            in the panedwindow. Size may be any value accepted by
            Tk_GetPixels.
        """
        # No option at all: return the full configuration listing.
        if cnf is None and not kw:
            return self._getconfigure(self._w, 'paneconfigure', tagOrId)
        # A single option name (StringType comes from the Python 2 `types`
        # module, imported at the top of this file): return that option.
        if type(cnf) == StringType and not kw:
            return self._getconfigure1(
                self._w, 'paneconfigure', tagOrId, '-'+cnf)
        # Otherwise apply the given option-value pairs.
        self.tk.call((self._w, 'paneconfigure', tagOrId) +
                     self._options(cnf, kw))

    # Short alias kept for backward compatibility.
    paneconfig = paneconfigure

    def panes(self):
        """Returns an ordered list of the child panes."""
        return self.tk.splitlist(self.tk.call(self._w, 'panes'))
######################################################################
# Extensions:
class Studbutton(Button):
    """Extension button variant.

    NOTE(review): calls Widget.__init__ directly with the non-standard
    Tk widget class 'studbutton' (bypassing Button.__init__), so it only
    works if that widget class exists in the Tcl interpreter -- confirm.
    The bindings re-create standard button press/release behaviour.
    """
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'studbutton', cnf, kw)
        self.bind('<Any-Enter>', self.tkButtonEnter)
        self.bind('<Any-Leave>', self.tkButtonLeave)
        self.bind('<1>', self.tkButtonDown)
        self.bind('<ButtonRelease-1>', self.tkButtonUp)
class Tributton(Button):
    """Extension button variant.

    NOTE(review): like Studbutton, relies on a non-standard 'tributton'
    Tk widget class -- confirm it is available before use.
    """
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'tributton', cnf, kw)
        self.bind('<Any-Enter>', self.tkButtonEnter)
        self.bind('<Any-Leave>', self.tkButtonLeave)
        self.bind('<1>', self.tkButtonDown)
        self.bind('<ButtonRelease-1>', self.tkButtonUp)
        # Hide the foreground/active colors by matching them to the
        # background (the widget presumably draws itself).
        self['fg'] = self['bg']
        self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
    """Interactive smoke test: show a window with a label and two buttons."""
    root = Tk()
    text = "This is Tcl/Tk version %s" % TclVersion
    if TclVersion >= 8.1:
        # `unicode` only exists on Python 2; on Python 3 the NameError
        # branch silently skips the cedilla demonstration.
        try:
            text = text + unicode("\nThis should be a cedilla: \347",
                                  "iso-8859-1")
        except NameError:
            pass # no unicode support
    label = Label(root, text=text)
    label.pack()
    test = Button(root, text="Click me!",
                  command=lambda root=root: root.test.configure(
                      text="[%s]" % root.test['text']))
    test.pack()
    root.test = test
    # `quit` shadows the builtin here; harmless in this local scope.
    quit = Button(root, text="QUIT", command=root.destroy)
    quit.pack()
    # The following three commands are needed so the window pops
    # up on top on Windows...
    root.iconify()
    root.update()
    root.deiconify()
    root.mainloop()

if __name__ == '__main__':
    _test()
|
gpl-3.0
| -6,800,216,323,774,395,000
| 40.560995
| 110
| 0.598678
| false
|
brupoon/mustachedNinja
|
any_lowercase_test.py
|
1
|
1080
|
# Chapter 9, Exercise 11: candidate implementations of
# "does s contain any lowercase letter?" -- several are deliberately flawed.
def any_lowercase1(s):
    # FLAW: returns inside the first loop iteration, so this only reports
    # whether the *first* character is lowercase; for an empty string the
    # loop never runs and the function falls through, returning None.
    for c in s:
        if c.islower():
            return True
        else:
            return False
def any_lowercase2(s):
    # FLAW: tests the literal string 'c' (which is always lowercase), not
    # the loop variable, and returns the *strings* 'True'/'False' rather
    # than booleans.  Net effect: returns 'True' for any non-empty s,
    # and None for the empty string.
    for c in s:
        if 'c'.islower():
            return 'True'
        else:
            return 'False'
def any_lowercase3(s):
    # FLAW: flag is rebound on every iteration, so the result reflects only
    # the *last* character of s; also raises NameError for an empty string
    # because flag is never assigned before the return.
    for c in s:
        flag = c.islower()
    return flag
def any_lowercase4(s):
    """Return True if s contains at least one lowercase character.

    This is a correct "any lowercase" implementation (False for '').
    """
    return any(ch.islower() for ch in s)
def any_lowercase5(s):
    """Return True only if *every* character of s is lowercase.

    Note: despite the name this implements "all lowercase", and it
    returns True for the empty string (vacuous truth).
    """
    return all(ch.islower() for ch in s)
if __name__ == '__main__':
    # Exercise harness for any_lowercase2.
    # BUG FIX: any_lowercase2 returns the *string* 'True'/'False', so the
    # original comparison `== True` was always False and every case printed
    # "false" regardless of input.  Compare against the string it returns.
    if any_lowercase2("test") == 'True': print("all lower: true")
    else: print("all lower: false")
    if any_lowercase2("Test") == 'True': print("firstupper: true")
    else: print("firstupper: false")
    if any_lowercase2("tEst") == 'True': print("middleupper: true")
    else: print("middleupper: false")
    if any_lowercase2("TEST") == 'True': print("all upper: true")
    else: print("all upper: false")
|
mit
| 566,311,769,260,755,700
| 23.761905
| 65
| 0.542593
| false
|
mpunkenhofer/irc-telegram-bot
|
telepot/telepot/aio/__init__.py
|
1
|
26958
|
import io
import json
import time
import asyncio
import traceback
import collections
from concurrent.futures._base import CancelledError
from . import helper, api
from .. import _BotBase, flavor, _find_first_key, _isstring, _dismantle_message_identifier, _strip, _rectify
# Patch aiohttp for sending unicode filename
from . import hack
from .. import exception
def flavor_router(routing_table):
    """Build a routing function that dispatches each message by its
    flavor according to *routing_table* ({flavor: handler})."""
    return helper.Router(flavor, routing_table).route
class Bot(_BotBase):
class Scheduler(object):
def __init__(self, loop):
self._loop = loop
self._callback = None
def event_at(self, when, data):
delay = when - time.time()
return self._loop.call_later(delay, self._callback, data)
# call_at() uses event loop time, not unix time.
# May as well use call_later here.
def event_later(self, delay, data):
return self._loop.call_later(delay, self._callback, data)
def event_now(self, data):
return self._loop.call_soon(self._callback, data)
def cancel(self, event):
return event.cancel()
def __init__(self, token, loop=None):
super(Bot, self).__init__(token)
self._loop = loop if loop is not None else asyncio.get_event_loop()
self._scheduler = self.Scheduler(self._loop)
self._router = helper.Router(flavor, {'chat': helper._delay_yell(self, 'on_chat_message'),
'callback_query': helper._delay_yell(self, 'on_callback_query'),
'inline_query': helper._delay_yell(self, 'on_inline_query'),
'chosen_inline_result': helper._delay_yell(self, 'on_chosen_inline_result')})
@property
def loop(self):
return self._loop
@property
def scheduler(self):
return self._scheduler
@property
def router(self):
return self._router
async def handle(self, msg):
await self._router.route(msg)
async def _api_request(self, method, params=None, files=None, **kwargs):
return await api.request((self._token, method, params, files), **kwargs)
async def getMe(self):
""" See: https://core.telegram.org/bots/api#getme """
return await self._api_request('getMe')
async def sendMessage(self, chat_id, text,
parse_mode=None, disable_web_page_preview=None,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
""" See: https://core.telegram.org/bots/api#sendmessage """
p = _strip(locals())
return await self._api_request('sendMessage', _rectify(p))
async def forwardMessage(self, chat_id, from_chat_id, message_id, disable_notification=None):
""" See: https://core.telegram.org/bots/api#forwardmessage """
p = _strip(locals())
return await self._api_request('forwardMessage', _rectify(p))
async def _sendfile(self, inputfile, filetype, params):
method = {'photo': 'sendPhoto',
'audio': 'sendAudio',
'document': 'sendDocument',
'sticker': 'sendSticker',
'video': 'sendVideo',
'voice': 'sendVoice',}[filetype]
if _isstring(inputfile):
params[filetype] = inputfile
return await self._api_request(method, _rectify(params))
else:
files = {filetype: inputfile}
return await self._api_request(method, _rectify(params), files)
async def sendPhoto(self, chat_id, photo,
caption=None,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#sendphoto
:param photo:
a string indicating a ``file_id`` on server,
a file-like object as obtained by ``open()`` or ``urlopen()``,
or a (filename, file-like object) tuple.
If the file-like object is obtained by ``urlopen()``, you most likely
have to supply a filename because Telegram servers require to know
the file extension.
If the filename contains non-ASCII characters and you are using Python 2.7,
make sure the filename is a unicode string.
"""
p = _strip(locals(), more=['photo'])
return await self._sendfile(photo, 'photo', p)
async def sendAudio(self, chat_id, audio,
caption=None, duration=None, performer=None, title=None,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#sendaudio
:param audio: Same as ``photo`` in :meth:`telepot.aio.Bot.sendPhoto`
"""
p = _strip(locals(), more=['audio'])
return await self._sendfile(audio, 'audio', p)
async def sendDocument(self, chat_id, document,
caption=None,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#senddocument
:param document: Same as ``photo`` in :meth:`telepot.aio.Bot.sendPhoto`
"""
p = _strip(locals(), more=['document'])
return await self._sendfile(document, 'document', p)
async def sendSticker(self, chat_id, sticker,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#sendsticker
:param sticker: Same as ``photo`` in :meth:`telepot.aio.Bot.sendPhoto`
"""
p = _strip(locals(), more=['sticker'])
return await self._sendfile(sticker, 'sticker', p)
async def sendVideo(self, chat_id, video,
duration=None, width=None, height=None, caption=None,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#sendvideo
:param video: Same as ``photo`` in :meth:`telepot.aio.Bot.sendPhoto`
"""
p = _strip(locals(), more=['video'])
return await self._sendfile(video, 'video', p)
async def sendVoice(self, chat_id, voice,
caption=None, duration=None,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#sendvoice
:param voice: Same as ``photo`` in :meth:`telepot.aio.Bot.sendPhoto`
"""
p = _strip(locals(), more=['voice'])
return await self._sendfile(voice, 'voice', p)
async def sendLocation(self, chat_id, latitude, longitude,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
""" See: https://core.telegram.org/bots/api#sendlocation """
p = _strip(locals())
return await self._api_request('sendLocation', _rectify(p))
async def sendVenue(self, chat_id, latitude, longitude, title, address,
foursquare_id=None,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
""" See: https://core.telegram.org/bots/api#sendvenue """
p = _strip(locals())
return await self._api_request('sendVenue', _rectify(p))
async def sendContact(self, chat_id, phone_number, first_name,
last_name=None,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
""" See: https://core.telegram.org/bots/api#sendcontact """
p = _strip(locals())
return await self._api_request('sendContact', _rectify(p))
async def sendGame(self, chat_id, game_short_name,
disable_notification=None, reply_to_message_id=None, reply_markup=None):
""" See: https://core.telegram.org/bots/api#sendgame """
p = _strip(locals())
return await self._api_request('sendGame', _rectify(p))
async def sendChatAction(self, chat_id, action):
""" See: https://core.telegram.org/bots/api#sendchataction """
p = _strip(locals())
return await self._api_request('sendChatAction', _rectify(p))
async def getUserProfilePhotos(self, user_id, offset=None, limit=None):
""" See: https://core.telegram.org/bots/api#getuserprofilephotos """
p = _strip(locals())
return await self._api_request('getUserProfilePhotos', _rectify(p))
async def getFile(self, file_id):
""" See: https://core.telegram.org/bots/api#getfile """
p = _strip(locals())
return await self._api_request('getFile', _rectify(p))
async def kickChatMember(self, chat_id, user_id):
""" See: https://core.telegram.org/bots/api#kickchatmember """
p = _strip(locals())
return await self._api_request('kickChatMember', _rectify(p))
async def leaveChat(self, chat_id):
""" See: https://core.telegram.org/bots/api#leavechat """
p = _strip(locals())
return await self._api_request('leaveChat', _rectify(p))
async def unbanChatMember(self, chat_id, user_id):
""" See: https://core.telegram.org/bots/api#unbanchatmember """
p = _strip(locals())
return await self._api_request('unbanChatMember', _rectify(p))
async def getChat(self, chat_id):
""" See: https://core.telegram.org/bots/api#getchat """
p = _strip(locals())
return await self._api_request('getChat', _rectify(p))
async def getChatAdministrators(self, chat_id):
""" See: https://core.telegram.org/bots/api#getchatadministrators """
p = _strip(locals())
return await self._api_request('getChatAdministrators', _rectify(p))
async def getChatMembersCount(self, chat_id):
""" See: https://core.telegram.org/bots/api#getchatmemberscount """
p = _strip(locals())
return await self._api_request('getChatMembersCount', _rectify(p))
async def getChatMember(self, chat_id, user_id):
""" See: https://core.telegram.org/bots/api#getchatmember """
p = _strip(locals())
return await self._api_request('getChatMember', _rectify(p))
async def answerCallbackQuery(self, callback_query_id,
text=None, show_alert=None, url=None, cache_time=None):
""" See: https://core.telegram.org/bots/api#answercallbackquery """
p = _strip(locals())
return await self._api_request('answerCallbackQuery', _rectify(p))
async def editMessageText(self, msg_identifier, text,
parse_mode=None, disable_web_page_preview=None, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#editmessagetext
:param msg_identifier:
a 2-tuple (``chat_id``, ``message_id``),
a 1-tuple (``inline_message_id``),
or simply ``inline_message_id``.
You may extract this value easily with :meth:`telepot.message_identifier`
"""
p = _strip(locals(), more=['msg_identifier'])
p.update(_dismantle_message_identifier(msg_identifier))
return await self._api_request('editMessageText', _rectify(p))
async def editMessageCaption(self, msg_identifier, caption=None, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#editmessagecaption
:param msg_identifier: Same as ``msg_identifier`` in :meth:`telepot.aio.Bot.editMessageText`
"""
p = _strip(locals(), more=['msg_identifier'])
p.update(_dismantle_message_identifier(msg_identifier))
return await self._api_request('editMessageCaption', _rectify(p))
async def editMessageReplyMarkup(self, msg_identifier, reply_markup=None):
"""
See: https://core.telegram.org/bots/api#editmessagereplymarkup
:param msg_identifier: Same as ``msg_identifier`` in :meth:`telepot.aio.Bot.editMessageText`
"""
p = _strip(locals(), more=['msg_identifier'])
p.update(_dismantle_message_identifier(msg_identifier))
return await self._api_request('editMessageReplyMarkup', _rectify(p))
async def answerInlineQuery(self, inline_query_id, results,
cache_time=None, is_personal=None, next_offset=None,
switch_pm_text=None, switch_pm_parameter=None):
""" See: https://core.telegram.org/bots/api#answerinlinequery """
p = _strip(locals())
return await self._api_request('answerInlineQuery', _rectify(p))
async def getUpdates(self, offset=None, limit=None, timeout=None, allowed_updates=None):
""" See: https://core.telegram.org/bots/api#getupdates """
p = _strip(locals())
return await self._api_request('getUpdates', _rectify(p))
async def setWebhook(self, url=None, certificate=None, max_connections=None, allowed_updates=None):
""" See: https://core.telegram.org/bots/api#setwebhook """
p = _strip(locals(), more=['certificate'])
if certificate:
files = {'certificate': certificate}
return await self._api_request('setWebhook', _rectify(p), files)
else:
return await self._api_request('setWebhook', _rectify(p))
async def deleteWebhook(self):
""" See: https://core.telegram.org/bots/api#deletewebhook """
return await self._api_request('deleteWebhook')
async def getWebhookInfo(self):
""" See: https://core.telegram.org/bots/api#getwebhookinfo """
return await self._api_request('getWebhookInfo')
async def setGameScore(self, user_id, score, game_message_identifier,
force=None, disable_edit_message=None):
""" See: https://core.telegram.org/bots/api#setgamescore """
p = _strip(locals(), more=['game_message_identifier'])
p.update(_dismantle_message_identifier(game_message_identifier))
return await self._api_request('setGameScore', _rectify(p))
async def getGameHighScores(self, user_id, game_message_identifier):
""" See: https://core.telegram.org/bots/api#getgamehighscores """
p = _strip(locals(), more=['game_message_identifier'])
p.update(_dismantle_message_identifier(game_message_identifier))
return await self._api_request('getGameHighScores', _rectify(p))
    async def download_file(self, file_id, dest):
        """
        Download a file to local disk.

        :param file_id: Telegram file identifier, resolved via ``getFile``.
        :param dest: a path or a ``file`` object
        """
        f = await self.getFile(file_id)

        try:
            # Open only if a path was given; a file-like dest is used as-is.
            d = dest if isinstance(dest, io.IOBase) else open(dest, 'wb')

            async with api.download((self._token, f['file_path'])) as r:
                # Stream the body in fixed-size chunks to bound memory use.
                while 1:
                    chunk = await r.content.read(self._file_chunk_size)
                    if not chunk:
                        break

                    d.write(chunk)
                    d.flush()
        finally:
            # Close only files we opened ourselves; the 'd' in locals()
            # check guards against open() itself having raised above.
            if not isinstance(dest, io.IOBase) and 'd' in locals():
                d.close()
async def message_loop(self, handler=None, relax=0.1,
timeout=20, allowed_updates=None,
source=None, ordered=True, maxhold=3):
"""
Return a task to constantly ``getUpdates`` or pull updates from a queue.
Apply ``handler`` to every message received.
:param handler:
a function that takes one argument (the message), or a routing table.
If ``None``, the bot's ``handle`` method is used.
A *routing table* is a dictionary of ``{flavor: function}``, mapping messages to appropriate
handler functions according to their flavors. It allows you to define functions specifically
to handle one flavor of messages. It usually looks like this: ``{'chat': fn1,
'callback_query': fn2, 'inline_query': fn3, ...}``. Each handler function should take
one argument (the message).
:param source:
Source of updates.
If ``None``, ``getUpdates`` is used to obtain new messages from Telegram servers.
If it is a ``asyncio.Queue``, new messages are pulled from the queue.
A web application implementing a webhook can dump updates into the queue,
while the bot pulls from it. This is how telepot can be integrated with webhooks.
Acceptable contents in queue:
- ``str`` or ``bytes`` (decoded using UTF-8)
representing a JSON-serialized `Update <https://core.telegram.org/bots/api#update>`_ object.
- a ``dict`` representing an Update object.
When ``source`` is a queue, these parameters are meaningful:
:type ordered: bool
:param ordered:
If ``True``, ensure in-order delivery of messages to ``handler``
(i.e. updates with a smaller ``update_id`` always come before those with
a larger ``update_id``).
If ``False``, no re-ordering is done. ``handler`` is applied to messages
as soon as they are pulled from queue.
:type maxhold: float
:param maxhold:
Applied only when ``ordered`` is ``True``. The maximum number of seconds
an update is held waiting for a not-yet-arrived smaller ``update_id``.
When this number of seconds is up, the update is delivered to ``handler``
even if some smaller ``update_id``\s have not yet arrived. If those smaller
``update_id``\s arrive at some later time, they are discarded.
:type timeout: int
:param timeout:
``timeout`` parameter supplied to :meth:`telepot.aio.Bot.getUpdates`,
controlling how long to poll in seconds.
:type allowed_updates: array of string
:param allowed_updates:
``allowed_updates`` parameter supplied to :meth:`telepot.aio.Bot.getUpdates`,
controlling which types of updates to receive.
"""
if handler is None:
handler = self.handle
elif isinstance(handler, dict):
handler = flavor_router(handler)
def create_task_for(msg):
self.loop.create_task(handler(msg))
if asyncio.iscoroutinefunction(handler):
callback = create_task_for
else:
callback = handler
def handle(update):
try:
key = _find_first_key(update, ['message',
'edited_message',
'channel_post',
'edited_channel_post',
'callback_query',
'inline_query',
'chosen_inline_result'])
callback(update[key])
except:
# Localize the error so message thread can keep going.
traceback.print_exc()
finally:
return update['update_id']
async def get_from_telegram_server():
offset = None # running offset
allowed_upd = allowed_updates
while 1:
try:
result = await self.getUpdates(offset=offset,
timeout=timeout,
allowed_updates=allowed_upd)
# Once passed, this parameter is no longer needed.
allowed_upd = None
if len(result) > 0:
# No sort. Trust server to give messages in correct order.
# Update offset to max(update_id) + 1
offset = max([handle(update) for update in result]) + 1
except CancelledError:
raise
except exception.BadHTTPResponse as e:
traceback.print_exc()
# Servers probably down. Wait longer.
if e.status == 502:
await asyncio.sleep(30)
except:
traceback.print_exc()
await asyncio.sleep(relax)
else:
await asyncio.sleep(relax)
def dictify(data):
if type(data) is bytes:
return json.loads(data.decode('utf-8'))
elif type(data) is str:
return json.loads(data)
elif type(data) is dict:
return data
else:
raise ValueError()
async def get_from_queue_unordered(qu):
while 1:
try:
data = await qu.get()
update = dictify(data)
handle(update)
except:
traceback.print_exc()
async def get_from_queue(qu):
# Here is the re-ordering mechanism, ensuring in-order delivery of updates.
max_id = None # max update_id passed to callback
buffer = collections.deque() # keep those updates which skip some update_id
qwait = None # how long to wait for updates,
# because buffer's content has to be returned in time.
while 1:
try:
data = await asyncio.wait_for(qu.get(), qwait)
update = dictify(data)
if max_id is None:
# First message received, handle regardless.
max_id = handle(update)
elif update['update_id'] == max_id + 1:
# No update_id skipped, handle naturally.
max_id = handle(update)
# clear contagious updates in buffer
if len(buffer) > 0:
buffer.popleft() # first element belongs to update just received, useless now.
while 1:
try:
if type(buffer[0]) is dict:
max_id = handle(buffer.popleft()) # updates that arrived earlier, handle them.
else:
break # gap, no more contagious updates
except IndexError:
break # buffer empty
elif update['update_id'] > max_id + 1:
# Update arrives pre-maturely, insert to buffer.
nbuf = len(buffer)
if update['update_id'] <= max_id + nbuf:
# buffer long enough, put update at position
buffer[update['update_id'] - max_id - 1] = update
else:
# buffer too short, lengthen it
expire = time.time() + maxhold
for a in range(nbuf, update['update_id']-max_id-1):
buffer.append(expire) # put expiry time in gaps
buffer.append(update)
else:
pass # discard
except asyncio.TimeoutError:
# debug message
# print('Timeout')
# some buffer contents have to be handled
# flush buffer until a non-expired time is encountered
while 1:
try:
if type(buffer[0]) is dict:
max_id = handle(buffer.popleft())
else:
expire = buffer[0]
if expire <= time.time():
max_id += 1
buffer.popleft()
else:
break # non-expired
except IndexError:
break # buffer empty
except:
traceback.print_exc()
finally:
try:
# don't wait longer than next expiry time
qwait = buffer[0] - time.time()
if qwait < 0:
qwait = 0
except IndexError:
# buffer empty, can wait forever
qwait = None
# debug message
# print ('Buffer:', str(buffer), ', To Wait:', qwait, ', Max ID:', max_id)
self._scheduler._callback = callback
if source is None:
await get_from_telegram_server()
elif isinstance(source, asyncio.Queue):
if ordered:
await get_from_queue(source)
else:
await get_from_queue_unordered(source)
else:
raise ValueError('Invalid source')
class SpeakerBot(Bot):
    """A Bot equipped with a Microphone, so every handled message can be
    broadcast to any number of attached Listeners."""

    def __init__(self, token, loop=None):
        super(SpeakerBot, self).__init__(token, loop)
        self._mic = helper.Microphone()

    @property
    def mic(self):
        return self._mic

    def create_listener(self):
        """Create and return a Listener fed by this bot's microphone."""
        q = asyncio.Queue()
        self._mic.add(q)
        ln = helper.Listener(self._mic, q)
        return ln
class DelegatorBot(SpeakerBot):
    """A SpeakerBot that spawns per-seed delegate tasks for each message."""

    def __init__(self, token, delegation_patterns, loop=None):
        """
        :param delegation_patterns: a list of (seeder, delegator) tuples.
        """
        super(DelegatorBot, self).__init__(token, loop)
        # Extend each pattern with a per-pattern task registry:
        # (seeder, delegator, {seed: task})
        self._delegate_records = [p+({},) for p in delegation_patterns]

    def handle(self, msg):
        """Broadcast *msg* to listeners, then spawn or reuse delegate tasks.

        For each (seeder, delegator, tasks) record:
        - seed is None: this pattern ignores the message;
        - seed is hashable: keep at most one live task per seed, creating
          a new one only if none exists or the previous one is done;
        - seed is unhashable: always spawn a fresh task.
        """
        self._mic.send(msg)

        # Renamed locals: the original shadowed the builtins `dict` and `id`.
        for calculate_seed, make_coroutine_obj, tasks in self._delegate_records:
            seed = calculate_seed(msg)

            if seed is None:
                continue
            # NOTE(review): collections.Hashable is a deprecated alias that
            # was removed in Python 3.10; migrate to collections.abc.Hashable
            # when the supported Python range allows.
            elif isinstance(seed, collections.Hashable):
                if seed not in tasks or tasks[seed].done():
                    c = make_coroutine_obj((self, msg, seed))

                    if not asyncio.iscoroutine(c):
                        raise RuntimeError('You must produce a coroutine *object* as delegate.')

                    tasks[seed] = self._loop.create_task(c)
            else:
                c = make_coroutine_obj((self, msg, seed))
                self._loop.create_task(c)
|
mit
| 7,597,339,703,148,460,000
| 41.655063
| 123
| 0.545664
| false
|
amcat/amcat
|
api/rest/viewsets/coding/codingschemafield.py
|
1
|
2536
|
###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
from rest_framework.viewsets import ReadOnlyModelViewSet
from amcat.models import CodingSchemaField
from api.rest.mixins import DatatablesMixin
from api.rest.serializer import AmCATModelSerializer
from api.rest.viewset import AmCATViewSetMixin
from api.rest.viewsets.project import ProjectViewSetMixin
__all__ = ("CodingSchemaFieldViewSetMixin", "CodingSchemaFieldSerializer", "CodingSchemaFieldViewSet")
class CodingSchemaFieldSerializer(AmCATModelSerializer):
    """Serializer exposing all model fields of a CodingSchemaField."""
    class Meta:
        model = CodingSchemaField
        fields = '__all__'
class CodingSchemaFieldViewSetMixin(AmCATViewSetMixin):
    """Mixin declaring the URL key and model for codingschemafield viewsets."""
    model_key = "codingschemafield"
    model = CodingSchemaField
class CodingSchemaFieldViewSet(ProjectViewSetMixin, CodingSchemaFieldViewSetMixin, DatatablesMixin, ReadOnlyModelViewSet):
    """Read-only API endpoint listing the codingschema fields visible to a
    project."""
    model = CodingSchemaField
    queryset = CodingSchemaField.objects.all()
    serializer_class = CodingSchemaFieldSerializer
    ordering_fields = ("id", "fieldnr", "name")

    def filter_queryset(self, queryset):
        """Restrict fields to those of codingschemas available in the
        current project (True includes schemas linked from other projects).

        Parameter renamed from the misleading `fields` to the DRF-standard
        `queryset` (it is a queryset, not a field list); DRF calls this
        method positionally, so callers are unaffected.
        """
        queryset = super(CodingSchemaFieldViewSet, self).filter_queryset(queryset)
        return queryset.filter(codingschema__in=self.project.get_codingschemas(True))
|
agpl-3.0
| 930,094,134,368,315,800
| 54.130435
| 122
| 0.594637
| false
|
kamsuri/vms
|
vms/pom/pages/basePage.py
|
1
|
1809
|
class BasePage(object):
    """Shared Selenium conveniences for all page objects.

    Wraps the most common ``driver`` lookups so concrete page classes do not
    talk to the WebDriver API directly.  The ``elements_by_*`` helpers return
    None instead of an empty list when nothing matches.
    """

    def __init__(self, driver):
        self.driver = driver

    # --- navigation --------------------------------------------------------

    def get_page(self, base, relative_url):
        """Open ``base + relative_url`` in the browser."""
        self.driver.get(base + relative_url)

    # --- sending input -----------------------------------------------------

    def send_value_to_element_id(self, key, value):
        """Type *value* into the element whose DOM id is *key*."""
        self.driver.find_element_by_id(key).send_keys(value)

    def send_value_to_xpath(self, key, value):
        """Type *value* into the first element matching the XPath *key*."""
        self.driver.find_element_by_xpath(key).send_keys(value)

    # --- single-element lookups --------------------------------------------

    def element_by_id(self, id_name):
        """Return the element with DOM id *id_name*."""
        return self.driver.find_element_by_id(id_name)

    def element_by_xpath(self, path):
        """Return the first element matching the XPath *path*."""
        return self.driver.find_element_by_xpath(path)

    def element_by_class_name(self, class_name):
        """Return the first element carrying CSS class *class_name*."""
        return self.driver.find_element_by_class_name(class_name)

    def element_by_tag_name(self, tag):
        """Return the first element with tag name *tag*."""
        return self.driver.find_element_by_tag_name(tag)

    def find_element_by_css_selector(self, selector):
        """Return the first element matching the CSS *selector*."""
        return self.driver.find_element_by_css_selector(selector)

    # --- multi-element lookups ---------------------------------------------

    def elements_by_xpath(self, path):
        """Return all elements matching XPath *path*, or None when none match."""
        return self.driver.find_elements_by_xpath(path) or None

    def elements_by_class_name(self, class_name):
        """Return all elements with CSS class *class_name*, or None when none match."""
        return self.driver.find_elements_by_class_name(class_name) or None

    # --- links -------------------------------------------------------------

    def click_link(self, link_text):
        """Click the link whose visible text is *link_text*."""
        self.driver.find_element_by_link_text(link_text).click()

    def find_link(self, link_text):
        """Return the link with visible text *link_text* (None when falsy)."""
        return self.driver.find_element_by_link_text(link_text) or None

    # --- form values -------------------------------------------------------

    def get_value_for(self, field):
        """Return the 'value' attribute of the element with DOM id *field*."""
        return self.driver.find_element_by_id(field).get_attribute('value')

    def get_value_for_xpath(self, xpath):
        """Return the 'value' attribute of the first element matching *xpath*."""
        return self.driver.find_element_by_xpath(xpath).get_attribute('value')
|
gpl-2.0
| 2,963,434,992,951,286,300
| 35.18
| 83
| 0.674406
| false
|
anthonysandrin/kafka-utils
|
tests/acceptance/steps/util.py
|
1
|
4458
|
# -*- coding: utf-8 -*-
# Copyright 2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import time
import uuid
from kafka import KafkaConsumer
from kafka import SimpleProducer
from kafka.common import LeaderNotAvailableError
from kafka_utils.util import config
from kafka_utils.util.client import KafkaToolClient
from kafka_utils.util.offsets import set_consumer_offsets
ZOOKEEPER_URL = 'zookeeper:2181'
KAFKA_URL = 'kafka:9092'
def get_cluster_config():
    """Load the acceptance-test cluster configuration.

    Reads cluster 'test_cluster' of cluster-type 'test' from the config
    files under tests/acceptance/config.
    """
    return config.get_cluster_config(
        'test',
        'test_cluster',
        'tests/acceptance/config',
    )
def create_topic(topic_name, replication_factor, partitions):
    """Create a Kafka topic through the kafka-topics.sh CLI.

    Raises CalledProcessError when the CLI exits non-zero.  Sleeps one
    second afterwards because topic creation is asynchronous and the topic
    may not be immediately writable.
    """
    command = [
        'kafka-topics.sh', '--create',
        '--zookeeper', ZOOKEEPER_URL,
        '--replication-factor', str(replication_factor),
        '--partitions', str(partitions),
        '--topic', topic_name,
    ]
    subprocess.check_call(command)
    # It may take a little moment for the topic to be ready for writing.
    time.sleep(1)
def list_topics():
    """Return the output of ``kafka-topics.sh --list`` for the test cluster."""
    return call_cmd(['kafka-topics.sh', '--list', '--zookeeper', ZOOKEEPER_URL])
def delete_topic(topic_name):
    """Delete *topic_name* through the kafka-topics.sh CLI; return its output."""
    return call_cmd([
        'kafka-topics.sh', '--delete',
        '--zookeeper', ZOOKEEPER_URL,
        '--topic', topic_name,
    ])
def call_cmd(cmd):
    """Run *cmd*, answering any prompt with 'y', and return stdout+stderr as one string.

    stdout is concatenated before stderr.  NOTE(review): Popen/communicate
    never raise CalledProcessError (only check_call/check_output do), so the
    except branch below is effectively dead; a missing executable raises
    OSError instead -- confirm the intended error handling.
    """
    output = ''
    try:
        p = subprocess.Popen(
            cmd,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        # Feed 'y' so interactive confirmation prompts do not block the test.
        out, err = p.communicate('y')
        if out:
            output += out
        if err:
            output += err
    except subprocess.CalledProcessError as e:
        output += e.output
    return output
def create_random_topic(replication_factor, partitions, topic_name=None):
    """Create a topic (UUID-named unless *topic_name* is given) and return its name."""
    topic_name = topic_name or str(uuid.uuid1())
    create_topic(topic_name, replication_factor, partitions)
    return topic_name
def create_random_group_id():
    """Return a fresh, globally unique consumer-group id string."""
    group_id = uuid.uuid1()
    return str(group_id)
def produce_example_msg(topic, num_messages=1):
    """Publish *num_messages* fixed test messages to *topic*.

    Retries each send once after a 10 second pause when the freshly created
    topic has no leader assigned yet.
    """
    kafka = KafkaToolClient(KAFKA_URL)
    producer = SimpleProducer(kafka)
    for i in xrange(num_messages):
        try:
            producer.send_messages(topic, b'some message')
        except LeaderNotAvailableError:
            # Sometimes kafka takes a bit longer to assign a leader to a new
            # topic
            time.sleep(10)
            producer.send_messages(topic, b'some message')
def create_consumer_group(topic, group_name, num_messages=1):
    """Register consumer group *group_name* by consuming and committing offsets.

    Consumes *num_messages* from the beginning of *topic*, marks them done
    and commits, which makes the group known to the cluster.  Returns the
    still-open KafkaConsumer so the caller can inspect or close it.
    """
    consumer = KafkaConsumer(
        topic,
        group_id=group_name,
        auto_commit_enable=False,
        bootstrap_servers=[KAFKA_URL],
        auto_offset_reset='smallest')
    for i in xrange(num_messages):
        message = consumer.next()
        consumer.task_done(message)
    consumer.commit()
    return consumer
def call_watermark_get(topic_name, storage=None):
    """Run ``kafka-consumer-manager get_topic_watermark`` and return its output.

    NOTE(review): the *storage* parameter is accepted but never used here
    (unlike call_offset_get) -- confirm whether it should be forwarded.
    """
    cmd = ['kafka-consumer-manager',
           '--cluster-type', 'test',
           '--discovery-base-path', 'tests/acceptance/config',
           'get_topic_watermark', topic_name]
    return call_cmd(cmd)
def call_offset_get(group, storage=None, json=False):
    """Run ``kafka-consumer-manager offset_get`` for *group*; return its output.

    Optional *storage* and *json* translate into the matching CLI flags.
    """
    cmd = [
        'kafka-consumer-manager',
        '--cluster-type', 'test',
        '--cluster-name', 'test_cluster',
        '--discovery-base-path', 'tests/acceptance/config',
        'offset_get',
        group,
    ]
    if storage:
        cmd += ['--storage', storage]
    if json:
        cmd += ['--json']
    return call_cmd(cmd)
def initialize_kafka_offsets_topic():
    """Force creation of Kafka's internal __consumer_offsets topic.

    Kafka creates that topic lazily on the first offset commit, so commit a
    dummy offset for a throwaway topic/group and wait for it to materialize.
    No-op when the topic already exists.
    """
    if '__consumer_offsets' in list_topics():
        return
    topic = create_random_topic(1, 1)
    produce_example_msg(topic, num_messages=1)
    kafka = KafkaToolClient(KAFKA_URL)
    set_consumer_offsets(
        kafka,
        create_random_group_id(),
        {topic: {0: 0}},
        offset_storage='kafka',
        raise_on_error=False,
    )
    # Give the broker time to create and replicate the offsets topic.
    time.sleep(20)
|
apache-2.0
| -3,114,643,107,766,294,500
| 27.037736
| 76
| 0.631673
| false
|
GETLIMS/LIMS-Backend
|
lims/shared/migrations/0005_auto_20180301_0958.py
|
1
|
1248
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-03-01 09:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: newest-first default ordering for the shared
    models, plus a default email title on TriggerSet."""

    dependencies = [
        ('shared', '0004_trigger_fire_on_create'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='organism',
            options={'ordering': ['-id']},
        ),
        migrations.AlterModelOptions(
            name='trigger',
            options={'ordering': ['-id']},
        ),
        migrations.AlterModelOptions(
            name='triggeralert',
            options={'ordering': ['-id']},
        ),
        migrations.AlterModelOptions(
            name='triggeralertstatus',
            options={'ordering': ['-id']},
        ),
        migrations.AlterModelOptions(
            name='triggerset',
            options={'ordering': ['-id']},
        ),
        migrations.AlterModelOptions(
            name='triggersubscription',
            options={'ordering': ['-id']},
        ),
        migrations.AlterField(
            model_name='triggerset',
            name='email_title',
            field=models.CharField(default='Alert from Leaf LIMS', max_length=255),
        ),
    ]
|
mit
| 1,684,550,303,918,089,200
| 27.363636
| 83
| 0.532853
| false
|
qualitio/qualitio
|
qualitio/core/views.py
|
1
|
9056
|
import operator
from mptt.models import MPTTModel
from reversion.models import Version
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.db.models.loading import get_model
from django.views.generic.simple import direct_to_template
from qualitio.core.utils import json_response
def to_tree_element(object, type):
    """Convert a model instance into a jsTree node dict.

    *type* becomes the node's 'rel' attribute and part of its DOM id
    ("<pk>_<type>").  MPTT nodes that have children (or a non-empty optional
    'subchildren' relation) are marked 'closed' so jsTree lazy-loads them.
    """
    tree_element = {'attr': {'id': "%s_%s" % (object.pk, type),
                             'rel': type},
                    'data': object.name}
    if isinstance(object, MPTTModel):
        try:
            # 'subchildren' is optional; when absent the inner getattr yields
            # None and calling it raises the TypeError handled below.
            subchildren = getattr(getattr(object, "subchildren", None), "all", None)()
        except TypeError: # Not really good idea, slow typecheck?
            subchildren = None
        if object.get_children() or subchildren:
            tree_element['state'] = "closed"
    return tree_element
@json_response
def get_children(request, directory, *args, **kwargs):
    """Return the jsTree child nodes of the directory given by request.GET['id'].

    Children are the directory's sub-directories plus its optional
    'subchildren' relation, all rendered via to_tree_element.  Falls back to
    the tree's root nodes when the id is missing or does not resolve.
    """
    data = []
    try:
        node_id = int(request.GET.get('id', 0))
        node = directory.objects.get(pk=node_id)
        directories = node.children.order_by('name')
        data = map(lambda x: to_tree_element(x, x._meta.module_name), directories)
        try:
            subchildren = getattr(node, "subchildren", None)
            subchildren = getattr(subchildren, "order_by", lambda *a, **k: None)('name')
            # BUG FIX: this used to append() the sub-children as one nested
            # list, producing [..., [node, node]]; extend() keeps the payload
            # a flat list of tree elements as jsTree expects.
            data.extend(map(lambda x: to_tree_element(x, x._meta.module_name), subchildren))
        except TypeError: # Not really good idea, slow typecheck?
            pass
    except (ObjectDoesNotExist, ValueError):
        # No/invalid id: serve the root level of the tree.
        # TODO: maybe the better way is to override method 'root_nodes' on manager
        directories = directory.tree.root_nodes().order_by('name')
        data = map(lambda x: to_tree_element(x, x._meta.module_name),
                   directories)
    return data
@json_response
def get_ancestors(request, app, *args, **kwargs):
    """Return the jsTree ids of an object's ancestor chain plus the object itself.

    GET params: 'type' (model name within *app*) and 'id' (pk).  MPTT
    directories use their own ancestor chain; other objects use their parent
    directory's chain followed by the parent.
    """
    Model = get_model(app, request.GET['type'])
    object = Model.objects.get(pk=request.GET['id'])
    ancestors = []
    if isinstance(object, MPTTModel): # directory?
        ancestors = object.get_ancestors()
    else:
        if object.parent:
            ancestors = list(object.parent.get_ancestors())
            ancestors.extend([object.parent])
    # Ids use the same "<pk>_<module_name>" scheme as to_tree_element.
    return {"nodes": map(lambda x: '%s_%s' % (x.pk, x._meta.module_name), ancestors),
            "target": "%s_%s" % (object.pk, object._meta.module_name)}
def history(request, object_id, Model, *args, **kwargs):
    """Render the django-reversion history page for one *Model* instance."""
    object = Model.objects.get(pk=object_id)
    versions = Version.objects.get_for_object(object)
    return direct_to_template(request, 'core/history.html',
                              {'object': object,
                               'name' : object._meta.object_name,
                               'versions' : versions})
def permission_required(request, *args, **kwargs):
    """Render the 'permission required' page shown on authorization failure."""
    return direct_to_template(request, 'core/permission_required.html')
# Global registry of menu entries, keyed by the object they attach to.
registry = {}

def menu_view(_object, view_name, role="", index=-1, *args, **kwargs):
    """Decorator factory that registers *view_name* (with *role*) for *_object*.

    The returned decorator wraps the view function unchanged; registration is
    a side effect of calling menu_view itself.  NOTE(review): for negative
    *index* the computed position len(registry)-index+1 lands past the end of
    the entry list, so insert() degenerates to an append (as the inline
    comment says) -- confirm before relying on explicit *index* values.
    """
    if index < 0:
        index = len(registry)-index+1 # this is only append
    if registry.has_key(_object):
        registry[_object].insert(index, dict(name=view_name, role=role))
    else:
        registry[_object] = [dict(name=view_name, role=role)]
    def _menu_view(function):
        def __menu_view(*args, **kw):
            return function(*args, **kw)
        return __menu_view
    return _menu_view
# jQuery DataTable's ajax params helper #################
class DataTableColumn(object):
    """
    Column that represents jQuery DataTable column.
    The main responsibility is to create criteria (django ``Q`` objects)
    for each client-side table column.
    Params:
     - ``name``
       name of models *attribute* (not the column label or name); if column have
       information from ForeignKey field then use normal django-orm-like queries strings,
       eg. for store.TestCase.requirement attribute the column name should be:
       ``requirement__name``.
     - ``is_searchable``
       think it's self-descibing
     - ``search``
       the search query, should contains query specified by a user; if query is NOT defined
       for specific column, the search query from DataTables search field is used.
     - ``search_is_regex``
       tells column object should tread serach query as regex pattern
    Used internally by DataTable class.
    """
    def __init__(self, name=None, is_searchable=False, search=None, search_is_regex=None):
        self.name = name
        self.is_searchable = is_searchable
        self.search = search
        self.search_is_regex = search_is_regex

    def search_key(self):
        # Regex queries map to a __regex lookup, plain queries to
        # case-insensitive containment.
        if self.search_is_regex:
            return '%s__regex' % self.name
        return '%s__icontains' % self.name

    def construct_Q(self):
        # Non-searchable columns and empty queries contribute an empty Q()
        # (no constraint), which is neutral when OR-ed with the others.
        if not self.is_searchable:
            return Q()
        if not self.search:
            return Q()
        return Q(**{self.search_key():self.search})
class DataTableOptions(object):
    """
    Represents the jQuery DataTable options sent by the plugin via an ajax
    request (legacy 1.9 'sEcho' server-side protocol).

    Usage:

       def myview(request, ...):
           options = DataTableOptions(model, column_names, request.GET)
           # do something with options
    """
    def getitem(self, itemable, key, *args):
        """
        Work's pretty much the same as ``getattr`` function
        but for objects that have ``__getitem__`` method.
        """
        try:
            return itemable[key]
        except (IndexError, KeyError):
            if args:
                return args[0]
            raise

    def _get_name(self, column_names, column_index):
        return self.getitem(column_names, column_index, None) # default name is None

    def _get_searchable(self, opts_dict, column_index):
        return opts_dict.get('bSearchable_%s' % column_index, 'false') == 'true'

    def _get_search_query(self, opts_dict, column_index):
        # Per-column query wins; otherwise fall back to the global search box.
        return opts_dict.get('sSearch_%s' % column_index, self.search) or self.search

    def _get_search_is_regex(self, opts_dict, column_index):
        return opts_dict.get('bRegex_%s' % column_index, 'false') == 'true' or self.search_is_regex

    def _get_columns(self, columns_names, params):
        """Build one DataTableColumn per client-side column."""
        columns = []
        for i in xrange(len(columns_names)):
            columns.append(DataTableColumn(**{
                'name': self._get_name(columns_names, i),
                'is_searchable': self._get_searchable(params, i),
                'search': self._get_search_query(params, i),
                'search_is_regex': self._get_search_is_regex(params, i),
            }))
        return columns

    def _get_ordering(self, columns, params):
        """Return an order_by argument for the requested sort, or None.

        BUG FIX: DataTables sends the *number* of sorted columns in
        ``iSortingCols`` and the index of the first sorted column in
        ``iSortCol_0``.  The previous code used iSortingCols as the column
        index (only right by coincidence) and crashed in int(None) when the
        key was absent; default to 0 (no sorting) instead.
        """
        ordering = None
        sorting_columns_count = int(params.get('iSortingCols', 0) or 0)
        sorting_column_dir = params.get('sSortDir_0', 'asc')
        if sorting_columns_count:
            sorting_column_index = int(params.get('iSortCol_0', 0))
            ordering = columns[sorting_column_index].name
            if sorting_column_dir == 'desc':
                ordering = '-%s' % ordering
        return ordering

    def __init__(self, model, column_names, params):
        self.search = params.get('sSearch', '')
        self.search_is_regex = params.get('bRegex', 'false') == 'true'
        self.columns = self._get_columns(column_names, params)
        self.model = model
        self.limit = int(params.get('iDisplayLength', 100))
        self.start_record = int(params.get('iDisplayStart', 0))
        self.end_record = self.start_record + self.limit
        self.echo = int(params.get('sEcho', 1))
        self.ordering = self._get_ordering(self.columns, params)
class DataTable(object):
    """Server-side helper answering a jQuery DataTables ajax request.

    Filters the queryset with the OR of every column's search criterion,
    applies the client-requested ordering and paging, and renders the
    aaData/sEcho/iTotal* response dict.
    """
    model = None

    def __init__(self, columns=None, params=None, model=None, queryset=None):
        self._count = None # cache count field
        self._meta = DataTableOptions(model or self.__class__.model or queryset.model, columns, params)
        if queryset is not None:
            self._queryset = queryset
        else:
            self._queryset = self._meta.model.objects

    def construct_Q(self):
        # OR together all per-column criteria (empty Q() entries are neutral).
        return reduce(operator.or_, [col.construct_Q() for col in self._meta.columns])

    def queryset(self):
        qs = self._queryset.filter(self.construct_Q())
        if self._meta.ordering:
            return qs.order_by(self._meta.ordering)
        return qs

    def count(self):
        # Cached: the count is needed twice per response.
        if self._count is None:
            self._count = self.queryset().count()
        return self._count

    def slice_queryset(self):
        # Only the rows of the currently displayed page.
        return self.queryset()[self._meta.start_record:self._meta.end_record]

    def map(self, function):
        return map(function, self.slice_queryset())

    def response_dict(self, mapitem=lambda x: x):
        """Build the DataTables response; *mapitem* renders one instance to a row."""
        return {
            'iTotalRecords': self.count(),
            'iTotalDisplayRecords': self.count(),
            'sEcho': self._meta.echo,
            'aaData': self.map(mapitem),
        }
#########################################################
|
gpl-3.0
| -4,864,936,522,159,101,000
| 33.830769
| 103
| 0.599823
| false
|
ksmit799/Toontown-Source
|
toontown/minigame/DistributedPatternGameAI.py
|
1
|
7498
|
from DistributedMinigameAI import *
from toontown.ai.ToonBarrier import *
from direct.fsm import ClassicFSM, State
from direct.fsm import State
import random
import PatternGameGlobals
import copy
class DistributedPatternGameAI(DistributedMinigameAI):
    """Server-side FSM for the pattern-matching ("Simon says") minigame.

    Cycles waitClientsReady -> generatePattern -> waitForResults once per
    round; the pattern grows each round.  Scoring (from the code below):
    fastest correct answer earns +2 in multiplayer, a correct round earns
    +round points, and matching every round earns a +4 perfect-game bonus.
    """

    def __init__(self, air, minigameId):
        try:
            # Guard: distributed objects can be constructed more than once;
            # only initialize on the first call.
            self.DistributedPatternGameAI_initialized
        except:
            self.DistributedPatternGameAI_initialized = 1
            DistributedMinigameAI.__init__(self, air, minigameId)
            self.gameFSM = ClassicFSM.ClassicFSM('DistributedPatternGameAI', [State.State('off', self.enterInactive, self.exitInactive, ['waitClientsReady', 'cleanup']),
             State.State('waitClientsReady', self.enterWaitClientsReady, self.exitWaitClientsReady, ['generatePattern', 'cleanup']),
             State.State('generatePattern', self.enterGeneratePattern, self.exitGeneratePattern, ['waitForResults', 'cleanup']),
             State.State('waitForResults', self.enterWaitForResults, self.exitWaitForResults, ['waitClientsReady', 'cleanup']),
             State.State('cleanup', self.enterCleanup, self.exitCleanup, [])], 'off', 'cleanup')
            self.addChildGameFSM(self.gameFSM)

    def delete(self):
        self.notify.debug('delete')
        del self.gameFSM
        DistributedMinigameAI.delete(self)

    def setGameReady(self):
        self.notify.debug('setGameReady')
        DistributedMinigameAI.setGameReady(self)
        self.__initGameVars()

    def setGameStart(self, timestamp):
        self.notify.debug('setGameStart')
        DistributedMinigameAI.setGameStart(self, timestamp)
        self.gameFSM.request('waitClientsReady')

    def setGameAbort(self):
        self.notify.debug('setGameAbort')
        # This is called when the minigame is unexpectedly cancelled.
        if self.gameFSM.getCurrentState():
            self.gameFSM.request('cleanup')
        DistributedMinigameAI.setGameAbort(self)

    def gameOver(self):
        self.notify.debug('gameOver')
        self.gameFSM.request('cleanup')
        DistributedMinigameAI.gameOver(self)

    def enterInactive(self):
        self.notify.debug('enterInactive')

    def exitInactive(self):
        pass

    def __initGameVars(self):
        # Fresh per-game state: growing pattern, round counter, and a
        # perfect-round flag per avatar (cleared on any wrong answer).
        self.pattern = []
        self.round = 0
        self.perfectResults = {}
        for avId in self.avIdList:
            self.perfectResults[avId] = 1

        self.readyClients = []
        self.timeoutTaskName = self.uniqueName('PatternGameResultsTimeout')

    def enterWaitClientsReady(self):
        self.notify.debug('enterWaitClientsReady')
        self.nextRoundBarrier = ToonBarrier('nextRoundReady', self.uniqueName('nextRoundReady'), self.avIdList, PatternGameGlobals.ClientsReadyTimeout, self.__allPlayersReady, self.__clientsReadyTimeout)
        # Clients may have reported ready before this state was entered.
        for avId in self.readyClients:
            self.nextRoundBarrier.clear(avId)

    def reportPlayerReady(self):
        if not self._inState('waitClientsReady'):
            return
        avId = self.air.getAvatarIdFromSender()
        if avId not in self.avIdList:
            self.notify.warning('Got reportPlayerReady from an avId: %s not in our list: %s' % (avId, self.avIdList))
        else:
            self.readyClients.append(avId)
            self.nextRoundBarrier.clear(avId)

    def __allPlayersReady(self):
        self.readyClients = []
        self.gameFSM.request('generatePattern')

    def __clientsReadyTimeout(self, avIds):
        self.notify.debug('__clientsReadyTimeout: clients %s have not responded' % avIds)
        self.setGameAbort()

    def exitWaitClientsReady(self):
        self.nextRoundBarrier.cleanup()
        del self.nextRoundBarrier

    def enterGeneratePattern(self):
        self.notify.debug('enterGeneratePattern')
        self.round += 1
        # Extend the existing pattern up to this round's target length with
        # random button indices 0..3.
        targetLen = PatternGameGlobals.INITIAL_ROUND_LENGTH + PatternGameGlobals.ROUND_LENGTH_INCREMENT * (self.round - 1)
        count = targetLen - len(self.pattern)
        for i in range(0, count):
            self.pattern.append(random.randint(0, 3))

        self.gameFSM.request('waitForResults')
        self.sendUpdate('setPattern', [self.pattern])

    def exitGeneratePattern(self):
        pass

    def enterWaitForResults(self):
        self.notify.debug('enterWaitForResults')
        self.results = [None] * self.numPlayers
        # Sentinel: any real (correct) answer will be faster than this.
        self.fastestTime = PatternGameGlobals.InputTime * 2
        self.fastestAvId = 0
        self.resultsBarrier = ToonBarrier('results', self.uniqueName('results'), self.avIdList, PatternGameGlobals.InputTimeout + 1.0 * self.round, self.__gotAllPatterns, self.__resultsTimeout)
        return

    def reportButtonPress(self, index, wrong):
        # Relays a single button press to the other clients for display;
        # validates the sender and the argument ranges first.
        if not self._inState('waitForResults'):
            return
        avId = self.air.getAvatarIdFromSender()
        if avId not in self.avIdList:
            self.air.writeServerEvent('suspicious', avId, 'PatternGameAI.reportButtonPress avId not on list')
            return
        if index < 0 or index > 3:
            self.air.writeServerEvent('warning', index, 'PatternGameAI.reportButtonPress got bad index')
            return
        if wrong not in [0, 1]:
            self.air.writeServerEvent('warning', wrong, "PatternGameAI.reportButtonPress got bad 'wrong'")
            return
        self.sendUpdate('remoteButtonPressed', [avId, index, wrong])

    def __resultsTimeout(self, avIds):
        self.notify.debug('__resultsTimeout: %s' % avIds)
        # Late clients get an empty pattern and the worst possible time.
        for avId in avIds:
            index = self.avIdList.index(avId)  # NOTE(review): unused local
            self.__acceptPlayerPattern(avId, [], PatternGameGlobals.InputTime * 2)

        self.__gotAllPatterns()

    def reportPlayerPattern(self, pattern, totalTime):
        if not self._inState('waitForResults'):
            return
        avId = self.air.getAvatarIdFromSender()
        self.__acceptPlayerPattern(avId, pattern, totalTime)
        self.resultsBarrier.clear(avId)

    def __acceptPlayerPattern(self, avId, pattern, totalTime):
        index = self.avIdList.index(avId)
        if self.results[index] != None:
            # Only the first report per player counts.
            return
        self.results[index] = pattern
        # Track the fastest *correct* answer for the speed bonus.
        if totalTime < self.fastestTime and pattern == self.pattern:
            self.fastestTime = totalTime
            self.fastestAvId = avId
            if self.numPlayers == 1:
                # Solo game: sentinel id, no speed bonus.
                self.fastestAvId = 1
            else:
                self.scoreDict[self.fastestAvId] += 2
        return

    def __gotAllPatterns(self):
        # NOTE: [[]] * 4 aliases one list four times; harmless here because
        # entries are only rebound below, never mutated in place.
        patterns = [[]] * 4
        for i in range(0, len(self.results)):
            patterns[i] = self.results[i]
            if patterns[i] is None:
                patterns[i] = []

        self.sendUpdate('setPlayerPatterns', patterns + [self.fastestAvId])
        for i in range(0, self.numPlayers):
            avId = self.avIdList[i]
            if not self.results[i] == self.pattern:
                self.perfectResults[avId] = 0
            else:
                # Correct round: award points equal to the round number.
                self.scoreDict[avId] += self.round

        if self.round < PatternGameGlobals.NUM_ROUNDS:
            self.gameFSM.request('waitClientsReady')
        else:
            # End of game: bonus for players who matched every round.
            for avId in self.avIdList:
                if self.perfectResults[avId]:
                    self.scoreDict[avId] += 4
                    self.logPerfectGame(avId)

            self.gameOver()
            self.gameFSM.request('cleanup')
        return

    def exitWaitForResults(self):
        self.resultsBarrier.cleanup()
        del self.resultsBarrier

    def enterCleanup(self):
        self.notify.debug('enterCleanup')

    def exitCleanup(self):
        pass
|
mit
| -381,536,890,092,594,100
| 37.451282
| 203
| 0.640571
| false
|
NeurodataWithoutBorders/api-python
|
unittest/t_ref_image.py
|
1
|
1592
|
#!/usr/bin/python
# import nwb
import test_utils as ut
from nwb import nwb_file
from nwb import nwb_utils as utils
# TESTS storage of reference image
def test_refimage_series():
    """Create an NWB file holding a reference image and verify its contents.

    Checks the stored data length (5 values) and the 'format' and
    'description' attributes under acquisition/images/<name>.
    """
    # Derive the output file name from this script's name: s<script>.nwb.
    if __file__.startswith("./"):
        fname = "s" + __file__[3:-3] + ".nwb"
    else:
        fname = "s" + __file__[1:-3] + ".nwb"
    name = "refimage"
    create_refimage(fname, name)
    val = ut.verify_present(fname, "acquisition/images/", name)
    #if len(val) != 6:
    if len(val) != 5:
        ut.error("Checking ref image contents", "wrong dimension")
    val = ut.verify_attribute_present(fname, "acquisition/images/"+name, "format")
    if not ut.strcmp(val, "raw"):
        ut.error("Checking ref image format", "Wrong value")
    val = ut.verify_attribute_present(fname, "acquisition/images/"+name, "description")
    if not ut.strcmp(val, "test"):
        ut.error("Checking ref image description", "Wrong value")
def create_refimage(fname, name):
    """Write an NWB file *fname* containing a 5-value uint8 reference image *name*."""
    settings = {}
    settings["file_name"] = fname
    settings["start_time"] = "2008-09-15T15:53:00-08:00"
    settings["identifier"] = utils.create_identifier("reference image test")
    settings["mode"] = "w"
    settings["description"] = "reference image test"
    settings["verbosity"] = "none"
    f = nwb_file.open(**settings)
    # neurodata.create_reference_image([1,2,3,4,5], name, "raw", "test")
    f.set_dataset("<image_X>", [1,2,3,4,5], dtype="uint8", name=name, attrs={
        "description": "test", "format":"raw"})
    # neurodata.close()
    f.close()
# Run the test immediately; this module doubles as an executable test script.
test_refimage_series()
print("%s PASSED" % __file__)
|
bsd-3-clause
| 7,133,338,631,440,159,000
| 32.166667
| 87
| 0.616206
| false
|
hhorak/rebase-helper
|
test/test_application.py
|
1
|
4369
|
# -*- coding: utf-8 -*-
#
# This tool helps you to rebase package to the latest version
# Copyright (C) 2013-2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# he Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hracek <phracek@redhat.com>
# Tomas Hozza <thozza@redhat.com>
import os
from .base_test import BaseTest
from rebasehelper.cli import CLI
from rebasehelper.application import Application
from rebasehelper import settings
class TestApplication(BaseTest):
    """ Application tests """
    OLD_SOURCES = 'test-1.0.2.tar.xz'
    NEW_SOURCES = 'test-1.0.3.tar.xz'
    SPEC_FILE = 'test.spec'
    PATCH_1 = 'test-testing.patch'
    PATCH_2 = 'test-testing2.patch'
    PATCH_3 = 'test-testing3.patch'
    SOURCE_1 = 'file.txt.bz2'

    # Fixture files copied into the working directory by BaseTest.
    TEST_FILES = [
        OLD_SOURCES,
        NEW_SOURCES,
        SPEC_FILE,
        PATCH_1,
        PATCH_2,
        PATCH_3,
        SOURCE_1
    ]

    cmd_line_args = ['--not-download-sources', '1.0.3']

    def test_application_sources(self):
        """Check Application.prepare_sources() builds the expected kwargs.

        Compares the 'old'/'new' source descriptions and workspace/results
        paths against a hand-built expected dict.
        """
        expected_dict = {
            'new': {
                'sources': [os.path.join(self.WORKING_DIR, 'test-source.sh'),
                            os.path.join(self.WORKING_DIR, 'source-tests.sh'),
                            os.path.join(self.WORKING_DIR, self.NEW_SOURCES)],
                'version': '1.0.3',
                'name': 'test',
                'tarball': self.NEW_SOURCES,
                'spec': os.path.join(self.WORKING_DIR, settings.REBASE_HELPER_RESULTS_DIR, self.SPEC_FILE),
                'patches_full': {1: [os.path.join(self.WORKING_DIR, self.PATCH_1),
                                     '',
                                     0,
                                     False],
                                 2: [os.path.join(self.WORKING_DIR, self.PATCH_2),
                                     '-p1',
                                     1,
                                     False],
                                 3: [os.path.join(self.WORKING_DIR, self.PATCH_3),
                                     '-p1',
                                     2,
                                     False]}},
            'workspace_dir': os.path.join(self.WORKING_DIR, settings.REBASE_HELPER_WORKSPACE_DIR),
            'old': {
                'sources': [os.path.join(self.WORKING_DIR, 'test-source.sh'),
                            os.path.join(self.WORKING_DIR, 'source-tests.sh'),
                            os.path.join(self.WORKING_DIR, self.OLD_SOURCES)],
                'version': '1.0.2',
                'name': 'test',
                'tarball': self.OLD_SOURCES,
                'spec': os.path.join(self.WORKING_DIR, self.SPEC_FILE),
                'patches_full': {1: [os.path.join(self.WORKING_DIR, self.PATCH_1),
                                     '',
                                     0,
                                     False],
                                 2: [os.path.join(self.WORKING_DIR, self.PATCH_2),
                                     '-p1',
                                     1,
                                     False],
                                 3: [os.path.join(self.WORKING_DIR, self.PATCH_3),
                                     '-p1',
                                     2,
                                     False]}},
            'results_dir': os.path.join(self.WORKING_DIR, settings.REBASE_HELPER_RESULTS_DIR)}
        try:
            cli = CLI(self.cmd_line_args)
            app = Application(cli)
            app.prepare_sources()
            for key, val in app.kwargs.items():
                if key in expected_dict:
                    assert val == expected_dict[key]
        except OSError as oer:
            # NOTE(review): silently swallowing OSError makes this test pass
            # even when source preparation fails -- confirm this is intended.
            pass
|
gpl-2.0
| 800,306,180,075,415,300
| 38.718182
| 107
| 0.487984
| false
|
indautgrp/erpnext
|
erpnext/accounts/report/trial_balance_for_party/trial_balance_for_party.py
|
1
|
5770
|
# Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, cint
from erpnext.accounts.report.trial_balance.trial_balance import validate_filters
def execute(filters=None):
	"""Report entry point: return (columns, data) for the party trial balance."""
	validate_filters(filters)
	show_party_name = is_party_name_visible(filters)
	columns = get_columns(filters, show_party_name)
	data = get_data(filters, show_party_name)
	return columns, data
def get_data(filters, show_party_name):
	"""Build one report row per party plus a trailing totals row.

	Each row carries opening, within-period and closing debit/credit figures
	(company currency).  All-zero rows are skipped unless the
	``show_zero_values`` filter is set.
	"""
	# Customer masters store the display name in customer_name, suppliers in
	# supplier_name.
	party_name_field = "customer_name" if filters.get("party_type")=="Customer" else "supplier_name"
	party_filters = {"name": filters.get("party")} if filters.get("party") else {}
	parties = frappe.get_all(filters.get("party_type"), fields = ["name", party_name_field],
		filters = party_filters, order_by="name")
	company_currency = frappe.db.get_value("Company", filters.company, "default_currency")
	opening_balances = get_opening_balances(filters)
	balances_within_period = get_balances_within_period(filters)
	data = []
	# total_debit, total_credit = 0, 0
	total_row = frappe._dict({
		"opening_debit": 0,
		"opening_credit": 0,
		"debit": 0,
		"credit": 0,
		"closing_debit": 0,
		"closing_credit": 0
	})
	for party in parties:
		row = { "party": party.name }
		if show_party_name:
			row["party_name"] = party.get(party_name_field)
		# opening
		opening_debit, opening_credit = opening_balances.get(party.name, [0, 0])
		row.update({
			"opening_debit": opening_debit,
			"opening_credit": opening_credit
		})
		# within period
		debit, credit = balances_within_period.get(party.name, [0, 0])
		row.update({
			"debit": debit,
			"credit": credit
		})
		# closing: opening plus movement, netted to a single side
		closing_debit, closing_credit = toggle_debit_credit(opening_debit + debit, opening_credit + credit)
		row.update({
			"closing_debit": closing_debit,
			"closing_credit": closing_credit
		})
		# totals
		for col in total_row:
			total_row[col] += row.get(col)
		row.update({
			"currency": company_currency
		})
		has_value = False
		if (opening_debit or opening_credit or debit or credit or closing_debit or closing_credit):
			has_value =True
		if cint(filters.show_zero_values) or has_value:
			data.append(row)
	# Add total row
	total_row.update({
		"party": "'" + _("Totals") + "'",
		"currency": company_currency
	})
	data.append(total_row)
	return data
def get_opening_balances(filters):
	"""Return {party: [opening_debit, opening_credit]} as of ``from_date``.

	Sums GL entries posted before from_date plus explicit opening entries
	(is_opening = 'Yes'), netted to a single side per party.
	"""
	gle = frappe.db.sql("""
		select party, sum(debit) as opening_debit, sum(credit) as opening_credit
		from `tabGL Entry`
		where company=%(company)s
		and ifnull(party_type, '') = %(party_type)s and ifnull(party, '') != ''
		and (posting_date < %(from_date)s or ifnull(is_opening, 'No') = 'Yes')
		group by party""", {
			"company": filters.company,
			"from_date": filters.from_date,
			"party_type": filters.party_type
		}, as_dict=True)
	opening = frappe._dict()
	for d in gle:
		opening_debit, opening_credit = toggle_debit_credit(d.opening_debit, d.opening_credit)
		opening.setdefault(d.party, [opening_debit, opening_credit])
	return opening
def get_balances_within_period(filters):
	"""Return {party: [debit, credit]} summed over from_date..to_date.

	Explicit opening entries (is_opening = 'Yes') are excluded; unlike the
	opening balances, these sums are NOT netted against each other.
	"""
	gle = frappe.db.sql("""
		select party, sum(debit) as debit, sum(credit) as credit
		from `tabGL Entry`
		where company=%(company)s
		and ifnull(party_type, '') = %(party_type)s and ifnull(party, '') != ''
		and posting_date >= %(from_date)s and posting_date <= %(to_date)s
		and ifnull(is_opening, 'No') = 'No'
		group by party""", {
			"company": filters.company,
			"from_date": filters.from_date,
			"to_date": filters.to_date,
			"party_type": filters.party_type
		}, as_dict=True)
	balances_within_period = frappe._dict()
	for d in gle:
		balances_within_period.setdefault(d.party, [d.debit, d.credit])
	return balances_within_period
def toggle_debit_credit(debit, credit):
	"""Net a (debit, credit) pair so only the larger side remains.

	Returns (net_debit, 0.0) when debit exceeds credit, otherwise
	(0.0, net_credit); equal amounts net to (0.0, 0.0).
	"""
	d, c = flt(debit), flt(credit)
	if d > c:
		return d - c, 0.0
	return 0.0, c - d
def get_columns(filters, show_party_name):
	"""Build the report column definitions.

	Layout: party link, six currency columns (opening/period/closing split
	into Dr and Cr), a hidden currency column, and -- when requested -- a
	party name column inserted right after the party link.
	"""
	currency_fields = [
		("opening_debit", _("Opening (Dr)")),
		("opening_credit", _("Opening (Cr)")),
		("debit", _("Debit")),
		("credit", _("Credit")),
		("closing_debit", _("Closing (Dr)")),
		("closing_credit", _("Closing (Cr)")),
	]

	columns = [{
		"fieldname": "party",
		"label": _(filters.party_type),
		"fieldtype": "Link",
		"options": filters.party_type,
		"width": 120
	}]

	for fieldname, label in currency_fields:
		columns.append({
			"fieldname": fieldname,
			"label": label,
			"fieldtype": "Currency",
			"options": "currency",
			"width": 120
		})

	# Hidden helper column telling the UI which currency the amounts are in.
	columns.append({
		"fieldname": "currency",
		"label": _("Currency"),
		"fieldtype": "Link",
		"options": "Currency",
		"hidden": 1
	})

	if show_party_name:
		columns.insert(1, {
			"fieldname": "party_name",
			"label": _(filters.party_type) + " Name",
			"fieldtype": "Data",
			"width": 200
		})

	return columns
def is_party_name_visible(filters):
	"""Show a separate party-name column only when masters are named by series.

	When the Customer/Supplier master is named by the party's own name, the
	extra column would just duplicate the party link column.
	"""
	show_party_name = False
	if filters.get("party_type") == "Customer":
		party_naming_by = frappe.db.get_single_value("Selling Settings", "cust_master_name")
	else:
		party_naming_by = frappe.db.get_single_value("Buying Settings", "supp_master_name")
	if party_naming_by == "Naming Series":
		show_party_name = True
	return show_party_name
|
gpl-3.0
| 8,733,647,437,386,378,000
| 24.995495
| 101
| 0.644541
| false
|
lycying/seeking
|
sklib/ui/wysiwyg/syntax.py
|
1
|
2930
|
# coding:utf-8
#
# Copyright (c) 2010, guo.li <lycying@gmail.com>
# Site < http://code.google.com/p/seeking/ >
# All rights reserved.
# vim: set ft=python sw=2 ts=2 et:
#
from PyQt5.QtGui import QFont
from PyQt5.QtGui import QTextCharFormat
from PyQt5.QtGui import QSyntaxHighlighter
from PyQt5.QtCore import Qt
from PyQt5.QtCore import QRegExp
class Highlighter(QSyntaxHighlighter):
    """Lightweight HTML syntax highlighter for the WYSIWYG source view.

    Highlights tags, double-quoted strings, function-like tokens, a few
    common attribute names, and multi-line ``<!-- ... -->`` comments.
    """

    def __init__(self, parent=None):
        super(Highlighter, self).__init__(parent)

        # HTML tags: bold dark blue.
        keywordFormat = QTextCharFormat()
        keywordFormat.setForeground(Qt.darkBlue)
        keywordFormat.setFontWeight(QFont.Bold)
        keywordPatterns = ["""</?\w+\s+[^>]*>""","<[/]?(html|body|head|title|div|a|br|form|input|b|p|i|center|span|font|table|tr|td|h[1-6])[/]?>"]
        # Compile each pattern exactly once; highlightBlock() reuses these
        # QRegExp objects instead of re-wrapping them on every call.
        self.highlightingRules = [(QRegExp(pattern), keywordFormat)
                                  for pattern in keywordPatterns]

        # Format for <!-- ... --> comments (may span several blocks).
        self.multiLineCommentFormat = QTextCharFormat()
        self.multiLineCommentFormat.setForeground(Qt.red)

        # Double-quoted strings: dark green.
        quotationFormat = QTextCharFormat()
        quotationFormat.setForeground(Qt.darkGreen)
        self.highlightingRules.append((QRegExp("\".*\""),
                                       quotationFormat))

        # Identifiers immediately followed by '(' — function-like tokens.
        functionFormat = QTextCharFormat()
        functionFormat.setFontItalic(True)
        functionFormat.setForeground(Qt.blue)
        self.highlightingRules.append((QRegExp("\\b[A-Za-z0-9_]+(?=\\()"),
                                       functionFormat))

        # A handful of common attribute names, e.g. id=, class=, src=.
        moreKeyWords = QTextCharFormat()
        moreKeyWords.setForeground(Qt.darkMagenta)
        moreKeyWords.setFontWeight(QFont.Bold)
        self.highlightingRules.append((QRegExp("(id|class|src|border|width|height|style|name|type|value)="), moreKeyWords))

        self.commentStartExpression = QRegExp("<!--")
        self.commentEndExpression = QRegExp("-->")

    def highlightBlock(self, text):
        """Apply single-line rules, then track multi-line comment state."""
        # BUG FIX: the rules already hold compiled QRegExp instances; the
        # previous code copy-constructed a fresh QRegExp for every rule on
        # every block, which was pure overhead.
        for expression, fmt in self.highlightingRules:
            index = expression.indexIn(text)
            while index >= 0:
                length = expression.matchedLength()
                self.setFormat(index, length, fmt)
                index = expression.indexIn(text, index + length)

        # Block state 1 means "inside an unterminated <!-- comment".
        self.setCurrentBlockState(0)

        startIndex = 0
        if self.previousBlockState() != 1:
            startIndex = self.commentStartExpression.indexIn(text)

        while startIndex >= 0:
            endIndex = self.commentEndExpression.indexIn(text, startIndex)
            if endIndex == -1:
                # Comment continues into the next block; remember that.
                self.setCurrentBlockState(1)
                commentLength = len(text) - startIndex
            else:
                commentLength = endIndex - startIndex + self.commentEndExpression.matchedLength()
            self.setFormat(startIndex, commentLength,
                           self.multiLineCommentFormat)
            startIndex = self.commentStartExpression.indexIn(text,
                                                             startIndex + commentLength)
|
gpl-2.0
| -3,336,502,950,754,958,000
| 36.564103
| 146
| 0.639249
| false
|
nheijmans/random_scripts
|
mock_http_server/http_https_server.py
|
1
|
1185
|
# given a pem file ... openssl req -new -x509 -keyout yourpemfile.pem -out yourpemfile.pem -days 365 -nodes
import sys
import ssl
import time
import signal
import threading
import BaseHTTPServer, SimpleHTTPServer
def http_worker():
    # Serve the current working directory over plain HTTP on localhost:8080.
    # serve_forever() blocks, so this is intended to run on its own
    # (daemon) thread — see the __main__ block of this script.
    httpd = BaseHTTPServer.HTTPServer(('localhost', 8080), SimpleHTTPServer.SimpleHTTPRequestHandler)
    httpd.serve_forever()
    return
def https_worker():
    # Serve the current working directory over HTTPS on localhost:4443,
    # wrapping the listening socket with the self-signed cert whose
    # generation command is documented at the top of this file.
    # NOTE(review): ssl.wrap_socket is deprecated in modern Python; kept
    # here for the Python 2 environment this script targets.
    httpd = BaseHTTPServer.HTTPServer(('localhost', 4443), SimpleHTTPServer.SimpleHTTPRequestHandler)
    httpd.socket = ssl.wrap_socket (httpd.socket, server_side=True,
        certfile='yourpemfile.pem')
    httpd.serve_forever()
    return
def signal_handler(sig, frame):
    """SIGINT handler: announce the shutdown and exit with status 0."""
    # User-facing message kept byte-identical to the original.
    message = 'You pressed Ctrl+C! Now exiting'
    print(message)
    sys.exit(0)
if __name__ == "__main__":
    # HTTP server thread; daemon=True so it dies with the main thread.
    http = threading.Thread(name='httpserver', target=http_worker)
    http.setDaemon(True)
    # HTTPS server thread, likewise daemonized.
    https = threading.Thread(name='httpsserver',target=https_worker)
    https.setDaemon(True)
    http.start()
    https.start()
    # Catch Ctrl+C so the script exits cleanly; signal.pause() keeps the
    # main thread alive (and the daemon threads with it) until a signal.
    signal.signal(signal.SIGINT, signal_handler)
    print("Press CTRL+C to stop the script")
    signal.pause()
|
gpl-3.0
| 8,117,941,957,505,710,000
| 27.214286
| 107
| 0.699578
| false
|
boazjohn/pyspark-job-server
|
lib/receiver.py
|
1
|
8167
|
#!/usr/bin/python
# Standard Library
import json
import time
import logging
import socket
from threading import Thread, Lock
# Third Party
# Local
# set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class EventBroadcastReceiver:
    """Listens on a TCP socket for newline-delimited JSON Spark listener
    events and maintains an in-memory job/stage/task progress model.

    All access to ``self.status`` is serialized through ``self.statusLock``.
    NOTE(review): Python 2 code (print statements, str sockets).
    """

    def __init__(self, host='localhost', port=0):
        # port=0 lets the OS pick a free port; the chosen port is
        # re-read from the socket and exposed as self.port.
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.bind((host,port))
        self.socket.listen(5)
        if port == 0:
            port = self.socket.getsockname()[1]
        self.port = port
        # Guards every read/write of self.status.
        self.statusLock = Lock()
        self.status = {
            "jobs": {}
        }
        # stage id -> job id, filled as jobs are submitted.
        self.stageMap = {}
        # job-group handle ("spark.jobGroup.id") -> job id.
        self.idMap = {}

    def run(self):
        # Start the blocking accept/recv loop on a background thread.
        self.listener = Thread(target=self._run)
        self.listener.start()

    def _run(self):
        # Accept a single client and consume its newline-delimited JSON
        # event stream forever, feeding each line to processEvent().
        conn, address = self.socket.accept()
        data = ""
        while True:
            data += conn.recv(1024)
            # Drain every complete line currently buffered.
            while True:
                newline = data.find('\n')
                if newline != -1:
                    self.statusLock.acquire()
                    try:
                        jsonData = data[:newline]
                        self.processEvent(json.loads(data[:newline]))
                    except Exception as e:
                        # A malformed/unknown event must not kill the loop.
                        print "ERROR: %s" % e
                    finally:
                        # Always consume the line, even on failure.
                        data = data[newline+1:]
                        self.statusLock.release()
                else:
                    break

    def processEvent(self, event):
        """Fold one Spark listener event into self.status.

        Caller must hold statusLock (as _run does).
        """
        eventType = event['Event']
        # TODO DEAL WITH FAILED TASKS!!!
        if eventType == "SparkListenerApplicationStart":
            self.status['appName'] = event["App Name"]
            self.status['started'] = event["Timestamp"]
        elif eventType == "SparkListenerJobStart":
            # Register the job with all its stages in the "waiting" state.
            jobId = event['Job ID']
            stages = event["Stage IDs"]
            properties = event["Properties"]
            jobInfo = {
                "id": jobId,
                "numStages" : len(stages),
                "stagesWaiting": stages,
                "stagesInProgress" : [],
                "stagesComplete" : [],
                "stages": {
                    stage : {
                        "id": stage,
                        "numTasks" : 0,
                        "tasksInProgress" : [],
                        "tasksComplete" : [],
                        "tasks": {},
                        "complete": False
                    }
                    for stage in stages},
                "complete": False,
                "failed": False,
                "properties": properties
            }
            for stage in stages:
                self.stageMap[stage] = jobId
            # A job submitted with a job group gets a lookup handle.
            if "spark.jobGroup.id" in properties:
                self.idMap[properties["spark.jobGroup.id"]] = jobId
                jobInfo['handle'] = properties["spark.jobGroup.id"]
            self.status['jobs'][jobId] = jobInfo
            # Evict completed jobs until at most 100 jobs are retained.
            i = 0
            keys = self.status['jobs'].keys()
            for key in keys:
                if len(self.status['jobs']) <= 100:
                    break
                if self.status['jobs'][key]['complete']:
                    del self.status['jobs'][key]
        elif eventType == "SparkListenerStageSubmitted":
            # Move the stage from waiting to in-progress and record its size.
            info = event["Stage Info"]
            stageId = info["Stage ID"]
            jobId = self.stageMap[stageId]
            job = self.status['jobs'][jobId]
            job['stagesWaiting'].remove(stageId)
            job['stagesInProgress'].append(stageId)
            stage = job['stages'][stageId]
            stage['numTasks'] = info["Number of Tasks"]
        elif eventType == "SparkListenerTaskStart":
            info = event["Task Info"]
            taskId = info["Task ID"]
            stageId = event["Stage ID"]
            jobId = self.stageMap[stageId]
            stage = self.status['jobs'][jobId]['stages'][stageId]
            stage["tasksInProgress"].append(taskId)
            stage["tasks"][taskId] = {
                "id": taskId,
                "started": info["Launch Time"]
            }
        elif eventType == "SparkListenerTaskEnd":
            info = event["Task Info"]
            taskId = info["Task ID"]
            stageId = event["Stage ID"]
            jobId = self.stageMap[stageId]
            stage = self.status['jobs'][jobId]['stages'][stageId]
            stage["tasksInProgress"].remove(taskId)
            stage["tasksComplete"].append(taskId)
            stage["tasks"][taskId]['finished'] = info["Finish Time"]
            # TODO Handle event where task ends in failure
        elif eventType == "SparkListenerStageCompleted":
            info = event["Stage Info"]
            stageId = info["Stage ID"]
            jobId = self.stageMap[stageId]
            job = self.status['jobs'][jobId]
            job['stagesInProgress'].remove(stageId)
            job['stagesComplete'].append(stageId)
            stage = job['stages'][stageId]
            stage["complete"] = True
        elif eventType == "SparkListenerJobEnd":
            jobId = event['Job ID']
            job = self.status['jobs'][jobId]
            job["complete"] = True
            result = event['Job Result']
            if result['Result'] == 'JobFailed':
                job["failed"] = True

    def getStatus(self):
        """Return a shallow, lock-protected copy of the status dict."""
        status = {}
        self.statusLock.acquire()
        try:
            # NOTE(review): shallow copy — nested job dicts are still
            # shared with the listener thread.
            status = dict(self.status.items())
        except Exception as e:
            print e
        finally:
            self.statusLock.release()
        return status

    def getProgress(self, jobType=None):
        """Return the status with each job reduced to a progress summary.

        jobType (optional): keep only jobs whose "spark.job.type"
        property equals this value.
        """
        status = self.getStatus()
        if jobType:
            status['jobs'] = {
                key: value for key, value in status['jobs'].items() if value['properties']['spark.job.type'] == jobType
            }
        status['jobs'] = {
            jobId: self._processJobStatusToProgress(info)
            for jobId, info in status['jobs'].items()
        }
        return status

    def getJobStatus(self, jobName):
        """Look up a job's raw status by its job-group handle (or {} if unknown)."""
        jobId = self.idMap.get(jobName, None)
        status = self.getStatus()
        jobStatus = status['jobs'].get(jobId, {})
        return jobStatus

    def _processJobStatusToProgress(self, status):
        """Collapse one job's raw status into a small progress summary dict."""
        if len(status) == 0:
            return {}
        stages = status['stages']
        properties = status['properties']
        totalStages = len(stages)
        completeStages = len([stage for stage in stages.values() if stage['complete']])
        # Avoid division by zero for jobs with no stages: report 1/1.
        if totalStages == 0:
            completeStages = 1
            totalStages = 1
        progress = {
            "name": properties.get("spark.job.name", ""),
            "type": properties.get("spark.job.type", ""),
            "complete": status['complete'],
            "failed": status['failed'],
            "totalStages": totalStages,
            "completeStages": completeStages,
            "stageProgress": float(completeStages)/float(totalStages),
        }
        if "handle" in status:
            progress['handle'] = status['handle']
        # Task-level detail only for the first currently-running stage.
        if len(status["stagesInProgress"]) > 0:
            currentStage = stages[status["stagesInProgress"][0]]
            totalTasks = currentStage['numTasks']
            completeTasks = len(currentStage['tasksComplete'])
            if totalTasks == 0:
                completeTasks = 1
                totalTasks = 1
            progress["currentStage"] = {
                "totalTasks": totalTasks,
                "completeTasks": completeTasks,
                "taskProgress": float(completeTasks)/float(totalTasks)
            }
        return progress

    def getJobProgress(self, jobName):
        """Progress summary for one job, looked up by job-group handle."""
        status = self.getJobStatus(jobName)
        return self._processJobStatusToProgress(status)

    def getRunningCount(self):
        """Number of jobs currently tracked and not yet complete."""
        return len([job for job in self.getStatus()['jobs'].values() if not job['complete']])
def close():
    """Tear down the receiver — currently a no-op placeholder.

    NOTE(review): defined without a ``self`` parameter; if this is meant
    to be an ``EventBroadcastReceiver`` method it cannot be called on an
    instance — confirm intent before relying on it.
    """
    # TODO: close the listening socket and stop the listener thread.
    pass
|
bsd-3-clause
| 7,201,294,214,269,019,000
| 28.806569
| 119
| 0.501408
| false
|
AvadootNachankar/gstudio
|
gnowsys-ndf/gnowsys_ndf/ndf/views/data_review.py
|
2
|
15718
|
''' -- Imports from python libraries -- '''
# import os, re
import json
''' -- imports from installed packages -- '''
from django.http import HttpResponse
from django.shortcuts import render_to_response # , render #uncomment when to use
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from mongokit import paginator
try:
from bson import ObjectId
except ImportError: # old pymongo
from pymongo.objectid import ObjectId
# from django.http import Http404
''' -- imports from application folders/files -- '''
from gnowsys_ndf.ndf.models import Node # , GRelation, Triple
from gnowsys_ndf.ndf.models import node_collection, triple_collection
# from gnowsys_ndf.ndf.models import GSystemType#, GSystem uncomment when to use
# from gnowsys_ndf.ndf.models import File
from gnowsys_ndf.ndf.models import STATUS_CHOICES
from gnowsys_ndf.ndf.views.methods import get_node_common_fields,get_execution_time # , create_grelation_list ,set_all_urls
from gnowsys_ndf.ndf.views.methods import create_grelation
# from gnowsys_ndf.ndf.views.methods import create_gattribute
from gnowsys_ndf.ndf.views.methods import get_node_metadata, get_page, get_group_name_id
# from gnowsys_ndf.ndf.org2any import org2html
from gnowsys_ndf.ndf.views.search_views import results_search
# from gnowsys_ndf.settings import GSTUDIO_SITE_VIDEO
# from gnowsys_ndf.settings import EXTRA_LANG_INFO
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_EDUCATIONAL_SUBJECT
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_EDUCATIONAL_USE
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_INTERACTIVITY_TYPE
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_EDUCATIONAL_ALIGNMENT
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_EDUCATIONAL_LEVEL
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_CURRICULAR
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_AUDIENCE
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_TEXT_COMPLEXITY
from gnowsys_ndf.settings import GSTUDIO_RESOURCES_LANGUAGES
GST_FILE = node_collection.one({'_type': 'GSystemType', 'name': u'File'})
pandora_video_st = node_collection.one({'$and': [{'_type': 'GSystemType'}, {'name': 'Pandora_video'}]})
file_id = node_collection.find_one({'_type': "GSystemType", "name": "File"}, {"_id": 1})
page_id = node_collection.find_one({'_type': "GSystemType", "name": "Page"}, {"_id": 1})
theme_gst_id = node_collection.find_one({'_type': "GSystemType", "name": "Theme"}, {"_id": 1})
# data review in File app
@login_required
@get_execution_time
def data_review(request, group_id, page_no=1, **kwargs):
    '''
    To get all the information related to every resource object in the group.

    Lists File/Page/Theme resources of the group (public ones, plus the
    requesting user's private ones), 10 per page, newest first.

    To get processed context_variables into another variable,
    pass <get_paged_resources=True> as last arg.
    e.g:
    context_variables = data_review(request, group_id, page_no, get_paged_resources=True)
    '''
    # The URL may carry either an ObjectId string or a group name.
    try:
        group_id = ObjectId(group_id)
    except:
        # NOTE(review): bare except — any failure is treated as "this is
        # a group name"; confirm that is intended.
        group_name, group_id = get_group_name_id(group_id)

    files_obj = node_collection.find({
        'member_of': {'$in': [
            ObjectId(file_id._id),
            ObjectId(page_id._id),
            ObjectId(theme_gst_id._id)
        ]},
        # '_type': 'File', 'fs_file_ids': {'$ne': []},
        'group_set': {'$in': [ObjectId(group_id)]},
        # Public resources, or private ones created by the current user.
        '$or': [
            {'access_policy': u"PUBLIC"},
            {'$and': [
                {'access_policy': u"PRIVATE"},
                {'created_by': request.user.id}
            ]
            }
        ]
        # {'member_of': {'$all': [pandora_video_st._id]}}
    }).sort("created_at", -1)

    # implementing pagination: paginator.Paginator(cursor_obj, <int: page no>, <int: no of obj in each page>)
    # (ref: https://github.com/namlook/mongokit/blob/master/mongokit/paginator.py)
    paged_resources = paginator.Paginator(files_obj, page_no, 10)

    # list to hold resources instances with it's attributes and relations
    files_list = []
    for each_resource in paged_resources.items:
        # each_resource, ver = get_page(request, each_resource)
        # Pull in the resource's attributes/relations before rendering.
        each_resource.get_neighbourhood(each_resource.member_of)
        files_list.append(node_collection.collection.GSystem(each_resource))
        # print "==============", each_resource.name, " : ", each_resource.group_set
        # print "\n\n\n========", each_resource.keys()
        # for each, val in each_resource.iteritems():
        #     print each, "--", val,"\n"
    # print "files_obj.count: ", files_obj.count()
    files_obj.close()

    context_variables = {
        "group_id": group_id, "groupid": group_id,
        "files": files_list, "page_info": paged_resources,
        "urlname": "data_review_page", "second_arg": "",
        "static_educationalsubject": GSTUDIO_RESOURCES_EDUCATIONAL_SUBJECT,
        # "static_language": EXTRA_LANG_INFO,
        "static_language": GSTUDIO_RESOURCES_LANGUAGES,
        "static_educationaluse": GSTUDIO_RESOURCES_EDUCATIONAL_USE,
        "static_interactivitytype": GSTUDIO_RESOURCES_INTERACTIVITY_TYPE,
        "static_educationalalignment": GSTUDIO_RESOURCES_EDUCATIONAL_ALIGNMENT,
        "static_educationallevel": GSTUDIO_RESOURCES_EDUCATIONAL_LEVEL,
        "static_curricular": GSTUDIO_RESOURCES_CURRICULAR,
        "static_audience": GSTUDIO_RESOURCES_AUDIENCE,
        "static_status": list(STATUS_CHOICES),
        "static_textcomplexity": GSTUDIO_RESOURCES_TEXT_COMPLEXITY
    }

    # Allow callers (e.g. the search view) to reuse the computed context
    # without rendering.
    if kwargs.get('get_paged_resources', False):
        return context_variables

    template_name = "ndf/data_review.html"
    return render_to_response(
        template_name,
        context_variables,
        context_instance=RequestContext(request)
    )
@get_execution_time
def get_dr_search_result_dict(request, group_id, search_text=None, page_no=1):
    """Render the data-review page for resources matching a search term.

    Runs the exact-name search, keeps only File/Page results, and renders
    them with the same context the plain data_review view uses.
    """
    try:
        group_id = ObjectId(group_id)
    except:
        group_name, group_id = get_group_name_id(group_id)

    # check if request is from form or from next page
    if request.GET.has_key("search_text"):
        search_text = request.GET.get("search_text", "")
    else:
        # URL-path form: '+' stands in for spaces.
        search_text = search_text.replace("+", " ")

    get_req = request.GET.copy()
    # adding values to GET req
    get_req.update({"search_text": search_text})
    # overwriting request.GET with newly created QueryDict instance get_req
    request.GET = get_req

    search_reply = json.loads(results_search(request, group_id, return_only_dict = True))
    # Only exact name matches are shown on this page.
    exact_search_res = search_reply["exact"]["name"]
    result_ids_list = [ ObjectId(each_dict["_id"]) for each_dict in exact_search_res ]

    result_cur = node_collection.find({
        "_id": {"$in": result_ids_list},
        'member_of': {'$in': [ObjectId(file_id._id), ObjectId(page_id._id)]}
    })
    paged_resources = paginator.Paginator(result_cur, page_no, 10)

    # list to hold resources instances with it's attributes and relations
    files_list = []
    for each_resource in paged_resources.items:
        each_resource, ver = get_page(request, each_resource)
        each_resource.get_neighbourhood(each_resource.member_of)
        files_list.append(node_collection.collection.GSystem(each_resource))

    return render_to_response("ndf/data_review.html",
        {
            "group_id": group_id, "groupid": group_id,
            "files": files_list, "page_info": paged_resources,
            "urlname": "data_review_search_page",
            "second_arg": search_text, "search_text": search_text,
            "static_educationalsubject": GSTUDIO_RESOURCES_EDUCATIONAL_SUBJECT,
            # "static_language": EXTRA_LANG_INFO,
            "static_language": GSTUDIO_RESOURCES_LANGUAGES,
            "static_educationaluse": GSTUDIO_RESOURCES_EDUCATIONAL_USE,
            "static_interactivitytype": GSTUDIO_RESOURCES_INTERACTIVITY_TYPE,
            "static_educationalalignment": GSTUDIO_RESOURCES_EDUCATIONAL_ALIGNMENT,
            "static_educationallevel": GSTUDIO_RESOURCES_EDUCATIONAL_LEVEL,
            "static_curricular": GSTUDIO_RESOURCES_CURRICULAR,
            "static_audience": GSTUDIO_RESOURCES_AUDIENCE,
            "static_status": list(STATUS_CHOICES),
            "static_textcomplexity": GSTUDIO_RESOURCES_TEXT_COMPLEXITY
        }, context_instance=RequestContext(request))
# saving resource object of data review
@login_required
@get_execution_time
def data_review_save(request, group_id):
'''
Method to save each and every data-row edit of data review app
'''
userid = request.user.pk
try:
group_id = ObjectId(group_id)
except:
group_name, group_id = get_group_name_id(group_id)
group_obj = node_collection.one({"_id": ObjectId(group_id)})
node_oid = request.POST.get("node_oid", "")
node_details = request.POST.get("node_details", "")
node_details = json.loads(node_details)
# print "node_details : ", node_details
# updating some key names of dictionary as per get_node_common_fields.
node_details["lan"] = node_details.pop("language")
node_details["prior_node_list"] = node_details.pop("prior_node")
node_details["login-mode"] = node_details.pop("access_policy")
status = node_details.pop("status")
# node_details["collection_list"] = node_details.pop("collection") for future use
# Making copy of POST QueryDict instance.
# To make it mutable and fill in node_details value/s.
post_req = request.POST.copy()
# removing node_details dict from req
post_req.pop('node_details')
# adding values to post req
post_req.update(node_details)
# overwriting request.POST with newly created QueryDict instance post_req
request.POST = post_req
# print "\n---\n", request.POST, "\n---\n"
license = request.POST.get('license', '')
file_node = node_collection.one({"_id": ObjectId(node_oid)})
if request.method == "POST":
edit_summary = []
file_node_before = file_node.copy() # copying before it is getting modified
is_changed = get_node_common_fields(request, file_node, group_id, GST_FILE)
for key, val in file_node_before.iteritems():
if file_node_before[key] != file_node[key]:
temp_edit_summ = {}
temp_edit_summ["name"] = "Field: " + key
temp_edit_summ["before"] = file_node_before[key]
temp_edit_summ["after"] = file_node[key]
edit_summary.append(temp_edit_summ)
# to fill/update attributes of the node and get updated attrs as return
ga_nodes = get_node_metadata(request, file_node, is_changed=True)
if len(ga_nodes):
is_changed = True
# adding the edit attribute name in summary
for each_ga in ga_nodes:
temp_edit_summ = {}
temp_edit_summ["name"] = "Attribute: " + each_ga["node"]["attribute_type"]["name"]
temp_edit_summ["before"] = each_ga["before_obj_value"]
temp_edit_summ["after"] = each_ga["node"]["object_value"]
edit_summary.append(temp_edit_summ)
teaches_list = request.POST.get('teaches', '') # get the teaches list
prev_teaches_list = request.POST.get("teaches_prev", "") # get the before-edit teaches list
# check if teaches list exist means nodes added/removed for teaches relation_type
# also check for if previous teaches list made empty with prev_teaches_list
if (teaches_list != '') or prev_teaches_list:
teaches_list = teaches_list.split(",") if teaches_list else []
teaches_list = [ObjectId(each_oid) for each_oid in teaches_list]
relation_type_node = node_collection.one({'_type': "RelationType", 'name':'teaches'})
gr_nodes = create_grelation(file_node._id, relation_type_node, teaches_list)
gr_nodes_oid_list = [ObjectId(each_oid["right_subject"]) for each_oid in gr_nodes] if gr_nodes else []
prev_teaches_list = prev_teaches_list.split(",") if prev_teaches_list else []
prev_teaches_list = [ObjectId(each_oid) for each_oid in prev_teaches_list]
if len(gr_nodes_oid_list) == len(prev_teaches_list) and set(gr_nodes_oid_list) == set(prev_teaches_list):
pass
else:
rel_nodes = triple_collection.find({'_type': "GRelation",
'subject': file_node._id,
'relation_type.$id': relation_type_node._id
})
rel_oid_name = {}
for each in rel_nodes:
temp = {}
temp[each.right_subject] = each.name
rel_oid_name.update(temp)
is_changed = True
temp_edit_summ = {}
temp_edit_summ["name"] = "Relation: Teaches"
temp_edit_summ["before"] = [rel_oid_name[each_oid].split(" -- ")[2] for each_oid in prev_teaches_list]
temp_edit_summ["after"] = [rel_oid_name[each_oid].split(" -- ")[2] for each_oid in gr_nodes_oid_list]
edit_summary.append(temp_edit_summ)
assesses_list = request.POST.get('assesses_list','')
if assesses_list != '':
assesses_list = assesses_list.split(",")
assesses_list = [ObjectId(each_oid) for each_oid in assesses_list]
relation_type_node = node_collection.one({'_type': "RelationType", 'name':'assesses'})
gr_nodes = create_grelation(file_node._id, relation_type_node, teaches_list)
gr_nodes_oid_list = [ObjectId(each_oid["right_subject"]) for each_oid in gr_nodes]
if len(gr_nodes_oid_list) == len(teaches_list) and set(gr_nodes_oid_list) == set(teaches_list):
pass
else:
is_changed = True
# changing status to draft even if attributes/relations are changed
if is_changed:
file_node.status = unicode("DRAFT")
file_node.modified_by = userid
if userid not in file_node.contributors:
file_node.contributors.append(userid)
# checking if user is authenticated to change the status of node
if status and ((group_obj.is_gstaff(request.user)) or (userid in group_obj.author_set)):
if file_node.status != status:
file_node.status = unicode(status)
file_node.modified_by = userid
if userid not in file_node.contributors:
file_node.contributors.append(userid)
is_changed = True
if is_changed:
file_node.save(groupid=group_id)
# print edit_summary
return HttpResponse(file_node.status)
# ---END of data review saving.
|
agpl-3.0
| 8,873,485,038,332,246,000
| 42.905028
| 124
| 0.599313
| false
|
RakanNimer/hackathon
|
main.py
|
1
|
1342
|
from flask import Flask
import flask
from reddit import Submissions
from reddit import Submission
import redis
import json
try:
from flask.ext.cors import CORS # The typical way to import flask-cors
except ImportError:
# Path hack allows examples to be run without installation.
import os
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.sys.path.insert(0, parentdir)
from flask.ext.cors import CORS
r = redis.StrictRedis(host='localhost', port=6379, db=0)
app = Flask(__name__)
cors = CORS(app)
@app.route("/")
def hello():
    # Serve /r/worldnews submissions enriched with Twitter links, caching
    # the computed result in redis under the 'worldnews' key.
    # NOTE(review): r.set() is called without an expiry, so the cache is
    # never refreshed once populated — confirm that is intended.
    if r.get('worldnews') is None :
        submissions = Submissions();
        submissions.getFromReddit('worldnews',10)
        urlsInTweets = submissions.getFromTwitter()
        r.set('worldnews', json.dumps(urlsInTweets))
        return flask.jsonify(result=urlsInTweets)
    else :
        # Cache hit: redis returns the JSON string we stored above.
        urlsInTweets = r.get('worldnews')
        submissions = json.loads(urlsInTweets)
        return flask.jsonify(result=submissions)
    # submissionsInfo = {'result' : result}
    # return flask.jsonify(result=submissionsInfo)
#submissions = Submissions().getFromReddit('worldnews',10)
#a = submissions.getFromTwitter()
#submission.getTweetLinksFromHashtags()
if __name__ == "__main__":
    # Debug mode enables the reloader and interactive traceback page —
    # development only, not safe for production.
    app.debug = True
    app.run(threaded=True)
|
mit
| 1,764,334,778,868,504,600
| 28.195652
| 75
| 0.681073
| false
|
xhochy/g-octave
|
g_octave/config.py
|
1
|
2924
|
# -*- coding: utf-8 -*-
"""
g_octave.config
~~~~~~~~~~~~~~~
This module implements a Python object to handle the configuration
of g-octave.
:copyright: (c) 2009-2010 by Rafael Goncalves Martins
:license: GPL-2, see LICENSE for more details.
"""
from __future__ import absolute_import
import os
from .exception import GOctaveError
# py3k compatibility
from .compat import py3k
if py3k:
import configparser
else:
import ConfigParser as configparser
__all__ = ['Config']
class Config(object):
    """Read-only accessor for g-octave configuration options.

    Lookup order for every option: the ``GOCTAVE_<OPTION>`` environment
    variable first, then the ``[main]`` section of the configuration
    file, finally the hard-coded default below.
    """

    # Fallback values; the key set also defines the valid option names
    # accepted by __getattr__.
    _defaults = {
        'db': '/var/cache/g-octave',
        'overlay': '/var/lib/g-octave',
        'categories': 'main,extra,language',
        'db_mirror': 'github://rafaelmartins/g-octave-db',
        'trac_user': '',
        'trac_passwd': '',
        'log_level': '',
        'log_file': '/var/log/g-octave.log',
        'package_manager': 'portage',
        'use_scm': 'false',
    }

    _section_name = 'main'

    # Prefix for environment-variable overrides, e.g. GOCTAVE_DB.
    _environ_namespace = 'GOCTAVE_'

    def __init__(self, config_file=None):
        """Parse *config_file*, or the first existing default location.

        Raises:
            GOctaveError: if no configuration file can be found or read.
        """
        # config parser pre-seeded with the defaults
        self._config = configparser.ConfigParser(self._defaults)

        # current directory
        cwd = os.path.dirname(os.path.realpath(__file__))

        # no configuration file provided as parameter
        if config_file is None:
            # we just want one of the following configuration files:
            # '/etc/g-octave.cfg', '../etc/g-octave.cfg'
            available_files = [
                os.path.join('/etc', 'g-octave.cfg'),
                os.path.join(cwd, '..', 'etc', 'g-octave.cfg'),
            ]

            # get the first one available
            for my_file in available_files:
                if os.path.exists(my_file):
                    config_file = my_file
                    break
            else:
                # BUG FIX: previously config_file stayed None and
                # ConfigParser.read(None) blew up with a raw TypeError;
                # fail with the documented exception type instead.
                raise GOctaveError(
                    'No configuration file found in: %r' % available_files
                )

        # parse the wanted file using ConfigParser
        parsed_files = self._config.read(config_file)

        # no file was parsed
        if len(parsed_files) == 0:
            raise GOctaveError('File not found: %r' % config_file)

    def _evaluate_from_file(self, attr):
        # Return the value from the configuration file, or None if the
        # section/option is missing.
        try:
            return self._config.get(self._section_name, attr)
        except (configparser.NoSectionError, configparser.NoOptionError):
            return None

    def _evaluate_from_environ(self, attr):
        # Return the value from the GOCTAVE_* environment namespace,
        # or None when unset.
        return os.environ.get(self._environ_namespace + attr.upper(), None)

    def __getattr__(self, attr):
        """Resolve a configuration option: environment first, then file.

        Raises:
            GOctaveError: if *attr* is not a known option name.
        """
        if attr in self._defaults:
            # try the environment variable first
            from_env = self._evaluate_from_environ(attr)
            if from_env is not None:
                return from_env
            # default to the configuration file (which falls back to
            # the ConfigParser defaults seeded in __init__)
            return self._evaluate_from_file(attr)
        else:
            raise GOctaveError('Invalid option: %r' % attr)
|
gpl-2.0
| 2,911,297,229,804,912,600
| 28.836735
| 75
| 0.576949
| false
|
banesullivan/ParaViewGeophysics
|
PVGeo/ubc/tensor.py
|
1
|
21910
|
__all__ = [
'TensorMeshReader',
'TensorMeshAppender',
'TopoMeshAppender',
]
__displayname__ = 'Tensor Mesh'
import os
import sys
import numpy as np
import pandas as pd
import vtk
from .. import _helpers, interface
from ..base import AlgorithmBase
from .two_file_base import ModelAppenderBase, ubcMeshReaderBase
if sys.version_info < (3,):
from StringIO import StringIO
else:
from io import StringIO
class TensorMeshReader(ubcMeshReaderBase):
"""UBC Mesh 2D/3D models are defined using a 2-file format. The "mesh" file
describes how the data is discretized. The "model" file lists the physical
property values for all cells in a mesh. A model file is meaningless without
an associated mesh file. The reader will automatically detect if the mesh is
2D or 3D and read the remainder of the data with that dimensionality
assumption. If the mesh file is 2D, then then model file must also be in the
2D format (same for 3D).
Note:
Model File is optional. Reader will still construct
``vtkRectilinearGrid`` safely.
"""
__displayname__ = 'UBC Tensor Mesh Reader'
__category__ = 'reader'
description = 'PVGeo: UBC Mesh 2D/3D Two-File Format'
    def __init__(self, nOutputPorts=1, outputType='vtkRectilinearGrid', **kwargs):
        """Set up the reader with a cached empty mesh and no models loaded."""
        ubcMeshReaderBase.__init__(
            self, nOutputPorts=nOutputPorts, outputType=outputType, **kwargs
        )
        # Cached mesh, rebuilt only when the mesh file needs re-reading.
        self.__mesh = vtk.vtkRectilinearGrid()
        # Model arrays read from the model file(s), cached between requests.
        self.__models = []
    @staticmethod
    def place_model_on_mesh(mesh, model, data_name='Data'):
        """Places model data onto a mesh. This is for the UBC Grid data reaers
        to associate model data with the mesh grid.

        Args:
            mesh (vtkRectilinearGrid): The ``vtkRectilinearGrid`` that is the
                mesh to place the model data upon.
            model (np.array): A NumPy float array that holds all of the data to
                place inside of the mesh's cells.
            data_name (str) : The name of the model data array once placed on the
                ``vtkRectilinearGrid``.

        Return:
            vtkRectilinearGrid :
                Returns the input ``vtkRectilinearGrid`` with model data appended.

        Raises:
            PVGeoError: if the model length does not match the mesh cell count.
        """
        if isinstance(model, dict):
            # A dict of models: recurse once per entry, keyed by array name.
            for key in model.keys():
                TensorMeshReader.place_model_on_mesh(mesh, model[key], data_name=key)
            return mesh

        # model.GetNumberOfValues() if model is vtkDataArray
        # Make sure this model file fits the dimensions of the mesh
        ext = mesh.GetExtent()
        n1, n2, n3 = ext[1], ext[3], ext[5]
        if n1 * n2 * n3 < len(model):
            raise _helpers.PVGeoError(
                'Model `%s` has more data than the given mesh has cells to hold.'
                % data_name
            )
        elif n1 * n2 * n3 > len(model):
            raise _helpers.PVGeoError(
                'Model `%s` does not have enough data to fill the given mesh\'s cells.'
                % data_name
            )

        # Swap axes because VTK structures the coordinates a bit differently
        # - This is absolutely crucial!
        # - Do not play with unless you know what you are doing!
        if model.ndim > 1 and model.ndim < 3:
            # Multi-component model (one column per component).
            ncomp = model.shape[1]
            model = np.reshape(model, (n1, n2, n3, ncomp))
            model = np.swapaxes(model, 0, 1)
            model = np.swapaxes(model, 0, 2)
            # Now reverse Z axis
            model = model[::-1, :, :, :]  # Note it is in Fortran ordering
            model = np.reshape(model, (n1 * n2 * n3, ncomp))
        else:
            # Single-component model.
            model = np.reshape(model, (n1, n2, n3))
            model = np.swapaxes(model, 0, 1)
            model = np.swapaxes(model, 0, 2)
            # Now reverse Z axis
            model = model[::-1, :, :]  # Note it is in Fortran ordering
            model = model.flatten()

        # Convert data to VTK data structure and append to output
        c = interface.convert_array(model, name=data_name, deep=True)
        # THIS IS CELL DATA! Add the model data to CELL data:
        mesh.GetCellData().AddArray(c)
        return mesh
# ------------------------------------------------------------------#
# ---------------------- UBC MESH 2D ------------------------#
# ------------------------------------------------------------------#
    @staticmethod
    def ubc_mesh_2d(FileName, output):
        """This method reads a UBC 2D Mesh file and builds an empty
        ``vtkRectilinearGrid`` for data to be inserted into. `Format Specs`_.

        .. _Format Specs: http://giftoolscookbook.readthedocs.io/en/latest/content/fileFormats/mesh2Dfile.html

        Args:
            FileName (str) : The mesh filename as an absolute path for the input
                mesh file in UBC 3D Mesh Format.
            output (vtkRectilinearGrid) : The output data object

        Return:
            vtkRectilinearGrid :
                a ``vtkRectilinearGrid`` generated from the UBC 3D Mesh grid.
                Mesh is defined by the input mesh file.
                No data attributes here, simply an empty mesh. Use the
                ``place_model_on_mesh()`` method to associate with model data.
        """
        # Read in data from file: segment boundary points and the number of
        # cells (discretizations) between each pair of boundary points.
        xpts, xdisc, zpts, zdisc = ubcMeshReaderBase._ubc_mesh_2d_part(FileName)

        nx = np.sum(np.array(xdisc, dtype=int)) + 1
        nz = np.sum(np.array(zdisc, dtype=int)) + 1

        # Now generate the vtkRectilinear Grid
        def _genCoords(pts, disc, z=False):
            # Expand (boundary points, cell counts) into per-node coordinates
            # by linearly subdividing each segment into `disc[i]` cells.
            c = [float(pts[0])]
            for i in range(len(pts) - 1):
                start = float(pts[i])
                stop = float(pts[i + 1])
                num = int(disc[i])
                w = (stop - start) / num
                for j in range(1, num):
                    c.append(start + (j) * w)
                c.append(stop)
            c = np.array(c, dtype=float)
            if z:
                # UBC counts Z positive-down; VTK wants positive-up,
                # so negate and reverse.
                c = -c[::-1]
            return interface.convert_array(c, deep=True)

        xcoords = _genCoords(xpts, xdisc)
        zcoords = _genCoords(zpts, zdisc, z=True)
        # A 2D mesh is represented as a one-cell-thick 3D grid in Y.
        ycoords = interface.convert_array(np.zeros(1), deep=True)

        output.SetDimensions(nx, 2, nz)  # note this subtracts 1
        output.SetXCoordinates(xcoords)
        output.SetYCoordinates(ycoords)
        output.SetZCoordinates(zcoords)
        return output
@staticmethod
def ubc_model_2d(FileName):
"""Reads a 2D model file and returns a 1D NumPy float array. Use the
``place_model_on_mesh()`` method to associate with a grid.
Note:
Only supports single component data
Args:
FileName (str) : The model filename as an absolute path for the
input model file in UBCMesh Model Format. Also accepts a list of
string file names.
Return:
np.array :
a NumPy float array that holds the model data read from
the file. Use the ``place_model_on_mesh()`` method to associate
with a grid. If a list of file names is given then it will
return a dictionary of NumPy float array with keys as the
basenames of the files.
"""
if isinstance(FileName, (list, tuple)):
out = {}
for f in FileName:
out[os.path.basename(f)] = TensorMeshReader.ubc_model_2d(f)
return out
dim = np.genfromtxt(
FileName, dtype=int, delimiter=None, comments='!', max_rows=1
)
names = ['col%d' % i for i in range(dim[0])]
df = pd.read_csv(
FileName, names=names, delim_whitespace=True, skiprows=1, comment='!'
)
data = df.values
if np.shape(data)[0] != dim[1] and np.shape(data)[1] != dim[0]:
raise _helpers.PVGeoError('Mode file `%s` improperly formatted.' % FileName)
return data.flatten(order='F')
    def __ubc_mesh_data_2d(self, filename_mesh, filename_models, output):
        """Helper method to read a 2D mesh and its model files.

        Builds (or reuses) the cached ``vtkRectilinearGrid`` from the mesh
        file, copies it into ``output``, and caches one model array per
        model file for later timestep requests.
        """
        # Construct/read the mesh only when the cache is stale.
        if self.need_to_readMesh():
            TensorMeshReader.ubc_mesh_2d(filename_mesh, self.__mesh)
            self.need_to_readMesh(flag=False)
        output.DeepCopy(self.__mesh)
        if self.need_to_readModels() and self.this_has_models():
            self.__models = []
            for f in filename_models:
                # Read the model data
                self.__models.append(TensorMeshReader.ubc_model_2d(f))
            self.need_to_readModels(flag=False)
        return output
# ------------------------------------------------------------------#
# ---------------------- UBC MESH 3D ------------------------#
# ------------------------------------------------------------------#
    @staticmethod
    def ubc_mesh_3d(FileName, output):
        """This method reads a UBC 3D Mesh file and builds an empty
        ``vtkRectilinearGrid`` for data to be inserted into.

        Args:
            FileName (str) : The mesh filename as an absolute path for the input
                mesh file in UBC 3D Mesh Format.
            output (vtkRectilinearGrid) : The output data object

        Return:
            vtkRectilinearGrid :
                a ``vtkRectilinearGrid`` generated from the UBC 3D Mesh grid.
                Mesh is defined by the input mesh file.
                No data attributes here, simply an empty mesh. Use the
                ``place_model_on_mesh()`` method to associate with model data.
        """
        # --- Read in the mesh ---#
        fileLines = np.genfromtxt(FileName, dtype=str, delimiter='\n', comments='!')
        # Get mesh dimensions (+1: convert cell counts to node counts)
        dim = np.array(fileLines[0].split('!')[0].split(), dtype=int)
        dim = (dim[0] + 1, dim[1] + 1, dim[2] + 1)
        # The origin corner (Southwest-top)
        # - Remember UBC format specifies down as the positive Z
        # - Easting, Northing, Altitude
        oo = np.array(fileLines[1].split('!')[0].split(), dtype=float)
        ox, oy, oz = oo[0], oo[1], oo[2]

        # Read cell sizes for each line in the UBC mesh files.
        # Supports the compressed "N*width" repetition syntax.
        def _readCellLine(line):
            line_list = []
            for seg in line.split():
                if '*' in seg:
                    sp = seg.split('*')
                    seg_arr = np.ones((int(sp[0]),), dtype=float) * float(sp[1])
                else:
                    seg_arr = np.array([float(seg)], dtype=float)
                line_list.append(seg_arr)
            return np.concatenate(line_list)

        # Read the cell sizes (lines 2-4: easting, northing, vertical)
        cx = _readCellLine(fileLines[2].split('!')[0])
        cy = _readCellLine(fileLines[3].split('!')[0])
        cz = _readCellLine(fileLines[4].split('!')[0])
        # Invert the indexing of the vector to start from the bottom.
        cz = cz[::-1]
        # Adjust the reference point to the bottom south west corner
        oz = oz - np.sum(cz)
        # Now generate the coordinates for from cell width and origin
        cox = ox + np.cumsum(cx)
        cox = np.insert(cox, 0, ox)
        coy = oy + np.cumsum(cy)
        coy = np.insert(coy, 0, oy)
        coz = oz + np.cumsum(cz)
        coz = np.insert(coz, 0, oz)
        # Set the dims and coordinates for the output
        output.SetDimensions(dim[0], dim[1], dim[2])
        # Convert to VTK array for setting coordinates
        output.SetXCoordinates(interface.convert_array(cox, deep=True))
        output.SetYCoordinates(interface.convert_array(coy, deep=True))
        output.SetZCoordinates(interface.convert_array(coz, deep=True))
        return output
    def __ubc_mesh_data_3d(self, filename_mesh, filename_models, output):
        """Helper method to read a 3D mesh and its model files.

        Mirrors ``__ubc_mesh_data_2d`` but uses the 3D mesh/model readers.
        """
        # Construct/read the mesh only when the cache is stale.
        if self.need_to_readMesh():
            TensorMeshReader.ubc_mesh_3d(filename_mesh, self.__mesh)
            self.need_to_readMesh(flag=False)
        output.DeepCopy(self.__mesh)
        if self.need_to_readModels() and self.this_has_models():
            self.__models = []
            for f in filename_models:
                # Read the model data
                self.__models.append(TensorMeshReader.ubc_model_3d(f))
            self.need_to_readModels(flag=False)
        return output
def __ubc_tensor_mesh(self, filename_mesh, filename_models, output):
"""Wrapper to Read UBC GIF 2D and 3D meshes. UBC Mesh 2D/3D models are
defined using a 2-file format. The "mesh" file describes how the data is
descritized. The "model" file lists the physical property values for all
cells in a mesh. A model file is meaningless without an associated mesh
file. If the mesh file is 2D, then then model file must also be in the
2D format (same for 3D).
Args:
filename_mesh (str) : The mesh filename as an absolute path for the
input mesh file in UBC 2D/3D Mesh Format
filename_models (str or list(str)) : The model filename(s) as an
absolute path for the input model file in UBC 2D/3D Model Format.
output (vtkRectilinearGrid) : The output data object
Return:
vtkRectilinearGrid :
a ``vtkRectilinearGrid`` generated from the UBC 2D/3D Mesh grid.
Mesh is defined by the input mesh file.
Cell data is defined by the input model file.
"""
# Check if the mesh is a UBC 2D mesh
if self.is_2d():
self.__ubc_mesh_data_2d(filename_mesh, filename_models, output)
# Check if the mesh is a UBC 3D mesh
elif self.is_3d():
self.__ubc_mesh_data_3d(filename_mesh, filename_models, output)
else:
raise _helpers.PVGeoError('File format not recognized')
return output
    def RequestData(self, request, inInfo, outInfo):
        """Handles data request by the pipeline.

        Reads the mesh/models (cached after the first pass) and attaches the
        model for the requested timestep to the output grid.
        """
        # Get output:
        output = self.GetOutputData(outInfo, 0)
        # Get requested time index
        i = _helpers.get_requested_time(self, outInfo)
        self.__ubc_tensor_mesh(
            self.get_mesh_filename(), self.get_model_filenames(), output
        )
        # Place the model data for given timestep onto the mesh
        # (skipped silently when no model exists for this timestep).
        if len(self.__models) > i:
            TensorMeshReader.place_model_on_mesh(
                output, self.__models[i], self.get_data_name()
            )
        return 1
    def RequestInformation(self, request, inInfo, outInfo):
        """Handles info request by pipeline about timesteps and grid extents."""
        # Call parent to handle time stuff
        ubcMeshReaderBase.RequestInformation(self, request, inInfo, outInfo)
        # Now set whole output extent
        if self.need_to_readMesh():
            ext = self._read_extent()
            info = outInfo.GetInformationObject(0)
            # Set WHOLE_EXTENT: This is absolutely necessary
            # (VTK needs the full structured extent before RequestData runs).
            info.Set(vtk.vtkStreamingDemandDrivenPipeline.WHOLE_EXTENT(), ext, 6)
        return 1
    def clear_mesh(self):
        """Use to clean/rebuild the mesh.

        Drops the cached grid and also clears any cached models, since a new
        mesh invalidates them.
        """
        self.__mesh = vtk.vtkRectilinearGrid()
        ubcMeshReaderBase.clear_models(self)
    def clear_models(self):
        """Use to clean the models and force a reread on the next request."""
        self.__models = []
        ubcMeshReaderBase.clear_models(self)
###############################################################################
class TensorMeshAppender(ModelAppenderBase):
    """Append a time series of UBC tensor-mesh models onto an input
    ``vtkRectilinearGrid`` as data arrays.
    """

    __displayname__ = 'UBC Tensor Mesh Appender'
    __category__ = 'filter'

    def __init__(self, **kwargs):
        ModelAppenderBase.__init__(
            self,
            inputType='vtkRectilinearGrid',
            outputType='vtkRectilinearGrid',
            **kwargs
        )

    def _read_up_front(self):
        """Load every model file once and cache the arrays."""
        if self._is_3D:
            reader = ubcMeshReaderBase.ubc_model_3d
        else:
            # UBC 2D grids are specified on an XZ plane (no Y component).
            # This only works prior to rotations that account for a real
            # spatial reference.
            reader = TensorMeshReader.ubc_model_2d
        self._models = [reader(fname) for fname in self._model_filenames]
        self.need_to_read(flag=False)

    def _place_on_mesh(self, output, idx=0):
        """Attach the cached model at position ``idx`` to ``output``."""
        TensorMeshReader.place_model_on_mesh(
            output, self._models[idx], self.get_data_name()
        )
###############################################################################
class TopoMeshAppender(AlgorithmBase):
    """This filter reads a single discrete topography file and appends it as a
    boolean data array: 1 for cells at/below the topographic surface
    (active), 0 for cells above it.
    """

    __displayname__ = 'Append UBC Discrete Topography'
    __category__ = 'filter'

    def __init__(
        self, inputType='vtkRectilinearGrid', outputType='vtkRectilinearGrid', **kwargs
    ):
        AlgorithmBase.__init__(
            self,
            nInputPorts=1,
            inputType=inputType,
            nOutputPorts=1,
            outputType=outputType,
        )
        # Path of the discrete topography file (set via set_topo_filename).
        self._topoFileName = kwargs.get('filename', None)
        # Parsed (i, j, k) indices from the topo file.
        self.__indices = None
        self.__need_to_read = True
        # Grid dimensions declared in the topo file header (easting, northing).
        self.__ne, self.__nn = None, None

    def need_to_read(self, flag=None):
        """Ask self if the reader needs to read the files again

        Args:
            flag (bool): if the flag is set then this method will set the read
                status

        Return:
            bool:
                The status of the reader aspect of the filter.
        """
        if flag is not None and isinstance(flag, (bool, int)):
            self.__need_to_read = flag
        return self.__need_to_read

    def Modified(self, read_again=True):
        """Call modified if the files needs to be read again again."""
        if read_again:
            self.__need_to_read = read_again
        AlgorithmBase.Modified(self)

    def modified(self, read_again=True):
        """Call modified if the files needs to be read again again."""
        return self.Modified(read_again=read_again)

    def _read_up_front(self):
        """Internal helper to read the topography file once at start."""
        # Read the file: header line holds the dims, the rest are indices.
        content = np.genfromtxt(
            self._topoFileName, dtype=str, delimiter='\n', comments='!'
        )
        dim = content[0].split()
        self.__ne, self.__nn = int(dim[0]), int(dim[1])
        self.__indices = pd.read_csv(
            StringIO("\n".join(content[1::])),
            names=['i', 'j', 'k'],
            delim_whitespace=True,
        )
        # NOTE: K indices are inverted
        self.need_to_read(flag=False)
        return

    def _place_on_mesh(self, output):
        """Internal helper to place an active cells model on the mesh."""
        # Check mesh extents to math topography
        nx, ny, nz = output.GetDimensions()
        nx, ny, nz = nx - 1, ny - 1, nz - 1  # because GetDimensions counts the nodes
        topz = np.max(self.__indices['k']) + 1
        if nx != self.__nn or ny != self.__ne or topz > nz:
            raise _helpers.PVGeoError(
                'Dimension mismatch between input grid and topo file.'
            )
        # # Adjust the k indices to be in caarteian system
        # self.__indices['k'] = nz - self.__indices['k']
        # Fill out the topo and add it as model as it will be in UBC format
        # Create a 3D array of 1s and zeros (1 means beneath topo or active)
        topo = np.empty((ny, nx, nz), dtype=float)
        topo[:] = np.nan
        # For each surface index: 0 above the surface cell, 1 at/below it.
        for row in self.__indices.values:
            i, j, k = row
            topo[i, j, k + 1 :] = 0
            topo[i, j, : k + 1] = 1
        # Add as model... ``place_model_on_mesh`` handles the rest
        # NOTE(review): flatten() uses C order here while the UBC readers use
        # Fortran order — confirm place_model_on_mesh expects this layout.
        TensorMeshReader.place_model_on_mesh(
            output, topo.flatten(), 'Active Topography'
        )
        return

    def RequestData(self, request, inInfo, outInfo):
        """Used by pipeline to generate output"""
        # Get input/output of Proxy
        pdi = self.GetInputData(inInfo, 0, 0)
        output = self.GetOutputData(outInfo, 0)
        output.DeepCopy(pdi)  # ShallowCopy if you want changes to propagate upstream
        # Perfrom task:
        if self.__need_to_read:
            self._read_up_front()
        # Place the model data for given timestep onto the mesh
        self._place_on_mesh(output)
        return 1

    #### Setters and Getters ####

    def clear_topo_file(self):
        """Use to clear data file name."""
        self._topoFileName = None
        self.Modified(read_again=True)

    def set_topo_filename(self, filename):
        """Use to set the file names for the reader. Handles single strings only"""
        if filename is None:
            return  # do nothing if None is passed by a constructor on accident
        elif isinstance(filename, str) and self._topoFileName != filename:
            self._topoFileName = filename
            self.Modified()
        return 1
###############################################################################
#
# import numpy as np
# indices = np.array([[0,0,1],
# [0,1,1],
# [0,2,1],
# [1,0,1],
# [1,1,1],
# [1,2,1],
# [2,0,1],
# [2,1,1],
# [2,2,1],
# ])
#
# topo = np.empty((3,3,3), dtype=float)
# topo[:] = np.nan
#
# for row in indices:
# i, j, k = row
# topo[i, j, k:] = 0
# topo[i, j, :k] = 1
# topo
|
bsd-3-clause
| 8,269,175,232,248,584,000
| 37.371278
| 110
| 0.557599
| false
|
Nettacker/Nettacker
|
lib/transactions/maltego/nettacker_transforms/src/nettacker_transforms/transforms/wp_xmlrpc_dos_vuln.py
|
1
|
1880
|
import random
from canari.maltego.transform import Transform
from canari.maltego.entities import URL
from canari.framework import EnableDebugWindow
from common.entities import NettackerScan
from lib.scan.wp_xmlrpc.engine import start
from database.db import __logs_by_scan_id as find_log
__author__ = 'Shaddy Garg'
__copyright__ = 'Copyright 2018, nettacker_transforms Project'
__credits__ = []
__license__ = 'GPLv3'
__version__ = '0.1'
__maintainer__ = 'Shaddy Garg'
__email__ = 'shaddygarg1@gmail.com'
__status__ = 'Development'
@EnableDebugWindow
class WordpressXMLPRCScan(Transform):
    """Maltego transform: run Nettacker's wp_xmlrpc scan against the input
    host and emit one URL entity per vulnerable endpoint found in the scan
    log.
    """

    # The transform input entity type.
    input_type = NettackerScan

    def do_transform(self, request, response, config):
        # Pull the scan parameters off the incoming Maltego entity.
        scan_request = request.entity
        # Random 32-hex-char id used to correlate log rows for this scan.
        scan_id = "".join(random.choice("0123456789abcdef") for x in range(32))
        scan_request.ports = scan_request.ports.split(', ') if scan_request.ports is not None else None
        # NOTE(review): the literal positional args (1, 1, 'abcd', 0, ...)
        # follow lib.scan.wp_xmlrpc.engine.start's signature — verify against
        # the engine if that signature changes.
        start(scan_request.host, [], [], scan_request.ports, scan_request.timeout_sec, scan_request.thread_no,
              1, 1, 'abcd', 0, "en", scan_request.verbose, scan_request.socks_proxy, scan_request.retries, [], scan_id,
              "Through Maltego")
        # Fetch the results this scan wrote to the Nettacker database.
        results = find_log(scan_id, "en")
        for result in results:
            url = result["HOST"] + ":" + result["PORT"]
            response += URL(url=url, title=result["DESCRIPTION"],
                            short_title="Site is vulnerable to XMLPRC DOS attacks ",
                            link_label='wp_xmlrpc_dos_vuln')
        return response

    def on_terminate(self):
        """This method gets called when transform execution is prematurely terminated. It is only applicable for local
        transforms. It can be excluded if you don't need it."""
        pass
|
gpl-3.0
| 8,579,453,768,622,444,000
| 37.367347
| 119
| 0.653723
| false
|
xmichael/tagger
|
src/main/lib/logtool.py
|
1
|
1546
|
## Just some utility functions for logging messages. Most important is getLogger.
import sys, pprint, json, logging, configuration
### Constants ###
#################
## Can only log in stderr (or environ['wsgi.errors']) when using WSGI:
def dbg(msg):
    """Write a debug message (plus newline) to stderr.

    Under WSGI stdout is reserved for the response body, so diagnostics must
    go to stderr (or environ['wsgi.errors']).

    Args:
        msg: any object; it is stringified like ``print`` would.
    """
    # sys.stderr.write works on both Python 2 and 3, unlike the original
    # `print >> sys.stderr, msg` chevron syntax which is Python-2-only.
    sys.stderr.write("%s\n" % (msg,))
def pp(obj):
    """
    Pretty-format a Python object for the debug channel.

    Returns the formatted string; it does not print anything itself.
    """
    # Module-level pformat with indent=4 is equivalent to
    # PrettyPrinter(indent=4).pformat(obj).
    return pprint.pformat(obj, indent=4)
def jsonpp(obj):
    """
    Parse a JSON string and pretty-format the resulting Python object.

    Args:
        obj (str): a JSON document.

    Returns:
        str: the pretty-formatted representation from ``pp``.
    """
    # Bug fix: the formatted text was previously discarded (no return), so
    # this function always returned None. Return it like ``pp`` does.
    return pp(json.loads(obj))
def getLogger(name, parent=None):
    """ Create a logger with some sane configuration

    Logs at DEBUG level to the file given by ``configuration.get_log_file()``.

    Args:
        name (str): name of logger. Should be the name of the file.
        parent (str): name of parent logger to inherit its properties

    Return:
        logging.Logger: the configured logger.

    NOTE(review): each call attaches a fresh FileHandler to the logger;
    calling this twice with the same name duplicates every log line — verify
    callers invoke it only once per module.
    """
    if parent:
        # create child logger that inherits properties from father
        logger = logging.getLogger(parent + "." + name)
    else:
        #create parent logger with new properties
        logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    # create file handler which logs even debug messages
    fh = logging.FileHandler(configuration.get_log_file())
    fh.setLevel(logging.DEBUG)
    # create formatter and add it to the handler
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    # add the handlers to the logger
    logger.addHandler(fh)
    return logger
|
bsd-3-clause
| -8,895,426,044,850,906,000
| 31.893617
| 93
| 0.651358
| false
|
Labbiness/Pancake
|
setup.py
|
1
|
1437
|
# -*- encoding:utf-8 -*-
#
# Copyright (c) 2017-2018 Shota Shimazu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup, find_packages
import sys
sys.path.append('./pancake')
sys.path.append('./tests')
# Package metadata for Pancake, an abstract layer over package managers.
if __name__ == "__main__":
    setup(
        name = "Pancake",
        version='0.0.1',
        author = "Shota Shimazu",
        author_email = "hornet.live.mf@gmail.com",
        packages = find_packages(),
        install_requires=[
        ],
        # NOTE(review): the console script points at 'Pancake.Pancake:main'
        # while sys.path above appends './pancake' (lowercase) — confirm the
        # package directory name matches this module path.
        entry_points = {
            'console_scripts':[
                'pancake = Pancake.Pancake:main',
            ],
        },
        description = "Abstract layer for any package manager.",
        long_description = "Abstract layer for any package manager.",
        url = "https://github.com/shotastage/Pancake.git",
        license = "Apache",
        platforms = ["POSIX", "Windows", "Mac OS X"],
        test_suite = "djconsole_test.suite",
    )
|
apache-2.0
| 5,198,664,308,909,516,000
| 31.659091
| 74
| 0.631872
| false
|
ciarams87/PyU4V
|
PyU4V/tools/openstack/migrate.py
|
1
|
7265
|
# Copyright (c) 2020 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
OpenStack migrate script.
The script migrates volumes from the old SMI-S Masking view to the
new REST Masking view used from Pike onwards
"""
from __future__ import print_function
from builtins import input
import sys
from PyU4V.tools.openstack import migrate_utils
from PyU4V import univmax_conn
sys.path.append('../../..')
sys.path.append('.')

# Module-level connection and helper used by all functions below.
conn = univmax_conn.U4VConn()
utils = migrate_utils.MigrateUtils(conn)

# Banner explaining what this interactive migration script does.
utils.smart_print(
    '********************************************************************',
    migrate_utils.DEBUG)
utils.smart_print(
    '*** Welcome to the migration script for the VMAX/PowerMax driver ***',
    migrate_utils.DEBUG)
utils.smart_print(
    '*** to migrate from SMI-S masking view to REST masking view.     ***',
    migrate_utils.DEBUG)
utils.smart_print(
    '*** This is recommended if you intend using live migration to    ***',
    migrate_utils.DEBUG)
utils.smart_print(
    '*** move from one compute node to another.                       ***',
    migrate_utils.DEBUG)
utils.smart_print(
    '********************************************************************',
    migrate_utils.DEBUG)
utils.smart_print('version is %s', migrate_utils.DEBUG, migrate_utils.VERSION)

masking_view_list = conn.provisioning.get_masking_view_list()
# 'revert' as the sole CLI argument switches to the revert workflow.
is_revert = False
no_action = True
if len(sys.argv) == 2:
    if sys.argv[1] == 'revert':
        is_revert = True
    else:
        utils.smart_print('%s is not a valid argument.',
                          migrate_utils.DEBUG, sys.argv[1])
        sys.exit()
def revert_case(masking_view_name):
    """The revert case of the migrate process.

    Moves a masking view created by this script back toward the original
    layout by selecting one of its child storage groups.

    :param masking_view_name: masking view name -- str
    :returns: masking view details -- dict
              element details -- dict
              no action flag -- boolean (True when nothing was done)
    """
    if utils.check_masking_view_for_migration(
            masking_view_name, True):
        utils.smart_print(
            'NEW MASKING VIEW IS %s',
            migrate_utils.DEBUG, masking_view_name)
        masking_view_details = (
            utils.get_elements_from_masking_view(masking_view_name))
        # The storage group is the parent SG
        # Get the list of child SGs
        child_storage_group_list = (
            conn.provisioning.get_child_storage_groups_from_parent(
                masking_view_details['storagegroup']))
        # Let the user pick which child SG to revert to; this replaces the
        # parent SG in masking_view_details.
        element_details, masking_view_details['storagegroup'] = (
            utils.choose_storage_group(
                masking_view_name, child_storage_group_list,
                masking_view_details['portgroup'],
                masking_view_details['initiatorgroup'],
                is_revert))
        # Check if masking view exists and if it does validate it
        if element_details:
            utils.get_or_create_masking_view(
                element_details,
                masking_view_details['portgroup'],
                masking_view_details['initiatorgroup'],
                is_revert)
        else:
            # No usable selection -> abort the whole script.
            utils.smart_print(
                'NO MIGRATION', migrate_utils.WARNING)
            sys.exit()
        return masking_view_details, element_details, False
    else:
        return dict(), dict(), True
def migrate_case(masking_view_name):
    """The migrate case of the migrate process.

    Migrates an old SMI-S style masking view to the REST naming layout.

    :param masking_view_name: masking view name -- str
    :returns: masking view details -- dict
              element details -- dict
              no action flag -- boolean (True when nothing was done)
    """
    if utils.check_masking_view_for_migration(masking_view_name):
        utils.smart_print(
            'OLD MASKING VIEW IS %s',
            migrate_utils.DEBUG, masking_view_name)
        masking_view_details = (
            utils.get_elements_from_masking_view(masking_view_name))
        # Compile the new names of the SGs and MV
        element_details = utils.compile_new_element_names(
            masking_view_name, masking_view_details['portgroup'],
            masking_view_details['initiatorgroup'],
            masking_view_details['storagegroup'])
        # Check if masking view exists and if it does validate it
        utils.get_or_create_masking_view(
            element_details, masking_view_details['portgroup'],
            masking_view_details['initiatorgroup'])
        return masking_view_details, element_details, False
    else:
        return dict(), dict(), True
def move_volumes(masking_view_details, element_details):
    """Move volumes from one masking view to another.

    Copies QoS settings from source to target storage group first, then
    moves the volumes and prints before/after summaries.

    :param masking_view_details: masking view details -- dict
    :param element_details: element details -- dict
    """
    # Check the qos setting of source and target storage group
    utils.set_qos(
        masking_view_details['storagegroup'],
        element_details['new_sg_name'])
    volume_list, create_volume_flag = utils.get_volume_list(
        masking_view_details['storagegroup'])
    # Nothing to do when the source SG has no volumes.
    if volume_list:
        message = utils.move_volumes_from_source_to_target(
            volume_list, masking_view_details['storagegroup'],
            element_details['new_sg_name'], create_volume_flag)
        print_str = '%s SOURCE STORAGE GROUP REMAINS'
        utils.smart_print(
            print_str, migrate_utils.DEBUG,
            masking_view_details['storagegroup'])
        utils.print_pretty_table(message)
        new_storage_group = utils.get_storage_group(
            element_details['new_sg_name'])
        print_str = '%s TARGET STORAGE GROUP DETAILS:'
        utils.smart_print(
            print_str, migrate_utils.DEBUG,
            element_details['new_sg_name'])
        utils.print_pretty_table(new_storage_group)
# Interactive main loop: offer each eligible masking view for migration
# (or reversion), prompting the user per view.
for masking_view in masking_view_list:
    if utils.validate_masking_view(masking_view, is_revert):
        txt = 'Do you want to migrate %s. Y/N or X(exit): ' % masking_view
        txt_out = input(txt)
        if utils.check_input(txt_out, 'Y'):
            if is_revert:
                masking_view_components, element_dict, no_action = (
                    revert_case(masking_view))
            else:
                masking_view_components, element_dict, no_action = (
                    migrate_case(masking_view))
            # Get the volumes in the storage group
            if masking_view_components and (
                    'storagegroup' in masking_view_components):
                move_volumes(masking_view_components, element_dict)
            else:
                utils.smart_print('NO MIGRATION', migrate_utils.WARNING)
        elif utils.check_input(txt_out, 'X'):
            sys.exit()

# Report when no masking view was touched during the whole run.
if no_action:
    utils.smart_print(
        'No OpenStack masking views eligible for migration.',
        migrate_utils.DEBUG)
|
mit
| 42,564,993,807,781,930
| 36.838542
| 78
| 0.615416
| false
|
mailgun/talon
|
talon/signature/learning/helpers.py
|
1
|
6879
|
# -*- coding: utf-8 -*-
""" The module provides:
* functions used when evaluating signature's features
* regexp's constants used when evaluating signature's features
"""
from __future__ import absolute_import
import unicodedata
import regex as re
from talon.utils import to_unicode
from talon.signature.constants import SIGNATURE_MAX_LINES
rc = re.compile
RE_EMAIL = rc('\S@\S')
RE_RELAX_PHONE = rc('(\(? ?[\d]{2,3} ?\)?.{,3}?){2,}')
RE_URL = rc(r'''https?://|www\.[\S]+\.[\S]''')
# Taken from:
# http://www.cs.cmu.edu/~vitor/papers/sigFilePaper_finalversion.pdf
# Line matches the regular expression "^[\s]*---*[\s]*$".
RE_SEPARATOR = rc('^[\s]*---*[\s]*$')
# Taken from:
# http://www.cs.cmu.edu/~vitor/papers/sigFilePaper_finalversion.pdf
# Line has a sequence of 10 or more special characters.
RE_SPECIAL_CHARS = rc(('^[\s]*([\*]|#|[\+]|[\^]|-|[\~]|[\&]|[\$]|_|[\!]|'
'[\/]|[\%]|[\:]|[\=]){10,}[\s]*$'))
RE_SIGNATURE_WORDS = rc(('(T|t)hank.*,|(B|b)est|(R|r)egards|'
'^sent[ ]{1}from[ ]{1}my[\s,!\w]*$|BR|(S|s)incerely|'
'(C|c)orporation|Group'))
# Taken from:
# http://www.cs.cmu.edu/~vitor/papers/sigFilePaper_finalversion.pdf
# Line contains a pattern like Vitor R. Carvalho or William W. Cohen.
RE_NAME = rc('[A-Z][a-z]+\s\s?[A-Z][\.]?\s\s?[A-Z][a-z]+')
INVALID_WORD_START = rc('\(|\+|[\d]')
BAD_SENDER_NAMES = [
# known mail domains
'hotmail', 'gmail', 'yandex', 'mail', 'yahoo', 'mailgun', 'mailgunhq',
'example',
# first level domains
'com', 'org', 'net', 'ru',
# bad words
'mailto'
]
def binary_regex_search(prog):
    '''Build a predicate returning 1/0 from a regex *search*.

    The returned callable yields 1 when the compiled pattern ``prog`` is
    found anywhere in its string argument, and 0 otherwise.

    >>> import regex as re
    >>> binary_regex_search(re.compile("12"))("12")
    1
    >>> binary_regex_search(re.compile("12"))("34")
    0
    '''
    def predicate(text):
        if prog.search(text):
            return 1
        return 0
    return predicate
def binary_regex_match(prog):
    '''Build a predicate returning 1/0 from a regex *match*.

    The returned callable yields 1 when the string argument matches the
    compiled pattern ``prog`` at its start, and 0 otherwise.

    >>> import regex as re
    >>> binary_regex_match(re.compile("12"))("12 3")
    1
    >>> binary_regex_match(re.compile("12"))("3 12")
    0
    '''
    def predicate(text):
        if prog.match(text):
            return 1
        return 0
    return predicate
def flatten_list(list_to_flatten):
    """Flatten exactly one level of nesting.

    >>> flatten_list([[1, 2], [3, 4, 5]])
    [1, 2, 3, 4, 5]
    >>> flatten_list([[1], [[2]]])
    [1, [2]]
    >>> flatten_list([1, [2]])
    Traceback (most recent call last):
    ...
    TypeError: 'int' object is not iterable
    """
    flattened = []
    for sublist in list_to_flatten:
        flattened.extend(sublist)
    return flattened
def contains_sender_names(sender):
    '''Returns a functions to search sender\'s name or it\'s part.

    Builds a regex alternation of every extracted name (both as-is and
    capitalized); when no names can be extracted, the raw sender string is
    used as the pattern. An empty sender yields a constant-0 predicate.

    >>> feature = contains_sender_names("Sergey N. Obukhov <xxx@example.com>")
    >>> feature("Sergey Obukhov")
    1
    >>> feature("BR, Sergey N.")
    1
    >>> feature("Sergey")
    1
    >>> contains_sender_names("<serobnic@mail.ru>")("Serobnic")
    1
    >>> contains_sender_names("<serobnic@mail.ru>")("serobnic")
    1
    '''
    # '( |$)' anchors each name to a word boundary at its right edge.
    names = '( |$)|'.join(flatten_list([[e, e.capitalize()]
                                        for e in extract_names(sender)]))
    names = names or sender
    if names != '':
        return binary_regex_search(re.compile(names))
    return lambda s: 0
def extract_names(sender):
    """Tries to extract sender's names from `From:` header.

    It could extract not only the actual names but e.g.
    the name of the company, parts of email, etc.

    >>> extract_names('Sergey N. Obukhov <serobnic@mail.ru>')
    ['Sergey', 'Obukhov', 'serobnic']
    >>> extract_names('')
    []

    Note: the result order is not guaranteed (a set is used to dedupe).
    """
    sender = to_unicode(sender, precise=True)
    # Remove non-alphabetical characters
    sender = "".join([char if char.isalpha() else ' ' for char in sender])
    # Remove too short words and words from "black" list i.e.
    # words like `ru`, `gmail`, `com`, `org`, etc.
    sender = [word for word in sender.split() if len(word) > 1 and
              not word in BAD_SENDER_NAMES]
    # Remove duplicates
    names = list(set(sender))
    return names
def categories_percent(s, categories):
    '''Return the percentage of characters that fall in *categories*.

    ``categories`` holds Unicode general-category codes (e.g. "Po", "Nd").

    >>> categories_percent("qqq ggg hhh", ["Po"])
    0.0
    >>> categories_percent("q,w.", ["Po"])
    50.0
    >>> categories_percent("qqq ggg hhh", ["Nd"])
    0.0
    >>> categories_percent("q5", ["Nd"])
    50.0
    >>> categories_percent("s.s,5s", ["Po", "Nd"])
    50.0
    '''
    s = to_unicode(s, precise=True)
    if not len(s):
        return 0
    matched = sum(1 for ch in s if unicodedata.category(ch) in categories)
    return 100 * float(matched) / len(s)
def punctuation_percent(s):
    '''Return the percentage of punctuation characters in *s*.

    >>> punctuation_percent("qqq ggg hhh")
    0.0
    >>> punctuation_percent("q,w.")
    50.0
    '''
    # "Po" is the Unicode general category for (other) punctuation.
    punctuation_categories = ['Po']
    return categories_percent(s, punctuation_categories)
def capitalized_words_percent(s):
    '''Return the percentage of valid words that look capitalized.

    A word is considered at all only if it is longer than 2 chars; it is
    "valid" unless it starts with a digit, '(' or '+', and "capitalized"
    when its first letter is upper-case and its second is not.
    '''
    text = to_unicode(s, precise=True)
    words = [w for w in re.split('\s', text) if w.strip() and len(w) > 2]
    valid = [w for w in words if not INVALID_WORD_START.match(w)]
    capitalized = [w for w in valid
                   if w[0].isupper() and not w[1].isupper()]
    # Require at least one valid word and more than one word overall;
    # otherwise the statistic is meaningless and we report 0.
    if valid and len(words) > 1:
        return 100 * float(len(capitalized)) / len(valid)
    return 0
def many_capitalized_words(s):
    """Return 1 when more than 66% of valid words are capitalized, else 0."""
    if capitalized_words_percent(s) > 66:
        return 1
    return 0
def has_signature(body, sender):
    '''Checks if the body has signature. Returns True or False.

    Scans the last SIGNATURE_MAX_LINES non-empty lines: a line containing
    the sender's name is an immediate True; lines containing exactly one of
    phone/email/URL each count as one upvote, and two upvotes make a
    signature. Note the fall-through returns None (falsy), not False.
    '''
    non_empty = [line for line in body.splitlines() if line.strip()]
    candidate = non_empty[-SIGNATURE_MAX_LINES:]
    upvotes = 0
    for line in candidate:
        # we check lines for sender's name, phone, email and url,
        # those signature lines don't take more then 27 lines
        if len(line.strip()) > 27:
            continue
        elif contains_sender_names(sender)(line):
            return True
        elif (binary_regex_search(RE_RELAX_PHONE)(line) +
              binary_regex_search(RE_EMAIL)(line) +
              binary_regex_search(RE_URL)(line) == 1):
            upvotes += 1
    if upvotes > 1:
        return True
|
apache-2.0
| -8,025,110,656,605,266,000
| 29.303965
| 79
| 0.594709
| false
|
foospidy/DbDat
|
plugins/mysql/check_user_access_denied.py
|
1
|
2424
|
import ConfigParser
import os.path
class check_user_access_denied():
    """
    check_user_access_denied:
    Access denied events for user, these should be reviewed for malicious activity.
    """
    # References:
    # https://www.percona.com/blog/2012/12/28/auditing-login-attempts-in-mysql/
    TITLE = 'User Access Denied'
    CATEGORY = 'Configuration'
    TYPE = 'configuration_file'
    SQL = None

    verbose = False
    skip = False
    result = {}
    appuser = None

    def do_check(self, configuration_file):
        # Parse the MySQL configuration to locate the error log, then count
        # "Access denied" lines for the configured application user.
        configuration = ConfigParser.ConfigParser()
        count = 0

        try:
            configuration.read(configuration_file)
        except ConfigParser.ParsingError as e:
            # MySQL config files are not strict INI; parse errors are expected.
            if self.verbose:
                print('Ignoring parsing errors:\n' + str(e))

        try:
            # NOTE(review): ConfigParser.get returns a string, so [0] takes
            # its first character, not the first element — confirm whether
            # the index is intentional here.
            error_log_file = configuration.get('mysqld', 'log_error')[0]
            if os.path.isfile(str(error_log_file)):
                with open(str(error_log_file), 'r') as log:
                    for line in log:
                        if "Access denied for user '" + self.appuser + "'" in line:
                            count += 1

                # Severity thresholds: 0 = GREEN, 1-4 = YELLOW, 5+ = RED.
                if 0 == count:
                    self.result['level'] = 'GREEN'
                    self.result['output'] = 'No access denied events found for user.'
                elif count < 5:
                    self.result['level'] = 'YELLOW'
                    self.result['output'] = 'Access denied events found for user.'
                else:
                    self.result['level'] = 'RED'
                    self.result['output'] = 'Excessive access denied events found for user.'
            else:
                self.result['level'] = 'YELLOW'
                self.result['output'] = 'Could not access error log file ' + str(error_log_file) + '.'
        except ConfigParser.NoOptionError as e:
            self.result['level'] = 'RED'
            self.result['output'] = 'Error log file not enabled.'

        return self.result

    def __init__(self, parent):
        print('Performing check: ' + self.TITLE)
        self.verbose = parent.verbose

        # Without an application user there is nothing to grep for.
        if '' != parent.appuser:
            self.appuser = parent.appuser
        else:
            self.skip = True
            self.result['level'] = 'GRAY'
            self.result['output'] = 'No application user set, check skipped.'
|
gpl-2.0
| 3,906,439,819,165,909,500
| 30.894737
| 102
| 0.526815
| false
|
kobronson/cs-voltdb
|
tests/scripts/examples/sql_coverage/strings-schema.py
|
1
|
1798
|
#!/usr/bin/env python
# This file is part of VoltDB.
# Copyright (C) 2008-2013 VoltDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# Schema dictionary consumed by the SQL coverage harness. `FastSerializer`
# is provided by the harness environment before this file is evaluated.
# NOTE(review): "indexes": ("ID") is a plain string, not a one-tuple
# (missing trailing comma) — confirm the harness accepts a bare string.
{
    "P1": {
        "columns": (("DESC", FastSerializer.VOLTTYPE_STRING),
                    ("DESC_INLINE", FastSerializer.VOLTTYPE_STRING),
                    ("ID", FastSerializer.VOLTTYPE_INTEGER),
                    ("RATIO", FastSerializer.VOLTTYPE_FLOAT)),
        "partitions": (),
        "indexes": ("ID")
    },
    "R1": {
        "columns": (("DESC", FastSerializer.VOLTTYPE_STRING),
                    ("DESC_INLINE", FastSerializer.VOLTTYPE_STRING),
                    ("ID", FastSerializer.VOLTTYPE_INTEGER),
                    ("RATIO", FastSerializer.VOLTTYPE_FLOAT)),
        "partitions": (),
        "indexes": ("ID")
    }
}
|
agpl-3.0
| 1,006,511,410,808,711,300
| 41.809524
| 72
| 0.664071
| false
|
rfleschenberg/django-shop-rest-checkout
|
docs/conf.py
|
1
|
1474
|
# -*- coding: utf-8 -*-
# Sphinx configuration for the django-shop-rest-checkout documentation.
from __future__ import unicode_literals

import os

# Core Sphinx extensions used when building the docs.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.coverage',
    'sphinx.ext.doctest',
    'sphinx.ext.extlinks',
    'sphinx.ext.ifconfig',
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
]
# Optional spell-checking pass, enabled via the SPELLCHECK environment variable.
if os.getenv('SPELLCHECK'):
    # The trailing comma makes the right-hand side a one-element tuple,
    # which is concatenated onto the extensions list.
    extensions += 'sphinxcontrib.spelling',
    spelling_show_suggestions = True
    spelling_lang = 'en_US'

# Document discovery: reStructuredText sources rooted at index.rst.
source_suffix = '.rst'
master_doc = 'index'

# Project metadata.
project = 'django-shop-rest-checkout'
year = '2017'
author = 'René Fleschenberg'
copyright = '{0}, {1}'.format(year, author)
version = release = '0.1.0'

pygments_style = 'trac'
templates_path = ['.']
# Shorthand roles for linking to GitHub issues and pull requests,
# e.g. :issue:`12` or :pr:`34`.
extlinks = {
    'issue': ('https://github.com/rfleschenberg/django-shop-rest-checkout/issues/%s', '#'),
    'pr': ('https://github.com/rfleschenberg/django-shop-rest-checkout/pull/%s', 'PR #'),
}

# HTML output uses the sphinx_py3doc_enhanced_theme package (imported here,
# mid-file, so the theme path can be resolved from the installed package).
import sphinx_py3doc_enhanced_theme
html_theme = "sphinx_py3doc_enhanced_theme"
html_theme_path = [sphinx_py3doc_enhanced_theme.get_html_theme_path()]
html_theme_options = {
    'githuburl': 'https://github.com/rfleschenberg/django-shop-rest-checkout/'
}
html_use_smartypants = True
html_last_updated_fmt = '%b %d, %Y'
html_split_index = False
html_sidebars = {
    '**': ['searchbox.html', 'globaltoc.html', 'sourcelink.html'],
}
html_short_title = '%s-%s' % (project, version)

# Napoleon (Google/NumPy docstring) rendering options.
napoleon_use_ivar = True
napoleon_use_rtype = False
napoleon_use_param = False
|
bsd-2-clause
| -5,760,618,463,269,746,000
| 26.277778
| 91
| 0.672098
| false
|
Vagab0nd/SiCKRAGE
|
lib3/twilio/rest/voice/v1/dialing_permissions/country/__init__.py
|
1
|
18104
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.voice.v1.dialing_permissions.country.highrisk_special_prefix import HighriskSpecialPrefixList
class CountryList(ListResource):
    """ PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com. """
    # NOTE: this file is generated by the twilio-python code generator (see the
    # file header); keep edits in sync with the generator's output style.

    def __init__(self, version):
        """
        Initialize the CountryList

        :param Version version: Version that contains the resource

        :returns: twilio.rest.voice.v1.dialing_permissions.country.CountryList
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryList
        """
        super(CountryList, self).__init__(version)

        # Path Solution
        self._solution = {}
        self._uri = '/DialingPermissions/Countries'.format(**self._solution)

    def stream(self, iso_code=values.unset, continent=values.unset,
               country_code=values.unset, low_risk_numbers_enabled=values.unset,
               high_risk_special_numbers_enabled=values.unset,
               high_risk_tollfraud_numbers_enabled=values.unset, limit=None,
               page_size=None):
        """
        Streams CountryInstance records from the API as a generator stream.
        This operation lazily loads records as efficiently as possible until the limit
        is reached.
        The results are returned as a generator, so this operation is memory efficient.

        :param unicode iso_code: Filter to retrieve the country permissions by specifying the ISO country code
        :param unicode continent: Filter to retrieve the country permissions by specifying the continent
        :param unicode country_code: Country code filter
        :param bool low_risk_numbers_enabled: Filter to retrieve the country permissions with dialing to low-risk numbers enabled
        :param bool high_risk_special_numbers_enabled: Filter to retrieve the country permissions with dialing to high-risk special service numbers enabled
        :param bool high_risk_tollfraud_numbers_enabled: Filter to retrieve the country permissions with dialing to high-risk toll fraud numbers enabled
        :param int limit: Upper limit for the number of records to return. stream()
                          guarantees to never return more than limit. Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records. If no page_size is defined
                              but a limit is defined, stream() will attempt to read the
                              limit with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.voice.v1.dialing_permissions.country.CountryInstance]
        """
        # Resolve the user-supplied limit/page_size into concrete paging values.
        limits = self._version.read_limits(limit, page_size)

        page = self.page(
            iso_code=iso_code,
            continent=continent,
            country_code=country_code,
            low_risk_numbers_enabled=low_risk_numbers_enabled,
            high_risk_special_numbers_enabled=high_risk_special_numbers_enabled,
            high_risk_tollfraud_numbers_enabled=high_risk_tollfraud_numbers_enabled,
            page_size=limits['page_size'],
        )

        return self._version.stream(page, limits['limit'])

    def list(self, iso_code=values.unset, continent=values.unset,
             country_code=values.unset, low_risk_numbers_enabled=values.unset,
             high_risk_special_numbers_enabled=values.unset,
             high_risk_tollfraud_numbers_enabled=values.unset, limit=None,
             page_size=None):
        """
        Lists CountryInstance records from the API as a list.
        Unlike stream(), this operation is eager and will load `limit` records into
        memory before returning.

        :param unicode iso_code: Filter to retrieve the country permissions by specifying the ISO country code
        :param unicode continent: Filter to retrieve the country permissions by specifying the continent
        :param unicode country_code: Country code filter
        :param bool low_risk_numbers_enabled: Filter to retrieve the country permissions with dialing to low-risk numbers enabled
        :param bool high_risk_special_numbers_enabled: Filter to retrieve the country permissions with dialing to high-risk special service numbers enabled
        :param bool high_risk_tollfraud_numbers_enabled: Filter to retrieve the country permissions with dialing to high-risk toll fraud numbers enabled
        :param int limit: Upper limit for the number of records to return. list() guarantees
                          never to return more than limit. Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records. If no page_size is defined
                              but a limit is defined, list() will attempt to read the limit
                              with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.voice.v1.dialing_permissions.country.CountryInstance]
        """
        # Eager variant: simply materialize the stream() generator.
        return list(self.stream(
            iso_code=iso_code,
            continent=continent,
            country_code=country_code,
            low_risk_numbers_enabled=low_risk_numbers_enabled,
            high_risk_special_numbers_enabled=high_risk_special_numbers_enabled,
            high_risk_tollfraud_numbers_enabled=high_risk_tollfraud_numbers_enabled,
            limit=limit,
            page_size=page_size,
        ))

    def page(self, iso_code=values.unset, continent=values.unset,
             country_code=values.unset, low_risk_numbers_enabled=values.unset,
             high_risk_special_numbers_enabled=values.unset,
             high_risk_tollfraud_numbers_enabled=values.unset,
             page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """
        Retrieve a single page of CountryInstance records from the API.
        Request is executed immediately

        :param unicode iso_code: Filter to retrieve the country permissions by specifying the ISO country code
        :param unicode continent: Filter to retrieve the country permissions by specifying the continent
        :param unicode country_code: Country code filter
        :param bool low_risk_numbers_enabled: Filter to retrieve the country permissions with dialing to low-risk numbers enabled
        :param bool high_risk_special_numbers_enabled: Filter to retrieve the country permissions with dialing to high-risk special service numbers enabled
        :param bool high_risk_tollfraud_numbers_enabled: Filter to retrieve the country permissions with dialing to high-risk toll fraud numbers enabled
        :param str page_token: PageToken provided by the API
        :param int page_number: Page Number, this value is simply for client state
        :param int page_size: Number of records to return, defaults to 50

        :returns: Page of CountryInstance
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryPage
        """
        # Query-string parameters; values.unset marks filters the caller did
        # not supply.
        data = values.of({
            'IsoCode': iso_code,
            'Continent': continent,
            'CountryCode': country_code,
            'LowRiskNumbersEnabled': low_risk_numbers_enabled,
            'HighRiskSpecialNumbersEnabled': high_risk_special_numbers_enabled,
            'HighRiskTollfraudNumbersEnabled': high_risk_tollfraud_numbers_enabled,
            'PageToken': page_token,
            'Page': page_number,
            'PageSize': page_size,
        })

        response = self._version.page(method='GET', uri=self._uri, params=data, )

        return CountryPage(self._version, response, self._solution)

    def get_page(self, target_url):
        """
        Retrieve a specific page of CountryInstance records from the API.
        Request is executed immediately

        :param str target_url: API-generated URL for the requested results page

        :returns: Page of CountryInstance
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryPage
        """
        response = self._version.domain.twilio.request(
            'GET',
            target_url,
        )

        return CountryPage(self._version, response, self._solution)

    def get(self, iso_code):
        """
        Constructs a CountryContext

        :param iso_code: The ISO country code

        :returns: twilio.rest.voice.v1.dialing_permissions.country.CountryContext
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryContext
        """
        return CountryContext(self._version, iso_code=iso_code, )

    def __call__(self, iso_code):
        """
        Constructs a CountryContext

        :param iso_code: The ISO country code

        :returns: twilio.rest.voice.v1.dialing_permissions.country.CountryContext
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryContext
        """
        return CountryContext(self._version, iso_code=iso_code, )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Voice.V1.CountryList>'
class CountryPage(Page):
    """One page of country dialing-permission results.

    PLEASE NOTE: preview product, subject to change. Contact help@twilio.com
    for developer preview access.
    """

    def __init__(self, version, response, solution):
        """Build a CountryPage from an API response.

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryPage
        """
        super(CountryPage, self).__init__(version, response)

        # Carry the parent list's path solution through to instances.
        self._solution = solution

    def get_instance(self, payload):
        """Turn one payload dict from this page into a CountryInstance.

        :param dict payload: Payload response from the API
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryInstance
        """
        return CountryInstance(self._version, payload)

    def __repr__(self):
        """Machine friendly representation.

        :rtype: str
        """
        return '<Twilio.Voice.V1.CountryPage>'
class CountryContext(InstanceContext):
    """Context for a single country's dialing-permission record.

    PLEASE NOTE: preview product, subject to change. Contact help@twilio.com
    for developer preview access.
    """

    def __init__(self, version, iso_code):
        """Initialize the CountryContext.

        :param Version version: Version that contains the resource
        :param iso_code: The ISO country code
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryContext
        """
        super(CountryContext, self).__init__(version)

        # Path solution shared with dependents and __repr__.
        self._solution = {'iso_code': iso_code}
        self._uri = '/DialingPermissions/Countries/{iso_code}'.format(**self._solution)

        # Dependent resource list, created lazily on first access.
        self._highrisk_special_prefixes = None

    def fetch(self):
        """Fetch the CountryInstance.

        :returns: The fetched CountryInstance
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryInstance
        """
        payload = self._version.fetch(method='GET', uri=self._uri)
        return CountryInstance(self._version, payload, iso_code=self._solution['iso_code'])

    @property
    def highrisk_special_prefixes(self):
        """Access the highrisk_special_prefixes dependent list.

        :rtype: twilio.rest.voice.v1.dialing_permissions.country.highrisk_special_prefix.HighriskSpecialPrefixList
        """
        if self._highrisk_special_prefixes is None:
            self._highrisk_special_prefixes = HighriskSpecialPrefixList(
                self._version,
                iso_code=self._solution['iso_code'],
            )
        return self._highrisk_special_prefixes

    def __repr__(self):
        """Machine friendly representation.

        :rtype: str
        """
        details = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Voice.V1.CountryContext {}>'.format(details)
class CountryInstance(InstanceResource):
    """ PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com. """
    # NOTE: generated by the twilio-python code generator (see the file
    # header); properties below simply expose the marshaled payload.

    def __init__(self, version, payload, iso_code=None):
        """
        Initialize the CountryInstance

        :returns: twilio.rest.voice.v1.dialing_permissions.country.CountryInstance
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryInstance
        """
        super(CountryInstance, self).__init__(version)

        # Marshaled Properties
        self._properties = {
            'iso_code': payload.get('iso_code'),
            'name': payload.get('name'),
            'continent': payload.get('continent'),
            'country_codes': payload.get('country_codes'),
            'low_risk_numbers_enabled': payload.get('low_risk_numbers_enabled'),
            'high_risk_special_numbers_enabled': payload.get('high_risk_special_numbers_enabled'),
            'high_risk_tollfraud_numbers_enabled': payload.get('high_risk_tollfraud_numbers_enabled'),
            'url': payload.get('url'),
            'links': payload.get('links'),
        }

        # Context
        self._context = None
        # Prefer the explicitly supplied iso_code; fall back to the payload's.
        self._solution = {'iso_code': iso_code or self._properties['iso_code'], }

    @property
    def _proxy(self):
        """
        Generate an instance context for the instance, the context is capable of
        performing various actions. All instance actions are proxied to the context

        :returns: CountryContext for this CountryInstance
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryContext
        """
        # Lazily build and cache the context on first use.
        if self._context is None:
            self._context = CountryContext(self._version, iso_code=self._solution['iso_code'], )
        return self._context

    @property
    def iso_code(self):
        """
        :returns: The ISO country code
        :rtype: unicode
        """
        return self._properties['iso_code']

    @property
    def name(self):
        """
        :returns: The name of the country
        :rtype: unicode
        """
        return self._properties['name']

    @property
    def continent(self):
        """
        :returns: The name of the continent in which the country is located
        :rtype: unicode
        """
        return self._properties['continent']

    @property
    def country_codes(self):
        """
        :returns: The E.164 assigned country codes(s)
        :rtype: unicode
        """
        return self._properties['country_codes']

    @property
    def low_risk_numbers_enabled(self):
        """
        :returns: Whether dialing to low-risk numbers is enabled
        :rtype: bool
        """
        return self._properties['low_risk_numbers_enabled']

    @property
    def high_risk_special_numbers_enabled(self):
        """
        :returns: Whether dialing to high-risk special services numbers is enabled
        :rtype: bool
        """
        return self._properties['high_risk_special_numbers_enabled']

    @property
    def high_risk_tollfraud_numbers_enabled(self):
        """
        :returns: Whether dialing to high-risk toll fraud numbers is enabled, else `false`
        :rtype: bool
        """
        return self._properties['high_risk_tollfraud_numbers_enabled']

    @property
    def url(self):
        """
        :returns: The absolute URL of this resource
        :rtype: unicode
        """
        return self._properties['url']

    @property
    def links(self):
        """
        :returns: A list of URLs related to this resource
        :rtype: unicode
        """
        return self._properties['links']

    def fetch(self):
        """
        Fetch the CountryInstance

        :returns: The fetched CountryInstance
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryInstance
        """
        return self._proxy.fetch()

    @property
    def highrisk_special_prefixes(self):
        """
        Access the highrisk_special_prefixes

        :returns: twilio.rest.voice.v1.dialing_permissions.country.highrisk_special_prefix.HighriskSpecialPrefixList
        :rtype: twilio.rest.voice.v1.dialing_permissions.country.highrisk_special_prefix.HighriskSpecialPrefixList
        """
        return self._proxy.highrisk_special_prefixes

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Voice.V1.CountryInstance {}>'.format(context)
|
gpl-3.0
| 4,873,446,034,144,213,000
| 39.410714
| 155
| 0.648641
| false
|
Farious/PersonTracker
|
Source/LoadImageTextPrint_sample.py
|
1
|
4907
|
# Sample driver: load one camera frame through LoadImagePrintText (drawing
# detection boxes / re-ID labels) and display it in a resizable OpenCV window.
# import numpy as np
import cv2
# import linecache
import LoadImagePrintText as f
# >>> import os
# >>> os.chdir("C:\Users\dario\Desktop\Dropbox\Work\DemoHDA\git\Source\\")

## Input
cam = 60
frame = 26
# NOTE(review): this variable is never used -- the call below hard-codes
# debugREID=0. Confirm whether debugREID=debugREID was intended.
debugREID = 1

img = f.loadImagePrintText(cam, frame, debugREID=0, debugPD=1, PDthreshold=20)

# Show the annotated frame at quarter size until a key is pressed.
cv2.namedWindow("1", cv2.WINDOW_NORMAL)
cv2.resizeWindow("1", 2560 / 4, 1600 / 4)
cv2.imshow("1", img)
cv2.waitKey(0)
## Pre-defined static variables
# CV_FILLED = -1
# red = (0, 0, 255)
# green = (0, 255, 0)
# black = (0, 0, 0)
# white = (255, 255, 255)
# thickness = 8
# if cam == 60:
# fontScale = 2 # 2 for Camera 60 (4MPixel), 1 for other cameras (1MPixel)
# else:
# fontScale = 1
#
# # user = "dario" # "Toshiba"
# # JPEGspath = "C:\Users\\" + user + "\Desktop\Dropbox\Work\HDA_Dataset\VIDeoSequences\JPEG\camera60\\"
# JPEGspath = "RESOURCES\JPEG\camera" + str(cam) + "\\"
# filename = "I000" + str(frame) + ".jpeg"
# image = cv2.imread(JPEGspath + filename)
#
# cv2.namedWindow("1", cv2.WINDOW_NORMAL)
# cv2.resizeWindow("1", 2560/2, 1600/2)
#
# # DetectionsPath = "C:\\Users\\" + user + "\Desktop\Dropbox\Work\DemoHDA\\7.0.SmallLimited\\"
# # detectionfile = "\set60\V000\I000" + str(frame) + ".txt"
# detectionsPath = "RESOURCES\Detections\set" + str(cam) + "\V000\\"
# detectionFile = "I000" + str(frame) + ".txt"
#
# # line = 'place-holder'
# ind = 1
#
# fileText = open(detectionsPath + detectionFile, 'r')
# lines = fileText.readlines()
# fileText.close()
#
# res1 = [line.rstrip('\n').split(',') for line in lines]
# for i, values in enumerate(res1):
# res1[i] = [int(float(value)) for value in values] # [:4]
# leftTop = np.array((res1[i][0], res1[i][1]))
# rightBottom = leftTop + np.array((res1[i][2], res1[i][3]))
# left = res1[i][0]
# top = res1[i][1]
# right = left+res1[i][2]
# bottom = top+res1[i][3]
# if len(res1[i]) > 5: # There is a re-IDentification for this detection
# correctID = res1[i][5]
# REIDs = res1[i][6:]
#
# imgR = image
# ## in thickness CV_FILLED is -1
# ## Coordinate frame is (x,y) starting at top-left corner
# ## cv2.rectangle(img, pt1, pt2, color[, thickness[, lineType[, shift]]])
# cv2.rectangle(imgR, (left, top), (right, bottom), red, thickness)
#
# ## Given a list of names, put one white box for each, on top of the image, and print the text on each respective
# # whitebox
#
# # Standard person names are PersonXXX
# texts = [str(k+1) + ".Person" + str(ID).zfill(3) for k, ID in enumerate(REIDs)]
# # But for a few select persons that we do know their first name, we can re-name the text to their names
# # It would probably be nicer if made in a single cycle
# for k, ID in enumerate(REIDs):
# if ID == 22:
# texts[k] = str(k+1) + ".Matteo"
# if ID == 32:
# texts[k] = str(k+1) + ".Dario"
# # print texts[k]
#
# # texts = ("1.Matteo","2.Dario")
# textHeight = 25*fontScale # 50 for cv2.FONT_HERSHEY_DUPLEX in cam60 image sizes
# letterWidth = 18*fontScale
# # for j, text in enumerate(texts):
# for k, ID in enumerate(REIDs):
# text = texts[k]
# j=k
# cv2.rectangle(imgR, (left, top-textHeight*j),
# (left + letterWidth*len(text), top-textHeight*(j+1)), white, CV_FILLED) # tuple(topleft + (textWIDth, 0))
# ## cv2.putText(img, text, org, fontFace, fontScale, color[, thickness[, lineType[, bottomLeftOrigin]]])
# if ID == correctID:
# color = green
# else:
# color = red
# if debugREID == 0:
# color = black
# cv2.putText(imgR, text, (left, top-textHeight*j), cv2.FONT_HERSHEY_DUPLEX, fontScale, color, thickness=thickness/2)
#
# cv2.imshow("1",imgR)
#
#
# cv2.waitKey(0)
# FONT_HERSHEY_SIMPLEX, FONT_HERSHEY_PLAIN, FONT_HERSHEY_DUPLEX, FONT_HERSHEY_COMPLEX, FONT_HERSHEY_TRIPLEX,
# FONT_HERSHEY_COMPLEX_SMALL, FONT_HERSHEY_SCRIPT_SIMPLEX, or FONT_HERSHEY_SCRIPT_COMPLEX
# cv2.putText(imgR, text, tuple(topleft), cv2.FONT_HERSHEY_SIMPLEX, 3, black, thickness/2)
# cv2.putText(imgR, text, tuple(topleft+(0, textHeight)), cv2.FONT_HERSHEY_PLAIN, 3, black, thickness/2)
# cv2.putText(imgR, text, tuple(topleft+(0, textHeight*2)), cv2.FONT_HERSHEY_DUPLEX, 3, black, thickness/2)
# cv2.putText(imgR, text, tuple(topleft+(0, textHeight*3)), cv2.FONT_HERSHEY_COMPLEX, 3, black, thickness/2)
# cv2.putText(imgR, text, tuple(topleft+(0, textHeight*4)), cv2.FONT_HERSHEY_TRIPLEX, 3, black, thickness/2)
# cv2.putText(imgR, text, tuple(topleft+(0, textHeight*5)), cv2.FONT_HERSHEY_COMPLEX_SMALL, 3, black, thickness/2)
# cv2.putText(imgR, text, tuple(topleft+(0, textHeight*6)), cv2.FONT_HERSHEY_SCRIPT_SIMPLEX, 3, black, thickness/2)
# cv2.putText(imgR, text, tuple(topleft+(0, textHeight*7)), cv2.FONT_HERSHEY_SCRIPT_COMPLEX, 3, black, thickness/2)
|
apache-2.0
| -2,488,138,149,678,787,000
| 37.944444
| 130
| 0.627675
| false
|
michaelcontento/whirlwind
|
whirlwind/view/filters.py
|
1
|
6750
|
from datetime import datetime
import pytz, sys, re, locale
from dateutil import parser
try:
import simplejson
except ImportError:
import json as simplejson
class Filters():
    '''
    Checks whether the passed in value is considered useful otherwise will return None
    will return None on the following values:
    None
    ''
    'null'
    'undefined'
    {}
    '''
    # NOTE: this module targets Python 2 (`basestring` and `print` statements
    # below); it will not run unmodified on Python 3.

    @staticmethod
    def val(val):
        # Normalize "useless" values to None; 0 is deliberately preserved.
        if val == None :
            return None
        if val == 'null' :
            return None
        if val == 'undefined' :
            return None
        if val == 0 :
            return val
        if isinstance(val, basestring) and len(val) == 0 :
            return None
        if isinstance(val, dict) and len(val) == 0 :
            return None
        return val

    @staticmethod
    def str(val):
        # Falsy values render as the empty string; everything else via the
        # builtin str() (the method name shadows the builtin, but the body's
        # `str` resolves globally, so this is safe -- if confusing).
        if not val:
            return ''
        #TODO: sensibly handle:
        # dicts => json
        # dates => pretty
        # numbers => add commas
        return str(val)

    # Bare string below documents is_true (it is a no-op statement, kept as-is).
    '''
    Checks for various styles of true.
    matches on True, 'true', 'on'
    '''
    @staticmethod
    def is_true(val):
        if not val :
            return False
        if isinstance(val, basestring) :
            if val == 'True' or val == 'true' or val == 'on' :
                return True
            return False
        if val == True :
            return True
        return False

    @staticmethod
    def strip_html(data):
        # Remove anything that looks like an HTML/XML tag.
        # NOTE: returns None (implicitly) for falsy input.
        if not data :
            return
        p = re.compile(r'<[^<]*?/?>')
        return p.sub('', data)

    @staticmethod
    def long_timestamp(dt_str,tz="America/New_York"):
        # Falls back to the raw input when conversion fails (returns None).
        utc_dt = Filters._convert_utc_to_local(dt_str,tz)
        if utc_dt:
            return utc_dt.strftime("%A, %d. %B %Y %I:%M%p")
        else:
            return dt_str

    @staticmethod
    def short_timestamp(dt_str,tz="America/New_York"):
        # NOTE(review): unlike long_timestamp, no None check -- this raises
        # AttributeError if the conversion fails.
        tz_dt = Filters._convert_utc_to_local(dt_str,tz)
        return tz_dt.strftime("%m/%d/%Y %I:%M")

    @staticmethod
    def short_date(dt_str,tz="America/New_York"):
        # Same missing None check as short_timestamp above.
        tz_dt = Filters._convert_utc_to_local(dt_str,tz)
        return tz_dt.strftime("%m/%d/%Y")

    @staticmethod
    def ellipsis(data,limit,append='...'):
        # Truncate to `limit` characters, appending `append` only when cut.
        return (data[:limit] + append) if len(data) > limit else data

    '''
    filter to translate a dict to json
    '''
    @staticmethod
    def to_json(dict):
        # Second positional argument of dumps() is `skipkeys=True`:
        # non-serializable dict keys are skipped instead of raising.
        return simplejson.dumps(dict, True)

    @staticmethod
    def idize(str):
        # Lower-case and replace all non-alphanumerics with underscores.
        return (re.sub(r'[^0-9a-zA-Z]', '_',str)).lower()

    @staticmethod
    def _convert_utc_to_local(utc_dt,tz):
        try:
            print utc_dt  # NOTE(review): leftover debug output
            local = pytz.timezone(tz)
            # NOTE(review): despite the name, this *attaches* the local zone
            # to the datetime and then converts to UTC (local -> UTC), not
            # UTC -> local. Confirm the intended direction before relying on
            # the formatted results above.
            local_dt = utc_dt.replace(tzinfo = local)
            return local_dt.astimezone (pytz.utc)
        except Exception:
            print sys.exc_info()  # NOTE(review): leftover debug output
            return None

    @staticmethod
    def url_pretty(str):
        # Slugify: non-alphanumerics to underscores, collapse runs, cap at 32.
        if not str :
            return
        url = re.sub(r'[^0-9a-zA-Z]', '_',Filters.str(str))
        url = re.sub('_+', '_',url)
        #max 32 chars.
        if len(url) > 32 :
            url = url[0:32]
        return url

    @staticmethod
    def add_commas(val,as_data_type='int',the_locale=locale.LC_ALL):
        # Grouped (thousands-separated) formatting using the system locale.
        locale.setlocale(the_locale, "")
        if as_data_type == 'int':
            return locale.format('%d', int(val), True)
        elif as_data_type == 'float':
            return locale.format('%f', float(val), True)
        else:
            # Unknown type: return the value unchanged.
            return val

    @staticmethod
    def pluralize(str):
        # Delegates to Pluralizer; relies on Pluralizer.plural being callable
        # through an instance.
        pl = Pluralizer()
        return pl.plural(str)

    '''
    Does a get on the dict. will work with dot operator, and not throw an exception
    returns default if the key doesn't work
    will also work to reach into lists via integer keys.
    example:
    {
    'key1' : {
    'subkey' : [{'subsubkey1':9},{}]
    }
    }
    Filters.dict_get('key1.subkey.0.subsubkey1') => 9
    '''
    @staticmethod
    def dict_get(dict, key, default=None):
        # NOTE: parameter name shadows the builtin `dict` within this method.
        #Surround this with try in case key is None or not a string or something
        try:
            keys = key.split(".")
        except:
            return default

        tmp = dict
        for k in keys :
            try:
                tmp = tmp[k]
            except TypeError:
                #Issue may be that we have something like '0'. Try converting to a number
                try:
                    tmp = tmp[int(k)]
                except:
                    #Either couldn't convert or went out of bounds on list
                    return default
            except:
                #Exception other than TypeError probably missing key, so default
                return default
        return tmp
class Pluralizer():
    """Naive English pluralizer driven by an ordered tuple of regex rules.

    The final ('$', '$', 's') rule always matches, so plural() always
    returns a string.
    """

    #
    # (pattern, search, replace) regex english plural rules tuple
    #
    rule_tuple = (
        ('[ml]ouse$', '([ml])ouse$', '\\1ice'),
        ('child$', 'child$', 'children'),
        ('booth$', 'booth$', 'booths'),
        ('foot$', 'foot$', 'feet'),
        ('ooth$', 'ooth$', 'eeth'),
        ('l[eo]af$', 'l([eo])af$', 'l\\1aves'),
        ('sis$', 'sis$', 'ses'),
        ('man$', 'man$', 'men'),
        ('ife$', 'ife$', 'ives'),
        ('eau$', 'eau$', 'eaux'),
        ('lf$', 'lf$', 'lves'),
        ('[sxz]$', '$', 'es'),
        ('[^aeioudgkprt]h$', '$', 'es'),
        ('(qu|[^aeiou])y$', 'y$', 'ies'),
        ('$', '$', 's')
    )

    # BUGFIX: these were plain functions defined without `self` or
    # @staticmethod, so Pluralizer().plural(x) raised TypeError and plural()
    # raised NameError on the bare `regex_rules` lookup. They are now proper
    # static methods, which also fixes Filters.pluralize().
    @staticmethod
    def regex_rules(rules=rule_tuple):
        """Yield one matcher per rule.

        Each matcher returns the pluralized word when its pattern matches,
        otherwise a falsy value. Rule parts are bound as lambda defaults so
        the yielded callables are safe to collect before calling (avoids the
        classic late-binding closure pitfall).
        """
        for pattern, search, replace in rules:
            yield lambda word, _p=pattern, _s=search, _r=replace: \
                re.search(_p, word) and re.sub(_s, _r, word)

    @staticmethod
    def plural(noun):
        """Return the plural form of *noun* using the first matching rule."""
        for rule in Pluralizer.regex_rules():
            result = rule(noun)
            if result:
                return result
class Cycler():
    """Round-robin value dispenser; named cycles persist across calls."""

    # Shared registry of in-progress iterators, keyed by cycle name.
    cycle_registry = {}

    @staticmethod
    def uuid():
        """Return a fresh time-based UUID (uuid1)."""
        import uuid
        return uuid.uuid1()

    @staticmethod
    def cycle(values, name='default'):
        """Return the next element of the named cycle.

        When the iterator registered under *name* is exhausted (or does not
        exist yet), a new iterator over *values* is started.

        Uses the builtin ``next()`` (Python 2.6+ and 3.x) instead of the
        Python-2-only ``iterator.next()`` method the original relied on.
        """
        iterator = Cycler.cycle_registry.get(name)
        if iterator is None:
            iterator = Cycler.cycle_registry[name] = iter(values)
        try:
            return next(iterator)
        except StopIteration:
            # Exhausted: restart the cycle from the supplied values.
            iterator = Cycler.cycle_registry[name] = iter(values)
            return next(iterator)
|
mit
| 1,393,791,378,986,615,300
| 27.246862
| 90
| 0.480296
| false
|
cuemacro/chartpy
|
chartpy_examples/subplot_example.py
|
1
|
2359
|
__author__ = 'saeedamen' # Saeed Amen
#
# Copyright 2016 Cuemacro
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and limitations under the License.
#
import pandas

# support Quandl 3.x.x
try:
    import quandl as Quandl
except ImportError:
    # if import fails use Quandl 2.x.x
    # (narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit)
    import Quandl

from chartpy import Chart, Style

# get your own free Quandl API key from https://www.quandl.com/
try:
    from chartpy.chartcred import ChartCred

    cred = ChartCred()
    quandl_api_key = cred.quandl_api_key
except Exception:
    # best-effort fallback to a placeholder key when no credentials module
    # is available (narrowed from a bare `except:`)
    quandl_api_key = "x"

# choose run_example = 0 for everything
# run_example = 1 - plot US GDP QoQ (real) and nominal with Plotly/Bokeh/Matplotlib with subplots for each line
# run_example = 2 - plot US GDP QoQ (real + nominal) in two double plots (passing an array of dataframes)

run_example = 0

if run_example == 1 or run_example == 0:
    # Fetch real and nominal US GDP QoQ series from FRED via Quandl.
    df = Quandl.get(["FRED/A191RL1Q225SBEA", "FRED/A191RP1Q027SBEA"], authtoken=quandl_api_key)
    df.columns = ["Real QoQ", "Nominal QoQ"]

    # set the style of the plot
    style = Style(title="US GDP", source="Quandl/Fred", subplots=True)

    # Chart object is initialised with the dataframe and our chart style
    chart = Chart(df=df, chart_type='line', style=style)

    chart.plot(engine='matplotlib')
    chart.plot(engine='bokeh')
    chart.plot(engine='plotly')

if run_example == 2 or run_example == 0:
    df = Quandl.get(["FRED/A191RL1Q225SBEA", "FRED/A191RP1Q027SBEA"], authtoken=quandl_api_key)
    df.columns = ["Real QoQ", "Nominal QoQ"]

    # Passing a list of dataframes produces the "double plot" layout.
    df = [df, df]

    # set the style of the plot
    style = Style(title="US GDP double plot", source="Quandl/Fred", subplots=True)

    # Chart object is initialised with the dataframe and our chart style
    chart = Chart(df=df, chart_type='line', style=style)

    chart.plot(engine='bokeh')
    chart.plot(engine='matplotlib')
    chart.plot(engine='plotly')  # TODO fix legends though
|
apache-2.0
| -2,607,866,110,523,156,500
| 32.225352
| 121
| 0.705384
| false
|
monokrome/django-drift
|
setup.py
|
1
|
1462
|
import os
import sys

try:
    from setuptools import setup
except ImportError:
    # NOTE(review): setup.py is not executed as a package, so this relative
    # import will itself raise outside a package context; kept from the
    # original ez_setup bootstrap -- confirm before relying on it.
    from . import ez_setup
    from setuptools import setup

parent_directory = os.path.abspath(os.path.dirname(__file__))

# Default each entry to '' (not None) so the string concatenation used to
# build `long_description` below cannot raise TypeError when a file is
# missing or unreadable.
metafiles = {
    'README.md': '',
    'CHANGES.md': '',
    'CLASSIFIERS.txt': '',
}

# The following bit will read each index from metafiles and fill it's value
# with the contents of that file if it is able to read the file.
for filename in metafiles:
    try:
        with open(os.path.join(parent_directory, filename)) as current_file:
            metafiles[filename] = current_file.read()
    except IOError:
        pass

# Runtime dependencies (also used for the test requirements below).
dependencies = [
    'celery>=3.1.6',
]

metadata = {
    'name': 'django-drift',
    'version': '0.1.2',
    'description': 'Takes files and turns them into recrods in models. HOORAY!',
    'long_description': metafiles['README.md'] + '\n\n' + metafiles['CHANGES.md'],
    'classifiers': metafiles['CLASSIFIERS.txt'],

    'author': 'Brandon R. Stoner',
    'author_email': 'monokrome@limpidtech.com',
    'url': 'http://github.com/monokrome/django-drift',

    'keywords': '',

    'packages': [
        'drift',
        'drift.management',
        'drift.management.commands',
    ],

    'package_data': {
        'drift': ['templates/drift/*'],
    },

    'test_suite': 'drift.tests',

    'install_requires': dependencies,
    'tests_require': dependencies,
}

setup(**metadata)
|
mit
| 7,593,647,087,996,668,000
| 23.366667
| 82
| 0.629275
| false
|
davidhax0r/dojme
|
dojme/routes.py
|
1
|
2223
|
from dojme import app
from flask import render_template, request, redirect
from urlparse import urlparse
import httplib2
import re
def status_check(url):
    """
    Check whether a web resource exists by requesting only its headers.

    :param url: absolute URL to probe (scheme included)
    :returns: True when the server answers the HEAD request with HTTP 200,
              False for any other status or when the host cannot be
              resolved / the URI is malformed.
    """
    h = httplib2.Http()
    try:
        resp = h.request(url, 'HEAD')
    except (httplib2.RelativeURIError, httplib2.ServerNotFoundError):
        # Malformed URI or DNS failure: treat the site as down.
        return False
    # BUGFIX: explicitly return False for non-200 responses -- the original
    # fell off the end and returned None, which only worked via truthiness.
    return resp[0].status == 200
def check_protocol(url):
    """Return *url* unchanged when it already carries a scheme, otherwise
    prefix it with ``http://``."""
    has_scheme = urlparse(url).scheme != ""
    return url if has_scheme else "http://" + url
def is_valid_url(url):
    """Return True when *url* looks like a well-formed http/https/ftp URL.

    The pattern accepts domain names, ``localhost``, IPv4 and (loosely)
    IPv6 hosts, an optional port, and an optional path/query part.
    """
    pattern = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?'
        r'|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4
        r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
    return pattern.search(url) is not None
@app.route("/")
def main():
    """Render the landing page (index.html)."""
    return render_template("index.html")
@app.route("/form", methods=["GET", "POST"])
def route_form():
    """
    Handle the URL-check form.

    GET requests are redirected back to the landing page; POST requests
    validate the submitted 'webaddress' field and render submit.html with
    an up/down verdict (or re-render index.html when the input is invalid).
    """
    if request.method == "GET":
        return redirect('/')
    else:
        web_address = request.form['webaddress']
        # Prepend http:// when the user omitted the scheme.
        web_address = check_protocol(web_address)
        valid_url = is_valid_url(web_address)
        if not valid_url:
            # Invalid input: show the form again.
            return render_template("index.html")
        else:
            check_website = status_check(web_address)
            if check_website:
                return render_template("submit.html",
                                       up="It's Up",
                                       url=web_address)
            else:
                return render_template("submit.html",
                                       down="It's Down",
                                       url=web_address)
@app.errorhandler(404)
def handle_error(error):
    """
    404 error handler.

    BUGFIX: Flask invokes error handlers with the error/exception object,
    so the handler must accept one argument -- the original zero-argument
    signature raised TypeError whenever a 404 actually occurred.

    :param error: the NotFound exception raised by Flask (unused).
    :returns: the rendered 404 page with a 404 status code.
    """
    return render_template("404.html"), 404
|
mit
| 4,382,458,863,198,709,000
| 26.109756
| 73
| 0.504723
| false
|
lhfei/spark-in-action
|
spark-2.x/src/main/python/ml/correlation_example.py
|
1
|
1843
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
An example for computing correlation matrix.
Run with:
bin/spark-submit examples/src/main/python/ml/correlation_example.py
"""
from __future__ import print_function
# $example on$
from pyspark.ml.linalg import Vectors
from pyspark.ml.stat import Correlation
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
spark = SparkSession \
.builder \
.appName("CorrelationExample") \
.getOrCreate()
# $example on$
data = [(Vectors.sparse(4, [(0, 1.0), (3, -2.0)]),),
(Vectors.dense([4.0, 5.0, 0.0, 3.0]),),
(Vectors.dense([6.0, 7.0, 0.0, 8.0]),),
(Vectors.sparse(4, [(0, 9.0), (3, 1.0)]),)]
df = spark.createDataFrame(data, ["features"])
r1 = Correlation.corr(df, "features").head()
print("Pearson correlation matrix:\n" + str(r1[0]))
r2 = Correlation.corr(df, "features", "spearman").head()
print("Spearman correlation matrix:\n" + str(r2[0]))
# $example off$
spark.stop()
|
apache-2.0
| 8,634,468,812,316,314,000
| 34.137255
| 74
| 0.658709
| false
|
kyleabeauchamp/mdtraj
|
mdtraj/tests/test_dtr.py
|
1
|
11033
|
##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Robert McGibbon
# Contributors: Teng Lin
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
"""
Test the cython dtr module
Note, this file cannot be located in the dtr subdirectory, because that
directory is not a python package (it has no __init__.py) and is thus tests
there are not discovered by nose
"""
import tempfile, os
import numpy as np
from mdtraj.formats import DTRTrajectoryFile, DCDTrajectoryFile
from nose.tools import assert_raises
from mdtraj.testing import get_fn, eq, DocStringFormatTester, raises
from shutil import rmtree
#TestDocstrings = DocStringFormatTester(dtr, error_on_none=True)
# Reference fixture files shipped with mdtraj's test data.
fn_dtr = get_fn('frame0.dtr')
fn_dcd = get_fn('frame0.dcd')
fn_pdb = get_fn('native.pdb')
# Scratch directory for the write tests; removed in teardown_module.
temp = tempfile.mkdtemp(suffix='.dtr')
def teardown_module(module):
    """
    Remove the temporary trajectory directory created by tests in this
    file; nose calls this hook automatically after the module's tests run.
    """
    try:
        rmtree(temp)
    except OSError:
        # Best effort: the directory may already have been removed.
        pass
def test_read():
    """
    Default read of the DTR file must match the reference DCD trajectory,
    which holds the same 501 frames.
    """
    with DTRTrajectoryFile(fn_dtr) as dtr:
        eq(len(dtr), 501)
        xyz_dtr, times, lengths_dtr, angles_dtr = dtr.read()
    xyz_dcd, lengths_dcd, angles_dcd = DCDTrajectoryFile(fn_dcd).read()
    eq(xyz_dtr, xyz_dcd)
    eq(lengths_dtr, lengths_dcd)
    eq(angles_dtr, angles_dcd)
def test_read_1():
    """read() with an explicit n_frames covering the whole file matches a
    plain read()."""
    full = DTRTrajectoryFile(fn_dtr).read()
    capped = DTRTrajectoryFile(fn_dtr).read(n_frames=501)
    # Compare xyz, times, cell lengths and cell angles in turn.
    for reference, observed in zip(full, capped):
        eq(reference, observed)
def test_read_2():
    """read() with atom_indices returns the matching atom slice of a full
    read, with unchanged times and unit-cell data."""
    indices = np.array([0, 3, 12, 4])
    xyz_all, times_all, lens_all, angles_all = DTRTrajectoryFile(fn_dtr).read()
    xyz_sub, times_sub, lens_sub, angles_sub = \
        DTRTrajectoryFile(fn_dtr).read(atom_indices=indices)
    eq(xyz_all[:, indices, :], xyz_sub)
    eq(times_all, times_sub)
    eq(lens_all, lens_sub)
    eq(angles_all, angles_sub)
def test_read_3():
    """Requesting more frames than remain returns only what is left
    (501 total, one consumed by seek(1), so 500 remain)."""
    with DTRTrajectoryFile(fn_dtr) as traj:
        traj.seek(1)
        frames = traj.read(n_frames=900)[0]
    eq(len(frames), 500)
def test_read_stride():
"Read dtr with stride"
with DTRTrajectoryFile(fn_dtr) as f:
xyz1, times1, box_lengths1, box_angles1 = f.read()
with DTRTrajectoryFile(fn_dtr) as f:
xyz2, times2, box_lengths2, box_angles2 = f.read(stride=2)
yield lambda: eq(xyz1[::2], xyz2)
yield lambda: eq(times1[::2], times2)
yield lambda: eq(box_lengths1[::2], box_lengths2)
yield lambda: eq(box_angles1[::2], box_angles2)
def test_read_4():
    """Read dtr with stride and n_frames"""
    # dtr_traj = DTRTrajectoryFile(fn_dtr)
    # dtr_traj.seek(1)
    # xyz, times, cell_lens, cell_angles = dtr_traj.read(n_frames=300, stride=2)
    # eq(len(xyz), 251)
    with DTRTrajectoryFile(fn_dtr) as f:
        xyz1, times1, box_lengths1, box_angles1 = f.read()
    with DTRTrajectoryFile(fn_dtr) as f:
        # n_frames=300 exceeds the 251 strided frames available, so the
        # comparisons against the full [::2] slices below still hold.
        xyz2, times2, box_lengths2, box_angles2 = f.read(n_frames=300, stride=2)
    # nose generator test: each yielded callable runs as its own case.
    yield lambda: eq(xyz1[::2], xyz2)
    yield lambda: eq(times1[::2], times2)
    yield lambda: eq(box_lengths1[::2], box_lengths2)
    yield lambda: eq(box_angles1[::2], box_angles2)
def test_read_5():
    "check streaming read of frames 1 at a time"
    ref_xyz, ref_times, ref_lengths, ref_angles = \
        DTRTrajectoryFile(fn_dtr).read()
    reader = DTRTrajectoryFile(fn_dtr)
    for idx in range(len(ref_xyz)):
        xyz, times, lengths, angles = reader.read(1)
        # Single-frame reads keep the leading frame axis, hence np.newaxis.
        eq(ref_xyz[np.newaxis, idx], xyz)
        eq(ref_times[np.newaxis, idx], times)
        eq(ref_lengths[np.newaxis, idx], lengths)
        eq(ref_angles[np.newaxis, idx], angles)
def test_read_6():
    "DTRReader: check streaming read followed by reading the 'rest'"
    xyz_ref, times_ref, box_lengths_ref, box_angles_ref = \
        DTRTrajectoryFile(fn_dtr).read()
    reader = DTRTrajectoryFile(fn_dtr)
    # Stream the first half one frame at a time.
    for i in range(int(len(xyz_ref)/2)):
        xyz, times, box_lenths, box_angles = reader.read(1)
        eq(xyz_ref[np.newaxis, i], xyz)
        eq(times_ref[np.newaxis, i], times)
        eq(box_lengths_ref[np.newaxis, i], box_lenths)
        eq(box_angles_ref[np.newaxis, i], box_angles)
    # A bare read() picks up everything after the i+1 frames consumed above.
    xyz_rest, times_rest, box_rest, angles_rest = reader.read()
    yield lambda: eq(xyz_ref[i+1:], xyz_rest)
    yield lambda: eq(times_ref[i+1:], times_rest)
    yield lambda: eq(box_lengths_ref[i+1:], box_rest)
    yield lambda: eq(box_angles_ref[i+1:], angles_rest)
    # Previously this yielded a bare boolean (`len(xyz_ref) == i +
    # len(xyz_rest)`) which nose never asserted -- and it was off by one:
    # the loop consumed i+1 frames, not i. Assert the correct total.
    yield lambda: eq(len(xyz_ref), (i + 1) + len(xyz_rest))
def test_read_7():
    'test two full read'
    reader = DTRTrajectoryFile(fn_dtr)
    reader.read()  # consume the whole trajectory
    # A second read at EOF must yield four empty arrays.
    leftovers = reader.read()
    for part in leftovers:
        eq(len(part), 0)
def test_read_8():
    # atom_indices also accepts a slice; every-other-atom must match the
    # corresponding slice of a full read.
    with DTRTrajectoryFile(fn_dtr) as f:
        xyz_ref, times_ref, box_lengths_ref, box_angles_ref = f.read()
    with DTRTrajectoryFile(fn_dtr) as f:
        xyz, times, box_lengths, box_angles = f.read(atom_indices=slice(None, None, 2))
    # nose generator test: the yielded callable runs as its own case.
    yield lambda: eq(xyz_ref[:, ::2, :], xyz)
def test_write_1():
    "test write"
    xyz, times, cell_lens, cell_angles = DTRTrajectoryFile(fn_dtr).read()
    # Perturb the coordinates so we are not just re-reading identical data.
    xyz += 1
    # Use a context manager so the writer is flushed and closed before the
    # read-back below; previously the handle was left for the GC to close,
    # which made the subsequent read depend on finalizer timing.
    with DTRTrajectoryFile(temp, 'w') as f:
        f.write(xyz, cell_lengths=cell_lens,
                cell_angles=cell_angles, times=times)
    xyz2, times2, cell_lens2, cell_angles2 = DTRTrajectoryFile(temp).read()
    eq(xyz, xyz2)
    eq(times, times2)
    eq(cell_lens, cell_lens2)
    eq(cell_angles, cell_angles2)
def test_write_2():
    """
    Two separate write() calls on one writer append; the file ends up with
    both batches of frames in order.
    """
    xyz, times, cell_lens, cell_angles = DTRTrajectoryFile(fn_dtr).read()
    writer = DTRTrajectoryFile(temp, 'w')
    writer.write(xyz,cell_lengths=cell_lens,
                 cell_angles=cell_angles, times=times)
    n_frames = len(xyz)
    # Shift timestamps so the second batch does not collide with the first.
    times += 50.0
    writer.write(xyz,cell_lengths=cell_lens,
                 cell_angles=cell_angles, times=times)
    # # try to write frames with different number of atoms
    # assert_raises(ValueError, writer.write, xyz[:,10:,:],
    #               cell_lengths=cell_lens,
    #               cell_angles=cell_angles,
    #               times=times)
    writer.close()
    xyz2, times2, cell_lens2, cell_angles2 = DTRTrajectoryFile(temp).read()
    # Both batches present; the second (shifted) batch occupies the tail.
    eq(len(xyz2), n_frames*2)
    eq(xyz, xyz2[n_frames:])
    eq(times, times2[n_frames:])
    eq(cell_lens, cell_lens2[n_frames:])
    eq(cell_angles, cell_angles2[n_frames:])
def test_write_3():
    "test a random write operation"
    # NOTE(review): low == high == -50 makes these "random" coordinates a
    # constant -50.0 everywhere; presumably high=50 was intended — confirm.
    xyz = np.array(np.random.uniform(low=-50, high=-50, size=(3, 17, 3)), dtype=np.float32)
    times = np.array([1, 23.0, 48.0], dtype=np.float64)
    cell_lengths=np.array(np.random.uniform(low=100, high=200, size=(3, 3)), dtype=np.float32)
    cell_angles=np.array([[90, 90, 90],
                          [80, 100, 120],
                          [120, 90, 80]],
                         dtype=np.float32)
    with DTRTrajectoryFile(temp, 'w') as f:
        f.write(xyz, cell_lengths=cell_lengths,
                cell_angles=cell_angles, times=times)
    # Round-trip: the coordinates read back must equal what was written.
    with DTRTrajectoryFile(temp) as f:
        xyz2, times2, cell_lengths2, cell_angles2 = f.read()
    eq(xyz, xyz2)
def test_write_4():
    "test write error"
    xyz = np.array(np.random.uniform(low=-50, high=-50, size=(3, 17, 3)), dtype=np.float32)
    times = np.array([1, 23.0, 48.0], dtype=np.float64)
    cell_lengths=np.array(np.random.uniform(low=100, high=200, size=(3, 3)), dtype=np.float32)
    cell_angles=np.array([[90, 90, 90],
                          [80, 100, 120],
                          [120, 90, 80]],
                         dtype=np.float32)
    # Out-of-order timestamps — presumably rejected because times must
    # increase monotonically (TODO confirm against DTR writer docs).
    bad_times = np.array([21, 3.0, 48.0], dtype=np.float64)
    f = DTRTrajectoryFile(temp, 'w')
    # Supplying only a subset of (cell_lengths, cell_angles, times) raises.
    assert_raises(ValueError, f.write, xyz, cell_lengths=cell_lengths)
    assert_raises(ValueError, f.write, xyz, cell_angles=cell_angles)
    assert_raises(ValueError, f.write, xyz, times=times)
    assert_raises(ValueError, f.write, xyz,
                  cell_lengths=cell_lengths,
                  cell_angles=cell_angles,
                  times=bad_times)
    f.close()
    # assert_raises(IOError, f.write, xyz,
    #               cell_lengths=cell_lengths,
    #               cell_angles=cell_angles,
    #               times=times)
def test_seek():
    # Full set of reference coordinates for cross-checking positions.
    reference = DTRTrajectoryFile(fn_dtr).read()[0]
    with DTRTrajectoryFile(fn_dtr) as f:
        eq(f.tell(), 0)
        eq(f.read(1)[0][0], reference[0])
        eq(f.tell(), 1)
        xyz = f.read(1)[0][0]
        eq(xyz, reference[1])
        eq(f.tell(), 2)
        # Absolute seek back to the start.
        f.seek(0)
        eq(f.tell(), 0)
        xyz = f.read(1)[0][0]
        eq(f.tell(), 1)
        eq(xyz, reference[0])
        f.seek(5)
        eq(f.read(1)[0][0], reference[5])
        eq(f.tell(), 6)
        # whence=1 seeks relative to the current position: 6 - 5 == 1.
        f.seek(-5, 1)
        eq(f.tell(), 1)
        eq(f.read(1)[0][0], reference[1])
@raises(IOError)
def test_read_closed():
    """Reading from a closed trajectory handle must raise IOError."""
    handle = DTRTrajectoryFile(fn_dtr)
    handle.close()
    handle.read()
# @raises(IOError)
# def test_write_closed():
# f = DTRTrajectoryFile(fn_dtr, 'w')
# f.close()
# xyz = np.array(np.random.uniform(low=-50, high=-50, size=(3, 17, 3)), dtype=np.float32)
# times = np.array([1, 23.0, 48.0], dtype=np.float64)
# cell_lengths=np.array(np.random.uniform(low=100, high=200, size=(3, 3)), dtype=np.float32)
# cell_angles=np.array([[90, 90, 90],
# [80, 100, 120],
# [120, 90, 80]],
# dtype=np.float32)
#
# f.write(xyz, cell_lengths=cell_lengths,
# cell_angles=cell_angles,
# times=times)
def test_tell():
    """tell() tracks frames consumed; seeking past EOF lands on the end."""
    with DTRTrajectoryFile(fn_dtr) as traj:
        final = len(traj)
        eq(traj.tell(), 0)
        traj.read(2)
        eq(traj.tell(), 2)
        traj.read(100)
        eq(traj.tell(), 102)
        # Seeking beyond the last frame leaves the cursor at the end.
        traj.seek(600)
        eq(traj.tell(), final)
if __name__ == "__main__":
    # Allow a quick smoke-run when executed directly. Previously this call
    # ran unconditionally at import time, exercising file I/O on every
    # import of the test module.
    test_read_7()
|
lgpl-2.1
| -1,750,320,178,704,864,000
| 32.537994
| 97
| 0.608719
| false
|
vipul-tm/DAG
|
dags-ttpl/test_mod.py
|
1
|
1158
|
from airflow import DAG
from airflow.operators import TelradExtractor
from datetime import datetime, timedelta
from airflow.models import Variable
#from airflow.telrad_extractor_operator import TelradExtractor
# Defaults applied to every task in the DAG below.
default_args = {
    'owner': 'wireless',
    'depends_on_past': False,
    # NOTE(review): a dynamic start_date (now - 5 min) is re-evaluated on
    # every scheduler parse — confirm this is intended for this DAG.
    'start_date': datetime.now() - timedelta(minutes=5),
    #'email': ['vipulsharma144@gmail.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 0,
    'retry_delay': timedelta(minutes=1),
    'catchup': False,
    'provide_context': True,
    'sla' : timedelta(minutes=5)
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}
PARENT_DAG_NAME = "TestModules"
# Runs every five minutes.
main_etl_dag2=DAG(dag_id=PARENT_DAG_NAME, default_args=default_args, schedule_interval='*/5 * * * *',)
# Single task: the custom TelradExtractor operator, configured via Airflow
# Variables and connection ids.
te = TelradExtractor(
    task_id="test_modules",
    #python_callable = upload_service_data_mysql,
    dag=main_etl_dag2,
    query = Variable.get("cpe_command_get_snapshot"),
    telrad_conn_id = "telrad_default",
    redis_conn_id = "redis_hook_4",
    wait_str = "admin@BreezeVIEW>"
)
|
bsd-3-clause
| -2,846,163,788,623,033,300
| 26.571429
| 102
| 0.65285
| false
|
fcrozat/telepathy-haze
|
tests/twisted/presence/presence.py
|
1
|
2155
|
"""
A simple smoke-test for C.I.SimplePresence
"""
import dbus
from twisted.words.xish import domish, xpath
from twisted.words.protocols.jabber.client import IQ
from servicetest import assertEquals
from hazetest import exec_test
import constants as cs
def test(q, bus, conn, stream):
    """Push presence for a roster contact over XMPP and check the
    PresencesChanged signals Haze emits on D-Bus."""
    amy_handle = conn.RequestHandles(1, ['amy@foo.com'])[0]
    # Divergence from Gabble: hazetest responds to all roster gets with an
    # empty roster, so we need to push the roster.
    iq = IQ(stream, 'set')
    query = iq.addElement(('jabber:iq:roster', 'query'))
    item = query.addElement('item')
    item['jid'] = 'amy@foo.com'
    item['subscription'] = 'both'
    stream.send(iq)
    # Amy goes away with a status message...
    presence = domish.Element((None, 'presence'))
    presence['from'] = 'amy@foo.com'
    show = presence.addElement((None, 'show'))
    show.addContent('away')
    status = presence.addElement((None, 'status'))
    status.addContent('At the pub')
    stream.send(presence)
    event = q.expect('dbus-signal', signal='PresencesChanged')
    assert event.args[0] == { amy_handle: (3, 'away', 'At the pub') }
    # ...then switches to 'chat' with a new message.
    presence = domish.Element((None, 'presence'))
    presence['from'] = 'amy@foo.com'
    show = presence.addElement((None, 'show'))
    show.addContent('chat')
    status = presence.addElement((None, 'status'))
    status.addContent('I may have been drinking')
    stream.send(presence)
    event = q.expect('dbus-signal', signal='PresencesChanged')
    # FIXME: 'chat' gets lost somewhere between the XMPP stream and what Haze
    # produces.
    assert event.args[0] == { amy_handle: (2, 'available', 'I may have been drinking') }
    # The Contacts interface must agree with the last emitted signal.
    amy_handle, asv = conn.Contacts.GetContactByID('amy@foo.com',
            [cs.CONN_IFACE_SIMPLE_PRESENCE])
    assertEquals(event.args[0][amy_handle], asv.get(cs.ATTR_PRESENCE))
    # A contact we never heard from reports 'unknown'.
    bob_handle, asv = conn.Contacts.GetContactByID('bob@foo.com',
            [cs.CONN_IFACE_SIMPLE_PRESENCE])
    assertEquals((cs.PRESENCE_UNKNOWN, 'unknown', ''),
            asv.get(cs.ATTR_PRESENCE))
    conn.Disconnect()
    q.expect('dbus-signal', signal='StatusChanged', args=[2, 1])
# Run this test under the servicetest harness when invoked directly.
if __name__ == '__main__':
    exec_test(test)
|
gpl-2.0
| 6,834,671,132,213,838,000
| 32.153846
| 88
| 0.658005
| false
|
f5devcentral/f5-cccl
|
f5_cccl/resource/ltm/internal_data_group.py
|
1
|
3015
|
"""Provides a class for managing BIG-IP iRule resources."""
# coding=utf-8
#
# Copyright (c) 2017-2021 F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from copy import deepcopy
import logging
from f5_cccl.resource import Resource
LOGGER = logging.getLogger(__name__)
def get_record_key(record):
    """Allows data groups to be sorted by the 'name' member.

    Records without a 'name' key sort first (empty string).
    """
    name = record.get('name', '')
    return name
class InternalDataGroup(Resource):
    """InternalDataGroup class."""

    # The property names class attribute defines the names of the
    # properties that we wish to compare.
    properties = dict(
        name=None,
        partition=None,
        type=None,
        records=list()
    )
    def __init__(self, name, partition, **data):
        """Create the InternalDataGroup.

        Records are stored sorted by their 'name' member so that __eq__
        below is insensitive to the input ordering.
        """
        super(InternalDataGroup, self).__init__(name, partition)
        self._data['type'] = data.get('type', '')
        records = data.get('records', list())
        self._data['records'] = sorted(records, key=get_record_key)
    def __eq__(self, other_dg):
        """Check the equality of the two objects.

        Only compare the properties as defined in the
        properties class dictionary.
        """
        if not isinstance(other_dg, InternalDataGroup):
            return False
        for key in self.properties:
            if self._data[key] != other_dg.data.get(key, None):
                return False
        return True
    def __hash__(self):  # pylint: disable=useless-super-delegation
        return super(InternalDataGroup, self).__hash__()
    def _uri_path(self, bigip):
        # iControl REST collection for internal data-groups.
        return bigip.tm.ltm.data_group.internals.internal
    def __str__(self):
        return str(self._data)
    def update(self, bigip, data=None, modify=False):
        """Override of base class implementation, required because data-groups
        are picky about what data can exist in the object when modifying.
        """
        # Work on a deep copy: do_update deletes the 'type' key from the
        # object it runs on, and we must not mutate self.
        tmp_copy = deepcopy(self)
        tmp_copy.do_update(bigip, data, modify)
    def do_update(self, bigip, data, modify):
        """Remove 'type' before doing the update."""
        del self._data['type']
        super(InternalDataGroup, self).update(
            bigip, data=data, modify=modify)
class IcrInternalDataGroup(InternalDataGroup):
    """InternalDataGroup built from an iControl REST (ICR) response object."""
    pass
class ApiInternalDataGroup(InternalDataGroup):
    """InternalDataGroup built from the CCCL API configuration object."""
    pass
|
apache-2.0
| 8,203,400,656,434,464,000
| 30.736842
| 77
| 0.659701
| false
|
saketkc/ribo-seq-snakemake
|
configs/config_Shalgi_et_al_Cell_2013.mouse.py
|
1
|
1460
|
# Root locations for reference data, pipeline output and raw reads.
GENOMES_DIR = '/home/cmb-panasas2/skchoudh/genomes'
OUT_DIR = '/staging/as/skchoudh/rna/September_2017_Shalgi_et_al_Cell_2013'
SRC_DIR = '/home/cmb-panasas2/skchoudh/github_projects/clip_seq_pipeline/scripts'
RAWDATA_DIR = '/home/cmb-06/as/skchoudh/dna/September_2017_Shalgi_et_al_Cell_2013/sra_single_end_mouse'
# Genome build and files derived from it (FASTA, STAR index, annotations).
GENOME_BUILD = 'mm10'
GENOME_FASTA = GENOMES_DIR + '/' + GENOME_BUILD + '/fasta/'+ GENOME_BUILD+ '.fa'
STAR_INDEX = GENOMES_DIR + '/' + GENOME_BUILD + '/star_annotated'
GTF = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.vM11.annotation.without_rRNA_tRNA.gtf'
GENE_NAMES = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + GENOME_BUILD+'_gene_names_stripped.tsv'
GTF_UTR = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.vM11.gffutils.modifiedUTRs.gtf'
GENE_LENGTHS = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.vM11.coding_lengths.tsv' #+ GENOME_BUILD+'_gene_lengths.tsv'
# Strandedness options passed to htseq-count / featureCounts.
HTSEQ_STRANDED = 'yes'
FEATURECOUNTS_S = '-s 1'
GENE_BED = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'mm10.vM11.genes.fromUCSC.bed' #+ GENOME_BUILD+'_gene_lengths.tsv'
START_CODON_BED = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.vM11.gffutils.start_codon.bed' #+ GENOME_BUILD+'_gene_lengths.tsv'
STOP_CODON_BED = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.vM11.gffutils.stop_codon.bed' #+ GENOME_BUILD+'_gene_lengths.tsv'
# Feature type and overlap-resolution mode for counting.
FEATURECOUNTS_T='CDS'
HTSEQ_MODE='intersection-strict'
|
bsd-3-clause
| -6,291,594,674,228,733,000
| 80.111111
| 146
| 0.7
| false
|
DigitalSlideArchive/HistomicsTK
|
tests/test_color_normalization.py
|
1
|
2842
|
#!/usr/bin/env python
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import collections
import numpy as np
import os
from histomicstk.preprocessing import color_normalization as htk_cn
from histomicstk.cli import utils as cli_utils
from .datastore import datastore
class TestReinhardNormalization:
    """Regression test for Reinhard color-normalization statistics."""

    def test_reinhard_stats(self):
        # Fetch the whole-slide test image from the remote datastore.
        wsi_path = os.path.join(datastore.fetch(
            'sample_svs_image.TCGA-DU-6399-01A-01-TS1.e8eb65de-d63e-42db-af6f-14fefbbdf7bd.svs'  # noqa
        ))
        # Fixed seed: reinhard_stats samples the slide (fraction 0.1 below).
        np.random.seed(1)
        # create dask client
        args = {
            # In Python 3 unittesting, the scheduler fails if it uses processes
            'scheduler': 'multithreading',  # None,
            'num_workers': -1,
            'num_threads_per_worker': 1,
        }
        args = collections.namedtuple('Parameters', args.keys())(**args)
        cli_utils.create_dask_client(args)
        # compute reinhard stats
        wsi_mean, wsi_stddev = htk_cn.reinhard_stats(
            wsi_path, 0.1, magnification=20)
        # Expected 3-channel mean/stddev for this slide (regression values).
        gt_mean = [8.896134, -0.074579, 0.022006]
        gt_stddev = [0.612143, 0.122667, 0.021361]
        np.testing.assert_allclose(wsi_mean, gt_mean, atol=1e-2)
        np.testing.assert_allclose(wsi_stddev, gt_stddev, atol=1e-2)
class TestBackgroundIntensity:
    """Regression test for slide background-intensity estimation."""

    def test_background_intensity(self):
        # Fetch the whole-slide test image from the remote datastore.
        wsi_path = os.path.join(datastore.fetch(
            'sample_svs_image.TCGA-DU-6399-01A-01-TS1.e8eb65de-d63e-42db-af6f-14fefbbdf7bd.svs'
        ))
        # Fixed seed: background_intensity samples pixels from the slide.
        np.random.seed(1)
        # create dask client
        args = {
            # In Python 3 unittesting, the scheduler fails if it uses processes
            'scheduler': 'multithreading',  # None,
            'num_workers': -1,
            'num_threads_per_worker': 1,
        }
        args = collections.namedtuple('Parameters', args.keys())(**args)
        cli_utils.create_dask_client(args)
        # compute background intensity
        I_0 = htk_cn.background_intensity(wsi_path,
                                          sample_approximate_total=5000)
        # Expected per-channel RGB background level for this slide.
        np.testing.assert_allclose(I_0, [242, 244, 241], atol=1)
|
apache-2.0
| -2,885,756,250,232,397,300
| 32.046512
| 103
| 0.6038
| false
|
kanishkarj/Rave
|
Qt_Designer_files/playlist_design.py
|
1
|
2791
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'playlist.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# PyQt4 compatibility shims: on API-2 builds QString.fromUtf8 and
# QApplication.UnicodeUTF8 do not exist, so fall back to plain passthrough
# and the shorter translate() signature.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_playlist(object):
    """UI definition generated by pyuic4 from playlist.ui.

    Per the file header, manual edits are lost when the .ui file is
    regenerated; change playlist.ui instead.
    """

    def setupUi(self, playlist):
        # Window: media list filling the grid, three 35x35 buttons on top.
        playlist.setObjectName(_fromUtf8("playlist"))
        playlist.resize(451, 300)
        self.gridLayout = QtGui.QGridLayout(playlist)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.mediaList = QtGui.QListWidget(playlist)
        self.mediaList.setObjectName(_fromUtf8("mediaList"))
        self.gridLayout.addWidget(self.mediaList, 1, 0, 1, 5)
        self.listRearrange = QtGui.QPushButton(playlist)
        self.listRearrange.setMinimumSize(QtCore.QSize(35, 35))
        self.listRearrange.setMaximumSize(QtCore.QSize(35, 35))
        self.listRearrange.setStyleSheet(_fromUtf8("border-radius:5em;"))
        self.listRearrange.setText(_fromUtf8(""))
        self.listRearrange.setObjectName(_fromUtf8("listRearrange"))
        self.gridLayout.addWidget(self.listRearrange, 0, 3, 1, 1)
        self.listRemove = QtGui.QPushButton(playlist)
        self.listRemove.setMinimumSize(QtCore.QSize(35, 35))
        self.listRemove.setMaximumSize(QtCore.QSize(35, 35))
        self.listRemove.setStyleSheet(_fromUtf8("border-radius:5em;"))
        self.listRemove.setText(_fromUtf8(""))
        self.listRemove.setObjectName(_fromUtf8("listRemove"))
        self.gridLayout.addWidget(self.listRemove, 0, 2, 1, 1)
        self.listAdd = QtGui.QPushButton(playlist)
        self.listAdd.setMinimumSize(QtCore.QSize(35, 35))
        self.listAdd.setMaximumSize(QtCore.QSize(35, 35))
        self.listAdd.setStyleSheet(_fromUtf8("border-radius:5em;"))
        self.listAdd.setText(_fromUtf8(""))
        self.listAdd.setObjectName(_fromUtf8("listAdd"))
        self.gridLayout.addWidget(self.listAdd, 0, 1, 1, 1)
        self.retranslateUi(playlist)
        QtCore.QMetaObject.connectSlotsByName(playlist)
    def retranslateUi(self, playlist):
        # Installs translatable window title and button tooltips.
        playlist.setWindowTitle(_translate("playlist", "Playlist", None))
        self.listRearrange.setToolTip(_translate("playlist", "Reorder", None))
        self.listRemove.setToolTip(_translate("playlist", "Remove", None))
        self.listAdd.setToolTip(_translate("playlist", "Add File", None))
|
gpl-3.0
| 8,714,846,205,213,983,000
| 42.609375
| 79
| 0.694375
| false
|
belese/luciphone
|
Luciphone/modules/NFCmonitor.py
|
1
|
1642
|
import time
from py532lib.i2c import *
from py532lib.frame import *
from py532lib.constants import *
class NFCmonitor :
    """Polls a PN532 NFC reader over I2C and fires callbacks when a card
    is placed on or removed from the reader."""

    def __init__(self) :
        self.cardIn = False      # whether a card is currently present
        self.UUID = []           # UID of the current card (None once removed)
        self.stopped = False     # set by stop() to end the polling loop
        self.cbcardin = None     # callback(uid) on card arrival
        self.cbcardout = None    # callback(uid) on card removal
        #Initialise NFC_reader
        self.pn532 = Pn532_i2c()
        self.pn532.SAMconfigure()
    def registerCB(self,cbcardin = None,cbcardout = None):
        # Register the card-in / card-out callbacks (either may be None).
        self.cbcardin = cbcardin
        self.cbcardout = cbcardout
    def _trust_uid(self,uid) :
        # Debounce: the reading is trusted only if two more consecutive
        # polls return the same value.
        return uid == self.pn532.get_uid() and uid == self.pn532.get_uid()
    def stop(self) :
        self.stopped = True
    def start(self) :
        # Blocking poll loop; runs until stop() is called.
        print ("NFC Monitor started")
        while not self.stopped :
            uid = self.pn532.get_uid()
            if uid == self.UUID :
                # No change; back off briefly before polling again.
                time.sleep(0.2)
            elif uid and self._trust_uid(uid) :
                print ("New Card Detected",uid)
                self.UUID = uid
                if not self.cardIn :
                    self.cardIn = True
                    if self.cbcardin : self.cbcardin(self.UUID)
            elif not uid and self.cardIn and self._trust_uid(uid):
                # uid is falsy here, so _trust_uid confirms the card is
                # still absent on two further polls before reporting removal.
                print ("Card Removed 2",self.UUID)
                uuid = self.UUID
                self.UUID = None
                self.cardIn = False
                if self.cbcardout : self.cbcardout(uuid)
# NOTE(review): instantiating and starting here means merely importing this
# module blocks the importer in the polling loop forever; consider an
# `if __name__ == "__main__":` guard — confirm no caller relies on this.
NFC = NFCmonitor()
NFC.start()
|
gpl-2.0
| 6,317,143,941,664,688,000
| 31.84
| 95
| 0.477467
| false
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/gtk/gdk/__init__/Font.py
|
1
|
1926
|
# encoding: utf-8
# module gtk.gdk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/pynotify/_pynotify.so
# by generator 1.135
# no doc
# imports
from exceptions import Warning
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
import pango as __pango
import pangocairo as __pangocairo
# IDE introspection stub generated by a tool ("generator 1.135" per the
# header): bodies are placeholders and real signatures are unknown.
class Font(__gobject.GBoxed):
    # no doc
    def char_height(self, *args, **kwargs): # real signature unknown
        pass
    def char_measure(self, *args, **kwargs): # real signature unknown
        pass
    def char_width(self, *args, **kwargs): # real signature unknown
        pass
    def equal(self, *args, **kwargs): # real signature unknown
        pass
    def extents(self, *args, **kwargs): # real signature unknown
        pass
    def get_display(self, *args, **kwargs): # real signature unknown
        pass
    def get_name(self, *args, **kwargs): # real signature unknown
        pass
    def height(self, *args, **kwargs): # real signature unknown
        pass
    def id(self, *args, **kwargs): # real signature unknown
        pass
    def measure(self, *args, **kwargs): # real signature unknown
        pass
    def string_height(self, *args, **kwargs): # real signature unknown
        pass
    def string_measure(self, *args, **kwargs): # real signature unknown
        pass
    def string_width(self, *args, **kwargs): # real signature unknown
        pass
    def width(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    ascent = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    descent = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    type = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    __gtype__ = None # (!) real value is ''
|
gpl-2.0
| 2,422,510,009,856,149,500
| 25.383562
| 97
| 0.637072
| false
|
XBMC-Addons/plugin.library.node.editor
|
resources/lib/orderby.py
|
1
|
8285
|
# coding=utf-8
import os, sys
import xbmc, xbmcaddon, xbmcplugin, xbmcgui, xbmcvfs
import xml.etree.ElementTree as xmltree
from traceback import print_exc
from urllib.parse import unquote
from resources.lib.common import *
class OrderByFunctions():
def __init__(self, ltype):
self.ltype = ltype
def _load_rules( self ):
if self.ltype.startswith('video'):
overridepath = os.path.join( DEFAULTPATH , "videorules.xml" )
else:
overridepath = os.path.join( DEFAULTPATH , "musicrules.xml" )
try:
tree = xmltree.parse( overridepath )
return tree
except:
return None
def translateOrderBy( self, rule ):
# Load the rules
tree = self._load_rules()
hasValue = True
if rule[ 0 ] == "sorttitle":
rule[ 0 ] = "title"
if rule[ 0 ] != "random":
# Get the field we're ordering by
elems = tree.getroot().find( "matches" ).findall( "match" )
for elem in elems:
if elem.attrib.get( "name" ) == rule[ 0 ]:
match = xbmc.getLocalizedString( int( elem.find( "label" ).text ) )
else:
# We'll manually set for random
match = xbmc.getLocalizedString( 590 )
# Get localization of direction
direction = None
elems = tree.getroot().find( "orderby" ).findall( "type" )
for elem in elems:
if elem.text == rule[ 1 ]:
direction = xbmc.getLocalizedString( int( elem.attrib.get( "label" ) ) )
directionVal = rule[ 1 ]
if direction is None:
direction = xbmc.getLocalizedString( int( tree.getroot().find( "orderby" ).find( "type" ).attrib.get( "label" ) ) )
directionVal = tree.getroot().find( "orderby" ).find( "type" ).text
return [ [ match, rule[ 0 ] ], [ direction, directionVal ] ]
def displayOrderBy( self, actionPath):
try:
# Load the xml file
tree = xmltree.parse( unquote(actionPath) )
root = tree.getroot()
# Get the content type
content = root.find( "content" ).text
# Get the order node
orderby = root.find( "order" )
if orderby is None:
# There is no orderby element, so add one
self.newOrderBy( tree, actionPath )
orderby = root.find( "order" )
match = orderby.text
if "direction" in orderby.attrib:
direction = orderby.attrib.get( "direction" )
else:
direction = ""
translated = self.translateOrderBy( [match, direction ] )
listitem = xbmcgui.ListItem( label="%s" % ( translated[ 0 ][ 0 ] ) )
action = "plugin://plugin.library.node.editor?ltype=%s&type=editOrderBy&actionPath=" % self.ltype + actionPath + "&content=" + content + "&default=" + translated[0][1]
xbmcplugin.addDirectoryItem( int(sys.argv[ 1 ]), action, listitem, isFolder=False )
listitem = xbmcgui.ListItem( label="%s" % ( translated[ 1 ][ 0 ] ) )
action = "plugin://plugin.library.node.editor?ltype=%s&type=editOrderByDirection&actionPath=" % self.ltype + actionPath + "&default=" + translated[1][1]
xbmcplugin.addDirectoryItem( int(sys.argv[ 1 ]), action, listitem, isFolder=False )
xbmcplugin.setContent(int(sys.argv[1]), 'files')
xbmcplugin.endOfDirectory(int(sys.argv[1]))
except:
print_exc()
def editOrderBy( self, actionPath, content, default ):
# Load all operator groups
tree = self._load_rules().getroot()
elems = tree.find( "matches" ).findall( "match" )
selectName = []
selectValue = []
# Find the matches for the content we've been passed
for elem in elems:
contentMatch = elem.find( content )
if contentMatch is not None:
selectName.append( xbmc.getLocalizedString( int( elem.find( "label" ).text ) ) )
selectValue.append( elem.attrib.get( "name" ) )
# Add a random element
selectName.append( xbmc.getLocalizedString( 590 ) )
selectValue.append( "random" )
# Let the user select an operator
selectedOperator = xbmcgui.Dialog().select( LANGUAGE( 30314 ), selectName )
# If the user selected no operator...
if selectedOperator == -1:
return
returnVal = selectValue[ selectedOperator ]
if returnVal == "title":
returnVal = "sorttitle"
self.writeUpdatedOrderBy( actionPath, field = returnVal )
def editDirection( self, actionPath, direction ):
# Load all directions
tree = self._load_rules().getroot()
elems = tree.find( "orderby" ).findall( "type" )
selectName = []
selectValue = []
# Find the group we've been passed and load its operators
for elem in elems:
selectName.append( xbmc.getLocalizedString( int( elem.attrib.get( "label" ) ) ) )
selectValue.append( elem.text )
# Let the user select an operator
selectedOperator = xbmcgui.Dialog().select( LANGUAGE( 30315 ), selectName )
# If the user selected no operator...
if selectedOperator == -1:
return
self.writeUpdatedOrderBy( actionPath, direction = selectValue[ selectedOperator ] )
def writeUpdatedOrderBy( self, actionPath, field = None, direction = None ):
# This function writes an updated orderby rule
try:
# Load the xml file
tree = xmltree.parse( unquote(unquote(actionPath)) )
root = tree.getroot()
# Get all the rules
orderby = root.find( "order" )
if field is not None:
orderby.text = field
if direction is not None:
orderby.set( "direction", direction )
# Save the file
self.indent( root )
tree.write( unquote(actionPath), encoding="UTF-8" )
except:
print_exc()
def newOrderBy( self, tree, actionPath ):
    # Add a new <order> element to the action file, using the default sort
    # field and direction for the file's content type. "tree" is the
    # already-parsed ElementTree; "actionPath" is its (URL-quoted) path.
    try:
        # Load the xml file
        #tree = xmltree.parse( actionPath )
        root = tree.getroot()
        # Get the content type
        content = root.find( "content" )
        if content is None:
            # Without a content type we cannot choose a default sort field.
            xbmcgui.Dialog().ok( ADDONNAME, LANGUAGE( 30406 ) )
            return
        else:
            content = content.text
        # Find the default match for this content type
        ruleTree = self._load_rules().getroot()
        elems = ruleTree.find( "matches" ).findall( "match" )
        match = "title"
        for elem in elems:
            contentCheck = elem.find( content )
            if contentCheck is not None:
                # We've found the first match for this type
                match = elem.attrib.get( "name" )
                break
        if match == "title":
            # The library expects "sorttitle" rather than "title"
            match = "sorttitle"
        # Find the default direction (first <type> under <orderby>)
        elem = ruleTree.find( "orderby" ).find( "type" )
        direction = elem.text
        # Write the new rule
        newRule = xmltree.SubElement( root, "order" )
        newRule.text = match
        newRule.set( "direction", direction )
        # Save the file
        self.indent( root )
        tree.write( unquote( actionPath ), encoding="UTF-8" )
    except:
        print_exc()
# in-place prettyprint formatter
def indent( self, elem, level=0 ):
    """Pretty-print *elem* in place by rewriting whitespace-only text/tails.

    Classic ElementTree indentation recipe: containers open on a new line
    one tab deeper; existing non-whitespace text and tails are preserved.
    """
    pad = "\n" + level * "\t"
    if len( elem ):
        # Container node: push its text and tail onto fresh lines.
        if not elem.text or not elem.text.strip():
            elem.text = pad + "\t"
        if not elem.tail or not elem.tail.strip():
            elem.tail = pad
        child = None
        for child in elem:
            self.indent( child, level + 1 )
        # Pull the last child's closing tail back to this nesting level.
        if not child.tail or not child.tail.strip():
            child.tail = pad
    else:
        # Leaf node: only the tail needs adjusting (never at the root).
        if level and ( not elem.tail or not elem.tail.strip() ):
            elem.tail = pad
|
gpl-2.0
| -1,985,181,185,056,589,800
| 41.487179
| 179
| 0.551358
| false
|
ScottWales/rose
|
lib/python/rose/scheme_handler.py
|
1
|
5051
|
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# (C) British Crown Copyright 2012-5 Met Office.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
#-----------------------------------------------------------------------------
"""Load and select from a group of related functional classes."""
from glob import glob
import inspect
import os
import sys
class SchemeHandlersManager(object):
    """Load and select from a group of related functional classes."""

    # Name of the optional hook method a handler may expose to claim items.
    CAN_HANDLE = "can_handle"

    def __init__(self, paths, ns=None, attrs=None, can_handle=None,
                 *args, **kwargs):
        """Load modules in paths and initialise any classes with a SCHEME.

        If "ns" is not None, only modules under the specified name-space in
        paths are searched and imported. ("ns" should be a str in the form
        "a.b", which will be converted as "a/b" for path search.)

        Initialise each handler, and save it in self.handlers, which is a
        dict of {scheme: handler, ...}.

        If attrs is specified, it should be a list of attributes the class
        has that do not have None values.

        args and kwargs are passed as *args, **kwargs to the constructor of
        each class. This manager will be passed to the constructor using
        kwargs["manager"].

        Each handler class may have a SCHEMES attribute (a list of str) or
        a SCHEME attribute with a str value, which will be used as the keys
        to self.handlers of this manager.

        Optionally, a handler may have a h.can_handle(scheme, **kwargs)
        method that returns a boolean value to indicate whether it can
        handle a given value.
        """
        self.handlers = {}
        if can_handle is None:
            can_handle = self.CAN_HANDLE
        self.can_handle = can_handle
        if attrs is None:
            # BUGFIX: the default attrs=None was iterated below, raising
            # TypeError as soon as a candidate class was found. Treat it
            # as "no required attributes".
            attrs = []
        cwd = os.getcwd()
        ns_path = ""
        if ns:
            ns_path = os.path.join(*(ns.split("."))) + os.sep
        for path in paths:
            os.chdir(path)  # assuming that "" is at the front of sys.path
            sys.path.insert(0, path)
            try:
                kwargs["manager"] = self
                for file_name in glob(ns_path + "*.py"):
                    if file_name.startswith("__"):
                        continue
                    mod_path = file_name[0:-3]
                    mod_name = mod_path.replace(os.sep, ".")
                    mod = __import__(mod_name, fromlist=[""])
                    members = inspect.getmembers(mod, inspect.isclass)
                    # A module with exactly one class defaults its scheme
                    # to the module's base name.
                    scheme0_default = None
                    if len(members) == 1:
                        scheme0_default = os.path.basename(mod_path)
                    for _name, cls in members:
                        # Skip classes missing any required attribute.
                        if any(getattr(cls, a, None) is None for a in attrs):
                            continue
                        handler = None
                        scheme0 = getattr(cls, "SCHEME", scheme0_default)
                        schemes = []
                        if scheme0 is not None:
                            schemes = [scheme0]
                        for scheme in getattr(cls, "SCHEMES", schemes):
                            # "in" instead of dict.has_key(), which does
                            # not exist on Python 3.
                            if scheme in self.handlers:
                                raise ValueError(cls)  # scheme already used
                            if handler is None:
                                # Instantiate lazily, once per class.
                                handler = cls(*args, **kwargs)
                            self.handlers[scheme] = handler
            finally:
                os.chdir(cwd)
                sys.path.pop(0)

    def get_handler(self, scheme):
        """Return the handler with a matching scheme.

        Return None if there is no handler with a matching scheme.
        """
        try:
            return self.handlers.get(scheme)
        except TypeError:
            # "scheme" may be unhashable (e.g. a list): treat as no match.
            return None

    def guess_handler(self, item):
        """Return a handler that can handle item.

        Return None if there is no handler with a matching scheme.
        """
        handler = self.get_handler(item)
        if handler:
            return handler
        for handler in self.handlers.values():
            can_handle = getattr(handler, self.can_handle, None)
            if callable(can_handle) and can_handle(item):
                return handler
        return None
|
gpl-3.0
| 441,689,296,259,777,660
| 38.771654
| 78
| 0.545239
| false
|
hocinebendou/bika.gsoc
|
bika/lims/utils/analysis.py
|
1
|
12208
|
# -*- coding: utf-8 -*-
import math
import zope.event
from bika.lims.utils import formatDecimalMark
from Products.Archetypes.event import ObjectInitializedEvent
from Products.CMFCore.WorkflowCore import WorkflowException
from Products.CMFPlone.utils import _createObjectByType
def create_analysis(context, service, keyword, interim_fields):
    """Create, initialise and workflow-transition an Analysis in *context*.

    The new Analysis is configured from *service* and *interim_fields*,
    reindexed, announced via ObjectInitializedEvent, and then pushed
    through either the sampling or no-sampling workflow depending on the
    Bika setup. Returns the newly created analysis object.
    """
    # Determine if the sampling workflow is enabled
    workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
    # Create the analysis
    analysis = _createObjectByType("Analysis", context, keyword)
    analysis.setService(service)
    analysis.setInterimFields(interim_fields)
    analysis.setMaxTimeAllowed(service.getMaxTimeAllowed())
    analysis.unmarkCreationFlag()
    analysis.reindexObject()
    # Trigger the intitialization event of the new object
    zope.event.notify(ObjectInitializedEvent(analysis))
    # Perform the appropriate workflow action
    try:
        workflow_action = 'sampling_workflow' if workflow_enabled \
            else 'no_sampling_workflow'
        context.portal_workflow.doActionFor(analysis, workflow_action)
    except WorkflowException:
        # The analysis may have been transitioned already!
        # I am leaving this code here though, to prevent regression.
        pass
    # Return the newly created analysis
    return analysis
def get_significant_digits(numeric_value):
    """
    Returns the precision for a given floatable value.
    If value is None or not floatable, returns None.
    Will return positive values if the result is below 0 and will
    return 0 values if the result is above 0.
    :param numeric_value: the value to get the precision from
    :return: the numeric_value's precision
             Examples:
             numeric_value     Returns
             0                 0
             0.22              1
             1.34              0
             0.0021            3
             0.013             2
             2                 0
             22                0
    """
    try:
        numeric_value = float(numeric_value)
    except (TypeError, ValueError):
        # BUGFIX: float(None) raises TypeError, not ValueError, so the
        # documented "None in -> None out" contract previously crashed.
        return None
    if numeric_value == 0:
        return 0
    # floor(log10(|x|)) is the exponent of the leading significant digit.
    significant_digit = int(math.floor(math.log10(abs(numeric_value))))
    return 0 if significant_digit > 0 else abs(significant_digit)
def format_uncertainty(analysis, result, decimalmark='.', sciformat=1):
    """
    Returns the formatted uncertainty according to the analysis, result
    and decimal mark specified following these rules:

    If the "Calculate precision from uncertainties" is enabled in
    the Analysis service, and
    a) If the the non-decimal number of digits of the result is above
       the service's ExponentialFormatPrecision, the uncertainty will
       be formatted in scientific notation. The uncertainty exponential
       value used will be the same as the one used for the result. The
       uncertainty will be rounded according to the same precision as
       the result.
       Example:
       Given an Analysis with an uncertainty of 37 for a range of
       results between 30000 and 40000, with an
       ExponentialFormatPrecision equal to 4 and a result of 32092,
       this method will return 0.004E+04
    b) If the number of digits of the integer part of the result is
       below the ExponentialFormatPrecision, the uncertainty will be
       formatted as decimal notation and the uncertainty will be
       rounded one position after reaching the last 0 (precision
       calculated according to the uncertainty value).
       Example:
       Given an Analysis with an uncertainty of 0.22 for a range of
       results between 1 and 10 with an ExponentialFormatPrecision
       equal to 4 and a result of 5.234, this method will return 0.2

    If the "Calculate precision from Uncertainties" is disabled in the
    analysis service, the same rules described above applies, but the
    precision used for rounding the uncertainty is not calculated from
    the uncertainty neither the result. The fixed length precision is
    used instead.

    For further details, visit
    https://jira.bikalabs.com/browse/LIMS-1334

    If the result is not floatable or no uncertainty defined, returns
    an empty string.

    The default decimal mark '.' will be replaced by the decimalmark
    specified.

    :param analysis: the analysis from which the uncertainty, precision
                     and other additional info have to be retrieved
    :param result: result of the analysis. Used to retrieve and/or
                   calculate the precision and/or uncertainty
    :param decimalmark: decimal mark to use. By default '.'
    :param sciformat: 1. The sci notation has to be formatted as aE^+b
                      2. The sci notation has to be formatted as ax10^b
                      3. As 2, but with super html entity for exp
                      4. The sci notation has to be formatted as a·10^b
                      5. As 4, but with super html entity for exp
                      By default 1
    :return: the formatted uncertainty
    """
    try:
        result = float(result)
    except ValueError:
        # Non-floatable results carry no uncertainty to display.
        return ""

    objres = None
    try:
        objres = float(analysis.getResult())
    except ValueError:
        pass

    service = analysis.getService()
    uncertainty = None
    if result == objres:
        # To avoid problems with DLs
        uncertainty = analysis.getUncertainty()
    else:
        uncertainty = analysis.getUncertainty(result)

    if uncertainty is None or uncertainty == 0:
        return ""

    # Scientific notation?
    # Get the default precision for scientific notation
    threshold = service.getExponentialFormatPrecision()
    # Current result precision is above the threshold?
    sig_digits = get_significant_digits(result)
    # NOTE(review): get_significant_digits() never returns a negative
    # value, so "negative" appears to always be False here and the "-"
    # exponent branches below look unreachable — confirm before relying
    # on them.
    negative = sig_digits < 0
    sign = '-' if negative else ''
    sig_digits = abs(sig_digits)
    sci = sig_digits >= threshold and sig_digits > 0

    formatted = ''
    if sci:
        # Scientific notation
        # 3.2014E+4
        if negative == True:
            res = float(uncertainty)*(10**sig_digits)
        else:
            res = float(uncertainty)/(10**sig_digits)
        # Round the mantissa to the result's precision and drop a
        # redundant ".0" when it is a whole number.
        res = float(str("%%.%sf" % (sig_digits-1)) % res)
        res = int(res) if res.is_integer() else res
        if sciformat in [2,3,4,5]:
            if sciformat == 2:
                # ax10^b or ax10^-b
                formatted = "%s%s%s%s" % (res,"x10^",sign,sig_digits)
            elif sciformat == 3:
                # ax10<super>b</super> or ax10<super>-b</super>
                formatted = "%s%s%s%s%s" % (res,"x10<sup>",sign,sig_digits,"</sup>")
            elif sciformat == 4:
                # ax10^b or ax10^-b
                formatted = "%s%s%s%s" % (res,"·10^",sign,sig_digits)
            elif sciformat == 5:
                # ax10<super>b</super> or ax10<super>-b</super>
                formatted = "%s%s%s%s%s" % (res,"·10<sup>",sign,sig_digits,"</sup>")
        else:
            # Default format: aE^+b
            sig_digits = "%02d" % sig_digits
            formatted = "%s%s%s%s" % (res,"e",sign,sig_digits)
            #formatted = str("%%.%se" % sig_digits) % uncertainty
    else:
        # Decimal notation
        prec = analysis.getPrecision(result)
        prec = prec if prec else ''
        formatted = str("%%.%sf" % prec) % uncertainty
    return formatDecimalMark(formatted, decimalmark)
def format_numeric_result(analysis, result, decimalmark='.', sciformat=1):
    """
    Returns the formatted number part of a results value. This is
    responsible for deciding the precision, and notation of numeric
    values in accordance to the uncertainty. If a non-numeric
    result value is given, the value will be returned unchanged.

    If the non-decimal number of digits of the result is at or above the
    service's ExponentialFormatPrecision, the result is formatted in
    scientific notation (e.g. 32092 -> 3.2092E+04 with precision 4);
    otherwise it is formatted as decimal notation, rounded according to
    the precision (derived from the uncertainty, or the fixed-length
    precision when "Calculate precision from Uncertainties" is disabled
    in the analysis service).

    For further details, visit
    https://jira.bikalabs.com/browse/LIMS-1334

    The default decimal mark '.' will be replaced by the decimalmark
    specified.

    :param analysis: the analysis from which the uncertainty, precision
                     and other additional info have to be retrieved
    :param result: result to be formatted.
    :param decimalmark: decimal mark to use. By default '.'
    :param sciformat: 1. The sci notation has to be formatted as aE^+b
                      2. The sci notation has to be formatted as ax10^b
                      3. As 2, but with super html entity for exp
                      4. The sci notation has to be formatted as a·10^b
                      5. As 4, but with super html entity for exp
                      By default 1
    :return: the formatted result
    """
    try:
        result = float(result)
    except (TypeError, ValueError):
        # Non-numeric (including None): return unchanged, as documented.
        # float(None) raises TypeError, which was previously uncaught.
        return result

    # continuing with 'nan' result will cause formatting to fail.
    if math.isnan(result):
        return result

    service = analysis.getService()
    # Get the default precision for scientific notation
    threshold = service.getExponentialFormatPrecision()
    # Current result precision is above the threshold?
    sig_digits = get_significant_digits(result)
    negative = sig_digits < 0
    sign = '-' if negative else ''
    sig_digits = abs(sig_digits)
    sci = sig_digits >= threshold

    formatted = ''
    if sci:
        # Scientific notation
        if sciformat in [2, 3, 4, 5]:
            if negative:
                res = float(result) * (10 ** sig_digits)
            else:
                res = float(result) / (10 ** sig_digits)
            res = float(str("%%.%sf" % (sig_digits - 1)) % res)
            # BUGFIX: the original tested "'.' not in res" on a float,
            # which raises TypeError ("argument of type 'float' is not
            # iterable"). float.is_integer() is the correct whole-number
            # check and drops a redundant ".0".
            res = int(res) if res.is_integer() else res
            if sciformat == 2:
                # ax10^b or ax10^-b
                formatted = "%s%s%s%s" % (res, "x10^", sign, sig_digits)
            elif sciformat == 3:
                # ax10<super>b</super> or ax10<super>-b</super>
                formatted = "%s%s%s%s%s" % (res, "x10<sup>", sign, sig_digits, "</sup>")
            elif sciformat == 4:
                # ax10^b or ax10^-b
                formatted = "%s%s%s%s" % (res, "·10^", sign, sig_digits)
            elif sciformat == 5:
                # ax10<super>b</super> or ax10<super>-b</super>
                formatted = "%s%s%s%s%s" % (res, "·10<sup>", sign, sig_digits, "</sup>")
        else:
            # Default format: aE^+b
            formatted = str("%%.%se" % sig_digits) % result
    else:
        # Decimal notation
        prec = analysis.getPrecision(result)
        prec = prec if prec else ''
        formatted = str("%%.%sf" % prec) % result
        # Normalise a zero-fraction value to a plain integer string.
        # ("formatted" is a str here, so the "." membership test is valid;
        # it is scoped to this branch because the sciformat 2-5 strings,
        # e.g. "3x10^4", are not parseable by float().)
        formatted = str(int(float(formatted))) if '.' not in formatted else formatted
    return formatDecimalMark(formatted, decimalmark)
|
mit
| 269,186,533,421,347,600
| 39.138158
| 85
| 0.632028
| false
|
VerstandInvictus/NachIOs
|
ifthentrack.py
|
1
|
1181
|
## Hook.io Nach tracker updater.
## This takes a query with parameters of ?val=<number>&tr=<Nach tracker ID>&sec=<secret word>
## and adds a data point to the Nach tracker with ID corresponding to the "tr" param.
## It is intended to be used with IFTTT's Maker channel action but could be triggered from anywhere.
## It is not authenticated because IFTTT doesn't really support HTTP auth;
## as a workaround it uses a secret word stored in Hook and fails if that is not a param.
## Not highly secure, but good enough for this application.
## NOTE: this is a hook.io script; the "Hook" global is injected by the
## hook.io runtime (Python 2 syntax - print statements).
import requests

# to avoid publicizing API key, store it in your Hook env vars (hook.io/env).
apikey = Hook['env']['nachkey']
# Request parameters supplied by the caller (e.g. an IFTTT Maker webhook).
value = Hook['params']['val']
secret = Hook['params']['sec']
tracker = Hook['params']['tr']
# ditto - store a secret word or phrase in Hook env vars. This prevents open access to this hook.
magicword = Hook['env']['magicword']

# send the request
if secret == magicword:
    url = 'https://nachapp.com/api/trackers/' + str(tracker) + '/measures'
    # NOTE(review): verify=False disables TLS certificate validation -
    # confirm this is intentional before relying on it for anything
    # sensitive.
    r= requests.post(url, auth=(apikey, ''), verify=False, data= {"value":value})
    print r.text
# <nedry>
else:
    print "Ah ah ah! You didn't say the magic word!"
# </nedry>
|
mit
| 6,501,709,542,593,637,000
| 42.740741
| 100
| 0.711262
| false
|
kansanmuisti/datavaalit
|
web/political/views.py
|
1
|
6227
|
import json
import time
from django.template import RequestContext
from django.shortcuts import render_to_response
from social.models import *
from political.models import *
from political.api import *
from geo.models import Municipality
from django.core.urlresolvers import reverse
from django.core.mail import mail_admins
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.db.models import Count
from django.core.cache import cache
from django.template.defaultfilters import slugify
def show_candidates_social_feeds(request):
    """Render summary statistics about candidates' Twitter/Facebook feeds."""
    # Count feeds and collected updates per feed type.
    stats = {}
    for key, feed_type in (("tw", "TW"), ("fb", "FB")):
        stats[key] = {
            "feed_count": CandidateFeed.objects.filter(type=feed_type).count(),
            "update_count": Update.objects.filter(feed__type=feed_type).count(),
        }
    # Timestamp of the most recently refreshed feed.
    last_update = CandidateFeed.objects.filter(
        last_update__isnull=False).order_by('-last_update')[0].last_update
    args = dict(tw=stats["tw"], fb=stats["fb"], last_update=last_update)
    return render_to_response('political/candidate_social_feeds.html', args,
                              context_instance=RequestContext(request))
def candidate_change_request(request):
    """Render the change-request form with all municipalities as JSON."""
    muni_list = [(muni.id, muni.name) for muni in Municipality.objects.all()]
    args = dict(muni_json=json.dumps(muni_list, ensure_ascii=False))
    return render_to_response('political/candidate_change_request.html', args,
                              context_instance=RequestContext(request))
def _calc_submission_history(election, muni=None):
    """Return a JSON string with per-party disclosure-submission history.

    For each party with candidates in *election* (optionally restricted to
    *muni*), builds a time series of (epoch_ms, cumulative submissions).
    Results are cached for an hour, keyed per municipality.
    """
    cache_key = 'party_budget'
    if muni:
        cache_key += '_%d' % muni.pk
    ret = cache.get(cache_key)
    if ret:
        return ret
    budget_list_base = CampaignBudget.objects.filter(candidate__election=election)
    if muni:
        budget_list = budget_list_base.filter(candidate__municipality=muni)
    else:
        budget_list = budget_list_base
    party_list = []
    for p in Party.objects.all():
        d = {'id': p.pk, 'name': p.name, 'code': p.code, 'disclosure_data': []}
        cand_list = Candidate.objects.filter(election=election, party=p)
        if muni:
            cand_list = cand_list.filter(municipality=muni)
        d['num_candidates'] = cand_list.count()
        # Filter out parties with no candidates
        if not d['num_candidates']:
            continue
        party_list.append(d)
    # Get the timestamps from all munis
    timestamps = budget_list_base.order_by('time_submitted').values_list('time_submitted', flat=True).distinct()
    for ts in timestamps:
        # Milliseconds since the epoch, as consumed by the JS charts.
        ts_epoch = int(time.mktime(ts.timetuple()) * 1000)
        for p in party_list:
            nr_submitted = budget_list.filter(candidate__party=p['id'], time_submitted__lte=ts).count()
            p['disclosure_data'].append((ts_epoch, nr_submitted))
    ret = json.dumps(party_list, ensure_ascii=False)
    cache.set(cache_key, ret, 3600)
    return ret
def get_party_budget_data(request):
    """Return per-party budget submission history as JSON.

    Accepts an optional ?municipality=<id> GET parameter; a malformed or
    unknown id yields a 404.
    """
    election = Election.objects.get(year=2012, type='muni')
    muni = None
    if 'municipality' in request.GET:
        try:
            muni = Municipality.objects.get(id=int(request.GET['municipality']))
        except (TypeError, ValueError, Municipality.DoesNotExist):
            # Narrowed from a bare except: only bad input maps to 404;
            # unrelated errors are no longer masked.
            raise Http404()
    ret = _calc_submission_history(election, muni)
    return HttpResponse(ret, mimetype="application/javascript")
def _calc_prebudget_stats():
    """Compute advance-disclosure statistics per municipality.

    Returns a template-context dict with per-municipality counts (serialised
    to JSON for the client side), overall totals and the timestamp of the
    newest submission.
    """
    args = {}
    timestamp = CampaignBudget.objects.order_by('-time_submitted')[0].time_submitted
    election = Election.objects.get(year=2012, type='muni')
    # Find the list of candidates that have submitted the campaign prebudgets
    submitted_list = CampaignBudget.objects.filter(advance=True, candidate__election=election)
    muni_list = Municipality.objects.annotate(num_candidates=Count('candidate')).filter(num_candidates__gt=0).order_by('name')
    muni_dict = {}
    for muni in muni_list:
        muni_dict[muni.pk] = muni
        muni.num_submitted = 0
    # Calculate how many candidates have submitted the budgets per muni.
    # Also figure out when the candidate first submitted the advance disclosure.
    for budget in submitted_list:
        muni = muni_dict[budget.candidate.municipality_id]
        muni.num_submitted += 1
    # Re-use muni_dict for the JSON-serialisable per-muni summaries.
    muni_dict = {}
    total_cands = 0
    total_submitted = 0
    for muni in muni_list:
        m = {'num_submitted': muni.num_submitted,
             'num_candidates': muni.num_candidates,
             'name': muni.name}
        m['slug'] = slugify(muni.name)
        muni_dict[muni.pk] = m
        total_cands += muni.num_candidates
        total_submitted += muni.num_submitted
    args['num_candidates'] = total_cands
    args['num_submitted'] = total_submitted
    args['muni_json'] = json.dumps(muni_dict, indent=None, ensure_ascii=False)
    args['timestamp'] = timestamp
    return args
def show_prebudget_stats(request):
    """Render the campaign pre-budget disclosure statistics page."""
    # The underlying calculation is slow, so serve a cached context when
    # one exists and refresh it at most once per hour.
    args = cache.get('muni_budget_stats')
    if not args:
        args = _calc_prebudget_stats()
        cache.set('muni_budget_stats', args, 3600)
    return render_to_response('political/candidate_budgets.html', args,
                              context_instance=RequestContext(request))
def candidate_change_request_form(request):
    """Handle a submitted candidate change request by e-mailing the admins.

    GET renders the confirmation page; POST validates the posted candidate
    id and request text, mails them to the site admins, and redirects back
    here (POST -> redirect -> GET).
    """
    if request.method == 'GET':
        return render_to_response('political/candidate_change_request_ok.html',
                                  context_instance=RequestContext(request))
    args = request.POST
    try:
        cand_id = int(args['candidate-id'])
        request_str = args['request']
    except:
        # Missing or malformed form fields: send the user back to the form.
        return HttpResponseRedirect(reverse('political.views.candidate_change_request'))
    try:
        cand = Candidate.objects.get(pk=cand_id)
    except Candidate.DoesNotExist:
        return HttpResponseRedirect(reverse('political.views.candidate_change_request'))
    subject = "Change request: %s" % unicode(cand)
    message = """
Info
----
"""
    message += "Candidate: %s\n" % unicode(cand)
    message += "Request:\n%s" % unicode(request_str)
    mail_admins(subject, message, fail_silently=False)
    return HttpResponseRedirect(reverse('political.views.candidate_change_request_form'))
|
agpl-3.0
| -3,575,229,711,720,876,500
| 38.411392
| 126
| 0.666131
| false
|
santegoeds/bfair
|
bfair/_types.py
|
1
|
7481
|
#!/usr/bin/env python
#
# Copyright 2011 Tjerk Santegoeds
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import izip
def _mk_class(name, attrs):
"""Creates a class similar to a namedtuple. These classes are compatible
with SQLAlchemy, however.
"""
class_ = type(name, (object,), {attr: None for attr in attrs})
class_.__slots__ = attrs
def __init__(self, *args, **kwargs):
for attr, val in izip(self.__slots__, args):
setattr(self, attr, val)
for k, v in kwargs.iteritems():
if k not in self.__slots__:
raise ValueError("%s : Invalid attribute" % k)
setattr(self, k, v)
def __repr__(self):
s = ", ".join("=".join((a, repr(getattr(self, a)))) for a in self.__slots__)
s = "".join(("<", type(self).__name__, "(", s, ")>"))
return s
def __str__(self):
return repr(self)
def __len__(self):
return len(self.__slots__)
def __getitem__(self, i):
return getattr(self, self.__slots__[i])
class_.__init__ = __init__
class_.__repr__ = __repr__
class_.__str__ = __str__
class_.__len__ = __len__
class_.__getitem__ = __getitem__
return class_
# ---------------------------------------------------------------------------
# Record types used by the Betfair API wrapper. Each is a lightweight
# attribute holder produced by _mk_class(); the tuple lists the attribute
# names, which is also the positional-constructor and iteration order.
# ---------------------------------------------------------------------------

Market = _mk_class("Market", (
    "marketId", "marketName", "marketType", "marketStatus", "marketTime",
    "menuPath", "eventHierarchy", "betDelay", "exchangeId", "countryISO3",
    "lastRefresh", "numberOfRunners", "numberOfWinners", "matchedSize",
    "bspMarket", "turningInPlay",
))

EventInfo = _mk_class("EventInfo", (
    "eventItems",      # List of BFEvent
    "eventParentId",
    "marketItems",     # List of MarketSummary
    "couponLinks",     # List of CouponLink
))

BFEvent = _mk_class("BFEvent", (
    "eventId", "eventName", "eventTypeId", "menuLevel", "orderIndex",
    "startTime", "timezone",
))

MarketSummary = _mk_class("MarketSummary", (
    "eventTypeId", "marketId", "marketName", "marketType",
    "marketTypeVariant", "menuLevel", "orderIndex", "startTime", "timezone",
    "venue", "betDelay", "numberOfWinners", "eventParentId", "exchangeId",
))

Currency = _mk_class("Currency", (
    "currencyCode", "rateGBP", "minimumStake", "minimumStakeRange",
    "minimumBSPLayLiability",
))
# A Currency renders as its ISO code.
Currency.__str__ = lambda self: self.currencyCode

MarketPrices = _mk_class("MarketPrices", (
    "marketId", "currency", "marketStatus", "delay", "numberOfWinners",
    "marketInfo", "discountAllowed", "marketBaseRate", "lastRefresh",
    "removedRunners", "bspMarket", "runnerPrices",
))

EventType = _mk_class("EventType", (
    "id", "name", "nextMarketId", "exchangeId",
))

CouponLink = _mk_class("CouponLink", (
    "couponId", "couponName",
))

RunnerPrice = _mk_class("RunnerPrice", (
    "selectionId", "sortOrder", "totalAmountMatched", "lastPriceMatched",
    "handicap", "reductionFactor", "vacant", "farBSP", "nearBSP",
    "actualBSP", "bestPricesToLay", "bestPricesToBack", "asianLineId",
))

Price = _mk_class("Price", (
    "price", "amountAvailable", "betType", "depth",
))

Runner = _mk_class("Runner", (
    "asianLineId", "handicap", "name", "selectionId",
))

RemovedRunner = _mk_class("RemovedRunner", (
    "selection_name", "removed_date", "adjustment_factor"
))

BetInfo = _mk_class("BetInfo", (
    "asianLineId", "avgPrice", "betCategoryType", "betId",
    "betPersistenceType", "bspLiability", "cancelledDate", "executedBy",
    "fullMarketName", "handicap", "lapsedDate", "marketId", "marketName",
    "marketType", "marketTypeVariant", "matchedDate", "matchedSize",
    "matches", "placedDate", "price", "profitAndLoss", "remainingSize",
    "requestedSize", "selectionId", "selectionName", "settledDate",
    "voidedDate",
))

PlaceBet = _mk_class("PlaceBet", (
    "asianLineId", "betCategoryType", "betPersistenceType", "betType",
    "bspLiability", "marketId", "price", "selectionId", "size",
))

PlaceBetResult = _mk_class("PlaceBetResult", (
    "averagePriceMatched", "betId", "resultCode", "sizeMatched", "success",
))

UpdateBet = _mk_class("UpdateBet", (
    "betId", "newBetPersistenceType", "newPrice", "newSize",
    "oldBetPersistenceType", "oldPrice", "oldSize",
))

CancelBet = _mk_class("CancelBet", (
    "betId",
))

Match = _mk_class("Match", (
    "betStatus", "matchedDate", "priceMatched", "profitLoss", "settledDate",
    "sizeMatched", "transactionId", "voidedDate",
))

MarketInfo = _mk_class("MarketInfo", (
    'countryISO3', 'discountAllowed', 'eventTypeId', 'lastRefresh',
    'marketBaseRate', 'marketDescription', 'marketDescriptionHasDate',
    'marketDisplayTime', 'marketId', 'marketStatus', 'marketSuspendTime',
    'marketTime', 'marketType', 'marketTypeVariant', 'menuPath',
    'eventHierarchy', 'name', 'numberOfWinners', 'parentEventId',
    'runners',         # List of Runners
    'unit', 'maxUnitValue', 'minUnitValue', 'interval', 'runnersMayBeAdded',
    'timezone', 'licenceId',
    'couponLinks',     # List of CouponLink
    'bspMarket',
))

MarketInfoLite = _mk_class("MarketInfoLite", (
    "marketStatus", "marketSuspendTime", "marketTime", "numberOfRunners",
    "delay", "reconciled", "openForBspBetting",
))

VolumeInfo = _mk_class("VolumeInfo", (
    "odds", "totalMatchedAmount", "totalBspBackMatchedAmount",
    "totalBspMatchedAmount",
))

MarketTradedVolume = _mk_class("MarketTradedVolume", (
    "priceItems", "actualBSP",
))
# The market is reconciled once a starting price has been set.
MarketTradedVolume.reconciled = property(lambda self: self.actualBSP != 0.)

# The factory is module-private scaffolding; drop it from the namespace.
del _mk_class
|
apache-2.0
| 4,085,828,044,796,257,000
| 19.439891
| 84
| 0.515573
| false
|
blazaid/pynetics
|
test/test_stop.py
|
1
|
2293
|
import pickle
from unittest import TestCase
from unittest.mock import Mock
from tempfile import TemporaryFile
from pynetics.stop import StepsNum, FitnessBound
class StepsNumTestCase(TestCase):
    """ Test for the stop condition based on number of iterations. """

    def test_class_is_pickeable(self):
        """ Checks if it's pickeable by writing it into a temporary file. """
        with TemporaryFile() as f:
            pickle.dump(StepsNum(steps=10), f)

    def test_criteria_is_not_met_with_fewer_iterations(self):
        stop_condition = StepsNum(2)
        genetic_algorithm = Mock()
        genetic_algorithm.generation = 0
        self.assertFalse(stop_condition(genetic_algorithm))
        genetic_algorithm.generation = 1
        self.assertFalse(stop_condition(genetic_algorithm))

    def test_criteria_is_met_with_same_or_more_iterations(self):
        # Renamed from "...is_not_met...": this case asserts the condition
        # IS met once the generation reaches the configured step count.
        # The variable names were also previously swapped (the StepsNum was
        # called "genetic_algorithm" and the Mock "population").
        stop_condition = StepsNum(2)
        genetic_algorithm = Mock()
        genetic_algorithm.generation = 2
        self.assertTrue(stop_condition(genetic_algorithm))
        genetic_algorithm.generation = 3
        self.assertTrue(stop_condition(genetic_algorithm))
class FitnessBoundTestCase(TestCase):
    """ If the genetic algorithm obtained a fine enough individual. """

    def test_class_is_pickeable(self):
        """ Checks if it's pickeable by writing it into a temporary file. """
        with TemporaryFile() as f:
            # BUGFIX: this suite previously pickled StepsNum (copy-paste
            # from the class above); it must pickle FitnessBound.
            pickle.dump(FitnessBound(1.0), f)

    def test_criteria_is_not_met_with_a_lower_fitness(self):
        stop_condition = FitnessBound(1.0)
        for fitness in (0.0, 0.25, 0.5, 0.75, 0.9, 0.9999999):
            individual = Mock()
            individual.fitness = Mock(return_value=fitness)
            genetic_algorithm = Mock()
            genetic_algorithm.best = Mock(return_value=individual)
            self.assertFalse(stop_condition(genetic_algorithm))

    def test_criteria_is_met_with_fitness_at_or_above_bound(self):
        # Renamed from "...is_not_met_with_a_higher_fitness": these values
        # meet the bound, so the stop condition must return True.
        stop_condition = FitnessBound(1.0)
        for fitness in (1.0, 1.000000001, 1.25, 1.5, 1.75, 2):
            individual = Mock()
            individual.fitness = Mock(return_value=fitness)
            genetic_algorithm = Mock()
            genetic_algorithm.best = Mock(return_value=individual)
            self.assertTrue(stop_condition(genetic_algorithm))
|
gpl-3.0
| -8,702,983,873,158,111,000
| 37.864407
| 77
| 0.66027
| false
|
tuxskar/caluny
|
caluny/core/admin.py
|
1
|
2473
|
"""Admin site registration models for Caluma"""
from django.contrib import admin
from .models import SemesterDate
from .models import Student, Course, Level, Exam, Timetable, CourseLabel, Degree
from .models import Subject, Teacher, TeachingSubject, School, University
@admin.register(Subject)
class SubjectAdmin(admin.ModelAdmin):
    # Subjects: searchable by code/title, filtered and ordered by degree.
    list_display = ('title', 'degree', 'level', 'description')
    search_fields = ('code', 'title')
    list_filter = ('degree',)
    ordering = ('degree',)
@admin.register(Teacher)
class TeacherAdmin(admin.ModelAdmin):
    # Default admin; no customisation needed.
    pass
@admin.register(TeachingSubject)
class TeachingSubjectAdmin(admin.ModelAdmin):
    # Taught subjects: searchable by subject title, filtered by course,
    # degree title and address.
    list_display = ('subject', 'degree_info', 'course', 'start_date', 'end_date', 'address')
    search_fields = ('subject__title',)
    list_filter = ('course', 'subject__degree__title', 'address')
    ordering = ('course',)
@admin.register(Student)
class StudentAdmin(admin.ModelAdmin):
    # Default admin; no customisation needed.
    pass
@admin.register(Course)
class CourseAdmin(admin.ModelAdmin):
    # Courses: searchable/filterable by language and level.
    search_fields = ('language', 'level', 'label')
    list_filter = ('language', 'level')
    ordering = ('level', 'label',)
@admin.register(Level)
class LevelAdmin(admin.ModelAdmin):
    # Default admin; no customisation needed.
    pass
@admin.register(Exam)
class ExamAdmin(admin.ModelAdmin):
    # Exams: listed chronologically with location and course context.
    list_display = ('title', 'degree_info', 'address', 'date', 'start_time', 'end_time', 'course_info')
    search_fields = ('title',)
    list_filter = ('date', 'address', 't_subject__subject__degree__title')
    ordering = ('date',)
@admin.register(Timetable)
class TimetableAdmin(admin.ModelAdmin):
    # Timetable slots: filterable by weekday, period and degree title.
    list_display = ('t_subject', 'degree_info', 'period', 'week_day', 'start_time', 'end_time')
    search_fields = ('t_subject__subject__title',)
    list_filter = ('week_day', 'period', 't_subject__subject__degree__title')
    ordering = ('t_subject',)
@admin.register(School)
class SchoolAdmin(admin.ModelAdmin):
def get_queryset(self, request):
qs = super(SchoolAdmin, self).queryset(request)
if request.user.is_superuser:
return qs
# return qs.first()filter(owner=request.user)
# return qs.first()
return qs.filter(id=10)
@admin.register(University)
class UniversityAdmin(admin.ModelAdmin):
pass
@admin.register(CourseLabel)
class CourseLabelAdmin(admin.ModelAdmin):
pass
@admin.register(SemesterDate)
class SemesterDateAdmin(admin.ModelAdmin):
pass
@admin.register(Degree)
class DegreeAdmin(admin.ModelAdmin):
pass
|
gpl-2.0
| 8,265,881,122,111,337,000
| 26.175824
| 103
| 0.68702
| false
|
frnhr/django-stdnumfield
|
testproject/testproject/settings.py
|
1
|
3320
|
"""
Django settings for testproject project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "=t4f&0jd786fl_ri1$7z9)!iblzhv1r7f$9p&z4kol9zej*(q@"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"stdnumfield",
"testapp",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "testproject.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
WSGI_APPLICATION = "testproject.wsgi.application"
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
# fmt: off
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": (
"django.contrib.auth.password_validation"
".UserAttributeSimilarityValidator"
),
},
{
"NAME": (
"django.contrib.auth.password_validation"
".MinimumLengthValidator"
),
},
{
"NAME": (
"django.contrib.auth.password_validation"
".CommonPasswordValidator"
),
},
{
"NAME": (
"django.contrib.auth.password_validation"
".NumericPasswordValidator"
),
},
]
# fmt: on
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = "/static/"
|
unlicense
| -7,300,196,891,010,927,000
| 23.592593
| 79
| 0.654217
| false
|
talipovm/terse
|
terse/ReportGenerator/Charges.py
|
1
|
3037
|
from ReportGenerator.Top_ReportGenerator import Top_ReportGenerator
from ReportGenerator.Geom import Geom
import logging
log = logging.getLogger(__name__)
class Charges(Top_ReportGenerator):
def __init__(self,we,parsed):
self.Q = list()
super().__init__(we,parsed)
def prepare_for_report(self):
geom = Geom(self.we,self.parsed).geom
q_Mulliken = self.parsed.last_value('P_charges_Mulliken')
q_Lowdin = self.parsed.last_value('P_charges_Lowdin')
Q = (
(q_Mulliken, 'Mulliken'),
(self.combineH(q_Mulliken, geom), 'no_H'),
(q_Lowdin, 'Lowdin'),
(self.combineH(q_Lowdin, geom), 'no_H'),
)
self.Q = list((q,name) for q,name in Q if q is not None)
self.available = (len(list(self.Q))>0)
def combineH(self, q, geom):
if (geom is None) or (q is None) or ([atom for atom in geom if atom[0]!='H'] is None):
return None
out = [float(x) for x in q]
at_pairs = self.assignH(geom)
for i,j in at_pairs:
out[j] += out[i]
out[i] = 0
return [str(x) for x in out]
def assignH(self, geom):
return [(i,self.find_closest(i,geom)) for i,atom in enumerate(geom) if atom[0]=='H']
def find_closest(self,i,geom):
x,y,z = [float(q) for q in geom[i][1:]]
min_r2 = 1e6
min_j = 0
for j,at2 in enumerate(geom):
if at2[0]=='H' or i==j:
continue
x2,y2,z2 = [float(q) for q in at2[1:]]
r = (x2-x)**2 + (y2-y)**2 + (z2-z)**2
if r < min_r2:
min_r2 = r
min_j = j
return min_j
def charges_button(self, load_command, charges, name):
color_min, color_max = -1.0, 1.0
h_1 = h_2 = ""
if 'no_H' in name:
h_1 = "color atoms cpk; label off ; select not Hydrogen"
h_2 = "select all"
script_on = "; ".join([
"x='%(a)s'",
"DATA '%(p)s @x'",
"%(h_1)s",
"label %%.%(precision)s[%(p)s]",
#"color atoms %(p)s 'rwb' absolute %(col_min)f %(col_max)f",
"%(h_2)s"
]) % {
'a': " ".join(charges),
'p': 'property_' + name,
'precision': str(2),
'col_min': color_min,
'col_max': color_max,
'h_1': h_1,
'h_2': h_2
}
script_on ="; ".join([load_command,script_on])
return self.we.html_button(script_on, name)
def charges_button_off(self):
return self.we.html_button('label off;color atoms cpk', 'Off')
def button_bar(self, load_command):
if not self.available:
return ''
self.add_right('Charges: ')
for q,name in self.Q:
s = self.charges_button(load_command, q, name)
self.add_right(s)
self.add_right(self.charges_button_off())
self.add_right(self.br_tag)
return self.get_cells()
|
mit
| 8,360,541,386,781,269,000
| 30.978947
| 94
| 0.498848
| false
|
oldm/OldMan
|
tests/attr_entry_test.py
|
1
|
1916
|
import unittest
from oldman.model.attribute import Entry
class AttributeEntryTest(unittest.TestCase):
def test_1(self):
entry = Entry()
value1 = 1
self.assertNotEquals(entry.current_value, value1)
entry.current_value = value1
self.assertEquals(entry.current_value, value1)
self.assertTrue(entry.has_changed())
self.assertEquals(entry.diff(), (None, value1))
self.assertTrue(entry.has_changed())
entry.receive_storage_ack()
self.assertFalse(entry.has_changed())
self.assertEquals(entry.current_value, value1)
#TODO: use a more precise exception
with self.assertRaises(Exception):
entry.diff()
value2 = 2
entry.current_value = value2
self.assertEquals(entry.current_value, value2)
self.assertTrue(entry.has_changed())
self.assertEquals(entry.diff(), (value1, value2))
entry.receive_storage_ack()
self.assertFalse(entry.has_changed())
self.assertEquals(entry.current_value, value2)
def test_boolean(self):
entry = Entry()
entry.current_value = False
self.assertTrue(entry.has_changed())
self.assertEquals(entry.diff(), (None, False))
entry.receive_storage_ack()
self.assertFalse(entry.has_changed())
entry.current_value = None
self.assertTrue(entry.has_changed())
self.assertEquals(entry.diff(), (False, None))
def test_clone(self):
value1 = [1]
value2 = {2}
e1 = Entry(value1)
e1.current_value = value2
self.assertEquals(e1.diff(), (value1, value2))
e2 = e1.clone()
self.assertEquals(e1.diff(), e2.diff())
value3 = {"f": "3"}
e1.current_value = value3
self.assertEquals(e1.diff(), (value1, value3))
self.assertEquals(e2.diff(), (value1, value2))
|
bsd-3-clause
| -1,405,675,632,503,099,600
| 27.176471
| 57
| 0.613779
| false
|
mitar/django-pushserver
|
setup.py
|
1
|
1615
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
try:
# Workaround for http://bugs.python.org/issue15881
import multiprocessing
except ImportError:
pass
VERSION = '0.3.4'
if __name__ == '__main__':
setup(
name = 'django-pushserver',
version = VERSION,
description = "Push server for Django based on Leo Ponomarev's Basic HTTP Push Relay Protocol.",
long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Mitar',
author_email = 'mitar.django@tnode.com',
url = 'https://github.com/mitar/django-pushserver',
license = 'AGPLv3',
packages = find_packages(exclude=('*.tests', '*.tests.*', 'tests.*', 'tests')),
package_data = {},
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
include_package_data = True,
zip_safe = False,
dependency_links = [
'https://github.com/mitar/py-hbpush/tarball/0.1.4-mitar#egg=py_hbpush-0.1.4',
'http://github.com/clement/brukva/tarball/bff451511a3cc09cd52bebcf6372a59d36567827#egg=brukva-0.0.1',
],
install_requires = [
'Django>=1.2',
'py_hbpush==0.1.4',
'tornado<3',
],
)
|
agpl-3.0
| -6,908,667,698,903,055,000
| 33.361702
| 113
| 0.570898
| false
|
cdkrot/scanner2web
|
serve.py
|
1
|
1040
|
#!/usr/bin/python3
# Copyright (C) 2016 Sayutin Dmitry.
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; If not, see <http://www.gnu.org/licenses/>.
# !! This is very simple server, mostly for testing.
# !! However you can use in small networks.
# !! Make sure that it will not be visible in global internet.
from wsgiref.simple_server import make_server
import main
import os
try:
# in case of forced stop
os.unlink('image.lock')
except:
pass
httpd = make_server('', 8080, main.application);
print("Serving on port 8080")
httpd.serve_forever()
|
gpl-3.0
| 4,064,060,949,484,477,000
| 31.5
| 70
| 0.742308
| false
|
JohanComparat/nbody-npt-functions
|
bin/bin_SMHMr/create_AGN_catalog_gawk.py
|
1
|
2344
|
# overall python packages
import glob
#import astropy.io.fits as fits
# 2397897 143.540054 0.032711 20.449619 119.370173 9.753314 33.197590 -1.000000 25.191960 40.977921 2 127
# ------ -------- -------- ra dec
import os
import time
import numpy as n
import sys
t0=time.time()
#from astropy.cosmology import FlatLambdaCDM
#import astropy.units as u
#cosmoMD = FlatLambdaCDM(H0=67.77*u.km/u.s/u.Mpc, Om0=0.307115, Ob0=0.048206)
def get_AGN_catalog(env='MD10'):
    """Filter light-cone AGN files from $<env> down to a 10x10 deg window.

    For every ``MDPL2_ROCKSTAR_FluxProj_*_000_AGN.dat`` file under
    ``$<env>/light-cone``, keep the rows whose projected coordinates
    (gawk fields $4, $5) fall inside [-5, 5] x [-5, 5] and write them next to
    the input with a ``.erosita-agn-window-100deg2.gawk.ascii`` suffix.
    The row filtering is delegated to gawk via os.system.
    """
    # gets the file list to add the Xray luminosity
    fileList = n.array(glob.glob(os.path.join(os.environ[env], "light-cone", "MDPL2_ROCKSTAR_FluxProj_*_000_AGN.dat" )))
    fileList.sort()
    for path_2_input in fileList:
        # Fix: was a Python-2 ``print path_2_input`` statement; the
        # single-argument print() form behaves identically under Python 2
        # and matches the print(...) call already used below.
        print(path_2_input)
        path_2_output = os.path.join(os.environ[env], "light-cone", os.path.basename(path_2_input)[:-4]+".erosita-agn-window-100deg2.gawk.ascii")
        gawk_command = """gawk ' {if ( $4 >= -5 && $4<=5 && $5 >= -5 && $5 <=5 ) print $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12}' """ + path_2_input +" > " + path_2_output
        print(gawk_command)
        os.system(gawk_command)
# Run the extraction for the MD10 box, then report wall-clock time.
# (This file is Python 2: the multi-argument print statement below would
# change meaning if parenthesized, so it is left as-is.)
get_AGN_catalog(env='MD10')
print time.time()-t0, "seconds"
# Concatenate the header with every per-file window selection into one catalog.
os.system("""cat header_agn.txt MDPL2_ROCKSTAR_FluxProj_*p_000_AGN.erosita-agn-window-100deg2.gawk.ascii > AGN.erosita-agn-window-100deg2.ascii""")
#os.system("""cat AGN.erosita-agn-window-100deg2.gawk.ascii > AGN.erosita-agn-window-100deg2-withHeader.ascii""")
|
cc0-1.0
| 3,773,433,743,117,480,000
| 45.88
| 189
| 0.566126
| false
|
erdc/proteus
|
proteus/tests/surface_tension/rising_bubble_rans3p/vof_n.py
|
1
|
1873
|
from __future__ import absolute_import
from proteus import *
try:
from .risingBubble import *
from .vof_p import *
except:
from risingBubble import *
from vof_p import *
if timeDiscretization=='vbdf':
timeIntegration = VBDF
timeOrder=2
stepController = Min_dt_cfl_controller
elif timeDiscretization=='flcbdf':
timeIntegration = FLCBDF
#stepController = FLCBDF_controller
stepController = Min_dt_cfl_controller
time_tol = 10.0*vof_nl_atol_res
atol_u = {0:time_tol}
rtol_u = {0:time_tol}
else:
timeIntegration = BackwardEuler_cfl
stepController = Min_dt_cfl_controller
femSpaces = {0:pbasis}
massLumping = False
numericalFluxType = VOF3P.NumericalFlux
conservativeFlux = None
subgridError = VOF3P.SubgridError(coefficients=coefficients,nd=nd)
shockCapturing = VOF3P.ShockCapturing(coefficients,nd,shockCapturingFactor=vof_shockCapturingFactor,lag=vof_lag_shockCapturing)
if EXPLICIT_VOF==True:
fullNewtonFlag = False
timeIntegration = BackwardEuler_cfl
stepController = Min_dt_cfl_controller
else:
fullNewtonFlag = True
multilevelNonlinearSolver = Newton
levelNonlinearSolver = TwoStageNewton
nonlinearSmoother = None
linearSmoother = None
matrix = SparseMatrix
if useOldPETSc:
multilevelLinearSolver = PETSc
levelLinearSolver = PETSc
else:
multilevelLinearSolver = KSP_petsc4py
levelLinearSolver = KSP_petsc4py
if useSuperlu:
multilevelLinearSolver = LU
levelLinearSolver = LU
linear_solver_options_prefix = 'vof_'
nonlinearSolverConvergenceTest = 'rits'
levelNonlinearSolverConvergenceTest = 'rits'
linearSolverConvergenceTest = 'r-true'
tolFac = 0.0
nl_atol_res = vof_nl_atol_res
linTolFac = 0.0
l_atol_res = 0.1*vof_nl_atol_res
useEisenstatWalker = False
maxNonlinearIts = 50
maxLineSearches = 0
|
mit
| -7,382,594,697,588,394,000
| 24.657534
| 130
| 0.732515
| false
|
anhaidgroup/py_stringsimjoin
|
py_stringsimjoin/tests/test_size_filter.py
|
1
|
25474
|
import unittest
from nose.tools import assert_equal, assert_list_equal, nottest, raises
from py_stringmatching.tokenizer.delimiter_tokenizer import DelimiterTokenizer
from py_stringmatching.tokenizer.qgram_tokenizer import QgramTokenizer
import numpy as np
import pandas as pd
from py_stringsimjoin.filter.size_filter import SizeFilter
from py_stringsimjoin.utils.converter import dataframe_column_to_str
from py_stringsimjoin.utils.generic_helper import remove_redundant_attrs
# test SizeFilter.filter_pair method
class FilterPairTestCases(unittest.TestCase):
def setUp(self):
self.dlm = DelimiterTokenizer(delim_set=[' '], return_set=True)
self.qg2 = QgramTokenizer(2)
# tests for JACCARD measure
def test_jac_dlm_08_prune(self):
self.test_filter_pair('aa bb cc dd ee', 'xx yy',
self.dlm, 'JACCARD', 0.8, False, False, True)
def test_jac_dlm_08_pass(self):
self.test_filter_pair('aa bb cc dd ee', 'xx yy aa tt',
self.dlm, 'JACCARD', 0.8, False, False, False)
# tests for COSINE measure
def test_cos_dlm_08_prune(self):
self.test_filter_pair('aa bb cc dd ee', 'xx yy',
self.dlm, 'COSINE', 0.8, False, False, True)
def test_cos_dlm_08_pass(self):
self.test_filter_pair('aa bb cc dd ee', 'xx yy aa tt',
self.dlm, 'COSINE', 0.8, False, False, False)
# tests for DICE measure
def test_dice_dlm_08_prune_lower(self):
self.test_filter_pair('aa bb cc dd ee', 'xx yy uu',
self.dlm, 'DICE', 0.8, False, False, True)
def test_dice_dlm_08_prune_upper(self):
self.test_filter_pair('aa bb cc dd ee', 'cc xx yy aa tt uu ii oo',
self.dlm, 'DICE', 0.8, False, False, True)
def test_dice_dlm_08_pass(self):
self.test_filter_pair('aa bb cc dd ee', 'xx yy aa tt',
self.dlm, 'DICE', 0.8, False, False, False)
# tests for OVERLAP measure
def test_overlap_dlm_prune(self):
self.test_filter_pair('aa bb cc dd ee', 'xx yy',
self.dlm, 'OVERLAP', 3, False, False, True)
def test_overlap_dlm_pass(self):
self.test_filter_pair('aa bb cc dd ee', 'xx yy aa',
self.dlm, 'OVERLAP', 3, False, False, False)
def test_overlap_dlm_empty(self):
self.test_filter_pair('', '',
self.dlm, 'OVERLAP', 1, False, False, True)
def test_overlap_dlm_empty_with_allow_empty(self):
self.test_filter_pair('', '',
self.dlm, 'OVERLAP', 1, True, False, True)
# tests for EDIT_DISTANCE measure
def test_edit_dist_qg2_prune(self):
self.test_filter_pair('abcd', 'cd',
self.qg2, 'EDIT_DISTANCE', 1, False, False, True)
def test_edit_dist_qg2_pass(self):
self.test_filter_pair('abcd', 'cd',
self.qg2, 'EDIT_DISTANCE', 2, False, False, False)
def test_edit_dist_qg2_empty(self):
self.test_filter_pair('', '',
self.qg2, 'EDIT_DISTANCE', 1, False, False, False)
def test_edit_dist_qg2_empty_with_allow_empty(self):
self.test_filter_pair('', '',
self.qg2, 'EDIT_DISTANCE', 1, True, False, False)
def test_edit_dist_qg2_no_padding_empty(self):
self.test_filter_pair('', '', QgramTokenizer(2, padding=False),
'EDIT_DISTANCE', 1, False, False, False)
# test allow_missing flag
def test_size_filter_pass_missing_left(self):
self.test_filter_pair(None, 'fg ty',
self.dlm, 'DICE', 0.8, False, True, False)
def test_size_filter_pass_missing_right(self):
self.test_filter_pair('fg ty', np.NaN,
self.dlm, 'DICE', 0.8, False, True, False)
def test_size_filter_pass_missing_both(self):
self.test_filter_pair(None, np.NaN,
self.dlm, 'DICE', 0.8, False, True, False)
# tests for empty string input
def test_empty_lstring(self):
self.test_filter_pair('ab', '', self.dlm, 'JACCARD', 0.8,
False, False, True)
def test_empty_rstring(self):
self.test_filter_pair('', 'ab', self.dlm, 'JACCARD', 0.8,
False, False, True)
def test_empty_strings(self):
self.test_filter_pair('', '', self.dlm, 'JACCARD', 0.8,
False, False, True)
def test_empty_strings_with_allow_empty(self):
self.test_filter_pair('', '', self.dlm, 'JACCARD', 0.8,
True, False, False)
@nottest
def test_filter_pair(self, lstring, rstring, tokenizer, sim_measure_type,
threshold, allow_empty, allow_missing, expected_output):
size_filter = SizeFilter(tokenizer, sim_measure_type, threshold,
allow_empty, allow_missing)
actual_output = size_filter.filter_pair(lstring, rstring)
assert_equal(actual_output, expected_output)
# test SizeFilter.filter_tables method
class FilterTablesTestCases(unittest.TestCase):
def setUp(self):
self.dlm = DelimiterTokenizer(delim_set=[' '], return_set=True)
self.A = pd.DataFrame([{'id': 1, 'attr':'ab cd ef aa bb'},
{'id': 2, 'attr':''},
{'id': 3, 'attr':'ab'},
{'id': 4, 'attr':'ll oo pp'},
{'id': 5, 'attr':'xy xx zz fg'},
{'id': 6, 'attr':None},
{'id': 7, 'attr':''}])
self.B = pd.DataFrame([{'id': 1, 'attr':'mn'},
{'id': 2, 'attr':'he ll'},
{'id': 3, 'attr':'xy pl ou'},
{'id': 4, 'attr':'aa'},
{'id': 5, 'attr':'fg cd aa ef'},
{'id': 6, 'attr':np.NaN},
{'id': 7, 'attr':' '}])
self.empty_table = pd.DataFrame(columns=['id', 'attr'])
self.default_l_out_prefix = 'l_'
self.default_r_out_prefix = 'r_'
# tests for JACCARD measure
def test_jac_dlm_08(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,3', '5,5'])
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, False, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr'),
expected_pairs)
def test_jac_dlm_08_with_out_attrs(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,3', '5,5'])
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, False, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr',
['attr'], ['attr']),
expected_pairs)
def test_jac_dlm_08_with_out_prefix(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,3', '5,5'])
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, False, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr',
['attr'], ['attr'],
'ltable.', 'rtable.'),
expected_pairs)
# tests for COSINE measure
def test_cos_dlm_08(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,2', '4,3',
'4,5', '5,3', '5,5'])
self.test_filter_tables(self.dlm, 'COSINE', 0.8, False, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr'),
expected_pairs)
# tests for DICE measure
def test_dice_dlm_08(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,2', '4,3',
'4,5', '5,3', '5,5'])
self.test_filter_tables(self.dlm, 'DICE', 0.8, False, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr'),
expected_pairs)
# tests for OVERLAP measure
def test_overlap_dlm_3(self):
expected_pairs = set(['1,3', '1,5', '4,3', '4,5', '5,3', '5,5'])
self.test_filter_tables(self.dlm, 'OVERLAP', 3, False, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr'),
expected_pairs)
# tests for EDIT_DISTANCE measure
def test_edit_distance_qg2_2(self):
A = pd.DataFrame([{'l_id': 1, 'l_attr':'1990'},
{'l_id': 2, 'l_attr':'200'},
{'l_id': 3, 'l_attr':'0'},
{'l_id': 4, 'l_attr':''},
{'l_id': 5, 'l_attr':np.NaN}])
B = pd.DataFrame([{'r_id': 1, 'r_attr':'200155'},
{'r_id': 2, 'r_attr':'19'},
{'r_id': 3, 'r_attr':'188'},
{'r_id': 4, 'r_attr':''},
{'r_id': 5, 'r_attr':np.NaN}])
qg2_tok = QgramTokenizer(2)
expected_pairs = set(['1,1', '1,2', '1,3',
'2,2', '2,3', '2,3', '3,2', '3,3', '3,4',
'4,2', '4,4'])
self.test_filter_tables(qg2_tok, 'EDIT_DISTANCE', 2, False, False,
(A, B,
'l_id', 'r_id', 'l_attr', 'r_attr'),
expected_pairs)
# test allow_missing flag
def test_jac_dlm_08_allow_missing(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,3', '5,5',
'6,1', '6,2', '6,3', '6,4', '6,5',
'6,6', '6,7', '1,6', '2,6', '3,6',
'4,6', '5,6', '7,6'])
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, False, True,
(self.A, self.B,
'id', 'id', 'attr', 'attr'),
expected_pairs)
# test allow_empty flag
def test_jac_dlm_08_allow_empty(self):
expected_pairs = set(['1,5', '2,7', '3,1', '3,4', '4,3', '5,5', '7,7'])
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, True, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr'),
expected_pairs)
# test allow_empty flag with output attributes
def test_jac_dlm_08_allow_empty_with_out_attrs(self):
expected_pairs = set(['1,5', '2,7', '3,1', '3,4', '4,3', '5,5', '7,7'])
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, True, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr',
['attr'], ['attr']),
expected_pairs)
# test with n_jobs above 1
def test_jac_dlm_08_with_njobs_above_1(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,3', '5,5'])
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, False, False,
(self.A, self.B,
'id', 'id', 'attr', 'attr',
['attr'], ['attr'],
'ltable.', 'rtable.', 2),
expected_pairs)
# test filter attribute of type int
def test_jac_qg2_with_filter_attr_of_type_int(self):
A = pd.DataFrame([{'l_id': 1, 'l_attr':1990},
{'l_id': 2, 'l_attr':2000},
{'l_id': 3, 'l_attr':0},
{'l_id': 4, 'l_attr':-1},
{'l_id': 5, 'l_attr':1986}])
B = pd.DataFrame([{'r_id': 1, 'r_attr':2001},
{'r_id': 2, 'r_attr':1992},
{'r_id': 3, 'r_attr':1886},
{'r_id': 4, 'r_attr':2007},
{'r_id': 5, 'r_attr':2012}])
dataframe_column_to_str(A, 'l_attr', inplace=True)
dataframe_column_to_str(B, 'r_attr', inplace=True)
qg2_tok = QgramTokenizer(2, return_set=True)
expected_pairs = set(['1,1', '1,2', '1,3', '1,4', '1,5',
'2,1', '2,2', '2,3', '2,4', '2,5',
'5,1', '5,2', '5,3', '5,4', '5,5'])
self.test_filter_tables(qg2_tok, 'JACCARD', 0.8, False, False,
(A, B,
'l_id', 'r_id', 'l_attr', 'r_attr'),
expected_pairs)
# tests for empty table input
def test_empty_ltable(self):
expected_pairs = set()
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, False, False,
(self.empty_table, self.B,
'id', 'id', 'attr', 'attr'),
expected_pairs)
def test_empty_rtable(self):
expected_pairs = set()
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, False, False,
(self.A, self.empty_table,
'id', 'id', 'attr', 'attr'),
expected_pairs)
def test_empty_tables(self):
expected_pairs = set()
self.test_filter_tables(self.dlm, 'JACCARD', 0.8, False, False,
(self.empty_table, self.empty_table,
'id', 'id', 'attr', 'attr'),
expected_pairs)
@nottest
def test_filter_tables(self, tokenizer, sim_measure_type, threshold,
allow_empty, allow_missing, args, expected_pairs):
size_filter = SizeFilter(tokenizer, sim_measure_type, threshold,
allow_empty, allow_missing)
actual_candset = size_filter.filter_tables(*args)
expected_output_attrs = ['_id']
l_out_prefix = self.default_l_out_prefix
r_out_prefix = self.default_r_out_prefix
# Check for l_out_prefix in args.
if len(args) > 8:
l_out_prefix = args[8]
expected_output_attrs.append(l_out_prefix + args[2])
# Check for r_out_prefix in args.
if len(args) > 9:
r_out_prefix = args[9]
expected_output_attrs.append(r_out_prefix + args[3])
# Check for l_out_attrs in args.
if len(args) > 6:
if args[6]:
l_out_attrs = remove_redundant_attrs(args[6], args[2])
for attr in l_out_attrs:
expected_output_attrs.append(l_out_prefix + attr)
# Check for r_out_attrs in args.
if len(args) > 7:
if args[7]:
r_out_attrs = remove_redundant_attrs(args[7], args[3])
for attr in r_out_attrs:
expected_output_attrs.append(r_out_prefix + attr)
# verify whether the output table has the necessary attributes.
assert_list_equal(list(actual_candset.columns.values),
expected_output_attrs)
actual_pairs = set()
for idx, row in actual_candset.iterrows():
actual_pairs.add(','.join((str(row[l_out_prefix + args[2]]),
str(row[r_out_prefix + args[3]]))))
# verify whether the actual pairs and the expected pairs match.
assert_equal(len(expected_pairs), len(actual_pairs))
common_pairs = actual_pairs.intersection(expected_pairs)
assert_equal(len(common_pairs), len(expected_pairs))
# test SizeFilter.filter_candset method
class FilterCandsetTestCases(unittest.TestCase):
def setUp(self):
self.dlm = DelimiterTokenizer(delim_set=[' '], return_set=True)
self.A = pd.DataFrame([{'l_id': 1, 'l_attr':'ab cd ef aa bb'},
{'l_id': 2, 'l_attr':''},
{'l_id': 3, 'l_attr':'ab'},
{'l_id': 4, 'l_attr':'ll oo pp'},
{'l_id': 5, 'l_attr':'xy xx zz fg'},
{'l_id': 6, 'l_attr': np.NaN}])
self.B = pd.DataFrame([{'r_id': 1, 'r_attr':'mn'},
{'r_id': 2, 'r_attr':'he ll'},
{'r_id': 3, 'r_attr':'xy pl ou'},
{'r_id': 4, 'r_attr':'aa'},
{'r_id': 5, 'r_attr':'fg cd aa ef'},
{'r_id': 6, 'r_attr':None}])
# generate cartesian product A x B to be used as candset
self.A['tmp_join_key'] = 1
self.B['tmp_join_key'] = 1
self.C = pd.merge(self.A[['l_id', 'tmp_join_key']],
self.B[['r_id', 'tmp_join_key']],
on='tmp_join_key').drop('tmp_join_key', 1)
self.empty_A = pd.DataFrame(columns=['l_id', 'l_attr'])
self.empty_B = pd.DataFrame(columns=['r_id', 'r_attr'])
self.empty_candset = pd.DataFrame(columns=['l_id', 'r_id'])
# tests for JACCARD measure
def test_jac_dlm_08(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,3', '5,5'])
self.test_filter_candset(self.dlm, 'JACCARD', 0.8, False, False,
(self.C, 'l_id', 'r_id',
self.A, self.B,
'l_id', 'r_id', 'l_attr', 'r_attr'),
expected_pairs)
# tests for COSINE measure
def test_cos_dlm_08(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,2', '4,3',
'4,5', '5,3', '5,5'])
self.test_filter_candset(self.dlm, 'COSINE', 0.8, False, False,
(self.C, 'l_id', 'r_id',
self.A, self.B,
'l_id', 'r_id', 'l_attr', 'r_attr'),
expected_pairs)
# tests for DICE measure
def test_dice_dlm_08(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,2', '4,3',
'4,5', '5,3', '5,5'])
self.test_filter_candset(self.dlm, 'DICE', 0.8, False, False,
(self.C, 'l_id', 'r_id',
self.A, self.B,
'l_id', 'r_id', 'l_attr', 'r_attr'),
expected_pairs)
# test allow_missing flag
def test_jac_dlm_08_allow_missing(self):
expected_pairs = set(['1,5', '3,1', '3,4', '4,3', '5,5',
'6,1', '6,2', '6,3', '6,4', '6,5',
'6,6', '1,6', '2,6', '3,6', '4,6', '5,6'])
self.test_filter_candset(self.dlm, 'JACCARD', 0.8, False, True,
(self.C, 'l_id', 'r_id',
self.A, self.B,
'l_id', 'r_id', 'l_attr', 'r_attr'),
expected_pairs)
# tests for empty candset input
def test_empty_candset(self):
expected_pairs = set()
self.test_filter_candset(self.dlm, 'JACCARD', 0.8, False, False,
(self.empty_candset, 'l_id', 'r_id',
self.empty_A, self.empty_B,
'l_id', 'r_id', 'l_attr', 'r_attr'),
expected_pairs)
@nottest
def test_filter_candset(self, tokenizer, sim_measure_type, threshold,
allow_empty, allow_missing, args, expected_pairs):
size_filter = SizeFilter(tokenizer, sim_measure_type, threshold,
allow_empty, allow_missing)
actual_output_candset = size_filter.filter_candset(*args)
# verify whether the output table has the necessary attributes.
assert_list_equal(list(actual_output_candset.columns.values),
list(args[0].columns.values))
actual_pairs = set()
for idx, row in actual_output_candset.iterrows():
actual_pairs.add(','.join((str(row[args[1]]), str(row[args[2]]))))
# verify whether the actual pairs and the expected pairs match.
assert_equal(len(expected_pairs), len(actual_pairs))
common_pairs = actual_pairs.intersection(expected_pairs)
assert_equal(len(common_pairs), len(expected_pairs))
class SizeFilterInvalidTestCases(unittest.TestCase):
def setUp(self):
self.A = pd.DataFrame([{'A.id':1, 'A.attr':'hello', 'A.int_attr':5}])
self.B = pd.DataFrame([{'B.id':1, 'B.attr':'world', 'B.int_attr':6}])
self.tokenizer = DelimiterTokenizer(delim_set=[' '], return_set=True)
self.sim_measure_type = 'JACCARD'
self.threshold = 0.8
@raises(TypeError)
def test_invalid_ltable(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables([], self.B, 'A.id', 'B.id',
'A.attr', 'B.attr')
@raises(TypeError)
def test_invalid_rtable(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, [], 'A.id', 'B.id',
'A.attr', 'B.attr')
@raises(AssertionError)
def test_invalid_l_key_attr(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, self.B, 'A.invalid_id', 'B.id',
'A.attr', 'B.attr')
@raises(AssertionError)
def test_invalid_r_key_attr(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, self.B, 'A.id', 'B.invalid_id',
'A.attr', 'B.attr')
@raises(AssertionError)
def test_invalid_l_filter_attr(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, self.B, 'A.id', 'B.id',
'A.invalid_attr', 'B.attr')
@raises(AssertionError)
def test_invalid_r_filter_attr(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, self.B, 'A.id', 'B.id',
'A.attr', 'B.invalid_attr')
@raises(AssertionError)
def test_numeric_l_filter_attr(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, self.B, 'A.id', 'B.id',
'A.int_attr', 'B.attr')
@raises(AssertionError)
def test_numeric_r_filter_attr(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, self.B, 'A.id', 'B.id',
'A.attr', 'B.int_attr')
@raises(AssertionError)
def test_invalid_l_out_attr(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, self.B, 'A.id', 'B.id',
'A.attr', 'B.attr',
['A.invalid_attr'], ['B.attr'])
@raises(AssertionError)
def test_invalid_r_out_attr(self):
size_filter = SizeFilter(self.tokenizer, self.sim_measure_type,
self.threshold)
size_filter.filter_tables(self.A, self.B, 'A.id', 'B.id',
'A.attr', 'B.attr',
['A.attr'], ['B.invalid_attr'])
@raises(TypeError)
def test_invalid_tokenizer(self):
size_filter = SizeFilter([], self.sim_measure_type, self.threshold)
@raises(AssertionError)
def test_invalid_tokenizer_for_edit_distance(self):
size_filter = SizeFilter(self.tokenizer, 'EDIT_DISTANCE', 2)
@raises(TypeError)
def test_invalid_sim_measure_type(self):
size_filter = SizeFilter(self.tokenizer, 'INVALID_TYPE', self.threshold)
@raises(AssertionError)
def test_invalid_threshold(self):
    # A threshold above 1.0 is out of range for this similarity measure.
    size_filter = SizeFilter(self.tokenizer, self.sim_measure_type, 1.2)
|
bsd-3-clause
| 1,673,880,438,268,904,700
| 45.065099
| 81
| 0.459292
| false
|
newmediamedicine/indivo_server_1_0
|
indivo/tests/api/accounts_tests.py
|
1
|
10862
|
import django.test
from indivo.models import *
from indivo.tests.internal_tests import InternalTests, TransactionInternalTests
from django.utils.http import urlencode
from indivo.tests.data import *
from lxml import etree
from indivo.lib import iso8601
def accountStateSetUp(test_cases_instance):
    """Shared fixture builder for the account test classes below.

    Creates one account, one record owned by it, and one message (with a
    single attachment) sent from the account to itself.  Written as a
    module-level helper so both test classes can call it from setUp().
    """
    _self = test_cases_instance
    # NOTE(review): super(_self.__class__, _self) resolves against the
    # *runtime* class, so this recurses if either test class is ever
    # subclassed further -- it only works because neither class is.
    super(_self.__class__, _self).setUp()
    # create an account
    _self.account = _self.createAccount(TEST_ACCOUNTS, 4)
    # create a record for the account
    _self.record = _self.createRecord(TEST_RECORDS, 0, owner=_self.account)
    # create a message, with an attachment
    _self.message = _self.createMessage(TEST_MESSAGES, 2, about_record=_self.record, account=_self.account,
                                        sender=_self.account, recipient=_self.account)
    _self.attachment = _self.createAttachment(TEST_ATTACHMENTS, 0, attachment_num=1, message=_self.message)
class TransactionAccountInternalTests(TransactionInternalTests):
    """Account tests that need real transaction boundaries (the duplicate
    message-id check relies on a database uniqueness violation)."""

    def setUp(self):
        return accountStateSetUp(self)

    def tearDown(self):
        super(TransactionAccountInternalTests,self).tearDown()

    def test_duplicate_message_ids(self):
        """The first delivery of a message_id succeeds (200); resending the
        same message_id must be rejected with a 400."""
        template = TEST_MESSAGES[0]
        payload = urlencode({'message_id': template['message_id'],
                             'body': template['body'],
                             'severity': template['severity'],
                             })
        inbox_url = '/accounts/%s/inbox/' % (self.account.email)
        for expected_status in (200, 400):
            response = self.client.post(inbox_url, payload,
                                        'application/x-www-form-urlencoded')
            self.assertEquals(response.status_code, expected_status)
class AccountInternalTests(InternalTests):
    """Endpoint tests for the /accounts/* API.

    Runs against the fixture built by accountStateSetUp(): one account
    owning one record, with a single message (carrying one attachment)
    sitting in the account's inbox.

    Fix applied: the severity assertion in test_send_message_to_account
    previously reported "subjects do not match" on failure (copy-paste
    error); the message now names the field actually being compared.
    """

    def setUp(self):
        return accountStateSetUp(self)

    def tearDown(self):
        super(AccountInternalTests,self).tearDown()

    def test_forgot_password(self):
        # Only POST is allowed on the forgot-password endpoint.
        url = '/accounts/%s/forgot-password'%(self.account.email)
        bad_methods = ['get', 'put', 'delete']
        self.check_unsupported_http_methods(bad_methods, url)
        response = self.client.post(url)
        self.assertEquals(response.status_code, 200)

    def test_create_accounts(self):
        # A fresh account id plus the required secrets should be accepted.
        email = "mymail2@mail.ma"
        response = self.client.post('/accounts/', urlencode({'account_id' : email,'full_name':'fl','contact_email':'contactemail','password':'pass','primary_secret_p':'primaryp','secondary_secret_p':'secondaryp'}),'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)

    def test_change_password(self):
        # Changing the password requires the current (old) password.
        response = self.client.post('/accounts/%s/authsystems/password/change'%(self.account.email), urlencode({'old':TEST_ACCOUNTS[4]['password'],'new':"newpassword"}),'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)

    def test_set_password(self):
        response = self.client.post('/accounts/%s/authsystems/password/set'%(self.account.email), urlencode({'password':'newpassword'}),'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)

    def test_set_username(self):
        response = self.client.post('/accounts/%s/authsystems/password/set-username'%(self.account.email), urlencode({'username':'newusername'}),'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)

    def test_add_authsystem_to_accnt(self):
        # Attach an external auth system ('mychildrens') to the account.
        response = self.client.post('/accounts/%s/authsystems/password/set'%(self.account.email), urlencode({'username':'someuser','password':'somepassword','system':'mychildrens'}),'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)

    def test_check_secret(self):
        # Both the primary (in the URL) and secondary (query param) secrets
        # are validated in one call.
        response = self.client.get('/accounts/%s/check-secrets/%s?secondary_secret=%s'%(self.account.email,self.account.primary_secret,self.account.secondary_secret))
        self.assertEquals(response.status_code, 200)

    def test_get_accountinfo(self):
        response = self.client.get('/accounts/%s'%(self.account.email))
        self.assertEquals(response.status_code, 200)

    def test_add_archive(self):
        response = self.client.post('/accounts/%s/inbox/%s/archive'%(self.account.email,self.message.id))
        self.assertEquals(response.status_code, 200)

    def test_accept_attachment(self):
        response = self.client.post('/accounts/%s/inbox/%s/attachments/%s/accept'%(self.account.email,self.message.id,self.attachment.attachment_num))
        self.assertEquals(response.status_code, 200)

    def test_get_message(self):
        response = self.client.get('/accounts/%s/inbox/%s'%(self.account.email,self.message.id))
        self.assertEquals(response.status_code, 200)
        # Insure that dates are in the proper format
        xml = etree.fromstring(response.content)
        received_at = xml.findtext('received_at')
        self.assertNotRaises(ValueError, self.validateIso8601, received_at)
        read_at = xml.findtext('read_at')
        self.assertNotRaises(ValueError, self.validateIso8601, read_at)
        archived_at = xml.findtext('archived_at')
        self.assertNotRaises(ValueError, self.validateIso8601, archived_at)
        # We should have gotten one attachment.
        # Insure that we didn't get a doc id, as the doc wasn't saved
        attachments = xml.findall('attachment')
        self.assertEqual(len(attachments), 1)
        attachment_doc_id = attachments[0].get('doc_id')
        self.assertEqual(attachment_doc_id, None)
        # Now save the document and try again
        # We should get a doc id
        self.attachment.save_as_document(self.account)
        response = self.client.get('/accounts/%s/inbox/%s'%(self.account.email, self.message.id))
        self.assertEquals(response.status_code, 200)
        xml = etree.fromstring(response.content)
        attachment = xml.find('attachment')
        attachment_doc_id = attachment.get('doc_id')
        self.assertNotEqual(attachment_doc_id, None)

    def test_get_inbox(self):
        response = self.client.get('/accounts/%s/inbox/'%(self.account.email))
        self.assertEquals(response.status_code, 200)
        # Insure that dates are in the proper format for every message
        messages = etree.fromstring(response.content)
        for message in messages.iterfind('Message'):
            received_at = message.findtext('received_at')
            self.assertNotRaises(ValueError, self.validateIso8601, received_at)
            read_at = message.findtext('read_at')
            self.assertNotRaises(ValueError, self.validateIso8601, read_at)
            archived_at = message.findtext('archived_at')
            self.assertNotRaises(ValueError, self.validateIso8601, archived_at)

    def test_send_message_to_account(self):
        msg = TEST_MESSAGES[0]
        data = {'message_id': msg['message_id'],
                'subject':msg['subject'],
                'body':msg['body'],
                'severity':msg['severity'],
                }
        response = self.client.post('/accounts/%s/inbox/'%(self.account.email), urlencode(data),'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)
        root = etree.XML(response.content)
        # check returned data
        message_id = root.get('id')
        self.assertTrue(message_id is not None and len(message_id) > 0, "Did not find message ID")
        subject = root.find('subject').text
        self.assertEqual(subject, msg['subject'], "subjects do not match")
        severity = root.find('severity').text
        # Fixed: failure message previously said "subjects do not match".
        self.assertEqual(severity, msg['severity'], "severities do not match")

    def test_update_account_info(self):
        response = self.client.post('/accounts/%s/info-set'%(self.account.email), urlencode({'contact_email':self.account.contact_email,'full_name':self.account.full_name}),'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)

    def test_init_account(self):
        # Initialization needs the primary secret in the URL and the
        # secondary secret in the POST body.
        new_acct = self.createUninitializedAccount(TEST_ACCOUNTS, 0)
        url = '/accounts/%s/initialize/%s'%(new_acct.email,new_acct.primary_secret)
        response = self.client.post(url, urlencode({'secondary_secret':new_acct.secondary_secret}), 'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)

    def test_get_notifications(self):
        response = self.client.get('/accounts/%s/notifications/'%(self.account.email))
        self.assertEquals(response.status_code, 200)
        # Insure that dates are in the proper format
        notifications = etree.fromstring(response.content)
        for n in notifications.iterfind('Notification'):
            received_at = n.findtext('received_at')
            self.assertNotRaises(ValueError, self.validateIso8601, received_at)

    def test_get_permissions(self):
        response = self.client.get('/accounts/%s/permissions/'%(self.account.email))
        self.assertEquals(response.status_code, 200)

    def test_get_primary_secret(self):
        response = self.client.get('/accounts/%s/primary-secret'%(self.account.email))
        self.assertEquals(response.status_code, 200)

    def test_get_records(self):
        response = self.client.get('/accounts/%s/records/?status=active'%(self.account.email))
        self.assertEquals(response.status_code, 200)

    def test_account_reset(self):
        # Only POST is allowed on the reset endpoint.
        url = '/accounts/%s/reset'%(self.account.email)
        bad_methods = ['get', 'put', 'delete']
        self.check_unsupported_http_methods(bad_methods, url)
        response = self.client.post(url)
        self.assertEquals(response.status_code, 200)

    def test_get_secret(self):
        response = self.client.get('/accounts/%s/secret'%(self.account.email))
        self.assertEquals(response.status_code, 200)

    def test_resend_secret(self):
        # Only POST is allowed on the secret-resend endpoint.
        url = '/accounts/%s/secret-resend'%(self.account.email)
        bad_methods = ['get', 'put', 'delete']
        self.check_unsupported_http_methods(bad_methods, url)
        response = self.client.post(url)
        self.assertEquals(response.status_code, 200)

    def test_set_account_state(self):
        response = self.client.post('/accounts/%s/set-state'%(self.account.email), urlencode({'state':'active'}), 'application/x-www-form-urlencoded')
        self.assertEquals(response.status_code, 200)

    def test_search(self):
        response = self.client.get('/accounts/search?fullname=%s&contact_email=%s'%(self.account.full_name,self.account.email))
        self.assertEquals(response.status_code, 200)
|
gpl-3.0
| 2,913,130,053,172,983,300
| 45.418803
| 250
| 0.662125
| false
|
jpanganiban/simplestruct
|
test_simplestruct.py
|
1
|
4368
|
# -*- coding: utf-8 -*-
import unittest
import simplestruct as structs
class StructTestCase(unittest.TestCase):
    """Exercises the simplestruct type system: scalar fields, nested
    Struct fields, typed lists ([T]), and typed tuples ((T1, T2)).

    The recurring pattern: correct value type accepted, wrong type raises
    TypeError, and None is always an accepted value.
    """

    def test_class(self):
        # A Struct subclass with no fields is still constructible.
        class Type(structs.Struct):
            pass
        self.assertIsNotNone(Type)
        self.assertIsNotNone(Type())

    # --- scalar fields via constructor -------------------------------

    def test_instantiation_correct_value_type(self):
        class Type(structs.Struct):
            id = int
        self.assertEqual(Type(id=1).id, 1)

    def test_instantiation_incorrect_value_type(self):
        class Type(structs.Struct):
            id = int
        with self.assertRaises(TypeError):
            Type(id="1")

    def test_instantiation_None_type(self):
        class Type(structs.Struct):
            id = int
        self.assertIsNone(Type(id=None).id)

    # --- scalar fields via attribute assignment ----------------------

    def test_instance_setter_correct_value_type(self):
        class Type(structs.Struct):
            id = int
        t = Type()
        t.id = 1
        self.assertEqual(t.id, 1)

    def test_instance_setter_None(self):
        class Type(structs.Struct):
            id = int
        t = Type()
        t.id = None
        self.assertIsNone(t.id)

    def test_instance_setter_incorrect_value_type(self):
        class Type(structs.Struct):
            id = int
        t = Type()
        with self.assertRaises(TypeError):
            t.id = "1"

    # --- nested Struct fields ----------------------------------------

    def test_compound_type_correct_value_type(self):
        class Address(structs.Struct):
            country = str
        class Person(structs.Struct):
            address = Address
        a = Address(country="PH")
        p = Person(address=a)
        self.assertEqual(p.address, a)
        self.assertEqual(p.address.country, "PH")

    def test_compound_type_incorrect_value_type(self):
        class Address(structs.Struct):
            country = str
        class Person(structs.Struct):
            address = Address
        with self.assertRaises(TypeError):
            Person(address="PH")

    def test_compound_type_None_value(self):
        class Address(structs.Struct):
            country = str
        class Person(structs.Struct):
            address = Address
        self.assertIsNone(Person(address=None).address)

    # --- typed list fields: [T] checks every element -----------------

    def test_composite_correct_value_type(self):
        class Person(structs.Struct):
            sibling_names = [str]
        Person(sibling_names=["peter", "mary", "john"])

    def test_composite_incorrect_value_type(self):
        class Person(structs.Struct):
            sibling_names = [str]
        with self.assertRaises(TypeError):
            Person(sibling_names=[1, 2, 3])

    def test_composite_no_type(self):
        # An untyped list ([]) accepts heterogeneous elements.
        class Person(structs.Struct):
            stuff = []
        Person(stuff=[1, "jesse", 2, {}])

    def test_composite_struct_correct_value_type(self):
        class Address(structs.Struct):
            country = str
        class Person(structs.Struct):
            addresses = [Address]
        Person(addresses=[Address(country="PH"), Address(country="US")])

    def test_composite_struct_incorrect_value_type(self):
        class Address(structs.Struct):
            country = str
        class Person(structs.Struct):
            addresses = [Address]
        with self.assertRaises(TypeError):
            Person(addresses=["PH", "US"])

    # --- typed tuple fields: (T1, T2) checks positionally ------------

    def test_type_tuple_correct_value_type(self):
        class Person(structs.Struct):
            age_and_gender = (int, str)
        Person(age_and_gender=(24, "M"))

    def test_type_tuple_incorrect_value_type(self):
        class Person(structs.Struct):
            age_and_gender = (int, str)
        with self.assertRaises(TypeError):
            Person(age_and_gender=(24, 12312))

    def test_type_tuple_None_value(self):
        class Person(structs.Struct):
            age_and_gender = (int, str)
        self.assertIsNone(Person(age_and_gender=None).age_and_gender)

    def test_type_tuple_partial_None_value(self):
        # None is accepted per-position inside a typed tuple, too.
        class Person(structs.Struct):
            age_and_gender = (int, str)
        self.assertEqual(
            Person(age_and_gender=(21, None)).age_and_gender,
            (21, None)
        )

    def test_type_list_just_list(self):
        # list([str]) behaves like an untyped list here: mixed element
        # types pass through unchecked.
        class Person(structs.Struct):
            stuff = list([str])
        self.assertEqual(
            Person(stuff=["a", "b", 1]).stuff,
            ["a", "b", 1]
        )
if __name__ == "__main__":
    # Allow running this test module directly: python test_simplestruct.py
    unittest.main()
|
mit
| -5,055,025,621,369,701,000
| 22.234043
| 72
| 0.572344
| false
|
rwl/PyCIM
|
CIM14/IEC61970/Dynamics/BlockUsageOutputReference.py
|
1
|
4616
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61970.Core.IdentifiedObject import IdentifiedObject
class BlockUsageOutputReference(IdentifiedObject):
    """Used at instance level to tie the input of a referenced block to the output of another referenced block. Note that typically an input is only tied to an output of another block at the same PowerSystemResource, but there is no restriction to do so. If the output is implicitly tied to an input, then an instance of this class is not required. The sole purpose of this class is to explicitly tie the input of other blocks at the power system instance level.

    Each reference is bidirectional: the property setters below maintain
    both sides of the association (unlinking the old counterpart before
    linking the new one), so statement order inside them is significant.
    """

    def __init__(self, block0=None, BlockUsageInputReference=None, metaBlockOutput0=None, *args, **kw_args):
        """Initialises a new 'BlockUsageOutputReference' instance.

        @param block0:
        @param BlockUsageInputReference: Can cross BlockUsage objects.
        @param metaBlockOutput0:
        """
        # Backing fields start empty; assignment then goes through the
        # property setters so the reverse references get wired up.
        self._block0 = None
        self.block0 = block0

        self._BlockUsageInputReference = []
        self.BlockUsageInputReference = [] if BlockUsageInputReference is None else BlockUsageInputReference

        self._metaBlockOutput0 = None
        self.metaBlockOutput0 = metaBlockOutput0

        super(BlockUsageOutputReference, self).__init__(*args, **kw_args)

    # Class-level metadata describing this class's plain attributes and
    # references (consumed by the generated CIM infrastructure).
    _attrs = []
    _attr_types = {}
    _defaults = {}
    _enums = {}
    _refs = ["block0", "BlockUsageInputReference", "metaBlockOutput0"]
    _many_refs = ["BlockUsageInputReference"]

    def getblock0(self):
        return self._block0

    def setblock0(self, value):
        # Detach from the previous block0's reverse list, then attach to
        # the new one (avoiding duplicates).
        if self._block0 is not None:
            filtered = [x for x in self.block0.BlockUsageOutputReference if x != self]
            self._block0._BlockUsageOutputReference = filtered

        self._block0 = value
        if self._block0 is not None:
            if self not in self._block0._BlockUsageOutputReference:
                self._block0._BlockUsageOutputReference.append(self)

    block0 = property(getblock0, setblock0)

    def getBlockUsageInputReference(self):
        """Can cross BlockUsage objects.
        """
        return self._BlockUsageInputReference

    def setBlockUsageInputReference(self, value):
        # Replace the whole collection: unlink every current member, then
        # claim every member of the new list.
        for x in self._BlockUsageInputReference:
            x.BlockUsageOutputReference = None
        for y in value:
            y._BlockUsageOutputReference = self
        self._BlockUsageInputReference = value

    BlockUsageInputReference = property(getBlockUsageInputReference, setBlockUsageInputReference)

    def addBlockUsageInputReference(self, *BlockUsageInputReference):
        for obj in BlockUsageInputReference:
            obj.BlockUsageOutputReference = self

    def removeBlockUsageInputReference(self, *BlockUsageInputReference):
        for obj in BlockUsageInputReference:
            obj.BlockUsageOutputReference = None

    def getmetaBlockOutput0(self):
        return self._metaBlockOutput0

    def setmetaBlockOutput0(self, value):
        # Same detach/attach dance as setblock0, against the meta output's
        # blockUsageOutputReference0 reverse list.
        if self._metaBlockOutput0 is not None:
            filtered = [x for x in self.metaBlockOutput0.blockUsageOutputReference0 if x != self]
            self._metaBlockOutput0._blockUsageOutputReference0 = filtered

        self._metaBlockOutput0 = value
        if self._metaBlockOutput0 is not None:
            if self not in self._metaBlockOutput0._blockUsageOutputReference0:
                self._metaBlockOutput0._blockUsageOutputReference0.append(self)

    metaBlockOutput0 = property(getmetaBlockOutput0, setmetaBlockOutput0)
|
mit
| -6,126,209,542,516,952,000
| 42.961905
| 468
| 0.717071
| false
|
O-T-L/PyOptimization
|
parameters/indicator/pf.py
|
1
|
3441
|
"""
Copyright (C) 2014, 申瑞珉 (Ruimin Shen)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import os
import math
import numpy
import pyotl.utility
import pyoptimization.utility
# Module-level cache: CSV path -> converted Pareto front.  Fronts are
# immutable reference data, so caching them for the process lifetime is safe.
_pfs = {}

def get_pf(path):
    """Load (and memoize) the Pareto front stored as CSV at ``path``.

    Returns the pyotl ``VectorVector_Real`` conversion of the front;
    repeated calls for the same path return the cached object instead of
    re-reading the file.  ``ndmin=2`` keeps single-row fronts 2-D.
    """
    # EAFP: a single dict lookup replaces the original check-then-read pair.
    try:
        return _pfs[path]
    except KeyError:
        pf = numpy.loadtxt(path, ndmin=2)
        _pf = pyotl.utility.PyListList2VectorVector_Real(pf.tolist())
        _pfs[path] = _pf
        return _pf
def pf(config, properties, folder='PF'):
    """Return the reference Pareto front for the problem in ``properties``.

    Many problems share a front file (e.g. ZDT1 and ZDT4 both use
    ZDT1.csv; WFG4-9 all use WFG4's fronts).  If no explicit rule matches,
    fall back to ``<problem>.csv`` and then
    ``<problem>/<objectives>.csv`` before giving up.

    Fixes: regex patterns are now raw strings ('\\d' in a plain string is a
    deprecated escape), and the bare ``except:`` clauses no longer swallow
    SystemExit/KeyboardInterrupt.

    Raises:
        Exception: carrying (properties, folder) when no front file matches.
    """
    path = os.path.join(pyoptimization.utility.get_pyoptimization_path(config), 'Data', folder)
    problem = properties['problem']
    if re.match(r'^ZDT[14]$', problem):
        return get_pf(os.path.join(path, 'ZDT1.csv'))
    elif re.match(r'^ZDT[26]$', problem):
        return get_pf(os.path.join(path, 'ZDT2.csv'))
    elif re.match(r'^ZDT3$', problem):
        return get_pf(os.path.join(path, 'ZDT3.csv'))
    elif re.match(r'^ZDT5$', problem):
        return get_pf(os.path.join(path, 'ZDT5.csv'))
    elif re.match(r'^UF(\d|10)$', problem):
        return get_pf(os.path.join(path, problem + '.csv'))
    elif problem == 'DTLZ1':
        return get_pf(os.path.join(path, 'DTLZ1', str(properties['objectives']) + '.csv'))
    elif re.match(r'^DTLZ[234]$', problem):
        # For 2 objectives the DTLZ2/3/4 front coincides with DTLZ5's.
        if properties['objectives'] == 2:
            return get_pf(os.path.join(path, 'DTLZ5', str(properties['objectives']) + '.csv'))
        else:
            return get_pf(os.path.join(path, 'DTLZ2', str(properties['objectives']) + '.csv'))
    elif re.match(r'^DTLZ[56]$', problem):
        return get_pf(os.path.join(path, 'DTLZ5', str(properties['objectives']) + '.csv'))
    elif problem == 'DTLZ7':
        return get_pf(os.path.join(path, 'DTLZ7', str(properties['objectives']) + '.csv'))
    elif re.match(r'^DTLZ[56]I$', problem):
        return get_pf(os.path.join(path, 'DTLZ5I', '%u_%u.csv' % (properties['objectives'], properties['DTLZ_I'])))
    elif re.match(r'^ScaledDTLZ[234]$', problem):
        # Scale each objective column by successive powers of 10.
        # NOTE(review): unlike every other branch this returns a raw numpy
        # array rather than the pyotl conversion -- confirm callers accept
        # both representations.
        pf = numpy.loadtxt(os.path.join(path, 'DTLZ2', str(properties['objectives']) + '.csv'))
        for i, col in enumerate(pf.T):
            col *= math.pow(10, i)
        return pf
    elif re.match(r'^WFG[1-3]$', problem):
        return get_pf(os.path.join(path, problem, str(properties['objectives']) + '.csv'))
    elif re.match(r'^WFG[4-9]$', problem):
        return get_pf(os.path.join(path, 'WFG4', str(properties['objectives']) + '.csv'))
    # Best-effort fallbacks for problems without an explicit rule above.
    try:
        return get_pf(os.path.join(path, problem + '.csv'))
    except Exception:
        try:
            return get_pf(os.path.join(path, problem, str(properties['objectives']) + '.csv'))
        except Exception:
            raise Exception(properties, folder)
|
lgpl-3.0
| -4,490,129,093,049,562,600
| 43.038462
| 115
| 0.63901
| false
|
FroggedTV/grenouillebot
|
bot/module/irc/irc_bot.py
|
1
|
4075
|
import logging, traceback
from time import sleep
from datetime import datetime, timedelta
import threading
from irc.bot import SingleServerIRCBot
class IrcBot(SingleServerIRCBot):
    """The module of the bot responsible for the Twitch (IRC) chat.

    Listens to all pub messages and forwards to the command processor if a
    command is detected.  Provides methods to send a message to the current
    channel or a private message to a user.

    Attributes:
        enabled: Is the module enabled.
        grenouille_bot: master class.
        sanitizer: threading.Timer that re-checks the connection every 60
            seconds (reconnecting if no ping was seen for 7 minutes).
        last_ping: last time of the ping-pong.

    Fix applied: the original code did
    ``self.sanitizer = threading.Timer(60, self.sanitize).start()`` --
    Timer.start() returns None, so the timer object was lost and could
    never be cancelled; after stop() the sanitizer would keep firing and
    eventually reconnect the bot.  The timer is now stored and cancelled
    in stop().
    """

    def __init__(self, grenouille_bot):
        self.grenouille_bot = grenouille_bot
        config = grenouille_bot.config['IRC']

        self.enabled = config.getboolean('enabled', False)
        if not self.enabled:
            return

        nickname = config['nickname']
        server = 'irc.chat.twitch.tv'
        password = config['token']
        port = 6667
        SingleServerIRCBot.__init__(self, [(server, port, password)],
                                    nickname, nickname)
        self.channel = config['channel']
        self.last_ping = datetime.utcnow()
        self.sanitizer = self._schedule_sanitize()

    def _schedule_sanitize(self):
        """Start a one-shot 60 s timer for the next sanitize() run and
        return it (so it can be cancelled later)."""
        timer = threading.Timer(60, self.sanitize)
        timer.start()
        return timer

    def stop(self):
        """Cancel the sanitizer timer and disconnect from the server."""
        # getattr: the timer only exists when the module was enabled.
        timer = getattr(self, 'sanitizer', None)
        if timer is not None:
            timer.cancel()
        self.disconnect()

    def on_welcome(self, connection, e):
        """Called when the bot is connected to the IRC server. Setup config."""
        connection.join(self.channel)
        connection.set_rate_limit(0.5)
        # Twitch capabilities: WHISPER etc. (commands) and message metadata
        # such as user-type badges (tags).
        connection.send_raw('CAP REQ :twitch.tv/commands')
        connection.send_raw('CAP REQ :twitch.tv/tags')
        logging.info('Connected to channel.')

    def sanitize(self):
        """Reconnect if the Twitch IRC server kicked the bot."""
        # No ping for 7 minutes means the connection is considered dead.
        if datetime.utcnow() - self.last_ping > timedelta(minutes=7):
            self.last_ping = datetime.utcnow()
            logging.warning('Sanitizer detected lost connection. Reconnecting.')
            self.connection.disconnect()
            sleep(10)
            self.connection.reconnect()
        # Re-arm for the next check, keeping the timer cancellable.
        self.sanitizer = self._schedule_sanitize()

    def on_ping(self, connection, e):
        """Save last ping for sanitizer check."""
        self.last_ping = datetime.utcnow()

    def on_pubmsg(self, connection, e):
        """Called for every public message. Detect if a command is called."""
        message = e.arguments[0]
        sender = e.source.nick
        tags = {key_value["key"]: key_value["value"] for key_value in e.tags}
        is_admin = False
        if 'user-type' in tags:
            is_admin = bool(tags['user-type'])
        # The broadcaster (channel name minus the leading '#') is admin too.
        if sender == self.channel[1:]:
            is_admin = True

        # Check we have a message starting with ! from a user
        if (len(message) <= 1
                or message[0] != '!'
                or message[1] == ' '):
            return

        processor = self.grenouille_bot.command_processor
        try:
            processor.process(command_line=message[1:],
                              sender=sender,
                              is_admin=is_admin)
        except Exception:
            logging.exception('Impossible to execute command.')

    def send_msg(self, line):
        """Send a message to the IRC channel.

        Do nothing if there is an exception (like disconnected)

        Args:
            line: The line to print.
        """
        try:
            self.connection.privmsg(self.channel, line)
        except Exception:
            # TODO: Queue the message to resend later.
            logging.exception('Impossible to send the message.')

    def send_private_msg(self, user, line):
        """Send a private message to a IRC user in the channel.

        Args:
            user: user to send the message to.
            line: message to send.
        """
        self.send_msg('/w {} {}'.format(user, line))
|
mit
| -4,768,008,839,170,572,000
| 33.745614
| 80
| 0.576933
| false
|
YeEmrick/learning
|
stanford-tensorflow/assignments/01/q1_sol.py
|
1
|
5009
|
"""
Solution to simple exercises to get used to TensorFlow API
You should thoroughly test your code.
TensorFlow's official documentation should be your best friend here
CS20: "TensorFlow for Deep Learning Research"
cs20.stanford.edu
Created by Chip Huyen (chiphuyen@cs.stanford.edu)
"""
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'

import tensorflow as tf

# NOTE(review): this script targets the TensorFlow 1.x API
# (InteractiveSession, tf.diag, tf.matrix_determinant); those symbols were
# removed or renamed in TF 2.x, so it needs tf.compat.v1 there.
sess = tf.InteractiveSession()
###############################################################################
# 1a: Create two random 0-d tensors x and y of any distribution.
# Create a TensorFlow object that returns x + y if x > y, and x - y otherwise.
# Hint: look up tf.cond()
# I do the first problem for you
###############################################################################

x = tf.random_uniform([])  # Empty array as shape creates a scalar.
y = tf.random_uniform([])
out = tf.cond(tf.greater(x, y), lambda: tf.add(x, y), lambda: tf.subtract(x, y))

###############################################################################
# 1b: Create two 0-d tensors x and y randomly selected from the range [-1, 1).
# Return x + y if x < y, x - y if x > y, 0 otherwise.
# Hint: Look up tf.case().
###############################################################################

x = tf.random_uniform([], -1, 1, dtype=tf.float32)
y = tf.random_uniform([], -1, 1, dtype=tf.float32)
# exclusive=True: the two predicates cannot both hold, so tf.case may
# evaluate them in any order; the default covers x == y.
out = tf.case({tf.less(x, y): lambda: tf.add(x, y),
               tf.greater(x, y): lambda: tf.subtract(x, y)},
              default=lambda: tf.constant(0.0), exclusive=True)

###############################################################################
# 1c: Create the tensor x of the value [[0, -2, -1], [0, 1, 2]]
# and y as a tensor of zeros with the same shape as x.
# Return a boolean tensor that yields Trues if x equals y element-wise.
# Hint: Look up tf.equal().
###############################################################################

x = tf.constant([[0, -2, -1], [0, 1, 2]])
y = tf.zeros_like(x)
out = tf.equal(x, y)

###############################################################################
# 1d: Create the tensor x of value
# [29.05088806,  27.61298943,  31.19073486,  29.35532951,
#  30.97266006,  26.67541885,  38.08450317,  20.74983215,
#  34.94445419,  34.45999146,  29.06485367,  36.01657104,
#  27.88236427,  20.56035233,  30.20379066,  29.51215172,
#  33.71149445,  28.59134293,  36.05556488,  28.66994858].
# Get the indices of elements in x whose values are greater than 30.
# Hint: Use tf.where().
# Then extract elements whose values are greater than 30.
# Hint: Use tf.gather().
###############################################################################

x = tf.constant([29.05088806, 27.61298943, 31.19073486, 29.35532951,
                 30.97266006, 26.67541885, 38.08450317, 20.74983215,
                 34.94445419, 34.45999146, 29.06485367, 36.01657104,
                 27.88236427, 20.56035233, 30.20379066, 29.51215172,
                 33.71149445, 28.59134293, 36.05556488, 28.66994858])
indices = tf.where(x > 30)
out = tf.gather(x, indices)

###############################################################################
# 1e: Create a diagnoal 2-d tensor of size 6 x 6 with the diagonal values of 1,
# 2, ..., 6
# Hint: Use tf.range() and tf.diag().
###############################################################################

values = tf.range(1, 7)
out = tf.diag(values)

###############################################################################
# 1f: Create a random 2-d tensor of size 10 x 10 from any distribution.
# Calculate its determinant.
# Hint: Look at tf.matrix_determinant().
###############################################################################

m = tf.random_normal([10, 10], mean=10, stddev=1)
out = tf.matrix_determinant(m)

###############################################################################
# 1g: Create tensor x with value [5, 2, 3, 5, 10, 6, 2, 3, 4, 2, 1, 1, 0, 9].
# Return the unique elements in x
# Hint: use tf.unique(). Keep in mind that tf.unique() returns a tuple.
###############################################################################

x = tf.constant([5, 2, 3, 5, 10, 6, 2, 3, 4, 2, 1, 1, 0, 9])
unique_values, indices = tf.unique(x)

###############################################################################
# 1h: Create two tensors x and y of shape 300 from any normal distribution,
# as long as they are from the same distribution.
# Use tf.cond() to return:
# - The mean squared error of (x - y) if the average of all elements in (x - y)
#   is negative, or
# - The sum of absolute value of all elements in the tensor (x - y) otherwise.
# Hint: see the Huber loss function in the lecture slides 3.
###############################################################################

x = tf.random_normal([300], mean=5, stddev=1)
y = tf.random_normal([300], mean=5, stddev=1)
average = tf.reduce_mean(x - y)
def f1(): return tf.reduce_mean(tf.square(x - y))
def f2(): return tf.reduce_sum(tf.abs(x - y))
out = tf.cond(average < 0, f1, f2)
|
apache-2.0
| -4,323,550,377,722,852,000
| 43.336283
| 80
| 0.498503
| false
|
Noirello/bonsai
|
.ci/delay.py
|
1
|
5135
|
import os
import subprocess
import xmlrpc.server as rpc
import time
import sys
import multiprocessing as mp
try:
import pydivert
except ImportError:
pass
class LinuxDelayHandler:
    """Adds/removes artificial latency on LDAP traffic (source ports 389
    and 636) using the Linux ``tc`` traffic-control tool."""

    @staticmethod
    def get_interface_name():
        """ Get the first interface name that is not the localhost. """
        net = os.listdir("/sys/class/net")
        net.remove("lo")
        # Prefer eth0 when present, otherwise take whatever comes first.
        if "eth0" in net:
            return "eth0"
        return net[0]

    def set_delay(self, sec, duration=10.0):
        """ Set network delay, return with the call's result. """
        # NOTE(review): `duration` is accepted for parity with the other
        # handlers but never used here -- the delay persists until
        # remove_delay() is called.
        try:
            # Root prio qdisc, with a netem delay attached to band 3.
            subprocess.check_call(
                ["tc", "qdisc", "add", "dev", self.get_interface_name(),
                 "root", "handle", "1:", "prio"]
            )
            subprocess.check_call(
                ["tc", "qdisc", "add", "dev", self.get_interface_name(),
                 "parent", "1:3", "handle", "30:", "netem", "delay",
                 ("%dmsec" % (sec * 1000))]
            )
            # Route LDAP (389) and LDAPS (636) source-port traffic into
            # the delayed band.
            for port in ("389", "636"):
                subprocess.check_call(
                    ["tc", "filter", "add", "dev", self.get_interface_name(),
                     "protocol", "ip", "parent", "1:0", "u32",
                     "match", "ip", "sport", port, "0xffff",
                     "flowid", "1:3"]
                )
            return True
        except subprocess.CalledProcessError:
            return False

    def remove_delay(self):
        """ Remove network delay. """
        # Deleting the root qdisc tears down the whole delay setup.
        try:
            subprocess.check_call(
                ["tc", "qdisc", "del", "dev", self.get_interface_name(), "root"]
            )
            return True
        except subprocess.CalledProcessError:
            return False
class MacDelayHandler:
    """Adds/removes artificial latency on LDAP traffic (ports 389 and 636)
    on macOS, via a pf dummynet anchor and dnctl pipe."""

    def set_delay(self, sec, duration=10.0):
        # NOTE(review): `duration` is accepted for parity with the other
        # handlers but never used -- the delay stays until remove_delay().
        # Append a dummynet anchor ("mop") to the stock pf configuration.
        with open("/etc/pf.conf") as fp:
            conf = fp.read()
        conf += '\ndummynet-anchor "mop"\nanchor "mop"\n'
        rule = (
            "dummynet in quick proto tcp from any to any port {389, 636} pipe 1\n"
        )
        try:
            subprocess.run(
                ["pfctl", "-f", "-"], input=conf, encoding="utf-8", check=True
            )
            subprocess.run(
                ["pfctl", "-a", "mop", "-f", "-"],
                input=rule,
                encoding="utf-8",
                check=True,
            )
            # Pipe 1 carries the actual delay in milliseconds.
            subprocess.check_call(
                ["dnctl", "pipe", "1", "config", "delay", "%d" % int(sec * 1000)]
            )
            return True
        except subprocess.CalledProcessError:
            return False

    def remove_delay(self):
        # Flush the dummynet pipes and reload the original pf config.
        try:
            subprocess.check_call(["dnctl", "-q", "flush"])
            subprocess.check_call(["pfctl", "-f", "/etc/pf.conf"])
            return True
        except subprocess.CalledProcessError:
            return False
class WinDelayHandler:
    """Adds/removes artificial latency on LDAP traffic on Windows, by
    holding packets in a pydivert (WinDivert) loop in a child process.

    Fix applied: the WinDivert filter previously matched only port 389,
    while the Linux and macOS handlers delay both LDAP (389) and LDAPS
    (636); port 636 is now included for parity.
    """

    proc = None  # child process running the packet-delaying loop

    def delay(self, sec, duration=10.0):
        """Hold every matching packet for ``sec`` seconds, for at most
        ``duration`` seconds overall (runs in the child process)."""
        netfil = ("tcp.DstPort == 389 or tcp.SrcPort == 389 or "
                  "tcp.DstPort == 636 or tcp.SrcPort == 636")
        start = time.time()
        with pydivert.WinDivert(netfil) as divert:
            for packet in divert:
                time.sleep(sec)
                divert.send(packet)
                if time.time() - start >= duration:
                    break

    def set_delay(self, sec, duration=10.0):
        """ Set network delay, return with the call's result. """
        self.proc = mp.Process(target=self.delay, args=(sec, duration))
        self.proc.start()
        return True

    def remove_delay(self):
        """ Remove network delay, return with the call's result. """
        if self.proc is not None and self.proc.is_alive():
            self.proc.terminate()
        return True
if __name__ == "__main__":
    # Select the platform-specific delay handler.
    if sys.platform == "win32":
        handler = WinDelayHandler()
    elif sys.platform == "darwin":
        handler = MacDelayHandler()
    else:
        handler = LinuxDelayHandler()
        # Fix network collapse on certain Linux distros.
        # This must only run on Linux: the Mac/Windows handlers do not
        # implement get_interface_name(), and the ``ip`` binary does not
        # exist there, so running it unconditionally crashed the script
        # on win32/darwin before it could serve any RPC calls.
        subprocess.call(
            ["ip", "link", "set", handler.get_interface_name(), "qlen", "1000"]
        )
    # Expose set_delay/remove_delay over XML-RPC for remote test drivers.
    # NOTE(review): binds on all interfaces with no authentication; anyone
    # who can reach port 8000 can inject network delay -- confirm this is
    # only ever run inside a trusted test network.
    server = rpc.SimpleXMLRPCServer(("0.0.0.0", 8000))
    server.register_function(handler.set_delay, "set_delay")
    server.register_function(handler.remove_delay, "remove_delay")
    server.serve_forever()
|
mit
| 509,284,448,804,491,300
| 29.02924
| 86
| 0.427848
| false
|
Karspexet/Karspexet
|
karspexet/ticket/tasks.py
|
1
|
1458
|
import logging
from django.conf import settings
from django.core.mail import send_mail
from django.contrib.sites.models import Site
from django.template.loader import render_to_string
logger = logging.getLogger(__file__)
def send_ticket_email_to_customer(reservation, email, name=None):
    """Email the customer a link to the tickets for *reservation*.

    Deliberately a silent no-op when *email* is falsy: this runs inside the
    payment flow, and raising here would crash a transaction whose card
    charge has most likely already gone through. Customers who supplied no
    usable address get another chance to send the reservation information
    from the reservation-detail page.

    When *name* is falsy, the email address doubles as the display name.
    """
    if not email:
        return

    recipient = f'{name or email} <{email}>'
    current_site = Site.objects.get_current()
    ticket_url = f'https://{current_site.domain}{reservation.get_absolute_url()}'
    message = render_to_string('reservation_email.txt', {
        'reservation': reservation,
        'url': ticket_url,
    })
    send_mail(
        'Dina biljetter till Kårspexet',
        message,
        settings.TICKET_EMAIL_FROM_ADDRESS,
        [recipient],
        fail_silently=False,
    )
|
mit
| 7,935,162,058,853,896,000
| 33.690476
| 118
| 0.693892
| false
|
jonparrott/nox
|
nox/command.py
|
1
|
3925
|
# Copyright 2016 Alethea Katherine Flowers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from typing import Any, Iterable, List, Optional, Sequence, Union
import py
from nox.logger import logger
from nox.popen import popen
class CommandFailed(Exception):
    """Raised when an executed command returns a non-success status code."""

    def __init__(self, reason: Optional[str] = None) -> None:
        # ``reason: str = None`` was an invalid implicit-Optional annotation
        # (rejected by PEP 484 strict type checkers); the default really is None.
        super().__init__(reason)
        #: Human-readable explanation of the failure (or None).
        self.reason = reason
def which(program: str, paths: Optional[List[str]]) -> str:
    """Resolve *program* to the full path of its executable.

    The directories in *paths* are searched first (when given); otherwise
    the system PATH is consulted. Raises ``CommandFailed`` when the
    program cannot be located anywhere.
    """
    if paths:
        found = py.path.local.sysfind(program, paths=paths)
        if found:
            return found.strpath

    found = py.path.local.sysfind(program)
    if found:
        return found.strpath

    logger.error("Program {} not found.".format(program))
    raise CommandFailed("Program {} not found".format(program))
def _clean_env(env: Optional[dict]) -> Optional[dict]:
if env is None:
return None
clean_env = {}
# Ensure systemroot is passed down, otherwise Windows will explode.
clean_env["SYSTEMROOT"] = os.environ.get("SYSTEMROOT", "")
clean_env.update(env)
return clean_env
def run(
    args: Sequence[str],
    *,
    env: Optional[dict] = None,
    silent: bool = False,
    paths: Optional[List[str]] = None,
    success_codes: Optional[Iterable[int]] = None,
    log: bool = True,
    external: bool = False,
    **popen_kws: Any
) -> Union[str, bool]:
    """Run a command-line program.

    Args:
        args: The command and its arguments, e.g. ``["pytest", "-x"]``.
        env: Environment for the child process; passed through ``_clean_env``.
        silent: When True, the captured output is returned instead of ``True``.
        paths: Directories searched first when resolving the executable; also
            used to decide whether the tool is "external" to them.
        success_codes: Exit codes treated as success (defaults to ``[0]``).
        log: Whether to log the full command line before running it.
        external: True allows tools outside *paths*; the string ``"error"``
            makes such tools a hard failure; False (default) only warns.
        **popen_kws: Extra keyword arguments forwarded to ``popen``.

    Returns:
        The command's output when ``silent`` is True, otherwise ``True``.

    Raises:
        CommandFailed: If the executable cannot be found, the exit code is not
            in ``success_codes``, or an external tool is disallowed.
        KeyboardInterrupt: Re-raised after logging when the user interrupts.
    """
    if success_codes is None:
        success_codes = [0]

    cmd, args = args[0], args[1:]
    full_cmd = "{} {}".format(cmd, " ".join(args))
    cmd_path = which(cmd, paths)

    if log:
        logger.info(full_cmd)

    # The tool counts as "external" when it resolves outside every supplied
    # search path (i.e. it is not installed into the virtualenv's bin dirs).
    is_external_tool = paths is not None and not any(
        cmd_path.startswith(path) for path in paths
    )
    if is_external_tool:
        if external == "error":
            logger.error(
                "Error: {} is not installed into the virtualenv, it is located at {}. "
                "Pass external=True into run() to explicitly allow this.".format(
                    cmd, cmd_path
                )
            )
            raise CommandFailed("External program disallowed.")
        elif external is False:
            logger.warning(
                "Warning: {} is not installed into the virtualenv, it is located at {}. This might cause issues! "
                "Pass external=True into run() to silence this message.".format(
                    cmd, cmd_path
                )
            )

    env = _clean_env(env)

    try:
        return_code, output = popen(
            [cmd_path] + list(args), silent=silent, env=env, **popen_kws
        )

        if return_code not in success_codes:
            logger.error(
                "Command {} failed with exit code {}{}".format(
                    full_cmd, return_code, ":" if silent else ""
                )
            )

            # With silent=True the output was captured rather than streamed,
            # so surface it on stderr before failing.
            if silent:
                sys.stderr.write(output)

            raise CommandFailed("Returned code {}".format(return_code))

        if output:
            logger.output(output)

        return output if silent else True
    except KeyboardInterrupt:
        logger.error("Interrupted...")
        raise
|
apache-2.0
| -6,808,519,644,293,755,000
| 28.074074
| 118
| 0.593376
| false
|
pombredanne/discern
|
tastypie/resources.py
|
1
|
94958
|
from __future__ import with_statement
import sys
import logging
import warnings
import django
from django.conf import settings
try:
from django.conf.urls import patterns, url
except ImportError: # Django < 1.4
from django.conf.urls.defaults import patterns, url
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned, ValidationError
from django.core.urlresolvers import NoReverseMatch, reverse, resolve, Resolver404, get_script_prefix
from django.core.signals import got_request_exception
from django.db import transaction
from django.db.models.sql.constants import QUERY_TERMS
from django.http import HttpResponse, HttpResponseNotFound, Http404
from django.utils.cache import patch_cache_control, patch_vary_headers
from tastypie.authentication import Authentication
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.bundle import Bundle
from tastypie.cache import NoCache
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.exceptions import NotFound, BadRequest, InvalidFilterError, HydrationError, InvalidSortError, ImmediateHttpResponse, Unauthorized
from tastypie import fields
from tastypie import http
from tastypie.paginator import Paginator
from tastypie.serializers import Serializer
from tastypie.throttle import BaseThrottle
from tastypie.utils import is_valid_jsonp_callback_value, dict_strip_unicode_keys, trailing_slash
from tastypie.utils.mime import determine_format, build_content_type
from tastypie.validation import Validation
try:
set
except NameError:
from sets import Set as set
# copycompat deprecated in Django 1.5. If python version is at least 2.5, it
# is safe to use the native python copy module.
# The ``copy`` module became function-friendly in Python 2.5 and
# ``copycompat`` was added in post 1.1.1 Django (r11901)..
if sys.version_info >= (2,5):
try:
from copy import deepcopy
except ImportError:
from django.utils.copycompat import deepcopy
else:
# For python older than 2.5, we must be running a version of Django before
# copycompat was deprecated.
try:
from django.utils.copycompat import deepcopy
except ImportError:
from copy import deepcopy
# If ``csrf_exempt`` isn't present, stub it.
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
def csrf_exempt(func):
return func
# Django 1.5 has moved this constant up one level.
try:
from django.db.models.constants import LOOKUP_SEP
except ImportError:
from django.db.models.sql.constants import LOOKUP_SEP
class NOT_AVAILABLE:
    """Sentinel whose string form explains that no data could be resolved."""

    def __str__(self):
        return 'No such data is available.'
class ResourceOptions(object):
    """
    A configuration class for ``Resource``.

    Provides sane defaults and the logic needed to augment these settings with
    the internal ``class Meta`` used on ``Resource`` subclasses.
    """
    serializer = Serializer()
    authentication = Authentication()
    authorization = ReadOnlyAuthorization()
    cache = NoCache()
    throttle = BaseThrottle()
    validation = Validation()
    paginator_class = Paginator
    # HTTP verbs enabled by default; copied to the list/detail variants below
    # in ``__new__`` when those are left as None.
    allowed_methods = ['get', 'post', 'put', 'delete', 'patch']
    list_allowed_methods = None
    detail_allowed_methods = None
    # Default page size, overridable site-wide via settings.API_LIMIT_PER_PAGE.
    limit = getattr(settings, 'API_LIMIT_PER_PAGE', 20)
    max_limit = 1000
    api_name = None
    resource_name = None
    urlconf_namespace = None
    default_format = 'application/json'
    filtering = {}
    ordering = []
    object_class = None
    queryset = None
    fields = []
    excludes = []
    include_resource_uri = True
    include_absolute_url = False
    always_return_data = False
    # Key under which list responses nest their objects.
    collection_name = 'objects'
    # Attribute used to build detail URIs (usually the primary key).
    detail_uri_name = 'pk'

    def __new__(cls, meta=None):
        """
        Build an options object, layering every public attribute of the
        user's ``class Meta`` (when given) over these class-level defaults.
        """
        overrides = {}

        # Handle overrides.
        if meta:
            for override_name in dir(meta):
                # No internals please.
                if not override_name.startswith('_'):
                    overrides[override_name] = getattr(meta, override_name)

        allowed_methods = overrides.get('allowed_methods', ['get', 'post', 'put', 'delete', 'patch'])

        # Unspecified list/detail method sets inherit ``allowed_methods``.
        if overrides.get('list_allowed_methods', None) is None:
            overrides['list_allowed_methods'] = allowed_methods

        if overrides.get('detail_allowed_methods', None) is None:
            overrides['detail_allowed_methods'] = allowed_methods

        # Instantiate a fresh subclass carrying the overrides so one
        # resource's Meta cannot leak into another's shared defaults.
        return object.__new__(type('ResourceOptions', (cls,), overrides))
class DeclarativeMetaclass(type):
    """
    Metaclass for ``Resource`` that gathers declared fields (including those
    inherited from parent resources), attaches the ``ResourceOptions`` built
    from the inner ``Meta``, and auto-derives a ``resource_name`` when one
    is not provided.
    """
    def __new__(cls, name, bases, attrs):
        attrs['base_fields'] = {}
        declared_fields = {}

        # Inherit any fields from parent(s).
        try:
            parents = [b for b in bases if issubclass(b, Resource)]
            # Simulate the MRO.
            parents.reverse()

            for p in parents:
                parent_fields = getattr(p, 'base_fields', {})

                for field_name, field_object in parent_fields.items():
                    attrs['base_fields'][field_name] = deepcopy(field_object)
        except NameError:
            # ``Resource`` itself is still being defined on the first pass,
            # so there is nothing to inherit yet.
            pass

        for field_name, obj in attrs.items():
            # Look for ``dehydrated_type`` instead of doing ``isinstance``,
            # which can break down if Tastypie is re-namespaced as something
            # else.
            if hasattr(obj, 'dehydrated_type'):
                field = attrs.pop(field_name)
                declared_fields[field_name] = field

        attrs['base_fields'].update(declared_fields)
        attrs['declared_fields'] = declared_fields
        new_class = super(DeclarativeMetaclass, cls).__new__(cls, name, bases, attrs)
        opts = getattr(new_class, 'Meta', None)
        new_class._meta = ResourceOptions(opts)

        if not getattr(new_class._meta, 'resource_name', None):
            # No ``resource_name`` provided. Attempt to auto-name the resource.
            class_name = new_class.__name__
            name_bits = [bit for bit in class_name.split('Resource') if bit]
            resource_name = ''.join(name_bits).lower()
            new_class._meta.resource_name = resource_name

        if getattr(new_class._meta, 'include_resource_uri', True):
            if not 'resource_uri' in new_class.base_fields:
                new_class.base_fields['resource_uri'] = fields.CharField(readonly=True)
        elif 'resource_uri' in new_class.base_fields and not 'resource_uri' in attrs:
            # ``include_resource_uri`` turned off: drop the inherited field
            # unless this class explicitly re-declared it.
            del(new_class.base_fields['resource_uri'])

        for field_name, field_object in new_class.base_fields.items():
            if hasattr(field_object, 'contribute_to_class'):
                field_object.contribute_to_class(new_class, field_name)

        return new_class
class Resource(object):
"""
Handles the data, request dispatch and responding to requests.
Serialization/deserialization is handled "at the edges" (i.e. at the
beginning/end of the request/response cycle) so that everything internally
is Python data structures.
This class tries to be non-model specific, so it can be hooked up to other
data sources, such as search results, files, other data, etc.
"""
__metaclass__ = DeclarativeMetaclass
def __init__(self, api_name=None):
self.fields = deepcopy(self.base_fields)
if not api_name is None:
self._meta.api_name = api_name
def __getattr__(self, name):
if name in self.fields:
return self.fields[name]
raise AttributeError(name)
def wrap_view(self, view):
"""
Wraps methods so they can be called in a more functional way as well
as handling exceptions better.
Note that if ``BadRequest`` or an exception with a ``response`` attr
are seen, there is special handling to either present a message back
to the user or return the response traveling with the exception.
"""
@csrf_exempt
def wrapper(request, *args, **kwargs):
try:
callback = getattr(self, view)
response = callback(request, *args, **kwargs)
# Our response can vary based on a number of factors, use
# the cache class to determine what we should ``Vary`` on so
# caches won't return the wrong (cached) version.
varies = getattr(self._meta.cache, "varies", [])
if varies:
patch_vary_headers(response, varies)
if self._meta.cache.cacheable(request, response):
if self._meta.cache.cache_control():
# If the request is cacheable and we have a
# ``Cache-Control`` available then patch the header.
patch_cache_control(response, **self._meta.cache.cache_control())
if request.is_ajax() and not response.has_header("Cache-Control"):
# IE excessively caches XMLHttpRequests, so we're disabling
# the browser cache here.
# See http://www.enhanceie.com/ie/bugs.asp for details.
patch_cache_control(response, no_cache=True)
return response
except (BadRequest, fields.ApiFieldError), e:
data = {"error": e.args[0] if getattr(e, 'args') else ''}
return self.error_response(request, data, response_class=http.HttpBadRequest)
except ValidationError, e:
data = {"error": e.messages}
return self.error_response(request, data, response_class=http.HttpBadRequest)
except Exception, e:
if hasattr(e, 'response'):
return e.response
# A real, non-expected exception.
# Handle the case where the full traceback is more helpful
# than the serialized error.
if settings.DEBUG and getattr(settings, 'TASTYPIE_FULL_DEBUG', False):
raise
# Re-raise the error to get a proper traceback when the error
# happend during a test case
if request.META.get('SERVER_NAME') == 'testserver':
raise
# Rather than re-raising, we're going to things similar to
# what Django does. The difference is returning a serialized
# error message.
return self._handle_500(request, e)
return wrapper
def _handle_500(self, request, exception):
import traceback
import sys
the_trace = '\n'.join(traceback.format_exception(*(sys.exc_info())))
response_class = http.HttpApplicationError
response_code = 500
NOT_FOUND_EXCEPTIONS = (NotFound, ObjectDoesNotExist, Http404)
if isinstance(exception, NOT_FOUND_EXCEPTIONS):
response_class = HttpResponseNotFound
response_code = 404
if settings.DEBUG:
data = {
"error_message": unicode(exception),
"traceback": the_trace,
}
return self.error_response(request, data, response_class=response_class)
# When DEBUG is False, send an error message to the admins (unless it's
# a 404, in which case we check the setting).
send_broken_links = getattr(settings, 'SEND_BROKEN_LINK_EMAILS', False)
if not response_code == 404 or send_broken_links:
log = logging.getLogger('django.request.tastypie')
log.error('Internal Server Error: %s' % request.path, exc_info=True,
extra={'status_code': response_code, 'request': request})
if django.VERSION < (1, 3, 0):
from django.core.mail import mail_admins
subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), request.path)
try:
request_repr = repr(request)
except:
request_repr = "Request repr() unavailable"
message = "%s\n\n%s" % (the_trace, request_repr)
mail_admins(subject, message, fail_silently=True)
# Send the signal so other apps are aware of the exception.
got_request_exception.send(self.__class__, request=request)
# Prep the data going out.
data = {
"error_message": getattr(settings, 'TASTYPIE_CANNED_ERROR', "Sorry, this request could not be processed. Please try again later."),
"traceback": the_trace,
}
return self.error_response(request, data, response_class=response_class)
def _build_reverse_url(self, name, args=None, kwargs=None):
"""
A convenience hook for overriding how URLs are built.
See ``NamespacedModelResource._build_reverse_url`` for an example.
"""
return reverse(name, args=args, kwargs=kwargs)
def base_urls(self):
"""
The standard URLs this ``Resource`` should respond to.
"""
return [
url(r"^(?P<resource_name>%s)%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('dispatch_list'), name="api_dispatch_list"),
url(r"^(?P<resource_name>%s)/schema%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('get_schema'), name="api_get_schema"),
url(r"^(?P<resource_name>%s)/set/(?P<%s_list>\w[\w/;-]*)%s$" % (self._meta.resource_name, self._meta.detail_uri_name, trailing_slash()), self.wrap_view('get_multiple'), name="api_get_multiple"),
url(r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)%s$" % (self._meta.resource_name, self._meta.detail_uri_name, trailing_slash()), self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
def override_urls(self):
"""
Deprecated. Will be removed by v1.0.0. Please use ``prepend_urls`` instead.
"""
return []
def prepend_urls(self):
"""
A hook for adding your own URLs or matching before the default URLs.
"""
return []
@property
def urls(self):
"""
The endpoints this ``Resource`` responds to.
Mostly a standard URLconf, this is suitable for either automatic use
when registered with an ``Api`` class or for including directly in
a URLconf should you choose to.
"""
urls = self.prepend_urls()
overridden_urls = self.override_urls()
if overridden_urls:
warnings.warn("'override_urls' is a deprecated method & will be removed by v1.0.0. Please rename your method to ``prepend_urls``.")
urls += overridden_urls
urls += self.base_urls()
urlpatterns = patterns('',
*urls
)
return urlpatterns
def determine_format(self, request):
"""
Used to determine the desired format.
Largely relies on ``tastypie.utils.mime.determine_format`` but here
as a point of extension.
"""
return determine_format(request, self._meta.serializer, default_format=self._meta.default_format)
def serialize(self, request, data, format, options=None):
"""
Given a request, data and a desired format, produces a serialized
version suitable for transfer over the wire.
Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``.
"""
options = options or {}
if 'text/javascript' in format:
# get JSONP callback name. default to "callback"
callback = request.GET.get('callback', 'callback')
if not is_valid_jsonp_callback_value(callback):
raise BadRequest('JSONP callback name is invalid.')
options['callback'] = callback
return self._meta.serializer.serialize(data, format, options)
def deserialize(self, request, data, format='application/json'):
"""
Given a request, data and a format, deserializes the given data.
It relies on the request properly sending a ``CONTENT_TYPE`` header,
falling back to ``application/json`` if not provided.
Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``.
"""
deserialized = self._meta.serializer.deserialize(data, format=request.META.get('CONTENT_TYPE', 'application/json'))
return deserialized
def alter_list_data_to_serialize(self, request, data):
"""
A hook to alter list data just before it gets serialized & sent to the user.
Useful for restructuring/renaming aspects of the what's going to be
sent.
Should accommodate for a list of objects, generally also including
meta data.
"""
return data
def alter_detail_data_to_serialize(self, request, data):
"""
A hook to alter detail data just before it gets serialized & sent to the user.
Useful for restructuring/renaming aspects of the what's going to be
sent.
Should accommodate for receiving a single bundle of data.
"""
return data
def alter_deserialized_list_data(self, request, data):
"""
A hook to alter list data just after it has been received from the user &
gets deserialized.
Useful for altering the user data before any hydration is applied.
"""
return data
def alter_deserialized_detail_data(self, request, data):
"""
A hook to alter detail data just after it has been received from the user &
gets deserialized.
Useful for altering the user data before any hydration is applied.
"""
return data
def dispatch_list(self, request, **kwargs):
"""
A view for handling the various HTTP methods (GET/POST/PUT/DELETE) over
the entire list of resources.
Relies on ``Resource.dispatch`` for the heavy-lifting.
"""
return self.dispatch('list', request, **kwargs)
def dispatch_detail(self, request, **kwargs):
"""
A view for handling the various HTTP methods (GET/POST/PUT/DELETE) on
a single resource.
Relies on ``Resource.dispatch`` for the heavy-lifting.
"""
return self.dispatch('detail', request, **kwargs)
def dispatch(self, request_type, request, **kwargs):
"""
Handles the common operations (allowed HTTP method, authentication,
throttling, method lookup) surrounding most CRUD interactions.
"""
allowed_methods = getattr(self._meta, "%s_allowed_methods" % request_type, None)
if 'HTTP_X_HTTP_METHOD_OVERRIDE' in request.META:
request.method = request.META['HTTP_X_HTTP_METHOD_OVERRIDE']
request_method = self.method_check(request, allowed=allowed_methods)
method = getattr(self, "%s_%s" % (request_method, request_type), None)
if method is None:
raise ImmediateHttpResponse(response=http.HttpNotImplemented())
self.is_authenticated(request)
self.throttle_check(request)
# All clear. Process the request.
request = convert_post_to_put(request)
response = method(request, **kwargs)
# Add the throttled request.
self.log_throttled_access(request)
# If what comes back isn't a ``HttpResponse``, assume that the
# request was accepted and that some action occurred. This also
# prevents Django from freaking out.
if not isinstance(response, HttpResponse):
return http.HttpNoContent()
return response
def remove_api_resource_names(self, url_dict):
"""
Given a dictionary of regex matches from a URLconf, removes
``api_name`` and/or ``resource_name`` if found.
This is useful for converting URLconf matches into something suitable
for data lookup. For example::
Model.objects.filter(**self.remove_api_resource_names(matches))
"""
kwargs_subset = url_dict.copy()
for key in ['api_name', 'resource_name']:
try:
del(kwargs_subset[key])
except KeyError:
pass
return kwargs_subset
def method_check(self, request, allowed=None):
"""
Ensures that the HTTP method used on the request is allowed to be
handled by the resource.
Takes an ``allowed`` parameter, which should be a list of lowercase
HTTP methods to check against. Usually, this looks like::
# The most generic lookup.
self.method_check(request, self._meta.allowed_methods)
# A lookup against what's allowed for list-type methods.
self.method_check(request, self._meta.list_allowed_methods)
# A useful check when creating a new endpoint that only handles
# GET.
self.method_check(request, ['get'])
"""
if allowed is None:
allowed = []
request_method = request.method.lower()
allows = ','.join(map(str.upper, allowed))
if request_method == "options":
response = HttpResponse(allows)
response['Allow'] = allows
raise ImmediateHttpResponse(response=response)
if not request_method in allowed:
response = http.HttpMethodNotAllowed(allows)
response['Allow'] = allows
raise ImmediateHttpResponse(response=response)
return request_method
def is_authenticated(self, request):
"""
Handles checking if the user is authenticated and dealing with
unauthenticated users.
Mostly a hook, this uses class assigned to ``authentication`` from
``Resource._meta``.
"""
# Authenticate the request as needed.
auth_result = self._meta.authentication.is_authenticated(request)
if isinstance(auth_result, HttpResponse):
raise ImmediateHttpResponse(response=auth_result)
if not auth_result is True:
raise ImmediateHttpResponse(response=http.HttpUnauthorized())
def throttle_check(self, request):
"""
Handles checking if the user should be throttled.
Mostly a hook, this uses class assigned to ``throttle`` from
``Resource._meta``.
"""
request_method = request.method.lower()
identifier = self._meta.authentication.get_identifier(request)
# Check to see if they should be throttled.
if self._meta.throttle.should_be_throttled(identifier, url=request.get_full_path(), request_method=request_method):
# Throttle limit exceeded.
raise ImmediateHttpResponse(response=http.HttpTooManyRequests())
def log_throttled_access(self, request):
"""
Handles the recording of the user's access for throttling purposes.
Mostly a hook, this uses class assigned to ``throttle`` from
``Resource._meta``.
"""
request_method = request.method.lower()
self._meta.throttle.accessed(self._meta.authentication.get_identifier(request), url=request.get_full_path(), request_method=request_method)
def unauthorized_result(self, exception):
raise ImmediateHttpResponse(response=http.HttpUnauthorized())
def authorized_read_list(self, object_list, bundle):
"""
Handles checking of permissions to see if the user has authorization
to GET this resource.
"""
try:
auth_result = self._meta.authorization.read_list(object_list, bundle)
except Unauthorized, e:
self.unauthorized_result(e)
return auth_result
def authorized_read_detail(self, object_list, bundle):
"""
Handles checking of permissions to see if the user has authorization
to GET this resource.
"""
try:
auth_result = self._meta.authorization.read_detail(object_list, bundle)
if not auth_result is True:
raise Unauthorized()
except Unauthorized, e:
self.unauthorized_result(e)
return auth_result
def authorized_create_list(self, object_list, bundle):
"""
Handles checking of permissions to see if the user has authorization
to POST this resource.
"""
try:
auth_result = self._meta.authorization.create_list(object_list, bundle)
except Unauthorized, e:
self.unauthorized_result(e)
return auth_result
def authorized_create_detail(self, object_list, bundle):
"""
Handles checking of permissions to see if the user has authorization
to POST this resource.
"""
try:
auth_result = self._meta.authorization.create_detail(object_list, bundle)
if not auth_result is True:
raise Unauthorized()
except Unauthorized, e:
self.unauthorized_result(e)
return auth_result
def authorized_update_list(self, object_list, bundle):
"""
Handles checking of permissions to see if the user has authorization
to PUT this resource.
"""
try:
auth_result = self._meta.authorization.update_list(object_list, bundle)
except Unauthorized, e:
self.unauthorized_result(e)
return auth_result
def authorized_update_detail(self, object_list, bundle):
"""
Handles checking of permissions to see if the user has authorization
to PUT this resource.
"""
try:
auth_result = self._meta.authorization.update_detail(object_list, bundle)
if not auth_result is True:
raise Unauthorized()
except Unauthorized, e:
self.unauthorized_result(e)
return auth_result
def authorized_delete_list(self, object_list, bundle):
"""
Handles checking of permissions to see if the user has authorization
to DELETE this resource.
"""
try:
auth_result = self._meta.authorization.delete_list(object_list, bundle)
except Unauthorized, e:
self.unauthorized_result(e)
return auth_result
def authorized_delete_detail(self, object_list, bundle):
"""
Handles checking of permissions to see if the user has authorization
to DELETE this resource.
"""
try:
auth_result = self._meta.authorization.delete_detail(object_list, bundle)
if not auth_result:
raise Unauthorized()
except Unauthorized, e:
self.unauthorized_result(e)
return auth_result
def build_bundle(self, obj=None, data=None, request=None, objects_saved=None):
"""
Given either an object, a data dictionary or both, builds a ``Bundle``
for use throughout the ``dehydrate/hydrate`` cycle.
If no object is provided, an empty object from
``Resource._meta.object_class`` is created so that attempts to access
``bundle.obj`` do not fail.
"""
if obj is None:
obj = self._meta.object_class()
return Bundle(
obj=obj,
data=data,
request=request,
objects_saved=objects_saved
)
def build_filters(self, filters=None):
"""
Allows for the filtering of applicable objects.
This needs to be implemented at the user level.'
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
return filters
def apply_sorting(self, obj_list, options=None):
"""
Allows for the sorting of objects being returned.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
return obj_list
def get_bundle_detail_data(self, bundle):
"""
Convenience method to return the ``detail_uri_name`` attribute off
``bundle.obj``.
Usually just accesses ``bundle.obj.pk`` by default.
"""
return getattr(bundle.obj, self._meta.detail_uri_name)
# URL-related methods.
def detail_uri_kwargs(self, bundle_or_obj):
"""
This needs to be implemented at the user level.
Given a ``Bundle`` or an object, it returns the extra kwargs needed to
generate a detail URI.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def resource_uri_kwargs(self, bundle_or_obj=None):
"""
Builds a dictionary of kwargs to help generate URIs.
Automatically provides the ``Resource.Meta.resource_name`` (and
optionally the ``Resource.Meta.api_name`` if populated by an ``Api``
object).
If the ``bundle_or_obj`` argument is provided, it calls
``Resource.detail_uri_kwargs`` for additional bits to create
"""
kwargs = {
'resource_name': self._meta.resource_name,
}
if self._meta.api_name is not None:
kwargs['api_name'] = self._meta.api_name
if bundle_or_obj is not None:
kwargs.update(self.detail_uri_kwargs(bundle_or_obj))
return kwargs
def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_list'):
"""
Handles generating a resource URI.
If the ``bundle_or_obj`` argument is not provided, it builds the URI
for the list endpoint.
If the ``bundle_or_obj`` argument is provided, it builds the URI for
the detail endpoint.
Return the generated URI. If that URI can not be reversed (not found
in the URLconf), it will return an empty string.
"""
if bundle_or_obj is not None:
url_name = 'api_dispatch_detail'
try:
return self._build_reverse_url(url_name, kwargs=self.resource_uri_kwargs(bundle_or_obj))
except NoReverseMatch:
return ''
def get_via_uri(self, uri, request=None):
"""
This pulls apart the salient bits of the URI and populates the
resource via a ``obj_get``.
Optionally accepts a ``request``.
If you need custom behavior based on other portions of the URI,
simply override this method.
"""
prefix = get_script_prefix()
chomped_uri = uri
if prefix and chomped_uri.startswith(prefix):
chomped_uri = chomped_uri[len(prefix)-1:]
try:
view, args, kwargs = resolve(chomped_uri)
except Resolver404:
raise NotFound("The URL provided '%s' was not a link to a valid resource." % uri)
bundle = self.build_bundle(request=request)
return self.obj_get(bundle=bundle, **self.remove_api_resource_names(kwargs))
# Data preparation.
def full_dehydrate(self, bundle, for_list=False):
"""
Given a bundle with an object instance, extract the information from it
to populate the resource.
"""
use_in = ['all', 'list' if for_list else 'detail']
# Dehydrate each field.
for field_name, field_object in self.fields.items():
# If it's not for use in this mode, skip
field_use_in = getattr(field_object, 'use_in', 'all')
if callable(field_use_in):
if not field_use_in(bundle):
continue
else:
if field_use_in not in use_in:
continue
# A touch leaky but it makes URI resolution work.
if getattr(field_object, 'dehydrated_type', None) == 'related':
field_object.api_name = self._meta.api_name
field_object.resource_name = self._meta.resource_name
bundle.data[field_name] = field_object.dehydrate(bundle, for_list=for_list)
# Check for an optional method to do further dehydration.
method = getattr(self, "dehydrate_%s" % field_name, None)
if method:
bundle.data[field_name] = method(bundle)
bundle = self.dehydrate(bundle)
return bundle
def dehydrate(self, bundle):
"""
A hook to allow a final manipulation of data once all fields/methods
have built out the dehydrated data.
Useful if you need to access more than one dehydrated field or want
to annotate on additional data.
Must return the modified bundle.
"""
return bundle
def full_hydrate(self, bundle):
    """
    Given a populated bundle, distill it and turn it back into
    a full-fledged object instance.

    Runs the ``hydrate`` hook, then each writable field's ``hydrate``
    (plus any ``hydrate_FOO`` method), copying values onto
    ``bundle.obj``. M2M data is deliberately not applied here — see
    ``hydrate_m2m``.
    """
    if bundle.obj is None:
        bundle.obj = self._meta.object_class()

    bundle = self.hydrate(bundle)

    for field_name, field_object in self.fields.items():
        if field_object.readonly is True:
            continue

        # Check for an optional method to do further hydration.
        method = getattr(self, "hydrate_%s" % field_name, None)

        if method:
            bundle = method(bundle)

        if field_object.attribute:
            value = field_object.hydrate(bundle)

            # NOTE: We only get back a bundle when it is related field.
            if isinstance(value, Bundle) and value.errors.get(field_name):
                bundle.errors[field_name] = value.errors[field_name]

            if value is not None or field_object.null:
                # We need to avoid populating M2M data here as that will
                # cause things to blow up.
                if not getattr(field_object, 'is_related', False):
                    setattr(bundle.obj, field_object.attribute, value)
                elif not getattr(field_object, 'is_m2m', False):
                    if value is not None:
                        # NOTE: A bug fix in Django (ticket #18153) fixes incorrect behavior
                        # which Tastypie was relying on. To fix this, we store value.obj to
                        # be saved later in save_related.
                        try:
                            setattr(bundle.obj, field_object.attribute, value.obj)
                        except (ValueError, ObjectDoesNotExist):
                            # The related object can't be assigned yet
                            # (e.g. unsaved); defer to ``save_related``.
                            bundle.related_objects_to_save[field_object.attribute] = value.obj
                    elif field_object.blank:
                        continue
                    elif field_object.null:
                        setattr(bundle.obj, field_object.attribute, value)

    return bundle
def hydrate(self, bundle):
    """
    Hook for an early pass over incoming data, before the per-field
    hydration runs.

    Override this when hydration needs to look at several fields at
    once or annotate extra data. The (possibly modified) bundle must
    be returned.
    """
    # Default implementation: hand the bundle back untouched.
    return bundle
def hydrate_m2m(self, bundle):
    """
    Populate the ManyToMany data on the instance.

    Must run after ``full_hydrate`` (which guarantees ``bundle.obj``
    exists); raises ``HydrationError`` otherwise. Only ``bundle.data``
    is filled in — applying the M2M data to the instance is left to
    the caller.
    """
    if bundle.obj is None:
        raise HydrationError("You must call 'full_hydrate' before attempting to run 'hydrate_m2m' on %r." % self)

    for field_name, field_object in self.fields.items():
        if not getattr(field_object, 'is_m2m', False):
            continue

        if field_object.attribute:
            # Note that we only hydrate the data, leaving the instance
            # unmodified. It's up to the user's code to handle this.
            # The ``ModelResource`` provides a working baseline
            # in this regard.
            bundle.data[field_name] = field_object.hydrate_m2m(bundle)

    # Second pass: per-field ``hydrate_FOO`` hooks run only after all
    # M2M data has been hydrated.
    for field_name, field_object in self.fields.items():
        if not getattr(field_object, 'is_m2m', False):
            continue

        method = getattr(self, "hydrate_%s" % field_name, None)

        if method:
            method(bundle)

    return bundle
def build_schema(self):
    """
    Describe this resource for the ``schema/`` endpoint.

    Returns a dictionary covering the allowed HTTP methods, the
    default serialization format and pagination limit, plus one entry
    per field with its type, nullability and related metadata.
    Ordering and filtering rules are included only when configured.
    """
    schema = {
        'fields': {},
        'default_format': self._meta.default_format,
        'allowed_list_http_methods': self._meta.list_allowed_methods,
        'allowed_detail_http_methods': self._meta.detail_allowed_methods,
        'default_limit': self._meta.limit,
    }

    if self._meta.ordering:
        schema['ordering'] = self._meta.ordering

    if self._meta.filtering:
        schema['filtering'] = self._meta.filtering

    for name, field in self.fields.items():
        info = {
            'default': field.default,
            'type': field.dehydrated_type,
            'nullable': field.null,
            'blank': field.blank,
            'readonly': field.readonly,
            'help_text': field.help_text,
            'unique': field.unique,
        }

        if field.dehydrated_type == 'related':
            # Distinguish *-to-many from *-to-one relations for clients.
            info['related_type'] = 'to_many' if getattr(field, 'is_m2m', False) else 'to_one'

        schema['fields'][name] = info

    return schema
def dehydrate_resource_uri(self, bundle):
    """
    Produce the value for the automatically-included ``resource_uri``
    field.

    Returns an empty string whenever a URI cannot be generated, either
    because the resource never implemented URI generation or because
    the URLconf has no matching pattern.
    """
    try:
        return self.get_resource_uri(bundle)
    except (NotImplementedError, NoReverseMatch):
        # No usable URI for this bundle.
        return ''
def generate_cache_key(self, *args, **kwargs):
    """
    Build a unique-enough cache key.

    The key combines the api name, the resource name, any positional
    arguments and a ``key=value`` rendering of the keyword arguments.
    """
    # Rendering with join is faster than repeated concatenation.
    kwarg_bits = ["%s=%s" % pair for pair in kwargs.items()]
    return "%s:%s:%s:%s" % (
        self._meta.api_name,
        self._meta.resource_name,
        ':'.join(args),
        ':'.join(kwarg_bits),
    )
# Data access methods.
def get_object_list(self, request):
    """
    A hook to allow making returning the list of available objects.

    This needs to be implemented at the user level; this base version
    always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def apply_authorization_limits(self, request, object_list):
    """
    Deprecated. Delegates directly to the authorization class's
    ``apply_limits`` for backward compatibility only.

    FIXME: REMOVE BEFORE 1.0
    """
    return self._meta.authorization.apply_limits(request, object_list)
def can_create(self):
    """
    Report whether ``post`` appears in either allowed-methods list.
    """
    methods = set(self._meta.list_allowed_methods) | set(self._meta.detail_allowed_methods)
    return 'post' in methods
def can_update(self):
    """
    Report whether ``put`` appears in either allowed-methods list.

    Used when hydrating related data.
    """
    methods = set(self._meta.list_allowed_methods) | set(self._meta.detail_allowed_methods)
    return 'put' in methods
def can_delete(self):
    """
    Report whether ``delete`` appears in either allowed-methods list.
    """
    methods = set(self._meta.list_allowed_methods) | set(self._meta.detail_allowed_methods)
    return 'delete' in methods
def apply_filters(self, request, applicable_filters):
    """
    A hook to alter how the filters are applied to the object list.

    This needs to be implemented at the user level; this base version
    always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def obj_get_list(self, bundle, **kwargs):
    """
    Fetches the list of objects available on the resource.

    This needs to be implemented at the user level; this base version
    always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def cached_obj_get_list(self, bundle, **kwargs):
    """
    Cache-aware variant of ``obj_get_list``.

    Checks ``Meta.cache`` first and only calls ``obj_get_list`` (then
    primes the cache with the result) on a miss.
    """
    cache_key = self.generate_cache_key('list', **kwargs)
    cached = self._meta.cache.get(cache_key)

    if cached is None:
        # Miss: fetch from the data source and remember the result.
        cached = self.obj_get_list(bundle=bundle, **kwargs)
        self._meta.cache.set(cache_key, cached)

    return cached
def obj_get(self, bundle, **kwargs):
    """
    Fetches an individual object on the resource.

    This needs to be implemented at the user level. If the object can not
    be found, this should raise a ``NotFound`` exception; this base
    version always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def cached_obj_get(self, bundle, **kwargs):
    """
    Cache-aware variant of ``obj_get``.

    Checks ``Meta.cache`` before delegating to ``obj_get`` and primes
    the cache on a miss.
    """
    cache_key = self.generate_cache_key('detail', **kwargs)
    cached = self._meta.cache.get(cache_key)

    if cached is None:
        # Miss: hit the data source, then remember the result.
        cached = self.obj_get(bundle=bundle, **kwargs)
        self._meta.cache.set(cache_key, cached)

    return cached
def obj_create(self, bundle, **kwargs):
    """
    Creates a new object based on the provided data.

    This needs to be implemented at the user level; this base version
    always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def obj_update(self, bundle, **kwargs):
    """
    Updates an existing object (or creates a new object) based on the
    provided data.

    This needs to be implemented at the user level; this base version
    always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def obj_delete_list(self, bundle, **kwargs):
    """
    Deletes an entire list of objects.

    This needs to be implemented at the user level; this base version
    always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def obj_delete_list_for_update(self, bundle, **kwargs):
    """
    Deletes an entire list of objects, specific to PUT list.

    This needs to be implemented at the user level; this base version
    always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def obj_delete(self, bundle, **kwargs):
    """
    Deletes a single object.

    This needs to be implemented at the user level; this base version
    always raises ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
def create_response(self, request, data, response_class=HttpResponse, **response_kwargs):
    """
    Extracts the common "which-format/serialize/return-response" cycle.

    Mostly a useful shortcut/hook. The format is negotiated from the
    request, ``data`` is serialized in that format, and any extra
    ``response_kwargs`` (e.g. ``location``) pass straight through to
    ``response_class``.
    """
    desired_format = self.determine_format(request)
    serialized = self.serialize(request, data, desired_format)
    return response_class(content=serialized, content_type=build_content_type(desired_format), **response_kwargs)
def error_response(self, request, errors, response_class=None):
    """
    Extracts the common "which-format/serialize/return-error-response"
    cycle.

    Should be used as much as possible to return errors.

    ``errors`` is the serializable error payload. ``response_class``
    defaults to ``http.HttpBadRequest`` (400). JSONP requests (a
    ``callback`` query parameter is present) are always answered in
    JSON to avoid extra breakage.
    """
    if response_class is None:
        response_class = http.HttpBadRequest

    desired_format = None

    if request:
        if request.GET.get('callback', None) is None:
            try:
                desired_format = self.determine_format(request)
            except BadRequest:
                pass  # Fall through to default handler below
        else:
            # JSONP can cause extra breakage.
            desired_format = 'application/json'

    if not desired_format:
        desired_format = self._meta.default_format

    try:
        serialized = self.serialize(request, errors, desired_format)
    # ``except X, e`` is Python 2-only syntax; the ``as`` form (PEP 3110)
    # works on Python 2.6+ and 3.x alike.
    except BadRequest as e:
        error = "Additional errors occurred, but serialization of those errors failed."

        if settings.DEBUG:
            error += " %s" % e

        return response_class(content=error, content_type='text/plain')

    return response_class(content=serialized, content_type=build_content_type(desired_format))
def is_valid(self, bundle):
    """
    Run the resource's configured ``validation`` class over the bundle.

    When validation fails, the error dict is stored under the resource
    name in ``bundle.errors`` and ``False`` is returned; otherwise
    ``True``.
    """
    errors = self._meta.validation.is_valid(bundle, bundle.request)

    if not errors:
        return True

    # Namespace the errors under the resource name for the response.
    bundle.errors[self._meta.resource_name] = errors
    return False
def rollback(self, bundles):
    """
    Given the list of bundles, delete all objects pertaining to those
    bundles.

    This needs to be implemented at the user level. No exceptions should
    be raised if possible; this base version always raises
    ``NotImplementedError``.

    ``ModelResource`` includes a full working version specific to Django's
    ``Models``.
    """
    raise NotImplementedError()
# Views.
def get_list(self, request, **kwargs):
    """
    Returns a serialized list of resources.

    Calls ``obj_get_list`` to provide the data, then sorts, paginates
    and dehydrates that result set before serializing it.

    Should return a HttpResponse (200 OK).
    """
    # TODO: Uncached for now. Invalidation that works for everyone may be
    #       impossible.
    base_bundle = self.build_bundle(request=request)
    objects = self.obj_get_list(bundle=base_bundle, **self.remove_api_resource_names(kwargs))
    sorted_objects = self.apply_sorting(objects, options=request.GET)

    paginator = self._meta.paginator_class(request.GET, sorted_objects, resource_uri=self.get_resource_uri(), limit=self._meta.limit, max_limit=self._meta.max_limit, collection_name=self._meta.collection_name)
    to_be_serialized = paginator.page()

    # Dehydrate the bundles in preparation for serialization.
    bundles = []

    for obj in to_be_serialized[self._meta.collection_name]:
        bundle = self.build_bundle(obj=obj, request=request)
        bundles.append(self.full_dehydrate(bundle, for_list=True))

    to_be_serialized[self._meta.collection_name] = bundles
    to_be_serialized = self.alter_list_data_to_serialize(request, to_be_serialized)
    return self.create_response(request, to_be_serialized)
def get_detail(self, request, **kwargs):
    """
    Returns a single serialized resource.

    Calls ``cached_obj_get/obj_get`` to provide the data, then handles that result
    set and serializes it.

    Should return a HttpResponse (200 OK); responds 404 when the object
    is missing and 300 when the kwargs match more than one object.
    """
    basic_bundle = self.build_bundle(request=request)

    try:
        obj = self.cached_obj_get(bundle=basic_bundle, **self.remove_api_resource_names(kwargs))
    except ObjectDoesNotExist:
        return http.HttpNotFound()
    except MultipleObjectsReturned:
        return http.HttpMultipleChoices("More than one resource is found at this URI.")

    bundle = self.build_bundle(obj=obj, request=request)
    bundle = self.full_dehydrate(bundle)
    bundle = self.alter_detail_data_to_serialize(request, bundle)
    return self.create_response(request, bundle)
def post_list(self, request, **kwargs):
    """
    Creates a new resource/object with the provided data.

    Calls ``obj_create`` with the provided data and returns a response
    with the new resource's location.

    If a new resource is created, return ``HttpCreated`` (201 Created).
    If ``Meta.always_return_data = True``, there will be a populated body
    of serialized data.
    """
    # Django 1.4 renamed ``raw_post_data`` to ``body``.
    if django.VERSION >= (1, 4):
        body = request.body
    else:
        body = request.raw_post_data

    deserialized = self.deserialize(request, body, format=request.META.get('CONTENT_TYPE', 'application/json'))
    deserialized = self.alter_deserialized_detail_data(request, deserialized)
    bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized), request=request)
    updated_bundle = self.obj_create(bundle, **self.remove_api_resource_names(kwargs))
    location = self.get_resource_uri(updated_bundle)

    if not self._meta.always_return_data:
        return http.HttpCreated(location=location)
    else:
        updated_bundle = self.full_dehydrate(updated_bundle)
        updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
        return self.create_response(request, updated_bundle, response_class=http.HttpCreated, location=location)
def post_detail(self, request, **kwargs):
    """
    Creates a new subcollection of the resource under a resource.

    This is not implemented by default because most people's data models
    aren't self-referential; the base version answers 501.

    If a new resource is created, return ``HttpCreated`` (201 Created).
    """
    return http.HttpNotImplemented()
def put_list(self, request, **kwargs):
    """
    Replaces a collection of resources with another collection.

    Calls ``delete_list`` to clear out the collection then ``obj_create``
    with the provided data to create the new collection.

    Return ``HttpNoContent`` (204 No Content) if
    ``Meta.always_return_data = False`` (default).

    Return ``HttpAccepted`` (202 Accepted) if
    ``Meta.always_return_data = True``.
    """
    # Django 1.4 renamed ``raw_post_data`` to ``body``.
    if django.VERSION >= (1, 4):
        body = request.body
    else:
        body = request.raw_post_data

    deserialized = self.deserialize(request, body, format=request.META.get('CONTENT_TYPE', 'application/json'))
    deserialized = self.alter_deserialized_list_data(request, deserialized)

    if not self._meta.collection_name in deserialized:
        raise BadRequest("Invalid data sent.")

    # Wipe the existing collection before re-creating it.
    basic_bundle = self.build_bundle(request=request)
    self.obj_delete_list_for_update(bundle=basic_bundle, **self.remove_api_resource_names(kwargs))
    bundles_seen = []

    for object_data in deserialized[self._meta.collection_name]:
        bundle = self.build_bundle(data=dict_strip_unicode_keys(object_data), request=request)

        # Attempt to be transactional, deleting any previously created
        # objects if validation fails.
        try:
            self.obj_create(bundle=bundle, **self.remove_api_resource_names(kwargs))
            bundles_seen.append(bundle)
        except ImmediateHttpResponse:
            self.rollback(bundles_seen)
            raise

    if not self._meta.always_return_data:
        return http.HttpNoContent()
    else:
        to_be_serialized = {}
        to_be_serialized[self._meta.collection_name] = [self.full_dehydrate(bundle, for_list=True) for bundle in bundles_seen]
        to_be_serialized = self.alter_list_data_to_serialize(request, to_be_serialized)
        return self.create_response(request, to_be_serialized, response_class=http.HttpAccepted)
def put_detail(self, request, **kwargs):
    """
    Either updates an existing resource or creates a new one with the
    provided data.

    Calls ``obj_update`` with the provided data first, but falls back to
    ``obj_create`` if the object does not already exist.

    If a new resource is created, return ``HttpCreated`` (201 Created).
    If ``Meta.always_return_data = True``, there will be a populated body
    of serialized data.

    If an existing resource is modified and
    ``Meta.always_return_data = False`` (default), return ``HttpNoContent``
    (204 No Content).
    If an existing resource is modified and
    ``Meta.always_return_data = True``, return ``HttpAccepted`` (202
    Accepted).
    """
    # Django 1.4 renamed ``raw_post_data`` to ``body``.
    if django.VERSION >= (1, 4):
        body = request.body
    else:
        body = request.raw_post_data

    deserialized = self.deserialize(request, body, format=request.META.get('CONTENT_TYPE', 'application/json'))
    deserialized = self.alter_deserialized_detail_data(request, deserialized)
    bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized), request=request)

    try:
        updated_bundle = self.obj_update(bundle=bundle, **self.remove_api_resource_names(kwargs))

        if not self._meta.always_return_data:
            return http.HttpNoContent()
        else:
            updated_bundle = self.full_dehydrate(updated_bundle)
            updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
            return self.create_response(request, updated_bundle, response_class=http.HttpAccepted)
    except (NotFound, MultipleObjectsReturned):
        # No single existing object to update: create instead.
        updated_bundle = self.obj_create(bundle=bundle, **self.remove_api_resource_names(kwargs))
        location = self.get_resource_uri(updated_bundle)

        if not self._meta.always_return_data:
            return http.HttpCreated(location=location)
        else:
            updated_bundle = self.full_dehydrate(updated_bundle)
            updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
            return self.create_response(request, updated_bundle, response_class=http.HttpCreated, location=location)
def delete_list(self, request, **kwargs):
    """
    Destroys a collection of resources/objects.

    Calls ``obj_delete_list``.

    If the resources are deleted, return ``HttpNoContent`` (204 No Content).
    """
    bundle = self.build_bundle(request=request)
    # ``request`` is also passed through as a keyword argument here
    # (legacy behavior) in addition to living on the bundle.
    self.obj_delete_list(bundle=bundle, request=request, **self.remove_api_resource_names(kwargs))
    return http.HttpNoContent()
def delete_detail(self, request, **kwargs):
    """
    Destroys a single resource/object.

    Calls ``obj_delete``.

    If the resource is deleted, return ``HttpNoContent`` (204 No Content).
    If the resource did not exist, return ``Http404`` (404 Not Found).
    """
    # Manually construct the bundle here, since we don't want to try to
    # delete an empty instance.
    bundle = Bundle(request=request)

    try:
        self.obj_delete(bundle=bundle, **self.remove_api_resource_names(kwargs))
        return http.HttpNoContent()
    except NotFound:
        return http.HttpNotFound()
def patch_list(self, request, **kwargs):
    """
    Updates a collection in-place.

    The exact behavior of ``PATCH`` to a list resource is still the matter of
    some debate in REST circles, and the ``PATCH`` RFC isn't standard. So the
    behavior this method implements (described below) is something of a
    stab in the dark. It's mostly cribbed from GData, with a smattering
    of ActiveResource-isms and maybe even an original idea or two.

    The ``PATCH`` format is one that's similar to the response returned from
    a ``GET`` on a list resource::

        {
            "objects": [{object}, {object}, ...],
            "deleted_objects": ["URI", "URI", "URI", ...],
        }

    For each object in ``objects``:

    * If the dict does not have a ``resource_uri`` key then the item is
      considered "new" and is handled like a ``POST`` to the resource list.
    * If the dict has a ``resource_uri`` key and the ``resource_uri`` refers
      to an existing resource then the item is a update; it's treated
      like a ``PATCH`` to the corresponding resource detail.
    * If the dict has a ``resource_uri`` but the resource *doesn't* exist,
      then this is considered to be a create-via-``PUT``.

    Each entry in ``deleted_objects`` refers to a resource URI of an existing
    resource to be deleted; each is handled like a ``DELETE`` to the relevant
    resource.

    In any case:

    * If there's a resource URI it *must* refer to a resource of this
      type. It's an error to include a URI of a different resource.
    * ``PATCH`` is all or nothing. If a single sub-operation fails, the
      entire request will fail and all resources will be rolled back.
    * For ``PATCH`` to work, you **must** have ``put`` in your
      :ref:`detail-allowed-methods` setting.
    * To delete objects via ``deleted_objects`` in a ``PATCH`` request you
      **must** have ``delete`` in your :ref:`detail-allowed-methods`
      setting.

    Substitute appropriate names for ``objects`` and
    ``deleted_objects`` if ``Meta.collection_name`` is set to something
    other than ``objects`` (default).
    """
    request = convert_post_to_patch(request)

    # Django 1.4 renamed ``raw_post_data`` to ``body``.
    if django.VERSION >= (1, 4):
        body = request.body
    else:
        body = request.raw_post_data

    deserialized = self.deserialize(request, body, format=request.META.get('CONTENT_TYPE', 'application/json'))

    collection_name = self._meta.collection_name
    deleted_collection_name = 'deleted_%s' % collection_name

    if collection_name not in deserialized:
        raise BadRequest("Invalid data sent: missing '%s'" % collection_name)

    # Updates/creates require ``put`` in detail_allowed_methods.
    if len(deserialized[collection_name]) and 'put' not in self._meta.detail_allowed_methods:
        raise ImmediateHttpResponse(response=http.HttpMethodNotAllowed())

    bundles_seen = []

    for data in deserialized[collection_name]:
        # If there's a resource_uri then this is either an
        # update-in-place or a create-via-PUT.
        if "resource_uri" in data:
            uri = data.pop('resource_uri')

            try:
                obj = self.get_via_uri(uri, request=request)

                # The object does exist, so this is an update-in-place.
                bundle = self.build_bundle(obj=obj, request=request)
                bundle = self.full_dehydrate(bundle, for_list=True)
                bundle = self.alter_detail_data_to_serialize(request, bundle)
                self.update_in_place(request, bundle, data)
            except (ObjectDoesNotExist, MultipleObjectsReturned):
                # The object referenced by resource_uri doesn't exist,
                # so this is a create-by-PUT equivalent.
                data = self.alter_deserialized_detail_data(request, data)
                bundle = self.build_bundle(data=dict_strip_unicode_keys(data), request=request)
                self.obj_create(bundle=bundle)
        else:
            # There's no resource URI, so this is a create call just
            # like a POST to the list resource.
            data = self.alter_deserialized_detail_data(request, data)
            bundle = self.build_bundle(data=dict_strip_unicode_keys(data), request=request)
            self.obj_create(bundle=bundle)

        bundles_seen.append(bundle)

    deleted_collection = deserialized.get(deleted_collection_name, [])

    if deleted_collection:
        # Deletes require ``delete`` in detail_allowed_methods.
        if 'delete' not in self._meta.detail_allowed_methods:
            raise ImmediateHttpResponse(response=http.HttpMethodNotAllowed())

        for uri in deleted_collection:
            obj = self.get_via_uri(uri, request=request)
            bundle = self.build_bundle(obj=obj, request=request)
            self.obj_delete(bundle=bundle)

    if not self._meta.always_return_data:
        return http.HttpAccepted()
    else:
        to_be_serialized = {}
        to_be_serialized['objects'] = [self.full_dehydrate(bundle, for_list=True) for bundle in bundles_seen]
        to_be_serialized = self.alter_list_data_to_serialize(request, to_be_serialized)
        return self.create_response(request, to_be_serialized, response_class=http.HttpAccepted)
def patch_detail(self, request, **kwargs):
    """
    Updates a resource in-place.

    Calls ``obj_update``.

    If the resource is updated, return ``HttpAccepted`` (202 Accepted).
    If the resource did not exist, return ``HttpNotFound`` (404 Not Found).
    """
    request = convert_post_to_patch(request)
    basic_bundle = self.build_bundle(request=request)

    # We want to be able to validate the update, but we can't just pass
    # the partial data into the validator since all data needs to be
    # present. Instead, we basically simulate a PUT by pulling out the
    # original data and updating it in-place.

    # So first pull out the original object. This is essentially
    # ``get_detail``.
    try:
        obj = self.cached_obj_get(bundle=basic_bundle, **self.remove_api_resource_names(kwargs))
    except ObjectDoesNotExist:
        return http.HttpNotFound()
    except MultipleObjectsReturned:
        return http.HttpMultipleChoices("More than one resource is found at this URI.")

    bundle = self.build_bundle(obj=obj, request=request)
    bundle = self.full_dehydrate(bundle)
    bundle = self.alter_detail_data_to_serialize(request, bundle)

    # Now update the bundle in-place.
    # Django 1.4 renamed ``raw_post_data`` to ``body``.
    if django.VERSION >= (1, 4):
        body = request.body
    else:
        body = request.raw_post_data

    deserialized = self.deserialize(request, body, format=request.META.get('CONTENT_TYPE', 'application/json'))
    self.update_in_place(request, bundle, deserialized)

    if not self._meta.always_return_data:
        return http.HttpAccepted()
    else:
        bundle = self.full_dehydrate(bundle)
        bundle = self.alter_detail_data_to_serialize(request, bundle)
        return self.create_response(request, bundle, response_class=http.HttpAccepted)
def update_in_place(self, request, original_bundle, new_data):
    """
    Update the object in original_bundle in-place using new_data.

    Merges ``new_data`` over the bundle's existing data, then performs
    the equivalent of a PUT via ``obj_update``.
    """
    original_bundle.data.update(**dict_strip_unicode_keys(new_data))

    # Now we've got a bundle with the new data sitting in it and we're
    # basically in the same spot as a PUT request. So the rest of this
    # function is cribbed from put_detail.
    self.alter_deserialized_detail_data(request, original_bundle.data)
    kwargs = {
        self._meta.detail_uri_name: self.get_bundle_detail_data(original_bundle),
        'request': request,
    }
    return self.obj_update(bundle=original_bundle, **kwargs)
def get_schema(self, request, **kwargs):
    """
    Returns a serialized form of the schema of the resource.

    Calls ``build_schema`` to generate the data. This method only responds
    to HTTP GET.

    Authentication, throttling and a read-detail authorization check are
    applied before answering.

    Should return a HttpResponse (200 OK).
    """
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)
    self.log_throttled_access(request)
    bundle = self.build_bundle(request=request)
    self.authorized_read_detail(self.get_object_list(bundle.request), bundle)
    return self.create_response(request, self.build_schema())
def get_multiple(self, request, **kwargs):
    """
    Returns a serialized list of resources based on the identifiers
    from the URL.

    Calls ``obj_get`` to fetch only the objects requested. This method
    only responds to HTTP GET.

    Identifiers that can't be found (or aren't authorized) are reported
    under a ``not_found`` key instead of failing the whole request.

    Should return a HttpResponse (200 OK).
    """
    self.method_check(request, allowed=['get'])
    self.is_authenticated(request)
    self.throttle_check(request)

    # Rip apart the list then iterate.
    kwarg_name = '%s_list' % self._meta.detail_uri_name
    obj_identifiers = kwargs.get(kwarg_name, '').split(';')
    objects = []
    not_found = []
    base_bundle = self.build_bundle(request=request)

    for identifier in obj_identifiers:
        try:
            obj = self.obj_get(bundle=base_bundle, **{self._meta.detail_uri_name: identifier})
            bundle = self.build_bundle(obj=obj, request=request)
            bundle = self.full_dehydrate(bundle, for_list=True)
            objects.append(bundle)
        except (ObjectDoesNotExist, Unauthorized):
            not_found.append(identifier)

    object_list = {
        self._meta.collection_name: objects,
    }

    if len(not_found):
        object_list['not_found'] = not_found

    self.log_throttled_access(request)
    return self.create_response(request, object_list)
class ModelDeclarativeMetaclass(DeclarativeMetaclass):
    """
    Metaclass for ``ModelResource``.

    Extends ``DeclarativeMetaclass`` by deriving ``object_class`` from
    ``Meta.queryset``, pruning fields according to ``Meta.fields`` /
    ``Meta.excludes``, introspecting additional fields from the model,
    and optionally adding an ``absolute_url`` field.
    """
    def __new__(cls, name, bases, attrs):
        meta = attrs.get('Meta')

        if meta and hasattr(meta, 'queryset'):
            # The model behind the queryset becomes the object class.
            setattr(meta, 'object_class', meta.queryset.model)

        new_class = super(ModelDeclarativeMetaclass, cls).__new__(cls, name, bases, attrs)
        include_fields = getattr(new_class._meta, 'fields', [])
        excludes = getattr(new_class._meta, 'excludes', [])
        field_names = new_class.base_fields.keys()

        for field_name in field_names:
            if field_name == 'resource_uri':
                # Always kept; required for URI generation.
                continue
            if field_name in new_class.declared_fields:
                # Explicitly declared fields are never pruned here.
                continue
            if len(include_fields) and not field_name in include_fields:
                del(new_class.base_fields[field_name])
            if len(excludes) and field_name in excludes:
                del(new_class.base_fields[field_name])

        # Add in the new fields.
        new_class.base_fields.update(new_class.get_fields(include_fields, excludes))

        if getattr(new_class._meta, 'include_absolute_url', True):
            if not 'absolute_url' in new_class.base_fields:
                new_class.base_fields['absolute_url'] = fields.CharField(attribute='get_absolute_url', readonly=True)
        elif 'absolute_url' in new_class.base_fields and not 'absolute_url' in attrs:
            del(new_class.base_fields['absolute_url'])

        return new_class
class ModelResource(Resource):
"""
A subclass of ``Resource`` designed to work with Django's ``Models``.
This class will introspect a given ``Model`` and build a field list based
on the fields found on the model (excluding relational fields).
Given that it is aware of Django's ORM, it also handles the CRUD data
operations of the resource.
"""
__metaclass__ = ModelDeclarativeMetaclass
@classmethod
def should_skip_field(cls, field):
    """
    Decide whether a Django model field should be left out of the
    auto-generated API fields.

    Any field exposing a truthy ``rel`` (relational fields) is
    skipped; everything else is kept.
    """
    return bool(getattr(field, 'rel'))
@classmethod
def api_field_from_django_field(cls, f, default=fields.CharField):
    """
    Returns the field type that would likely be associated with each
    Django type.

    ``default`` (``CharField``) is used for any internal type not
    explicitly mapped below.
    """
    result = default
    internal_type = f.get_internal_type()

    if internal_type in ('DateField', 'DateTimeField'):
        result = fields.DateTimeField
    elif internal_type in ('BooleanField', 'NullBooleanField'):
        result = fields.BooleanField
    elif internal_type in ('FloatField',):
        result = fields.FloatField
    elif internal_type in ('DecimalField',):
        result = fields.DecimalField
    elif internal_type in ('IntegerField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'SmallIntegerField', 'AutoField'):
        result = fields.IntegerField
    elif internal_type in ('FileField', 'ImageField'):
        result = fields.FileField
    elif internal_type == 'TimeField':
        result = fields.TimeField
    # TODO: Perhaps enable these via introspection. The reason they're not enabled
    #       by default is the very different ``__init__`` they have over
    #       the other fields.
    # elif internal_type == 'ForeignKey':
    #     result = ForeignKey
    # elif internal_type == 'ManyToManyField':
    #     result = ManyToManyField

    return result
@classmethod
def get_fields(cls, fields=None, excludes=None):
    """
    Given any explicit fields to include and fields to exclude, add
    additional fields based on the associated model.

    Returns a dict mapping field name to an instantiated ApiField
    whose kwargs mirror the model field's null/blank/unique/default
    configuration.
    """
    final_fields = {}
    fields = fields or []
    excludes = excludes or []

    if not cls._meta.object_class:
        return final_fields

    for f in cls._meta.object_class._meta.fields:
        # If the field name is already present, skip
        if f.name in cls.base_fields:
            continue

        # If field is not present in explicit field listing, skip
        if fields and f.name not in fields:
            continue

        # If field is in exclude list, skip
        if excludes and f.name in excludes:
            continue

        if cls.should_skip_field(f):
            continue

        api_field_class = cls.api_field_from_django_field(f)

        kwargs = {
            'attribute': f.name,
            'help_text': f.help_text,
        }

        if f.null is True:
            kwargs['null'] = True

        kwargs['unique'] = f.unique

        if not f.null and f.blank is True:
            # Blank-but-not-null model fields default to empty string.
            kwargs['default'] = ''
            kwargs['blank'] = True

        if f.get_internal_type() == 'TextField':
            kwargs['default'] = ''

        if f.has_default():
            kwargs['default'] = f.default

        if getattr(f, 'auto_now', False):
            kwargs['default'] = f.auto_now

        if getattr(f, 'auto_now_add', False):
            kwargs['default'] = f.auto_now_add

        final_fields[f.name] = api_field_class(**kwargs)
        final_fields[f.name].instance_name = f.name

    return final_fields
def check_filtering(self, field_name, filter_type='exact', filter_bits=None):
    """
    Given a field name, a optional filter type and an optional list of
    additional relations, determine if a field can be filtered on.

    If a filter does not meet the needed conditions, it should raise an
    ``InvalidFilterError``.

    If the filter meets the conditions, a list of attribute names (not
    field names) will be returned, built by recursing through related
    resources for each remaining lookup bit.
    """
    if filter_bits is None:
        filter_bits = []

    if not field_name in self._meta.filtering:
        raise InvalidFilterError("The '%s' field does not allow filtering." % field_name)

    # Check to see if it's an allowed lookup type.
    if not self._meta.filtering[field_name] in (ALL, ALL_WITH_RELATIONS):
        # Must be an explicit whitelist.
        if not filter_type in self._meta.filtering[field_name]:
            raise InvalidFilterError("'%s' is not an allowed filter on the '%s' field." % (filter_type, field_name))

    if self.fields[field_name].attribute is None:
        raise InvalidFilterError("The '%s' field has no 'attribute' for searching with." % field_name)

    # Check to see if it's a relational lookup and if that's allowed.
    if len(filter_bits):
        if not getattr(self.fields[field_name], 'is_related', False):
            raise InvalidFilterError("The '%s' field does not support relations." % field_name)

        if not self._meta.filtering[field_name] == ALL_WITH_RELATIONS:
            raise InvalidFilterError("Lookups are not allowed more than one level deep on the '%s' field." % field_name)

        # Recursively descend through the remaining lookups in the filter,
        # if any. We should ensure that all along the way, we're allowed
        # to filter on that field by the related resource.
        related_resource = self.fields[field_name].get_related_resource(None)
        return [self.fields[field_name].attribute] + related_resource.check_filtering(filter_bits[0], filter_type, filter_bits[1:])

    return [self.fields[field_name].attribute]
    def filter_value_to_python(self, value, field_name, filters, filter_expr,
            filter_type):
        """
        Turn the string ``value`` into a python object.
        """
        # Simple values
        # NOTE(review): since ``1 == True`` and ``0 == False`` in Python, an
        # integer 1/0 also matches these membership tests and is coerced to a
        # boolean -- presumably acceptable upstream behavior, but confirm
        # before relying on it.
        if value in ['true', 'True', True]:
            value = True
        elif value in ['false', 'False', False]:
            value = False
        elif value in ('nil', 'none', 'None', None):
            value = None
        # Split on ',' if not empty string and either an in or range filter.
        if filter_type in ('in', 'range') and len(value):
            if hasattr(filters, 'getlist'):
                # A QueryDict: merge every occurrence of the parameter, each of
                # which may itself hold a comma-separated list.
                value = []
                for part in filters.getlist(filter_expr):
                    value.extend(part.split(','))
            else:
                value = value.split(',')
        return value
    def build_filters(self, filters=None):
        """
        Given a dictionary of filters, create the necessary ORM-level filters.

        Keys should be resource fields, **NOT** model fields.

        Valid values are either a list of Django filter types (i.e.
        ``['startswith', 'exact', 'lte']``), the ``ALL`` constant or the
        ``ALL_WITH_RELATIONS`` constant.
        """
        # At the declarative level:
        #     filtering = {
        #         'resource_field_name': ['exact', 'startswith', 'endswith', 'contains'],
        #         'resource_field_name_2': ['exact', 'gt', 'gte', 'lt', 'lte', 'range'],
        #         'resource_field_name_3': ALL,
        #         'resource_field_name_4': ALL_WITH_RELATIONS,
        #         ...
        #     }
        # Accepts the filters as a dict. None by default, meaning no filters.
        if filters is None:
            filters = {}
        qs_filters = {}
        # Determine the set of valid lookup terms (``exact``, ``lte``, ...).
        if getattr(self._meta, 'queryset', None) is not None:
            # Get the possible query terms from the current QuerySet.
            if hasattr(self._meta.queryset.query.query_terms, 'keys'):
                # Django 1.4 & below compatibility.
                query_terms = self._meta.queryset.query.query_terms.keys()
            else:
                # Django 1.5+.
                query_terms = self._meta.queryset.query.query_terms
        else:
            if hasattr(QUERY_TERMS, 'keys'):
                # Django 1.4 & below compatibility.
                query_terms = QUERY_TERMS.keys()
            else:
                # Django 1.5+.
                query_terms = QUERY_TERMS
        for filter_expr, value in filters.items():
            filter_bits = filter_expr.split(LOOKUP_SEP)
            field_name = filter_bits.pop(0)
            filter_type = 'exact'
            if not field_name in self.fields:
                # It's not a field we know about. Move along citizen.
                continue
            # A trailing recognized query term selects the lookup type.
            if len(filter_bits) and filter_bits[-1] in query_terms:
                filter_type = filter_bits.pop()
            # Validate and translate the resource field path to model attributes.
            lookup_bits = self.check_filtering(field_name, filter_type, filter_bits)
            value = self.filter_value_to_python(value, field_name, filters, filter_expr, filter_type)
            # Re-assemble the ORM lookup, e.g. "author__name__startswith".
            db_field_name = LOOKUP_SEP.join(lookup_bits)
            qs_filter = "%s%s%s" % (db_field_name, LOOKUP_SEP, filter_type)
            qs_filters[qs_filter] = value
        return dict_strip_unicode_keys(qs_filters)
    def apply_sorting(self, obj_list, options=None):
        """
        Given a dictionary of options, apply some ORM-level sorting to the
        provided ``QuerySet``.

        Looks for the ``order_by`` key and handles either ascending (just the
        field name) or descending (the field name with a ``-`` in front).

        The field name should be the resource field, **NOT** model field.
        """
        if options is None:
            options = {}
        parameter_name = 'order_by'
        if not 'order_by' in options:
            if not 'sort_by' in options:
                # Nothing to alter the order. Return what we've got.
                return obj_list
            else:
                warnings.warn("'sort_by' is a deprecated parameter. Please use 'order_by' instead.")
                parameter_name = 'sort_by'
        order_by_args = []
        # A QueryDict may carry multiple order_by values; a plain dict only one.
        if hasattr(options, 'getlist'):
            order_bits = options.getlist(parameter_name)
        else:
            order_bits = options.get(parameter_name)
            if not isinstance(order_bits, (list, tuple)):
                order_bits = [order_bits]
        for order_by in order_bits:
            order_by_bits = order_by.split(LOOKUP_SEP)
            field_name = order_by_bits[0]
            order = ''
            # A leading "-" requests descending order.
            if order_by_bits[0].startswith('-'):
                field_name = order_by_bits[0][1:]
                order = '-'
            if not field_name in self.fields:
                # It's not a field we know about. Move along citizen.
                raise InvalidSortError("No matching '%s' field for ordering on." % field_name)
            if not field_name in self._meta.ordering:
                raise InvalidSortError("The '%s' field does not allow ordering." % field_name)
            if self.fields[field_name].attribute is None:
                raise InvalidSortError("The '%s' field has no 'attribute' for ordering with." % field_name)
            # Translate the resource field name to its model attribute path.
            order_by_args.append("%s%s" % (order, LOOKUP_SEP.join([self.fields[field_name].attribute] + order_by_bits[1:])))
        return obj_list.order_by(*order_by_args)
def apply_filters(self, request, applicable_filters):
"""
An ORM-specific implementation of ``apply_filters``.
The default simply applies the ``applicable_filters`` as ``**kwargs``,
but should make it possible to do more advanced things.
"""
return self.get_object_list(request).filter(**applicable_filters)
def get_object_list(self, request):
"""
An ORM-specific implementation of ``get_object_list``.
Returns a queryset that may have been limited by other overrides.
"""
return self._meta.queryset._clone()
def obj_get_list(self, bundle, **kwargs):
"""
A ORM-specific implementation of ``obj_get_list``.
Takes an optional ``request`` object, whose ``GET`` dictionary can be
used to narrow the query.
"""
filters = {}
if hasattr(bundle.request, 'GET'):
# Grab a mutable copy.
filters = bundle.request.GET.copy()
# Update with the provided kwargs.
filters.update(kwargs)
applicable_filters = self.build_filters(filters=filters)
try:
objects = self.apply_filters(bundle.request, applicable_filters)
return self.authorized_read_list(objects, bundle)
except ValueError:
raise BadRequest("Invalid resource lookup data provided (mismatched type).")
    def obj_get(self, bundle, **kwargs):
        """
        A ORM-specific implementation of ``obj_get``.

        Takes optional ``kwargs``, which are used to narrow the query to find
        the instance.
        """
        try:
            object_list = self.get_object_list(bundle.request).filter(**kwargs)
            stringified_kwargs = ', '.join(["%s=%s" % (k, v) for k, v in kwargs.items()])
            # Exactly one match is required; zero or many are errors.
            if len(object_list) <= 0:
                raise self._meta.object_class.DoesNotExist("Couldn't find an instance of '%s' which matched '%s'." % (self._meta.object_class.__name__, stringified_kwargs))
            elif len(object_list) > 1:
                raise MultipleObjectsReturned("More than '%s' matched '%s'." % (self._meta.object_class.__name__, stringified_kwargs))
            bundle.obj = object_list[0]
            # Raises if the requester may not read this object.
            self.authorized_read_detail(object_list, bundle)
            return bundle.obj
        except ValueError:
            raise NotFound("Invalid resource lookup data provided (mismatched type).")
def obj_create(self, bundle, **kwargs):
"""
A ORM-specific implementation of ``obj_create``.
"""
bundle.obj = self._meta.object_class()
for key, value in kwargs.items():
setattr(bundle.obj, key, value)
self.authorized_create_detail(self.get_object_list(bundle.request), bundle)
bundle = self.full_hydrate(bundle)
return self.save(bundle)
    def lookup_kwargs_with_identifiers(self, bundle, kwargs):
        """
        Kwargs here represent uri identifiers Ex: /repos/<user_id>/<repo_name>/
        We need to turn those identifiers into Python objects for generating
        lookup parameters that can find them in the DB
        """
        lookup_kwargs = {}
        bundle.obj = self.get_object_list(bundle.request).model()
        # Override data values, we rely on uri identifiers
        bundle.data.update(kwargs)
        # We're going to manually hydrate, as opposed to calling
        # ``full_hydrate``, to ensure we don't try to flesh out related
        # resources & keep things speedy.
        bundle = self.hydrate(bundle)
        for identifier in kwargs:
            # The detail URI name (usually "pk") is passed through untouched.
            if identifier == self._meta.detail_uri_name:
                lookup_kwargs[identifier] = kwargs[identifier]
                continue
            field_object = self.fields[identifier]
            # Skip readonly or related fields.
            if field_object.readonly is True or getattr(field_object, 'is_related', False):
                continue
            # Check for an optional method to do further hydration.
            method = getattr(self, "hydrate_%s" % identifier, None)
            if method:
                bundle = method(bundle)
            # Let the field convert the raw URI value to a Python value.
            if field_object.attribute:
                value = field_object.hydrate(bundle)
            lookup_kwargs[identifier] = value
        return lookup_kwargs
def obj_update(self, bundle, skip_errors=False, **kwargs):
"""
A ORM-specific implementation of ``obj_update``.
"""
if not bundle.obj or not self.get_bundle_detail_data(bundle):
try:
lookup_kwargs = self.lookup_kwargs_with_identifiers(bundle, kwargs)
except:
# if there is trouble hydrating the data, fall back to just
# using kwargs by itself (usually it only contains a "pk" key
# and this will work fine.
lookup_kwargs = kwargs
try:
bundle.obj = self.obj_get(bundle=bundle, **lookup_kwargs)
except ObjectDoesNotExist:
raise NotFound("A model instance matching the provided arguments could not be found.")
bundle = self.full_hydrate(bundle)
return self.save(bundle, skip_errors=skip_errors)
def obj_delete_list(self, bundle, **kwargs):
"""
A ORM-specific implementation of ``obj_delete_list``.
"""
objects_to_delete = self.obj_get_list(bundle=bundle, **kwargs)
deletable_objects = self.authorized_delete_list(objects_to_delete, bundle)
if hasattr(deletable_objects, 'delete'):
# It's likely a ``QuerySet``. Call ``.delete()`` for efficiency.
deletable_objects.delete()
else:
for authed_obj in deletable_objects:
authed_obj.delete()
def obj_delete_list_for_update(self, bundle, **kwargs):
"""
A ORM-specific implementation of ``obj_delete_list_for_update``.
"""
objects_to_delete = self.obj_get_list(bundle=bundle, **kwargs)
deletable_objects = self.authorized_update_list(objects_to_delete, bundle)
if hasattr(deletable_objects, 'delete'):
# It's likely a ``QuerySet``. Call ``.delete()`` for efficiency.
deletable_objects.delete()
else:
for authed_obj in deletable_objects:
authed_obj.delete()
    def obj_delete(self, bundle, **kwargs):
        """
        A ORM-specific implementation of ``obj_delete``.

        Takes optional ``kwargs``, which are used to narrow the query to find
        the instance.
        """
        # ``bundle.obj`` may be unset (or a bare placeholder); look it up then.
        if not hasattr(bundle.obj, 'delete'):
            try:
                bundle.obj = self.obj_get(bundle=bundle, **kwargs)
            except ObjectDoesNotExist:
                raise NotFound("A model instance matching the provided arguments could not be found.")
        # Raises if the requester may not delete this object.
        self.authorized_delete_detail(self.get_object_list(bundle.request), bundle)
        bundle.obj.delete()
    # NOTE(review): ``transaction.commit_on_success`` was deprecated in Django
    # 1.6 and removed in 1.8 (replaced by ``transaction.atomic``) -- confirm
    # the supported Django versions before upgrading.
    @transaction.commit_on_success()
    def patch_list(self, request, **kwargs):
        """
        An ORM-specific implementation of ``patch_list``.

        Necessary because PATCH should be atomic (all-success or all-fail)
        and the only way to do this neatly is at the database level.
        """
        return super(ModelResource, self).patch_list(request, **kwargs)
def rollback(self, bundles):
"""
A ORM-specific implementation of ``rollback``.
Given the list of bundles, delete all models pertaining to those
bundles.
"""
for bundle in bundles:
if bundle.obj and self.get_bundle_detail_data(bundle):
bundle.obj.delete()
def create_identifier(self, obj):
return u"%s.%s.%s" % (obj._meta.app_label, obj._meta.module_name, obj.pk)
    def save(self, bundle, skip_errors=False):
        """
        Validate, authorize and persist the bundle's object, including any
        related (FK) and then M2M data.
        """
        self.is_valid(bundle)
        if bundle.errors and not skip_errors:
            raise ImmediateHttpResponse(response=self.error_response(bundle.request, bundle.errors))
        # Check if they're authorized.
        # A truthy pk means this is an update; otherwise it is a create.
        if bundle.obj.pk:
            self.authorized_update_detail(self.get_object_list(bundle.request), bundle)
        else:
            self.authorized_create_detail(self.get_object_list(bundle.request), bundle)
        # Save FKs just in case.
        self.save_related(bundle)
        # Save the main object.
        bundle.obj.save()
        # Record it so recursive saves elsewhere don't save it twice.
        bundle.objects_saved.add(self.create_identifier(bundle.obj))
        # Now pick up the M2M bits.
        m2m_bundle = self.hydrate_m2m(bundle)
        self.save_m2m(m2m_bundle)
        return bundle
def save_related(self, bundle):
"""
Handles the saving of related non-M2M data.
Calling assigning ``child.parent = parent`` & then calling
``Child.save`` isn't good enough to make sure the ``parent``
is saved.
To get around this, we go through all our related fields &
call ``save`` on them if they have related, non-M2M data.
M2M data is handled by the ``ModelResource.save_m2m`` method.
"""
for field_name, field_object in self.fields.items():
if not getattr(field_object, 'is_related', False):
continue
if getattr(field_object, 'is_m2m', False):
continue
if not field_object.attribute:
continue
if field_object.readonly:
continue
if field_object.blank and not bundle.data.has_key(field_name):
continue
# Get the object.
try:
related_obj = getattr(bundle.obj, field_object.attribute)
except ObjectDoesNotExist:
related_obj = bundle.related_objects_to_save.get(field_object.attribute, None)
# Because sometimes it's ``None`` & that's OK.
if related_obj:
if field_object.related_name:
if not self.get_bundle_detail_data(bundle):
bundle.obj.save()
setattr(related_obj, field_object.related_name, bundle.obj)
related_resource = field_object.get_related_resource(related_obj)
# Before we build the bundle & try saving it, let's make sure we
# haven't already saved it.
obj_id = self.create_identifier(related_obj)
if obj_id in bundle.objects_saved:
# It's already been saved. We're done here.
continue
if bundle.data.get(field_name) and hasattr(bundle.data[field_name], 'keys'):
# Only build & save if there's data, not just a URI.
related_bundle = related_resource.build_bundle(
obj=related_obj,
data=bundle.data.get(field_name),
request=bundle.request,
objects_saved=bundle.objects_saved
)
related_resource.save(related_bundle)
setattr(bundle.obj, field_object.attribute, related_obj)
    def save_m2m(self, bundle):
        """
        Handles the saving of related M2M data.

        Due to the way Django works, the M2M data must be handled after the
        main instance, which is why this isn't a part of the main ``save`` bits.

        Currently slightly inefficient in that it will clear out the whole
        relation and recreate the related data as needed.
        """
        for field_name, field_object in self.fields.items():
            if not getattr(field_object, 'is_m2m', False):
                continue
            if not field_object.attribute:
                continue
            if field_object.readonly:
                continue
            # Get the manager.
            related_mngr = None
            # ``attribute`` may name the manager, or be a callable producing it.
            if isinstance(field_object.attribute, basestring):
                related_mngr = getattr(bundle.obj, field_object.attribute)
            elif callable(field_object.attribute):
                related_mngr = field_object.attribute(bundle)
            if not related_mngr:
                continue
            if hasattr(related_mngr, 'clear'):
                # FIXME: Dupe the original bundle, copy in the new object &
                #        check the perms on that (using the related resource)?
                # Clear it out, just to be safe.
                related_mngr.clear()
            related_objs = []
            for related_bundle in bundle.data[field_name]:
                related_resource = field_object.get_related_resource(bundle.obj)
                # Before we build the bundle & try saving it, let's make sure we
                # haven't already saved it.
                obj_id = self.create_identifier(related_bundle.obj)
                if obj_id in bundle.objects_saved:
                    # It's already been saved. We're done here.
                    continue
                # Only build & save if there's data, not just a URI.
                updated_related_bundle = related_resource.build_bundle(
                    obj=related_bundle.obj,
                    data=related_bundle.data,
                    request=bundle.request,
                    objects_saved=bundle.objects_saved
                )
                #Only save related models if they're newly added.
                if updated_related_bundle.obj._state.adding:
                    related_resource.save(updated_related_bundle)
                related_objs.append(updated_related_bundle.obj)
            # Re-attach everything (cleared above) in one call.
            related_mngr.add(*related_objs)
def detail_uri_kwargs(self, bundle_or_obj):
"""
Given a ``Bundle`` or an object (typically a ``Model`` instance),
it returns the extra kwargs needed to generate a detail URI.
By default, it uses the model's ``pk`` in order to create the URI.
"""
kwargs = {}
if isinstance(bundle_or_obj, Bundle):
kwargs[self._meta.detail_uri_name] = getattr(bundle_or_obj.obj, self._meta.detail_uri_name)
else:
kwargs[self._meta.detail_uri_name] = getattr(bundle_or_obj, self._meta.detail_uri_name)
return kwargs
class NamespacedModelResource(ModelResource):
    """
    A ModelResource subclass that respects Django namespaces.
    """
    def _build_reverse_url(self, name, args=None, kwargs=None):
        # Qualify the URL name with the configured namespace before reversing.
        namespaced_name = ':'.join((self._meta.urlconf_namespace, name))
        return reverse(namespaced_name, args=args, kwargs=kwargs)
# Based off of ``piston.utils.coerce_put_post``. Similarly BSD-licensed.
# And no, the irony is not lost on me.
def convert_post_to_VERB(request, verb):
    """
    Force Django to process the VERB.

    Temporarily relabels the request as a POST so Django's form/file parsing
    runs, then restores ``verb`` and exposes the parsed data as
    ``request.<verb>``.
    """
    if request.method != verb:
        return request
    if hasattr(request, '_post'):
        # Throw away any previously-parsed body so it gets re-read.
        del request._post
        del request._files
    try:
        request.method = "POST"
        request._load_post_and_files()
        request.method = verb
    except AttributeError:
        # Older Django: the method is only readable via request.META.
        request.META['REQUEST_METHOD'] = 'POST'
        request._load_post_and_files()
        request.META['REQUEST_METHOD'] = verb
    setattr(request, verb, request.POST)
    return request
def convert_post_to_put(request):
    """Coerce a PUT request body through Django's POST parsing machinery."""
    return convert_post_to_VERB(request, 'PUT')
def convert_post_to_patch(request):
    """Coerce a PATCH request body through Django's POST parsing machinery."""
    return convert_post_to_VERB(request, 'PATCH')
|
agpl-3.0
| 4,679,845,014,758,824,000
| 37.120434
| 213
| 0.600623
| false
|
MuffinMedic/CloudBot
|
cloudbot/hook.py
|
1
|
14146
|
import collections
import inspect
import re
from enum import Enum, unique, IntEnum
from cloudbot.event import EventType
# Command aliases must consist solely of "word" characters (letters, digits, "_").
valid_command_re = re.compile(r"^\w+$")
@unique
class Priority(IntEnum):
    """Symbolic hook priorities; lower numeric value means higher priority."""
    # Reversed to maintain compatibility with sieve hooks numeric priority
    LOWEST = 127
    LOW = 63
    NORMAL = 0
    HIGH = -64
    HIGHEST = -128
@unique
class Action(Enum):
    """Defines the action to take after executing a hook."""
    HALTTYPE = 0  # Once this hook executes, no other hook of that type should run
    HALTALL = 1  # Once this hook executes, No other hook should run
    CONTINUE = 2  # Normal execution of all hooks
class _Hook:
"""
:type function: function
:type type: str
:type kwargs: dict[str, unknown]
"""
def __init__(self, function, _type):
"""
:type function: function
:type _type: str
"""
self.function = function
self.type = _type
self.kwargs = {}
def _add_hook(self, kwargs):
"""
:type kwargs: dict[str, unknown]
"""
# update kwargs, overwriting duplicates
self.kwargs.update(kwargs)
class _CommandHook(_Hook):
    """Hook record for chat commands; collects every registered alias.

    :type main_alias: str
    :type aliases: set[str]
    """

    def __init__(self, function):
        """
        :type function: function
        """
        _Hook.__init__(self, function, "command")
        self.aliases = set()
        self.main_alias = None
        # First docstring line doubles as the command help text.
        doc = function.__doc__
        self.doc = doc.split('\n', 1)[0] if doc else None

    def add_hook(self, alias_param, kwargs):
        """Register alias(es) for this command; defaults to the function name.

        :type alias_param: list[str] | str
        """
        self._add_hook(kwargs)
        aliases = alias_param or self.function.__name__
        if isinstance(aliases, str):
            aliases = [aliases]
        if not self.main_alias:
            self.main_alias = aliases[0]
        for alias in aliases:
            if not valid_command_re.match(alias):
                raise ValueError("Invalid command name {}".format(alias))
        self.aliases.update(aliases)
class _RegexHook(_Hook):
    """Hook record matching incoming text against one or more regexes.

    :type regexes: list[re.__Regex]
    """

    def __init__(self, function):
        """
        :type function: function
        """
        _Hook.__init__(self, function, "regex")
        self.regexes = []

    def add_hook(self, regex_param, kwargs):
        """Register pattern(s): a string, a compiled pattern, or an iterable
        mixing both; strings are compiled.

        :type regex_param: Iterable[str | re.__Regex] | str | re.__Regex
        :type kwargs: dict[str, unknown]
        """
        self._add_hook(kwargs)
        # add all regex_parameters to valid regexes
        if isinstance(regex_param, str):
            # if the parameter is a string, compile and add
            self.regexes.append(re.compile(regex_param))
        elif hasattr(regex_param, "search"):
            # if the parameter is an re.__Regex, just add it
            # we only use regex.search anyways, so this is a good determiner
            self.regexes.append(regex_param)
        else:
            # FIX: ``collections.Iterable`` was deprecated since Python 3.3
            # and removed in 3.10; the ABC lives in ``collections.abc``.
            from collections.abc import Iterable
            assert isinstance(regex_param, Iterable)
            # if the parameter is a list, add each one
            for re_to_match in regex_param:
                if isinstance(re_to_match, str):
                    re_to_match = re.compile(re_to_match)
                else:
                    # make sure that the param is either a compiled regex, or has a search attribute.
                    assert hasattr(re_to_match, "search")
                self.regexes.append(re_to_match)
class _RawHook(_Hook):
    """Hook record for raw IRC lines, keyed by command/numeric triggers.

    :type triggers: set[str]
    """

    def __init__(self, function):
        """
        :type function: function
        """
        _Hook.__init__(self, function, "irc_raw")
        self.triggers = set()

    def add_hook(self, trigger_param, kwargs):
        """Register one trigger (str) or several (list of str).

        :type trigger_param: list[str] | str
        :type kwargs: dict[str, unknown]
        """
        self._add_hook(kwargs)
        if isinstance(trigger_param, str):
            trigger_param = [trigger_param]
        self.triggers.update(trigger_param)
class _PeriodicHook(_Hook):
    """Hook record for functions that run repeatedly on a timer."""
    def __init__(self, function):
        """
        :type function: function
        """
        _Hook.__init__(self, function, "periodic")
        # Default repeat interval, in seconds.
        self.interval = 60.0

    def add_hook(self, interval, kwargs):
        """
        :type interval: int
        :type kwargs: dict[str, unknown]
        """
        self._add_hook(kwargs)
        # A falsy interval (0/None) keeps the 60-second default.
        if interval:
            self.interval = interval
class _EventHook(_Hook):
    """Hook record fired for specific ``EventType`` values.

    :type types: set[cloudbot.event.EventType]
    """

    def __init__(self, function):
        """
        :type function: function
        """
        _Hook.__init__(self, function, "event")
        self.types = set()

    def add_hook(self, trigger_param, kwargs):
        """Register one EventType or several (any iterable of EventType).

        :type trigger_param: cloudbot.event.EventType | list[cloudbot.event.EventType]
        :type kwargs: dict[str, unknown]
        """
        self._add_hook(kwargs)
        if isinstance(trigger_param, EventType):
            trigger_param = [trigger_param]
        self.types.update(trigger_param)
class _CapHook(_Hook):
    """Hook record for IRC capability negotiation (type ``on_cap_<kind>``).

    :type caps: set[str]
    """
    def __init__(self, func, _type):
        super().__init__(func, "on_cap_{}".format(_type))
        self.caps = set()

    def add_hook(self, caps, kwargs):
        # Accumulate the capability names this hook cares about.
        self._add_hook(kwargs)
        self.caps.update(caps)
class _PermissionHook(_Hook):
    """Hook record for custom permission checks (type ``perm_check``).

    :type perms: set[str]
    """
    def __init__(self, func):
        super().__init__(func, "perm_check")
        self.perms = set()

    def add_hook(self, perms, kwargs):
        # Accumulate the permission names this hook handles.
        self._add_hook(kwargs)
        self.perms.update(perms)
def _add_hook(func, hook):
if not hasattr(func, "_cloudbot_hook"):
func._cloudbot_hook = {}
else:
assert hook.type not in func._cloudbot_hook # in this case the hook should be using the add_hook method
func._cloudbot_hook[hook.type] = hook
def _get_hook(func, hook_type):
if hasattr(func, "_cloudbot_hook") and hook_type in func._cloudbot_hook:
return func._cloudbot_hook[hook_type]
return None
def command(*args, **kwargs):
    """External command decorator. Can be used directly as a decorator, or with args to return a decorator.

    :type param: str | list[str] | function
    """
    def _register(func, alias_param=None):
        hook = _get_hook(func, "command")
        if hook is None:
            hook = _CommandHook(func)
            _add_hook(func, hook)
        hook.add_hook(alias_param, kwargs)
        return func

    if len(args) == 1 and callable(args[0]):
        # Bare ``@command`` usage: register under the function's own name.
        return _register(args[0])

    # ``@command("name", ...)`` usage: capture the aliases in a closure.
    def _decorator(func):
        return _register(func, alias_param=args)

    return _decorator
def irc_raw(triggers_param, **kwargs):
    """External raw decorator. Must be used as a function to return a decorator

    :type triggers_param: str | list[str]
    """
    if callable(triggers_param):
        # Bare ``@irc_raw`` usage is not supported; triggers are required.
        raise TypeError("@irc_raw() must be used as a function that returns a decorator")

    def _apply(func):
        hook = _get_hook(func, "irc_raw")
        if hook is None:
            hook = _RawHook(func)
            _add_hook(func, hook)
        hook.add_hook(triggers_param, kwargs)
        return func

    return _apply
def event(types_param, **kwargs):
    """External event decorator. Must be used as a function to return a decorator

    :type types_param: cloudbot.event.EventType | list[cloudbot.event.EventType]
    """
    def _event_hook(func):
        hook = _get_hook(func, "event")
        if hook is None:
            hook = _EventHook(func)
            _add_hook(func, hook)

        hook.add_hook(types_param, kwargs)
        return func

    if callable(types_param):  # this decorator is being used directly, which isn't good
        # FIX: the original message said "@irc_raw()" (copy/paste error).
        raise TypeError("@event() must be used as a function that returns a decorator")

    # this decorator is being used as a function, so return a decorator
    return lambda func: _event_hook(func)
def regex(regex_param, **kwargs):
    """External regex decorator. Must be used as a function to return a decorator.

    :type regex_param: str | re.__Regex | list[str | re.__Regex]
    """
    if callable(regex_param):
        # Bare ``@regex`` usage is not supported; a pattern is required.
        raise TypeError("@regex() hook must be used as a function that returns a decorator")

    def _apply(func):
        hook = _get_hook(func, "regex")
        if hook is None:
            hook = _RegexHook(func)
            _add_hook(func, hook)
        hook.add_hook(regex_param, kwargs)
        return func

    return _apply
def sieve(param=None, **kwargs):
    """External sieve decorator. Can be used directly as a decorator, or with args to return a decorator

    :type param: function | None
    """
    def _apply(func):
        # Sieves must accept exactly (bot, input, plugin).
        assert len(inspect.signature(func).parameters) == 3, \
            "Sieve plugin has incorrect argument count. Needs params: bot, input, plugin"

        hook = _get_hook(func, "sieve")
        if hook is None:
            # No dedicated subclass needed; the base record suffices.
            hook = _Hook(func, "sieve")
            _add_hook(func, hook)
        hook._add_hook(kwargs)
        return func

    if callable(param):
        return _apply(param)
    return _apply
def periodic(interval, **kwargs):
    """External periodic decorator. Must be used as a function to return a decorator

    :type interval: int
    """
    if callable(interval):
        # Bare ``@periodic`` usage is not supported; an interval is required.
        raise TypeError("@periodic() hook must be used as a function that returns a decorator")

    def _apply(func):
        hook = _get_hook(func, "periodic")
        if hook is None:
            hook = _PeriodicHook(func)
            _add_hook(func, hook)
        hook.add_hook(interval, kwargs)
        return func

    return _apply
def on_start(param=None, **kwargs):
    """External on_start decorator. Can be used directly as a decorator, or with args to return a decorator

    :type param: function | None
    """
    def _apply(func):
        hook = _get_hook(func, "on_start")
        if hook is None:
            hook = _Hook(func, "on_start")
            _add_hook(func, hook)
        hook._add_hook(kwargs)
        return func

    if callable(param):
        # Bare ``@on_start`` usage.
        return _apply(param)
    return _apply


# this is temporary, to ease transition
onload = on_start
def on_stop(param=None, **kwargs):
    """External on_stop decorator. Can be used directly as a decorator, or with args to return a decorator

    :type param: function | None
    """
    def _apply(func):
        hook = _get_hook(func, "on_stop")
        if hook is None:
            hook = _Hook(func, "on_stop")
            _add_hook(func, hook)
        hook._add_hook(kwargs)
        return func

    if callable(param):
        # Bare ``@on_stop`` usage.
        return _apply(param)
    return _apply


on_unload = on_stop
def on_cap_available(*caps, **kwargs):
    """External on_cap_available decorator. Must be used as a function that returns a decorator

    This hook will fire for each capability in a `CAP LS` response from the server
    """
    def _apply(func):
        hook = _get_hook(func, "on_cap_available")
        if hook is None:
            hook = _CapHook(func, "available")
            _add_hook(func, hook)
        hook.add_hook(caps, kwargs)
        return func

    return _apply
def on_cap_ack(*caps, **kwargs):
    """External on_cap_ack decorator. Must be used as a function that returns a decorator

    This hook will fire for each capability that is acknowledged from the server with `CAP ACK`
    """
    def _apply(func):
        hook = _get_hook(func, "on_cap_ack")
        if hook is None:
            hook = _CapHook(func, "ack")
            _add_hook(func, hook)
        hook.add_hook(caps, kwargs)
        return func

    return _apply
def on_connect(param=None, **kwargs):
    """External on_connect decorator. Can be used directly or as a decorator factory.

    :type param: function | None
    """
    def _apply(func):
        hook = _get_hook(func, "on_connect")
        if hook is None:
            hook = _Hook(func, "on_connect")
            _add_hook(func, hook)
        hook._add_hook(kwargs)
        return func

    if callable(param):
        # Bare ``@on_connect`` usage.
        return _apply(param)
    return _apply


connect = on_connect
def irc_out(param=None, **kwargs):
    """External irc_out decorator; fires for lines sent to the server.

    :type param: function | None
    """
    def _apply(func):
        hook = _get_hook(func, "irc_out")
        if hook is None:
            hook = _Hook(func, "irc_out")
            _add_hook(func, hook)
        hook._add_hook(kwargs)
        return func

    if callable(param):
        # Bare ``@irc_out`` usage.
        return _apply(param)
    return _apply
def post_hook(param=None, **kwargs):
    """
    This hook will be fired just after a hook finishes executing

    :type param: function | None
    """
    def _apply(func):
        hook = _get_hook(func, "post_hook")
        if hook is None:
            hook = _Hook(func, "post_hook")
            _add_hook(func, hook)
        hook._add_hook(kwargs)
        return func

    if callable(param):
        # Bare ``@post_hook`` usage.
        return _apply(param)
    return _apply
def permission(*perms, **kwargs):
    """External permission decorator. Must be used as a function that returns a decorator."""
    def _apply(func):
        hook = _get_hook(func, "perm_check")
        if hook is None:
            hook = _PermissionHook(func)
            _add_hook(func, hook)
        hook.add_hook(perms, kwargs)
        return func

    return _apply
|
gpl-3.0
| -573,451,123,311,893,000
| 26.901381
| 112
| 0.588294
| false
|
collinstocks/eventlet
|
eventlet/greenio/base.py
|
1
|
17181
|
import errno
import os
import socket
import sys
import time
import warnings
import eventlet
from eventlet.hubs import trampoline, notify_opened, IOClosed
from eventlet.support import get_errno, six
__all__ = [
    'GreenSocket', '_GLOBAL_DEFAULT_TIMEOUT', 'set_nonblocking',
    'SOCKET_BLOCKING', 'SOCKET_CLOSED', 'CONNECT_ERR', 'CONNECT_SUCCESS',
    'shutdown_safe', 'SSL',
]
BUFFER_SIZE = 4096
# errno values meaning a non-blocking connect() is still in progress.
CONNECT_ERR = set((errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK))
# errno values meaning the socket is (already) connected.
CONNECT_SUCCESS = set((0, errno.EISCONN))
if sys.platform[:3] == "win":
    CONNECT_ERR.add(errno.WSAEINVAL)  # Bug 67
if six.PY2:
    # Python 2 only: the file-object wrapper used by makefile().
    _python2_fileobject = socket._fileobject
# The unpatched socket class, so green sockets wrap real OS sockets.
_original_socket = eventlet.patcher.original('socket').socket
def socket_connect(descriptor, address):
    """
    Attempts to connect to the address, returns the descriptor if it succeeds,
    returns None if it needs to trampoline, and raises any exceptions.
    """
    err = descriptor.connect_ex(address)
    if err in CONNECT_ERR:
        # In progress / would block: the caller must wait on the hub.
        return None
    if err in CONNECT_SUCCESS:
        return descriptor
    raise socket.error(err, errno.errorcode[err])
def socket_checkerr(descriptor):
    """Raise ``socket.error`` if a non-blocking connect on *descriptor* failed.

    ``SO_ERROR`` holds the pending errno; 0/EISCONN count as success.
    """
    err = descriptor.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
    if err not in CONNECT_SUCCESS:
        raise socket.error(err, errno.errorcode[err])
def socket_accept(descriptor):
    """
    Attempts to accept() on the descriptor, returns a client,address tuple
    if it succeeds; returns None if it needs to trampoline, and raises
    any exceptions.
    """
    try:
        return descriptor.accept()
    except socket.error as e:
        # EWOULDBLOCK just means "nothing pending yet" -- signal trampoline.
        if get_errno(e) != errno.EWOULDBLOCK:
            raise
        return None
# errno sets distinguishing "try again later" from "the peer went away".
if sys.platform[:3] == "win":
    # winsock sometimes throws ENOTCONN
    SOCKET_BLOCKING = set((errno.EAGAIN, errno.EWOULDBLOCK,))
    SOCKET_CLOSED = set((errno.ECONNRESET, errno.ENOTCONN, errno.ESHUTDOWN))
else:
    # oddly, on linux/darwin, an unconnected socket is expected to block,
    # so we treat ENOTCONN the same as EWOULDBLOCK
    SOCKET_BLOCKING = set((errno.EAGAIN, errno.EWOULDBLOCK, errno.ENOTCONN))
    SOCKET_CLOSED = set((errno.ECONNRESET, errno.ESHUTDOWN, errno.EPIPE))
def set_nonblocking(fd):
    """
    Sets the descriptor to be nonblocking. Works on many file-like
    objects as well as sockets. Only sockets can be nonblocking on
    Windows, however.
    """
    setblocking = getattr(fd, 'setblocking', None)
    if setblocking is not None:
        # The object exposes the socket API; use it directly.
        setblocking(0)
        return
    # fd has no setblocking() method. It could be that this version of
    # Python predates socket.setblocking(). In that case, we can still set
    # the flag "by hand" on the underlying OS fileno using the fcntl
    # module.
    try:
        import fcntl
    except ImportError:
        # Whoops, Windows has no fcntl module. This might not be a socket
        # at all, but rather a file-like object with no setblocking()
        # method. In particular, on Windows, pipes don't support
        # non-blocking I/O and therefore don't have that method. Which
        # means fcntl wouldn't help even if we could load it.
        raise NotImplementedError("set_nonblocking() on a file object "
                                  "with no setblocking() method "
                                  "(Windows pipes don't support non-blocking I/O)")
    # We managed to import fcntl.
    fileno = fd.fileno()
    orig_flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
    new_flags = orig_flags | os.O_NONBLOCK
    if new_flags != orig_flags:
        fcntl.fcntl(fileno, fcntl.F_SETFL, new_flags)
try:
    from socket import _GLOBAL_DEFAULT_TIMEOUT
except ImportError:
    # Older Pythons lack the sentinel; any unique object works as a stand-in.
    _GLOBAL_DEFAULT_TIMEOUT = object()
class GreenSocket(object):
    """
    Green version of socket.socket class, that is intended to be 100%
    API-compatible.

    It also recognizes the keyword parameter, 'set_nonblocking=True'.
    Pass False to indicate that socket is already in non-blocking mode
    to save syscalls.
    """
    # This placeholder is to prevent __getattr__ from creating an infinite call loop
    fd = None
def __init__(self, family_or_realsock=socket.AF_INET, *args, **kwargs):
should_set_nonblocking = kwargs.pop('set_nonblocking', True)
if isinstance(family_or_realsock, six.integer_types):
fd = _original_socket(family_or_realsock, *args, **kwargs)
# Notify the hub that this is a newly-opened socket.
notify_opened(fd.fileno())
else:
fd = family_or_realsock
# import timeout from other socket, if it was there
try:
self._timeout = fd.gettimeout() or socket.getdefaulttimeout()
except AttributeError:
self._timeout = socket.getdefaulttimeout()
if should_set_nonblocking:
set_nonblocking(fd)
self.fd = fd
# when client calls setblocking(0) or settimeout(0) the socket must
# act non-blocking
self.act_non_blocking = False
# Copy some attributes from underlying real socket.
# This is the easiest way that i found to fix
# https://bitbucket.org/eventlet/eventlet/issue/136
# Only `getsockopt` is required to fix that issue, others
# are just premature optimization to save __getattr__ call.
self.bind = fd.bind
self.close = fd.close
self.fileno = fd.fileno
self.getsockname = fd.getsockname
self.getsockopt = fd.getsockopt
self.listen = fd.listen
self.setsockopt = fd.setsockopt
self.shutdown = fd.shutdown
self._closed = False
@property
def _sock(self):
return self
if six.PY3:
def _get_io_refs(self):
return self.fd._io_refs
def _set_io_refs(self, value):
self.fd._io_refs = value
_io_refs = property(_get_io_refs, _set_io_refs)
# Forward unknown attributes to fd, cache the value for future use.
# I do not see any simple attribute which could be changed
# so caching everything in self is fine.
# If we find such attributes - only attributes having __get__ might be cached.
# For now - I do not want to complicate it.
def __getattr__(self, name):
if self.fd is None:
raise AttributeError(name)
attr = getattr(self.fd, name)
setattr(self, name, attr)
return attr
def _trampoline(self, fd, read=False, write=False, timeout=None, timeout_exc=None):
""" We need to trampoline via the event hub.
We catch any signal back from the hub indicating that the operation we
were waiting on was associated with a filehandle that's since been
invalidated.
"""
if self._closed:
# If we did any logging, alerting to a second trampoline attempt on a closed
# socket here would be useful.
raise IOClosed()
try:
return trampoline(fd, read=read, write=write, timeout=timeout,
timeout_exc=timeout_exc,
mark_as_closed=self._mark_as_closed)
except IOClosed:
# This socket's been obsoleted. De-fang it.
self._mark_as_closed()
raise
def accept(self):
if self.act_non_blocking:
return self.fd.accept()
fd = self.fd
while True:
res = socket_accept(fd)
if res is not None:
client, addr = res
set_nonblocking(client)
return type(self)(client), addr
self._trampoline(fd, read=True, timeout=self.gettimeout(),
timeout_exc=socket.timeout("timed out"))
def _mark_as_closed(self):
""" Mark this socket as being closed """
self._closed = True
def __del__(self):
# This is in case self.close is not assigned yet (currently the constructor does it)
close = getattr(self, 'close', None)
if close is not None:
close()
def connect(self, address):
if self.act_non_blocking:
return self.fd.connect(address)
fd = self.fd
if self.gettimeout() is None:
while not socket_connect(fd, address):
try:
self._trampoline(fd, write=True)
except IOClosed:
raise socket.error(errno.EBADFD)
socket_checkerr(fd)
else:
end = time.time() + self.gettimeout()
while True:
if socket_connect(fd, address):
return
if time.time() >= end:
raise socket.timeout("timed out")
try:
self._trampoline(fd, write=True, timeout=end - time.time(),
timeout_exc=socket.timeout("timed out"))
except IOClosed:
# ... we need some workable errno here.
raise socket.error(errno.EBADFD)
socket_checkerr(fd)
def connect_ex(self, address):
if self.act_non_blocking:
return self.fd.connect_ex(address)
fd = self.fd
if self.gettimeout() is None:
while not socket_connect(fd, address):
try:
self._trampoline(fd, write=True)
socket_checkerr(fd)
except socket.error as ex:
return get_errno(ex)
except IOClosed:
return errno.EBADFD
else:
end = time.time() + self.gettimeout()
while True:
try:
if socket_connect(fd, address):
return 0
if time.time() >= end:
raise socket.timeout(errno.EAGAIN)
self._trampoline(fd, write=True, timeout=end - time.time(),
timeout_exc=socket.timeout(errno.EAGAIN))
socket_checkerr(fd)
except socket.error as ex:
return get_errno(ex)
except IOClosed:
return errno.EBADFD
def dup(self, *args, **kw):
sock = self.fd.dup(*args, **kw)
newsock = type(self)(sock, set_nonblocking=False)
newsock.settimeout(self.gettimeout())
return newsock
if six.PY3:
def makefile(self, *args, **kwargs):
return _original_socket.makefile(self, *args, **kwargs)
else:
def makefile(self, *args, **kwargs):
dupped = self.dup()
res = _python2_fileobject(dupped, *args, **kwargs)
if hasattr(dupped, "_drop"):
dupped._drop()
return res
def makeGreenFile(self, *args, **kw):
warnings.warn("makeGreenFile has been deprecated, please use "
"makefile instead", DeprecationWarning, stacklevel=2)
return self.makefile(*args, **kw)
def _read_trampoline(self):
self._trampoline(
self.fd,
read=True,
timeout=self.gettimeout(),
timeout_exc=socket.timeout("timed out"))
def _recv_loop(self, recv_meth, *args):
fd = self.fd
if self.act_non_blocking:
return recv_meth(*args)
while True:
try:
# recv: bufsize=0?
# recv_into: buffer is empty?
# This is needed because behind the scenes we use sockets in
# nonblocking mode and builtin recv* methods. Attempting to read
# 0 bytes from a nonblocking socket using a builtin recv* method
# does not raise a timeout exception. Since we're simulating
# a blocking socket here we need to produce a timeout exception
# if needed, hence the call to trampoline.
if not args[0]:
self._read_trampoline()
return recv_meth(*args)
except socket.error as e:
if get_errno(e) in SOCKET_BLOCKING:
pass
elif get_errno(e) in SOCKET_CLOSED:
return b''
else:
raise
try:
self._read_trampoline()
except IOClosed as e:
# Perhaps we should return '' instead?
raise EOFError()
def recv(self, bufsize, flags=0):
return self._recv_loop(self.fd.recv, bufsize, flags)
def recvfrom(self, bufsize, flags=0):
return self._recv_loop(self.fd.recvfrom, bufsize, flags)
def recv_into(self, buffer, nbytes=0, flags=0):
return self._recv_loop(self.fd.recv_into, buffer, nbytes, flags)
def recvfrom_into(self, buffer, nbytes=0, flags=0):
return self._recv_loop(self.fd.recvfrom_into, buffer, nbytes, flags)
def _send_loop(self, send_method, data, *args):
if self.act_non_blocking:
return send_method(data, *args)
while 1:
try:
return send_method(data, *args)
except socket.error as e:
eno = get_errno(e)
if eno == errno.ENOTCONN or eno not in SOCKET_BLOCKING:
raise
try:
self._trampoline(self.fd, write=True, timeout=self.gettimeout(),
timeout_exc=socket.timeout("timed out"))
except IOClosed:
raise socket.error(errno.ECONNRESET, 'Connection closed by another thread')
def send(self, data, flags=0):
return self._send_loop(self.fd.send, data, flags)
def sendto(self, data, *args):
return self._send_loop(self.fd.sendto, data, *args)
def sendall(self, data, flags=0):
tail = self.send(data, flags)
len_data = len(data)
while tail < len_data:
tail += self.send(data[tail:], flags)
def setblocking(self, flag):
if flag:
self.act_non_blocking = False
self._timeout = None
else:
self.act_non_blocking = True
self._timeout = 0.0
def settimeout(self, howlong):
if howlong is None or howlong == _GLOBAL_DEFAULT_TIMEOUT:
self.setblocking(True)
return
try:
f = howlong.__float__
except AttributeError:
raise TypeError('a float is required')
howlong = f()
if howlong < 0.0:
raise ValueError('Timeout value out of range')
if howlong == 0.0:
self.act_non_blocking = True
self._timeout = 0.0
else:
self.act_non_blocking = False
self._timeout = howlong
def gettimeout(self):
return self._timeout
if "__pypy__" in sys.builtin_module_names:
def _reuse(self):
getattr(self.fd, '_sock', self.fd)._reuse()
def _drop(self):
getattr(self.fd, '_sock', self.fd)._drop()
def _operation_on_closed_file(*args, **kwargs):
    # Catch-all stub: any invocation signals use of a closed file.
    raise ValueError("I/O operation on closed file")
# Shared docstring text describing GreenPipe's contract; typos fixed
# ("Differneces" -> "Differences", "Should re" -> "Should be",
# "implementeded" -> "implemented") and punctuation repaired.
greenpipe_doc = """
GreenPipe is a cooperative replacement for file class.
It will cooperate on pipes. It will block on regular file.
Differences from file class:
- mode is r/w property. Should be r/o
- encoding property not implemented
- write/writelines will not raise TypeError exception when non-string data is written;
it will write str(data) instead
- Universal new lines are not supported and newlines property not implemented
- file argument can be descriptor, file name or file object.
"""
# import SSL module here so we can refer to greenio.SSL.exceptionclass
try:
    from OpenSSL import SSL
except ImportError:
    # pyOpenSSL not installed, define exceptions anyway for convenience
    class SSL(object):
        # Stand-in namespace mirroring pyOpenSSL's exception names so
        # "except SSL.WantReadError"-style code still imports and runs.
        class WantWriteError(Exception):
            pass
        class WantReadError(Exception):
            pass
        class ZeroReturnError(Exception):
            pass
        class SysCallError(Exception):
            pass
def shutdown_safe(sock):
    """ Shuts down the socket. This is a convenience method for
    code that wants to gracefully handle regular sockets, SSL.Connection
    sockets from PyOpenSSL and ssl.SSLSocket objects from Python 2.6
    interchangeably. Both types of ssl socket require a shutdown() before
    close, but they have different arity on their shutdown method.
    Regular sockets don't need a shutdown before close, but it doesn't hurt.
    """
    try:
        shutdown = sock.shutdown
        try:
            # socket and ssl.SSLSocket take a "how" argument...
            return shutdown(socket.SHUT_RDWR)
        except TypeError:
            # ...while SSL.Connection.shutdown() takes none.
            return shutdown()
    except socket.error as e:
        # Swallow "already closed" errors; they are common in an http
        # server context and not worth surfacing.
        if get_errno(e) not in (errno.ENOTCONN, errno.EBADF):
            raise
|
mit
| 3,253,789,823,497,832,000
| 34.645228
| 92
| 0.584483
| false
|
mgrygoriev/CloudFerry
|
cloudferrylib/os/estimation/procedures.py
|
1
|
6018
|
# Copyright (c) 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.
import heapq
from cloudferrylib.os.discovery import nova
from cloudferrylib.os.discovery import cinder
from cloudferrylib.os.discovery import glance
from cloudferrylib.os.discovery import model
from cloudferrylib.utils import sizeof_format
def list_filtered(session, cls, cloud_name, tenant):
    """Return an iterator over *cls* objects discovered in *cloud_name*,
    keeping only those owned by *tenant* (or everything when tenant is
    None)."""
    discovered = session.list(cls, cloud_name)
    return (obj for obj in discovered
            if tenant is None or obj.tenant.object_id.id == tenant)
def estimate_copy(cfg, migration_name):
    """Print size totals for the images, ephemeral disks and volumes that
    the named migration would copy, counting each shared image/volume
    only once."""
    migration = cfg.migrations[migration_name]
    query = migration.query
    src_cloud = migration.source
    with model.Session() as session:
        total_ephemeral_size = 0
        total_volume_size = 0
        total_image_size = 0
        # Track object ids already counted via a server so the standalone
        # volume/image passes below don't count them twice.
        accounted_volumes = set()
        accounted_images = set()
        for server in query.search(session, src_cloud, nova.Server):
            for ephemeral_disk in server.ephemeral_disks:
                total_ephemeral_size += ephemeral_disk.size
            if server.image is not None \
                    and server.image.object_id not in accounted_images:
                total_image_size += server.image.size
                accounted_images.add(server.image.object_id)
            for volume in server.attached_volumes:
                if volume.object_id not in accounted_volumes:
                    total_volume_size += volume.size
                    accounted_volumes.add(volume.object_id)
        # Volumes/images matched by the query but not attached to any
        # matched server still count toward the totals.
        for volume in query.search(session, src_cloud, cinder.Volume):
            if volume.object_id not in accounted_volumes:
                total_volume_size += volume.size
        for image in query.search(session, src_cloud, glance.Image):
            if image.object_id not in accounted_images:
                total_image_size += image.size
    print 'Migration', migration_name, 'estimates:'
    print 'Images:'
    print ' Size:', sizeof_format.sizeof_fmt(total_image_size)
    print 'Ephemeral disks:'
    print ' Size:', sizeof_format.sizeof_fmt(total_ephemeral_size)
    print 'Volumes:'
    print ' Size:', sizeof_format.sizeof_fmt(total_volume_size, 'G')
def show_largest_servers(cfg, count, migration_name):
    """Print the *count* largest servers matched by the named migration,
    sized as image + ephemeral disks + attached volumes."""
    def server_size(server):
        # Total footprint of one server in bytes-equivalent units.
        size = 0
        if server.image is not None:
            size += server.image.size
        for ephemeral_disk in server.ephemeral_disks:
            size += ephemeral_disk.size
        for volume in server.attached_volumes:
            size += volume.size
        return size
    output = []
    migration = cfg.migrations[migration_name]
    with model.Session() as session:
        for index, server in enumerate(
                heapq.nlargest(
                    count,
                    migration.query.search(session, migration.source,
                                           nova.Server),
                    key=server_size),
                start=1):
            output.append(
                ' {0}. {1.object_id.id} {1.name} - {2}'.format(
                    index, server,
                    sizeof_format.sizeof_fmt(server_size(server))))
    if output:
        print '\n{0} largest servers:'.format(len(output))
        for line in output:
            print line
def show_largest_unused_resources(count, cloud_name, tenant):
    """Print the *count* largest volumes and images in *cloud_name* that
    are not referenced by any server (optionally restricted to one
    tenant)."""
    with model.Session() as session:
        # First collect the ids of everything a server references.
        used_volumes = set()
        used_images = set()
        servers = list_filtered(session, nova.Server, cloud_name, tenant)
        for server in servers:
            if server.image is not None:
                used_images.add(server.image.object_id)
            for volume in server.attached_volumes:
                used_volumes.add(volume.object_id)
        # Find unused volumes
        volumes_output = []
        volumes_size = 0
        volumes = list_filtered(session, cinder.Volume, cloud_name, tenant)
        for index, volume in enumerate(
                heapq.nlargest(count,
                               (v for v in volumes
                                if v.object_id not in used_volumes),
                               key=lambda v: v.size),
                start=1):
            volumes_size += volume.size
            size = sizeof_format.sizeof_fmt(volume.size, 'G')
            volumes_output.append(
                ' {0:3d}. {1.object_id.id} {2:10s} {1.name}'.format(
                    index, volume, size))
        # Find unused images
        images_output = []
        images_size = 0
        images = list_filtered(session, glance.Image, cloud_name, tenant)
        for index, image in enumerate(
                heapq.nlargest(count,
                               (i for i in images
                                if i.object_id not in used_images),
                               key=lambda i: i.size),
                start=1):
            images_size += image.size
            size = sizeof_format.sizeof_fmt(image.size)
            images_output.append(
                ' {0:3d}. {1.object_id.id} {2:10s} {1.name}'.format(
                    index, image, size))
    # Output result
    if volumes_output:
        print '\n{0} largest unused volumes:'.format(len(volumes_output))
        for line in volumes_output:
            print line
        print ' Total:', sizeof_format.sizeof_fmt(volumes_size, 'G')
    if images_output:
        print '\n{0} largest unused images:'.format(len(images_output))
        for line in images_output:
            print line
        print ' Total:', sizeof_format.sizeof_fmt(images_size)
|
apache-2.0
| 263,699,053,650,078,880
| 38.333333
| 75
| 0.588069
| false
|
rksaxena/crawler_templates
|
crawler_type2/crawler_type2/spiders/type_two.py
|
1
|
1779
|
# -*- coding: utf-8 -*-
import scrapy
import crawler_type2.config as config
from crawler_type2.items import CrawlerType2Item
import urlparse
class TypeTwo(scrapy.Spider):
    """Two-level crawler: walks a listing page for links, then scrapes
    heading/text/image content from each linked page.  The concrete
    XPaths and URLs come from config.SOURCES, selected by the 'source'
    spider argument."""
    name = 'crawler_type2'
    def start_requests(self):
        # 'source' is supplied as a spider argument; it must name an
        # entry in config.SOURCES.
        source = getattr(self, 'source', None)
        if source is None or source not in config.SOURCES:
            raise Exception("Invalid source!!!")
        self.crawl_source = config.SOURCES[source]
        self.allowed_domains = self.crawl_source['ALLOWED_DOMAINS']
        self.start_urls = self.crawl_source['START_URLS']
        yield scrapy.Request(self.start_urls[0], callback=self.parse)
    def parse(self, response):
        # Listing page: queue one detail-page request per extracted link,
        # resolving relative hrefs against the configured base URL.
        for href in response.xpath(self.crawl_source['LIST_PAGE_XPATH']):
            url = urlparse.urljoin(self.crawl_source['BASE_URL'], href.extract())
            print 'Sending request for url : ' + url
            req = scrapy.Request(url, callback=self.parse_item)
            # for key in response.meta.keys():
            #     req.meta[key] = response.meta[key]
            yield req
    def parse_item(self, response):
        # Detail page: collect heading/text/img using the configured
        # XPaths; only items with non-empty text are emitted.
        print "parse item for url %s" % (response.request.url)
        item = CrawlerType2Item()
        for element in response.xpath(self.crawl_source['BLOG_CONTENT_XPATH']):
            heading = element.xpath(self.crawl_source['HEADING_XPATH']).extract()
            text = element.xpath(self.crawl_source['TEXT_XPATH']).extract()
            heading = [t.strip() for t in heading]
            text = [t.strip() for t in text]
            item['heading'] = " ".join(heading)
            item['text'] = " ".join(text)
            item['img'] = element.xpath(self.crawl_source['IMG_XPATH']).extract()
        if 'text' in item and len(item['text']) > 0:
            yield item
|
mit
| 1,685,810,864,093,220,900
| 39.431818
| 81
| 0.610455
| false
|
leppa/home-assistant
|
homeassistant/components/light/device_condition.py
|
1
|
1206
|
"""Provides device conditions for lights."""
from typing import Dict, List
import voluptuous as vol
from homeassistant.components.device_automation import toggle_entity
from homeassistant.const import CONF_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import ConditionCheckerType
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
# Light conditions reuse the generic toggle-entity schema, restricted to
# this integration's domain.
CONDITION_SCHEMA = toggle_entity.CONDITION_SCHEMA.extend(
    {vol.Required(CONF_DOMAIN): DOMAIN}
)
def async_condition_from_config(
    config: ConfigType, config_validation: bool
) -> ConditionCheckerType:
    """Build a condition checker for the given config, optionally
    validating it against CONDITION_SCHEMA first."""
    validated = CONDITION_SCHEMA(config) if config_validation else config
    return toggle_entity.async_condition_from_config(validated)
async def async_get_conditions(
    hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
    """Return the device conditions offered for this device's light
    entities, delegating to the shared toggle-entity helper."""
    conditions = await toggle_entity.async_get_conditions(hass, device_id, DOMAIN)
    return conditions
async def async_get_condition_capabilities(hass: HomeAssistant, config: dict) -> dict:
    """Return the extra-fields schema for a light device condition,
    delegating to the shared toggle-entity helper."""
    capabilities = await toggle_entity.async_get_condition_capabilities(hass, config)
    return capabilities
|
apache-2.0
| -2,319,655,593,008,458,000
| 31.594595
| 86
| 0.766169
| false
|
piaoyaoshi/inf1340_2015_asst1
|
exercise3.py
|
1
|
3174
|
#!/usr/bin/env python
""" Assignment 1, Exercise 3, INF1340, Fall, 2015. Troubleshooting Car Issues.
This module contains one function diagnose_car(). It is an expert system to
interactive diagnose car issues.
"""
__author__ = 'Susan Sim'
__email__ = "ses@drsusansim.org"
__copyright__ = "2015 Susan Sim"
__license__ = "MIT License"
# ERROR_MESSAGE in case of improper user input: shown whenever the user
# answers anything other than Y or N, before the question is re-asked.
ERROR_MESSAGE = 'I don\'t understand'
# ========================================== Main Function =================================================
def diagnose_car():
    """
    Interactively queries the user with yes/no questions to identify a
    possible issue with a car.
    Test Cases
    Inputs:Y,N
    Expected: Replace cables and try again.
    Inputs:Y,Y
    Expected: Clean terminals and try starting again.
    Inputs:N,Y
    Expected:Replace the battery.
    Inputs:N,N,Y
    Expected:Check spark plug connections.
    Inputs:N,N,N,N
    Expected: Engine is not getting enough fuel. Clean fuel pump.
    Inputs:N,N,N,Y,Y
    Expected: Get it in for service.
    Inputs:N,N,N,Y,N
    Expected: Check to ensure the choke is opening and closing.
    """
    # Entry point: walk the decision tree starting at the root question.
    check_silence()
# diagnose_car()
# ========================================== Helper Functions ================================================
# Each function represents a box on flowchart and supports re-usability
# This function is the first box in the flow chart
def check_silence():
    # Root question of the decision tree; Y goes down the battery branch,
    # N down the noise branch, anything else re-asks.
    silent = raw_input("Is the car silent when you turn the key?")
    if silent == "Y":
        check_battery()
    elif silent == "N":
        check_noise()
    else:
        print (ERROR_MESSAGE)
        check_silence()
# This is the left side of the flowchart
def check_battery():
    # Terminal question on the silent-car branch; prints the diagnosis.
    corroded = raw_input("Are the battery terminals corroded?")
    if corroded == "Y":
        print ("Clean terminals and try starting again.")
    elif corroded == "N":
        print ("Replace cables and try again.")
    else:
        print (ERROR_MESSAGE)
        check_battery()
# Everything below is the right side of the flow chart
def check_noise():
    # First question on the noisy-car branch; N descends to check_crank.
    click = raw_input("Does the car make a clicking noise?")
    if click == "Y":
        print ("Replace the battery.")
    elif click == "N":
        check_crank()
    else:
        print (ERROR_MESSAGE)
        check_noise()
def check_crank():
    # Cranks but won't start -> spark plugs; otherwise descend further.
    crank = raw_input("Does the car crank up but fail to start?")
    if crank == "Y":
        print ("Check spark plug connections.")
    elif crank == "N":
        check_engine()
    else:
        print (ERROR_MESSAGE)
        check_crank()
def check_engine():
    # Starts-then-dies descends to the fuel-injection question.
    engine = raw_input("Does engine start and then die?")
    if engine == "Y":
        check_fuel()
    elif engine == "N":
        print ("Engine is not getting enough fuel. Clean fuel pump.")
    else:
        print (ERROR_MESSAGE)
        check_engine()
def check_fuel():
    # Deepest question; both answers print a final diagnosis.
    fuel = raw_input("Does your car have fuel injection?")
    if fuel == "N":
        print ("Check to ensure the choke is opening and closing.")
    elif fuel == "Y":
        print ("Get it in for service.")
    else:
        print (ERROR_MESSAGE)
        check_fuel()
|
mit
| -2,540,198,713,976,936,400
| 23.604651
| 110
| 0.587902
| false
|
cnddu/cactusblog
|
form/post.py
|
1
|
1329
|
#!/usr/bin/env python
# coding=utf-8
#
# Copyright 2013 meiritugua.com
from wtforms import TextField, HiddenField, validators
from lib.forms import Form
class NewForm(Form):
    # Form for creating a post: title (3-100 chars), content and tag are
    # all required.  Validation messages are user-facing Chinese text and
    # must not be altered.
    title = TextField('Title', [
        validators.Required(message = "请填写帖子标题"),
        validators.Length(min = 3, message = "帖子标题长度过短(3-100个字符)"),
        validators.Length(max = 100, message = "帖子标题长度过长(3-100个字符)"),
    ])
    content = TextField('Content', [
        validators.Required(message = "请填写帖子简介"),
    ])
    tag= TextField('Tag', [
        validators.Required(message = "请填写帖子标签"),
    ])
class EditTagForm(Form):
    # Form for editing a tag: only the name is required (3-100 chars);
    # the remaining metadata fields are optional.
    name = TextField('Name', [
        validators.Required(message = "请填写标签名称"),
        validators.Length(min = 3, message = "标签名称长度过短(3-100个字符)"),
        validators.Length(max = 100, message = "标签名称长度过长(3-100个字符)"),
    ])
    intro = TextField('Intro', [
        validators.Optional(),
    ])
    category = TextField('Category', [
        validators.Optional(),
    ])
    tag_type = TextField('Tag_type', [
        validators.Optional(),
    ])
    tag = TextField('Tag', [
        validators.Optional(),
    ])
|
gpl-3.0
| -6,134,245,195,717,030,000
| 23.354167
| 70
| 0.585115
| false
|
kartoza/jakarta-flood-maps
|
django_project/core/wsgi.py
|
1
|
1458
|
# -*- coding: utf-8 -*-
"""
WSGI config for flood_mapper project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "flood_mapper.settings"
# Fall back to the production settings module when the environment does
# not already specify DJANGO_SETTINGS_MODULE.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "core.settings.prod")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
bsd-2-clause
| -1,808,087,254,042,629,600
| 43.181818
| 79
| 0.786694
| false
|
zsdonghao/tensorlayer
|
tests/test_layers_merge.py
|
1
|
6058
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
import tensorlayer as tl
from tests.utils import CustomTestCase
class Layer_Merge_Test(CustomTestCase):
    """Integration checks for TensorLayer merge layers.

    setUpClass builds six small graphs (Concat / Elementwise /
    ElementwiseLambda, over both dense vectors and conv feature maps)
    and records their layer lists, parameter lists and parameter counts;
    the test_* methods assert only on those recorded values.
    """
    @classmethod
    def setUpClass(cls):
        cls.data = dict()
        ##############
        # vector #
        ##############
        x = tf.placeholder(tf.float32, shape=[None, 784])
        inputs = tl.layers.InputLayer(x, name='input_layer')
        # Concat of two 100-unit dense branches along the feature axis.
        net_v1_1 = tl.layers.DenseLayer(inputs, n_units=100, act=tf.nn.relu, name='relu1_1')
        net_v1_2 = tl.layers.DenseLayer(inputs, n_units=100, act=tf.nn.relu, name='relu2_1')
        net_v1 = tl.layers.ConcatLayer([net_v1_1, net_v1_2], concat_dim=1, name='concat_layer')
        net_v1.print_params(False)
        net_v1.print_layers()
        cls.data["net_vector1"] = dict()
        cls.data["net_vector1"]["layers"] = net_v1.all_layers
        cls.data["net_vector1"]["params"] = net_v1.all_params
        cls.data["net_vector1"]["n_params"] = net_v1.count_params()
        # Element-wise minimum of two dense branches.
        net_v2_1 = tl.layers.DenseLayer(inputs, n_units=100, act=tf.nn.relu, name='net_0')
        net_v2_2 = tl.layers.DenseLayer(inputs, n_units=100, act=tf.nn.relu, name='net_1')
        net_v2 = tl.layers.ElementwiseLayer([net_v2_1, net_v2_2], combine_fn=tf.minimum, name='minimum')
        net_v2.print_params(False)
        net_v2.print_layers()
        cls.data["net_vector2"] = dict()
        cls.data["net_vector2"]["layers"] = net_v2.all_layers
        cls.data["net_vector2"]["params"] = net_v2.all_params
        cls.data["net_vector2"]["n_params"] = net_v2.count_params()
        # Element-wise product via a custom lambda.
        net_v3_1 = tl.layers.DenseLayer(inputs, n_units=100, act=tf.nn.relu, name='net_a')
        net_v3_2 = tl.layers.DenseLayer(inputs, n_units=100, act=tf.nn.relu, name='net_b')
        net_v3 = tl.layers.ElementwiseLambdaLayer([net_v3_1, net_v3_2], fn=lambda a, b: a * b, name='multiply')
        net_v3.print_params(False)
        net_v3.print_layers()
        cls.data["net_vector3"] = dict()
        cls.data["net_vector3"]["layers"] = net_v3.all_layers
        cls.data["net_vector3"]["params"] = net_v3.all_params
        cls.data["net_vector3"]["n_params"] = net_v3.count_params()
        #############
        # Image #
        #############
        x = tf.placeholder(tf.float32, shape=[None, 100, 100, 3])
        inputs = tl.layers.InputLayer(x, name='input')
        # Two stride-2 conv branches; concat on the channel axis.
        net_im1_1 = tl.layers.Conv2d(inputs, n_filter=32, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, name='c1')
        net_im1_2 = tl.layers.Conv2d(inputs, n_filter=32, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, name='c2')
        net_im1 = tl.layers.ConcatLayer([net_im1_1, net_im1_2], concat_dim=-1, name='concat')
        net_im1.print_params(False)
        net_im1.print_layers()
        cls.data["net_image1"] = dict()
        cls.data["net_image1"]["shape"] = net_im1.outputs.get_shape().as_list()
        cls.data["net_image1"]["layers"] = net_im1.all_layers
        cls.data["net_image1"]["params"] = net_im1.all_params
        cls.data["net_image1"]["n_params"] = net_im1.count_params()
        net_im2 = tl.layers.ElementwiseLayer([net_im1_1, net_im1_2], combine_fn=tf.minimum, name='minimum2')
        net_im2.print_params(False)
        net_im2.print_layers()
        cls.data["net_image2"] = dict()
        cls.data["net_image2"]["shape"] = net_im2.outputs.get_shape().as_list()
        cls.data["net_image2"]["layers"] = net_im2.all_layers
        cls.data["net_image2"]["params"] = net_im2.all_params
        cls.data["net_image2"]["n_params"] = net_im2.count_params()
        net_im3 = tl.layers.ElementwiseLambdaLayer([net_im1_1, net_im1_2], fn=lambda a, b: a * b, name='multiply2')
        net_im3.print_params(False)
        net_im3.print_layers()
        cls.data["net_image3"] = dict()
        cls.data["net_image3"]["shape"] = net_im3.outputs.get_shape().as_list()
        cls.data["net_image3"]["layers"] = net_im3.all_layers
        cls.data["net_image3"]["params"] = net_im3.all_params
        cls.data["net_image3"]["n_params"] = net_im3.count_params()
    @classmethod
    def tearDownClass(cls):
        # Drop the shared graph so subsequent test modules start clean.
        tf.reset_default_graph()
    def test_net_vector1(self):
        self.assertEqual(len(self.data["net_vector1"]["layers"]), 4)
        self.assertEqual(len(self.data["net_vector1"]["params"]), 4)
        self.assertEqual(self.data["net_vector1"]["n_params"], 157000)
    def test_net_vector2(self):
        self.assertEqual(len(self.data["net_vector2"]["layers"]), 4)
        self.assertEqual(len(self.data["net_vector2"]["params"]), 4)
        self.assertEqual(self.data["net_vector2"]["n_params"], 157000)
    def test_net_vector3(self):
        self.assertEqual(len(self.data["net_vector3"]["layers"]), 4)
        self.assertEqual(len(self.data["net_vector3"]["params"]), 4)
        self.assertEqual(self.data["net_vector3"]["n_params"], 157000)
    def test_net_image1(self):
        self.assertEqual(self.data["net_image1"]["shape"][1:], [50, 50, 64])
        self.assertEqual(len(self.data["net_image1"]["layers"]), 4)
        self.assertEqual(len(self.data["net_image1"]["params"]), 4)
        self.assertEqual(self.data["net_image1"]["n_params"], 1792)
    def test_net_image2(self):
        self.assertEqual(self.data["net_image2"]["shape"][1:], [50, 50, 32])
        self.assertEqual(len(self.data["net_image2"]["layers"]), 4)
        self.assertEqual(len(self.data["net_image2"]["params"]), 4)
        self.assertEqual(self.data["net_image2"]["n_params"], 1792)
    def test_net_image3(self):
        self.assertEqual(self.data["net_image3"]["shape"][1:], [50, 50, 32])
        self.assertEqual(len(self.data["net_image3"]["layers"]), 4)
        self.assertEqual(len(self.data["net_image3"]["params"]), 4)
        self.assertEqual(self.data["net_image3"]["n_params"], 1792)
if __name__ == '__main__':
    # Raise TF and TL logging to DEBUG when the file is run directly.
    tf.logging.set_verbosity(tf.logging.DEBUG)
    tl.logging.set_verbosity(tl.logging.DEBUG)
    unittest.main()
|
apache-2.0
| 3,621,803,695,910,737,400
| 39.386667
| 120
| 0.600693
| false
|
openweave/openweave-core
|
src/test-apps/happy/test-templates/WeaveSWU.py
|
1
|
12087
|
#!/usr/bin/env python3
#
# Copyright (c) 2015-2017 Nest Labs, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# @file
# Implements WeaveSWU class that tests Weave SWU among two Weave Nodes.
#
from __future__ import absolute_import
from __future__ import print_function
import sys
from happy.ReturnMsg import ReturnMsg
from happy.Utils import *
from happy.utils.IP import IP
from happy.HappyNode import HappyNode
from happy.HappyNetwork import HappyNetwork
from WeaveTest import WeaveTest
# Default option dictionary for WeaveSWU; option() hands callers a copy
# to mutate.
options = {}
options["quiet"] = False            # suppress output when True
options["server"] = None            # name or address of the SWU server node
options["client"] = None            # name or address of the SWU client node
options["file_designator"] = None   # server-side path of the image file
options["integrity_type"] = None    # integrity-check type (number)
options["update_scheme"] = None     # update scheme (number)
options["announceable"] = False     # whether the server announces the update
options["tap"] = None               # optional tap interface
def option():
    """Return a fresh copy of the default option dictionary."""
    return dict(options)
class WeaveSWU(HappyNode, HappyNetwork, WeaveTest):
    """
    Runs a Weave Software Update (SWU) exchange between two Happy nodes.

    weave-swu [-h --help] [-q --quiet] [-s --server <NAME>] [-c --client <NAME>]
              [-f --file-designator <SERVER_TMP_PATH>]
              [-i --integrity-type <Number, Default is 0>]
              [-u --update-scheme <Number, Default is 3>]
              [-a --announceable]

    command to test swu-v0:
        $ weave-swu --server node01 --client node02 --file-designator /server_files/test_image_file --integrity-type 2 --update-scheme 3 --announceable False
    or
        $ weave-swu -s node01 -c node02 -f /server_files/test_image_file -i 2 -u 3 -a True

    return:
        0    success
        1    failure
    """
    def __init__(self, opts = options):
        HappyNode.__init__(self)
        HappyNetwork.__init__(self)
        WeaveTest.__init__(self)
        # Caller-supplied options (see module-level `options` for meanings).
        self.quiet = opts["quiet"]
        self.client = opts["client"]
        self.server = opts["server"]
        self.file_designator = opts["file_designator"]
        self.integrity_type = opts["integrity_type"]
        self.update_scheme = opts["update_scheme"]
        self.announceable = opts["announceable"]
        self.tap = opts["tap"]
        # Fixed image metadata advertised by the SWU server.
        self.vendor_id = '9050'
        self.sw_version = '2.0'
        # Tags identifying the spawned server/client processes for WeaveTest.
        self.server_process_tag = "WEAVE-SWU-SERVER"
        self.client_process_tag = "WEAVE-SWU-CLIENT"
        # Resolved by __pre_check() from names/addresses.
        self.client_node_id = None
        self.server_node_id = None
    def __pre_check(self):
        # Validate inputs and resolve node names/addresses to node ids,
        # Weave IPs and Weave node ids.  Any failure exits the process.
        # Check if Weave SWU client node is given.
        if self.client == None:
            emsg = "Missing name or address of the Weave SWU client node."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        # Check if Weave SWU server node is given.
        if self.server == None:
            emsg = "Missing name or address of the Weave SWU server node."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        # Make sure that fabric was created
        if self.getFabricId() == None:
            emsg = "Weave Fabric has not been created yet."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        # Check if Weave SWU client node exists.
        if self._nodeExists(self.client):
            self.client_node_id = self.client
        # Check if Weave SWU server node exists.
        if self._nodeExists(self.server):
            self.server_node_id = self.server
        # Check if client is provided in a form of IP address
        if IP.isIpAddress(self.client):
            self.client_node_id = self.getNodeIdFromAddress(self.client)
        # Check if server is provided in a form of IP address
        if IP.isIpAddress(self.server):
            self.server_node_id = self.getNodeIdFromAddress(self.server)
        if self.client_node_id == None:
            emsg = "Unknown identity of the client node."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        if self.server_node_id == None:
            emsg = "Unknown identity of the server node."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        # Resolve Weave addresses/ids for both endpoints.
        self.client_ip = self.getNodeWeaveIPAddress(self.client_node_id)
        self.server_ip = self.getNodeWeaveIPAddress(self.server_node_id)
        self.client_weave_id = self.getWeaveNodeID(self.client_node_id)
        self.server_weave_id = self.getWeaveNodeID(self.server_node_id)
        # Check if all unknowns were found
        if self.client_ip == None:
            emsg = "Could not find IP address of the client node."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        if self.server_ip == None:
            emsg = "Could not find IP address of the server node."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        if self.client_weave_id == None:
            emsg = "Could not find Weave node ID of the client node."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        if self.server_weave_id == None:
            emsg = "Could not find Weave node ID of the server node."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        # Check file_designator,update_scheme,integrity_type
        if self.integrity_type == None:
            emsg = "Missing integrity type that is used by SWU server."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        if self.update_scheme == None:
            emsg = "Missing update scheme that is used by SWU server."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        if self.file_designator == None:
            emsg = "Missing path to a temporaty directory that is used by SWU server."
            self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
            sys.exit(1)
        if self.integrity_type != None:
            self.integrity_type = str(self.integrity_type)
            if not self.integrity_type.isdigit():
                emsg = "File integrity type must be a number, not %s." % (self.integrity_type)
                self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
                # NOTE(review): unlike every other validation failure above,
                # there is no sys.exit(1) here, so a non-digit value falls
                # through to int() below and raises ValueError instead of
                # exiting cleanly -- confirm whether an exit is missing.
            if int(self.integrity_type) not in [0, 1, 2]:
                emsg = "File integrity type must be 0, or 1, or 2, not %s." % (self.integrity_type)
                self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
                sys.exit(1)
        if self.update_scheme != None:
            self.update_scheme = str(self.update_scheme)
            if not self.update_scheme.isdigit():
                emsg = "File update scheme must be a number, not %s." % (self.update_scheme)
                self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
                # NOTE(review): same missing sys.exit(1) as the
                # integrity-type check above -- confirm.
            if int(self.update_scheme) not in [0, 1, 2, 3]:
                emsg = "File update scheme must be 0, or 1, or 2, or 3 not %s." % (self.update_scheme)
                self.logger.error("[localhost] WeaveSWU: %s" % (emsg))
                sys.exit(1)
    def __start_server_side(self):
        # Launch the SWU server binary.  In announceable mode the server
        # actively announces the update to the client; otherwise it listens
        # for the client's query.
        cmd = self.getWeaveSWUServerPath()
        if not cmd:
            return
        cmd += " --node-addr " + self.server_ip
        cmd += " --file-designator " + self.file_designator + " --vendor-id " + self.vendor_id
        cmd += " --sw-version " + str(self.sw_version) + " --integrity-type " + self.integrity_type
        cmd += " --update-scheme " + self.update_scheme
        if self.announceable:
            cmd += " --dest-addr " + self.client_ip + " --dest-node-id " + self.client_weave_id
        else:
            cmd += " --listen"
        if self.tap:
            cmd += " --tap-device " + self.tap
        # Block until the server reports it is ready to service requests.
        self.start_weave_process(self.server_node_id, cmd, self.server_process_tag, sync_on_output = self.ready_to_service_events_str)
    def __start_client_side(self):
        # Launch the SWU client binary (listening when the server announces,
        # otherwise querying the server directly).
        cmd = self.getWeaveSWUClientPath()
        if not cmd:
            return
        cmd += " --node-addr " + self.client_ip
        if self.announceable:
            cmd += " --listen"
        else:
            # NOTE(review): unlike the server side, the node id is appended
            # without a --dest-node-id flag here -- confirm against the
            # weave-swu-client command-line interface.
            cmd += " --dest-addr " + self.server_ip + " " + self.server_weave_id
        if self.tap:
            cmd += " --tap-device " + self.tap
        self.start_weave_process(self.client_node_id, cmd, self.client_process_tag)
    def __process_results(self, client_output):
        # Scan the client log for the success marker and print a summary
        # (unless quiet).  Returns (pass/fail, raw client output).
        pass_test = False
        for line in client_output.split("\n"):
            if "Completed the SWU interactive protocol test" in line:
                pass_test = True
                break
        if self.quiet == False:
            print("weave-swu from server %s (%s) to client %s (%s) : " % \
                (self.server_node_id, self.server_ip,
                 self.client_node_id, self.client_ip))
            if pass_test:
                print(hgreen("SWU interaction is completed"))
            else:
                print(hred("SWU interaction is not completed"))
        return (pass_test, client_output)
    def __wait_for_client(self):
        self.wait_for_test_to_end(self.client_node_id, self.client_process_tag)
    def __stop_server_side(self):
        self.stop_weave_process(self.server_node_id, self.server_process_tag)
    def run(self):
        """Execute the SWU exchange and return a ReturnMsg.

        The two branches mirror each other: in announceable mode the client
        is started first (it must be listening before the server announces);
        otherwise the server is started first.  Either way we wait for the
        client to finish, then stop the server and collect both logs.
        """
        self.logger.debug("[localhost] WeaveSWU: Run.")
        self.__pre_check()
        if self.announceable:
            self.__start_client_side()
            delayExecution(0.5)
            emsg = "WeaveSWU %s should be running." % (self.client_process_tag)
            self.logger.debug("[%s] WeaveSWU: %s" % (self.client_node_id, emsg))
            self.__start_server_side()
            self.__wait_for_client()
            client_output_value, client_output_data = \
                self.get_test_output(self.client_node_id, self.client_process_tag, True)
            client_strace_value, client_strace_data = \
                self.get_test_strace(self.client_node_id, self.client_process_tag, True)
            self.__stop_server_side()
            server_output_value, server_output_data = \
                self.get_test_output(self.server_node_id, self.server_process_tag, True)
            server_strace_value, server_strace_data = \
                self.get_test_strace(self.server_node_id, self.server_process_tag, True)
        else:
            self.__start_server_side()
            emsg = "WeaveSWU %s should be running." % (self.server_process_tag)
            self.logger.debug("[%s] WeaveSWU: %s" % (self.server_node_id, emsg))
            delayExecution(0.5)
            self.__start_client_side()
            self.__wait_for_client()
            client_output_value, client_output_data = \
                self.get_test_output(self.client_node_id, self.client_process_tag, True)
            client_strace_value, client_strace_data = \
                self.get_test_strace(self.client_node_id, self.client_process_tag, True)
            self.__stop_server_side()
            server_output_value, server_output_data = \
                self.get_test_output(self.server_node_id, self.server_process_tag, True)
            server_strace_value, server_strace_data = \
                self.get_test_strace(self.server_node_id, self.server_process_tag, True)
        # Pass/fail is decided from the client log alone.
        status, results = self.__process_results(client_output_data)
        data = {}
        data["client_output"] = client_output_data
        data["client_strace"] = client_strace_data
        data["server_output"] = server_output_data
        data["server_strace"] = server_strace_data
        self.logger.debug("[localhost] WeaveSWU: Done.")
        return ReturnMsg(status, data)
|
apache-2.0
| -8,108,436,593,459,322,000
| 36.537267
| 157
| 0.586498
| false
|
GuillaumeSeren/alot
|
alot/helper.py
|
1
|
20203
|
# -*- coding: utf-8 -*-
# Copyright (C) 2011-2012 Patrick Totzke <patricktotzke@gmail.com>
# Copyright © 2017-2018 Dylan Baker
# This file is released under the GNU GPL, version 3 or a later revision.
# For further details see the COPYING file
from datetime import timedelta
from datetime import datetime
from collections import deque
import logging
import mimetypes
import os
import re
import shlex
import subprocess
import email
from email.mime.audio import MIMEAudio
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.text import MIMEText
import asyncio
import urwid
import magic
def split_commandline(s, comments=False, posix=True):
    """Split a semicolon-separated string into individual commandlines."""
    # shlex removes unescaped quotes and backslashes, so escape them up
    # front to keep them verbatim in the resulting tokens.
    escaped = s.replace('\\', '\\\\').replace("'", "\\'").replace('"', '\\"')
    lexer = shlex.shlex(escaped, posix=posix)
    lexer.whitespace_split = True
    lexer.whitespace = ';'
    if not comments:
        lexer.commenters = ''
    return list(lexer)
def split_commandstring(cmdstring):
    """
    Split a command string into an argv-style list for subprocess.Popen
    and friends.  Thin wrapper around :func:`shlex.split`; the input must
    already be a unicode string.
    """
    assert isinstance(cmdstring, str)
    return shlex.split(cmdstring)
def string_sanitize(string, tab_width=8):
    r"""
    Remove carriage returns and expand tabs to spaces.

    :param string: the text to sanitize
    :type string: str
    :param tab_width: number of columns per tab stop
    :type tab_width: int

    >>> string_sanitize(' foo\rbar ', 8)
    ' foobar '
    >>> string_sanitize('foo\tbar', 8)
    'foo     bar'
    >>> string_sanitize('foo\t\tbar', 8)
    'foo             bar'
    """
    # str.expandtabs pads each tab up to the next multiple of tab_width,
    # which matches the column arithmetic done manually before.
    cleaned = string.replace('\r', '')
    expanded = [line.expandtabs(tab_width) for line in cleaned.split('\n')]
    return '\n'.join(expanded)
def string_decode(string, enc='ascii'):
    """
    Safely decode a bytestring to unicode, respecting `enc` as a hint.

    :param string: the string to decode
    :type string: bytes or str
    :param enc: a hint what encoding is used in string ('ascii', 'utf-8', ...)
    :type enc: str
    :returns: the unicode decoded input string
    :rtype: str
    """
    if enc is None:
        enc = 'ascii'
    try:
        return str(string, enc, errors='replace')
    except LookupError:
        # `enc` names no known codec; fall back to permissive ascii.
        return string.decode('ascii', errors='replace')
    except TypeError:
        # Already a str (or not bytes-like); hand it back unchanged.
        return string
def shorten(string, maxlen):
    """Truncate *string* to *maxlen* characters, ending in an ellipsis."""
    if len(string) > maxlen > 1:
        # Reserve the final slot for the ellipsis character.
        return string[:maxlen - 1] + '…'
    return string[:maxlen]
def shorten_author_string(authors_string, maxlength):
    """
    Parse a list of authors concatenated as a text string (comma
    separated) and smartly adjust them to maxlength.

    1) If the complete list of sender names does not fit in maxlength, it
       tries to shorten names by using only the first part of each.
    2) If the list is still too long, hide authors according to the
       following priority:

      - First author is always shown (if too long is shorten with ellipsis)
      - If possible, last author is also shown (if too long, uses ellipsis)
      - If there are more than 2 authors in the thread, show the
        maximum of them. More recent senders have higher priority.
      - If it is finally necessary to hide any author, an ellipsis
        between first and next authors is added.

    :param authors_string: comma-separated author names
    :type authors_string: str
    :param maxlength: maximum width of the returned string
    :type maxlength: int
    :rtype: str
    """
    # I will create a list of authors by parsing author_string. I use
    # deque to do popleft without performance penalties
    authors = deque()
    # If author list is too long, it uses only the first part of each
    # name (gmail style)
    short_names = len(authors_string) > maxlength
    for au in authors_string.split(", "):
        if short_names:
            # keep only the first whitespace-separated word of the name
            author_as_list = au.split()
            if len(author_as_list) > 0:
                authors.append(author_as_list[0])
        else:
            authors.append(au)
    # Author chain will contain the list of author strings to be
    # concatenated using commas for the final formatted author_string.
    authors_chain = deque()
    if len(authors) == 0:
        return ''
    # reserve space for first author
    first_au = shorten(authors.popleft(), maxlength)
    remaining_length = maxlength - len(first_au)
    # Tries to add an ellipsis if no space to show more than 1 author
    if authors and maxlength > 3 and remaining_length < 3:
        first_au = shorten(first_au, maxlength - 3)
        remaining_length += 3
    # Tries to add as many authors as possible, walking from the most
    # recent (rightmost) backwards.  If any author must be hidden, an
    # ellipsis is inserted instead.
    while authors and remaining_length >= 3:
        au = authors.pop()
        if len(au) > 1 and (remaining_length == 3 or (authors and
                                                      remaining_length < 7)):
            # Not enough room for this author plus separators: give up and
            # mark the hidden middle with an ellipsis.
            authors_chain.appendleft('…')
            break
        else:
            if authors:
                # 5= ellipsis + 2 x comma and space used as separators
                au_string = shorten(au, remaining_length - 5)
            else:
                # 2 = comma and space used as separator
                au_string = shorten(au, remaining_length - 2)
            remaining_length -= len(au_string) + 2
            authors_chain.appendleft(au_string)
    # Add the first author to the list and concatenate list
    authors_chain.appendleft(first_au)
    authorsstring = ', '.join(authors_chain)
    return authorsstring
def pretty_datetime(d):
    """
    translates :class:`datetime` `d` to a "sup-style" human readable string.

    The examples below are illustrative (they depend on the current time):

    >>> now = datetime.now()
    >>> now.strftime('%c')
    'Sat 31 Mar 2012 14:47:26 '
    >>> pretty_datetime(now)
    'just now'
    >>> pretty_datetime(now - timedelta(minutes=1))
    '1min ago'
    >>> pretty_datetime(now - timedelta(hours=5))
    '5h ago'
    >>> pretty_datetime(now - timedelta(hours=12))
    '02:54am'
    >>> pretty_datetime(now - timedelta(days=1))
    'yest 02pm'
    >>> pretty_datetime(now - timedelta(days=2))
    'Thu 02pm'
    >>> pretty_datetime(now - timedelta(days=7))
    'Mar 24'
    >>> pretty_datetime(now - timedelta(days=356))
    'Apr 2011'
    """
    # Use 12h formats only if the locale defines an AM/PM designator.
    ampm = d.strftime('%p').lower()
    if len(ampm):
        hourfmt = '%I' + ampm
        hourminfmt = '%I:%M' + ampm
    else:
        hourfmt = '%Hh'
        hourminfmt = '%H:%M'
    now = datetime.now()
    today = now.date()
    # "Recent": today, or within the last six hours (possibly yesterday).
    if d.date() == today or d > now - timedelta(hours=6):
        delta = datetime.now() - d
        if delta.seconds < 60:
            string = 'just now'
        elif delta.seconds < 3600:
            string = '%dmin ago' % (delta.seconds // 60)
        elif delta.seconds < 6 * 3600:
            string = '%dh ago' % (delta.seconds // 3600)
        else:
            string = d.strftime(hourminfmt)
    elif d.date() == today - timedelta(1):
        # yesterday
        string = d.strftime('yest ' + hourfmt)
    elif d.date() > today - timedelta(7):
        # within the last week: weekday name + hour
        string = d.strftime('%a ' + hourfmt)
    elif d.year != today.year:
        # a different year: month + year
        string = d.strftime('%b %Y')
    else:
        # this year but older than a week: month + day
        string = d.strftime('%b %d')
    return string_decode(string, 'UTF-8')
def call_cmd(cmdlist, stdin=None):
    """
    get a shell commands output, error message and return value and immediately
    return.

    .. warning::

        This returns with the first screen content for interactive commands.

    :param cmdlist: shellcommand to call, already splitted into a list accepted
                    by :meth:`subprocess.Popen`
    :type cmdlist: list of str
    :param stdin: string to pipe to the process
    :type stdin: str, bytes, or None
    :return: triple of stdout, stderr, return value of the shell command
    :rtype: str, str, int
    """
    # Encode stdin with the detected terminal encoding so it matches what
    # the child process expects to read.
    termenc = urwid.util.detected_encoding
    if isinstance(stdin, str):
        stdin = stdin.encode(termenc)
    try:
        logging.debug("Calling %s" % cmdlist)
        proc = subprocess.Popen(
            cmdlist,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            stdin=subprocess.PIPE if stdin is not None else None)
    except OSError as e:
        # Spawning failed (e.g. executable not found); report it in-band
        # through the same (out, err, ret) triple instead of raising.
        out = b''
        err = e.strerror
        ret = e.errno
    else:
        out, err = proc.communicate(stdin)
        ret = proc.returncode
    # Normalize both streams to unicode (string_decode passes str through).
    out = string_decode(out, termenc)
    err = string_decode(err, termenc)
    return out, err, ret
async def call_cmd_async(cmdlist, stdin=None, env=None):
    """Given a command, call that command asynchronously and return the output.

    This function only handles `OSError` when creating the subprocess, any
    other exceptions raised either during subprocess creation or while
    exchanging data with the subprocess are the caller's responsibility to
    handle.

    If such an `OSError` is caught, then returncode will be set to 1, and the
    error value will be set to the str() value of the exception.

    :type cmdlist: list of str
    :param stdin: string to pipe to the process
    :type stdin: str
    :param env: mapping of extra environment variables overlaid on the
        inherited environment
    :type env: dict or None
    :return: Tuple of stdout, stderr, returncode
    :rtype: tuple[str, str, int]
    """
    termenc = urwid.util.detected_encoding
    # Encode the argv entries with the terminal encoding before exec'ing.
    cmdlist = [s.encode(termenc) for s in cmdlist]
    environment = os.environ.copy()
    if env is not None:
        environment.update(env)
    logging.debug('ENV = %s', environment)
    logging.debug('CMD = %s', cmdlist)
    try:
        proc = await asyncio.create_subprocess_exec(
            *cmdlist,
            env=environment,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            stdin=asyncio.subprocess.PIPE if stdin else None)
    except OSError as e:
        # Could not spawn the child at all; report the failure in-band.
        return ('', str(e), 1)
    out, err = await proc.communicate(stdin.encode(termenc) if stdin else None)
    return (out.decode(termenc), err.decode(termenc), proc.returncode)
def guess_mimetype(blob):
    """
    uses file magic to determine the mime-type of the given data blob.

    :param blob: file content as read by file.read()
    :type blob: data
    :returns: mime-type, falls back to 'application/octet-stream'
    :rtype: str
    """
    mimetype = 'application/octet-stream'
    # this is a bit of a hack to support different versions of python magic.
    # Hopefully at some point this will no longer be necessary
    #
    # the version with open() is the bindings shipped with the file source from
    # http://darwinsys.com/file/ - this is what is used by the python-magic
    # package on Debian/Ubuntu. However, it is not available on pypi/via pip.
    #
    # the version with from_buffer() is available at
    # https://github.com/ahupp/python-magic and directly installable via pip.
    #
    # for more detail see https://github.com/pazz/alot/pull/588
    if hasattr(magic, 'open'):
        m = magic.open(magic.MAGIC_MIME_TYPE)
        m.load()
        magictype = m.buffer(blob)
    elif hasattr(magic, 'from_buffer'):
        # Fall back to the generic default when libmagic returns an empty
        # result (cf. issue #841).  BUGFIX: this previously read
        # ``or magictype``, which raised NameError because ``magictype``
        # was still unbound on this branch.
        magictype = magic.from_buffer(blob, mime=True) or mimetype
    else:
        raise Exception('Unknown magic API')

    # libmagic does not always return proper mimetype strings, cf. issue #459
    if re.match(r'\w+\/\w+', magictype):
        mimetype = magictype
    return mimetype
def guess_encoding(blob):
    """
    uses file magic to determine the encoding of the given data blob.

    :param blob: file content as read by file.read()
    :type blob: data
    :returns: encoding name as reported by libmagic (e.g. 'us-ascii', 'utf-8')
    :rtype: str
    """
    # this is a bit of a hack to support different versions of python magic.
    # Hopefully at some point this will no longer be necessary
    #
    # the version with open() is the bindings shipped with the file source from
    # http://darwinsys.com/file/ - this is what is used by the python-magic
    # package on Debian/Ubuntu. However it is not available on pypi/via pip.
    #
    # the version with from_buffer() is available at
    # https://github.com/ahupp/python-magic and directly installable via pip.
    #
    # for more detail see https://github.com/pazz/alot/pull/588
    if hasattr(magic, 'open'):
        m = magic.open(magic.MAGIC_MIME_ENCODING)
        m.load()
        return m.buffer(blob)
    elif hasattr(magic, 'from_buffer'):
        m = magic.Magic(mime_encoding=True)
        return m.from_buffer(blob)
    else:
        raise Exception('Unknown magic API')
def try_decode(blob):
    """Guess the encoding of *blob* and decode it into a str.

    :param bytes blob: The bytes to decode
    :returns: the decoded blob
    :rtype: str
    """
    assert isinstance(blob, bytes), 'cannot decode a str or non-bytes object'
    encoding = guess_encoding(blob)
    return blob.decode(encoding)
def libmagic_version_at_least(version):
    """
    checks if the libmagic library installed is more recent than a given
    version.

    :param version: minimum version expected in the form XYY (i.e. 5.14 -> 514)
                    with XYY >= 513
    :returns: True if the installed libmagic is at least `version`
    :rtype: bool
    """
    # Locate the raw ctypes library handle; the attribute name differs
    # between the two supported python-magic flavours (see guess_mimetype).
    if hasattr(magic, 'open'):
        magic_wrapper = magic._libraries['magic']
    elif hasattr(magic, 'from_buffer'):
        magic_wrapper = magic.libmagic
    else:
        raise Exception('Unknown magic API')

    if not hasattr(magic_wrapper, 'magic_version'):
        # The magic_version function has been introduced in libmagic 5.13,
        # if it's not present, we can't guess right, so let's assume False
        return False

    # Depending on the libmagic/ctypes version, magic_version is a function or
    # a callable:
    if callable(magic_wrapper.magic_version):
        return magic_wrapper.magic_version() >= version

    return magic_wrapper.magic_version >= version
# TODO: make this work on blobs, not paths
def mimewrap(path, filename=None, ctype=None):
    """Take the contents of the given path and wrap them into an email MIME
    part according to the content type.  The content type is auto detected
    from the actual file contents and the file name if it is not given.

    :param path: the path to the file contents
    :type path: str
    :param filename: the file name to use in the generated MIME part
    :type filename: str or None
    :param ctype: the content type of the file contents in path
    :type ctype: str or None
    :returns: the message MIME part storing the data from path
    :rtype: subclasses of email.mime.base.MIMEBase
    """
    with open(path, 'rb') as f:
        content = f.read()
    if not ctype:
        ctype = guess_mimetype(content)
        # libmagic < 5.12 incorrectly detects excel/powerpoint files as
        # 'application/msword' (see #179 and #186 in libmagic bugtracker)
        # This is a workaround, based on file extension, useful as long
        # as distributions still ship libmagic 5.11.
        if (ctype == 'application/msword' and
                not libmagic_version_at_least(513)):
            mimetype, _ = mimetypes.guess_type(path)
            if mimetype:
                ctype = mimetype

    # Pick the MIME subclass matching the major content type.
    maintype, subtype = ctype.split('/', 1)
    if maintype == 'text':
        part = MIMEText(content.decode(guess_encoding(content), 'replace'),
                        _subtype=subtype,
                        _charset='utf-8')
    elif maintype == 'image':
        part = MIMEImage(content, _subtype=subtype)
    elif maintype == 'audio':
        part = MIMEAudio(content, _subtype=subtype)
    else:
        part = MIMEBase(maintype, subtype)
        part.set_payload(content)
        # Encode the payload using Base64
        email.encoders.encode_base64(part)
    # Set the filename parameter
    if not filename:
        filename = os.path.basename(path)
    part.add_header('Content-Disposition', 'attachment',
                    filename=filename)
    return part
def shell_quote(text):
    """Escape the given text for passing it to the shell for interpretation.

    The resulting string will be parsed into one "word" (in the sense used in
    the shell documentation, see sh(1)) by the shell.

    :param text: the text to quote
    :type text: str
    :returns: the quoted text
    :rtype: str
    """
    # Wrap in single quotes; each embedded single quote is replaced by the
    # classic '"'"' sequence (close quote, quoted quote, reopen quote).
    escaped = text.replace("'", "'\"'\"'")
    return "'{}'".format(escaped)
def humanize_size(size):
    """Create a nice human readable representation of the given number
    (understood as bytes) using the "KiB" and "MiB" suffixes to indicate
    kibibytes and mebibytes.  A kibibyte is defined as 1024 bytes (as opposed
    to a kilobyte which is 1000 bytes) and a mibibyte is 1024**2 bytes (as
    opposed to a megabyte which is 1000**2 bytes).

    :param size: the number to convert
    :type size: int
    :returns: the human readable representation of size
    :rtype: str
    """
    if size < 1024:
        return '%i' % size
    if size < 1024 * 1024:
        # %i truncates any fractional kibibytes.
        return '%iKiB' % (size / 1024)
    return '%.1fMiB' % (size / (1024 * 1024))
def parse_mailcap_nametemplate(tmplate='%s'):
    """Return (prefix, suffix) for use with the tempfile module, extracted
    from a mailcap nametemplate string.

    Only templates containing exactly one ``%s`` placeholder are split;
    anything else is treated entirely as suffix.
    """
    parts = tmplate.split('%s')
    if len(parts) == 2:
        return (parts[0], parts[1])
    return ('', tmplate)
def parse_mailto(mailto_str):
    """
    Interpret mailto-string

    :param mailto_str: the string to interpret. Must conform to :rfc:2368.
    :type mailto_str: str
    :return: the header fields and the body found in the mailto link as a tuple
        of length two; ``(None, None)`` if the string is not a mailto link
    :rtype: tuple(dict(str->list(str)), str)
    """
    if not mailto_str.startswith('mailto:'):
        return (None, None)
    import urllib.parse
    to_part, _, query = mailto_str[7:].partition('?')
    headers = {}
    body = ''
    recipient = urllib.parse.unquote(to_part)
    if recipient:
        headers['To'] = [recipient]
    for param in query.split('&'):
        key, _, value = param.partition('=')
        # Header names in mailto links are case-insensitive; normalize.
        key = key.capitalize()
        if key == 'Body':
            body = urllib.parse.unquote(value)
        elif value:
            headers[key] = [urllib.parse.unquote(value)]
    return (headers, body)
def mailto_to_envelope(mailto_str):
    """
    Interpret mailto-string into a :class:`alot.db.envelope.Envelope`
    """
    # Imported lazily to avoid a circular import between helper and db.
    from alot.db.envelope import Envelope
    headers, body = parse_mailto(mailto_str)
    return Envelope(bodytext=body, headers=headers)
def RFC3156_canonicalize(text):
    """
    Canonicalizes plain text (MIME-encoded usually) according to RFC3156.

    This function works as follows (in that order):

    1. Convert all line endings to \\\\r\\\\n (DOS line endings).
    2. Encode all occurrences of "From " at the beginning of a line
       to "From=20" in order to prevent other mail programs to replace
       this with "> From" (to avoid MBox conflicts) and thus invalidate
       the signature.

    :param text: text to canonicalize (already encoded as quoted-printable)
    :rtype: str
    """
    crlf = re.compile(r"\r?\n")
    from_line = re.compile(r"^From ", re.MULTILINE)
    return from_line.sub("From=20", crlf.sub("\r\n", text))
def get_xdg_env(env_name, fallback):
    """Read an XDG_* environment variable, returning *fallback* when the
    variable is unset *or* empty (per the XDG base directory spec)."""
    return os.environ.get(env_name) or fallback
|
gpl-3.0
| -2,359,341,438,656,932,400
| 32.440397
| 79
| 0.624171
| false
|
AenBleidd/man2wiki
|
man2wiki.py
|
1
|
2114
|
import sys
import re
from os import listdir, linesep
from os.path import isfile, join, splitext
def clearLine(line):
    """Strip roff escapes/formatting from *line* and escape '#' for wiki."""
    replacements = (
        (r'\&', ''),
        (r'\fB', ''),
        (r'\fC', ''),
        (r'\fP', ''),
        (r'\-', '-'),
        ('#', '<nowiki>#</nowiki>'),
    )
    cleaned = line.strip()
    for old, new in replacements:
        cleaned = cleaned.replace(old, new)
    return cleaned
def convert(in_filename, out_filename):
    """Translate one roff man page into wiki markup.

    Reads *in_filename* line by line, dropping layout-only roff requests
    and rewriting section/formatting macros into wiki syntax, and writes
    the result to *out_filename*.

    NOTE(review): output uses os.linesep with a text-mode file, which on
    Windows would yield doubled CR characters -- confirm target platform.
    NOTE(review): the input file handle is never closed explicitly.
    """
    f = open(out_filename, 'w')
    for line in open(in_filename):
        # Drop roff comments (.\" ...).
        m = re.match(r'^\.\\\"', line)
        if m is not None:
            continue
        # Drop paragraph/indentation-only requests.
        if line.strip() == '.TP' or line.strip() == '.PP' or line.strip() == '.nh':
            continue
        # Drop the title header line.
        m = re.match(r'^\.TH\s+', line)
        if m is not None:
            continue
        # .SH section header -> level-2 wiki heading.
        m = re.match(r'^\.SH\s+("?)(.*)(\1)\s*$', line)
        if m is not None:
            f.write(linesep)
            f.write("== " + clearLine(m.group(2)) + " ==")
            f.write(linesep)
            continue
        # .B / .RB bold text -> **bold**.
        m = re.match(r'^\.R?B\s+(.*)\s*$', line)
        if m is not None:
            f.write("**" + clearLine(m.group(1)) + "**")
            f.write(linesep)
            continue
        # .I italic text -> //italic//.
        m = re.match(r'^\.I\s+(.*)\s*$', line)
        if m is not None:
            f.write("//" + clearLine(m.group(1)) + "//")
            f.write(linesep)
            continue
        # .br line break -> blank line.
        if line.strip() == ".br":
            f.write(linesep)
            continue
        # Drop indentation/adjustment requests (.in, .ti, .ad).
        m = re.match(r'^\.in\s', line)
        if m is not None:
            continue
        m = re.match(r'^\.ti\s', line)
        if m is not None:
            continue
        m = re.match(r'^\.ad\s', line)
        if m is not None:
            continue
        # .SS subsection -> level-3 wiki heading.
        m = re.match(r'^\.SS\s+("?)(.*)(\1)\s*$', line)
        if m is not None:
            f.write(linesep)
            f.write("=== " + clearLine(m.group(2)) + " ===")
            f.write(linesep)
            continue
        # .RI wrapping an italic span -> plain paragraph of its contents.
        m = re.match(r'^\.RI\s+("?)(\\fI)(.*)(\\fP)(\1)\s*$', line)
        if m is not None:
            f.write(linesep)
            f.write(clearLine(m.group(3)))
            f.write(linesep)
            continue
        # Any other .RI -> level-4 wiki heading.
        m = re.match(r'^\.RI\s+("?)(.*)(\1)\s*$', line)
        if m is not None:
            f.write(linesep)
            f.write("==== " + clearLine(m.group(2)) + " ====")
            f.write(linesep)
            continue
        # Ordinary text line: pass through after cleanup.
        f.write(clearLine(line))
    f.close()
# CLI entry point: convert every man page file in the input folder into a
# .wiki file of the same basename in the output folder.
# NOTE(review): this runs at import time (no `if __name__ == "__main__"`
# guard) -- confirm the script is only ever executed directly.
if len(sys.argv) != 3:
    print("Usage: python man2wiki.py [man folder] [wiki folder]")
else:
    for f in [f for f in listdir(sys.argv[1]) if isfile(join(sys.argv[1],f))]:
        convert(join(sys.argv[1], f), join(sys.argv[2], splitext(f)[0] + ".wiki"))
|
gpl-3.0
| -1,009,650,140,952,821,800
| 27.581081
| 155
| 0.556291
| false
|
magne-max/zipline-ja
|
zipline/pipeline/loaders/utils.py
|
1
|
9840
|
import datetime
import numpy as np
import pandas as pd
from zipline.utils.pandas_utils import mask_between_time
def is_sorted_ascending(a):
    """Check if a numpy array is sorted in non-decreasing order."""
    # The running maximum equals each element exactly when no element is
    # smaller than one of its predecessors.
    running_max = np.fmax.accumulate(a)
    return (running_max <= a).all()
def validate_event_metadata(event_dates,
                            event_timestamps,
                            event_sids):
    """Sanity-check parallel event arrays: dates sorted, lengths equal."""
    assert is_sorted_ascending(event_dates), "event dates must be sorted"
    n_sids = len(event_sids)
    n_dates = len(event_dates)
    n_timestamps = len(event_timestamps)
    assert n_sids == n_dates == n_timestamps, \
        "mismatched arrays: %d != %d != %d" % (
            n_sids,
            n_dates,
            n_timestamps,
        )
def next_event_indexer(all_dates,
                       all_sids,
                       event_dates,
                       event_timestamps,
                       event_sids):
    """
    Construct an index array that, when applied to an array of values,
    produces a 2D array containing the values associated with the next event
    for each sid at each moment in time.

    Locations where no next event was known will be filled with -1.

    Parameters
    ----------
    all_dates : ndarray[datetime64[ns], ndim=1]
        Row labels for the target output.
    all_sids : ndarray[int, ndim=1]
        Column labels for the target output.
    event_dates : ndarray[datetime64[ns], ndim=1]
        Dates on which each input events occurred/will occur.  ``event_dates``
        must be in sorted order, and may not contain any NaT values.
    event_timestamps : ndarray[datetime64[ns], ndim=1]
        Dates on which we learned about each input event.
    event_sids : ndarray[int, ndim=1]
        Sids assocated with each input event.

    Returns
    -------
    indexer : ndarray[int, ndim=2]
        An array of shape (len(all_dates), len(all_sids)) of indices into
        ``event_{dates,timestamps,sids}``.
    """
    validate_event_metadata(event_dates, event_timestamps, event_sids)
    # -1 marks "no known next event".
    out = np.full((len(all_dates), len(all_sids)), -1, dtype=np.int64)
    # Map each event's sid to its output column.
    sid_ixs = all_sids.searchsorted(event_sids)
    # side='right' here ensures that we include the event date itself
    # if it's in all_dates.
    dt_ixs = all_dates.searchsorted(event_dates, side='right')
    ts_ixs = all_dates.searchsorted(event_timestamps)
    # Walk backward through the events, writing the index of the event into
    # slots ranging from the event's timestamp to its asof. This depends for
    # correctness on the fact that event_dates is sorted in ascending order,
    # because we need to overwrite later events with earlier ones if their
    # eligible windows overlap.
    for i in range(len(event_sids) - 1, -1, -1):
        start_ix = ts_ixs[i]
        end_ix = dt_ixs[i]
        out[start_ix:end_ix, sid_ixs[i]] = i
    return out
def previous_event_indexer(all_dates,
                           all_sids,
                           event_dates,
                           event_timestamps,
                           event_sids):
    """
    Construct an index array that, when applied to an array of values,
    produces a 2D array containing the values associated with the previous
    event for each sid at each moment in time.

    Locations where no previous event was known will be filled with -1.

    Parameters
    ----------
    all_dates : ndarray[datetime64[ns], ndim=1]
        Row labels for the target output.
    all_sids : ndarray[int, ndim=1]
        Column labels for the target output.
    event_dates : ndarray[datetime64[ns], ndim=1]
        Dates on which each input events occurred/will occur.  ``event_dates``
        must be in sorted order, and may not contain any NaT values.
    event_timestamps : ndarray[datetime64[ns], ndim=1]
        Dates on which we learned about each input event.
    event_sids : ndarray[int, ndim=1]
        Sids assocated with each input event.

    Returns
    -------
    indexer : ndarray[int, ndim=2]
        An array of shape (len(all_dates), len(all_sids)) of indices into
        ``event_{dates,timestamps,sids}``.
    """
    validate_event_metadata(event_dates, event_timestamps, event_sids)
    # -1 marks "no known previous event".
    out = np.full((len(all_dates), len(all_sids)), -1, dtype=np.int64)
    # An event becomes the "previous" one only once both its date has passed
    # and we have learned about it, hence the element-wise maximum.
    eff_dts = np.maximum(event_dates, event_timestamps)
    sid_ixs = all_sids.searchsorted(event_sids)
    dt_ixs = all_dates.searchsorted(eff_dts)
    # Walk backwards through the events, writing the index of the event into
    # slots ranging from max(event_date, event_timestamp) to the start of the
    # previously-written event. This depends for correctness on the fact that
    # event_dates is sorted in ascending order, because we need to have written
    # later events so we know where to stop forward-filling earlier events.
    last_written = {}
    for i in range(len(event_dates) - 1, -1, -1):
        sid_ix = sid_ixs[i]
        dt_ix = dt_ixs[i]
        out[dt_ix:last_written.get(sid_ix, None), sid_ix] = i
        last_written[sid_ix] = dt_ix
    return out
def normalize_data_query_time(dt, time, tz):
    """Apply the correct time and timezone to a date.

    Parameters
    ----------
    dt : pd.Timestamp
        The original datetime that represents the date.
    time : datetime.time
        The time of day to use as the cutoff point for new data.
    tz : tzinfo
        The timezone in which to interpret the combined date and time
        before converting back to UTC.

    Returns
    -------
    query_dt : pd.Timestamp
        The timestamp with the correct time and date in utc.
    """
    # Attach the cutoff time (interpreted in `tz`) to the date, then express
    # the result in UTC.
    local_dt = datetime.datetime.combine(dt.date(), time)
    return pd.Timestamp(local_dt, tz=tz).tz_convert('utc')
def normalize_data_query_bounds(lower, upper, time, tz):
    """Adjust the first and last dates in the requested datetime index based on
    the provided query time and tz.
    Parameters
    ----------
    lower : pd.Timestamp
        The lower date requested.
    upper : pd.Timestamp
        The upper date requested.
    time : datetime.time
        The time of day to use as the cutoff point for new data. Data points
        that you learn about after this time will become available to your
        algorithm on the next trading day.
    tz : tzinfo
        The timezone to normalize your dates to before comparing against
        `time`.
    Returns
    -------
    (pd.Timestamp, pd.Timestamp)
        The adjusted (lower, upper) bounds; normalized to the query time
        when ``time`` is provided.
    """
    # Widen the window by one calendar day so events that happened on the
    # first requested day are not filtered out.  This bound only limits
    # in-memory filtering, so it need not be a trading day.
    lower = lower - datetime.timedelta(days=1)
    if time is None:
        return lower, upper
    return (
        normalize_data_query_time(lower, time, tz),
        normalize_data_query_time(upper, time, tz),
    )
_midnight = datetime.time(0, 0)
def normalize_timestamp_to_query_time(df,
                                      time,
                                      tz,
                                      inplace=False,
                                      ts_field='timestamp'):
    """Update the timestamp field of a dataframe to normalize dates around
    some data query time/timezone.
    Parameters
    ----------
    df : pd.DataFrame
        The dataframe to update. This needs a column named ``ts_field``.
    time : datetime.time
        The time of day to use as the cutoff point for new data. Data points
        that you learn about after this time will become available to your
        algorithm on the next trading day.
    tz : tzinfo
        The timezone to normalize your dates to before comparing against
        `time`.
    inplace : bool, optional
        Update the dataframe in place.
    ts_field : str, optional
        The name of the timestamp field in ``df``.
    Returns
    -------
    df : pd.DataFrame
        The dataframe with the timestamp field normalized. If ``inplace`` is
        true, then this will be the same object as ``df`` otherwise this will
        be a copy.
    """
    if not inplace:
        # don't mutate the dataframe in place
        df = df.copy()
    # NOTE(review): the stored timestamps are localized as UTC here before
    # conversion into the query timezone -- assumes callers always store
    # UTC in ``ts_field``; confirm at the call sites.
    dtidx = pd.DatetimeIndex(df.loc[:, ts_field], tz='utc')
    dtidx_local_time = dtidx.tz_convert(tz)
    # Rows whose local wall-clock time falls in [time, midnight) arrived
    # after the daily cutoff (presumably what mask_between_time selects --
    # verify against its definition).
    to_roll_forward = mask_between_time(
        dtidx_local_time,
        time,
        _midnight,
        include_end=False,
    )
    # For all of the times that are greater than our query time add 1
    # day and truncate to the date.
    # We normalize twice here because of a bug in pandas 0.16.1 that causes
    # tz_localize() to shift some timestamps by an hour if they are not grouped
    # together by DST/EST.
    df.loc[to_roll_forward, ts_field] = (
        dtidx_local_time[to_roll_forward] + datetime.timedelta(days=1)
    ).normalize().tz_localize(None).tz_localize('utc').normalize()
    # Everything at or before the cutoff keeps its UTC date (midnight).
    df.loc[~to_roll_forward, ts_field] = dtidx[~to_roll_forward].normalize()
    return df
def check_data_query_args(data_query_time, data_query_tz):
    """Validate that ``data_query_time`` and ``data_query_tz`` are supplied
    together.
    Parameters
    ----------
    data_query_time : datetime.time or None
    data_query_tz : tzinfo or None
    Raises
    ------
    ValueError
        Raised when exactly one of the two arguments is None.
    """
    have_time = data_query_time is not None
    have_tz = data_query_tz is not None
    if have_time != have_tz:
        raise ValueError(
            "either 'data_query_time' and 'data_query_tz' must both be"
            " None or neither may be None (got %r, %r)" % (
                data_query_time,
                data_query_tz,
            ),
        )
|
apache-2.0
| 8,966,708,791,491,797,000
| 34.912409
| 79
| 0.618699
| false
|
napalm-automation/napalm-yang
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/bgp/neighbors/neighbor/afi_safis/afi_safi/graceful_restart/config/__init__.py
|
1
|
11849
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
# Expose a PY2-style `__builtin__` module object and a `long` alias on
# Python 3 so the generated classes below run on either interpreter.
if six.PY3:
    import builtins as __builtin__
    long = int
elif six.PY2:
    import __builtin__
# NOTE: auto-generated by pyangbind from the openconfig-network-instance
# YANG model; regenerate from the model rather than editing by hand.
class config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/bgp/neighbors/neighbor/afi-safis/afi-safi/graceful-restart/config. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    YANG Description: Configuration options for BGP graceful-restart
    """
    # '__enabled' backs the single 'enabled' leaf exposed below.
    __slots__ = ("_path_helper", "_extmethods", "__enabled")
    _yang_name = "config"
    _pybind_generated_by = "container"
    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        self.__enabled = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="enabled",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)
    def _path(self):
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "bgp",
                "neighbors",
                "neighbor",
                "afi-safis",
                "afi-safi",
                "graceful-restart",
                "config",
            ]
    def _get_enabled(self):
        """
        Getter method for enabled, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/neighbors/neighbor/afi_safis/afi_safi/graceful_restart/config/enabled (boolean)
        YANG Description: This leaf indicates whether graceful-restart is enabled for
        this AFI-SAFI
        """
        return self.__enabled
    def _set_enabled(self, v, load=False):
        """
        Setter method for enabled, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/neighbors/neighbor/afi_safis/afi_safi/graceful_restart/config/enabled (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_enabled is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_enabled() directly.
        YANG Description: This leaf indicates whether graceful-restart is enabled for
        this AFI-SAFI
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                default=YANGBool("false"),
                is_leaf=True,
                yang_name="enabled",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """enabled must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=True)""",
                }
            )
        self.__enabled = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_enabled(self):
        self.__enabled = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="enabled",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )
    # Exposed via __builtin__.property so the generated code works under
    # both PY2 and PY3 (see the six shim at the top of the module).
    enabled = __builtin__.property(_get_enabled, _set_enabled)
    _pyangbind_elements = OrderedDict([("enabled", enabled)])
# NOTE(review): this second auto-generated 'config' class rebinds the name
# and shadows the definition above; pyangbind emits one class per YANG
# module (base and -l2) into the same file.
class config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/bgp/neighbors/neighbor/afi-safis/afi-safi/graceful-restart/config. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    YANG Description: Configuration options for BGP graceful-restart
    """
    # '__enabled' backs the single 'enabled' leaf exposed below.
    __slots__ = ("_path_helper", "_extmethods", "__enabled")
    _yang_name = "config"
    _pybind_generated_by = "container"
    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        self.__enabled = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="enabled",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)
    def _path(self):
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "bgp",
                "neighbors",
                "neighbor",
                "afi-safis",
                "afi-safi",
                "graceful-restart",
                "config",
            ]
    def _get_enabled(self):
        """
        Getter method for enabled, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/neighbors/neighbor/afi_safis/afi_safi/graceful_restart/config/enabled (boolean)
        YANG Description: This leaf indicates whether graceful-restart is enabled for
        this AFI-SAFI
        """
        return self.__enabled
    def _set_enabled(self, v, load=False):
        """
        Setter method for enabled, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/neighbors/neighbor/afi_safis/afi_safi/graceful_restart/config/enabled (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_enabled is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_enabled() directly.
        YANG Description: This leaf indicates whether graceful-restart is enabled for
        this AFI-SAFI
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                default=YANGBool("false"),
                is_leaf=True,
                yang_name="enabled",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """enabled must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="enabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=True)""",
                }
            )
        self.__enabled = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_enabled(self):
        self.__enabled = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="enabled",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )
    # Exposed via __builtin__.property so the generated code works under
    # both PY2 and PY3 (see the six shim at the top of the module).
    enabled = __builtin__.property(_get_enabled, _set_enabled)
    _pyangbind_elements = OrderedDict([("enabled", enabled)])
|
apache-2.0
| -5,377,686,711,822,514,000
| 36.735669
| 369
| 0.577264
| false
|
mrrrgn/build-relengapi
|
relengapi/lib/auth/ldap_group_authz.py
|
1
|
2918
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import itertools
import ldap
import logging
from relengapi import p
from relengapi.lib.auth import permissions_stale
class LdapGroups(object):
    """Authorization plugin mapping LDAP group membership to relengapi
    permissions, driven by the RELENGAPI_PERMISSIONS config section."""

    def __init__(self, app):
        permissions_cfg = app.config.get('RELENGAPI_PERMISSIONS', {})
        self.group_permissions = permissions_cfg.get('group-permissions', {})
        # verify that each specified permission exists
        for perm in set(itertools.chain(*self.group_permissions.values())):
            try:
                p[perm]
            except KeyError:
                raise RuntimeError(
                    "invalid permission in settings: %r" % (perm,))
        self.uri = permissions_cfg['uri']
        self.login_dn = permissions_cfg['login_dn']
        self.login_password = permissions_cfg['login_password']
        self.user_base = permissions_cfg['user_base']
        self.group_base = permissions_cfg['group_base']
        self.debug = permissions_cfg.get('debug')
        self.logger = logging.getLogger(__name__)
        permissions_stale.connect_via(app)(self.on_permissions_stale)

    def get_user_groups(self, mail):
        """Return the list of LDAP group CNs that *mail* belongs to,
        or [] on any lookup failure."""
        if self.debug:
            self.logger.debug('Making LDAP query for %s', mail)
        conn = None
        try:
            # local import: ldap.filter is only needed here
            import ldap.filter
            conn = ldap.initialize(self.uri)
            conn.simple_bind_s(self.login_dn, self.login_password)
            # SECURITY: escape the externally-supplied address so it cannot
            # inject LDAP filter syntax, then convert mail to DN
            safe_mail = ldap.filter.escape_filter_chars(mail)
            people = conn.search_s(
                self.user_base, ldap.SCOPE_SUBTREE,
                '(&(objectClass=inetOrgPerson)(mail=%s))' % (safe_mail,), [])
            if not people or len(people) != 1:
                return []
            user_dn = people[0][0]
            result = conn.search_s(
                self.group_base, ldap.SCOPE_SUBTREE,
                '(&(objectClass=groupOfNames)(member=%s))' % user_dn, ['cn'])
            groups = []
            for glist in [g[1]['cn'] for g in result]:
                groups.extend(glist)
            return list(set(groups))
        except ldap.LDAPError:
            self.logger.exception("While connecting to the LDAP server")
            return []
        finally:
            # BUGFIX: the connection used to be leaked; always unbind it.
            if conn is not None:
                try:
                    conn.unbind_s()
                except ldap.LDAPError:
                    pass

    def on_permissions_stale(self, sender, user, permissions):
        """Signal handler: recompute *permissions* for *user* from their
        current LDAP group membership."""
        groups = self.get_user_groups(user.authenticated_email)
        if self.debug:
            self.logger.debug("Got groups %s for user %s", groups, user)
        allowed_permissions = set()
        for group in groups:
            for perm in self.group_permissions.get(group, []):
                allowed_permissions.add(perm)
        if self.debug:
            self.logger.debug("Setting permissions %s for user %s",
                              ', '.join(allowed_permissions), user)
        permissions.update([p[a] for a in allowed_permissions])
def init_app(app):
    # Constructed for its side effects: __init__ validates the configured
    # permissions and registers the permissions_stale signal handler.
    LdapGroups(app)
|
mpl-2.0
| -6,353,090,569,641,761,000
| 37.394737
| 93
| 0.585675
| false
|
jbrudvik/yahooscraper
|
tests.py
|
1
|
3744
|
import unittest
from yahooscraper import *
# Smoke tests: the yahooscraper package must expose login and fantasy.team
# under every supported import style.
class YahooScraperImportTests(unittest.TestCase):
    def test_import(self):
        import yahooscraper
        self.assertIsNotNone(yahooscraper)
        self.assertIsNotNone(yahooscraper.login)
        self.assertIsNotNone(yahooscraper.fantasy)
        self.assertIsNotNone(yahooscraper.fantasy.team)
        self.assertIsNotNone(yahooscraper.login.url)
        self.assertIsNotNone(yahooscraper.fantasy.team.url)
    def test_import_as(self):
        import yahooscraper as ys
        self.assertIsNotNone(ys)
        self.assertIsNotNone(ys.login)
        self.assertIsNotNone(ys.fantasy)
        self.assertIsNotNone(ys.fantasy.team)
        self.assertIsNotNone(ys.login.url)
        self.assertIsNotNone(ys.fantasy.team.url)
    def test_import_all(self):
        # import * only allowed at module level, so this relies upon
        # `from yahooscraper import *` outside of test class
        self.assertIsNotNone(login)
        self.assertIsNotNone(fantasy)
        self.assertIsNotNone(fantasy.team)
        self.assertIsNotNone(login.url)
        self.assertIsNotNone(fantasy.team.url)
    def test_import_explicit(self):
        from yahooscraper import login, fantasy
        self.assertIsNotNone(login)
        self.assertIsNotNone(fantasy)
        self.assertIsNotNone(fantasy.team)
        self.assertIsNotNone(login.url)
        self.assertIsNotNone(fantasy.team.url)
# Unit tests for the login helpers (HTML-parsing functions).
class YahooScraperLoginTests(unittest.TestCase):
    def test_authenticated_session(self):
        import yahooscraper as ys
        self.assertIsNotNone(ys.login.authenticated_session)
    def test_url(self):
        import yahooscraper as ys
        self.assertIsNotNone(ys.login.url())
    def test_headers(self):
        import yahooscraper as ys
        self.assertIsInstance(ys.login.headers(), dict)
    def test_path(self):
        import yahooscraper as ys
        self.assertIsNone(ys.login.path(''))
        self.assertEqual(
            ys.login.path('<div id="mbr-login-form" action="foo"></div>'),
            'foo')
    def test_post_data(self):
        import yahooscraper as ys
        self.assertIsNone(ys.login.post_data('', 'foo', 'bar'))
        self.assertIsInstance(
            ys.login.post_data('<div id="hiddens"></div>', 'foo', 'bar'),
            dict)
# Unit tests for the fantasy team page scraping helpers.
class YahooScraperFantasyTeamTests(unittest.TestCase):
    def test_url(self):
        import yahooscraper as ys
        self.assertIsNotNone(ys.fantasy.team.url('nba', 1, 2))
    def test_team(self):
        import yahooscraper as ys
        self.assertIsNone(ys.fantasy.team.team(''))
        self.assertEqual(
            ys.fantasy.team.team('<title>league-name - team-name</title>'),
            'team-name')
    def test_league(self):
        import yahooscraper as ys
        self.assertIsNone(ys.fantasy.team.league(''))
        # NOTE(review): assertIsNotNone treats the 2nd argument as a message;
        # this was likely meant to be assertEqual(..., 'league-name').
        self.assertIsNotNone(ys.fantasy.team.league(
            '<title>league-name - team-name</title>'),
            'league-name')
    def test_date(self):
        import yahooscraper as ys
        self.assertIsNone(ys.fantasy.team.date(''))
        self.assertEqual(ys.fantasy.team.date(
            '<input name="date" value="2020-01-31"></input>'),
            'Fri, Jan 31, 2020')
    def test_alternates(self):
        import yahooscraper as ys
        self.assertIsNotNone(iter(ys.fantasy.team.alternates('')))
    def test_start_active_players_path(self):
        import yahooscraper as ys
        self.assertIsNone(ys.fantasy.team.start_active_players_path(''))
        self.assertEqual(ys.fantasy.team.start_active_players_path(
            '<a href="baz">Start Active Players</a>'),
            'baz')
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
mit
| -5,059,203,193,517,457,000
| 30.462185
| 75
| 0.644498
| false
|
swatlab/uplift-analysis
|
src_code_metrics.py
|
1
|
4848
|
import re, csv, pytz, json, subprocess
from dateutil import parser
import pandas as pd
import get_bugs
from libmozdata import patchanalysis
# Execute a shell command
def shellCommand(command_str):
    """Run *command_str* as a subprocess and return its captured stdout.

    The command is tokenized on whitespace (no shell interpretation), so
    arguments containing spaces are not supported.
    """
    # BUGFIX: split() instead of split(' ') -- runs of whitespace no longer
    # produce empty-string arguments for Popen.
    cmd = subprocess.Popen(command_str.split(), stdout=subprocess.PIPE)
    cmd_out, _ = cmd.communicate()
    return cmd_out
def loadReleaseDate():
    """Load release dates from complexity_sna/data/release2commit.csv.
    Returns a pair: [[date_str, release], ...] in file order (date reduced
    to digits only, e.g. YYYYMMDD), and the release numbers in reverse
    file order.
    """
    print 'Loading Relase date ...'
    rel_date_list = list()
    rel_list = list()
    with open('complexity_sna/data/release2commit.csv') as f:
        csvreader = csv.reader(f)
        for row in csvreader:
            rel_num = row[0]
            # strip every non-digit, e.g. '2017-06-13' -> '20170613'
            rel_date = re.sub(r'[^0-9]', '', row[2])
            rel_date_list.append([rel_date, rel_num])
            rel_list.append(rel_num)
    return rel_date_list, list(reversed(rel_list))
def loadCommitDate():
    """Load the commit -> date mapping from commit_date.csv (tab separated).
    Returns a dict of commit id -> 'YYYYMMDD' string normalized to UTC.
    """
    print 'Loading commit date ...'
    commit_date_dict = dict()
    with open('commit_date.csv') as f:
        csvreader = csv.reader(f, delimiter='\t')
        for row in csvreader:
            commit_id = row[0]
            raw_time = row[1]
            # parse the raw timestamp and reduce it to a UTC YYYYMMDD key
            datetime_obj = parser.parse(raw_time)
            time_str = datetime_obj.astimezone(pytz.utc).strftime('%Y%m%d')
            commit_date_dict[commit_id] = time_str
    return commit_date_dict
def correspondingRelease(commit_id, commit_date_dict, rel_date_list):
    """Return the release whose date is the newest one <= the commit's date.

    ``rel_date_list`` holds [date_str, release] pairs ordered newest-first;
    ``commit_id`` may be an abbreviation of a key in ``commit_date_dict``.
    Falls back to the last (oldest) release when the commit date is unknown
    or predates every release.
    """
    if commit_id in commit_date_dict:
        commit_date = commit_date_dict[commit_id]
    else:
        # the id may be abbreviated; fall back to a substring match
        commit_date = None
        for key in commit_date_dict:
            if commit_id in key:
                commit_date = commit_date_dict[key]
    if commit_date is not None:
        for item in rel_date_list:
            if commit_date >= item[0]:
                return item[1]
    # BUGFIX: an unknown commit id previously raised NameError here because
    # commit_date was never assigned; now it falls through to the oldest
    # release like a too-early date does.
    return rel_date_list[-1][1]
def removePrefix(path):
    """Strip any leading '/' and '.' characters from *path*."""
    return path.lstrip('./')
def loadMetrics4Releases(category, release_list):
    """Read the per-release metric CSVs for *category*.

    Returns a pair: a dict mapping release -> {file node: metric values},
    and the metric column names taken from the last header row read.
    """
    rel_metric_dict = dict()
    metric_names = None
    for rel in release_list:
        metric_file = 'complexity_sna/code_metrics/%s-%s.csv' % (
            category, rel.replace('.', '_'))
        per_file_metrics = dict()
        with open(metric_file, 'r') as f:
            rows = csv.reader(f)
            # first row is the header: column 0 is the file node name
            metric_names = next(rows, None)[1:]
            for row in rows:
                per_file_metrics[removePrefix(row[0])] = row[1:]
        rel_metric_dict[rel] = per_file_metrics
    return rel_metric_dict, metric_names
def extractSourceCodeMetrics(rel_date_list, rel_list, commit_date_dict, category):
    """For every bug, average the *category* metrics of the C/C++ files its
    commits touched, taken from the release preceding each commit.
    Relies on the module globals DEBUG and HG_REPO_PATH being set (they are
    assigned in the __main__ block).
    Returns a DataFrame with a 'bug_id' column plus one column per metric.
    """
    # load metrics
    rel_metric_dict, metric_names = loadMetrics4Releases(category, rel_list)
    # map and compute metric values
    result_list = list()
    i = 0
    bugs = get_bugs.get_all()
    for bug in bugs:
        if DEBUG and i > 5:
            break
        bug_id = bug['id']
        commits, _ = patchanalysis.get_commits_for_bug(bug)
        print bug_id
        # extract metrics
        raw_list = list()
        metric_list = list()
        for commit_id in commits:
            i += 1
            if DEBUG:
                print '  ', commit_id
            # corresponding (prior) release of a commit
            rel_num = correspondingRelease(commit_id, commit_date_dict, rel_date_list)
            # changed files in a commit
            shell_res = shellCommand('hg -R %s log -r %s --template {files}\t{diffstat}' %(HG_REPO_PATH, commit_id)).split('\t')
            raw_changed_files = shell_res[0]
            cpp_changed_files = re.findall(r'(\S+\.(?:c|cpp|cc|cxx|h|hpp|hxx)\b)', raw_changed_files)
            # map file/node to metrics
            for a_file in cpp_changed_files:
                metric_dict = rel_metric_dict[rel_num]
                for node in metric_dict:
                    # substring match: metric nodes may be path suffixes
                    if node in a_file:
                        metrics = metric_dict[node]
                        raw_list.append(metrics)
        # compute average/sum value for a specific attachment
        if len(raw_list):
            df = pd.DataFrame(raw_list, columns=metric_names).apply(pd.to_numeric)
            for metric_name in metric_names:
                metric_list.append(round(df[metric_name].mean(), 2))
            result_list.append([bug_id] + metric_list)
        else:
            # no C/C++ files matched: emit a zero row for this bug
            result_list.append([bug_id] + [0]*len(metric_names))
    return pd.DataFrame(result_list, columns=['bug_id']+metric_names)
if __name__ == '__main__':
    # DEBUG limits processing to a handful of commits and enables verbose
    # output; HG_REPO_PATH points at the mozilla-central clone to query.
    DEBUG = False
    HG_REPO_PATH = '../firefox/'
    # load data
    rel_date_list, rel_list = loadReleaseDate()
    commit_date_dict = loadCommitDate()
    # extract metrics
    df_complexity = extractSourceCodeMetrics(rel_date_list, rel_list, commit_date_dict, 'complexity')
    df_sna = extractSourceCodeMetrics(rel_date_list, rel_list, commit_date_dict, 'sna')
    # join complexity and SNA metrics per bug and persist the result
    df_code = pd.merge(df_complexity, df_sna, on='bug_id')
    df_code.to_csv('independent_metrics/src_code_metrics.csv', index=False)
    if DEBUG:
        print df_code
|
mpl-2.0
| 8,802,598,191,561,831,000
| 36.007634
| 128
| 0.591172
| false
|
VapourApps/va_master
|
va_master/utils/old_to_new_store.py
|
1
|
3048
|
import requests, json, functools
import base64
import os
# Local consul HTTP API endpoint used by the raw KV helpers below.
path = 'http://127.0.0.1:8500'
from va_master.handlers.datastore_handler import DatastoreHandler
from va_master.consul_kv.datastore import ConsulStore
import tornado.ioloop
folder_pwd = os.path.join(os.path.dirname(os.path.realpath(__file__)), '')
# New-style datastore/handler: the migration target.
datastore = ConsulStore()
datastore_handler = DatastoreHandler(datastore)
# Shortcut for driving the async datastore API synchronously.
run_sync = tornado.ioloop.IOLoop.instance().run_sync
def datastore_get(handle, get_key = ''):
    """Fetch and decode the value stored at *handle* via consul's raw KV
    HTTP API (values arrive base64-encoded JSON).
    Returns None for a missing key; the first decoded entry otherwise.
    When *get_key* is set, returns that sub-key of the decoded object
    (falling back to the whole object if the sub-key is absent).
    """
    url = '%s/v1/kv/%s' % (path, handle)
    print 'Url is : ', url
    result = requests.get(url).text
    if not result:
        return
    result = json.loads(result)
    # consul returns a list of {'Value': <base64 json>, ...} entries
    result = [x['Value'] for x in result]
    result = [json.loads(base64.b64decode(x)) for x in result if x]
    result = result[0]
    if get_key:
        result = result.get(get_key, result)
    return result
def datastore_insert(handle, data):
    """PUT *data* at *handle* via consul's raw KV HTTP API."""
    url = '%s/v1/kv/%s' % (path, handle)
    print 'Url is : ', url
    result = requests.put(url, data = data)
    print result
def old_to_new_datastore(object_name, object_handle_unformatted, object_handle_ids = [], get_key = '', special_data_parsing = None, special_data_kwargs = {}, old_key = ''):
    """Migrate every *object_name* record from the old flat key layout into
    the new per-object handle layout.
    *object_handle_unformatted* is a format string (e.g. 'users/{username}')
    filled from the *object_handle_ids* fields of each old record, and
    *special_data_parsing* optionally converts each record before insertion.
    NOTE(review): the mutable defaults ([] / {}) are shared across calls and
    the bare except below swallows all errors -- pre-existing behaviour,
    left unchanged in this documentation pass.
    """
    if not old_key:
        old_key = object_name + 's'
    old_data = datastore_get(old_key, get_key)
    if not old_data: return
    for data in old_data:
        print 'Data is : ', data
        try:
            handles = {x : data.get(x) for x in object_handle_ids}
        except:
            continue #This usually happens if the script has already been run.
        # NOTE(review): object_handle is computed but never passed to
        # insert_object; presumably insert_object derives the handle from
        # handle_data -- verify against DatastoreHandler.
        object_handle = object_handle_unformatted.format(**handles)
        if special_data_parsing:
            data = special_data_parsing(data, **special_data_kwargs)
        print 'Want to insert : ', data, ' in : ', object_handle, ' with handles : ', handles
        run_sync(functools.partial(datastore_handler.insert_object, object_name, data = data, handle_data = handles))
def panel_parsing(data, user_type):
    """Convert an old-style panel record into the new per-user-type shape.

    Parameters
    ----------
    data : dict
        Old panel record with 'name', 'panels', 'icon' and 'servers' keys.
        Mutated in place when 'panels' is still the legacy list form: it is
        replaced with ``{user_type: []}``.
    user_type : str
        Which panel set to extract ('admin' or 'user' in convert_all).

    Returns
    -------
    dict
        The new-style panel record for *user_type*.
    """
    # isinstance instead of `type(...) == list` (idiomatic type check).
    if isinstance(data['panels'], list):
        data['panels'] = {user_type: []}
    return {
        'name': data['name'],
        'panels': data['panels'][user_type],
        'icon': data['icon'],
        'servers': data['servers'],
    }
def convert_all():
    """Run every old->new datastore migration."""
    old_to_new_datastore('user', 'users/{username}', ['username'])
    old_to_new_datastore('admin', 'admins/{username}', ['username'])
    # panels are stored once under 'panels' but split per user type
    old_to_new_datastore('admin_panel', 'panels/admin/{name}', ['name'], get_key = 'admin', special_data_parsing = panel_parsing, special_data_kwargs = {"user_type" : "admin"}, old_key = 'panels')
    old_to_new_datastore('user_panel', 'panels/user/{name}', ['name'], get_key = 'user', special_data_parsing = panel_parsing, special_data_kwargs = {"user_type" : "user"}, old_key = 'panels')
    old_to_new_datastore('user_group', 'user_groups/{group_name}', ['group_name'])
    old_to_new_datastore('state', 'state/{name}', ['name'], get_key = 'states', old_key = 'init_vals')
if __name__ == '__main__' :
    convert_all()
|
gpl-3.0
| 821,649,943,494,431,100
| 37.582278
| 196
| 0.624016
| false
|
rs2/pandas
|
pandas/tests/indexing/multiindex/test_xs.py
|
1
|
9100
|
import numpy as np
import pytest
from pandas import DataFrame, Index, IndexSlice, MultiIndex, Series, concat, date_range
import pandas._testing as tm
import pandas.core.common as com
# Shared fixture: small frame with a 4-level MultiIndex (one/two/three/four)
# used by the multi-level selection tests below.
@pytest.fixture
def four_level_index_dataframe():
    arr = np.array(
        [
            [-0.5109, -2.3358, -0.4645, 0.05076, 0.364],
            [0.4473, 1.4152, 0.2834, 1.00661, 0.1744],
            [-0.6662, -0.5243, -0.358, 0.89145, 2.5838],
        ]
    )
    index = MultiIndex(
        levels=[["a", "x"], ["b", "q"], [10.0032, 20.0, 30.0], [3, 4, 5]],
        codes=[[0, 0, 1], [0, 1, 1], [0, 1, 2], [2, 1, 0]],
        names=["one", "two", "three", "four"],
    )
    return DataFrame(arr, index=index, columns=list("ABCDE"))
@pytest.mark.parametrize(
    "key, level, exp_arr, exp_index",
    [
        ("a", "lvl0", lambda x: x[:, 0:2], Index(["bar", "foo"], name="lvl1")),
        ("foo", "lvl1", lambda x: x[:, 1:2], Index(["a"], name="lvl0")),
    ],
)
def test_xs_named_levels_axis_eq_1(key, level, exp_arr, exp_index):
    # see gh-2903
    arr = np.random.randn(4, 4)
    index = MultiIndex(
        levels=[["a", "b"], ["bar", "foo", "hello", "world"]],
        codes=[[0, 0, 1, 1], [0, 1, 2, 3]],
        names=["lvl0", "lvl1"],
    )
    df = DataFrame(arr, columns=index)
    result = df.xs(key, level=level, axis=1)
    expected = DataFrame(exp_arr(arr), columns=exp_index)
    tm.assert_frame_equal(result, expected)
def test_xs_values(multiindex_dataframe_random_data):
    # .xs on a full key must return the same data as positional row access
    df = multiindex_dataframe_random_data
    result = df.xs(("bar", "two")).values
    expected = df.values[4]
    tm.assert_almost_equal(result, expected)
def test_xs_loc_equality(multiindex_dataframe_random_data):
    # .xs and .loc must agree for a complete-key lookup
    df = multiindex_dataframe_random_data
    result = df.xs(("bar", "two"))
    expected = df.loc[("bar", "two")]
    tm.assert_series_equal(result, expected)
def test_xs_missing_values_in_index():
    # see gh-6574
    # missing values in returned index should be preserved
    acc = [
        ("a", "abcde", 1),
        ("b", "bbcde", 2),
        ("y", "yzcde", 25),
        ("z", "xbcde", 24),
        ("z", None, 26),
        ("z", "zbcde", 25),
        ("z", "ybcde", 26),
    ]
    df = DataFrame(acc, columns=["a1", "a2", "cnt"]).set_index(["a1", "a2"])
    expected = DataFrame(
        {"cnt": [24, 26, 25, 26]},
        index=Index(["xbcde", np.nan, "zbcde", "ybcde"], name="a2"),
    )
    result = df.xs("z", level="a1")
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("key, level", [("one", "second"), (["one"], ["second"])])
def test_xs_with_duplicates(key, level, multiindex_dataframe_random_data):
    # see gh-13719
    frame = multiindex_dataframe_random_data
    df = concat([frame] * 2)
    assert df.index.is_unique is False
    expected = concat([frame.xs("one", level="second")] * 2)
    result = df.xs(key, level=level)
    tm.assert_frame_equal(result, expected)
def test_xs_level(multiindex_dataframe_random_data):
    # selecting one named level drops it, keeping the other as the index
    df = multiindex_dataframe_random_data
    result = df.xs("two", level="second")
    expected = df[df.index.get_level_values(1) == "two"]
    expected.index = Index(["foo", "bar", "baz", "qux"], name="first")
    tm.assert_frame_equal(result, expected)
def test_xs_level_eq_2():
    # xs on the third (positional) level of an unnamed MultiIndex
    arr = np.random.randn(3, 5)
    index = MultiIndex(
        levels=[["a", "p", "x"], ["b", "q", "y"], ["c", "r", "z"]],
        codes=[[2, 0, 1], [2, 0, 1], [2, 0, 1]],
    )
    df = DataFrame(arr, index=index)
    expected = DataFrame(arr[1:2], index=[["a"], ["b"]])
    result = df.xs("c", level=2)
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"indexer",
[
lambda df: df.xs(("a", 4), level=["one", "four"]),
lambda df: df.xs("a").xs(4, level="four"),
],
)
def test_xs_level_multiple(indexer, four_level_index_dataframe):
df = four_level_index_dataframe
expected_values = [[0.4473, 1.4152, 0.2834, 1.00661, 0.1744]]
expected_index = MultiIndex(
levels=[["q"], [20.0]], codes=[[0], [0]], names=["two", "three"]
)
expected = DataFrame(expected_values, index=expected_index, columns=list("ABCDE"))
result = indexer(df)
tm.assert_frame_equal(result, expected)
def test_xs_setting_with_copy_error(multiindex_dataframe_random_data):
# this is a copy in 0.14
df = multiindex_dataframe_random_data
result = df.xs("two", level="second")
# setting this will give a SettingWithCopyError
# as we are trying to write a view
msg = "A value is trying to be set on a copy of a slice from a DataFrame"
with pytest.raises(com.SettingWithCopyError, match=msg):
result[:] = 10
def test_xs_setting_with_copy_error_multiple(four_level_index_dataframe):
# this is a copy in 0.14
df = four_level_index_dataframe
result = df.xs(("a", 4), level=["one", "four"])
# setting this will give a SettingWithCopyError
# as we are trying to write a view
msg = "A value is trying to be set on a copy of a slice from a DataFrame"
with pytest.raises(com.SettingWithCopyError, match=msg):
result[:] = 10
def test_xs_integer_key():
# see gh-2107
dates = range(20111201, 20111205)
ids = list("abcde")
index = MultiIndex.from_product([dates, ids], names=["date", "secid"])
df = DataFrame(np.random.randn(len(index), 3), index, ["X", "Y", "Z"])
result = df.xs(20111201, level="date")
expected = df.loc[20111201, :]
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"indexer", [lambda df: df.xs("a", level=0), lambda df: df.xs("a")]
)
def test_xs_level0(indexer, four_level_index_dataframe):
df = four_level_index_dataframe
expected_values = [
[-0.5109, -2.3358, -0.4645, 0.05076, 0.364],
[0.4473, 1.4152, 0.2834, 1.00661, 0.1744],
]
expected_index = MultiIndex(
levels=[["b", "q"], [10.0032, 20.0], [4, 5]],
codes=[[0, 1], [0, 1], [1, 0]],
names=["two", "three", "four"],
)
expected = DataFrame(expected_values, index=expected_index, columns=list("ABCDE"))
result = indexer(df)
tm.assert_frame_equal(result, expected)
def test_xs_level_series(multiindex_dataframe_random_data):
# this test is not explicitly testing .xs functionality
# TODO: move to another module or refactor
df = multiindex_dataframe_random_data
s = df["A"]
result = s[:, "two"]
expected = df.xs("two", level=1)["A"]
tm.assert_series_equal(result, expected)
def test_xs_level_series_ymd(multiindex_year_month_day_dataframe_random_data):
# this test is not explicitly testing .xs functionality
# TODO: move to another module or refactor
df = multiindex_year_month_day_dataframe_random_data
s = df["A"]
result = s[2000, 5]
expected = df.loc[2000, 5]["A"]
tm.assert_series_equal(result, expected)
def test_xs_level_series_slice_not_implemented(
multiindex_year_month_day_dataframe_random_data,
):
# this test is not explicitly testing .xs functionality
# TODO: move to another module or refactor
# not implementing this for now
df = multiindex_year_month_day_dataframe_random_data
s = df["A"]
msg = r"\(2000, slice\(3, 4, None\)\)"
with pytest.raises(TypeError, match=msg):
s[2000, 3:4]
def test_xs_IndexSlice_argument_not_implemented():
    # GH 35301
    mi = MultiIndex(
        levels=[[("foo", "bar", 0), ("foo", "baz", 0), ("foo", "qux", 0)], [0, 1]],
        codes=[[0, 0, 1, 1, 2, 2], [0, 1, 0, 1, 0, 1]],
    )
    ser = Series(np.random.randn(6), index=mi)
    frame = DataFrame(np.random.randn(6, 4), index=mi)

    expected_msg = (
        "Expected label or tuple of labels, got "
        r"\(\('foo', 'qux', 0\), slice\(None, None, None\)\)"
    )
    # both DataFrame.xs and Series.xs reject an IndexSlice argument
    for obj in (frame, ser):
        with pytest.raises(TypeError, match=expected_msg):
            obj.xs(IndexSlice[("foo", "qux", 0), :])
def test_series_getitem_multiindex_xs():
    # GH6258
    dates = list(date_range("20130903", periods=3))
    mi = MultiIndex.from_product([list("AB"), dates])
    ser = Series([1, 3, 4, 1, 3, 4], index=mi)

    # a date string key on level 1 selects one row per outer label
    result = ser.xs("20130903", level=1)
    tm.assert_series_equal(result, Series([1, 1], index=list("AB")))
def test_series_getitem_multiindex_xs_by_label():
    # GH5684
    mi = MultiIndex.from_tuples(
        [("a", "one"), ("a", "two"), ("b", "one"), ("b", "two")]
    )
    ser = Series([1, 2, 3, 4], index=mi)
    # set_names(inplace=True) returns None by contract
    assert ser.index.set_names(["L1", "L2"], inplace=True) is None

    expected = Series([1, 3], index=["a", "b"])
    assert expected.index.set_names(["L1"], inplace=True) is None

    tm.assert_series_equal(ser.xs("one", level="L2"), expected)
def test_xs_levels_raises():
    frame = DataFrame({"A": [1, 2, 3]})

    # xs(level=...) is only meaningful on a MultiIndex
    expected_msg = "Index must be a MultiIndex"
    with pytest.raises(TypeError, match=expected_msg):
        frame.xs(0, level="as")

    ser = frame.A
    with pytest.raises(TypeError, match=expected_msg):
        ser.xs(0, level="as")
|
bsd-3-clause
| -494,235,310,551,949,200
| 31.5
| 87
| 0.592857
| false
|
AparatTechnologies/django-connectwise
|
djconnectwise/api.py
|
1
|
16448
|
import logging
from django.conf import settings
from djconnectwise.utils import RequestSettings
import re
import requests
from retrying import retry
class ConnectWiseAPIError(Exception):
    """Base error for any failed ConnectWise API interaction.

    Raise this, not request exceptions.
    """
    pass
class ConnectWiseRecordNotFoundError(ConnectWiseAPIError):
    """Raised when the requested record does not exist (HTTP 404)."""
    pass
CW_RESPONSE_MAX_RECORDS = 1000  # Maximum records ConnectWise sends per page.

RETRY_WAIT_EXPONENTIAL_MULTAPPLIER = 1000  # Initial retry backoff (ms).
RETRY_WAIT_EXPONENTIAL_MAX = 10000  # Upper bound on retry backoff (ms).

CW_DEFAULT_PAGE = 1  # CW Pagination is 1-indexed

# Extracts the attachment filename from a Content-Disposition header,
# with or without surrounding double quotes.
CONTENT_DISPOSITION_RE = re.compile(
    '^attachment; filename=\"{0,1}(.*?)\"{0,1}$'
)

logger = logging.getLogger(__name__)
class ConnectWiseAPIClient(object):
    """Base client for one ConnectWise REST API module.

    Subclasses set ``API`` to the module name (e.g. ``'service'``); it is
    baked into the server URL. Credentials and server details fall back to
    the Django settings when not passed explicitly.
    """
    API = None

    def __init__(
        self,
        company_id=None,
        server_url=None,
        api_public_key=None,
        api_private_key=None,
        api_codebase=None
    ):
        # Fall back to project-level settings for anything not supplied.
        if not company_id:
            company_id = settings.CONNECTWISE_CREDENTIALS['company_id']
        if not server_url:
            server_url = settings.CONNECTWISE_SERVER_URL
        if not api_public_key:
            api_public_key = settings.CONNECTWISE_CREDENTIALS['api_public_key']
        if not api_private_key:
            api_private_key = settings.CONNECTWISE_CREDENTIALS[
                'api_private_key'
            ]
        if not api_codebase:
            api_codebase = settings.CONNECTWISE_CREDENTIALS['api_codebase']
        if not self.API:
            raise ValueError('API not specified')

        self.api_public_key = api_public_key
        self.api_private_key = api_private_key
        self.api_codebase = api_codebase
        self.server_url = '{0}/{1}/apis/3.0/{2}/'.format(
            server_url,
            self.api_codebase,
            self.API,
        )

        # HTTP basic auth: username is "<company>+<public key>".
        self.auth = ('{0}+{1}'.format(company_id, self.api_public_key),
                     '{0}'.format(self.api_private_key),)

        self.request_settings = RequestSettings().get_settings()
        self.timeout = self.request_settings['timeout']

    def _endpoint(self, path):
        """Return the absolute URL for *path* under this API module."""
        return '{0}{1}'.format(self.server_url, path)

    def _log_failed(self, response):
        """Log a non-2xx response with URL, status code and body."""
        logger.error('FAILED API CALL: {0} - {1} - {2}'.format(
            response.url, response.status_code, response.content))

    def fetch_resource(self, endpoint_url, params=None, should_page=False,
                       retry_counter=None,
                       *args, **kwargs):
        """
        A convenience method for issuing a GET request to the
        specified REST endpoint and returning the decoded JSON body.

        Retries with exponential backoff (see the @retry decorator below).
        Raises ConnectWiseRecordNotFoundError on 404 and
        ConnectWiseAPIError on any other failure.

        Note: retry_counter is used specifically for testing.
        It is a dict in the form {'count': 0} that is passed in
        to verify the number of attempts that were made.
        """
        @retry(stop_max_attempt_number=self.request_settings['max_attempts'],
               wait_exponential_multiplier=RETRY_WAIT_EXPONENTIAL_MULTAPPLIER,
               wait_exponential_max=RETRY_WAIT_EXPONENTIAL_MAX)
        def _fetch_resource(endpoint_url, params=None, should_page=False,
                            retry_counter=None,
                            *args, **kwargs):
            if retry_counter:
                retry_counter['count'] += 1

            if not params:
                params = {}

            if should_page:
                params['pageSize'] = kwargs.get('page_size',
                                                CW_RESPONSE_MAX_RECORDS)
                params['page'] = kwargs.get('page', CW_DEFAULT_PAGE)

            try:
                endpoint = self._endpoint(endpoint_url)
                logger.debug('Making GET request to {}'.format(endpoint))
                response = requests.get(
                    endpoint,
                    params=params,
                    auth=self.auth,
                    timeout=self.timeout,
                )
            except requests.RequestException as e:
                logger.error('Request failed: GET {}: {}'.format(endpoint, e))
                raise ConnectWiseAPIError('{}'.format(e))

            if 200 <= response.status_code < 300:
                return response.json()
            if response.status_code == 404:
                msg = 'Resource {} was not found.'.format(response.url)
                logger.warning(msg)
                raise ConnectWiseRecordNotFoundError(msg)
            else:
                self._log_failed(response)
                raise ConnectWiseAPIError(response.content)

        # BUG FIX: retry_counter was previously not forwarded to the inner
        # function, so the attempt-count instrumentation never fired.
        return _fetch_resource(endpoint_url, params=params,
                               should_page=should_page,
                               retry_counter=retry_counter,
                               *args, **kwargs)
class ProjectAPIClient(ConnectWiseAPIClient):
    """Client for the ConnectWise 'project' API module."""
    API = 'project'
    ENDPOINT_PROJECTS = 'projects/'

    def get_project(self, project_id):
        """Fetch one project by its ID."""
        # NOTE(review): ENDPOINT_PROJECTS already ends with '/', so this
        # produces a double slash in the URL — preserved as-is.
        url = '{}/{}'.format(self.ENDPOINT_PROJECTS, project_id)
        return self.fetch_resource(url)

    def get_projects(self, *args, **kwargs):
        """Fetch all projects, transparently paged."""
        return self.fetch_resource(self.ENDPOINT_PROJECTS, should_page=True,
                                   *args, **kwargs)
class CompanyAPIClient(ConnectWiseAPIClient):
    """Client for the ConnectWise 'company' API module."""
    API = 'company'
    ENDPOINT_COMPANIES = 'companies'
    ENDPOINT_COMPANY_STATUSES = '{}/statuses'.format(ENDPOINT_COMPANIES)

    def by_id(self, company_id):
        """Fetch one company by its ID."""
        endpoint_url = '{}/{}'.format(self.ENDPOINT_COMPANIES, company_id)
        return self.fetch_resource(endpoint_url)

    def get_companies(self, *args, **kwargs):
        """Fetch all companies, optionally filtered by ``conditions``."""
        if 'conditions' in kwargs:
            # Move the conditions into the request's query params. BUG FIX:
            # pop the key so the stray 'conditions' kwarg is no longer
            # forwarded to fetch_resource as well.
            kwargs['params'] = {
                'conditions': kwargs.pop('conditions')
            }
        return self.fetch_resource(self.ENDPOINT_COMPANIES, should_page=True,
                                   *args, **kwargs)

    def get_company_statuses(self, *args, **kwargs):
        """Fetch the available company statuses, transparently paged."""
        return self.fetch_resource(self.ENDPOINT_COMPANY_STATUSES,
                                   should_page=True,
                                   *args, **kwargs)
class SalesAPIClient(ConnectWiseAPIClient):
    """Client for the ConnectWise 'sales' API module."""
    API = 'sales'
    ENDPOINT_OPPORTUNITIES = 'opportunities'
    ENDPOINT_OPPORTUNITY_STATUSES = \
        '{}/statuses'.format(ENDPOINT_OPPORTUNITIES)
    ENDPOINT_OPPORTUNITY_TYPES = \
        '{}/types'.format(ENDPOINT_OPPORTUNITIES)

    def by_id(self, opportunity_id):
        """Fetch one opportunity by its ID."""
        url = '{}/{}'.format(self.ENDPOINT_OPPORTUNITIES, opportunity_id)
        return self.fetch_resource(url)

    def get_opportunities(self, *args, **kwargs):
        """Fetch all opportunities, transparently paged."""
        return self.fetch_resource(self.ENDPOINT_OPPORTUNITIES,
                                   should_page=True,
                                   *args, **kwargs)

    def get_opportunity_statuses(self, *args, **kwargs):
        """Fetch the available opportunity statuses."""
        return self.fetch_resource(self.ENDPOINT_OPPORTUNITY_STATUSES,
                                   should_page=True,
                                   *args, **kwargs)

    def get_opportunity_types(self, *args, **kwargs):
        """Fetch the available opportunity types."""
        return self.fetch_resource(self.ENDPOINT_OPPORTUNITY_TYPES,
                                   should_page=True,
                                   *args, **kwargs)
class SystemAPIClient(ConnectWiseAPIClient):
    """Client for the ConnectWise 'system' API module (members, callbacks)."""
    API = 'system'

    # endpoints
    ENDPOINT_MEMBERS = 'members/'
    ENDPOINT_MEMBERS_IMAGE = 'members/{}/image'
    ENDPOINT_MEMBERS_COUNT = 'members/count'
    ENDPOINT_CALLBACKS = 'callbacks/'
    ENDPOINT_INFO = 'info/'

    def get_connectwise_version(self):
        """Return the server's reported version string ('' if absent)."""
        result = self.fetch_resource(self.ENDPOINT_INFO)
        return result.get('version', '')

    def get_members(self, *args, **kwargs):
        """Fetch all members, transparently paged."""
        return self.fetch_resource(self.ENDPOINT_MEMBERS,
                                   should_page=True, *args, **kwargs)

    def get_member_count(self):
        """Fetch the member-count resource (decoded JSON)."""
        return self.fetch_resource(self.ENDPOINT_MEMBERS_COUNT)

    def get_callbacks(self, *args, **kwargs):
        """Fetch all registered callbacks, transparently paged."""
        return self.fetch_resource(self.ENDPOINT_CALLBACKS,
                                   should_page=True, *args, **kwargs)

    def delete_callback(self, entry_id):
        """Delete the callback with *entry_id*; return the raw response.

        NOTE(review): unlike create/update below, a non-2xx here raises
        requests.HTTPError (via raise_for_status), not ConnectWiseAPIError —
        callers may depend on that, so it is preserved.
        """
        try:
            endpoint = self._endpoint(
                '{}{}'.format(self.ENDPOINT_CALLBACKS, entry_id)
            )
            logger.debug('Making DELETE request to {}'.format(endpoint))
            response = requests.request(
                'delete',
                endpoint,
                auth=self.auth,
                timeout=self.timeout,
            )
        except requests.RequestException as e:
            logger.error('Request failed: DELETE {}: {}'.format(endpoint, e))
            raise ConnectWiseAPIError('{}'.format(e))
        response.raise_for_status()
        return response

    def create_callback(self, callback_entry):
        """POST *callback_entry* (JSON-serializable) and return the JSON reply.

        Raises ConnectWiseAPIError on transport failure or non-2xx status.
        """
        try:
            endpoint = self._endpoint(self.ENDPOINT_CALLBACKS)
            logger.debug('Making POST request to {}'.format(endpoint))
            response = requests.request(
                'post',
                endpoint,
                json=callback_entry,
                auth=self.auth,
                timeout=self.timeout,
            )
        except requests.RequestException as e:
            logger.error('Request failed: POST {}: {}'.format(endpoint, e))
            raise ConnectWiseAPIError('{}'.format(e))

        if 200 <= response.status_code < 300:
            return response.json()
        else:
            self._log_failed(response)
            raise ConnectWiseAPIError(response.content)

    def update_callback(self, callback_entry):
        """PUT *callback_entry* (must expose ``entry_id``); return JSON reply.

        Raises ConnectWiseAPIError on transport failure or non-2xx status.
        """
        try:
            endpoint = self._endpoint(
                'callbacks/{0}'.format(callback_entry.entry_id)
            )
            logger.debug('Making PUT request to {}'.format(endpoint))
            response = requests.request(
                'put',
                endpoint,
                json=callback_entry,
                auth=self.auth,
                timeout=self.timeout,
            )
        except requests.RequestException as e:
            logger.error('Request failed: PUT {}: {}'.format(endpoint, e))
            raise ConnectWiseAPIError('{}'.format(e))

        if 200 <= response.status_code < 300:
            return response.json()
        else:
            self._log_failed(response)
            raise ConnectWiseAPIError(response.content)

    def get_member_by_identifier(self, identifier):
        """Fetch one member record by its identifier."""
        return self.fetch_resource('members/{0}'.format(identifier))

    def get_member_image_by_identifier(self, identifier):
        """
        Return a (filename, content) tuple.

        On failure the response is logged and (None, None) is returned
        instead of raising.
        """
        try:
            endpoint = self._endpoint(
                self.ENDPOINT_MEMBERS_IMAGE.format(identifier)
            )
            logger.debug('Making GET request to {}'.format(endpoint))
            response = requests.get(
                endpoint,
                auth=self.auth,
                timeout=self.timeout,
            )
        except requests.RequestException as e:
            logger.error('Request failed: GET {}: {}'.format(endpoint, e))
            raise ConnectWiseAPIError('{}'.format(e))

        if 200 <= response.status_code < 300:
            headers = response.headers
            # requests' headers mapping accepts a keyword default here.
            content_disposition_header = headers.get('Content-Disposition',
                                                     default='')
            msg = "Got member '{}' image; size {} bytes " \
                "and content-disposition header '{}'"

            logger.info(msg.format(
                identifier,
                len(response.content),
                content_disposition_header
            ))
            attachment_filename = self._attachment_filename(
                content_disposition_header)
            return attachment_filename, response.content
        else:
            self._log_failed(response)
            return None, None

    def _attachment_filename(self, content_disposition):
        """
        Return the attachment filename from the content disposition header.

        If there's no match, return None.
        """
        m = CONTENT_DISPOSITION_RE.match(content_disposition)
        return m.group(1) if m else None
class ServiceAPIClient(ConnectWiseAPIClient):
    """Client for the ConnectWise 'service' API module (tickets, boards)."""
    API = 'service'
    ENDPOINT_TICKETS = 'tickets'
    ENDPOINT_BOARDS = 'boards'
    ENDPOINT_PRIORITIES = 'priorities'
    ENDPOINT_LOCATIONS = 'locations'

    def __init__(self, *args, **kwargs):
        # Optional extra filter conditions, popped before delegating so the
        # base-class __init__ does not see an unknown keyword.
        self.extra_conditions = None
        if 'extra_conditions' in kwargs:
            self.extra_conditions = kwargs.pop('extra_conditions')
        super().__init__(*args, **kwargs)

    def get_conditions(self):
        """Combine default and extra conditions into one AND-joined string."""
        default_conditions = settings.DJCONNECTWISE_DEFAULT_TICKET_CONDITIONS

        condition_list = [c for c in [
            default_conditions, self.extra_conditions] if c]
        conditions = ''

        # Each condition is parenthesized; subsequent ones are AND-prefixed.
        for condition in condition_list:
            condition = '({})'.format(condition)
            if conditions:
                condition = ' AND {}'.format(condition)
            conditions += condition
        return conditions

    def tickets_count(self):
        """Return the number of tickets matching the current conditions."""
        params = dict(
            conditions=self.get_conditions(),
        )
        return self.fetch_resource(
            '{}/count'.format(self.ENDPOINT_TICKETS), params
        ).get('count', 0)

    def get_ticket(self, ticket_id):
        """Fetch one ticket by its ID."""
        endpoint_url = '{}/{}'.format(self.ENDPOINT_TICKETS, ticket_id)
        return self.fetch_resource(endpoint_url)

    def get_tickets(self, *args, **kwargs):
        """Fetch tickets matching the current conditions, paged."""
        params = dict(
            conditions=self.get_conditions()
        )
        return self.fetch_resource(self.ENDPOINT_TICKETS, should_page=True,
                                   params=params, *args, **kwargs)

    def update_ticket_status(self, ticket_id, closed_flag, status):
        """
        Update the ticket's closedFlag and status on the server.

        *status* must expose ``id`` and ``name``. Returns the decoded JSON
        reply; raises ConnectWiseAPIError on transport failure or non-2xx.
        """
        # Yeah, this schema is a bit bizarre. See CW docs at
        # https://developer.connectwise.com/Manage/Developer_Guide#Patch
        body = [
            {
                'op': 'replace',
                'path': 'closedFlag',
                'value': closed_flag
            },
            {
                'op': 'replace',
                'path': 'status',
                'value': {
                    'id': status.id,
                    'name': status.name,
                },
            },
        ]

        try:
            endpoint = self._endpoint(
                '{}/{}'.format(self.ENDPOINT_TICKETS, ticket_id)
            )
            logger.debug('Making PATCH request to {}'.format(endpoint))
            response = requests.patch(
                endpoint,
                json=body,
                auth=self.auth,
                timeout=self.timeout,
            )
        except requests.RequestException as e:
            logger.error('Request failed: PATCH {}: {}'.format(endpoint, e))
            raise ConnectWiseAPIError('{}'.format(e))

        if 200 <= response.status_code < 300:
            return response.json()
        else:
            self._log_failed(response)
            raise ConnectWiseAPIError(response.content)

    def get_statuses(self, board_id, *args, **kwargs):
        """
        Returns the status types associated with the specified board.
        """
        endpoint_url = '{}/{}/statuses'.format(self.ENDPOINT_BOARDS, board_id)

        return self.fetch_resource(endpoint_url, should_page=True,
                                   *args, **kwargs)

    def get_boards(self, *args, **kwargs):
        """Fetch all service boards, transparently paged."""
        return self.fetch_resource(self.ENDPOINT_BOARDS, should_page=True,
                                   *args, **kwargs)

    def get_board(self, board_id):
        """Fetch one board by its ID."""
        return self.fetch_resource('{}/{}'.format(
            self.ENDPOINT_BOARDS, board_id)
        )

    def get_priorities(self, *args, **kwargs):
        """Fetch all ticket priorities, transparently paged."""
        return self.fetch_resource(self.ENDPOINT_PRIORITIES, should_page=True,
                                   *args, **kwargs)

    def get_teams(self, board_id, *args, **kwargs):
        """Fetch the teams belonging to the specified board."""
        endpoint = '{}/{}/teams/'.format(self.ENDPOINT_BOARDS, board_id)
        return self.fetch_resource(endpoint, should_page=True, *args, **kwargs)

    def get_locations(self, *args, **kwargs):
        """Fetch all service locations, transparently paged."""
        return self.fetch_resource(self.ENDPOINT_LOCATIONS, should_page=True,
                                   *args, **kwargs)
|
mit
| -2,677,239,882,592,082,400
| 34.912664
| 79
| 0.557515
| false
|
mark-rushakoff/FlackOverstow
|
grabber.py
|
1
|
1835
|
#!/usr/bin/env python
__author__ = "Mark Rushakoff"
__license__ = "MIT"
import sys
import urllib2
import re
import StringIO
import gzip
try:
import simplejson as json
except ImportError:
try:
import json
except ImportError:
sys.stderr.write("simplejson or json required for operation. Aborting.\n")
sys.exit()
try:
from BeautifulSoup import BeautifulStoneSoup as bss
except ImportError:
sys.stderr.write("BeautifulSoup required to format data")
sys.stderr.write("Try `easy_install beautifulsoup`")
sys.exit()
# Matches any HTML/XML tag, for stripping markup from post bodies.
stripHtmlTags = re.compile(r"<[^>]*>")
# Matches runs of whitespace, for collapsing to a single space.
compressWhiteSpace = re.compile(r"\s+")
def format(text):
    """Strip HTML tags, collapse whitespace, and decode HTML entities.

    NOTE: shadows the ``format`` builtin — kept for interface compatibility.
    """
    no_tags = stripHtmlTags.sub('', text)
    collapsed = compressWhiteSpace.sub(' ', no_tags)
    return bss(collapsed, convertEntities=bss.ALL_ENTITIES)
class Grabber(object):
    """Fetches a user's posts as JSON from a Stack Exchange site (API v1.0)."""

    _api = '1.0'

    def __init__(self, site, user_id, api_key=None):
        self.site = site
        self.user_id = user_id
        self.api_key = api_key

    def _grab(self, users_arg):
        """Download and gunzip the raw JSON for one ``users/*`` endpoint."""
        url = 'http://api.%s/%s/users/%s/%s?body=true&pagesize=100' % (self.site, self._api, self.user_id, users_arg)
        if self.api_key is not None:
            url += '&key=%s' % self.api_key
        # The v1.0 API always responds gzip-compressed; decompress in memory.
        buf = StringIO.StringIO(urllib2.urlopen(url).read())
        return gzip.GzipFile(fileobj=buf).read()

    def minimal_text(self, users_arg):
        """ return a list of just the simple text of the `body`s of the users_arg section of the pulled json """
        payload = json.loads(self._grab(users_arg))
        return [str(format(item['body'])) for item in payload[users_arg]]
if __name__ == "__main__":
    # Demo: dump the plain text of this user's Stack Overflow answers.
    # NOTE: Python 2 syntax (print statement) — this script predates py3.
    grabber = Grabber('stackoverflow.com', 126042)
    for g in grabber.minimal_text('answers'):
        print g
|
mit
| -9,221,221,283,494,337,000
| 29.583333
| 117
| 0.643052
| false
|
ddico/odoo
|
addons/l10n_hr/__manifest__.py
|
1
|
1748
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Author: Goran Kliska
# mail: goran.kliska(AT)slobodni-programi.hr
# Copyright (C) 2011- Slobodni programi d.o.o., Zagreb
# Contributions:
# Tomislav Bošnjaković, Storm Computers d.o.o. :
# - account types
{
    # Module display name.
    "name": "Croatia - Accounting (RRIF 2012)",
    "description": """
Croatian localisation.
======================
Author: Goran Kliska, Slobodni programi d.o.o., Zagreb
https://www.slobodni-programi.hr
Contributions:
Tomislav Bošnjaković, Storm Computers: tipovi konta
Ivan Vađić, Slobodni programi: tipovi konta
Description:
Croatian Chart of Accounts (RRIF ver.2012)
RRIF-ov računski plan za poduzetnike za 2012.
Vrste konta
Kontni plan prema RRIF-u, dorađen u smislu kraćenja naziva i dodavanja analitika
Porezne grupe prema poreznoj prijavi
Porezi PDV obrasca
Ostali porezi
Osnovne fiskalne pozicije
Izvori podataka:
https://www.rrif.hr/dok/preuzimanje/rrif-rp2011.rar
https://www.rrif.hr/dok/preuzimanje/rrif-rp2012.rar
""",
    "version": "13.0",
    "author": "OpenERP Croatian Community",
    'category': 'Accounting/Localizations',
    # Only the core accounting app is required.
    'depends': [
        'account',
    ],
    # Chart of accounts, taxes and fiscal positions, loaded in order.
    'data': [
        'data/l10n_hr_chart_data.xml',
        'data/account.account.type.csv',
        'data/account.account.template.csv',
        'data/account_chart_tag_data.xml',
        'data/account.tax.group.csv',
        'data/account_tax_report_data.xml',
        'data/account_tax_template_data.xml',
        'data/account_tax_fiscal_position_data.xml',
        'data/account_chart_template_data.xml',
    ],
    'demo': [
        'demo/demo_company.xml',
    ],
    # NOTE(review): 'active' is a legacy manifest key — presumably ignored by
    # modern Odoo; confirm before removing.
    "active": False,
}
|
agpl-3.0
| 3,099,756,170,522,868,700
| 26.603175
| 80
| 0.66015
| false
|
minlexx/pyevemon
|
esi_client/models/delete_fleets_fleet_id_squads_squad_id_forbidden.py
|
1
|
3083
|
# coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.4.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class DeleteFleetsFleetIdSquadsSquadIdForbidden(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    def __init__(self, error=None):
        """
        DeleteFleetsFleetIdSquadsSquadIdForbidden - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # attribute name -> declared swagger type
        self.swagger_types = {
            'error': 'str'
        }
        # attribute name -> JSON key in the API payload
        self.attribute_map = {
            'error': 'error'
        }
        self._error = error

    @property
    def error(self):
        """
        Gets the error of this DeleteFleetsFleetIdSquadsSquadIdForbidden.
        Forbidden message

        :rtype: str
        """
        return self._error

    @error.setter
    def error(self, error):
        """
        Sets the error of this DeleteFleetsFleetIdSquadsSquadIdForbidden.
        Forbidden message

        :type: str
        """
        self._error = error

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Models are equal when their attribute dicts match
        """
        if not isinstance(other, DeleteFleetsFleetIdSquadsSquadIdForbidden):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Inverse of __eq__
        """
        return not self == other
|
gpl-3.0
| 3,020,769,290,137,850,000
| 25.350427
| 82
| 0.537464
| false
|
PrzemyslawUrbanczyk/pu_zadanie1
|
fixture/contact.py
|
1
|
10214
|
import re
from model.contact import Contact
class ContactHelper:
    """Page-object helper driving the address-book contact pages.

    All browser access goes through ``self.app.wd`` (a selenium WebDriver).
    ``contact_cache`` memoizes the parsed contact list and is invalidated
    by every mutating operation.
    """

    def __init__(self, app):
        self.app = app

    def return_to_home_page(self):
        wd = self.app.wd
        wd.find_element_by_link_text("home page").click()

    def create(self, contact):
        """Create *contact* via the 'nowy wpis' (new entry) form."""
        wd = self.app.wd
        # init contact creation
        wd.find_element_by_link_text("nowy wpis").click()
        # fill contact form
        self.fill_contact_form(contact)
        # submit contact creation
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
        self.contact_cache = None

    def fill_contact_form(self, contact):
        """Populate every field of the contact form from *contact*."""
        wd = self.app.wd
        self.change_field_value("firstname", contact.first_name)
        self.change_field_value("middlename", contact.middle_name)
        self.change_field_value("lastname", contact.last_name)
        self.change_field_value("nickname", contact.nickname)
        self.change_field_value("title", contact.title)
        self.change_field_value("company", contact.company)
        self.change_field_value("address", contact.address)
        self.change_field_value("home", contact.home_number)
        self.change_field_value("mobile", contact.mobile_number)
        self.change_field_value("work", contact.work_number)
        self.change_field_value("fax", contact.fax)
        self.change_field_value("email", contact.first_email)
        self.change_field_value("email2", contact.second_email)
        self.change_field_value("email3", contact.third_email)
        self.change_field_value("homepage", contact.wwwpage)
        # Birthday/anniversary dropdowns are selected by fixed option
        # position (assumes a fixed fixture — TODO confirm).
        if not wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[16]").is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[16]").click()
        if not wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[2]").click()
        self.change_field_value("byear", contact.birth_year)
        if not wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[17]").is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[17]").click()
        if not wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[6]").is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[6]").click()
        self.change_field_value("ayear", contact.anniversary_year)
        self.change_field_value("address2", contact.second_address)
        self.change_field_value("phone2", contact.second_private_number)
        self.change_field_value("notes", contact.notes)

    def change_field_value(self, field_name, text):
        """Clear the named input and type *text*; no-op when text is None."""
        wd = self.app.wd
        if text is not None:
            wd.find_element_by_name(field_name).click()
            wd.find_element_by_name(field_name).clear()
            wd.find_element_by_name(field_name).send_keys(text)

    def delete_first_contact(self):
        self.delete_contact_by_index(0)

    def delete_contact_by_index(self, index):
        wd = self.app.wd
        self.open_contacts_page()
        self.select_contact_by_index(index)
        # confirm deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        wd.switch_to_alert().accept()
        self.contact_cache = None

    def delete_contact_by_id(self, id):
        wd = self.app.wd
        self.app.open_home_page()
        # open deletion
        wd.find_element_by_css_selector("input[value='%s']" % id).click()
        # submit deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        wd.switch_to_alert().accept()
        self.app.open_home_page()
        # BUG FIX: previously cleared ``group_cache`` (a copy/paste slip from
        # the group helper), leaving a stale contact list cached.
        self.contact_cache = None

    def select_first_contact(self):
        self.select_contact_by_index(0)

    def modify_first_contact(self, new_contact_data):
        # BUG FIX: previously ``self.modify_contact_by_index[0]`` subscripted
        # the bound method (always TypeError) instead of calling it, and no
        # contact data could be supplied at all.
        self.modify_contact_by_index(0, new_contact_data)

    def modify_contact_by_index(self, index, new_contact_data):
        wd = self.app.wd
        self.open_contacts_page()
        row = wd.find_elements_by_name("entry")[index]
        cells = row.find_elements_by_tag_name("td")
        cells[7].click()
        # modify contact form
        self.fill_contact_form(new_contact_data)
        # submit contact modification
        wd.find_element_by_xpath("//div[@id='content']/form[1]/input[22]").click()
        self.contact_cache = None

    def modify_contact_by_id(self, id, contact):
        wd = self.app.wd
        self.app.open_home_page()
        # open modification form
        checkbox = wd.find_element_by_css_selector("input[value='%s']" % id)
        row = checkbox.find_element_by_xpath("./../..")
        cells = row.find_elements_by_tag_name("td")
        cells[7].click()
        # fill contact form
        self.fill_contact_form(contact)
        # submit changes
        wd.find_element_by_name("update").click()

    def select_contact_by_index(self, index):
        wd = self.app.wd
        wd.find_elements_by_name("selected[]")[index].click()

    def select_contact_by_id(self, id):
        wd = self.app.wd
        row = wd.find_element_by_css_selector("input[value='%s']" % id)
        return row

    def open_contacts_page(self):
        """Navigate to the contact list unless it is already open."""
        wd = self.app.wd
        if not (wd.current_url.endswith("index.php") and len(wd.find_elements_by_id("MassCB")) > 0):
            wd.find_element_by_link_text("strona główna").click()

    def count(self):
        """Return the number of contact rows on the list page."""
        wd = self.app.wd
        self.open_contacts_page()
        return len(wd.find_elements_by_name("selected[]"))

    # Cached result of get_contact_list(); None means "needs refresh".
    contact_cache = None

    def get_contact_list(self):
        """Return (and cache) the list of contacts scraped from the table."""
        if self.contact_cache is None:
            wd = self.app.wd
            self.open_contacts_page()
            self.contact_cache = []
            for row in wd.find_elements_by_name("entry"):
                cells = row.find_elements_by_tag_name("td")
                firstname = cells[2].text
                lastname = cells[1].text
                id = row.find_element_by_name("selected[]").get_attribute("value")
                all_emails = cells[4].text
                all_phones = cells[5].text
                address = cells[3].text
                self.contact_cache.append(Contact(first_name=firstname, last_name=lastname, id=id,
                                                  all_phones_from_home_page=all_phones,
                                                  all_emails_from_home_page=all_emails, address=address))
        return list(self.contact_cache)

    def open_contact_to_edit_by_index(self, index):
        wd = self.app.wd
        self.app.open_home_page()
        row = wd.find_elements_by_name("entry")[index]
        cells = row.find_elements_by_tag_name("td")[7]
        cells.find_element_by_tag_name("a").click()

    def open_contact_view_by_index(self, index):
        wd = self.app.wd
        self.open_contacts_page()
        row = wd.find_elements_by_name("entry")[index]
        cell = row.find_elements_by_tag_name("td")[6]
        # BUG FIX: find_elements (plural) returns a list, which has no
        # .click(); use find_element for the single anchor.
        cell.find_element_by_tag_name("a").click()

    def get_contact_info_from_edit_page(self, index):
        """Scrape a Contact from the edit form of the row at *index*."""
        wd = self.app.wd
        self.open_contact_to_edit_by_index(index)
        firstname = wd.find_element_by_name("firstname").get_attribute("value")
        lastname = wd.find_element_by_name("lastname").get_attribute("value")
        id = wd.find_element_by_name("id").get_attribute("value")
        homephone = wd.find_element_by_name("home").get_attribute("value")
        workphone = wd.find_element_by_name("work").get_attribute("value")
        mobilephone = wd.find_element_by_name("mobile").get_attribute("value")
        secondaryphone = wd.find_element_by_name("phone2").get_attribute("value")
        email = wd.find_element_by_name("email").get_attribute("value")
        email2 = wd.find_element_by_name("email2").get_attribute("value")
        email3 = wd.find_element_by_name("email3").get_attribute("value")
        address = wd.find_element_by_name("address").get_attribute("value")
        return Contact(first_name=firstname, last_name=lastname, id=id,
                       home_number=homephone, mobile_number=mobilephone, work_number=workphone,
                       second_private_number=secondaryphone,
                       first_email=email, second_email=email2, third_email=email3, address=address)

    def get_contact_view_page(self, index):
        """Scrape the phone numbers from the read-only 'view' page.

        BUG FIX: the parsed values were computed and then silently dropped;
        they are now returned as a Contact.
        """
        wd = self.app.wd
        self.open_contact_to_view_by_index(index)
        text = wd.find_element_by_id("content").text
        homephone = re.search("H: (.*)", text).group(1)
        workphone = re.search("W: (.*)", text).group(1)
        mobilephone = re.search("M: (.*)", text).group(1)
        secondaryphone = re.search("P: (.*)", text).group(1)
        return Contact(home_number=homephone, work_number=workphone,
                       mobile_number=mobilephone,
                       second_private_number=secondaryphone)

    def open_contact_to_view_by_index(self, index):
        wd = self.app.wd
        self.app.open_home_page()
        row = wd.find_elements_by_name("entry")[index]
        cells = row.find_elements_by_tag_name("td")[6]
        cells.find_element_by_tag_name("a").click()

    def add_contact_to_group_by_id(self, id, group):
        """Add the existing contact *id* to *group* via the list page."""
        wd = self.app.wd
        if not len(wd.find_elements_by_name("searchstring")) > 0:
            self.app.open_home_page()
        # select the contact row, pick the target group, submit
        wd.find_element_by_css_selector("input[value='%s']" % id).click()
        number = group.id
        wd.find_element_by_xpath("//select[@name='to_group']//option[@value='%s']" % number).click()
        wd.find_element_by_name("add").click()
        self.app.open_home_page()
        self.contact_cache = None

    def add_contact_to_group(self, Contact, group):
        """Create a new contact directly assigned to *group*.

        NOTE(review): the parameter shadows the imported ``Contact`` class —
        kept as-is for interface compatibility.
        """
        wd = self.app.wd
        if not len(wd.find_elements_by_name("searchstring")) > 0:
            self.app.open_home_page()
        # add new contact
        wd.find_element_by_link_text("add new").click()
        self.fill_contact_form(Contact)
        number = group.id
        wd.find_element_by_xpath("//div[@id='content']/form/select[5]//option[@value='%s']" % number).click()
        # accept
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
        self.app.open_home_page()
        self.contact_cache = None
|
apache-2.0
| -1,192,542,450,972,658,000
| 42.455319
| 109
| 0.600372
| false
|
wavefrontHQ/python-client
|
wavefront_api_client/models/response_container_service_account.py
|
1
|
4422
|
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ResponseContainerServiceAccount(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'response': 'ServiceAccount',
'status': 'ResponseStatus'
}
attribute_map = {
'response': 'response',
'status': 'status'
}
def __init__(self, response=None, status=None): # noqa: E501
"""ResponseContainerServiceAccount - a model defined in Swagger""" # noqa: E501
self._response = None
self._status = None
self.discriminator = None
if response is not None:
self.response = response
self.status = status
@property
def response(self):
"""Gets the response of this ResponseContainerServiceAccount. # noqa: E501
:return: The response of this ResponseContainerServiceAccount. # noqa: E501
:rtype: ServiceAccount
"""
return self._response
@response.setter
def response(self, response):
"""Sets the response of this ResponseContainerServiceAccount.
:param response: The response of this ResponseContainerServiceAccount. # noqa: E501
:type: ServiceAccount
"""
self._response = response
@property
def status(self):
"""Gets the status of this ResponseContainerServiceAccount. # noqa: E501
:return: The status of this ResponseContainerServiceAccount. # noqa: E501
:rtype: ResponseStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ResponseContainerServiceAccount.
:param status: The status of this ResponseContainerServiceAccount. # noqa: E501
:type: ResponseStatus
"""
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResponseContainerServiceAccount, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResponseContainerServiceAccount):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
    """Inequality is defined as the strict negation of __eq__."""
    return not (self == other)
|
apache-2.0
| 1,939,211,225,929,616,100
| 30.140845
| 409
| 0.593171
| false
|
wdv4758h/ZipPy
|
edu.uci.python.test/regressiontests/test_grammar.py
|
1
|
33140
|
# Python test set -- part 1, grammar.
# This just tests whether the parser accepts them all.
#from test.support import run_unittest, check_syntax_error
# Currently test.support cannot be imported
import unittest
#import sys
# testing import *
#from sys import *
class TokenTests(unittest.TestCase):
def testBackslash(self):
    """Backslash continuation is honoured in code but not inside comments."""
    # Backslash means line continuation:
    x = 1 \
    + 1
    self.assertEqual(x, 2, 'backslash for line continuation')
    # A backslash at the end of a comment does NOT continue the line :\
    x = 0
    self.assertEqual(x, 0, 'backslash ending comment')
def testPlainIntegers(self):
    """Decimal, hex, octal and binary int literals parse; no OverflowError.

    Python 3 ints are arbitrary precision, so literals just past the
    platform word size must still evaluate cleanly.
    """
    self.assertEqual(type(000), type(0))
    self.assertEqual(0xff, 255)
    self.assertEqual(0o377, 255)
    self.assertEqual(2147483647, 0o17777777777)
    self.assertEqual(0b1001, 9)
    # "0x" is not a valid literal
    self.assertRaises(SyntaxError, eval, "0x")
    from sys import maxsize
    if maxsize == 2147483647:
        # 32-bit platform: check literals around the 31/32-bit boundary.
        self.assertEqual(-2147483647-1, -0o20000000000)
        # XXX -2147483648
        self.assertTrue(0o37777777777 > 0)
        self.assertTrue(0xffffffff > 0)
        self.assertTrue(0b1111111111111111111111111111111 > 0)
        for s in ('2147483648', '0o40000000000', '0x100000000',
                  '0b10000000000000000000000000000000'):
            try:
                x = eval(s)
            except OverflowError:
                self.fail("OverflowError on huge integer literal %r" % s)
    elif maxsize == 9223372036854775807:
        # 64-bit platform: the same checks scaled to the 63/64-bit boundary.
        self.assertEqual(-9223372036854775807-1, -0o1000000000000000000000)
        self.assertTrue(0o1777777777777777777777 > 0)
        self.assertTrue(0xffffffffffffffff > 0)
        self.assertTrue(0b11111111111111111111111111111111111111111111111111111111111111 > 0)
        for s in '9223372036854775808', '0o2000000000000000000000', \
                '0x10000000000000000', \
                '0b100000000000000000000000000000000000000000000000000000000000000':
            try:
                x = eval(s)
            except OverflowError:
                self.fail("OverflowError on huge integer literal %r" % s)
    else:
        self.fail('Weird maxsize value %r' % maxsize)
def testLongIntegers(self):
x = 0
x = 0xffffffffffffffff
x = 0Xffffffffffffffff
x = 0o77777777777777777
x = 0O77777777777777777
x = 123456789012345678901234567890
x = 0b100000000000000000000000000000000000000000000000000000000000000000000
x = 0B111111111111111111111111111111111111111111111111111111111111111111111
def testFloats(self):
    """Every accepted spelling of a float literal must parse."""
    x = 3.14
    x = 314.
    x = 0.314
    # XXX x = 000.314
    x = .314
    x = 3e14
    x = 3E14      # exponent marker is case-insensitive
    x = 3e-14
    x = 3e+14     # explicit exponent sign
    x = 3.e14
    x = .3e14
    x = 3.1e4
def testStringLiterals(self):
x = ''; y = ""; self.assertTrue(len(x) == 0 and x == y)
x = '\''; y = "'"; self.assertTrue(len(x) == 1 and x == y and ord(x) == 39)
x = '"'; y = "\""; self.assertTrue(len(x) == 1 and x == y and ord(x) == 34)
x = "doesn't \"shrink\" does it"
y = 'doesn\'t "shrink" does it'
self.assertTrue(len(x) == 24 and x == y)
x = "does \"shrink\" doesn't it"
y = 'does "shrink" doesn\'t it'
self.assertTrue(len(x) == 24 and x == y)
x = """
The "quick"
brown fox
jumps over
the 'lazy' dog.
"""
y = '\nThe "quick"\nbrown fox\njumps over\nthe \'lazy\' dog.\n'
self.assertEqual(x, y)
y = '''
The "quick"
brown fox
jumps over
the 'lazy' dog.
'''
self.assertEqual(x, y)
y = "\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the 'lazy' dog.\n\
"
self.assertEqual(x, y)
y = '\n\
The \"quick\"\n\
brown fox\n\
jumps over\n\
the \'lazy\' dog.\n\
'
self.assertEqual(x, y)
# def testEllipsis(self):
# x = ...
# self.assertTrue(x is Ellipsis)
# self.assertRaises(SyntaxError, eval, ".. .")
class GrammarTests(unittest.TestCase):
# single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
# XXX can't test in a script -- this rule is only used when interactive
# file_input: (NEWLINE | stmt)* ENDMARKER
# Being tested as this very moment this very module
# expr_input: testlist NEWLINE
# XXX Hard to test -- used only in calls to input()
def testEvalInput(self):
# testlist ENDMARKER
x = eval('1, 0 or 1')
def testFuncdef(self):
### [decorators] 'def' NAME parameters ['->' test] ':' suite
### decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
### decorators: decorator+
### parameters: '(' [typedargslist] ')'
### typedargslist: ((tfpdef ['=' test] ',')*
### ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef)
### | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
### tfpdef: NAME [':' test]
### varargslist: ((vfpdef ['=' test] ',')*
### ('*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef)
### | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
### vfpdef: NAME
def f1(): pass
f1()
f1(*())
f1(*(), **{})
def f2(one_argument): pass
def f3(two, arguments): pass
self.assertEqual(f2.__code__.co_varnames, ('one_argument',))
self.assertEqual(f3.__code__.co_varnames, ('two', 'arguments'))
def a1(one_arg,): pass
def a2(two, args,): pass
def v0(*rest): pass
def v1(a, *rest): pass
def v2(a, b, *rest): pass
f1()
f2(1)
f2(1,)
f3(1, 2)
f3(1, 2,)
v0()
v0(1)
v0(1,)
v0(1,2)
v0(1,2,3,4,5,6,7,8,9,0)
v1(1)
v1(1,)
v1(1,2)
v1(1,2,3)
v1(1,2,3,4,5,6,7,8,9,0)
v2(1,2)
v2(1,2,3)
v2(1,2,3,4)
v2(1,2,3,4,5,6,7,8,9,0)
def d01(a=1): pass
d01()
d01(1)
d01(*(1,))
d01(**{'a':2})
def d11(a, b=1): pass
d11(1)
d11(1, 2)
d11(1, **{'b':2})
def d21(a, b, c=1): pass
d21(1, 2)
d21(1, 2, 3)
d21(*(1, 2, 3))
d21(1, *(2, 3))
d21(1, 2, *(3,))
d21(1, 2, **{'c':3})
def d02(a=1, b=2): pass
d02()
d02(1)
d02(1, 2)
d02(*(1, 2))
d02(1, *(2,))
d02(1, **{'b':2})
d02(**{'a': 1, 'b': 2})
def d12(a, b=1, c=2): pass
d12(1)
d12(1, 2)
d12(1, 2, 3)
def d22(a, b, c=1, d=2): pass
d22(1, 2)
d22(1, 2, 3)
d22(1, 2, 3, 4)
def d01v(a=1, *rest): pass
d01v()
d01v(1)
d01v(1, 2)
d01v(*(1, 2, 3, 4))
d01v(*(1,))
d01v(**{'a':2})
def d11v(a, b=1, *rest): pass
d11v(1)
d11v(1, 2)
d11v(1, 2, 3)
def d21v(a, b, c=1, *rest): pass
d21v(1, 2)
d21v(1, 2, 3)
d21v(1, 2, 3, 4)
d21v(*(1, 2, 3, 4))
d21v(1, 2, **{'c': 3})
def d02v(a=1, b=2, *rest): pass
d02v()
d02v(1)
d02v(1, 2)
d02v(1, 2, 3)
d02v(1, *(2, 3, 4))
d02v(**{'a': 1, 'b': 2})
def d12v(a, b=1, c=2, *rest): pass
d12v(1)
d12v(1, 2)
d12v(1, 2, 3)
d12v(1, 2, 3, 4)
d12v(*(1, 2, 3, 4))
d12v(1, 2, *(3, 4, 5))
d12v(1, *(2,), **{'c': 3})
def d22v(a, b, c=1, d=2, *rest): pass
d22v(1, 2)
d22v(1, 2, 3)
d22v(1, 2, 3, 4)
d22v(1, 2, 3, 4, 5)
d22v(*(1, 2, 3, 4))
d22v(1, 2, *(3, 4, 5))
d22v(1, *(2, 3), **{'d': 4})
# keyword argument type tests
try:
str('x', **{b'foo':1 })
except TypeError:
pass
else:
self.fail('Bytes should not work as keyword argument names')
# keyword only argument tests
# def pos0key1(*, key): return key
# pos0key1(key=100)
# def pos2key2(p1, p2, *, k1, k2=100): return p1,p2,k1,k2
# pos2key2(1, 2, k1=100)
# pos2key2(1, 2, k1=100, k2=200)
# pos2key2(1, 2, k2=100, k1=200)
# def pos2key2dict(p1, p2, *, k1=100, k2, **kwarg): return p1,p2,k1,k2,kwarg
# pos2key2dict(1,2,k2=100,tokwarg1=100,tokwarg2=200)
# pos2key2dict(1,2,tokwarg1=100,tokwarg2=200, k2=100)
# keyword arguments after *arglist
def f(*args, **kwargs):
return args, kwargs
self.assertEqual(f(1, x=2, *[3, 4], y=5), ((1, 3, 4),
{'x':2, 'y':5}))
self.assertRaises(SyntaxError, eval, "f(1, *(2,3), 4)")
self.assertRaises(SyntaxError, eval, "f(1, x=2, *(3,4), x=5)")
# argument annotation tests
# def f(x) -> list: pass
# self.assertEqual(f.__annotations__, {'return': list})
# def f(x:int): pass
# self.assertEqual(f.__annotations__, {'x': int})
# def f(*x:str): pass
# self.assertEqual(f.__annotations__, {'x': str})
# def f(**x:float): pass
# self.assertEqual(f.__annotations__, {'x': float})
# def f(x, y:1+2): pass
# self.assertEqual(f.__annotations__, {'y': 3})
# def f(a, b:1, c:2, d): pass
# self.assertEqual(f.__annotations__, {'b': 1, 'c': 2})
# def f(a, b:1, c:2, d, e:3=4, f=5, *g:6): pass
# self.assertEqual(f.__annotations__,
# {'b': 1, 'c': 2, 'e': 3, 'g': 6})
# def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10,
# **k:11) -> 12: pass
# self.assertEqual(f.__annotations__,
# {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9,
# 'k': 11, 'return': 12})
# Check for SF Bug #1697248 - mixing decorators and a return annotation
# def null(x): return x
# @null
# def f(x) -> list: pass
# self.assertEqual(f.__annotations__, {'return': list})
#
# # test MAKE_CLOSURE with a variety of oparg's
# closure = 1
# def f(): return closure
# def f(x=1): return closure
# def f(*, k=1): return closure
# def f() -> int: return closure
# Check ast errors in *args and *kwargs
# check_syntax_error(self, "f(*g(1=2))")
# Currently test.support module is not supported, so check_syntax_error is handled as the following
self.assertRaises(SyntaxError, compile, "f(*g(1=2))", '<test string>', 'exec')
# check_syntax_error(self, "f(**g(1=2))")
self.assertRaises(SyntaxError, compile, "f(**g(1=2))", '<test string>', 'exec')
def testLambdef(self):
### lambdef: 'lambda' [varargslist] ':' test
l1 = lambda : 0
self.assertEqual(l1(), 0)
l2 = lambda : a[d] # XXX just testing the expression
l3 = lambda : [2 < x for x in [-1, 3, 0]]
self.assertEqual(l3(), [0, 1, 0])
l4 = lambda x = lambda y = lambda z=1 : z : y() : x()
self.assertEqual(l4(), 1)
l5 = lambda x, y, z=2: x + y + z
self.assertEqual(l5(1, 2), 5)
self.assertEqual(l5(1, 2, 3), 6)
# check_syntax_error(self, "lambda x: x = 2")
# Currently test.support module is not supported, so check_syntax_error is handled as the following
self.assertRaises(SyntaxError, compile, "lambda x: x = 2", '<test string>', 'exec')
# check_syntax_error(self, "lambda (None,): None")
self.assertRaises(SyntaxError, compile, "lambda (None,): None", '<test string>', 'exec')
# l6 = lambda x, y, *, k=20: x+y+k
# self.assertEqual(l6(1,2), 1+2+20)
# self.assertEqual(l6(1,2,k=10), 1+2+10)
### stmt: simple_stmt | compound_stmt
# Tested below
def testSimpleStmt(self):
### simple_stmt: small_stmt (';' small_stmt)* [';']
x = 1; pass; del x
def foo():
# verify statements that end with semi-colons
x = 1; pass; del x;
foo()
### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt
# Tested below
def testExprStmt(self):
# (exprlist '=')* exprlist
1
1, 2, 3
x = 1
x = 1, 2, 3
x = y = z = 1, 2, 3
x, y, z = 1, 2, 3
abc = a, b, c = x, y, z = xyz = 1, 2, (3, 4)
# check_syntax_error(self, "x + 1 = 1")
# Currently test.support module is not supported, so check_syntax_error is handled as the following
self.assertRaises(SyntaxError, compile, "x + 1 = 1", '<test string>', 'exec')
# check_syntax_error(self, "a + 1 = b + 2")
self.assertRaises(SyntaxError, compile, "a + 1 = b + 2", '<test string>', 'exec')
def testDelStmt(self):
# 'del' exprlist
abc = [1,2,3]
x, y, z = abc
xyz = x, y, z
del abc
del x, y, (z, xyz)
def testPassStmt(self):
    """'pass': the no-op statement parses on its own."""
    # 'pass'
    pass
# flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt
# Tested below
def testBreakStmt(self):
    """'break': a bare break inside a loop parses and exits immediately."""
    # 'break'
    while 1: break
def testContinueStmt(self):
# 'continue'
i = 1
while i: i = 0; continue
msg = ""
while not msg:
msg = "ok"
try:
continue
msg = "continue failed to continue inside try"
except:
msg = "continue inside try called except block"
if msg != "ok":
self.fail(msg)
msg = ""
while not msg:
msg = "finally block not called"
try:
continue
finally:
msg = "ok"
if msg != "ok":
self.fail(msg)
def test_break_continue_loop(self):
# This test warrants an explanation. It is a test specifically for SF bugs
# #463359 and #462937. The bug is that a 'break' statement executed or
# exception raised inside a try/except inside a loop, *after* a continue
# statement has been executed in that loop, will cause the wrong number of
# arguments to be popped off the stack and the instruction pointer reset to
# a very small number (usually 0.) Because of this, the following test
# *must* written as a function, and the tracking vars *must* be function
# arguments with default values. Otherwise, the test will loop and loop.
def test_inner(extra_burning_oil = 1, count=0):
big_hippo = 2
while big_hippo:
count += 1
try:
if extra_burning_oil and big_hippo == 1:
extra_burning_oil -= 1
break
big_hippo -= 1
continue
except:
raise
if count > 2 or big_hippo != 1:
self.fail("continue then break in try/except in loop broken!")
test_inner()
def testReturn(self):
# 'return' [testlist]
def g1(): return
def g2(): return 1
g1()
x = g2()
# check_syntax_error(self, "class foo:return 1")
# Currently test.support module is not supported, so check_syntax_error is handled as the following
self.assertRaises(SyntaxError, compile, "class foo:return 1", '<test string>', 'exec')
def testYield(self):
# check_syntax_error(self, "class foo:yield 1")
# Currently test.support module is not supported, so check_syntax_error is handled as the following
self.assertRaises(SyntaxError, compile, "class foo:yield 1", '<test string>', 'exec')
def testRaise(self):
# 'raise' test [',' test]
try: raise RuntimeError('just testing')
except RuntimeError: pass
try: raise KeyboardInterrupt
except KeyboardInterrupt: pass
def testImport(self):
# 'import' dotted_as_names
import sys
import time, sys
# 'from' dotted_name 'import' ('*' | '(' import_as_names ')' | import_as_names)
from time import time
from time import (time)
# not testable inside a function, but already done at top of the module
# from sys import *
from sys import path, argv
from sys import (path, argv)
from sys import (path, argv,)
def testGlobal(self):
# 'global' NAME (',' NAME)*
global a
global a, b
global one, two, three, four, five, six, seven, eight, nine, ten
# def testNonlocal(self):
# # 'nonlocal' NAME (',' NAME)*
# x = 0
# y = 0
# def f():
# nonlocal x
# nonlocal x, y
def testAssert(self):
# assertTruestmt: 'assert' test [',' test]
assert 1
assert 1, 1
assert lambda x:x
assert 1, lambda x:x+1
try:
assert True
except AssertionError as e:
self.fail("'assert True' should not have raised an AssertionError")
try:
assert True, 'this should always pass'
except AssertionError as e:
self.fail("'assert True, msg' should not have "
"raised an AssertionError")
# these tests fail if python is run with -O, so check __debug__
@unittest.skipUnless(__debug__, "Won't work if __debug__ is False")
def testAssert2(self):
try:
assert 0, "msg"
except AssertionError as e:
self.assertEqual(e.args[0], "msg")
else:
self.fail("AssertionError not raised by assert 0")
try:
assert False
except AssertionError as e:
self.assertEqual(len(e.args), 0)
else:
self.fail("AssertionError not raised by 'assert False'")
### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef
# Tested below
def testIf(self):
# 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
if 1: pass
if 1: pass
else: pass
if 0: pass
elif 0: pass
if 0: pass
elif 0: pass
elif 0: pass
elif 0: pass
else: pass
def testWhile(self):
# 'while' test ':' suite ['else' ':' suite]
while 0: pass
while 0: pass
else: pass
# Issue1920: "while 0" is optimized away,
# ensure that the "else" clause is still present.
x = 0
while 0:
x = 1
else:
x = 2
self.assertEqual(x, 2)
def testFor(self):
# 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
for i in 1, 2, 3: pass
for i, j, k in (): pass
else: pass
class Squares:
def __init__(self, max):
self.max = max
self.sofar = []
def __len__(self): return len(self.sofar)
def __getitem__(self, i):
if not 0 <= i < self.max: raise IndexError
n = len(self.sofar)
while n <= i:
self.sofar.append(n*n)
n = n+1
return self.sofar[i]
n = 0
for x in Squares(10): n = n+x
if n != 285:
self.fail('for over growing sequence')
result = []
for x, in [(1,), (2,), (3,)]:
result.append(x)
self.assertEqual(result, [1, 2, 3])
def testTry(self):
### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
### | 'try' ':' suite 'finally' ':' suite
### except_clause: 'except' [expr ['as' expr]]
try:
1/0
except ZeroDivisionError:
pass
else:
pass
try: 1/0
except EOFError: pass
except TypeError as msg: pass
except RuntimeError as msg: pass
except: pass
else: pass
try: 1/0
except (EOFError, TypeError, ZeroDivisionError): pass
try: 1/0
except (EOFError, TypeError, ZeroDivisionError) as msg: pass
try: pass
finally: pass
def testSuite(self):
# simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
if 1: pass
if 1:
pass
if 1:
#
#
#
pass
pass
#
pass
#
def testTest(self):
### and_test ('or' and_test)*
### and_test: not_test ('and' not_test)*
### not_test: 'not' not_test | comparison
if not 1: pass
if 1 and 1: pass
if 1 or 1: pass
if not not not 1: pass
if not 1 and 1 and 1: pass
if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass
def testComparison(self):
### comparison: expr (comp_op expr)*
### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not'
if 1: pass
x = (1 == 1)
if 1 == 1: pass
if 1 != 1: pass
if 1 < 1: pass
if 1 > 1: pass
if 1 <= 1: pass
if 1 >= 1: pass
if 1 is 1: pass
if 1 is not 1: pass
if 1 in (): pass
if 1 not in (): pass
if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass
def testBinaryMaskOps(self):
x = 1 & 1
x = 1 ^ 1
x = 1 | 1
def testShiftOps(self):
x = 1 << 1
x = 1 >> 1
x = 1 << 1 >> 1
def testAdditiveOps(self):
x = 1
x = 1 + 1
x = 1 - 1 - 1
x = 1 - 1 + 1 - 1 + 1
def testMultiplicativeOps(self):
x = 1 * 1
x = 1 / 1
x = 1 % 1
x = 1 / 1 * 1 % 1
def testUnaryOps(self):
x = +1
x = -1
x = ~1
x = ~1 ^ 1 & 1 | 1 & 1 ^ -1
x = -1*1/1 + 1*1 - ---1*1
def testSelectors(self):
### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME
### subscript: expr | [expr] ':' [expr]
import sys, time
c = sys.path[0]
x = time.time()
x = sys.modules['time'].time()
a = '01234'
c = a[0]
c = a[-1]
s = a[0:5]
s = a[:5]
s = a[0:]
s = a[:]
s = a[-5:]
s = a[:-1]
s = a[-4:-3]
# A rough test of SF bug 1333982. http://python.org/sf/1333982
# The testing here is fairly incomplete.
# Test cases should include: commas with 1 and 2 colons
d = {}
d[1] = 1
d[1,] = 2
d[1,2] = 3
d[1,2,3] = 4
L = list(d)
L.sort(key=lambda x: x if isinstance(x, tuple) else ())
self.assertEqual(str(L), '[1, (1,), (1, 2), (1, 2, 3)]')
def testAtoms(self):
### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictsetmaker] '}' | NAME | NUMBER | STRING
### dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [','])
x = (1)
x = (1 or 2 or 3)
x = (1 or 2 or 3, 2, 3)
x = []
x = [1]
x = [1 or 2 or 3]
x = [1 or 2 or 3, 2, 3]
x = []
x = {}
x = {'one': 1}
x = {'one': 1,}
x = {'one' or 'two': 1 or 2}
x = {'one': 1, 'two': 2}
x = {'one': 1, 'two': 2,}
x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}
x = {'one'}
x = {'one', 1,}
x = {'one', 'two', 'three'}
x = {2, 3, 4,}
x = x
x = 'x'
x = 123
### exprlist: expr (',' expr)* [',']
### testlist: test (',' test)* [',']
# These have been exercised enough above
def testClassdef(self):
# 'class' NAME ['(' [testlist] ')'] ':' suite
class B: pass
class B2(): pass
class C1(B): pass
class C2(B): pass
class D(C1, C2, B): pass
class C:
def meth1(self): pass
def meth2(self, arg): pass
def meth3(self, a1, a2): pass
# decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
# decorators: decorator+
# decorated: decorators (classdef | funcdef)
def class_decorator(x): return x
@class_decorator
class G: pass
def testDictcomps(self):
# dictorsetmaker: ( (test ':' test (comp_for |
# (',' test ':' test)* [','])) |
# (test (comp_for | (',' test)* [','])) )
nums = [1, 2, 3]
self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4})
def testListcomps(self):
# list comprehension tests
nums = [1, 2, 3, 4, 5]
strs = ["Apple", "Banana", "Coconut"]
spcs = [" Apple", " Banana ", "Coco nut "]
self.assertEqual([s.strip() for s in spcs], ['Apple', 'Banana', 'Coco nut'])
self.assertEqual([3 * x for x in nums], [3, 6, 9, 12, 15])
self.assertEqual([x for x in nums if x > 2], [3, 4, 5])
self.assertEqual([(i, s) for i in nums for s in strs],
[(1, 'Apple'), (1, 'Banana'), (1, 'Coconut'),
(2, 'Apple'), (2, 'Banana'), (2, 'Coconut'),
(3, 'Apple'), (3, 'Banana'), (3, 'Coconut'),
(4, 'Apple'), (4, 'Banana'), (4, 'Coconut'),
(5, 'Apple'), (5, 'Banana'), (5, 'Coconut')])
self.assertEqual([(i, s) for i in nums for s in [f for f in strs if "n" in f]],
[(1, 'Banana'), (1, 'Coconut'), (2, 'Banana'), (2, 'Coconut'),
(3, 'Banana'), (3, 'Coconut'), (4, 'Banana'), (4, 'Coconut'),
(5, 'Banana'), (5, 'Coconut')])
self.assertEqual([(lambda a:[a**i for i in range(a+1)])(j) for j in range(5)],
[[1], [1, 1], [1, 2, 4], [1, 3, 9, 27], [1, 4, 16, 64, 256]])
def test_in_func(l):
return [0 < x < 3 for x in l if x > 2]
self.assertEqual(test_in_func(nums), [False, False, False])
def test_nested_front():
self.assertEqual([[y for y in [x, x + 1]] for x in [1,3,5]],
[[1, 2], [3, 4], [5, 6]])
test_nested_front()
# check_syntax_error(self, "[i, s for i in nums for s in strs]")
# Currently test.support module is not supported, so check_syntax_error is handled as the following
self.assertRaises(SyntaxError, compile, "[i, s for i in nums for s in strs]", '<test string>', 'exec')
# check_syntax_error(self, "[x if y]")
self.assertRaises(SyntaxError, compile, "[x if y]", '<test string>', 'exec')
suppliers = [
(1, "Boeing"),
(2, "Ford"),
(3, "Macdonalds")
]
parts = [
(10, "Airliner"),
(20, "Engine"),
(30, "Cheeseburger")
]
suppart = [
(1, 10), (1, 20), (2, 20), (3, 30)
]
x = [
(sname, pname)
for (sno, sname) in suppliers
for (pno, pname) in parts
for (sp_sno, sp_pno) in suppart
if sno == sp_sno and pno == sp_pno
]
self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'),
('Macdonalds', 'Cheeseburger')])
def testGenexps(self):
# generator expression tests
g = ([x for x in range(10)] for x in range(1))
self.assertEqual(next(g), [x for x in range(10)])
try:
next(g)
self.fail('should produce StopIteration exception')
except StopIteration:
pass
a = 1
try:
g = (a for d in a)
next(g)
self.fail('should produce TypeError')
except TypeError:
pass
self.assertEqual(list((x, y) for x in 'abcd' for y in 'abcd'), [(x, y) for x in 'abcd' for y in 'abcd'])
self.assertEqual(list((x, y) for x in 'ab' for y in 'xy'), [(x, y) for x in 'ab' for y in 'xy'])
a = [x for x in range(10)]
b = (x for x in (y for y in a))
self.assertEqual(sum(b), sum([x for x in range(10)]))
self.assertEqual(sum(x**2 for x in range(10)), sum([x**2 for x in range(10)]))
self.assertEqual(sum(x*x for x in range(10) if x%2), sum([x*x for x in range(10) if x%2]))
self.assertEqual(sum(x for x in (y for y in range(10))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10)))), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in [y for y in (z for z in range(10))]), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True)) if True), sum([x for x in range(10)]))
self.assertEqual(sum(x for x in (y for y in (z for z in range(10) if True) if False) if True), 0)
# check_syntax_error(self, "foo(x for x in range(10), 100)")
# Currently test.support module is not supported, so check_syntax_error is handled as the following
self.assertRaises(SyntaxError, compile, "foo(x for x in range(10), 100)", '<test string>', 'exec')
# check_syntax_error(self, "foo(100, x for x in range(10))")
self.assertRaises(SyntaxError, compile, "foo(100, x for x in range(10))", '<test string>', 'exec')
def testComprehensionSpecials(self):
# test for outmost iterable precomputation
x = 10; g = (i for i in range(x)); x = 5
self.assertEqual(len(list(g)), 10)
# This should hold, since we're only precomputing outmost iterable.
x = 10; t = False; g = ((i,j) for i in range(x) if t for j in range(x))
x = 5; t = True;
self.assertEqual([(i,j) for i in range(10) for j in range(5)], list(g))
# Grammar allows multiple adjacent 'if's in listcomps and genexps,
# even though it's silly. Make sure it works (ifelse broke this.)
self.assertEqual([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7])
self.assertEqual(list(x for x in range(10) if x % 2 if x % 3), [1, 5, 7])
# verify unpacking single element tuples in listcomp/genexp.
self.assertEqual([x for x, in [(4,), (5,), (6,)]], [4, 5, 6])
self.assertEqual(list(x for x, in [(7,), (8,), (9,)]), [7, 8, 9])
def test_with_statement(self):
class manager(object):
def __enter__(self):
return (1, 2)
def __exit__(self, *args):
pass
with manager():
pass
with manager() as x:
pass
with manager() as (x, y):
pass
with manager(), manager():
pass
with manager() as x, manager() as y:
pass
with manager() as x, manager():
pass
def testIfElseExpr(self):
# Test ifelse expressions in various cases
def _checkeval(msg, ret):
"helper to check that evaluation of expressions is done correctly"
print(x)
return ret
# the next line is not allowed anymore
#self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True])
self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True])
self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True])
self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5)
self.assertEqual((_checkeval("check 2", 0) if 0 else 5), 5)
self.assertEqual((5 and 6 if 0 else 1), 1)
self.assertEqual(((5 and 6) if 0 else 1), 1)
self.assertEqual((5 and (6 if 1 else 1)), 6)
self.assertEqual((0 or _checkeval("check 3", 2) if 0 else 3), 3)
self.assertEqual((1 or _checkeval("check 4", 2) if 1 else _checkeval("check 5", 3)), 1)
self.assertEqual((0 or 5 if 1 else _checkeval("check 6", 3)), 5)
self.assertEqual((not 5 if 1 else 1), False)
self.assertEqual((not 5 if 0 else 1), 1)
self.assertEqual((6 + 1 if 1 else 2), 7)
self.assertEqual((6 - 1 if 1 else 2), 5)
self.assertEqual((6 * 2 if 1 else 4), 12)
self.assertEqual((6 / 2 if 1 else 3), 3)
self.assertEqual((6 < 4 if 0 else 2), 2)
def test_paren_evaluation(self):
    """Parentheses change grouping for // and for chained `is` tests."""
    self.assertEqual(16 // (4 // 2), 8)
    self.assertEqual((16 // 4) // 2, 2)
    self.assertEqual(16 // 4 // 2, 2)   # // is left-associative
    # `is` on small ints is deliberate here: the point is that the chained
    # comparison `a is b is c` differs from `(a is b) is c`.
    self.assertTrue(False is (2 is 3))
    self.assertFalse((False is 2) is 3)
    self.assertFalse(False is 2 is 3)
# def test_main():
#     run_unittest(TokenTests, GrammarTests)
# NOTE: test.support.run_unittest is unavailable in this environment, so
# the standard unittest runner is used directly instead.

if __name__ == '__main__':
    #test_main()
    unittest.main()
|
bsd-3-clause
| -3,071,993,888,614,629,400
| 33.129763
| 132
| 0.490616
| false
|
z01nl1o02/tests
|
text/mlpbase.py
|
1
|
8554
|
import os,sys,pdb,pickle
import numpy as np
import cv2
import theano
import theano.tensor as T
import random
#you may try several times to get a good model and the init 'cost' may be quite large, 78 .e.g.
class Layer(object):
    """One fully connected layer: weights W of shape (n_out, n_in), a bias
    column b, and an optional elementwise activation.
    """
    def __init__(self, W_init, b_init, activation):
        """Wrap the given initial parameters in Theano shared variables."""
        n_out, n_in = W_init.shape
        assert b_init.shape == (n_out,)
        self.W = theano.shared(
            value=W_init.astype(theano.config.floatX),
            name="W",
            borrow=True)
        # The bias is stored as a column vector and marked broadcastable on
        # the second axis so that W.x + b works on whole mini-batches.
        self.b = theano.shared(
            value=b_init.reshape(n_out, 1).astype(theano.config.floatX),
            name="b",
            borrow=True,
            broadcastable=(False, True))
        self.activation = activation
        self.params = [self.W, self.b]

    def output(self, x):
        """Symbolic layer output: activation(W.x + b), or W.x + b if None."""
        pre_activation = T.dot(self.W, x) + self.b
        if self.activation is None:
            return pre_activation
        return self.activation(pre_activation)
class MLP(object):
    """A multi-layer perceptron: an ordered stack of Layer objects."""

    def __init__(self, W_init, b_init, activations):
        """Build layers from parallel lists of weights, biases, activations."""
        assert len(W_init) == len(b_init) == len(activations)
        self.layers = []
        for W, b, activation in zip(W_init, b_init, activations):
            self.layers.append(Layer(W, b, activation))
        # Flat parameter list (W0, b0, W1, b1, ...) used for gradient updates.
        self.params = []
        for layer in self.layers:
            self.params += layer.params

    def output(self, x):
        """Feed symbolic input x through every layer in order."""
        for layer in self.layers:
            x = layer.output(x)
        return x

    def squared_error(self, x, y):
        """Symbolic SUM of squared errors between the network output and y.

        Fix: the original had a second, unreachable ``return T.mean(...)``
        immediately after this return; only the T.sum form ever executed,
        so the dead statement has been removed and the behaviour kept.
        """
        return T.sum((self.output(x) - y) ** 2)

    def cvt2c(self):
        """Serialise all parameters to a comma-separated text block for C.

        Per parameter: a "rows,cols,\\n" header, then the values row by row.
        NOTE(review): the source indentation was mangled; the per-row newline
        placement below follows the conventional reading -- confirm.
        """
        line = ""
        for param in self.params:
            parval = param.get_value()
            line += "%d" % (parval.shape[0]) + ',' + "%d" % (parval.shape[1]) + ',\n'
            for y in range(parval.shape[0]):
                for x in range(parval.shape[1]):
                    line += "%lf" % (parval[y, x]) + ','
                line += '\n'
        return line
class MLP_PROXY(object):
def __init__(self, modelpath):
    """Create an untrained proxy; *modelpath* is where save()/load() pickle."""
    self._modelpath = modelpath
    # Compiled Theano callables and the underlying MLP; populated by
    # create() or load().
    self._train = None
    self._predict = None
    self._cost = None
    self._mlp = None
    # Per-feature (min, max) arrays recorded by pre_normalization().
    self._minmax = None
def gradient_updates_momentum(self, cost, params, learning_rate, momentum):
    """Build Theano update pairs for gradient descent with momentum.

    :param cost: symbolic cost expression to differentiate
    :param params: list of shared variables to update
    :param learning_rate: step size
    :param momentum: in [0, 1); fraction of the previous step retained
    :return: list of (shared_variable, new_value_expression) pairs
    """
    assert momentum < 1 and momentum >= 0
    updates = []
    for param in params:
        # One velocity accumulator per parameter, zero-initialised and
        # broadcast-shaped like the parameter itself.
        param_update = theano.shared(param.get_value() * 0., broadcastable=param.broadcastable)
        # NOTE(review): both update expressions appear to be evaluated
        # against the pre-update values, so the parameter moves with the
        # *previous* velocity -- confirm against Theano's updates semantics.
        updates.append((param, param - learning_rate * param_update))
        updates.append((param_update, momentum * param_update + (1. - momentum) * T.grad(cost, param)))
    return updates
def write_in_c_format(self, outpath):
    """Dump normalisation constants plus all MLP weights to *outpath* as text."""
    parts = []
    # First line: alternating per-feature minimum and maximum values.
    for lo, hi in zip(self._minmax[0], self._minmax[1]):
        parts.append("%lf,%lf," % (lo, hi))
    parts.append('\n')
    text = "".join(parts) + self._mlp.cvt2c()
    with open(outpath, 'w') as f:
        f.writelines(text)
    return
def create(self, layer_sizes, learning_rate = 0.01, momentum = 0.6):
    """Build the MLP and compile the Theano train/predict functions.

    :param layer_sizes: sizes per layer, e.g. [n_in, n_hidden, ..., n_out]
    :param learning_rate: gradient-descent step size
    :param momentum: momentum coefficient in [0, 1)
    """
    W_init = []
    b_init = []
    activations = []
    # One random weight matrix / bias / sigmoid per consecutive size pair.
    for n_input, n_output in zip(layer_sizes[:-1], layer_sizes[1:]):
        W_init.append(np.random.randn(n_output, n_input))
        b_init.append(np.random.randn(n_output))
        activations.append(T.nnet.sigmoid)
    mlp = MLP(W_init, b_init, activations)
    mlp_input = T.matrix('mlp_input')
    mlp_target = T.matrix('mlp_target')
    self._cost = mlp.squared_error(mlp_input, mlp_target)
    # _train applies the momentum updates as a side effect of each call.
    self._train = theano.function([mlp_input, mlp_target], self._cost, updates=self.gradient_updates_momentum(self._cost, mlp.params, learning_rate, momentum))
    self._predict = theano.function([mlp_input], mlp.output(mlp_input))
    self._mlp = mlp
    return
def train(self, samples, targets, max_iteration=5000, min_cost = 0.01):
    """Mini-batch gradient descent until max_iteration or cost < min_cost.

    samples and targets: (num samples) x (feature dimension), row-major.
    """
    iteration = 0
    # The compiled Theano graph expects column-major (feature x sample).
    samplesT = np.transpose(samples)  # W*x + b
    targetsT = np.transpose(targets)
    batchsize = 5
    # Progress is echoed at most every `echostep` iterations, capped at 1000.
    echostep = max_iteration / 10
    if echostep > 1000:
        echostep = 1000
    while iteration < max_iteration:
        cost = 0
        total = 0
        for k in range(0, samplesT.shape[1], batchsize):
            kk = k
            if kk + batchsize > samplesT.shape[1]:
                # Final partial batch: slide the window back so every batch
                # is full (a few tail samples are trained twice per epoch).
                kk = samplesT.shape[1] - batchsize
            s = np.reshape(samplesT[:, kk:kk+batchsize], (-1, batchsize))
            t = np.reshape(targetsT[:, kk:kk+batchsize], (-1, batchsize))
            current_cost = self._train(s, t)
            cost = cost + current_cost.sum()
            total += batchsize
        if (1+iteration) % echostep == 0:
            # NOTE: Python 2 print statement -- this module is Python 2 only.
            print iteration + 1, ',', cost
        if cost < min_cost:
            break
        iteration += 1
    return
def predict(self, samples):
    """Run the compiled network on row-major samples; return row-major output."""
    # The compiled Theano function works column-major (feature x sample).
    transposed = np.transpose(samples)  # W*x + b
    raw = self._predict(transposed)
    return np.transpose(raw)
def pre_normalization(self, samples):
    """Record per-feature (min, max) over all rows into self._minmax.

    Fix/improvement: the original folded np.minimum/np.maximum row by row
    in a Python loop; the vectorised axis-0 reduction below is equivalent
    and O(1) Python-side. It also always stores fresh arrays, whereas the
    original stored a *view* of samples[0, :] when there was a single row.
    """
    m0 = samples.min(axis=0)
    m1 = samples.max(axis=0)
    self._minmax = (m0, m1)
    return
def normalization(self, samples, u=1, l=-1):
    """Scale each column of *samples* into [l, u] using the stored min/max.

    pre_normalization() must have been called first; returns None otherwise.
    Near-constant columns (range < 0.001) are mapped to the constant l, and
    values outside the recorded range are clipped to [l, u].
    NOTE: *samples* is modified in place and also returned.
    """
    # Fix: use an identity test instead of the original `None == self._minmax`
    # -- `is None` is the correct idiom and avoids __eq__ surprises.
    if self._minmax is None:
        return None
    m0, m1 = self._minmax
    rng = m1 - m0
    tmp = np.ones(rng.shape)
    for k in range(len(rng)):
        if rng[k] < 0.001:
            # Degenerate feature: avoid divide-by-zero and zero its ratio
            # so the feature maps to the constant lower bound l.
            rng[k] = 1
            tmp[k] = 0
    ratio = tmp / rng
    for k in range(samples.shape[0]):
        feat = samples[k, :]
        feat = (feat - m0) * ratio * (u - l) + l
        idx = feat > u
        feat[idx] = u
        idx = feat < l
        feat[idx] = l
        samples[k, :] = feat
    return samples
def shuffle(self, samples, targets):
    """Return (samples, targets) with rows permuted by one shared random order."""
    count = samples.shape[0]
    order = range(count)
    random.shuffle(order)
    shuffled_samples = np.zeros(samples.shape)
    shuffled_targets = np.zeros(targets.shape)
    # The same permutation is applied to both arrays so rows stay paired.
    for dst, src in enumerate(order):
        shuffled_samples[dst, :] = samples[src, :]
        shuffled_targets[dst, :] = targets[src, :]
    return (shuffled_samples, shuffled_targets)
def target_vec2mat(self, target_list, labelnum, hvalue = 1.0, lvalue = 0.0):
    """Convert per-sample label-index lists into a dense (n, labelnum) matrix.

    Labels are 0-based; listed labels get hvalue, every other cell lvalue.
    """
    count = len(target_list)
    targets = np.zeros((count, labelnum))
    for row in range(count):
        targets[row, :] = lvalue
        for col in target_list[row]:
            targets[row, col] = hvalue
    return targets
def target_mat2vec(self, targets, labelnum, thresh = 0.5):
    """Inverse of target_vec2mat: recover per-sample label lists from scores.

    thresh > 0        : all labels whose score >= thresh
    thresh < -1024.0  : argmax labels, returned as (label, score) pairs
    otherwise         : argmax labels only

    Bug fix: the original used two independent `if` statements, so for any
    thresh > 0 the trailing `else` (top-value) branch ALSO ran, appending a
    second list per row. The branches are now mutually exclusive.
    """
    target_list = []
    if thresh > 0:
        for k in range(targets.shape[0]):
            l = []
            for j in range(targets.shape[1]):
                if targets[k, j] >= thresh:
                    l.append(j)
            target_list.append(l)
    elif thresh < -1024.0:
        for k in range(targets.shape[0]):
            l = []
            m1 = targets[k, :].max()
            for j in range(targets.shape[1]):
                if np.abs(targets[k, j] - m1) < 0.01:
                    l.append((j, m1))  # label and confidence
            target_list.append(l)
    else:  # top value
        for k in range(targets.shape[0]):
            l = []
            m1 = targets[k, :].max()
            for j in range(targets.shape[1]):
                if np.abs(targets[k, j] - m1) < 0.01:
                    l.append(j)
            target_list.append(l)
    return target_list
def save(self):
    """Pickle the compiled functions, normalisation stats and model to disk.

    :return: 0 on success, -1 when no model path was configured.
    """
    if self._modelpath is None:
        return -1
    with open(self._modelpath, 'wb') as f:
        pickle.dump((self._cost, self._train, self._predict, self._minmax, self._mlp), f)
    return 0
def load(self):
    """Restore the pickled model state from ``self._modelpath``.

    Returns 0 on success, -1 when no model path is configured.
    """
    if self._modelpath is None:
        return -1
    with open(self._modelpath, 'rb') as fh:
        state = pickle.load(fh)
    self._cost, self._train, self._predict, self._minmax, self._mlp = state
    return 0
|
gpl-2.0
| 3,815,222,287,935,987,700
| 35.4
| 162
| 0.513561
| false
|
wpoely86/vsc-base
|
lib/vsc/utils/run.py
|
2
|
31071
|
#
# Copyright 2009-2013 Ghent University
#
# This file is part of vsc-base,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/vsc-base
#
# vsc-base is free software: you can redistribute it and/or modify
# it under the terms of the GNU Library General Public License as
# published by the Free Software Foundation, either version 2 of
# the License, or (at your option) any later version.
#
# vsc-base is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with vsc-base. If not, see <http://www.gnu.org/licenses/>.
#
"""
Python module to execute a command
Historical overview of existing equivalent code
- EasyBuild filetools module
- C{run_cmd(cmd, log_ok=True, log_all=False, simple=False, inp=None, regexp=True, log_output=False, path=None)}
- C{run_cmd_qa(cmd, qa, no_qa=None, log_ok=True, log_all=False, simple=False, regexp=True, std_qa=None, path=None)}
- Executes a command cmd
- looks for questions and tries to answer based on qa dictionary
- returns exitcode and stdout+stderr (mixed)
- no input though stdin
- if C{log_ok} or C{log_all} are set -> will C{log.error} if non-zero exit-code
- if C{simple} is C{True} -> instead of returning a tuple (output, ec) it will just return C{True} or C{False} signifying success
- C{regexp} -> Regex used to check the output for errors. If C{True} will use default (see C{parselogForError})
- if log_output is True -> all output of command will be logged to a tempfile
- path is the path run_cmd should chdir to before doing anything
- Q&A: support reading stdout asynchronous and replying to a question through stdin
- Manage C{managecommands} module C{Command} class
- C{run} method
- python-package-vsc-utils run module Command class
- C{run} method
- C{mympirun} (old)
- C{runrun(self, cmd, returnout=False, flush=False, realcmd=False)}:
- C{runrunnormal(self, cmd, returnout=False, flush=False)}
- C{runrunfile(self, cmd, returnout=False, flush=False)}
- C{hanything} commands/command module
- C{run} method
- fake pty support
@author: Stijn De Weirdt (Ghent University)
"""
import errno
import logging
import os
import pty
import re
import signal
import sys
import time
from vsc.utils.fancylogger import getLogger, getAllExistingLoggers
# dotted module paths used by Run._prep_module to lazily import the
# process implementation (Popen/PIPE/STDOUT providers)
PROCESS_MODULE_ASYNCPROCESS_PATH = 'vsc.utils.asyncprocess'
PROCESS_MODULE_SUBPROCESS_PATH = 'subprocess'

# sentinel output and exit codes reported by the looping runners
RUNRUN_TIMEOUT_OUTPUT = ''
RUNRUN_TIMEOUT_EXITCODE = 123
RUNRUN_QA_MAX_MISS_EXITCODE = 124

# default shell used to execute commands (see Run.SHELL)
BASH = '/bin/bash'
SHELL = BASH
class DummyFunction(object):
    """Stand-in logger: any attribute lookup yields a no-op callable."""

    def __getattr__(self, name):
        def _noop(*args, **kwargs):
            """Accept any arguments, do nothing, return None."""
            return None
        return _noop
class Run(object):
    """Base class for static run method.

    Builds a shell command, optionally changes directory, starts a
    Popen-style process, optionally feeds stdin, waits for completion
    and returns (exitcode, output).
    """
    INIT_INPUT_CLOSE = True  # close stdin after writing the initial input
    USE_SHELL = True
    SHELL = SHELL  # set the shell via the module constant

    @classmethod
    def run(cls, cmd, **kwargs):
        """static method
        return (exitcode, output)
        """
        r = cls(cmd, **kwargs)
        return r._run()

    def __init__(self, cmd=None, **kwargs):
        """
        Handle initialisation
        @param cmd: command to run
        @param input: set "simple" input
        @param startpath: directory to change to before executing command
        @param disable_log: use fake logger (won't log anything)
        @param use_shell: use the subshell
        @param shell: change the shell
        """
        self.input = kwargs.pop('input', None)
        self.startpath = kwargs.pop('startpath', None)
        self.use_shell = kwargs.pop('use_shell', self.USE_SHELL)
        self.shell = kwargs.pop('shell', self.SHELL)
        if kwargs.pop('disable_log', None):
            self.log = DummyFunction()  # No logging
        if not hasattr(self, 'log'):
            self.log = getLogger(self._get_log_name())
        self.cmd = cmd  # actual command
        self._cwd_before_startpath = None
        self._process_module = None  # module providing Popen/PIPE/STDOUT
        self._process = None
        self.readsize = 1024  # number of bytes to read blocking
        self._shellcmd = None
        self._popen_named_args = None
        self._process_exitcode = None
        self._process_output = None
        # subclasses may demote this (eg RunNoWorries uses log.debug)
        self._post_exitcode_log_failure = self.log.error
        super(Run, self).__init__(**kwargs)

    def _get_log_name(self):
        """Set the log name"""
        return self.__class__.__name__

    def _prep_module(self, modulepath=None, extendfromlist=None):
        """Import the module that provides the required Popen, PIPE and STDOUT."""
        if modulepath is None:
            modulepath = PROCESS_MODULE_SUBPROCESS_PATH
        fromlist = ['Popen', 'PIPE', 'STDOUT']
        if extendfromlist is not None:
            fromlist.extend(extendfromlist)
        self._process_modulepath = modulepath
        self._process_module = __import__(self._process_modulepath, globals(), locals(), fromlist)

    def _run(self):
        """actual method

        Structure
          - pre
            - convert command to shell command - DONE
            - chdir before start - DONE
            - start C{Popen} - DONE
              - support async and subprocess - DONE
              - support for filehandle, PIPE - DONE, pty - DONE
          - main
            - should capture exitcode and output
            - features: separate stdout and stderr ? simple single run,
              no timeout/waiting - DONE, flush to stdout / logger - DONE,
              process intermediate output (qa), input (text - DONE)
          - post
            - parse with regexp (raise/log error on match)
            - return output (log / write to file / return in string - DONE)
            - on C{ec > 0}: error - DONE, raiseException, simple True/False
        """
        self._run_pre()
        self._wait_for_process()
        return self._run_post()

    def _run_pre(self):
        """Non-blocking start"""
        if self._process_module is None:
            self._prep_module()
        if self.startpath is not None:
            self._start_in_path()
        if self._shellcmd is None:
            self._make_shell_command()
        if self._popen_named_args is None:
            self._make_popen_named_args()
        self._init_process()
        self._init_input()

    def _run_post(self):
        """Cleanup, exitcode/output postprocessing and return."""
        self._cleanup_process()
        self._post_exitcode()
        self._post_output()
        if self.startpath is not None:
            self._return_to_previous_start_in_path()
        return self._run_return()

    def _start_in_path(self):
        """Change path before the run"""
        if self.startpath is None:
            self.log.debug("_start_in_path: no startpath set")
            return
        if os.path.exists(self.startpath):
            if os.path.isdir(self.startpath):
                try:
                    self._cwd_before_startpath = os.getcwd()  # store it so one can return to it
                    os.chdir(self.startpath)
                except:
                    # BUGFIX: was self.raiseException, which does not exist on Run
                    self.log.raiseException("_start_in_path: failed to change path from %s to startpath %s" %
                                            (self._cwd_before_startpath, self.startpath))
            else:
                # BUGFIX: was self.log.raiseExcpetion (typo)
                self.log.raiseException("_start_in_path: provided startpath %s exists but is no directory" %
                                        self.startpath)
        else:
            self.log.raiseException("_start_in_path: startpath %s does not exist" % self.startpath)

    def _return_to_previous_start_in_path(self):
        """Change to original path before the change to startpath"""
        if self._cwd_before_startpath is None:
            self.log.warning("_return_to_previous_start_in_path: previous cwd is empty. Not trying anything")
            return
        if os.path.exists(self._cwd_before_startpath):
            if os.path.isdir(self._cwd_before_startpath):
                try:
                    currentpath = os.getcwd()
                    if not currentpath == self.startpath:
                        self.log.warning(("_return_to_previous_start_in_path: current diretory %s does not match "
                                          "startpath %s") % (currentpath, self.startpath))
                    os.chdir(self._cwd_before_startpath)
                except:
                    self.log.raiseException(("_return_to_previous_start_in_path: failed to change path from current %s "
                                             "to previous path %s") % (currentpath, self._cwd_before_startpath))
            else:
                self.log.raiseException(("_return_to_previous_start_in_path: provided previous cwd path %s exists "
                                         "but is no directory") % self._cwd_before_startpath)
        else:
            self.log.raiseException("_return_to_previous_start_in_path: previous cwd path %s does not exist" %
                                    self._cwd_before_startpath)

    def _make_popen_named_args(self, others=None):
        """Create the named args for Popen"""
        self._popen_named_args = {
            'stdout': self._process_module.PIPE,
            'stderr': self._process_module.STDOUT,
            'stdin': self._process_module.PIPE,
            'close_fds': True,
            'shell': self.use_shell,
            'executable': self.shell,
        }
        if others is not None:
            self._popen_named_args.update(others)
        self.log.debug("_popen_named_args %s" % self._popen_named_args)

    def _make_shell_command(self):
        """Convert cmd into shell command"""
        if self.cmd is None:
            # BUGFIX: was self.log.raiseExcpetion (typo)
            self.log.raiseException("_make_shell_command: no cmd set.")
        if isinstance(self.cmd, basestring):
            self._shellcmd = self.cmd
        elif isinstance(self.cmd, (list, tuple,)):
            self._shellcmd = " ".join(self.cmd)
        else:
            self.log.raiseException("Failed to convert cmd %s (type %s) into shell command" % (self.cmd, type(self.cmd)))

    def _init_process(self):
        """Initialise the self._process"""
        try:
            self._process = self._process_module.Popen(self._shellcmd, **self._popen_named_args)
        except OSError as err:
            # BUGFIX: format string had two %s but only one argument, which
            # raised TypeError instead of reporting the Popen failure
            self.log.raiseException("_init_process: init Popen shellcmd %s failed: %s" % (self._shellcmd, err))

    def _init_input(self):
        """Handle input, if any in a simple way"""
        if self.input is not None:  # allow empty string (whatever it may mean)
            try:
                self._process.stdin.write(self.input)
            except:
                self.log.raiseException("_init_input: Failed write input %s to process" % self.input)
        if self.INIT_INPUT_CLOSE:
            self._process.stdin.close()
            self.log.debug("_init_input: process stdin closed")
        else:
            self.log.debug("_init_input: process stdin NOT closed")

    def _wait_for_process(self):
        """The main loop
        This one has most simple loop
        """
        try:
            self._process_exitcode = self._process.wait()
            self._process_output = self._read_process(-1)  # -1 is read all
        except:
            self.log.raiseException("_wait_for_process: problem during wait exitcode %s output %s" %
                                    (self._process_exitcode, self._process_output))

    def _cleanup_process(self):
        """Cleanup any leftovers from the process"""
        pass

    def _read_process(self, readsize=None):
        """Read from process, return out"""
        if readsize is None:
            readsize = self.readsize
        if readsize is None:
            readsize = -1  # read all
        self.log.debug("_read_process: going to read with readsize %s" % readsize)
        out = self._process.stdout.read(readsize)
        return out

    def _post_exitcode(self):
        """Postprocess the exitcode in self._process_exitcode"""
        if not self._process_exitcode == 0:
            self._post_exitcode_log_failure("_post_exitcode: problem occured with cmd %s: output %s" %
                                            (self.cmd, self._process_output))
        else:
            self.log.debug("_post_exitcode: success cmd %s: output %s" % (self.cmd, self._process_output))

    def _post_output(self):
        """Postprocess the output in self._process_output"""
        pass

    def _run_return(self):
        """What to return"""
        return self._process_exitcode, self._process_output

    def _killtasks(self, tasks=None, sig=signal.SIGKILL, kill_pgid=False):
        """
        Kill all tasks
        @param: tasks list of processids
        @param: sig, signal to use to kill
        @param: kill_pgid, send kill to group
        """
        if tasks is None:
            self.log.error("killtasks no tasks passed")
        elif isinstance(tasks, basestring):
            try:
                tasks = [int(tasks)]
            except:
                self.log.error("killtasks failed to convert tasks string %s to int" % tasks)
        for pid in tasks:
            pgid = os.getpgid(pid)
            try:
                os.kill(int(pid), sig)
                if kill_pgid:
                    os.killpg(pgid, sig)
                self.log.debug("Killed %s with signal %s" % (pid, sig))
            except OSError as err:
                # ESRCH is no such process, so no issue
                if not err.errno == errno.ESRCH:
                    self.log.error("Failed to kill %s: %s" % (pid, err))
            except Exception as err:
                self.log.error("Failed to kill %s: %s" % (pid, err))

    def stop_tasks(self):
        """Cleanup current run"""
        self._killtasks(tasks=[self._process.pid])
        try:
            # reap any finished children, non-blocking
            os.waitpid(-1, os.WNOHANG)
        except:
            pass
class RunNoWorries(Run):
    """When the exitcode is >0, log.debug instead of log.error"""
    def __init__(self, cmd, **kwargs):
        super(RunNoWorries, self).__init__(cmd, **kwargs)
        # demote exit-code failure reporting from error to debug level
        self._post_exitcode_log_failure = self.log.debug
class RunLoopException(Exception):
    """Carries an exit code and the output collected so far out of a run loop."""

    def __init__(self, code, output):
        self.code = code      # exit code to report
        self.output = output  # process output gathered up to the break

    def __str__(self):
        return "{0} code {1} output {2}".format(self.__class__.__name__, self.code, self.output)
class RunLoop(Run):
    """Main process is a while loop which reads the output in blocks

    Need to read from time to time, otherwise the stdout/stderr buffer
    gets filled and it all stops working.
    """
    LOOP_TIMEOUT_INIT = 0.1  # initial sleep before the first poll
    LOOP_TIMEOUT_MAIN = 1  # sleep between polls when no output was read

    def __init__(self, cmd, **kwargs):
        super(RunLoop, self).__init__(cmd, **kwargs)
        self._loop_count = None
        self._loop_continue = None  # initial state, change this to break out the loop

    def _wait_for_process(self):
        """Loop through the process in timesteps
        collected output is run through _loop_process_output
        """
        # these are initialised outside the function (cannot be forgotten, but can be overwritten)
        self._loop_count = 0  # internal counter
        self._loop_continue = True
        self._process_output = ''
        # further initialisation
        self._loop_initialise()
        time.sleep(self.LOOP_TIMEOUT_INIT)
        ec = self._process.poll()
        try:
            # NOTE(review): 'ec < 0' relies on Python 2 ordering where
            # None < 0 is True while the process is still running; confirm
            # the process module's poll() semantics before porting
            while self._loop_continue and ec < 0:
                output = self._read_process()
                self._process_output += output
                # process after updating the self._process_ vars
                self._loop_process_output(output)
                if len(output) == 0:
                    time.sleep(self.LOOP_TIMEOUT_MAIN)
                ec = self._process.poll()
                self._loop_count += 1
            self.log.debug("_wait_for_process: loop stopped after %s iterations (ec %s loop_continue %s)" %
                           (self._loop_count, ec, self._loop_continue))
            # read remaining data (all of it)
            output = self._read_process(-1)
            self._process_output += output
            self._process_exitcode = ec
            # process after updating the self._process_ vars
            self._loop_process_output_final(output)
        except RunLoopException as err:  # BUGFIX-compat: replaced py2-only 'except X, err' syntax
            self.log.debug('RunLoopException %s' % err)
            self._process_output = err.output
            self._process_exitcode = err.code

    def _loop_initialise(self):
        """Initialisation before the loop starts"""
        pass

    def _loop_process_output(self, output):
        """Process the output that is read in blocks
        simplest form: do nothing
        """
        pass

    def _loop_process_output_final(self, output):
        """Process the remaining output that is read
        simplest form: do the same as _loop_process_output
        """
        self._loop_process_output(output)
class RunLoopLog(RunLoop):
    """Loop runner that streams all process output to the logger."""
    # log level used when streaming process output
    LOOP_LOG_LEVEL = logging.INFO

    def _wait_for_process(self):
        # initialise the info logger
        self.log.info("Going to run cmd %s" % self._shellcmd)
        super(RunLoopLog, self)._wait_for_process()

    def _loop_process_output(self, output):
        """Process the output that is read in blocks:
        send it to the logger. The logger needs to be stream-like.
        """
        self.log.streamLog(self.LOOP_LOG_LEVEL, output)
        super(RunLoopLog, self)._loop_process_output(output)
class RunLoopStdout(RunLoop):
    """Loop runner that mirrors all process output to stdout."""

    def _loop_process_output(self, output):
        """Forward each block of read output to stdout and flush immediately."""
        stream = sys.stdout
        stream.write(output)
        stream.flush()
        super(RunLoopStdout, self)._loop_process_output(output)
class RunAsync(Run):
    """Async process class: non-blocking reads via the asyncprocess module."""

    def _prep_module(self, modulepath=None, extendfromlist=None):
        # the asyncprocess module also provides send_all/recv_some
        if modulepath is None:
            modulepath = PROCESS_MODULE_ASYNCPROCESS_PATH
        if extendfromlist is None:
            extendfromlist = ['send_all', 'recv_some']
        super(RunAsync, self)._prep_module(modulepath=modulepath, extendfromlist=extendfromlist)

    def _read_process(self, readsize=None):
        """Read from async process, return out.

        @param readsize: maximum number of bytes to return from a
            non-blocking read; None uses self.readsize; a negative value
            reads everything (blocking).
        """
        if readsize is None:
            readsize = self.readsize
        if self._process.stdout is None:
            # Nothing yet/anymore
            return ''
        try:
            if readsize is not None and readsize < 0:
                # read all blocking (it's not why we should use async)
                out = self._process.stdout.read()
            else:
                # non-blocking read (readsize is a maximum to return)
                out = self._process_module.recv_some(self._process, maxread=readsize)
            return out
        except Exception:
            # recv_some may raise any Exception; the original caught the
            # redundant tuple (IOError, Exception) - IOError is a subclass
            self.log.exception("_read_process: read failed")
            return ''
class RunFile(Run):
    """Popen to filehandle: process output is redirected to a file."""

    def __init__(self, cmd, **kwargs):
        # filename: destination file for the process output
        self.filename = kwargs.pop('filename', None)
        self.filehandle = None
        super(RunFile, self).__init__(cmd, **kwargs)

    def _make_popen_named_args(self, others=None):
        """Point Popen's stdout at the (newly opened) output file."""
        if others is None:
            if os.path.exists(self.filename):
                if os.path.isfile(self.filename):
                    self.log.warning("_make_popen_named_args: going to overwrite existing file %s" % self.filename)
                elif os.path.isdir(self.filename):
                    # BUGFIX: was self.raiseException, which does not exist on Run
                    self.log.raiseException(("_make_popen_named_args: writing to filename %s impossible. Path exists and "
                                             "is a directory.") % self.filename)
                else:
                    self.log.raiseException("_make_popen_named_args: path exists and is not a file or directory %s" %
                                            self.filename)
            else:
                dirname = os.path.dirname(self.filename)
                if dirname and not os.path.isdir(dirname):
                    try:
                        os.makedirs(dirname)
                    except:
                        self.log.raiseException(("_make_popen_named_args: dirname %s for file %s does not exists. "
                                                 "Creating it failed.") % (dirname, self.filename))
            try:
                self.filehandle = open(self.filename, 'w')
            except:
                self.log.raiseException("_make_popen_named_args: failed to open filehandle for file %s" % self.filename)
            others = {
                'stdout': self.filehandle,
            }
        super(RunFile, self)._make_popen_named_args(others=others)

    def _cleanup_process(self):
        """Close the filehandle"""
        try:
            self.filehandle.close()
        except:
            self.log.raiseException("_cleanup_process: failed to close filehandle for filename %s" % self.filename)

    def _read_process(self, readsize=None):
        """Meaningless for filehandle"""
        return ''
class RunPty(Run):
    """Pty support (eg for screen sessions)"""
    def _read_process(self, readsize=None):
        """This does not work for pty"""
        # all process I/O goes through the pty slave; nothing to read here
        return ''

    def _make_popen_named_args(self, others=None):
        # attach the child's stdin/stdout/stderr to the slave end of a new pty
        # NOTE(review): the master fd is never used or closed here -
        # looks like a possible fd leak; confirm intended lifetime
        if others is None:
            (master, slave) = pty.openpty()
            others = {
                'stdin': slave,
                'stdout': slave,
                'stderr': slave
            }
        super(RunPty, self)._make_popen_named_args(others=others)
class RunTimeout(RunLoop, RunAsync):
    """Run a command for at most ``timeout`` seconds; kill it on expiry."""

    def __init__(self, cmd, **kwargs):
        # BUGFIX: float(kwargs.pop('timeout', None)) raised TypeError when
        # no timeout was passed, contradicting the later 'is not None' check
        timeout = kwargs.pop('timeout', None)
        self.timeout = None if timeout is None else float(timeout)
        self.start = time.time()
        super(RunTimeout, self).__init__(cmd, **kwargs)

    def _loop_process_output(self, output):
        """Stop the process once the timeout has been exceeded."""
        time_passed = time.time() - self.start
        if self.timeout is not None and time_passed > self.timeout:
            self.log.debug("Time passed %s > timeout %s." % (time_passed, self.timeout))
            self.stop_tasks()
            # go out of loop
            raise RunLoopException(RUNRUN_TIMEOUT_EXITCODE, RUNRUN_TIMEOUT_OUTPUT)
        super(RunTimeout, self)._loop_process_output(output)
class RunQA(RunLoop, RunAsync):
    """Question/Answer processing: answer matched questions via stdin."""
    LOOP_MAX_MISS_COUNT = 20  # give up after this many iterations without progress
    INIT_INPUT_CLOSE = False  # stdin must stay open to send answers
    CYCLE_ANSWERS = True  # cycle multi-answer lists instead of exhausting them

    def __init__(self, cmd, **kwargs):
        """
        Add question and answer style running
        @param qa: dict with exact questions and answers
        @param qa_reg: dict with (named) regex-questions and answers (answers can contain named string templates)
        @param no_qa: list of regex that can block the output, but is not seen as a question.
        Regular expressions are compiled, just pass the (raw) text.
        """
        qa = kwargs.pop('qa', {})
        qa_reg = kwargs.pop('qa_reg', {})
        no_qa = kwargs.pop('no_qa', [])
        self._loop_miss_count = None  # consecutive loop iterations without progress
        self._loop_previous_ouput_length = None  # track length of output through loop
        super(RunQA, self).__init__(cmd, **kwargs)
        self.qa, self.qa_reg, self.no_qa = self._parse_qa(qa, qa_reg, no_qa)

    def _parse_qa(self, qa, qa_reg, no_qa):
        """
        process the QandA dictionary
        - given initial set of Q and A (in dict), return dict of reg. exp. and A
        - make regular expression that matches the string with
          - replace whitespace
          - replace newline
        - qa_reg: question is compiled as is, and whitespace+ending is added
        - provided answers can be either strings or lists of strings (which will be used iteratively)
        """
        def escape_special(string):
            specials = '.*+?(){}[]|\$^'
            return re.sub(r"([%s])" % ''.join(['\%s' % x for x in specials]), r"\\\1", string)

        SPLIT = '[\s\n]+'
        REG_SPLIT = re.compile(r"" + SPLIT)

        def process_answers(answers):
            """Construct list of newline-terminated answers (as strings)."""
            if isinstance(answers, basestring):
                answers = [answers]
            elif isinstance(answers, list):
                # list is manipulated when answering matching question, so take a copy
                answers = answers[:]
            else:
                msg_tmpl = "Invalid type for answer, not a string or list: %s (%s)"
                self.log.raiseException(msg_tmpl % (type(answers), answers), exception=TypeError)
            # add optional split at the end
            for i in [idx for idx, a in enumerate(answers) if not a.endswith('\n')]:
                answers[i] += '\n'
            return answers

        def process_question(question):
            """Convert string question to regex."""
            split_q = [escape_special(x) for x in REG_SPLIT.split(question)]
            reg_q_txt = SPLIT.join(split_q) + SPLIT.rstrip('+') + "*$"
            reg_q = re.compile(r"" + reg_q_txt)
            if reg_q.search(question):
                return reg_q
            else:
                # this is just a sanity check on the created regex, can this actually occur?
                msg_tmpl = "_parse_qa process_question: question %s converted in %s does not match itself"
                # BUGFIX: question is a plain string here; question.pattern raised AttributeError
                self.log.raiseException(msg_tmpl % (question, reg_q_txt), exception=ValueError)

        new_qa = {}
        self.log.debug("new_qa: ")
        for question, answers in qa.items():
            reg_q = process_question(question)
            new_qa[reg_q] = process_answers(answers)
            self.log.debug("new_qa[%s]: %s" % (reg_q.pattern.__repr__(), answers))

        new_qa_reg = {}
        self.log.debug("new_qa_reg: ")
        for question, answers in qa_reg.items():
            reg_q = re.compile(r"" + question + r"[\s\n]*$")
            new_qa_reg[reg_q] = process_answers(answers)
            self.log.debug("new_qa_reg[%s]: %s" % (reg_q.pattern.__repr__(), answers))

        # simple statements, can contain wildcards
        new_no_qa = [re.compile(r"" + x + r"[\s\n]*$") for x in no_qa]
        self.log.debug("new_no_qa: %s" % [x.pattern.__repr__() for x in new_no_qa])

        return new_qa, new_qa_reg, new_no_qa

    def _loop_initialise(self):
        """Initialisation before the loop starts"""
        self._loop_miss_count = 0
        self._loop_previous_ouput_length = 0

    def _loop_process_output(self, output):
        """Process the output that is read in blocks
        check the output passed to questions available
        """
        hit = False
        self.log.debug('output %s all_output %s' % (output, self._process_output))
        # qa first and then qa_reg
        nr_qa = len(self.qa)
        for idx, (question, answers) in enumerate(self.qa.items() + self.qa_reg.items()):
            res = question.search(self._process_output)
            if output and res:
                answer = answers[0] % res.groupdict()
                if len(answers) > 1:
                    # rotate (or consume) the answer list
                    prev_answer = answers.pop(0)
                    if self.CYCLE_ANSWERS:
                        answers.append(prev_answer)
                    self.log.debug("New answers list for question %s: %s" % (question.pattern, answers))
                self.log.debug("_loop_process_output: answer %s question %s (std: %s) out %s" %
                               (answer, question.pattern, idx >= nr_qa, self._process_output[-50:]))
                self._process_module.send_all(self._process, answer)
                hit = True
                break
        if not hit:
            curoutlen = len(self._process_output)
            if curoutlen > self._loop_previous_ouput_length:
                # still progress in output, just continue (but don't reset miss counter either)
                self._loop_previous_ouput_length = curoutlen
            else:
                noqa = False
                for r in self.no_qa:
                    if r.search(self._process_output):
                        self.log.debug("_loop_process_output: no_qa found for out %s" % self._process_output[-50:])
                        noqa = True
                if not noqa:
                    self._loop_miss_count += 1
        else:
            self._loop_miss_count = 0  # reset miss counter on hit
        if self._loop_miss_count > self.LOOP_MAX_MISS_COUNT:
            self.log.debug("loop_process_output: max misses LOOP_MAX_MISS_COUNT %s reached. End of output: %s" %
                           (self.LOOP_MAX_MISS_COUNT, self._process_output[-500:]))
            self.stop_tasks()
            # go out of loop
            raise RunLoopException(RUNRUN_QA_MAX_MISS_EXITCODE, self._process_output)
        super(RunQA, self)._loop_process_output(output)
class RunAsyncLoop(RunLoop, RunAsync):
    """Async read in loop"""
    # combines RunLoop's polling loop with RunAsync's non-blocking reads
    pass
class RunAsyncLoopLog(RunLoopLog, RunAsync):
    """Async read, log to logger"""
    # non-blocking reads, each block streamed to the logger
    pass
class RunQALog(RunLoopLog, RunQA):
    """Async loop QA with LoopLog"""
    # question/answer handling plus all output streamed to the logger
    pass
class RunQAStdout(RunLoopStdout, RunQA):
    """Async loop QA with LoopLogStdout"""
    # question/answer handling plus all output mirrored to stdout
    pass
class RunAsyncLoopStdout(RunLoopStdout, RunAsync):
    """Async read, flush to stdout"""
    # non-blocking reads, each block written and flushed to stdout
    pass
# convenient names
# eg: from vsc.utils.run import trivial
run_simple = Run.run  # blocking run; non-zero exitcode logged as error
run_simple_noworries = RunNoWorries.run  # blocking run; failures logged at debug level
run_async = RunAsync.run  # single non-blocking read after wait
run_asyncloop = RunAsyncLoop.run  # poll loop with non-blocking reads
run_timeout = RunTimeout.run  # kill the command after the given timeout
run_to_file = RunFile.run  # redirect output to a file
run_async_to_stdout = RunAsyncLoopStdout.run  # mirror output to stdout while running
run_qa = RunQA.run  # answer matched questions via stdin
run_qalog = RunQALog.run  # QA, output streamed to the logger
run_qastdout = RunQAStdout.run  # QA, output mirrored to stdout

if __name__ == "__main__":
    # tiny smoke test of the simplest runner
    run_simple('echo ok')
|
lgpl-2.1
| 4,259,829,277,435,143
| 35.813981
| 132
| 0.568408
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.