_id (string, 2-7 chars) | title (string, 1-88 chars) | partition (string, 3 classes) | text (string, 75-19.8k chars) | language (string, 1 class) | meta_information (dict) |
---|---|---|---|---|---|
q280300
|
path.read_md5
|
test
|
def read_md5(self):
""" Calculate the md5 hash for this file.
This reads through the entire file.
"""
f = self.open('rb')
try:
m = md5()
while True:
d = f.read(8192)
if not d:
break
m.update(d)
finally:
f.close()
return m.digest()
|
python
|
{
"resource": ""
}
|
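For readers outside the path.py context, a minimal standalone sketch of the same chunked-hashing pattern using hashlib directly (the helper above gets its file object from `self.open('rb')`; `file_md5` and `chunk_size` are illustrative names, not part of the snippet):

from hashlib import md5

def file_md5(filename, chunk_size=8192):
    """Hash a file in fixed-size chunks so large files never load fully into memory."""
    m = md5()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            m.update(chunk)
    return m.digest()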
q280301
|
Profile.begin
|
test
|
def begin(self):
"""Create profile stats file and load profiler.
"""
if not self.available():
return
self._create_pfile()
self.prof = hotshot.Profile(self.pfile)
|
python
|
{
"resource": ""
}
|
q280302
|
Profile.report
|
test
|
def report(self, stream):
"""Output profiler report.
"""
log.debug('printing profiler report')
self.prof.close()
prof_stats = stats.load(self.pfile)
prof_stats.sort_stats(self.sort)
# 2.5 has completely different stream handling from 2.4 and earlier.
# Before 2.5, stats objects have no stream attribute; in 2.5 and later
        # a reference to sys.stdout is stored before we can tweak it.
compat_25 = hasattr(prof_stats, 'stream')
if compat_25:
tmp = prof_stats.stream
prof_stats.stream = stream
else:
tmp = sys.stdout
sys.stdout = stream
try:
if self.restrict:
log.debug('setting profiler restriction to %s', self.restrict)
prof_stats.print_stats(*self.restrict)
else:
prof_stats.print_stats()
finally:
if compat_25:
prof_stats.stream = tmp
else:
sys.stdout = tmp
|
python
|
{
"resource": ""
}
|
q280303
|
Profile.finalize
|
test
|
def finalize(self, result):
"""Clean up stats file, if configured to do so.
"""
if not self.available():
return
try:
self.prof.close()
except AttributeError:
# TODO: is this trying to catch just the case where not
# hasattr(self.prof, "close")? If so, the function call should be
# moved out of the try: suite.
pass
if self.clean_stats_file:
if self.fileno:
try:
os.close(self.fileno)
except OSError:
pass
try:
os.unlink(self.pfile)
except OSError:
pass
return None
|
python
|
{
"resource": ""
}
|
q280304
|
Command.handle
|
test
|
def handle(self, *args, **options):
"""Handle CLI command"""
try:
while True:
Channel(HEARTBEAT_CHANNEL).send({'time':time.time()})
time.sleep(HEARTBEAT_FREQUENCY)
except KeyboardInterrupt:
print("Received keyboard interrupt, exiting...")
|
python
|
{
"resource": ""
}
|
q280305
|
InputHookManager.enable_wx
|
test
|
def enable_wx(self, app=None):
"""Enable event loop integration with wxPython.
Parameters
----------
app : WX Application, optional.
Running application to use. If not given, we probe WX for an
existing application object, and create a new one if none is found.
Notes
-----
        This method sets the ``PyOS_InputHook`` for wxPython, which allows
        wxPython to integrate with terminal-based applications like
        IPython.
If ``app`` is not given we probe for an existing one, and return it if
found. If no existing app is found, we create an :class:`wx.App` as
follows::
import wx
app = wx.App(redirect=False, clearSigInt=False)
"""
import wx
wx_version = V(wx.__version__).version
if wx_version < [2, 8]:
raise ValueError("requires wxPython >= 2.8, but you have %s" % wx.__version__)
from IPython.lib.inputhookwx import inputhook_wx
self.set_inputhook(inputhook_wx)
self._current_gui = GUI_WX
import wx
if app is None:
app = wx.GetApp()
if app is None:
app = wx.App(redirect=False, clearSigInt=False)
app._in_event_loop = True
self._apps[GUI_WX] = app
return app
|
python
|
{
"resource": ""
}
|
q280306
|
InputHookManager.disable_wx
|
test
|
def disable_wx(self):
"""Disable event loop integration with wxPython.
This merely sets PyOS_InputHook to NULL.
"""
if self._apps.has_key(GUI_WX):
self._apps[GUI_WX]._in_event_loop = False
self.clear_inputhook()
|
python
|
{
"resource": ""
}
|
q280307
|
InputHookManager.disable_qt4
|
test
|
def disable_qt4(self):
"""Disable event loop integration with PyQt4.
This merely sets PyOS_InputHook to NULL.
"""
if self._apps.has_key(GUI_QT4):
self._apps[GUI_QT4]._in_event_loop = False
self.clear_inputhook()
|
python
|
{
"resource": ""
}
|
q280308
|
InputHookManager.enable_gtk
|
test
|
def enable_gtk(self, app=None):
"""Enable event loop integration with PyGTK.
Parameters
----------
app : ignored
Ignored, it's only a placeholder to keep the call signature of all
gui activation methods consistent, which simplifies the logic of
supporting magics.
Notes
-----
        This method sets the PyOS_InputHook for PyGTK, which allows
        PyGTK to integrate with terminal-based applications like
        IPython.
"""
import gtk
try:
gtk.set_interactive(True)
self._current_gui = GUI_GTK
except AttributeError:
# For older versions of gtk, use our own ctypes version
from IPython.lib.inputhookgtk import inputhook_gtk
self.set_inputhook(inputhook_gtk)
self._current_gui = GUI_GTK
|
python
|
{
"resource": ""
}
|
q280309
|
InputHookManager.enable_tk
|
test
|
def enable_tk(self, app=None):
"""Enable event loop integration with Tk.
Parameters
----------
app : toplevel :class:`Tkinter.Tk` widget, optional.
Running toplevel widget to use. If not given, we probe Tk for an
existing one, and create a new one if none is found.
Notes
-----
If you have already created a :class:`Tkinter.Tk` object, the only
thing done by this method is to register with the
:class:`InputHookManager`, since creating that object automatically
sets ``PyOS_InputHook``.
"""
self._current_gui = GUI_TK
if app is None:
import Tkinter
app = Tkinter.Tk()
app.withdraw()
self._apps[GUI_TK] = app
return app
|
python
|
{
"resource": ""
}
|
q280310
|
InputHookManager.enable_pyglet
|
test
|
def enable_pyglet(self, app=None):
"""Enable event loop integration with pyglet.
Parameters
----------
app : ignored
Ignored, it's only a placeholder to keep the call signature of all
gui activation methods consistent, which simplifies the logic of
supporting magics.
Notes
-----
        This method sets the ``PyOS_InputHook`` for pyglet, which allows
        pyglet to integrate with terminal-based applications like
        IPython.
"""
import pyglet
from IPython.lib.inputhookpyglet import inputhook_pyglet
self.set_inputhook(inputhook_pyglet)
self._current_gui = GUI_PYGLET
return app
|
python
|
{
"resource": ""
}
|
q280311
|
wave_saver
|
test
|
def wave_saver(u, x, y, t):
"""save the wave log"""
global u_hist
global t_hist
t_hist.append(t)
u_hist.append(1.0*u)
|
python
|
{
"resource": ""
}
|
q280312
|
HistoryAccessor.init_db
|
test
|
def init_db(self):
"""Connect to the database, and create tables if necessary."""
# use detect_types so that timestamps return datetime objects
self.db = sqlite3.connect(self.hist_file,
detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
primary key autoincrement, start timestamp,
end timestamp, num_cmds integer, remark text)""")
self.db.execute("""CREATE TABLE IF NOT EXISTS history
(session integer, line integer, source text, source_raw text,
PRIMARY KEY (session, line))""")
# Output history is optional, but ensure the table's there so it can be
# enabled later.
self.db.execute("""CREATE TABLE IF NOT EXISTS output_history
(session integer, line integer, output text,
PRIMARY KEY (session, line))""")
self.db.commit()
|
python
|
{
"resource": ""
}
|
q280313
|
HistoryAccessor._run_sql
|
test
|
def _run_sql(self, sql, params, raw=True, output=False):
"""Prepares and runs an SQL query for the history database.
Parameters
----------
sql : str
Any filtering expressions to go after SELECT ... FROM ...
params : tuple
Parameters passed to the SQL query (to replace "?")
raw, output : bool
See :meth:`get_range`
Returns
-------
Tuples as :meth:`get_range`
"""
toget = 'source_raw' if raw else 'source'
sqlfrom = "history"
if output:
sqlfrom = "history LEFT JOIN output_history USING (session, line)"
toget = "history.%s, output_history.output" % toget
cur = self.db.execute("SELECT session, line, %s FROM %s " %\
(toget, sqlfrom) + sql, params)
if output: # Regroup into 3-tuples, and parse JSON
return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur)
return cur
|
python
|
{
"resource": ""
}
|
q280314
|
HistoryAccessor.get_session_info
|
test
|
def get_session_info(self, session=0):
"""get info about a session
Parameters
----------
session : int
Session number to retrieve. The current session is 0, and negative
numbers count back from current session, so -1 is previous session.
Returns
-------
(session_id [int], start [datetime], end [datetime], num_cmds [int],
remark [unicode])
Sessions that are running or did not exit cleanly will have `end=None`
and `num_cmds=None`.
"""
if session <= 0:
session += self.session_number
query = "SELECT * from sessions where session == ?"
return self.db.execute(query, (session,)).fetchone()
|
python
|
{
"resource": ""
}
|
q280315
|
HistoryAccessor.get_tail
|
test
|
def get_tail(self, n=10, raw=True, output=False, include_latest=False):
"""Get the last n lines from the history database.
Parameters
----------
n : int
The number of lines to get
raw, output : bool
See :meth:`get_range`
include_latest : bool
        If False (default), n+1 lines are fetched, and the latest one
        is discarded. This is intended for use where the function is
        called by a user command, so that the command itself is not
        returned.
Returns
-------
Tuples as :meth:`get_range`
"""
self.writeout_cache()
if not include_latest:
n += 1
cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?",
(n,), raw=raw, output=output)
if not include_latest:
return reversed(list(cur)[1:])
return reversed(list(cur))
|
python
|
{
"resource": ""
}
|
q280316
|
HistoryAccessor.get_range_by_str
|
test
|
def get_range_by_str(self, rangestr, raw=True, output=False):
"""Get lines of history from a string of ranges, as used by magic
commands %hist, %save, %macro, etc.
Parameters
----------
rangestr : str
A string specifying ranges, e.g. "5 ~2/1-4". See
:func:`magic_history` for full details.
raw, output : bool
As :meth:`get_range`
Returns
-------
Tuples as :meth:`get_range`
"""
for sess, s, e in extract_hist_ranges(rangestr):
for line in self.get_range(sess, s, e, raw=raw, output=output):
yield line
|
python
|
{
"resource": ""
}
|
q280317
|
HistoryManager._get_hist_file_name
|
test
|
def _get_hist_file_name(self, profile=None):
"""Get default history file name based on the Shell's profile.
The profile parameter is ignored, but must exist for compatibility with
the parent class."""
profile_dir = self.shell.profile_dir.location
return os.path.join(profile_dir, 'history.sqlite')
|
python
|
{
"resource": ""
}
|
q280318
|
HistoryManager.name_session
|
test
|
def name_session(self, name):
"""Give the current session a name in the history database."""
with self.db:
self.db.execute("UPDATE sessions SET remark=? WHERE session==?",
(name, self.session_number))
|
python
|
{
"resource": ""
}
|
q280319
|
HistoryManager.reset
|
test
|
def reset(self, new_session=True):
"""Clear the session history, releasing all object references, and
optionally open a new session."""
self.output_hist.clear()
# The directory history can't be completely empty
self.dir_hist[:] = [os.getcwdu()]
if new_session:
if self.session_number:
self.end_session()
self.input_hist_parsed[:] = [""]
self.input_hist_raw[:] = [""]
self.new_session()
|
python
|
{
"resource": ""
}
|
q280320
|
HistoryManager._get_range_session
|
test
|
def _get_range_session(self, start=1, stop=None, raw=True, output=False):
"""Get input and output history from the current session. Called by
get_range, and takes similar parameters."""
input_hist = self.input_hist_raw if raw else self.input_hist_parsed
n = len(input_hist)
if start < 0:
start += n
if not stop or (stop > n):
stop = n
elif stop < 0:
stop += n
for i in range(start, stop):
if output:
line = (input_hist[i], self.output_hist_reprs.get(i))
else:
line = input_hist[i]
yield (0, i, line)
|
python
|
{
"resource": ""
}
|
q280321
|
HistoryManager.store_output
|
test
|
def store_output(self, line_num):
"""If database output logging is enabled, this saves all the
outputs from the indicated prompt number to the database. It's
called by run_cell after code has been executed.
Parameters
----------
line_num : int
The line number from which to save outputs
"""
if (not self.db_log_output) or (line_num not in self.output_hist_reprs):
return
output = self.output_hist_reprs[line_num]
with self.db_output_cache_lock:
self.db_output_cache.append((line_num, output))
if self.db_cache_size <= 1:
self.save_flag.set()
|
python
|
{
"resource": ""
}
|
q280322
|
HistoryManager.writeout_cache
|
test
|
def writeout_cache(self, conn=None):
"""Write any entries in the cache to the database."""
if conn is None:
conn = self.db
with self.db_input_cache_lock:
try:
self._writeout_input_cache(conn)
except sqlite3.IntegrityError:
self.new_session(conn)
print("ERROR! Session/line number was not unique in",
"database. History logging moved to new session",
self.session_number)
try:
# Try writing to the new session. If this fails, don't
# recurse
self._writeout_input_cache(conn)
except sqlite3.IntegrityError:
pass
finally:
self.db_input_cache = []
with self.db_output_cache_lock:
try:
self._writeout_output_cache(conn)
except sqlite3.IntegrityError:
print("!! Session/line number for output was not unique",
"in database. Output will not be stored.")
finally:
self.db_output_cache = []
|
python
|
{
"resource": ""
}
|
q280323
|
HistorySavingThread.stop
|
test
|
def stop(self):
"""This can be called from the main thread to safely stop this thread.
Note that it does not attempt to write out remaining history before
exiting. That should be done by calling the HistoryManager's
end_session method."""
self.stop_now = True
self.history_manager.save_flag.set()
self.join()
|
python
|
{
"resource": ""
}
|
q280324
|
_get_num_cpus
|
test
|
def _get_num_cpus():
"""Return the number of CPUs on the system"""
# we try to determine num CPUs by using different approaches.
# SC_NPROCESSORS_ONLN seems to be the safer and it is also
# used by multiprocessing module
try:
return os.sysconf("SC_NPROCESSORS_ONLN")
except ValueError:
# as a second fallback we try to parse /proc/cpuinfo
num = 0
f = open('/proc/cpuinfo', 'r')
try:
lines = f.readlines()
finally:
f.close()
for line in lines:
if line.lower().startswith('processor'):
num += 1
# unknown format (e.g. amrel/sparc architectures), see:
# http://code.google.com/p/psutil/issues/detail?id=200
# try to parse /proc/stat as a last resort
if num == 0:
f = open('/proc/stat', 'r')
try:
lines = f.readlines()
finally:
f.close()
search = re.compile('cpu\d')
for line in lines:
line = line.split(' ')[0]
if search.match(line):
num += 1
if num == 0:
raise RuntimeError("can't determine number of CPUs")
return num
|
python
|
{
"resource": ""
}
|
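For comparison, a hedged sketch of the simpler POSIX/stdlib calls this helper is built around; on most Linux systems they agree with the /proc parsing above:

import multiprocessing
import os

print(os.sysconf("SC_NPROCESSORS_ONLN"))  # the first approach tried above (POSIX only)
print(multiprocessing.cpu_count())        # the multiprocessing equivalent mentioned in the comment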
q280325
|
get_system_per_cpu_times
|
test
|
def get_system_per_cpu_times():
"""Return a list of namedtuple representing the CPU times
for every CPU available on the system.
"""
cpus = []
f = open('/proc/stat', 'r')
    # get rid of the first line, which refers to system-wide CPU stats
try:
f.readline()
for line in f.readlines():
if line.startswith('cpu'):
values = line.split()[1:8]
values = tuple([float(x) / _CLOCK_TICKS for x in values])
entry = nt_sys_cputimes(*values[:7])
cpus.append(entry)
return cpus
finally:
f.close()
|
python
|
{
"resource": ""
}
|
q280326
|
disk_partitions
|
test
|
def disk_partitions(all=False):
"""Return mounted disk partitions as a list of nameduples"""
phydevs = []
f = open("/proc/filesystems", "r")
try:
for line in f:
if not line.startswith("nodev"):
phydevs.append(line.strip())
finally:
f.close()
retlist = []
partitions = _psutil_linux.get_disk_partitions()
for partition in partitions:
device, mountpoint, fstype, opts = partition
if device == 'none':
device = ''
if not all:
if device == '' or fstype not in phydevs:
continue
ntuple = nt_partition(device, mountpoint, fstype, opts)
retlist.append(ntuple)
return retlist
|
python
|
{
"resource": ""
}
|
q280327
|
get_pid_list
|
test
|
def get_pid_list():
"""Returns a list of PIDs currently running on the system."""
pids = [int(x) for x in os.listdir('/proc') if x.isdigit()]
return pids
|
python
|
{
"resource": ""
}
|
q280328
|
nice_pair
|
test
|
def nice_pair(pair):
"""Make a nice string representation of a pair of numbers.
If the numbers are equal, just return the number, otherwise return the pair
with a dash between them, indicating the range.
"""
start, end = pair
if start == end:
return "%d" % start
else:
return "%d-%d" % (start, end)
|
python
|
{
"resource": ""
}
|
q280329
|
format_lines
|
test
|
def format_lines(statements, lines):
"""Nicely format a list of line numbers.
Format a list of line numbers for printing by coalescing groups of lines as
long as the lines represent consecutive statements. This will coalesce
even if there are gaps between statements.
For example, if `statements` is [1,2,3,4,5,10,11,12,13,14] and
`lines` is [1,2,5,10,11,13,14] then the result will be "1-2, 5-11, 13-14".
"""
pairs = []
i = 0
j = 0
start = None
statements = sorted(statements)
lines = sorted(lines)
while i < len(statements) and j < len(lines):
if statements[i] == lines[j]:
if start == None:
start = lines[j]
end = lines[j]
j += 1
elif start:
pairs.append((start, end))
start = None
i += 1
if start:
pairs.append((start, end))
ret = ', '.join(map(nice_pair, pairs))
return ret
|
python
|
{
"resource": ""
}
|
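A small usage sketch mirroring the example given in the docstring above (it assumes nice_pair and format_lines as defined in these snippets):

statements = [1, 2, 3, 4, 5, 10, 11, 12, 13, 14]
lines = [1, 2, 5, 10, 11, 13, 14]
print(format_lines(statements, lines))        # -> "1-2, 5-11, 13-14"
print(nice_pair((5, 5)), nice_pair((5, 11)))  # -> "5" and "5-11"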
q280330
|
short_stack
|
test
|
def short_stack():
"""Return a string summarizing the call stack."""
stack = inspect.stack()[:0:-1]
return "\n".join(["%30s : %s @%d" % (t[3],t[1],t[2]) for t in stack])
|
python
|
{
"resource": ""
}
|
q280331
|
expensive
|
test
|
def expensive(fn):
"""A decorator to cache the result of an expensive operation.
Only applies to methods with no arguments.
"""
attr = "_cache_" + fn.__name__
def _wrapped(self):
"""Inner fn that checks the cache."""
if not hasattr(self, attr):
setattr(self, attr, fn(self))
return getattr(self, attr)
return _wrapped
|
python
|
{
"resource": ""
}
|
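A hedged usage sketch of the `expensive` decorator above, applied to a no-argument method as its docstring requires (the `Report` class is purely illustrative):

class Report(object):
    @expensive
    def totals(self):
        print("computing...")  # printed only on the first call per instance
        return 42

r = Report()
r.totals()  # prints "computing...", returns 42
r.totals()  # returns the cached 42; the wrapped function is not called again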
q280332
|
join_regex
|
test
|
def join_regex(regexes):
"""Combine a list of regexes into one that matches any of them."""
if len(regexes) > 1:
return "|".join(["(%s)" % r for r in regexes])
elif regexes:
return regexes[0]
else:
return ""
|
python
|
{
"resource": ""
}
|
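A quick illustration of the three branches of `join_regex` above:

print(join_regex(["abc", r"\d+"]))  # -> "(abc)|(\d+)"  (each regex wrapped in a group)
print(join_regex(["abc"]))          # -> "abc"          (a single regex is returned as-is)
print(join_regex([]))               # -> ""             (an empty list gives an empty pattern)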
q280333
|
file_be_gone
|
test
|
def file_be_gone(path):
"""Remove a file, and don't get annoyed if it doesn't exist."""
try:
os.remove(path)
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOENT:
raise
|
python
|
{
"resource": ""
}
|
q280334
|
Hasher.update
|
test
|
def update(self, v):
"""Add `v` to the hash, recursively if needed."""
self.md5.update(to_bytes(str(type(v))))
if isinstance(v, string_class):
self.md5.update(to_bytes(v))
elif v is None:
pass
elif isinstance(v, (int, float)):
self.md5.update(to_bytes(str(v)))
elif isinstance(v, (tuple, list)):
for e in v:
self.update(e)
elif isinstance(v, dict):
keys = v.keys()
for k in sorted(keys):
self.update(k)
self.update(v[k])
else:
for k in dir(v):
if k.startswith('__'):
continue
a = getattr(v, k)
if inspect.isroutine(a):
continue
self.update(k)
self.update(a)
|
python
|
{
"resource": ""
}
|
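A self-contained, Python 3 flavored mini version of the same idea, sketched for illustration only (it is not the coverage.py Hasher, which also hashes plain object attributes and relies on the module's `to_bytes`/`string_class` helpers); sorting dict keys makes the digest independent of insertion order:

import hashlib

def stable_digest(v, md5=None):
    """Recursively hash strings, numbers, lists and dicts in a stable order."""
    md5 = md5 or hashlib.md5()
    md5.update(str(type(v)).encode("utf8"))
    if isinstance(v, str):
        md5.update(v.encode("utf8"))
    elif isinstance(v, (int, float)):
        md5.update(str(v).encode("utf8"))
    elif isinstance(v, (tuple, list)):
        for e in v:
            stable_digest(e, md5)
    elif isinstance(v, dict):
        for k in sorted(v):  # sorted keys -> order-independent digest
            stable_digest(k, md5)
            stable_digest(v[k], md5)
    return md5.hexdigest()

print(stable_digest({"b": 2, "a": [1, 2]}) == stable_digest({"a": [1, 2], "b": 2}))  # True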
q280335
|
ClusterManager.update_profiles
|
test
|
def update_profiles(self):
"""List all profiles in the ipython_dir and cwd.
"""
for path in [get_ipython_dir(), os.getcwdu()]:
for profile in list_profiles_in(path):
pd = self.get_profile_dir(profile, path)
if profile not in self.profiles:
self.log.debug("Adding cluster profile '%s'" % profile)
self.profiles[profile] = {
'profile': profile,
'profile_dir': pd,
'status': 'stopped'
}
|
python
|
{
"resource": ""
}
|
q280336
|
ClusterManager.start_cluster
|
test
|
def start_cluster(self, profile, n=None):
"""Start a cluster for a given profile."""
self.check_profile(profile)
data = self.profiles[profile]
if data['status'] == 'running':
raise web.HTTPError(409, u'cluster already running')
cl, esl, default_n = self.build_launchers(data['profile_dir'])
n = n if n is not None else default_n
def clean_data():
data.pop('controller_launcher',None)
data.pop('engine_set_launcher',None)
data.pop('n',None)
data['status'] = 'stopped'
def engines_stopped(r):
self.log.debug('Engines stopped')
if cl.running:
cl.stop()
clean_data()
esl.on_stop(engines_stopped)
def controller_stopped(r):
self.log.debug('Controller stopped')
if esl.running:
esl.stop()
clean_data()
cl.on_stop(controller_stopped)
dc = ioloop.DelayedCallback(lambda: cl.start(), 0, self.loop)
dc.start()
dc = ioloop.DelayedCallback(lambda: esl.start(n), 1000*self.delay, self.loop)
dc.start()
self.log.debug('Cluster started')
data['controller_launcher'] = cl
data['engine_set_launcher'] = esl
data['n'] = n
data['status'] = 'running'
return self.profile_info(profile)
|
python
|
{
"resource": ""
}
|
q280337
|
ClusterManager.stop_cluster
|
test
|
def stop_cluster(self, profile):
"""Stop a cluster for a given profile."""
self.check_profile(profile)
data = self.profiles[profile]
if data['status'] == 'stopped':
raise web.HTTPError(409, u'cluster not running')
data = self.profiles[profile]
cl = data['controller_launcher']
esl = data['engine_set_launcher']
if cl.running:
cl.stop()
if esl.running:
esl.stop()
# Return a temp info dict, the real one is updated in the on_stop
# logic above.
result = {
'profile': data['profile'],
'profile_dir': data['profile_dir'],
'status': 'stopped'
}
return result
|
python
|
{
"resource": ""
}
|
q280338
|
_find_cmd
|
test
|
def _find_cmd(cmd):
"""Find the full path to a .bat or .exe using the win32api module."""
try:
from win32api import SearchPath
except ImportError:
raise ImportError('you need to have pywin32 installed for this to work')
else:
PATH = os.environ['PATH']
extensions = ['.exe', '.com', '.bat', '.py']
path = None
for ext in extensions:
try:
path = SearchPath(PATH, cmd + ext)[0]
except:
pass
if path is None:
raise OSError("command %r not found" % cmd)
else:
return path
|
python
|
{
"resource": ""
}
|
q280339
|
_system_body
|
test
|
def _system_body(p):
"""Callback for _system."""
enc = DEFAULT_ENCODING
for line in read_no_interrupt(p.stdout).splitlines():
line = line.decode(enc, 'replace')
print(line, file=sys.stdout)
for line in read_no_interrupt(p.stderr).splitlines():
line = line.decode(enc, 'replace')
print(line, file=sys.stderr)
# Wait to finish for returncode
return p.wait()
|
python
|
{
"resource": ""
}
|
q280340
|
Reporter.find_code_units
|
test
|
def find_code_units(self, morfs):
"""Find the code units we'll report on.
`morfs` is a list of modules or filenames.
"""
morfs = morfs or self.coverage.data.measured_files()
file_locator = self.coverage.file_locator
self.code_units = code_unit_factory(morfs, file_locator)
if self.config.include:
patterns = prep_patterns(self.config.include)
filtered = []
for cu in self.code_units:
for pattern in patterns:
if fnmatch.fnmatch(cu.filename, pattern):
filtered.append(cu)
break
self.code_units = filtered
if self.config.omit:
patterns = prep_patterns(self.config.omit)
filtered = []
for cu in self.code_units:
for pattern in patterns:
if fnmatch.fnmatch(cu.filename, pattern):
break
else:
filtered.append(cu)
self.code_units = filtered
self.code_units.sort()
|
python
|
{
"resource": ""
}
|
q280341
|
Reporter.report_files
|
test
|
def report_files(self, report_fn, morfs, directory=None):
"""Run a reporting function on a number of morfs.
`report_fn` is called for each relative morf in `morfs`. It is called
as::
report_fn(code_unit, analysis)
where `code_unit` is the `CodeUnit` for the morf, and `analysis` is
the `Analysis` for the morf.
"""
self.find_code_units(morfs)
if not self.code_units:
raise CoverageException("No data to report.")
self.directory = directory
if self.directory and not os.path.exists(self.directory):
os.makedirs(self.directory)
for cu in self.code_units:
try:
report_fn(cu, self.coverage._analyze(cu))
except NoSource:
if not self.config.ignore_errors:
raise
except NotPython:
# Only report errors for .py files, and only if we didn't
# explicitly suppress those errors.
if cu.should_be_python() and not self.config.ignore_errors:
raise
|
python
|
{
"resource": ""
}
|
q280342
|
raises
|
test
|
def raises(*exceptions):
"""Test must raise one of expected exceptions to pass.
Example use::
@raises(TypeError, ValueError)
def test_raises_type_error():
raise TypeError("This test passes")
@raises(Exception)
def test_that_fails_by_passing():
pass
If you want to test many assertions about exceptions in a single test,
you may want to use `assert_raises` instead.
"""
valid = ' or '.join([e.__name__ for e in exceptions])
def decorate(func):
name = func.__name__
def newfunc(*arg, **kw):
try:
func(*arg, **kw)
except exceptions:
pass
except:
raise
else:
message = "%s() did not raise %s" % (name, valid)
raise AssertionError(message)
newfunc = make_decorator(func)(newfunc)
return newfunc
return decorate
|
python
|
{
"resource": ""
}
|
q280343
|
set_trace
|
test
|
def set_trace():
"""Call pdb.set_trace in the calling frame, first restoring
sys.stdout to the real output stream. Note that sys.stdout is NOT
reset to whatever it was before the call once pdb is done!
"""
import pdb
import sys
stdout = sys.stdout
sys.stdout = sys.__stdout__
pdb.Pdb().set_trace(sys._getframe().f_back)
|
python
|
{
"resource": ""
}
|
q280344
|
timed
|
test
|
def timed(limit):
"""Test must finish within specified time limit to pass.
Example use::
@timed(.1)
def test_that_fails():
time.sleep(.2)
"""
def decorate(func):
def newfunc(*arg, **kw):
start = time.time()
func(*arg, **kw)
end = time.time()
if end - start > limit:
raise TimeExpired("Time limit (%s) exceeded" % limit)
newfunc = make_decorator(func)(newfunc)
return newfunc
return decorate
|
python
|
{
"resource": ""
}
|
q280345
|
InteractiveShellApp.init_extensions
|
test
|
def init_extensions(self):
"""Load all IPython extensions in IPythonApp.extensions.
This uses the :meth:`ExtensionManager.load_extensions` to load all
the extensions listed in ``self.extensions``.
"""
try:
self.log.debug("Loading IPython extensions...")
extensions = self.default_extensions + self.extensions
for ext in extensions:
try:
self.log.info("Loading IPython extension: %s" % ext)
self.shell.extension_manager.load_extension(ext)
except:
self.log.warn("Error in loading extension: %s" % ext +
"\nCheck your config files in %s" % self.profile_dir.location
)
self.shell.showtraceback()
except:
self.log.warn("Unknown error in loading extensions:")
self.shell.showtraceback()
|
python
|
{
"resource": ""
}
|
q280346
|
InteractiveShellApp.init_code
|
test
|
def init_code(self):
"""run the pre-flight code, specified via exec_lines"""
self._run_startup_files()
self._run_exec_lines()
self._run_exec_files()
self._run_cmd_line_code()
self._run_module()
        # flush output, so it won't be attached to the first cell
sys.stdout.flush()
sys.stderr.flush()
# Hide variables defined here from %who etc.
self.shell.user_ns_hidden.update(self.shell.user_ns)
|
python
|
{
"resource": ""
}
|
q280347
|
InteractiveShellApp._run_exec_lines
|
test
|
def _run_exec_lines(self):
"""Run lines of code in IPythonApp.exec_lines in the user's namespace."""
if not self.exec_lines:
return
try:
self.log.debug("Running code from IPythonApp.exec_lines...")
for line in self.exec_lines:
try:
self.log.info("Running code in user namespace: %s" %
line)
self.shell.run_cell(line, store_history=False)
except:
self.log.warn("Error in executing line in user "
"namespace: %s" % line)
self.shell.showtraceback()
except:
self.log.warn("Unknown error in handling IPythonApp.exec_lines:")
self.shell.showtraceback()
|
python
|
{
"resource": ""
}
|
q280348
|
InteractiveShellApp._run_startup_files
|
test
|
def _run_startup_files(self):
"""Run files from profile startup directory"""
startup_dir = self.profile_dir.startup_dir
startup_files = glob.glob(os.path.join(startup_dir, '*.py'))
startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
if not startup_files:
return
self.log.debug("Running startup files from %s...", startup_dir)
try:
for fname in sorted(startup_files):
self._exec_file(fname)
except:
self.log.warn("Unknown error in handling startup files:")
self.shell.showtraceback()
|
python
|
{
"resource": ""
}
|
q280349
|
InteractiveShellApp._run_exec_files
|
test
|
def _run_exec_files(self):
"""Run files from IPythonApp.exec_files"""
if not self.exec_files:
return
self.log.debug("Running files in IPythonApp.exec_files...")
try:
for fname in self.exec_files:
self._exec_file(fname)
except:
self.log.warn("Unknown error in handling IPythonApp.exec_files:")
self.shell.showtraceback()
|
python
|
{
"resource": ""
}
|
q280350
|
InteractiveShellApp._run_cmd_line_code
|
test
|
def _run_cmd_line_code(self):
"""Run code or file specified at the command-line"""
if self.code_to_run:
line = self.code_to_run
try:
self.log.info("Running code given at command line (c=): %s" %
line)
self.shell.run_cell(line, store_history=False)
except:
self.log.warn("Error in executing line in user namespace: %s" %
line)
self.shell.showtraceback()
# Like Python itself, ignore the second if the first of these is present
elif self.file_to_run:
fname = self.file_to_run
try:
self._exec_file(fname)
except:
self.log.warn("Error in executing file in user namespace: %s" %
fname)
self.shell.showtraceback()
|
python
|
{
"resource": ""
}
|
q280351
|
InteractiveShellApp._run_module
|
test
|
def _run_module(self):
"""Run module specified at the command-line."""
if self.module_to_run:
# Make sure that the module gets a proper sys.argv as if it were
# run using `python -m`.
save_argv = sys.argv
sys.argv = [sys.executable] + self.extra_args
try:
self.shell.safe_run_module(self.module_to_run,
self.shell.user_ns)
finally:
sys.argv = save_argv
|
python
|
{
"resource": ""
}
|
q280352
|
generic
|
test
|
def generic(func):
"""Create a simple generic function"""
_sentinel = object()
def _by_class(*args, **kw):
cls = args[0].__class__
for t in type(cls.__name__, (cls,object), {}).__mro__:
f = _gbt(t, _sentinel)
if f is not _sentinel:
return f(*args, **kw)
else:
return func(*args, **kw)
_by_type = {object: func}
try:
_by_type[InstanceType] = _by_class
except NameError: # Python 3
pass
_gbt = _by_type.get
def when_type(*types):
"""Decorator to add a method that will be called for the given types"""
for t in types:
if not isinstance(t, classtypes):
raise TypeError(
"%r is not a type or class" % (t,)
)
def decorate(f):
for t in types:
if _by_type.setdefault(t,f) is not f:
raise TypeError(
"%r already has method for type %r" % (func, t)
)
return f
return decorate
_by_object = {}
_gbo = _by_object.get
def when_object(*obs):
"""Decorator to add a method to be called for the given object(s)"""
def decorate(f):
for o in obs:
if _by_object.setdefault(id(o), (o,f))[1] is not f:
raise TypeError(
"%r already has method for object %r" % (func, o)
)
return f
return decorate
def dispatch(*args, **kw):
f = _gbo(id(args[0]), _sentinel)
if f is _sentinel:
for t in type(args[0]).__mro__:
f = _gbt(t, _sentinel)
if f is not _sentinel:
return f(*args, **kw)
else:
return func(*args, **kw)
else:
return f[1](*args, **kw)
dispatch.__name__ = func.__name__
dispatch.__dict__ = func.__dict__.copy()
dispatch.__doc__ = func.__doc__
dispatch.__module__ = func.__module__
dispatch.when_type = when_type
dispatch.when_object = when_object
dispatch.default = func
dispatch.has_object = lambda o: id(o) in _by_object
dispatch.has_type = lambda t: t in _by_type
return dispatch
|
python
|
{
"resource": ""
}
|
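A hedged usage sketch of the dispatcher returned by `generic` above (it assumes the module-level names the snippet references, such as `classtypes`; the `describe` function and `marker` object are illustrative):

@generic
def describe(obj):
    return "something else"  # default implementation

@describe.when_type(int)
def describe_int(obj):
    return "an int: %d" % obj

marker = object()

@describe.when_object(marker)
def describe_marker(obj):
    return "the marker object"

print(describe(3))       # -> "an int: 3"         (dispatched by type)
print(describe(marker))  # -> "the marker object"  (dispatched by object identity)
print(describe("hi"))    # -> "something else"     (falls back to the default)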
q280353
|
data_filename
|
test
|
def data_filename(fname, pkgdir=""):
"""Return the path to a data file of ours.
The file is searched for on `STATIC_PATH`, and the first place it's found,
is returned.
Each directory in `STATIC_PATH` is searched as-is, and also, if `pkgdir`
is provided, at that subdirectory.
"""
for static_dir in STATIC_PATH:
static_filename = os.path.join(static_dir, fname)
if os.path.exists(static_filename):
return static_filename
if pkgdir:
static_filename = os.path.join(static_dir, pkgdir, fname)
if os.path.exists(static_filename):
return static_filename
raise CoverageException("Couldn't find static file %r" % fname)
|
python
|
{
"resource": ""
}
|
q280354
|
data
|
test
|
def data(fname):
"""Return the contents of a data file of ours."""
data_file = open(data_filename(fname))
try:
return data_file.read()
finally:
data_file.close()
|
python
|
{
"resource": ""
}
|
q280355
|
escape
|
test
|
def escape(t):
    """HTML-escape the text in `t`."""
    return (t
            # Convert HTML special chars into HTML entities.
            .replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
            .replace("'", "&#39;").replace('"', "&quot;")
            # Convert runs of spaces: "......" -> "&nbsp;.&nbsp;.&nbsp;."
            .replace("  ", "&nbsp; ")
            # To deal with odd-length runs, convert the final pair of spaces
            # so that "....." -> "&nbsp;.&nbsp;&nbsp;."
            .replace("  ", "&nbsp; ")
            )
|
python
|
{
"resource": ""
}
|
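Two quick examples of what `escape` above produces; the double replace turns each pair of spaces into `&nbsp;` plus a space, so runs of whitespace survive in HTML:

print(escape('a < b & "c"'))  # -> a &lt; b &amp; &quot;c&quot;
print(escape("x    y"))       # -> x&nbsp; &nbsp; y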
q280356
|
HtmlReporter.report
|
test
|
def report(self, morfs):
"""Generate an HTML report for `morfs`.
`morfs` is a list of modules or filenames.
"""
assert self.config.html_dir, "must give a directory for html reporting"
# Read the status data.
self.status.read(self.config.html_dir)
# Check that this run used the same settings as the last run.
m = Hasher()
m.update(self.config)
these_settings = m.digest()
if self.status.settings_hash() != these_settings:
self.status.reset()
self.status.set_settings_hash(these_settings)
# The user may have extra CSS they want copied.
if self.config.extra_css:
self.extra_css = os.path.basename(self.config.extra_css)
# Process all the files.
self.report_files(self.html_file, morfs, self.config.html_dir)
if not self.files:
raise CoverageException("No data to report.")
# Write the index file.
self.index_file()
self.make_local_static_report_files()
return self.totals.pc_covered
|
python
|
{
"resource": ""
}
|
q280357
|
HtmlReporter.make_local_static_report_files
|
test
|
def make_local_static_report_files(self):
"""Make local instances of static files for HTML report."""
# The files we provide must always be copied.
for static, pkgdir in self.STATIC_FILES:
shutil.copyfile(
data_filename(static, pkgdir),
os.path.join(self.directory, static)
)
# The user may have extra CSS they want copied.
if self.extra_css:
shutil.copyfile(
self.config.extra_css,
os.path.join(self.directory, self.extra_css)
)
|
python
|
{
"resource": ""
}
|
q280358
|
HtmlReporter.write_html
|
test
|
def write_html(self, fname, html):
"""Write `html` to `fname`, properly encoded."""
fout = open(fname, "wb")
try:
fout.write(html.encode('ascii', 'xmlcharrefreplace'))
finally:
fout.close()
|
python
|
{
"resource": ""
}
|
q280359
|
HtmlReporter.file_hash
|
test
|
def file_hash(self, source, cu):
"""Compute a hash that changes if the file needs to be re-reported."""
m = Hasher()
m.update(source)
self.coverage.data.add_to_hash(cu.filename, m)
return m.digest()
|
python
|
{
"resource": ""
}
|
q280360
|
HtmlReporter.index_file
|
test
|
def index_file(self):
"""Write the index.html file for this report."""
index_tmpl = Templite(
data("index.html"), self.template_globals
)
self.totals = sum([f['nums'] for f in self.files])
html = index_tmpl.render({
'arcs': self.arcs,
'extra_css': self.extra_css,
'files': self.files,
'totals': self.totals,
})
if sys.version_info < (3, 0):
html = html.decode("utf-8")
self.write_html(
os.path.join(self.directory, "index.html"),
html
)
# Write the latest hashes for next time.
self.status.write(self.directory)
|
python
|
{
"resource": ""
}
|
q280361
|
HtmlStatus.read
|
test
|
def read(self, directory):
"""Read the last status in `directory`."""
usable = False
try:
status_file = os.path.join(directory, self.STATUS_FILE)
fstatus = open(status_file, "rb")
try:
status = pickle.load(fstatus)
finally:
fstatus.close()
except (IOError, ValueError):
usable = False
else:
usable = True
if status['format'] != self.STATUS_FORMAT:
usable = False
elif status['version'] != coverage.__version__:
usable = False
if usable:
self.files = status['files']
self.settings = status['settings']
else:
self.reset()
|
python
|
{
"resource": ""
}
|
q280362
|
HtmlStatus.write
|
test
|
def write(self, directory):
"""Write the current status to `directory`."""
status_file = os.path.join(directory, self.STATUS_FILE)
status = {
'format': self.STATUS_FORMAT,
'version': coverage.__version__,
'settings': self.settings,
'files': self.files,
}
fout = open(status_file, "wb")
try:
pickle.dump(status, fout)
finally:
fout.close()
|
python
|
{
"resource": ""
}
|
q280363
|
sort_compare
|
test
|
def sort_compare(lst1, lst2, inplace=1):
"""Sort and compare two lists.
By default it does it in place, thus modifying the lists. Use inplace = 0
to avoid that (at the cost of temporary copy creation)."""
if not inplace:
lst1 = lst1[:]
lst2 = lst2[:]
lst1.sort(); lst2.sort()
return lst1 == lst2
|
python
|
{
"resource": ""
}
|
q280364
|
get_slice
|
test
|
def get_slice(seq, start=0, stop=None, step=1):
"""Get a slice of a sequence with variable step. Specify start,stop,step."""
if stop == None:
stop = len(seq)
item = lambda i: seq[i]
return map(item,xrange(start,stop,step))
|
python
|
{
"resource": ""
}
|
q280365
|
chop
|
test
|
def chop(seq, size):
"""Chop a sequence into chunks of the given size."""
chunk = lambda i: seq[i:i+size]
return map(chunk,xrange(0,len(seq),size))
|
python
|
{
"resource": ""
}
|
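A usage sketch for `get_slice` and `chop` above; both are Python 2 era helpers (they rely on `xrange` and a list-returning `map`):

print(get_slice(range(10), start=1, stop=9, step=2))  # -> [1, 3, 5, 7]
print(chop("abcdefg", 3))                             # -> ['abc', 'def', 'g']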
q280366
|
read_config
|
test
|
def read_config():
"""Read configuration from setup.cfg."""
# XXX modifies global state, which is kind of evil
config = ConfigParser.ConfigParser()
config.read(['setup.cfg'])
if not config.has_section('check-manifest'):
return
if (config.has_option('check-manifest', 'ignore-default-rules')
and config.getboolean('check-manifest', 'ignore-default-rules')):
del IGNORE[:]
if config.has_option('check-manifest', 'ignore'):
patterns = [p.strip() for p in config.get('check-manifest',
'ignore').splitlines()]
IGNORE.extend(p for p in patterns if p)
|
python
|
{
"resource": ""
}
|
q280367
|
read_manifest
|
test
|
def read_manifest():
"""Read existing configuration from MANIFEST.in.
We use that to ignore anything the MANIFEST.in ignores.
"""
# XXX modifies global state, which is kind of evil
if not os.path.isfile('MANIFEST.in'):
return
with open('MANIFEST.in') as manifest:
contents = manifest.read()
ignore, ignore_regexps = _get_ignore_from_manifest(contents)
IGNORE.extend(ignore)
IGNORE_REGEXPS.extend(ignore_regexps)
|
python
|
{
"resource": ""
}
|
q280368
|
_glob_to_regexp
|
test
|
def _glob_to_regexp(pat):
"""Compile a glob pattern into a regexp.
We need to do this because fnmatch allows * to match /, which we
don't want. E.g. an MANIFEST.in exclude of 'dirname/*css' should
match 'dirname/foo.css' but not 'dirname/subdir/bar.css'.
"""
pat = fnmatch.translate(pat)
# Note that distutils in Python 2.6 has a buggy glob_to_re in
# distutils.filelist -- it converts '*.cfg' to '[^/]*cfg' instead
# of '[^\\]*cfg' on Windows.
sep = r'\\\\' if os.path.sep == '\\' else os.path.sep
return re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^%s]' % sep, pat)
|
python
|
{
"resource": ""
}
|
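A short check of the behaviour described in the docstring above, using its own 'dirname/*css' example:

import re

rx = re.compile(_glob_to_regexp('dirname/*css'))
print(bool(rx.match('dirname/foo.css')))         # True
print(bool(rx.match('dirname/subdir/bar.css')))  # False: '*' no longer matches '/'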
q280369
|
file_matches
|
test
|
def file_matches(filename, patterns):
"""Does this filename match any of the patterns?"""
return any(fnmatch.fnmatch(filename, pat) for pat in patterns)
|
python
|
{
"resource": ""
}
|
q280370
|
Git.get_versioned_files
|
test
|
def get_versioned_files():
"""List all files versioned by git in the current directory."""
# Git for Windows uses UTF-8 instead of the locale encoding.
# Regular Git on sane POSIX systems uses the locale encoding
encoding = 'UTF-8' if sys.platform == 'win32' else None
output = run(['git', 'ls-files', '-z'], encoding=encoding)
return add_directories(output.split('\0')[:-1])
|
python
|
{
"resource": ""
}
|
q280371
|
MultiKernelManager.start_kernel
|
test
|
def start_kernel(self, **kwargs):
"""Start a new kernel."""
kernel_id = unicode(uuid.uuid4())
# use base KernelManager for each Kernel
km = self.kernel_manager_factory(connection_file=os.path.join(
self.connection_dir, "kernel-%s.json" % kernel_id),
config=self.config,
)
km.start_kernel(**kwargs)
# start just the shell channel, needed for graceful restart
km.start_channels(shell=True, sub=False, stdin=False, hb=False)
self._kernels[kernel_id] = km
return kernel_id
|
python
|
{
"resource": ""
}
|
q280372
|
MultiKernelManager.shutdown_kernel
|
test
|
def shutdown_kernel(self, kernel_id):
"""Shutdown a kernel by its kernel uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel to shutdown.
"""
self.get_kernel(kernel_id).shutdown_kernel()
del self._kernels[kernel_id]
|
python
|
{
"resource": ""
}
|
q280373
|
MultiKernelManager.kill_kernel
|
test
|
def kill_kernel(self, kernel_id):
"""Kill a kernel by its kernel uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel to kill.
"""
self.get_kernel(kernel_id).kill_kernel()
del self._kernels[kernel_id]
|
python
|
{
"resource": ""
}
|
q280374
|
MultiKernelManager.get_kernel
|
test
|
def get_kernel(self, kernel_id):
"""Get the single KernelManager object for a kernel by its uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel.
"""
km = self._kernels.get(kernel_id)
if km is not None:
return km
else:
raise KeyError("Kernel with id not found: %s" % kernel_id)
|
python
|
{
"resource": ""
}
|
q280375
|
MultiKernelManager.get_kernel_ports
|
test
|
def get_kernel_ports(self, kernel_id):
"""Return a dictionary of ports for a kernel.
Parameters
==========
kernel_id : uuid
The id of the kernel.
Returns
=======
port_dict : dict
A dict of key, value pairs where the keys are the names
(stdin_port,iopub_port,shell_port) and the values are the
integer port numbers for those channels.
"""
# this will raise a KeyError if not found:
km = self.get_kernel(kernel_id)
return dict(shell_port=km.shell_port,
iopub_port=km.iopub_port,
stdin_port=km.stdin_port,
hb_port=km.hb_port,
)
|
python
|
{
"resource": ""
}
|
q280376
|
MappingKernelManager.notebook_for_kernel
|
test
|
def notebook_for_kernel(self, kernel_id):
"""Return the notebook_id for a kernel_id or None."""
notebook_ids = [k for k, v in self._notebook_mapping.iteritems() if v == kernel_id]
if len(notebook_ids) == 1:
return notebook_ids[0]
else:
return None
|
python
|
{
"resource": ""
}
|
q280377
|
MappingKernelManager.start_kernel
|
test
|
def start_kernel(self, notebook_id=None, **kwargs):
"""Start a kernel for a notebok an return its kernel_id.
Parameters
----------
notebook_id : uuid
The uuid of the notebook to associate the new kernel with. If this
is not None, this kernel will be persistent whenever the notebook
requests a kernel.
"""
kernel_id = self.kernel_for_notebook(notebook_id)
if kernel_id is None:
kwargs['extra_arguments'] = self.kernel_argv
kernel_id = super(MappingKernelManager, self).start_kernel(**kwargs)
self.set_kernel_for_notebook(notebook_id, kernel_id)
self.log.info("Kernel started: %s" % kernel_id)
self.log.debug("Kernel args: %r" % kwargs)
else:
self.log.info("Using existing kernel: %s" % kernel_id)
return kernel_id
|
python
|
{
"resource": ""
}
|
q280378
|
MappingKernelManager.shutdown_kernel
|
test
|
def shutdown_kernel(self, kernel_id):
"""Shutdown a kernel and remove its notebook association."""
self._check_kernel_id(kernel_id)
super(MappingKernelManager, self).shutdown_kernel(kernel_id)
self.delete_mapping_for_kernel(kernel_id)
self.log.info("Kernel shutdown: %s" % kernel_id)
|
python
|
{
"resource": ""
}
|
q280379
|
MappingKernelManager.interrupt_kernel
|
test
|
def interrupt_kernel(self, kernel_id):
"""Interrupt a kernel."""
self._check_kernel_id(kernel_id)
super(MappingKernelManager, self).interrupt_kernel(kernel_id)
self.log.info("Kernel interrupted: %s" % kernel_id)
|
python
|
{
"resource": ""
}
|
q280380
|
MappingKernelManager.restart_kernel
|
test
|
def restart_kernel(self, kernel_id):
"""Restart a kernel while keeping clients connected."""
self._check_kernel_id(kernel_id)
km = self.get_kernel(kernel_id)
km.restart_kernel()
self.log.info("Kernel restarted: %s" % kernel_id)
return kernel_id
# the following remains, in case the KM restart machinery is
# somehow unacceptable
# Get the notebook_id to preserve the kernel/notebook association.
notebook_id = self.notebook_for_kernel(kernel_id)
# Create the new kernel first so we can move the clients over.
new_kernel_id = self.start_kernel()
# Now kill the old kernel.
self.kill_kernel(kernel_id)
# Now save the new kernel/notebook association. We have to save it
# after the old kernel is killed as that will delete the mapping.
self.set_kernel_for_notebook(notebook_id, new_kernel_id)
self.log.info("Kernel restarted: %s" % new_kernel_id)
return new_kernel_id
|
python
|
{
"resource": ""
}
|
q280381
|
MappingKernelManager.create_iopub_stream
|
test
|
def create_iopub_stream(self, kernel_id):
"""Create a new iopub stream."""
self._check_kernel_id(kernel_id)
return super(MappingKernelManager, self).create_iopub_stream(kernel_id)
|
python
|
{
"resource": ""
}
|
q280382
|
MappingKernelManager.create_shell_stream
|
test
|
def create_shell_stream(self, kernel_id):
"""Create a new shell stream."""
self._check_kernel_id(kernel_id)
return super(MappingKernelManager, self).create_shell_stream(kernel_id)
|
python
|
{
"resource": ""
}
|
q280383
|
MappingKernelManager.create_hb_stream
|
test
|
def create_hb_stream(self, kernel_id):
"""Create a new hb stream."""
self._check_kernel_id(kernel_id)
return super(MappingKernelManager, self).create_hb_stream(kernel_id)
|
python
|
{
"resource": ""
}
|
q280384
|
ResetMixin.reset
|
test
|
def reset(self):
"""Reset all OneTimeProperty attributes that may have fired already."""
instdict = self.__dict__
classdict = self.__class__.__dict__
# To reset them, we simply remove them from the instance dict. At that
# point, it's as if they had never been computed. On the next access,
# the accessor function from the parent class will be called, simply
# because that's how the python descriptor protocol works.
for mname, mval in classdict.items():
if mname in instdict and isinstance(mval, OneTimeProperty):
delattr(self, mname)
|
python
|
{
"resource": ""
}
|
q280385
|
export_html
|
test
|
def export_html(html, filename, image_tag = None, inline = True):
""" Export the contents of the ConsoleWidget as HTML.
Parameters:
-----------
html : str,
A utf-8 encoded Python string containing the Qt HTML to export.
filename : str
The file to be saved.
image_tag : callable, optional (default None)
Used to convert images. See ``default_image_tag()`` for information.
inline : bool, optional [default True]
If True, include images as inline PNGs. Otherwise, include them as
links to external PNG files, mimicking web browsers' "Web Page,
Complete" behavior.
"""
if image_tag is None:
image_tag = default_image_tag
else:
image_tag = ensure_utf8(image_tag)
if inline:
path = None
else:
root,ext = os.path.splitext(filename)
path = root + "_files"
if os.path.isfile(path):
raise OSError("%s exists, but is not a directory." % path)
with open(filename, 'w') as f:
html = fix_html(html)
f.write(IMG_RE.sub(lambda x: image_tag(x, path = path, format = "png"),
html))
|
python
|
{
"resource": ""
}
|
q280386
|
export_xhtml
|
test
|
def export_xhtml(html, filename, image_tag=None):
""" Export the contents of the ConsoleWidget as XHTML with inline SVGs.
Parameters:
-----------
html : str,
A utf-8 encoded Python string containing the Qt HTML to export.
filename : str
The file to be saved.
image_tag : callable, optional (default None)
Used to convert images. See ``default_image_tag()`` for information.
"""
if image_tag is None:
image_tag = default_image_tag
else:
image_tag = ensure_utf8(image_tag)
with open(filename, 'w') as f:
# Hack to make xhtml header -- note that we are not doing any check for
# valid XML.
offset = html.find("<html>")
assert offset > -1, 'Invalid HTML string: no <html> tag.'
html = ('<html xmlns="http://www.w3.org/1999/xhtml">\n'+
html[offset+6:])
html = fix_html(html)
f.write(IMG_RE.sub(lambda x: image_tag(x, path = None, format = "svg"),
html))
|
python
|
{
"resource": ""
}
|
q280387
|
ensure_utf8
|
test
|
def ensure_utf8(image_tag):
"""wrapper for ensuring image_tag returns utf8-encoded str on Python 2"""
if py3compat.PY3:
# nothing to do on Python 3
return image_tag
def utf8_image_tag(*args, **kwargs):
s = image_tag(*args, **kwargs)
if isinstance(s, unicode):
s = s.encode('utf8')
return s
return utf8_image_tag
|
python
|
{
"resource": ""
}
|
q280388
|
fix_html
|
test
|
def fix_html(html):
""" Transforms a Qt-generated HTML string into a standards-compliant one.
Parameters:
-----------
html : str,
A utf-8 encoded Python string containing the Qt HTML.
"""
# A UTF-8 declaration is needed for proper rendering of some characters
# (e.g., indented commands) when viewing exported HTML on a local system
# (i.e., without seeing an encoding declaration in an HTTP header).
# C.f. http://www.w3.org/International/O-charset for details.
offset = html.find('<head>')
if offset > -1:
html = (html[:offset+6]+
'\n<meta http-equiv="Content-Type" '+
'content="text/html; charset=utf-8" />\n'+
html[offset+6:])
# Replace empty paragraphs tags with line breaks.
html = re.sub(EMPTY_P_RE, '<br/>', html)
return html
|
python
|
{
"resource": ""
}
|
q280389
|
HtmlExporter.export
|
test
|
def export(self):
""" Displays a dialog for exporting HTML generated by Qt's rich text
system.
Returns
-------
The name of the file that was saved, or None if no file was saved.
"""
parent = self.control.window()
dialog = QtGui.QFileDialog(parent, 'Save as...')
dialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
filters = [
'HTML with PNG figures (*.html *.htm)',
'XHTML with inline SVG figures (*.xhtml *.xml)'
]
dialog.setNameFilters(filters)
if self.filename:
dialog.selectFile(self.filename)
root,ext = os.path.splitext(self.filename)
if ext.lower() in ('.xml', '.xhtml'):
dialog.selectNameFilter(filters[-1])
if dialog.exec_():
self.filename = dialog.selectedFiles()[0]
choice = dialog.selectedNameFilter()
html = self.control.document().toHtml().encode('utf-8')
# Configure the exporter.
if choice.startswith('XHTML'):
exporter = export_xhtml
else:
# If there are PNGs, decide how to export them.
inline = self.inline_png
if inline is None and IMG_RE.search(html):
dialog = QtGui.QDialog(parent)
dialog.setWindowTitle('Save as...')
layout = QtGui.QVBoxLayout(dialog)
msg = "Exporting HTML with PNGs"
info = "Would you like inline PNGs (single large html " \
"file) or external image files?"
checkbox = QtGui.QCheckBox("&Don't ask again")
checkbox.setShortcut('D')
ib = QtGui.QPushButton("&Inline")
ib.setShortcut('I')
eb = QtGui.QPushButton("&External")
eb.setShortcut('E')
box = QtGui.QMessageBox(QtGui.QMessageBox.Question,
dialog.windowTitle(), msg)
box.setInformativeText(info)
box.addButton(ib, QtGui.QMessageBox.NoRole)
box.addButton(eb, QtGui.QMessageBox.YesRole)
layout.setSpacing(0)
layout.addWidget(box)
layout.addWidget(checkbox)
dialog.setLayout(layout)
dialog.show()
reply = box.exec_()
dialog.hide()
inline = (reply == 0)
if checkbox.checkState():
# Don't ask anymore; always use this choice.
self.inline_png = inline
exporter = lambda h, f, i: export_html(h, f, i, inline)
# Perform the export!
try:
return exporter(html, self.filename, self.image_tag)
except Exception, e:
msg = "Error exporting HTML to %s\n" % self.filename + str(e)
reply = QtGui.QMessageBox.warning(parent, 'Error', msg,
QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
return None
|
python
|
{
"resource": ""
}
|
q280390
|
get_unique_or_none
|
test
|
def get_unique_or_none(klass, *args, **kwargs):
""" Returns a unique instance of `klass` or None """
try:
return klass.objects.get(*args, **kwargs)
except klass.DoesNotExist:
return None
except klass.MultipleObjectsReturned:
return None
return None
|
python
|
{
"resource": ""
}
|
q280391
|
get_query_includes
|
test
|
def get_query_includes(tokenized_terms, search_fields):
"""
Builds a query for included terms in a text search.
"""
query = None
for term in tokenized_terms:
or_query = None
for field_name in search_fields:
q = Q(**{"%s__icontains" % field_name: term})
if or_query is None:
or_query = q
else:
or_query = or_query | q
if query is None:
query = or_query
else:
query = query & or_query
return query
|
python
|
{
"resource": ""
}
|
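A hedged sketch of the Q-object tree this builds: each term must match at least one field (OR across fields) and every term must match (AND across terms); the `Article` model and field names are illustrative, not from the snippet:

terms = ["foo", "bar"]
fields = ["title", "body"]
q = get_query_includes(terms, fields)
# q is roughly equivalent to:
#   (Q(title__icontains="foo") | Q(body__icontains="foo"))
#   & (Q(title__icontains="bar") | Q(body__icontains="bar"))
# and would typically be used as: Article.objects.filter(q)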
q280392
|
get_text_query
|
test
|
def get_text_query(query_string, search_fields):
"""
Builds a query for both included & excluded terms in a text search.
"""
include_terms, exclude_terms = get_text_tokenizer(query_string)
include_q = get_query_includes(include_terms, search_fields)
exclude_q = get_query_excludes(exclude_terms, search_fields)
query = None
if include_q and exclude_q:
query = include_q & ~exclude_q
elif not exclude_q:
query = include_q
else:
query = ~exclude_q
return query
|
python
|
{
"resource": ""
}
|
q280393
|
get_date_greater_query
|
test
|
def get_date_greater_query(days, date_field):
"""
Query for if date_field is within number of "days" ago.
"""
query = None
days = get_integer(days)
if days:
past = get_days_ago(days)
query = Q(**{"%s__gte" % date_field: past.isoformat()})
return query
|
python
|
{
"resource": ""
}
|
q280394
|
get_date_less_query
|
test
|
def get_date_less_query(days, date_field):
"""
Query for if date_field is within number of "days" from now.
"""
query = None
days = get_integer(days)
if days:
future = get_days_from_now(days)
query = Q(**{"%s__lte" % date_field: future.isoformat()})
return query
|
python
|
{
"resource": ""
}
|
q280395
|
get_null_or_blank_query
|
test
|
def get_null_or_blank_query(field=None):
"""
Query for null or blank field.
"""
if not field:
return field
null_q = get_null_query(field)
blank_q = get_blank_query(field)
return (null_q | blank_q)
|
python
|
{
"resource": ""
}
|
q280396
|
CaseInsensitiveQuerySet.case_insensitive
|
test
|
def case_insensitive(self, fields_dict):
"""
Converts queries to case insensitive for special fields.
"""
if hasattr(self.model, 'CASE_INSENSITIVE_FIELDS'):
for field in self.model.CASE_INSENSITIVE_FIELDS:
if field in fields_dict:
fields_dict[field + '__iexact'] = fields_dict[field]
del fields_dict[field]
|
python
|
{
"resource": ""
}
|
q280397
|
AttributeSelector.options
|
test
|
def options(self, parser, env):
"""Register command line options"""
parser.add_option("-a", "--attr",
dest="attr", action="append",
default=env.get('NOSE_ATTR'),
metavar="ATTR",
help="Run only tests that have attributes "
"specified by ATTR [NOSE_ATTR]")
# disable in < 2.4: eval can't take needed args
if compat_24:
parser.add_option("-A", "--eval-attr",
dest="eval_attr", metavar="EXPR", action="append",
default=env.get('NOSE_EVAL_ATTR'),
help="Run only tests for whose attributes "
"the Python expression EXPR evaluates "
"to True [NOSE_EVAL_ATTR]")
|
python
|
{
"resource": ""
}
|
q280398
|
AttributeSelector.validateAttrib
|
test
|
def validateAttrib(self, method, cls = None):
"""Verify whether a method has the required attributes
The method is considered a match if it matches all attributes
for any attribute group.
."""
# TODO: is there a need for case-sensitive value comparison?
any = False
for group in self.attribs:
match = True
for key, value in group:
attr = get_method_attr(method, cls, key)
if callable(value):
if not value(key, method, cls):
match = False
break
elif value is True:
# value must exist and be True
if not bool(attr):
match = False
break
elif value is False:
# value must not exist or be False
if bool(attr):
match = False
break
elif type(attr) in (list, tuple):
# value must be found in the list attribute
if not str(value).lower() in [str(x).lower()
for x in attr]:
match = False
break
else:
# value must match, convert to string and compare
if (value != attr
and str(value).lower() != str(attr).lower()):
match = False
break
any = any or match
if any:
# not True because we don't want to FORCE the selection of the
# item, only say that it is acceptable
return None
return False
|
python
|
{
"resource": ""
}
|
q280399
|
AttributeSelector.wantMethod
|
test
|
def wantMethod(self, method):
"""Accept the method if its attributes match.
"""
try:
cls = method.im_class
except AttributeError:
return False
return self.validateAttrib(method, cls)
|
python
|
{
"resource": ""
}
|