"__package__": mod_name.rpartition(".")[0],
})
def create_ns(init_globals):
return run_module(mod_name, init_globals, run_name)
try:
self.check_code_execution(create_ns, expected_ns)
finally:
self._del_pkg(pkg_dir, depth, mod_name)
class RunPathTestCase(unittest.TestCase, CodeExecutionMixin):
"""Unit tests for runpy.run_path"""
def _make_test_script(self, script_dir, script_basename, source=None):
if source is None:
source = example_source
return make_script(script_dir, script_basename, source)
def _check_script(self, script_name, expected_name, expected_file,
expected_argv0):
# First check is without run_name
def create_ns(init_globals):
return run_path(script_name, init_globals)
expected_ns = example_namespace.copy()
expected_ns.update({
"__name__": expected_name,
"__file__": expected_file,
"__package__": "",
"run_argv0": expected_argv0,
"run_name_in_sys_modules": True,
"module_in_sys_modules": True,
})
self.check_code_execution(create_ns, expected_ns)
# Second check makes sure run_name works in all cases
run_name = "prove.issue15230.is.fixed"
def create_ns(init_globals):
return run_path(script_name, init_globals, run_name)
expected_ns["__name__"] = run_name
expected_ns["__package__"] = run_name.rpartition(".")[0]
self.check_code_execution(create_ns, expected_ns)
def _check_import_error(self, script_name, msg):
msg = re.escape(msg)
self.assertRaisesRegex(ImportError, msg, run_path, script_name)
def test_basic_script(self):
with temp_dir() as script_dir:
mod_name = 'script'
script_name = self._make_test_script(script_dir, mod_name)
self._check_script(script_name, "<run_path>", script_name, script_name)
def test_script_compiled(self):
with temp_dir() as script_dir:
mod_name = 'script'
script_name = self._make_test_script(script_dir, mod_name)
compiled_name = py_compile.compile(script_name, doraise=True)
os.remove(script_name)
self._check_script(compiled_name, "<run_path>", compiled_name, compiled_name)
def test_directory(self):
with temp_dir() as script_dir:
mod_name = '__main__'
script_name = self._make_test_script(script_dir, mod_name)
self._check_script(script_dir, "<run_path>", script_name, script_dir)
def test_directory_compiled(self):
with temp_dir() as script_dir:
mod_name = '__main__'
script_name = self._make_test_script(script_dir, mod_name)
compiled_name = py_compile.compile(script_name, doraise=True)
os.remove(script_name)
if not sys.dont_write_bytecode:
legacy_pyc = make_legacy_pyc(script_name)
self._check_script(script_dir, "<run_path>", legacy_pyc, script_dir)
def test_directory_error(self):
with temp_dir() as script_dir:
mod_name = 'not_main'
script_name = self._make_test_script(script_dir, mod_name)
msg = "can't find '__main__' module in %r" % script_dir
self._check_import_error(script_dir, msg)
def test_zipfile(self):
with temp_dir() as script_dir:
mod_name = '__main__'
script_name = self._make_test_script(script_dir, mod_name)
zip_name, fname = make_zip_script(script_dir, 'test_zip', script_name)
self._check_script(zip_name, "<run_path>", fname, zip_name)
def test_zipfile_compiled(self):
with temp_dir() as script_dir:
mod_name = '__main__'
script_name = self._make_test_script(script_dir, mod_name)
compiled_name = py_compile.compile(script_name, doraise=True)
zip_name, fname = make_zip_script(script_dir, 'test_zip',
compiled_name)
self._check_script(zip_name, "<run_path>", fname, zip_name)
def test_zipfile_error(self):
with temp_dir() as script_dir:
mod_name = 'not_main'
script_name = self._make_test_script(script_dir, mod_name)
zip_name, fname = make_zip_script(script_dir, 'test_zip', script_name)
msg = "can't find '__main__' module in %r" % zip_name
self._check_import_error(zip_name, msg)
def test_main_recursion_error(self):
with temp_dir() as script_dir, temp_dir() as dummy_dir:
mod_name = '__main__'
source = ("import runpy\n"
"runpy.run_path(%r)\n") % dummy_dir
script_name = self._make_test_script(script_dir, mod_name, source)
zip_name, fname = make_zip_script(script_dir, 'test_zip', script_name)
msg = "recursion depth exceeded"
self.assertRaisesRegex(RuntimeError, msg, run_path, zip_name)
def test_encoding(self):
with temp_dir() as script_dir:
filename = os.path.join(script_dir, 'script.py')
with open(filename, 'w', encoding='latin1') as f:
f.write("""
#coding:latin1
s = "non-ASCII: h\xe9"
""")
result = run_path(filename)
self.assertEqual(result['s'], "non-ASCII: h\xe9")
def test_main():
run_unittest(
ExecutionLayerTestCase,
RunModuleTestCase,
RunPathTestCase
)
if __name__ == "__main__":
test_main()
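# For reference, a minimal sketch (not part of the test suite) of the
# behaviour these tests exercise; the script path is illustrative:
#
#     ns = run_path('/tmp/script.py')       # returns the module globals dict
#     ns['__name__']                        # '<run_path>' unless run_name given
#     run_path('/tmp/script.py', run_name='custom')['__name__']  # 'custom'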
import sys
import pytest
from setuptools_scm.utils import do
from setuptools_scm import PRETEND_KEY, PRETEND_KEY_NAMED
@pytest.fixture
def wd(wd):
wd("git init")
wd("git config user.email test@example.com")
wd('git config user.name "a test"')
wd.add_command = "git add ."
wd.commit_command = "git commit -m test-{reason}"
return wd
def test_pyproject_support(tmpdir, monkeypatch):
pytest.importorskip("toml")
monkeypatch.delenv("SETUPTOOLS_SCM_DEBUG")
pkg = tmpdir.ensure("package", dir=42)
pkg.join("pyproject.toml").write(
"""[tool.setuptools_scm]
fallback_version = "12.34"
"""
)
pkg.join("setup.py").write("__import__('setuptools').setup()")
res = do((sys.executable, "setup.py", "--version"), pkg)
assert res == "12.34"
def test_pyproject_support_with_git(tmpdir, monkeypatch, wd):
pytest.importorskip("toml")
pkg = tmpdir.join("wd")
pkg.join("pyproject.toml").write("""[tool.setuptools_scm]""")
pkg.join("setup.py").write(
"__import__('setuptools').setup(name='setuptools_scm_example')"
)
res = do((sys.executable, "setup.py", "--version"), pkg)
assert res.endswith("0.1.dev0")
def test_pretend_version(tmpdir, monkeypatch, wd):
monkeypatch.setenv(PRETEND_KEY, "1.0.0")
assert wd.get_version() == "1.0.0"
assert wd.get_version(dist_name="ignored") == "1.0.0"
def test_pretend_version_named_pyproject_integration(tmpdir, monkeypatch, wd):
test_pyproject_support_with_git(tmpdir, monkeypatch, wd)
monkeypatch.setenv(
PRETEND_KEY_NAMED.format(name="setuptools_scm_example".upper()), "3.2.1"
)
res = do((sys.executable, "setup.py", "--version"), tmpdir / "wd")
assert res.endswith("3.2.1")
def test_pretend_version_named(tmpdir, monkeypatch, wd):
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test".upper()), "1.0.0")
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test2".upper()), "2.0.0")
assert wd.get_version(dist_name="test") == "1.0.0"
assert wd.get_version(dist_name="test2") == "2.0.0"
def test_pretend_version_name_takes_precedence(tmpdir, monkeypatch, wd):
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test".upper()), "1.0.0")
monkeypatch.setenv(PRETEND_KEY, "2.0.0")
assert wd.get_version(dist_name="test") == "1.0.0"
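# The same pretend-version hooks can be exercised outside pytest by exporting
# the environment variables behind PRETEND_KEY / PRETEND_KEY_NAMED (a sketch;
# "MYPKG" is a placeholder distribution name):
#
#     SETUPTOOLS_SCM_PRETEND_VERSION=1.2.3 python setup.py --version
#     SETUPTOOLS_SCM_PRETEND_VERSION_FOR_MYPKG=1.2.3 python setup.py --version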
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico; if not, see <http://www.gnu.org/licenses/>.
import copy
import os
import logging
import logging.handlers
import logging.config
import ConfigParser
from flask import request, session
from ZODB.POSException import POSError
from indico.core.config import Config
from MaKaC.common.contextManager import ContextManager
class AddIDFilter(logging.Filter):
def filter(self, record):
if not logging.Filter.filter(self, record):
return False
# Add request ID if available
try:
record.request_id = request.id
except RuntimeError:
record.request_id = '0' * 12
return True
class ExtraIndicoFilter(AddIDFilter):
def filter(self, record):
if record.name.split('.')[0] == 'indico':
return False
return AddIDFilter.filter(self, record)
class IndicoMailFormatter(logging.Formatter):
def format(self, record):
s = logging.Formatter.format(self, record)
if isinstance(s, unicode):
s = s.encode('utf-8')
return s + self._getRequestInfo()
def _getRequestInfo(self):
rh = ContextManager.get('currentRH', None)
info = ['Additional information:']
try:
info.append('Request: %s' % request.id)
info.append('URL: %s' % request.url)
if request.url_rule:
info.append('Endpoint: {0}'.format(request.url_rule.endpoint))
info.append('Method: %s' % request.method)
if rh:
info.append('Params: %s' % rh._getTruncatedParams())
if session:
try:
info.append('User: {0}'.format(session.user))
except POSError:
# If the DB connection is closed getting the avatar may fail
info.append('User id: {0}'.format(session.get('_avatarId')))
info.append('IP: %s' % request.remote_addr)
info.append('User Agent: %s' % request.user_agent)
info.append('Referer: %s' % (request.referrer or 'n/a'))
except RuntimeError, e:
info.append('Not available: %s' % e)
return '\n\n%s' % '\n'.join(x.encode('utf-8') if isinstance(x, unicode) else x for x in info)
class LoggerUtils:
@classmethod
def _bootstrap_cp(cls, cp, defaultArgs):
"""
Creates a very basic logging config for cases in which
logging.conf does not yet exist
"""
if not cp.has_section('loggers'):
cp.add_section('loggers')
cp.add_section('logger_root')
cp.add_section('handlers')
cp.set('loggers', 'keys', 'root')
cp.set('logger_root', 'handlers', ','.join(defaultArgs))
cp.set('handlers', 'keys', ','.join(defaultArgs))
for handler_name in defaultArgs:
section_name = 'handler_' + handler_name
cp.add_section(section_name)
cp.set(section_name, 'formatter', 'defaultFormatter')
@classmethod
def configFromFile(cls, fname, defaultArgs, filters):
"""
Read the logging configuration from the logging.conf file.
Fall back to default values if the logging.conf file cannot be read.
"""
cp = ConfigParser.ConfigParser()
parsed_files = cp.read(fname)
if cp.has_section('formatters'):
formatters = logging.config._create_formatters(cp)
else:
formatters = {}
# Really ugly.. but logging fails to import MaKaC.common.logger.IndicoMailFormatter
# when using it in the class= option...
if 'mailFormatter' in formatters:
f = formatters.get('mailFormatter')
if f:
formatters['mailFormatter'] = IndicoMailFormatter(f._fmt, f.datefmt)
# if there is a problem with the config file, set some sane defaults
if not parsed_files:
formatters['defaultFormatter'] = logging.Formatter(
'%(asctime)s %(levelname)-7s %(request_id)s %(name)-25s %(message)s')
cls._bootstrap_cp(cp, defaultArgs)
logging._acquireLock()
try:
logging._handlers.clear()
del logging._handlerList[:]
handlers = cls._install_handlers(cp, defaultArgs, formatters, filters)
logging.config._install_loggers(cp, handlers, False)
finally:
logging._releaseLock()
return handlers
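# For illustration, a minimal logging.conf that configFromFile() can parse
# (a sketch following the stdlib fileConfig layout; the handler name and
# log path are assumptions, not Indico defaults):
#
#     [loggers]
#     keys = root
#
#     [logger_root]
#     handlers = indico
#
#     [handlers]
#     keys = indico
#
#     [handler_indico]
#     class = FileHandler
#     args = ('indico.log', 'a')
#     level = DEBUG
#     formatter = defaultFormatter
#
#     [formatters]
#     keys = defaultFormatter
#
#     [formatter_defaultFormatter]
#     format = %(asctime)s %(levelname)-7s %(request_id)s %(name)-25s %(message)s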
@classmethod
def _install_handlers(cls, cp, defaultArgs, formatters, filters=None):
"""
Install and return handlers. If a handler configuration
is missing its args, the default values from the
indico.conf file are used.
"""
hlist = cp.get("handlers", "keys")
hlist = hlist.split(",")
handlers = {}
fixups = [] # for inter-handler references
for hand in hlist:
sectname = "handler_%s" % hand.strip()
opts = cp.options(sectname)
if "class" in opts:
klass = cp.get(sectname, "class")
else:
klass = defaultArgs[hand.strip()][0]
if "formatter" in opts:
fmt = cp.get(sectname, "formatter")
else:
fmt = ""
klass = eval(klass, vars(logging))
if "args" in opts:
# if the args are not present in the file,
# take default values
args = cp.get(sectname, "args")
else:
try:
args = defaultArgs[hand.strip()][1]
except KeyError:
continue
args = eval(args, vars(logging))
h = klass(*args)
if "level" in opts:
level = cp.get(sectname, "level")
h.setLevel(logging._levelNames[level])
else:
h.setLevel(logging._levelNames[defaultArgs[hand.strip()][2]])
if len(fmt):
h.setFormatter(formatters[fmt])
if filters and hand.strip() in filters:
for fltr in filters[hand.strip()]:
h.addFilter(fltr)
#temporary hack for FileHandler and MemoryHandler.
if klass == logging.handlers.MemoryHandler:
if "target" in opts:
target = cp.get(sectname,"target")
else:
target = ""
if len(target): #the target handler may not be loaded yet, so keep for later...
fixups.append((h, target))
handlers[hand] = h
#now all handlers are loaded, fixup inter-handler references...
for h, t in fixups:
h.setTarget(handlers[t])
return handlers
class Logger:
"""
Encapsulates the features provided by the standard logging module
"""
handlers = {}
@classmethod
def initialize(cls):
# Lists of filters for each handler
filters = {'indico': [AddIDFilter('indico')],
'other': [ExtraIndicoFilter()],
'smtp': [AddIDFilter('indico')]}
config = Config.getInstance()
if 'files' in config.getLoggers():
logConfFilepath = os.path.join(config.getConfigurationDir(), 'logging.conf')
smtpServer = config.getSmtpServer()
serverName = config.getWorkerName()
if not serverName:
serverName = config.getHostNameURL()
# Default arguments for the handlers, taken mostly from the configuration
defaultArgs = {
'indico': ("FileHandler", "('%s', 'a')" % cls._log_path('indico.log'), 'DEBUG'),
'other': ("FileHandler", "('%s', 'a')" % cls._log_path('other.log'), 'DEBUG'),
'smtp': (
"handlers.SMTPHandler", "(%s, 'logger@%s', ['%s'], 'Unexpected Exception occurred at %s')"
% (smtpServer, serverName, config.getSupportEmail(), serverName), "ERROR")
}
cls.handlers.update(LoggerUtils.configFromFile(logConfFilepath, defaultArgs, filters))
@classmethod
def init_app(cls, app):
"""
Initialize Flask app logging (add Sentry if needed)
"""
config = Config.getInstance()
if 'sentry' in config.getLoggers():
from raven.contrib.flask import Sentry
app.config['SENTRY_DSN'] = config.getSentryDSN()
# Plug into both Flask and `logging`
Sentry(app, logging=True, level=getattr(logging, config.getSentryLoggingLevel()))
@classmethod
def reset(cls):
"""
Reset the config, using new paths, etc (useful for testing)
"""
if cls.handlers:
for handler in copy.copy(cls.handlers):
cls.removeHandler(handler)
cls.initialize()
@classmethod
def removeHandler(cls, handlerName):
if cls.handlers:
handler = cls.handlers.get(handlerName)
# `handler` comes straight from cls.handlers, so checking membership in
# the dict (whose keys are names, not handlers) would always fail; a
# truthiness check is what is needed here.
if handler:
    del cls.handlers[handlerName]
    if handler in logging.root.handlers:
        logging.root.handlers.remove(handler)
@classmethod
def get(cls, module=None):
return logging.getLogger('indico' if module is None else 'indico.' + module)
@classmethod
def _log_path(cls, fname):
config = Config.getInstance()
configDir = config.getLogDir()
fpath = os.path.join(configDir, fname)
if not os.access(os.path.dirname(fpath), os.W_OK):
# if the file in the config is not accessible, use a "local" one
fpath = os.path.join(os.getcwd(), '.indico.log')
return fpath.replace('\\', '\\\\')
Logger.initialize()
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append("../if/gen-py")
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
host = "localhost"
port = 9290
from hadoop.api import Jobtracker
from hadoop.api.ttypes import *
#print dir(ThriftTaskPhase)
#exit()
socket = TSocket.TSocket(host,port)
transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = Jobtracker.Client(protocol)
transport.open()
#print client.trackerName(None,)
#print client.getClusterStatus(None,)
#print dir(client)
#print client.getQueues(None,)
jobs = client.getCompletedJobs(None,)
print jobs
if jobs and len(jobs) > 0:
counters = client.getJobCounters(None, jobs[0].job_id)
for c in counters:
print "--------------------------------------------"
print "CounterGroup:: ", c.displayName
for name in c.counters:
print "Counter '%s':: %s" % (name,c.counters[name].value)
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.client.session.Session's list_devices API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import cluster_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import pywrap_tf_session as tf_session
from tensorflow.python.client import session
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
from tensorflow.python.training import server_lib
class SessionListDevicesTest(test_util.TensorFlowTestCase):
def testListDevices(self):
with session.Session() as sess:
devices = sess.list_devices()
self.assertTrue('/job:localhost/replica:0/task:0/device:CPU:0' in set(
[d.name for d in devices]), devices)
# All valid device incarnations must be non-zero.
self.assertTrue(all(d.incarnation != 0 for d in devices))
def testInvalidDeviceNumber(self):
opts = tf_session.TF_NewSessionOptions()
c_session = tf_session.TF_NewSession(ops.get_default_graph()._c_graph, opts)
raw_device_list = tf_session.TF_SessionListDevices(c_session)
size = tf_session.TF_DeviceListCount(raw_device_list)
with self.assertRaises(errors.InvalidArgumentError):
tf_session.TF_DeviceListMemoryBytes(raw_device_list, size)
tf_session.TF_DeleteDeviceList(raw_device_list)
tf_session.TF_CloseSession(c_session)
def testListDevicesGrpcSession(self):
server = server_lib.Server.create_local_server()
with session.Session(server.target) as sess:
devices = sess.list_devices()
self.assertTrue(
'/job:localhost/replica:0/task:0/device:CPU:0' in set(
[d.name for d in devices]), devices)
# All valid device incarnations must be non-zero.
self.assertTrue(all(d.incarnation != 0 for d in devices))
def testListDevicesClusterSpecPropagation(self):
server1 = server_lib.Server.create_local_server()
server2 = server_lib.Server.create_local_server()
cluster_def = cluster_pb2.ClusterDef()
job = cluster_def.job.add()
job.name = 'worker'
job.tasks[0] = server1.target[len('grpc://'):]
job.tasks[1] = server2.target[len('grpc://'):]
config = config_pb2.ConfigProto(cluster_def=cluster_def)
with session.Session(server1.target, config=config) as sess:
devices = sess.list_devices()
device_names = set(d.name for d in devices)
self.assertTrue(
'/job:worker/replica:0/task:0/device:CPU:0' in device_names)
self.assertTrue(
'/job:worker/replica:0/task:1/device:CPU:0' in device_names)
# All valid device incarnations must be non-zero.
self.assertTrue(all(d.incarnation != 0 for d in devices))
if __name__ == '__main__':
googletest.main()
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import easy_template
def CmpByName(x, y):
return cmp(x['NAME'], y['NAME'])
class LandingPage(object):
def __init__(self):
self.section_list = ['Getting Started', 'API', 'Demo', 'Tutorial']
self.section_map = collections.defaultdict(list)
def GeneratePage(self, template_path):
with open(template_path) as template_file:
template = template_file.read()
sec_map = {}
for section_name in self.section_map:
items = self.section_map[section_name]
items = sorted(items, cmp=CmpByName)
sec_map[section_name] = items
print 'Add section ' + section_name
template_dict = { 'section_map': sec_map }
return easy_template.RunTemplateString(template, template_dict)
def AddDesc(self, desc):
group = desc['GROUP']
assert group in self.section_list
self.section_map[group].append(desc)
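# Sketch of intended use (file names and descriptor fields are illustrative):
#
#     page = LandingPage()
#     page.AddDesc({'NAME': 'hello_world', 'GROUP': 'Getting Started'})
#     html = page.GeneratePage('landing_page.html.template')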
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""docfield utililtes"""
import webnotes
def rename(doctype, fieldname, newname):
"""rename docfield"""
df = webnotes.conn.sql("""select * from tabDocField where parent=%s and fieldname=%s""",
(doctype, fieldname), as_dict=1)
if not df:
return
df = df[0]
if webnotes.conn.get_value('DocType', doctype, 'issingle'):
update_single(df, newname)
else:
update_table(df, newname)
update_parent_field(df, newname)
def update_single(f, new):
"""update in tabSingles"""
webnotes.conn.begin()
webnotes.conn.sql("""update tabSingles set field=%s where doctype=%s and field=%s""",
(new, f['parent'], f['fieldname']))
webnotes.conn.commit()
def update_table(f, new):
"""update table"""
query = get_change_column_query(f, new)
if query:
webnotes.conn.sql(query)
def update_parent_field(f, new):
"""update 'parentfield' in tables"""
if f['fieldtype']=='Table':
webnotes.conn.begin()
webnotes.conn.sql("""update `tab%s` set parentfield=%s where parentfield=%s""" \
% (f['options'], '%s', '%s'), (new, f['fieldname']))
webnotes.conn.commit()
def get_change_column_query(f, new):
"""generate change fieldname query"""
desc = webnotes.conn.sql("desc `tab%s`" % f['parent'])
for d in desc:
if d[0]== f['fieldname']:
return 'alter table `tab%s` change `%s` `%s` %s' % \
(f['parent'], f['fieldname'], new, d[1])
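# Sketch of a typical call (the doctype and field names are hypothetical):
#
#     rename("Sales Invoice", "old_fieldname", "new_fieldname")
#
# This updates either tabSingles or the doctype's own table, depending on the
# 'issingle' flag, and then fixes up 'parentfield' references for Table fields.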
"""
>>> from pybrain.tools.shortcuts import buildNetwork
>>> from test_recurrent_network import buildRecurrentNetwork
>>> from test_peephole_lstm import buildMinimalLSTMNetwork
>>> from test_peephole_mdlstm import buildMinimalMDLSTMNetwork
>>> from test_nested_network import buildNestedNetwork
>>> from test_simple_lstm_network import buildSimpleLSTMNetwork
>>> from test_simple_mdlstm import buildSimpleMDLSTMNetwork
>>> from test_swiping_network import buildSwipingNetwork
>>> from test_shared_connections import buildSharedCrossedNetwork
>>> from test_sliced_connections import buildSlicedNetwork
>>> from test_borderswipingnetwork import buildSimpleBorderSwipingNet
Test a number of network architectures and check that they produce the same output,
whether the Python implementation or the CTYPES (arac) one is used.
Use the network construction scripts in other test files to build a number of networks,
and then test the equivalence of each.
Simple net
>>> testEquivalence(buildNetwork(2,2))
True
A lot of layers
>>> net = buildNetwork(2,3,4,3,2,3,4,3,2)
>>> testEquivalence(net)
True
Nonstandard components
>>> from pybrain.structure import TanhLayer
>>> net = buildNetwork(2,3,2, bias = True, outclass = TanhLayer)
>>> testEquivalence(net)
True
Shared connections
>>> net = buildSharedCrossedNetwork()
>>> testEquivalence(net)
True
Sliced connections
>>> net = buildSlicedNetwork()
>>> testEquivalence(net)
True
Nested networks (not supposed to work yet!)
>>> net = buildNestedNetwork()
>>> testEquivalence(net)
Network cannot be converted.
Recurrent networks
>>> net = buildRecurrentNetwork()
>>> net.name = '22'
>>> net.params[:] = [1,1,0.5]
>>> testEquivalence(net)
True
Swiping networks
>>> net = buildSwipingNetwork()
>>> testEquivalence(net)
True
Border-swiping networks
>>> net = buildSimpleBorderSwipingNet()
>>> testEquivalence(net)
True
Lstm
>>> net = buildSimpleLSTMNetwork()
>>> testEquivalence(net)
True
Mdlstm
>>> net = buildSimpleMDLSTMNetwork()
>>> testEquivalence(net)
True
Lstm with peepholes
>>> net = buildMinimalLSTMNetwork(True)
>>> testEquivalence(net)
True
Mdlstm with peepholes
>>> net = buildMinimalMDLSTMNetwork(True)
>>> testEquivalence(net)
True
TODO:
- heavily nested
- exotic module use
"""
__author__ = 'Tom Schaul, tom@idsia.ch'
_dependencies = ['arac']
from pybrain.tests.helpers import buildAppropriateDataset, epsilonCheck
from pybrain.tests import runModuleTestSuite
def testEquivalence(net):
cnet = net.convertToFastNetwork()
if cnet is None:
return None
ds = buildAppropriateDataset(net)
if net.sequential:
for seq in ds:
net.reset()
cnet.reset()
for input, _ in seq:
res = net.activate(input)
cres = cnet.activate(input)
if net.name == '22':
h = net['hidden0']
ch = cnet['hidden0']
print(('ni', input, net.inputbuffer.T))
print(('ci', input, cnet.inputbuffer.T))
print(('hni', h.inputbuffer.T[0]))
print(('hci', ch.inputbuffer.T[0]))
print(('hnout', h.outputbuffer.T[0]))
print(('hcout', ch.outputbuffer.T[0]))
print()
else:
for input, _ in ds:
res = net.activate(input)
cres = cnet.activate(input)
if epsilonCheck(sum(res - cres), 0.001):
return True
else:
print(('in-net', net.inputbuffer.T))
print(('in-arac', cnet.inputbuffer.T))
print(('out-net', net.outputbuffer.T))
print(('out-arac', cnet.outputbuffer.T))
return (res, cres)
if __name__ == "__main__":
runModuleTestSuite(__import__('__main__'))
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class GpuRasterizationTestsPage(page_module.Page):
def __init__(self, page_set):
super(GpuRasterizationTestsPage, self).__init__(
url='file://../../data/gpu/pixel_background.html',
page_set=page_set,
name='GpuRasterization.BlueBox')
self.expectations = [
{'comment': 'body-t',
'color': [255, 255, 255],
'tolerance': 0,
'location': [5, 5]},
{'comment': 'body-r',
'color': [255, 255, 255],
'tolerance': 0,
'location': [215, 5]},
{'comment': 'body-b',
'color': [255, 255, 255],
'tolerance': 0,
'location': [215, 215]},
{'comment': 'body-l',
'color': [255, 255, 255],
'tolerance': 0,
'location': [5, 215]},
{'comment': 'background-t',
'color': [0, 0, 0],
'tolerance': 0,
'location': [30, 30]},
{'comment': 'background-r',
'color': [0, 0, 0],
'tolerance': 0,
'location': [170, 30]},
{'comment': 'background-b',
'color': [0, 0, 0],
'tolerance': 0,
'location': [170, 170]},
{'comment': 'background-l',
'color': [0, 0, 0],
'tolerance': 0,
'location': [30, 170]},
{'comment': 'box-t',
'color': [0, 0, 255],
'tolerance': 0,
'location': [70, 70]},
{'comment': 'box-r',
'color': [0, 0, 255],
'tolerance': 0,
'location': [140, 70]},
{'comment': 'box-b',
'color': [0, 0, 255],
'tolerance': 0,
'location': [140, 140]},
{'comment': 'box-l',
'color': [0, 0, 255],
'tolerance': 0,
'location': [70, 140]}
]
self.test_rect = [0, 0, 220, 220]
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.WaitForJavaScriptCondition(
'domAutomationController._finished', timeout_in_seconds=30)
class GpuRasterizationTestsPageSet(page_set_module.PageSet):
""" Basic test cases for GPU rasterization. """
def __init__(self):
super(GpuRasterizationTestsPageSet, self).__init__()
self.AddPage(GpuRasterizationTestsPage(self))
"""Temporary files.
This module provides generic, low- and high-level interfaces for
creating temporary files and directories. The interfaces listed
as "safe" just below can be used without fear of race conditions.
Those listed as "unsafe" cannot, and are provided for backward
compatibility only.
This module also provides some data items to the user:
TMP_MAX - maximum number of names that will be tried before
giving up.
tempdir - If this is set to a string before the first use of
any routine from this module, it will be considered as
another candidate location to store temporary files.
"""
__all__ = [
"NamedTemporaryFile", "TemporaryFile", # high level safe interfaces
"SpooledTemporaryFile", "TemporaryDirectory",
"mkstemp", "mkdtemp", # low level safe interfaces
"mktemp", # deprecated unsafe interface
"TMP_MAX", "gettempprefix", # constants
"tempdir", "gettempdir"
]
# Imports.
import functools as _functools
import warnings as _warnings
import io as _io
import os as _os
import shutil as _shutil
import errno as _errno
from random import Random as _Random
import weakref as _weakref
try:
import _thread
except ImportError:
import _dummy_thread as _thread
_allocate_lock = _thread.allocate_lock
_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
if hasattr(_os, 'O_NOFOLLOW'):
_text_openflags |= _os.O_NOFOLLOW
_bin_openflags = _text_openflags
if hasattr(_os, 'O_BINARY'):
_bin_openflags |= _os.O_BINARY
if hasattr(_os, 'TMP_MAX'):
TMP_MAX = _os.TMP_MAX
else:
TMP_MAX = 10000
# Although it does not have an underscore for historical reasons, this
# variable is an internal implementation detail (see issue 10354).
template = "tmp"
# Internal routines.
_once_lock = _allocate_lock()
if hasattr(_os, "lstat"):
_stat = _os.lstat
elif hasattr(_os, "stat"):
_stat = _os.stat
else:
# Fallback. All we need is something that raises OSError if the
# file doesn't exist.
def _stat(fn):
fd = _os.open(fn, _os.O_RDONLY)
_os.close(fd)
def _exists(fn):
try:
_stat(fn)
except OSError:
return False
else:
return True
class _RandomNameSequence:
"""An instance of _RandomNameSequence generates an endless
sequence of unpredictable strings which can safely be incorporated
into file names. Each string is eight characters long. Multiple
threads can safely use the same instance at the same time.
_RandomNameSequence is an iterator."""
characters = "abcdefghijklmnopqrstuvwxyz0123456789_"
@property
def rng(self):
cur_pid = _os.getpid()
if cur_pid != getattr(self, '_rng_pid', None):
self._rng = _Random()
self._rng_pid = cur_pid
return self._rng
def __iter__(self):
return self
def __next__(self):
c = self.characters
choose = self.rng.choice
letters = [choose(c) for dummy in range(8)]
return ''.join(letters)
def _candidate_tempdir_list():
"""Generate a list of candidate temporary directories which
_get_default_tempdir will try."""
dirlist = []
# First, try the environment.
for envname in 'TMPDIR', 'TEMP', 'TMP':
dirname = _os.getenv(envname)
if dirname: dirlist.append(dirname)
# Failing that, try OS-specific locations.
if _os.name == 'nt':
dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ])
else:
dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ])
# As a last resort, the current directory.
try:
dirlist.append(_os.getcwd())
except (AttributeError, OSError):
dirlist.append(_os.curdir)
return dirlist
def _get_default_tempdir():
"""Calculate the default directory to use for temporary files.
This routine should be called exactly once.
We determine whether or not a candidate temp dir is usable by
trying to create and write to a file in that directory. If this
is successful, the test file is deleted. To prevent denial of
service, the name of the test file must be randomized."""
namer = _RandomNameSequence()
dirlist = _candidate_tempdir_list()
for dir in dirlist:
if dir != _os.curdir:
dir = _os.path.abspath(dir)
# Try only a few names per directory.
for seq in range(100):
name = next(namer)
filename = _os.path.join(dir, name)
try:
fd = _os.open(filename, _bin_openflags, 0o600)
try:
try:
with _io.open(fd, 'wb', closefd=False) as fp:
fp.write(b'blat')
finally:
_os.close(fd)
finally:
_os.unlink(filename)
return dir
except FileExistsError:
pass
except OSError:
break # no point trying more names in this directory
raise FileNotFoundError(_errno.ENOENT,
"No usable temporary directory found in %s" %
dirlist)
_name_sequence = None
def _get_candidate_names():
"""Common setup sequence for all user-callable interfaces."""
global _name_sequence
if _name_sequence is None:
_once_lock.acquire()
try:
if _name_sequence is None:
_name_sequence = _RandomNameSequence()
finally:
_once_lock.release()
return _name_sequence
def _mkstemp_inner(dir, pre, suf, flags):
"""Code common to mkstemp, TemporaryFile, and NamedTemporaryFile."""
names = _get_candidate_names()
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, pre + name + suf)
try:
fd = _os.open(file, flags, 0o600)
return (fd, _os.path.abspath(file))
except FileExistsError:
continue # try again
except PermissionError:
# This exception is thrown when a directory with the chosen name
# already exists on windows.
if _os.name == 'nt':
continue
else:
raise
raise FileExistsError(_errno.EEXIST,
"No usable temporary file name found")
# User visible interfaces.
def gettempprefix():
"""Accessor for tempdir.template."""
return template
tempdir = None
def gettempdir():
"""Accessor for tempfile.tempdir."""
global tempdir
if tempdir is None:
_once_lock.acquire()
try:
if tempdir is None:
tempdir = _get_default_tempdir()
finally:
_once_lock.release()
return tempdir
def mkstemp(suffix="", prefix=template, dir=None, text=False):
"""User-callable function to create and return a unique temporary
file. The return value is a pair (fd, name) where fd is the
file descriptor returned by os.open, and name is the filename.
If 'suffix' is specified, the file name will end with that suffix,
otherwise there will be no suffix.
If 'prefix' is specified, the file name will begin with that prefix,
otherwise a default prefix is used.
If 'dir' is specified, the file will be created in that directory,
otherwise a default directory is used.
If 'text' is specified and true, the file is opened in text
mode. Else (the default) the file is opened in binary mode. On
some operating systems, this makes no difference.
The file is readable and writable only by the creating user ID.
If the operating system uses permission bits to indicate whether a
file is executable, the file is executable by no one. The file
descriptor is not inherited by children of this process.
Caller is responsible for deleting the file when done with it.
"""
if dir is None:
dir = gettempdir()
if text:
flags = _text_openflags
else:
flags = _bin_openflags
return _mkstemp_inner(dir, prefix, suffix, flags)
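# Example (illustrative only, not part of the module): mkstemp() hands back an
# open descriptor plus the path, and leaves deletion to the caller:
#
#     fd, path = mkstemp(suffix='.txt')
#     try:
#         _os.write(fd, b'hello')
#     finally:
#         _os.close(fd)
#         _os.unlink(path)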
def mkdtemp(suffix="", prefix=template, dir=None):
"""User-callable function to create and return a unique temporary
directory. The return value is the pathname of the directory.
Arguments are as for mkstemp, except that the 'text' argument is
not accepted.
The directory is readable, writable, and searchable only by the
creating user.
Caller is responsible for deleting the directory when done with it.
"""
if dir is None:
dir = gettempdir()
names = _get_candidate_names()
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, prefix + name + suffix)
try:
_os.mkdir(file, 0o700)
return file
except FileExistsError:
continue # try again
raise FileExistsError(_errno.EEXIST,
"No usable temporary directory name found")
def mktemp(suffix="", prefix=template, dir=None):
"""User-callable function to return a unique temporary file name. The
file is not created.
Arguments are as for mkstemp, except that the 'text' argument is
not accepted.
This function is unsafe and should not be used. The file name
refers to a file that did not exist at some point, but by the time
you get around to creating it, someone else may have beaten you to
the punch.
"""
## from warnings import warn as _warn
## _warn("mktemp is a potential security risk to your program",
## RuntimeWarning, stacklevel=2)
if dir is None:
dir = gettempdir()
names = _get_candidate_names()
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, prefix + name + suffix)
if not _exists(file):
return file
raise FileExistsError(_errno.EEXIST,
"No usable temporary filename found")
class _TemporaryFileCloser:
"""A separate object allowing proper closing of a temporary file's
underlying file object, without adding a __del__ method to the
temporary file."""
file = None # Set here since __del__ checks it
close_called = False
def __init__(self, file, name, delete=True):
self.file = file
self.name = name
self.delete = delete
# NT provides delete-on-close as a primitive, so we don't need
# the wrapper to do anything special. We still use it so that
# file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
if _os.name != 'nt':
# Cache the unlinker so we don't get spurious errors at
# shutdown when the module-level "os" is None'd out. Note
# that this must be referenced as self.unlink, because the
# name TemporaryFileWrapper may also get None'd out before
# __del__ is called.
def close(self, unlink=_os.unlink):
if not self.close_called and self.file is not None:
self.close_called = True
self.file.close()
if self.delete:
unlink(self.name)
# Need to ensure the file is deleted on __del__
def __del__(self):
self.close()
else:
def close(self):
if not self.close_called:
self.close_called = True
self.file.close()
class _TemporaryFileWrapper:
"""Temporary file wrapper
This class provides a wrapper around files opened for
temporary use. In particular, it seeks to automatically
remove the file when it is no longer needed.
"""
def __init__(self, file, name, delete=True):
self.file = file
self.name = name
self.delete = delete
self._closer = _TemporaryFileCloser(file, name, delete)
def __getattr__(self, name):
# Attribute lookups are delegated to the underlying file
# and cached for non-numeric results
# (i.e. methods are cached, closed and friends are not)
file = self.__dict__['file']
a = getattr(file, name)
if hasattr(a, '__call__'):
func = a
@_functools.wraps(func)
def func_wrapper(*args, **kwargs):
return func(*args, **kwargs)
# Avoid closing the file as long as the wrapper is alive,
# see issue #18879.
func_wrapper._closer = self._closer
a = func_wrapper
if not isinstance(a, int):
setattr(self, name, a)
return a
# The underlying __enter__ method returns the wrong object
# (self.file) so override it to return the wrapper
def __enter__(self):
self.file.__enter__()
return self
# Need to trap __exit__ as well to ensure the file gets
# deleted when used in a with statement
def __exit__(self, exc, value, tb):
result = self.file.__exit__(exc, value, tb)
self.close()
return result
def close(self):
"""
Close the temporary file, possibly deleting it.
"""
self._closer.close()
# iter() doesn't use __getattr__ to find the __iter__ method
def __iter__(self):
return iter(self.file)
def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
newline=None, suffix="", prefix=template,
dir=None, delete=True):
"""Create and return a temporary file.
Arguments:
'prefix', 'suffix', 'dir' -- as for mkstemp.
'mode' -- the mode argument to io.open (default "w+b").
'buffering' -- the buffer size argument to io.open (default -1).
'encoding' -- the encoding argument to io.open (default None)
'newline' -- the newline argument to io.open (default None)
'delete' -- whether the file is deleted on close (default True).
The file is created as mkstemp() would do it.
Returns an object with a file-like interface; the name of the file
is accessible as file.name. The file will be automatically deleted
when it is closed unless the 'delete' argument is set to False.
"""
if dir is None:
dir = gettempdir()
flags = _bin_openflags
# Setting O_TEMPORARY in the flags causes the OS to delete
# the file when it is closed. This is only supported by Windows.
if _os.name == 'nt' and delete:
flags |= _os.O_TEMPORARY
(fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
try:
file = _io.open(fd, mode, buffering=buffering,
newline=newline, encoding=encoding)
return _TemporaryFileWrapper(file, name, delete)
except Exception:
_os.close(fd)
raise
if _os.name != 'posix' or _os.sys.platform == 'cygwin':
# On non-POSIX and Cygwin systems, assume that we cannot unlink a file
# while it is open.
TemporaryFile = NamedTemporaryFile
else:
def TemporaryFile(mode='w+b', buffering=-1, encoding=None,
newline=None, suffix="", prefix=template,
dir=None):
"""Create and return a temporary file.
Arguments:
'prefix', 'suffix', 'dir' -- as for mkstemp.
'mode' -- the mode argument to io.open (default "w+b").
'buffering' -- the buffer size argument to io.open (default -1).
'encoding' -- the encoding argument to io.open (default None)
'newline' -- the newline argument to io.open (default None)
The file is created as mkstemp() would do it.
Returns an object with a file-like interface. The file has no
name, and will cease to exist when it is closed.
"""
if dir is None:
dir = gettempdir()
flags = _bin_openflags
(fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
try:
_os.unlink(name)
return _io.open(fd, mode, buffering=buffering,
newline=newline, encoding=encoding)
except:
_os.close(fd)
raise
class SpooledTemporaryFile:
"""Temporary file wrapper, specialized to switch from BytesIO
or StringIO to a real file when it exceeds a certain size or
when a fileno is needed.
"""
_rolled = False
def __init__(self, max_size=0, mode='w+b', buffering=-1,
encoding=None, newline=None,
suffix="", prefix=template, dir=None):
if 'b' in mode:
self._file = _io.BytesIO()
else:
# Setting newline="\n" avoids newline translation;
# this is important because otherwise on Windows we'd
# get double newline translation upon rollover().
self._file = _io.StringIO(newline="\n")
self._max_size = max_size
self._rolled = False
self._TemporaryFileArgs = {'mode': mode, 'buffering': buffering,
'suffix': suffix, 'prefix': prefix,
'encoding': encoding, 'newline': newline,
'dir': dir}
def _check(self, file):
if self._rolled: return
max_size = self._max_size
if max_size and file.tell() > max_size:
self.rollover()
def rollover(self):
if self._rolled: return
file = self._file
newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
del self._TemporaryFileArgs
newfile.write(file.getvalue())
newfile.seek(file.tell(), 0)
self._rolled = True
# The method caching trick from NamedTemporaryFile
# won't work here, because _file may change from a
# BytesIO/StringIO instance to a real file. So we list
# all the methods directly.
# Context management protocol
def __enter__(self):
if self._file.closed:
raise ValueError("Cannot enter context with closed file")
return self
def __exit__(self, exc, value, tb):
self._file.close()
# file protocol
def __iter__(self):
return self._file.__iter__()
def close(self):
self._file.close()
@property
def closed(self):
return self._file.closed
@property
def encoding(self):
try:
return self._file.encoding
except AttributeError:
if 'b' in self._TemporaryFileArgs['mode']:
raise
return self._TemporaryFileArgs['encoding']
def fileno(self):
self.rollover()
return self._file.fileno()
def flush(self):
self._file.flush()
def isatty(self):
return self._file.isatty()
@property
def mode(self):
try:
return self._file.mode
except AttributeError:
return self._TemporaryFileArgs['mode']
@property
def name(self):
try:
return self._file.name
except AttributeError:
return None
@property
def newlines(self):
try:
return self._file.newlines
except AttributeError:
if 'b' in self._TemporaryFileArgs['mode']:
raise
return self._TemporaryFileArgs['newline']
def read(self, *args):
return self._file.read(*args)
def readline(self, *args):
return self._file.readline(*args)
def readlines(self, *args):
return self._file.readlines(*args)
def seek(self, *args):
self._file.seek(*args)
@property
def softspace(self):
return self._file.softspace
def tell(self):
return self._file.tell()
def truncate(self, size=None):
if size is None:
self._file.truncate()
else:
if size > self._max_size:
self.rollover()
self._file.truncate(size)
def write(self, s):
file = self._file
rv = file.write(s)
self._check(file)
return rv
def writelines(self, iterable):
file = self._file
rv = file.writelines(iterable)
self._check(file)
return rv
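# Example (illustrative): data stays in the in-memory buffer until max_size is
# exceeded, at which point _check() triggers rollover() to a real file:
#
#     f = SpooledTemporaryFile(max_size=10)
#     f.write(b'tiny')        # still a BytesIO; f._rolled is False
#     f.write(b'x' * 100)     # tell() > max_size, so rolled to TemporaryFile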
class TemporaryDirectory(object):
"""Create and return a temporary directory. This has the same
behavior as mkdtemp but can be used as a context manager. For
example:
with TemporaryDirectory() as tmpdir:
...
Upon exiting the context, the directory and everything contained
in it are removed.
"""
# Handle mkdtemp raising an exception
name = None
_finalizer = None
_closed = False
def __init__(self, suffix="", prefix=template, dir=None):
self.name = mkdtemp(suffix, prefix, dir)
self._finalizer = _weakref.finalize(
self, self._cleanup, self.name,
warn_message="Implicitly cleaning up {!r}".format(self))
@classmethod
def _cleanup(cls, name, warn_message=None):
_shutil.rmtree(name)
if warn_message is not None:
_warnings.warn(warn_message, ResourceWarning)
def __repr__(self):
return "<{} {!r}>".format(self.__class__.__name__, self.name)
def __enter__(self):
return self.name
def __exit__(self, exc, value, tb):
self.cleanup()
def cleanup(self):
if self._finalizer is not None:
self._finalizer.detach()
if self.name is not None and not self._closed:
_shutil.rmtree(self.name)
self._closed = True
"""A simple log mechanism styled after PEP 282."""
# This module should be kept compatible with Python 2.1.
# The class here is styled after PEP 282 so that it could later be
# replaced with a standard Python logging implementation.
DEBUG = 1
INFO = 2
WARN = 3
ERROR = 4
FATAL = 5
import sys
class Log:
def __init__(self, threshold=WARN):
self.threshold = threshold
def _log(self, level, msg, args):
if level >= self.threshold:
if not args:
# msg may contain a '%'. If args is empty,
# don't even try to string-format
print msg
else:
print msg % args
sys.stdout.flush()
def log(self, level, msg, *args):
self._log(level, msg, args)
def debug(self, msg, *args):
self._log(DEBUG, msg, args)
def info(self, msg, *args):
self._log(INFO, msg, args)
def warn(self, msg, *args):
self._log(WARN, msg, args)
def error(self, msg, *args):
self._log(ERROR, msg, args)
def fatal(self, msg, *args):
self._log(FATAL, msg, args)
_global_log = Log()
log = _global_log.log
debug = _global_log.debug
info = _global_log.info
warn = _global_log.warn
error = _global_log.error
fatal = _global_log.fatal
def set_threshold(level):
# return the old threshold for use from tests
old = _global_log.threshold
_global_log.threshold = level
return old
def set_verbosity(v):
if v <= 0:
set_threshold(WARN)
elif v == 1:
set_threshold(INFO)
elif v >= 2:
set_threshold(DEBUG)
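# Example (illustrative) of the module-level helpers defined above, assuming
# this file is importable as `log`:
#
#     import log
#     old = log.set_verbosity(2)     # threshold becomes DEBUG; old value returned
#     log.debug("loading %s", "config")
#     log.set_threshold(old)         # restore the previous threshold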
from __future__ import unicode_literals
from django.http import HttpResponse, Http404
from django.template import loader
from django.contrib.sites.models import get_current_site
from django.core import urlresolvers
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.contrib.gis.db.models.fields import GeometryField
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.models import get_model
from django.utils import six
from django.utils.translation import ugettext as _
from django.contrib.gis.shortcuts import render_to_kml, render_to_kmz
def index(request, sitemaps):
"""
This view generates a sitemap index that uses the proper view
for resolving geographic section sitemap URLs.
"""
current_site = get_current_site(request)
sites = []
protocol = 'https' if request.is_secure() else 'http'
for section, site in sitemaps.items():
if callable(site):
pages = site().paginator.num_pages
else:
pages = site.paginator.num_pages
sitemap_url = urlresolvers.reverse('django.contrib.gis.sitemaps.views.sitemap', kwargs={'section': section})
sites.append('%s://%s%s' % (protocol, current_site.domain, sitemap_url))
if pages > 1:
for page in range(2, pages+1):
sites.append('%s://%s%s?p=%s' % (protocol, current_site.domain, sitemap_url, page))
xml = loader.render_to_string('sitemap_index.xml', {'sitemaps': sites})
return HttpResponse(xml, content_type='application/xml')
def sitemap(request, sitemaps, section=None):
"""
This view generates a sitemap with additional geographic
elements defined by Google.
"""
maps, urls = [], []
if section is not None:
if section not in sitemaps:
raise Http404(_("No sitemap available for section: %r") % section)
maps.append(sitemaps[section])
else:
maps = list(six.itervalues(sitemaps))
page = request.GET.get("p", 1)
current_site = get_current_site(request)
for site in maps:
try:
if callable(site):
urls.extend(site().get_urls(page=page, site=current_site))
else:
urls.extend(site.get_urls(page=page, site=current_site))
except EmptyPage:
raise Http404(_("Page %s empty") % page)
except PageNotAnInteger:
raise Http404(_("No page '%s'") % page)
xml = loader.render_to_string('gis/sitemaps/geo_sitemap.xml', {'urlset': urls})
return HttpResponse(xml, content_type='application/xml')
def kml(request, label, model, field_name=None, compress=False, using=DEFAULT_DB_ALIAS):
"""
This view generates KML for the given app label, model, and field name.
The model's default manager must be GeoManager, and the field name
must be that of a geographic field.
"""
placemarks = []
klass = get_model(label, model)
if not klass:
raise Http404('You must supply a valid app label and module name. Got "%s.%s"' % (label, model))
if field_name:
try:
info = klass._meta.get_field_by_name(field_name)
if not isinstance(info[0], GeometryField):
raise Exception
except:
raise Http404('Invalid geometry field.')
connection = connections[using]
if connection.ops.postgis:
# PostGIS will take care of transformation.
placemarks = klass._default_manager.using(using).kml(field_name=field_name)
else:
# There's no KML method on Oracle or MySQL, so we use the `kml`
# attribute of the lazy geometry instead.
placemarks = []
if connection.ops.oracle:
qs = klass._default_manager.using(using).transform(4326, field_name=field_name)
else:
qs = klass._default_manager.using(using).all()
for mod in qs:
mod.kml = getattr(mod, field_name).kml
placemarks.append(mod)
# Getting the render function and rendering to the correct MIME type.
if compress:
render = render_to_kmz
else:
render = render_to_kml
return render('gis/kml/placemarks.kml', {'places' : placemarks})
def kmz(request, label, model, field_name=None, using=DEFAULT_DB_ALIAS):
"""
This view returns KMZ for the given app label, model, and field name.
"""
return kml(request, label, model, field_name, compress=True, using=using)
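# A sketch of URLconf wiring for these views (URL patterns and import style are
# assumptions for this Django era, not shipped defaults):
#
#     from django.conf.urls import url
#     from django.contrib.gis.sitemaps import views
#
#     urlpatterns = [
#         url(r'^kml/(?P<label>\w+)/(?P<model>\w+)/(?P<field_name>\w+)\.kml$',
#             views.kml),
#         url(r'^kmz/(?P<label>\w+)/(?P<model>\w+)/(?P<field_name>\w+)\.kmz$',
#             views.kmz),
#     ]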
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Kevin Brebanov
# Based on pacman (Afterburn, Aaron Bull Schaefer)
# and apt (Matthew Williams) modules.
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: apk
short_description: Manages apk packages
description:
- Manages I(apk) packages for Alpine Linux.
author: "Kevin Brebanov (@kbrebanov)"
version_added: "2.0"
options:
available:
description:
- During upgrade, reset versioned world dependencies and change logic to prefer replacing or downgrading packages (instead of holding them)
if the currently installed package is no longer available from any repository.
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.4"
name:
description:
- A package name, like C(foo), or multiple packages, like C(foo, bar).
required: false
default: null
repository:
description:
- A package repository or multiple repositories
required: false
default: null
version_added: "2.4"
state:
description:
- Indicates the desired package(s) state.
- C(present) ensures the package(s) is/are present.
- C(absent) ensures the package(s) is/are absent.
- C(latest) ensures the package(s) is/are present and the latest version(s).
required: false
default: present
choices: [ "present", "absent", "latest" ]
update_cache:
description:
- Update repository indexes. Can be run with other steps or on its own.
required: false
default: no
choices: [ "yes", "no" ]
upgrade:
description:
- Upgrade all installed packages to their latest version.
required: false
default: no
choices: [ "yes", "no" ]
notes:
- '"name" and "upgrade" are mutually exclusive.'
'''
EXAMPLES = '''
# Update repositories and install "foo" package
- apk:
name: foo
update_cache: yes
# Update repositories and install "foo" and "bar" packages
- apk:
name: foo,bar
update_cache: yes
# Remove "foo" package
- apk:
name: foo
state: absent
# Remove "foo" and "bar" packages
- apk:
name: foo,bar
state: absent
# Install the package "foo"
- apk:
name: foo
state: present
# Install the packages "foo" and "bar"
- apk:
name: foo,bar
state: present
# Update repositories and update package "foo" to latest version
- apk:
name: foo
state: latest
update_cache: yes
# Update repositories and update packages "foo" and "bar" to latest versions
- apk:
name: foo,bar
state: latest
update_cache: yes
# Update all installed packages to the latest versions
- apk:
upgrade: yes
# Upgrade / replace / downgrade / uninstall all installed packages to the latest versions available
- apk:
available: yes
upgrade: yes
# Update repositories as a separate step
- apk:
update_cache: yes
# Install package from a specific repository
- apk:
name: foo
state: latest
update_cache: yes
repository: http://dl-3.alpinelinux.org/alpine/edge/main
'''
RETURN = '''
packages:
description: a list of packages that have been changed
returned: when packages have changed
type: list
sample: ['package', 'other-package']
'''
import re
# Import module snippets.
from ansible.module_utils.basic import AnsibleModule
def parse_for_packages(stdout):
packages = []
data = stdout.split('\n')
regex = re.compile(r'^\(\d+/\d+\)\s+\S+\s+(\S+)')
for l in data:
p = regex.search(l)
if p:
packages.append(p.group(1))
return packages
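# For reference, parse_for_packages() expects apk progress lines shaped like
# the following (format inferred from the regex above):
#
#     (1/3) Installing libfoo (1.2.3-r0)
#
# from which the package name is captured, e.g. packages == ['libfoo'].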
def update_package_db(module, exit):
cmd = "%s update" % (APK_PATH)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="could not update package db", stdout=stdout, stderr=stderr)
elif exit:
module.exit_json(changed=True, msg='updated repository indexes', stdout=stdout, stderr=stderr)
else:
return True
def query_package(module, name):
cmd = "%s -v info --installed %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
return False
def query_latest(module, name):
cmd = "%s version %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
search_pattern = r"(%s)-[\d\.\w]+-[\d\w]+\s+(.)\s+[\d\.\w]+-[\d\w]+\s+" % (re.escape(name))
match = re.search(search_pattern, stdout)
if match and match.group(2) == "<":
return False
return True
def query_virtual(module, name):
cmd = "%s -v info --description %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
search_pattern = r"^%s: virtual meta package" % (re.escape(name))
if re.search(search_pattern, stdout):
return True
return False
def get_dependencies(module, name):
cmd = "%s -v info --depends %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
dependencies = stdout.split()
if len(dependencies) > 1:
return dependencies[1:]
else:
return []
def upgrade_packages(module, available):
if module.check_mode:
cmd = "%s upgrade --simulate" % (APK_PATH)
else:
cmd = "%s upgrade" % (APK_PATH)
if available:
cmd = "%s --available" % cmd
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
if rc != 0:
module.fail_json(msg="failed to upgrade packages", stdout=stdout, stderr=stderr, packages=packagelist)
if re.search(r'^OK', stdout):
module.exit_json(changed=False, msg="packages already upgraded", stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="upgraded packages", stdout=stdout, stderr=stderr, packages=packagelist)
def install_packages(module, names, state):
upgrade = False
to_install = []
to_upgrade = []
for name in names:
# Check if virtual package
if query_virtual(module, name):
# Get virtual package dependencies
dependencies = get_dependencies(module, name)
for dependency in dependencies:
if state == 'latest' and not query_latest(module, dependency):
to_upgrade.append(dependency)
else:
if not query_package(module, name):
to_install.append(name)
elif state == 'latest' and not query_latest(module, name):
to_upgrade.append(name)
if to_upgrade:
upgrade = True
if not to_install and not upgrade:
module.exit_json(changed=False, msg="package(s) already installed")
packages = " ".join(to_install + to_upgrade)
if upgrade:
if module.check_mode:
cmd = "%s add --upgrade --simulate %s" % (APK_PATH, packages)
else:
cmd = "%s add --upgrade %s" % (APK_PATH, packages)
else:
if module.check_mode:
cmd = "%s add --simulate %s" % (APK_PATH, packages)
else:
cmd = "%s add %s" % (APK_PATH, packages)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
if rc != 0:
module.fail_json(msg="failed to install %s" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="installed %s package(s)" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
def remove_packages(module, names):
installed = []
for name in names:
if query_package(module, name):
installed.append(name)
if not installed:
module.exit_json(changed=False, msg="package(s) already removed")
names = " ".join(installed)
if module.check_mode:
cmd = "%s del --purge --simulate %s" % (APK_PATH, names)
else:
cmd = "%s del --purge %s" % (APK_PATH, names)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
if rc != 0:
module.fail_json(msg="failed to remove %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="removed %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
# ==========================================
# Main control flow.
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'installed', 'absent', 'removed', 'latest']),
name=dict(type='list'),
repository=dict(type='list'),
update_cache=dict(default='no', type='bool'),
upgrade=dict(default='no', type='bool'),
available=dict(default='no', type='bool'),
),
required_one_of=[['name', 'update_cache', 'upgrade']],
mutually_exclusive=[['name', 'upgrade']],
supports_check_mode=True
)
# Set LANG env since we parse stdout
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
global APK_PATH
APK_PATH = module.get_bin_path('apk', required=True)
p = module.params
# append any configured repositories to the apk command line
if p['repository']:
for r in p['repository']:
APK_PATH = "%s --repository %s" % (APK_PATH, r)
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
if p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p['update_cache']:
update_package_db(module, not p['name'] and not p['upgrade'])
if p['upgrade']:
upgrade_packages(module, p['available'])
if p['state'] in ['present', 'latest']:
install_packages(module, p['name'], p['state'])
elif p['state'] == 'absent':
remove_packages(module, p['name'])
if __name__ == '__main__':
main()
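# Illustrative playbook usage for this module (values are examples only):
#
# - apk:
#     name: nginx
#     state: latest
#     update_cache: yes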
import time, sys
import h2o, h2o_hosts, h2o_import as h2i
class SetupUnitTest(object):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global localhost
localhost = h2o.decide_if_localhost()
if (localhost):
params = collectConf(cls)
h2o.build_cloud(**params)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
# if we got here by time out exception waiting for a job, we should clear
# all jobs, if we're leaving h2o cloud up, and going to run another test
#h2o.cancelAllJobs()
h2o.tear_down_cloud()
def collectConf(cls):
result = { }
if hasattr(cls, 'nodes'): result['node_count'] = cls.nodes
if hasattr(cls, 'java_xmx'): result['java_heap_GB'] = cls.java_xmx
return result
# typical use in a unittest:
# class releaseTest(h2o_common.ReleaseCommon, unittest.TestCase):
# see multiple inheritance at http://docs.python.org/release/1.5/tut/node66.html
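# For example (hypothetical test module; the names below are placeholders):
#
# import unittest
# import h2o_common
#
# class TestBasic(h2o_common.SetupUnitTest, unittest.TestCase):
#     nodes = 2      # picked up by collectConf() as node_count
#     java_xmx = 4   # picked up by collectConf() as java_heap_GB
#     def test_smoke(self):
#         pass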
#************************************************************************************
class SetupOneJVM14(object):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global localhost
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(node_count=1, java_heap_GB=14)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
# if we got here by time out exception waiting for a job, we should clear
# all jobs, if we're leaving h2o cloud up, and going to run another test
#h2o.cancelAllJobs()
h2o.tear_down_cloud()
#************************************************************************************
class SetupThreeJVM4(object):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global localhost
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(node_count=3, java_heap_GB=4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
# if we got here by time out exception waiting for a job, we should clear
# all jobs, if we're leaving h2o cloud up, and going to run another test
#h2o.cancelAllJobs()
h2o.tear_down_cloud()
#************************************************************************************
class ReleaseCommon(object):
def tearDown(self):
print "tearDown"
# try to download the logs...may fail again! If we have no logs, h2o_sandbox will complain about not being able to look at anything
h2o.nodes[0].log_download()
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
print "setUpClass"
# standard method for being able to reproduce the random.* seed
h2o.setup_random_seed()
# do a hack so we can run release tests with a -cj arg. so we don't have to build the cloud separately
# those tests will always want to run non-local (big machine) so detecting -cj is fine
if h2o.config_json:
h2o_hosts.build_cloud_with_hosts()
else:
# this is the normal thing for release tests (separate cloud was built. clone it)
h2o.build_cloud_with_json()
# if you're fast with a test and cloud building, you may need to wait for cloud to stabilize
# normally this is baked into build_cloud, but let's leave it here for now
h2o.stabilize_cloud(h2o.nodes[0], h2o.nodes, timeoutSecs=90)
# this ?should? work although not guaranteed all will agree on the cloud size
# unless we do the conservative stabilize above
h2o.verify_cloud_size()
@classmethod
def tearDownClass(cls):
print "tearDownClass"
# DON"T
### h2o.tear_down_cloud()
# this could fail too
if h2o.nodes[0].delete_keys_at_teardown:
start = time.time()
h2i.delete_keys_at_all_nodes(timeoutSecs=300)
elapsed = time.time() - start
print "delete_keys_at_all_nodes(): took", elapsed, "secs"
#************************************************************************************
# no log download or key delete. Used for the test_shutdown.py
class ReleaseCommon2(object):
def tearDown(self):
print "tearDown"
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
print "setUpClass"
h2o.build_cloud_with_json()
# normally this shouldn't be necessary?
h2o.stabilize_cloud(h2o.nodes[0], h2o.nodes, timeoutSecs=90)
#************************************************************************************
# Notes:
# http://stackoverflow.com/questions/1323455/python-unit-test-with-base-and-sub-class
#
# This method only works for setUp and tearDown methods if you reverse the order of the base classes.
# Because the methods are defined in unittest.TestCase, and they don't call super(),
# then any setUp and tearDown methods in CommonTests need to be first in the MRO,
# or they won't be called at all. - Ian Clelland Oct 11 '10
#
# If you add setUp and tearDown methods to CommonTests class, and you want them to be called for
# each test in derived classes, you have to reverse the order of the base classes,
# so that it will be: class SubTest1(CommonTests, unittest.TestCase).
# - Denis Golomazov July 17
#************************************************************************************
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Dag Wieers (@dagwieers)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: mso_tenant
short_description: Manage tenants
description:
- Manage tenants on Cisco ACI Multi-Site.
author:
- Dag Wieers (@dagwieers)
version_added: '2.8'
options:
tenant:
description:
- The name of the tenant.
type: str
required: yes
aliases: [ name ]
display_name:
description:
- The name of the tenant to be displayed in the web UI.
type: str
required: yes
description:
description:
- The description for this tenant.
type: str
users:
description:
- A list of associated users for this tenant.
- Using this property will replace any existing associated users.
type: list
sites:
description:
- A list of associated sites for this tenant.
- Using this property will replace any existing associated sites.
type: list
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: mso
'''
EXAMPLES = r'''
- name: Add a new tenant
mso_tenant:
host: mso_host
username: admin
password: SomeSecretPassword
tenant: north_europe
display_name: North European Datacenter
description: This tenant manages the NEDC environment.
state: present
delegate_to: localhost
- name: Remove a tenant
mso_tenant:
host: mso_host
username: admin
password: SomeSecretPassword
tenant: north_europe
state: absent
delegate_to: localhost
- name: Query a tenant
mso_tenant:
host: mso_host
username: admin
password: SomeSecretPassword
tenant: north_europe
state: query
delegate_to: localhost
register: query_result
- name: Query all tenants
mso_tenant:
host: mso_host
username: admin
password: SomeSecretPassword
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.mso import MSOModule, mso_argument_spec, issubset
def main():
argument_spec = mso_argument_spec()
argument_spec.update(
description=dict(type='str'),
display_name=dict(type='str'),
tenant=dict(type='str', aliases=['name']),
users=dict(type='list'),
sites=dict(type='list'),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['tenant']],
['state', 'present', ['tenant']],
],
)
description = module.params['description']
display_name = module.params['display_name']
tenant = module.params['tenant']
state = module.params['state']
mso = MSOModule(module)
# Convert sites and users
sites = mso.lookup_sites(module.params['sites'])
users = mso.lookup_users(module.params['users'])
tenant_id = None
path = 'tenants'
# Query for existing object(s)
if tenant:
mso.existing = mso.get_obj(path, name=tenant)
if mso.existing:
tenant_id = mso.existing['id']
# If we found an existing object, continue with it
path = 'tenants/{id}'.format(id=tenant_id)
else:
mso.existing = mso.query_objs(path)
if state == 'query':
pass
elif state == 'absent':
mso.previous = mso.existing
if mso.existing:
if module.check_mode:
mso.existing = {}
else:
mso.existing = mso.request(path, method='DELETE')
elif state == 'present':
mso.previous = mso.existing
payload = dict(
description=description,
id=tenant_id,
name=tenant,
displayName=display_name,
siteAssociations=sites,
userAssociations=users,
)
mso.sanitize(payload, collate=True)
# Ensure displayName is not undefined
if mso.sent.get('displayName') is None:
mso.sent['displayName'] = tenant
# Ensure tenant has at least admin user
if mso.sent.get('userAssociations') is None:
mso.sent['userAssociations'] = [dict(userId="0000ffff0000000000000020")]
if mso.existing:
if not issubset(mso.sent, mso.existing):
if module.check_mode:
mso.existing = mso.proposed
else:
mso.existing = mso.request(path, method='PUT', data=mso.sent)
else:
if module.check_mode:
mso.existing = mso.proposed
else:
mso.existing = mso.request(path, method='POST', data=mso.sent)
mso.exit_json()
if __name__ == "__main__":
main()
# -*- coding: utf-8 -*-
"""
pygments.styles
~~~~~~~~~~~~~~~
Contains built-in styles.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.plugin import find_plugin_styles
from pygments.util import ClassNotFound
#: Maps style names to 'submodule::classname'.
STYLE_MAP = {
'default': 'default::DefaultStyle',
'emacs': 'emacs::EmacsStyle',
'friendly': 'friendly::FriendlyStyle',
'colorful': 'colorful::ColorfulStyle',
'autumn': 'autumn::AutumnStyle',
'murphy': 'murphy::MurphyStyle',
'manni': 'manni::ManniStyle',
'monokai': 'monokai::MonokaiStyle',
'perldoc': 'perldoc::PerldocStyle',
'pastie': 'pastie::PastieStyle',
'borland': 'borland::BorlandStyle',
'trac': 'trac::TracStyle',
'native': 'native::NativeStyle',
'fruity': 'fruity::FruityStyle',
'bw': 'bw::BlackWhiteStyle',
'vim': 'vim::VimStyle',
'vs': 'vs::VisualStudioStyle',
'tango': 'tango::TangoStyle',
'rrt': 'rrt::RrtStyle',
}
def get_style_by_name(name):
if name in STYLE_MAP:
mod, cls = STYLE_MAP[name].split('::')
builtin = "yes"
else:
for found_name, style in find_plugin_styles():
if name == found_name:
return style
# perhaps it got dropped into our styles package
builtin = ""
mod = name
cls = name.title() + "Style"
try:
mod = __import__('pygments.styles.' + mod, None, None, [cls])
except ImportError:
raise ClassNotFound("Could not find style module %r" % mod +
(builtin and ", though it should be builtin") + ".")
try:
return getattr(mod, cls)
except AttributeError:
raise ClassNotFound("Could not find style class %r in style module." % cls)
def get_all_styles():
"""Return an generator for all styles by name,
both builtin and plugin."""
for name in STYLE_MAP:
yield name
for name, _ in find_plugin_styles():
yield name
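# Illustrative usage (doctest-style; exact output depends on the installed
# Pygments and any style plugins):
#
# >>> get_style_by_name('monokai').background_color
# '#272822'
# >>> sorted(get_all_styles())[:3]
# ['autumn', 'borland', 'bw']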
"""
Badge Awarding backend for Badgr-Server.
"""
from __future__ import absolute_import
import hashlib
import logging
import mimetypes
import requests
import six
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from eventtracking import tracker
from lazy import lazy
from requests.packages.urllib3.exceptions import HTTPError
from badges.backends.base import BadgeBackend
from badges.models import BadgeAssertion
MAX_SLUG_LENGTH = 255
LOGGER = logging.getLogger(__name__)
class BadgrBackend(BadgeBackend):
"""
Backend for Badgr-Server by Concentric Sky. http://info.badgr.io/
"""
badges = []
def __init__(self):
super(BadgrBackend, self).__init__()
if not settings.BADGR_API_TOKEN:
raise ImproperlyConfigured("BADGR_API_TOKEN not set.")
@lazy
def _base_url(self):
"""
Base URL for all API requests.
"""
return "{}/v1/issuer/issuers/{}".format(settings.BADGR_BASE_URL, settings.BADGR_ISSUER_SLUG)
@lazy
def _badge_create_url(self):
"""
URL for generating a new Badge specification
"""
return "{}/badges".format(self._base_url)
def _badge_url(self, slug):
"""
Get the URL for a course's badge in a given mode.
"""
return "{}/{}".format(self._badge_create_url, slug)
def _assertion_url(self, slug):
"""
URL for generating a new assertion.
"""
return "{}/assertions".format(self._badge_url(slug))
def _slugify(self, badge_class):
"""
Get a compatible badge slug from the specification.
"""
slug = badge_class.issuing_component + badge_class.slug
if badge_class.issuing_component and badge_class.course_id:
# Make this unique to the course, and down to 64 characters.
# We don't do this to badges without issuing_component set for backwards compatibility.
slug = hashlib.sha256(slug + six.text_type(badge_class.course_id)).hexdigest()
if len(slug) > MAX_SLUG_LENGTH:
# Will be 64 characters.
slug = hashlib.sha256(slug).hexdigest()
return slug
def _log_if_raised(self, response, data):
"""
Log server response if there was an error.
"""
try:
response.raise_for_status()
except HTTPError:
LOGGER.error(
u"Encountered an error when contacting the Badgr-Server. Request sent to %r with headers %r.\n"
u"and data values %r\n"
u"Response status was %s.\n%s",
response.request.url, response.request.headers,
data,
response.status_code, response.content
)
raise
def _create_badge(self, badge_class):
"""
Create the badge class on Badgr.
"""
image = badge_class.image
# We don't want to bother validating the file any further than making sure we can detect its MIME type,
# for HTTP. The Badgr-Server should tell us if there's anything in particular wrong with it.
content_type, __ = mimetypes.guess_type(image.name)
if not content_type:
raise ValueError(
u"Could not determine content-type of image! Make sure it is a properly named .png file. "
u"Filename was: {}".format(image.name)
)
files = {'image': (image.name, image, content_type)}
data = {
'name': badge_class.display_name,
'criteria': badge_class.criteria,
'slug': self._slugify(badge_class),
'description': badge_class.description,
}
result = requests.post(
self._badge_create_url, headers=self._get_headers(), data=data, files=files,
timeout=settings.BADGR_TIMEOUT
)
self._log_if_raised(result, data)
def _send_assertion_created_event(self, user, assertion):
"""
Send an analytics event to record the creation of a badge assertion.
"""
tracker.emit(
'edx.badge.assertion.created', {
'user_id': user.id,
'badge_slug': assertion.badge_class.slug,
'badge_name': assertion.badge_class.display_name,
'issuing_component': assertion.badge_class.issuing_component,
'course_id': six.text_type(assertion.badge_class.course_id),
'enrollment_mode': assertion.badge_class.mode,
'assertion_id': assertion.id,
'assertion_image_url': assertion.image_url,
'assertion_json_url': assertion.assertion_url,
'issuer': assertion.data.get('issuer'),
}
)
def _create_assertion(self, badge_class, user, evidence_url):
"""
Register an assertion with the Badgr server for a particular user for a specific class.
"""
data = {
'email': user.email,
'evidence': evidence_url,
}
response = requests.post(
self._assertion_url(self._slugify(badge_class)), headers=self._get_headers(), data=data,
timeout=settings.BADGR_TIMEOUT
)
self._log_if_raised(response, data)
assertion, __ = BadgeAssertion.objects.get_or_create(user=user, badge_class=badge_class)
assertion.data = response.json()
assertion.backend = 'BadgrBackend'
assertion.image_url = assertion.data['image']
assertion.assertion_url = assertion.data['json']['id']
assertion.save()
self._send_assertion_created_event(user, assertion)
return assertion
@staticmethod
def _get_headers():
"""
Headers to send along with the request-- used for authentication.
"""
return {'Authorization': u'Token {}'.format(settings.BADGR_API_TOKEN)}
def _ensure_badge_created(self, badge_class):
"""
Verify a badge has been created for this badge class, and create it if not.
"""
slug = self._slugify(badge_class)
if slug in BadgrBackend.badges:
return
response = requests.get(self._badge_url(slug), headers=self._get_headers(), timeout=settings.BADGR_TIMEOUT)
if response.status_code != 200:
self._create_badge(badge_class)
BadgrBackend.badges.append(slug)
def award(self, badge_class, user, evidence_url=None):
"""
Make sure the badge class has been created on the backend, and then award the badge class to the user.
"""
self._ensure_badge_created(badge_class)
return self._create_assertion(badge_class, user, evidence_url)
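# Minimal usage sketch (assumes BADGR_* settings are configured and that
# badge_class and user are existing model instances; the evidence URL is
# illustrative):
#
# backend = BadgrBackend()
# assertion = backend.award(badge_class, user,
#                           evidence_url='https://example.com/evidence')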
import datetime
from django.contrib.auth.models import User
from django.contrib.comments.managers import CommentManager
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.db import models
from django.core import urlresolvers
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
COMMENT_MAX_LENGTH = getattr(settings,'COMMENT_MAX_LENGTH',3000)
class BaseCommentAbstractModel(models.Model):
"""
An abstract base class that any custom comment models probably should
subclass.
"""
# Content-object field
content_type = models.ForeignKey(ContentType,
verbose_name=_('content type'),
related_name="content_type_set_for_%(class)s")
object_pk = models.TextField(_('object ID'))
content_object = generic.GenericForeignKey(ct_field="content_type", fk_field="object_pk")
# Metadata about the comment
site = models.ForeignKey(Site)
class Meta:
abstract = True
def get_content_object_url(self):
"""
Get a URL suitable for redirecting to the content object.
"""
return urlresolvers.reverse(
"comments-url-redirect",
args=(self.content_type_id, self.object_pk)
)
class Comment(BaseCommentAbstractModel):
"""
A user comment about some object.
"""
# Who posted this comment? If ``user`` is set then it was an authenticated
# user; otherwise at least user_name should have been set and the comment
# was posted by a non-authenticated user.
user = models.ForeignKey(User, verbose_name=_('user'),
blank=True, null=True, related_name="%(class)s_comments")
user_name = models.CharField(_("user's name"), max_length=50, blank=True)
user_email = models.EmailField(_("user's email address"), blank=True)
user_url = models.URLField(_("user's URL"), blank=True)
comment = models.TextField(_('comment'), max_length=COMMENT_MAX_LENGTH)
# Metadata about the comment
submit_date = models.DateTimeField(_('date/time submitted'), default=None)
ip_address = models.IPAddressField(_('IP address'), blank=True, null=True)
is_public = models.BooleanField(_('is public'), default=True,
help_text=_('Uncheck this box to make the comment effectively ' \
'disappear from the site.'))
is_removed = models.BooleanField(_('is removed'), default=False,
help_text=_('Check this box if the comment is inappropriate. ' \
'A "This comment has been removed" message will ' \
'be displayed instead.'))
# Manager
objects = CommentManager()
class Meta:
db_table = "django_comments"
ordering = ('submit_date',)
permissions = [("can_moderate", "Can moderate comments")]
verbose_name = _('comment')
verbose_name_plural = _('comments')
def __unicode__(self):
return "%s: %s..." % (self.name, self.comment[:50])
def save(self, *args, **kwargs):
if self.submit_date is None:
self.submit_date = datetime.datetime.now()
super(Comment, self).save(*args, **kwargs)
def _get_userinfo(self):
"""
Get a dictionary that pulls together information about the poster
safely for both authenticated and non-authenticated comments.
This dict will have ``name``, ``email``, and ``url`` fields.
"""
if not hasattr(self, "_userinfo"):
self._userinfo = {
"name" : self.user_name,
"email" : self.user_email,
"url" : self.user_url
}
if self.user_id:
u = self.user
if u.email:
self._userinfo["email"] = u.email
# If the user has a full name, use that for the user name.
# However, a given user_name overrides the raw user.username,
# so only use that if this comment has no associated name.
if u.get_full_name():
self._userinfo["name"] = self.user.get_full_name()
elif not self.user_name:
self._userinfo["name"] = u.username
return self._userinfo
userinfo = property(_get_userinfo, doc=_get_userinfo.__doc__)
def _get_name(self):
return self.userinfo["name"]
def _set_name(self, val):
if self.user_id:
raise AttributeError(_("This comment was posted by an authenticated "\
"user and thus the name is read-only."))
self.user_name = val
name = property(_get_name, _set_name, doc="The name of the user who posted this comment")
def _get_email(self):
return self.userinfo["email"]
def _set_email(self, val):
if self.user_id:
raise AttributeError(_("This comment was posted by an authenticated "\
"user and thus the email is read-only."))
self.user_email = val
email = property(_get_email, _set_email, doc="The email of the user who posted this comment")
def _get_url(self):
return self.userinfo["url"]
def _set_url(self, val):
self.user_url = val
url = property(_get_url, _set_url, doc="The URL given by the user who posted this comment")
def get_absolute_url(self, anchor_pattern="#c%(id)s"):
return self.get_content_object_url() + (anchor_pattern % self.__dict__)
def get_as_text(self):
"""
Return this comment as plain text. Useful for emails.
"""
d = {
'user': self.user or self.name,
'date': self.submit_date,
'comment': self.comment,
'domain': self.site.domain,
'url': self.get_absolute_url()
}
return _('Posted by %(user)s at %(date)s\n\n%(comment)s\n\nhttp://%(domain)s%(url)s') % d
class CommentFlag(models.Model):
"""
Records a flag on a comment. This is intentionally flexible; right now, a
flag could be:
* A "removal suggestion" -- where a user suggests a comment for (potential) removal.
* A "moderator deletion" -- used when a moderator deletes a comment.
You can (ab)use this model to add other flags, if needed. However, by
design users are only allowed to flag a comment with a given flag once;
if you want rating look elsewhere.
"""
user = models.ForeignKey(User, verbose_name=_('user'), related_name="comment_flags")
comment = models.ForeignKey(Comment, verbose_name=_('comment'), related_name="flags")
flag = models.CharField(_('flag'), max_length=30, db_index=True)
flag_date = models.DateTimeField(_('date'), default=None)
# Constants for flag types
SUGGEST_REMOVAL = "removal suggestion"
MODERATOR_DELETION = "moderator deletion"
MODERATOR_APPROVAL = "moderator approval"
class Meta:
db_table = 'django_comment_flags'
unique_together = [('user', 'comment', 'flag')]
verbose_name = _('comment flag')
verbose_name_plural = _('comment flags')
def __unicode__(self):
return "%s flag of comment ID %s by %s" % \
(self.flag, self.comment_id, self.user.username)
def save(self, *args, **kwargs):
if self.flag_date is None:
self.flag_date = datetime.datetime.now()
super(CommentFlag, self).save(*args, **kwargs)
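# Illustrative flagging call (assumes existing comment and user objects):
#
# flag, created = CommentFlag.objects.get_or_create(
#     comment=comment, user=user, flag=CommentFlag.SUGGEST_REMOVAL)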
#===--- protocol_graph.py -------------------------*- coding: utf-8 -*----===#
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
#===----------------------------------------------------------------------===#
#
# Create a graph of the protocol refinement relationships, associated
# types, operator requirements, and defaulted generic operators.
#
# run as follows to view the Nth-largest connected component in a web browser:
#
# N=0 && rm -f /tmp/protocols.dot && \
# python protocol_graph.py stdlib.swift > /tmp/p0.dot && \
# (ccomps -zX#$N -o /tmp/protocols.dot /tmp/p0.dot || true) \
# && dot -Tsvg /tmp/protocols.dot > /tmp/protocols.svg \
# && open /tmp/protocols.svg
#===----------------------------------------------------------------------===#
import re
import sys
import os
import cgi
# Open 'stdlib.swift' in this directory if no path specified.
args = list(sys.argv) + [os.path.join(os.path.dirname(__file__), 'stdlib.swift')]
reFlags = re.MULTILINE | re.VERBOSE
# Pattern to recognize stdlib identifiers (FIXME: doesn't handle Unicode).
identifier = '[A-Za-z_][A-Za-z0-9_]*'
# Pattern to recognize a (possibly-generic) operator decl.
operator = r'''
(?:(?:prefix|postfix).*)? func \s*
(?=\S)[^A-Za-z_] # non-space, non-identifier: begins an operator name
(?:(?=\S)[^(){])* # rest of operator name
\s*
(<[^>{]+>)? # generic parameter list
\s*
\([^)]*\) # function parameter list
'''
# substitute local variables into the string
def interpolate(string):
import inspect
frame = inspect.currentframe()
return string % frame.f_back.f_locals
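# e.g. with a local variable name = 'Equatable', interpolate('p: %(name)s')
# returns 'p: Equatable' by reading the caller's locals.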
# Given the bodyText of a protocol definition, return a list of
# associated type and operator requirements.
def bodyLines(bodyText):
return [
cgi.escape(b.group(0)) for b in
re.finditer(
r'(typealias\s*'+identifier+r'(\s*[:,]\s*'+identifier + ')?|' + operator + '.*)',
bodyText, reFlags)
]
# Mapping from protocol to associated type / operator requirements
body = {}
# Mapping from a parent protocol to set of children.
graph = {}
# Mapping from protocol to generic operators taking instances as arguments
genericOperators = {}
comments = r'//.* | /[*] (.|\n)*? [*]/' # FIXME: doesn't respect strings or comment nesting
# read source, stripping all comments
sourceSansComments = re.sub(comments, '', open(args[1]).read(), flags=reFlags)
genericParameterConstraint = interpolate(r' (%(identifier)s) \s* : \s* (%(identifier)s) ')
def parseGenericOperator(m):
genericParams = m.group(5)
genericOperator = cgi.escape(m.group(0).strip())
functionParamStart = m.end(5) - m.start(0)
functionParams = genericOperator[functionParamStart:]
for m2 in re.finditer(genericParameterConstraint, genericParams, reFlags):
typeParameter = m2.group(1)
protocol = m2.group(2)
# we're only interested if we can find a function parameter of that type
if not re.search(r':\s*%s\s*[,)]' % typeParameter, functionParams): continue
# Make some replacements in the signature to limit the graph size
letterTau = 'τ'
letterPi = 'π'
abbreviatedSignature = re.sub(
r'\b%s\b' % protocol, letterPi,
re.sub(r'\b%s\b' % typeParameter, letterTau, genericOperator))
genericOperators.setdefault(protocol, set()).add(abbreviatedSignature)
def parseProtocol(m):
child = m.group(1)
# skip irrelevant protocols
if re.match(r'_Builtin.*Convertible', child): return
graph.setdefault(child, set())
body[child] = bodyLines(m.group(3))
if m.group(2):
for parent in m.group(2).strip().split(","):
if re.match(r'_Builtin.*Convertible', parent): return
graph.setdefault(parent.strip(), set()).add(child)
protocolsAndOperators = interpolate(r'''
\bprotocol \s+ (%(identifier)s) \s*
(?::\s*([^{]+))? # refinements
{([^{}\n]*(.*\n)*?)} # body
|
%(operator)s [^{]*(?={) # operator definition up to the open brace
''')
# Main parsing loop
for m in re.finditer(protocolsAndOperators, sourceSansComments, reFlags):
if m.group(1): parseProtocol(m)
elif m.group(5): parseGenericOperator(m)
# otherwise we matched some non-generic operator
# Find clusters of protocols that have the same name when underscores
# are stripped
clusterBuilder = {} # map from potential cluster name to nodes in the cluster
for n in graph:
clusterBuilder.setdefault(n.translate(None, '_'), set()).add(n)
# Grab the clusters with more than one member.
clusters = dict((c, nodes) for (c, nodes) in clusterBuilder.items() if len(nodes) > 1)
# A set of all intra-cluster edges
clusterEdges = set(
(s, t) for (c, elements) in clusters.items()
for s in elements
for t in graph[s] if t in elements)
print 'digraph ProtocolHierarchies {'
print ' mclimit = 100; ranksep=1.5; ' # ; packmode="array1"
print ' edge [dir="back"];'
print ' node [shape = box, fontname = Helvetica, fontsize = 10];'
for c in sorted(clusters):
print ' subgraph "cluster_%s" {' % c
for (s, t) in sorted(clusterEdges):
if s in clusters[c]:
print '%s -> %s [weight=100];' % (s, t)
print '}'
for node in sorted(graph.keys()):
requirements = body.get(node, [])
generics = sorted(genericOperators.get(node, set()))
style = 'solid' if node.startswith('_') else 'bold'
divider = '<HR/>\n' if len(requirements) != 0 and len(generics) != 0 else ''
label = node if len(requirements + generics) == 0 else (
'\n<TABLE BORDER="0">\n<TR><TD>%s</TD></TR><HR/>\n%s%s%s</TABLE>\n' % (
node,
'\n'.join('<TR><TD>%s</TD></TR>\n' % r for r in requirements),
divider,
'\n'.join('<TR><TD>%s</TD></TR>\n' % g for g in generics)))
print interpolate(' %(node)s [style = %(style)s, label=<%(label)s>]')
for (parent, children) in sorted(graph.items()):
print ' %s -> {' % parent,
print '; '.join(
sorted(child for child in children if not (parent, child) in clusterEdges)),
print '}'
print '}'
# (c) 2014, Maciej Delmanowski
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from functools import partial
import types
try:
import netaddr
except ImportError:
# in this case, we'll make the filters return error messages (see bottom)
netaddr = None
else:
class mac_linux(netaddr.mac_unix):
pass
mac_linux.word_fmt = '%.2x'
from ansible import errors
# ---- IP address and network query helpers ----
def _empty_ipaddr_query(v, vtype):
# We don't have any query to process, so just check what type the user
# expects, and return the IP address in a correct format
if v:
if vtype == 'address':
return str(v.ip)
elif vtype == 'network':
return str(v)
def _6to4_query(v, vtype, value):
if v.version == 4:
if v.size == 1:
ipconv = str(v.ip)
elif v.size > 1:
if v.ip != v.network:
ipconv = str(v.ip)
else:
ipconv = False
if ipaddr(ipconv, 'public'):
numbers = list(map(int, ipconv.split('.')))
try:
return '2002:{:02x}{:02x}:{:02x}{:02x}::1/48'.format(*numbers)
except:
return False
elif v.version == 6:
if vtype == 'address':
if ipaddr(str(v), '2002::/16'):
return value
elif vtype == 'network':
if v.ip != v.network:
if ipaddr(str(v.ip), '2002::/16'):
return value
else:
return False
def _ip_query(v):
if v.size == 1:
return str(v.ip)
if v.size > 1:
if v.ip != v.network:
return str(v.ip)
def _gateway_query(v):
if v.size > 1:
if v.ip != v.network:
return str(v.ip) + '/' + str(v.prefixlen)
def _bool_ipaddr_query(v):
if v:
return True
def _broadcast_query(v):
if v.size > 1:
return str(v.broadcast)
def _cidr_query(v):
return str(v)
def _cidr_lookup_query(v, iplist, value):
try:
if v in iplist:
return value
except:
return False
def _host_query(v):
if v.size == 1:
return str(v)
elif v.size > 1:
if v.ip != v.network:
return str(v.ip) + '/' + str(v.prefixlen)
def _hostmask_query(v):
return str(v.hostmask)
def _int_query(v, vtype):
if vtype == 'address':
return int(v.ip)
elif vtype == 'network':
return str(int(v.ip)) + '/' + str(int(v.prefixlen))
def _ipv4_query(v, value):
if v.version == 6:
try:
return str(v.ipv4())
except:
return False
else:
return value
def _ipv6_query(v, value):
if v.version == 4:
return str(v.ipv6())
else:
return value
def _link_local_query(v, value):
v_ip = netaddr.IPAddress(str(v.ip))
if v.version == 4:
if ipaddr(str(v_ip), '169.254.0.0/16'):
return value
elif v.version == 6:
if ipaddr(str(v_ip), 'fe80::/10'):
return value
def _loopback_query(v, value):
v_ip = netaddr.IPAddress(str(v.ip))
if v_ip.is_loopback():
return value
def _multicast_query(v, value):
if v.is_multicast():
return value
def _net_query(v):
if v.size > 1:
if v.ip == v.network:
return str(v.network) + '/' + str(v.prefixlen)
def _netmask_query(v):
if v.size > 1:
return str(v.netmask)
def _network_query(v):
if v.size > 1:
return str(v.network)
def _prefix_query(v):
return int(v.prefixlen)
def _private_query(v, value):
if v.is_private():
return value
def _public_query(v, value):
v_ip = netaddr.IPAddress(str(v.ip))
if v_ip.is_unicast() and not v_ip.is_private() and \
not v_ip.is_loopback() and not v_ip.is_netmask() and \
not v_ip.is_hostmask():
return value
def _revdns_query(v):
v_ip = netaddr.IPAddress(str(v.ip))
return v_ip.reverse_dns
def _size_query(v):
return v.size
def _subnet_query(v):
return str(v.cidr)
def _type_query(v):
if v.size == 1:
return 'address'
if v.size > 1:
if v.ip != v.network:
return 'address'
else:
return 'network'
def _unicast_query(v, value):
if v.is_unicast():
return value
def _version_query(v):
return v.version
def _wrap_query(v, vtype, value):
if v.version == 6:
if vtype == 'address':
return '[' + str(v.ip) + ']'
elif vtype == 'network':
return '[' + str(v.ip) + ']/' + str(v.prefixlen)
else:
return value
# ---- HWaddr query helpers ----
def _bare_query(v):
v.dialect = netaddr.mac_bare
return str(v)
def _bool_hwaddr_query(v):
if v:
return True
def _cisco_query(v):
v.dialect = netaddr.mac_cisco
return str(v)
def _empty_hwaddr_query(v, value):
if v:
return value
def _linux_query(v):
v.dialect = mac_linux
return str(v)
def _postgresql_query(v):
v.dialect = netaddr.mac_pgsql
return str(v)
def _unix_query(v):
v.dialect = netaddr.mac_unix
return str(v)
def _win_query(v):
v.dialect = netaddr.mac_eui48
return str(v)
# ---- IP address and network filters ----
def ipaddr(value, query = '', version = False, alias = 'ipaddr'):
''' Check if string is an IP address or network and filter it '''
query_func_extra_args = {
'': ('vtype',),
'6to4': ('vtype', 'value'),
'cidr_lookup': ('iplist', 'value'),
'int': ('vtype',),
'ipv4': ('value',),
'ipv6': ('value',),
'link-local': ('value',),
'loopback': ('value',),
'lo': ('value',),
'multicast': ('value',),
'private': ('value',),
'public': ('value',),
'unicast': ('value',),
'wrap': ('vtype', 'value'),
}
query_func_map = {
'': _empty_ipaddr_query,
'6to4': _6to4_query,
'address': _ip_query,
'address/prefix': _gateway_query,
'bool': _bool_ipaddr_query,
'broadcast': _broadcast_query,
'cidr': _cidr_query,
'cidr_lookup': _cidr_lookup_query,
'gateway': _gateway_query,
'gw': _gateway_query,
'host': _host_query,
'host/prefix': _gateway_query,
'hostmask': _hostmask_query,
'hostnet': _gateway_query,
'int': _int_query,
'ip': _ip_query,
'ipv4': _ipv4_query,
'ipv6': _ipv6_query,
'link-local': _link_local_query,
'lo': _loopback_query,
'loopback': _loopback_query,
'multicast': _multicast_query,
'net': _net_query,
'netmask': _netmask_query,
'network': _network_query,
'prefix': _prefix_query,
'private': _private_query,
'public': _public_query,
'revdns': _revdns_query,
'router': _gateway_query,
'size': _size_query,
'subnet': _subnet_query,
'type': _type_query,
'unicast': _unicast_query,
'v4': _ipv4_query,
'v6': _ipv6_query,
'version': _version_query,
'wrap': _wrap_query,
}
vtype = None
if not value:
return False
elif value == True:
return False
# Check if value is a list and parse each element
elif isinstance(value, (list, tuple, types.GeneratorType)):
_ret = []
for element in value:
if ipaddr(element, str(query), version):
_ret.append(ipaddr(element, str(query), version))
if _ret:
return _ret
else:
return list()
# Check if value is a number and convert it to an IP address
elif str(value).isdigit():
# We don't know what IP version to assume, so let's check IPv4 first,
# then IPv6
try:
if ((not version) or (version and version == 4)):
v = netaddr.IPNetwork('0.0.0.0/0')
v.value = int(value)
v.prefixlen = 32
elif version and version == 6:
v = netaddr.IPNetwork('::/0')
v.value = int(value)
v.prefixlen = 128
# IPv4 didn't work the first time, so it definitely has to be IPv6
except:
try:
v = netaddr.IPNetwork('::/0')
v.value = int(value)
v.prefixlen = 128
# The value is too big for IPv6. Are you a nanobot?
except:
return False
# We got an IP address, let's mark it as such
value = str(v)
vtype = 'address'
# value has not been recognized, check if it's a valid IP string
else:
try:
v = netaddr.IPNetwork(value)
# value is a valid IP string, check if user specified
# CIDR prefix or just an IP address, this will indicate default
# output format
try:
address, prefix = value.split('/')
vtype = 'network'
except:
vtype = 'address'
# value hasn't been recognized, maybe it's a numerical CIDR?
except:
try:
address, prefix = value.split('/')
address.isdigit()
address = int(address)
prefix.isdigit()
prefix = int(prefix)
# It's not numerical CIDR, give up
except:
return False
# It is something, so let's try and build a CIDR from the parts
try:
v = netaddr.IPNetwork('0.0.0.0/0')
v.value = address
v.prefixlen = prefix
# It's not a valid IPv4 CIDR
except:
try:
v = netaddr.IPNetwork('::/0')
v.value = address
v.prefixlen = prefix
# It's not a valid IPv6 CIDR. Give up.
except:
return False
# We have a valid CIDR, so let's write it in correct format
value = str(v)
vtype = 'network'
# We have a query string but it's not in the known query types. Check if
# that string is a valid subnet, if so, we can check later if given IP
# address/network is inside that specific subnet
try:
### ?? 6to4 and link-local were True here before. Should they still?
if query and (query not in query_func_map or query == 'cidr_lookup') and ipaddr(query, 'network'):
iplist = netaddr.IPSet([netaddr.IPNetwork(query)])
query = 'cidr_lookup'
except:
pass
# This code checks if value matches the IP version the user wants, ie. if
# it's any version ("ipaddr()"), IPv4 ("ipv4()") or IPv6 ("ipv6()")
# If version does not match, return False
if version and v.version != version:
return False
extras = []
for arg in query_func_extra_args.get(query, tuple()):
extras.append(locals()[arg])
try:
return query_func_map[query](v, *extras)
except KeyError:
try:
float(query)
if v.size == 1:
if vtype == 'address':
return str(v.ip)
elif vtype == 'network':
return str(v)
elif v.size > 1:
try:
return str(v[query]) + '/' + str(v.prefixlen)
except:
return False
else:
return value
except:
raise errors.AnsibleFilterError(alias + ': unknown filter type: %s' % query)
return False
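# Illustrative filter results (Jinja2 usage shown as comments):
# {{ '192.168.32.1/24' | ipaddr }}            -> '192.168.32.1/24'
# {{ '192.168.32.1/24' | ipaddr('address') }} -> '192.168.32.1'
# {{ '192.168.32.1/24' | ipaddr('network') }} -> '192.168.32.0'
# {{ '192.168.32.1/24' | ipaddr('prefix') }}  -> 24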
def ipwrap(value, query = ''):
try:
if isinstance(value, (list, tuple, types.GeneratorType)):
_ret = []
for element in value:
if ipaddr(element, query, version = False, alias = 'ipwrap'):
_ret.append(ipaddr(element, 'wrap'))
else:
_ret.append(element)
return _ret
else:
_ret = ipaddr(value, query, version = False, alias = 'ipwrap')
if _ret:
return ipaddr(_ret, 'wrap')
else:
return value
except:
return value
def ipv4(value, query = ''):
return ipaddr(value, query, version = 4, alias = 'ipv4')
def ipv6(value, query = ''):
return ipaddr(value, query, version = 6, alias = 'ipv6')
# Split given subnet into smaller subnets or find out the biggest subnet of
# a given IP address with given CIDR prefix
# Usage:
#
# - address or address/prefix | ipsubnet
# returns CIDR subnet of a given input
#
# - address/prefix | ipsubnet(cidr)
# returns number of possible subnets for given CIDR prefix
#
# - address/prefix | ipsubnet(cidr, index)
# returns new subnet with given CIDR prefix
#
# - address | ipsubnet(cidr)
# returns biggest subnet with given CIDR prefix that address belongs to
#
# - address | ipsubnet(cidr, index)
# returns next indexed subnet which contains given address
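# Worked examples (illustrative):
# '192.168.0.0/16' | ipsubnet(20)    -> '16' (number of possible /20 subnets)
# '192.168.0.0/16' | ipsubnet(20, 0) -> '192.168.0.0/20'
# '192.168.144.5'  | ipsubnet(16)    -> '192.168.0.0/16'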
def ipsubnet(value, query = '', index = 'x'):
''' Manipulate IPv4/IPv6 subnets '''
try:
vtype = ipaddr(value, 'type')
if vtype == 'address':
v = ipaddr(value, 'cidr')
elif vtype == 'network':
v = ipaddr(value, 'subnet')
value = netaddr.IPNetwork(v)
except:
return False
if not query:
return str(value)
elif str(query).isdigit():
vsize = ipaddr(v, 'size')
query = int(query)
try:
float(index)
index = int(index)
if vsize > 1:
try:
return str(list(value.subnet(query))[index])
except:
return False
elif vsize == 1:
try:
return str(value.supernet(query)[index])
except:
return False
except:
if vsize > 1:
try:
return str(len(list(value.subnet(query))))
except:
return False
elif vsize == 1:
try:
return str(value.supernet(query)[0])
except:
return False
return False
# Returns the nth host within a network described by value.
# Usage:
#
# - address or address/prefix | nthhost(nth)
# returns the nth host within the given network
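# e.g. '192.168.0.0/24' | nthhost(5) -> '192.168.0.5'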
def nthhost(value, query=''):
''' Get the nth host within a given network '''
try:
vtype = ipaddr(value, 'type')
if vtype == 'address':
v = ipaddr(value, 'cidr')
elif vtype == 'network':
v = ipaddr(value, 'subnet')
value = netaddr.IPNetwork(v)
except:
return False
if not query:
return False
try:
vsize = ipaddr(v, 'size')
nth = int(query)
if value.size > nth:
return str(value[nth])
except ValueError:
return False
return False
# Returns the SLAAC address within a network for a given HW/MAC address.
# Usage:
#
# - prefix | slaac(mac)
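# e.g. (illustrative values) 'fdae:1234:5678:9abc::/64' | slaac('52:54:00:3f:1a:2b')
# -> fdae:1234:5678:9abc:5054:ff:fe3f:1a2b (the EUI-64 interface identifier
# derived from the MAC address)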
def slaac(value, query = ''):
''' Get the SLAAC address within given network '''
try:
vtype = ipaddr(value, 'type')
if vtype == 'address':
v = ipaddr(value, 'cidr')
elif vtype == 'network':
v = ipaddr(value, 'subnet')
value = netaddr.IPNetwork(v)
if value.version != 6:
return False
except:
return False
if not query:
return False
try:
mac = hwaddr(query, alias = 'slaac')
eui = netaddr.EUI(mac)
except:
return False
return eui.ipv6(value.network)
# ---- HWaddr / MAC address filters ----
def hwaddr(value, query = '', alias = 'hwaddr'):
''' Check if string is a HW/MAC address and filter it '''
query_func_extra_args = {
'': ('value',),
}
query_func_map = {
'': _empty_hwaddr_query,
'bare': _bare_query,
'bool': _bool_hwaddr_query,
'cisco': _cisco_query,
'eui48': _win_query,
'linux': _linux_query,
'pgsql': _postgresql_query,
'postgresql': _postgresql_query,
'psql': _postgresql_query,
'unix': _unix_query,
'win': _win_query,
}
try:
v = netaddr.EUI(value)
except:
if query and query != 'bool':
raise errors.AnsibleFilterError(alias + ': not a hardware address: %s' % value)
extras = []
for arg in query_func_extra_args.get(query, tuple()):
extras.append(locals()[arg])
try:
return query_func_map[query](v, *extras)
except KeyError:
raise errors.AnsibleFilterError(alias + ': unknown filter type: %s' % query)
return False
def macaddr(value, query = ''):
return hwaddr(value, query, alias = 'macaddr')
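# Illustrative conversions:
# '08:00:27:0d:6e:16' | hwaddr('cisco') -> '0800.270d.6e16'
# '08:00:27:0d:6e:16' | hwaddr('bare')  -> '0800270D6E16'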
def _need_netaddr(f_name, *args, **kwargs):
raise errors.AnsibleFilterError('The {0} filter requires python-netaddr be'
' installed on the ansible controller'.format(f_name))
# ---- Ansible filters ----
class FilterModule(object):
''' IP address and network manipulation filters '''
filter_map = {
# IP addresses and networks
'ipaddr': ipaddr,
'ipwrap': ipwrap,
'ipv4': ipv4,
'ipv6': ipv6,
'ipsubnet': ipsubnet,
'nthhost': nthhost,
'slaac': slaac,
# MAC / HW addresses
'hwaddr': hwaddr,
'macaddr': macaddr
}
def filters(self):
if netaddr:
return self.filter_map
else:
# Need to install python-netaddr for these filters to work
return dict((f, partial(_need_netaddr, f)) for f in self.filter_map)
## features.py
##
## Copyright (C) 2003-2004 Alexey "Snake" Nezhdanov
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
# $Id: features.py,v 1.25 2009/04/07 07:11:48 snakeru Exp $
"""
This module contains miscellaneous features that are not worth splitting into separate modules.
Here is:
DISCO client and agents-to-DISCO and browse-to-DISCO emulators.
IBR and password manager.
jabber:iq:privacy methods
All these methods take a 'disp' first argument that should be an already connected
(and in most cases already authorised) dispatcher instance.
"""
from protocol import *
REGISTER_DATA_RECEIVED='REGISTER DATA RECEIVED'
### DISCO ### http://jabber.org/protocol/disco ### JEP-0030 ####################
### Browse ### jabber:iq:browse ### JEP-0030 ###################################
### Agents ### jabber:iq:agents ### JEP-0030 ###################################
def _discover(disp,ns,jid,node=None,fb2b=0,fb2a=1):
""" Try to obtain info from the remote object.
If remote object doesn't support disco fall back to browse (if fb2b is true)
and if it doesn't support browse (or fb2b is not true) fall back to agents protocol
(if fb2a is true). Returns obtained info. Used internally. """
iq=Iq(to=jid,typ='get',queryNS=ns)
if node: iq.setQuerynode(node)
rep=disp.SendAndWaitForResponse(iq)
if fb2b and not isResultNode(rep): rep=disp.SendAndWaitForResponse(Iq(to=jid,typ='get',queryNS=NS_BROWSE)) # Fallback to browse
if fb2a and not isResultNode(rep): rep=disp.SendAndWaitForResponse(Iq(to=jid,typ='get',queryNS=NS_AGENTS)) # Fallback to agents
if isResultNode(rep): return [n for n in rep.getQueryPayload() if isinstance(n, Node)]
return []
def discoverItems(disp,jid,node=None):
""" Query remote object about any items that it contains. Return items list. """
""" According to JEP-0030:
query MAY have node attribute
item: MUST HAVE jid attribute and MAY HAVE name, node, action attributes.
action attribute of item can be either of remove or update value."""
ret=[]
for i in _discover(disp,NS_DISCO_ITEMS,jid,node):
if i.getName()=='agent' and i.getTag('name'): i.setAttr('name',i.getTagData('name'))
ret.append(i.attrs)
return ret
def discoverInfo(disp,jid,node=None):
""" Query remote object about info that it publishes. Returns identities and features lists."""
""" According to JEP-0030:
query MAY have node attribute
identity: MUST HAVE category and name attributes and MAY HAVE type attribute.
feature: MUST HAVE var attribute"""
identities , features = [] , []
for i in _discover(disp,NS_DISCO_INFO,jid,node):
if i.getName()=='identity': identities.append(i.attrs)
elif i.getName()=='feature': features.append(i.getAttr('var'))
elif i.getName()=='agent':
if i.getTag('name'): i.setAttr('name',i.getTagData('name'))
if i.getTag('description'): i.setAttr('name',i.getTagData('description'))
identities.append(i.attrs)
if i.getTag('groupchat'): features.append(NS_GROUPCHAT)
if i.getTag('register'): features.append(NS_REGISTER)
if i.getTag('search'): features.append(NS_SEARCH)
return identities , features
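# Illustrative call (assumes disp is a connected dispatcher and the JID is an
# example only):
#
# ids, feats = discoverInfo(disp, 'conference.example.org')
# for ident in ids: print ident.get('category'), ident.get('name')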
### Registration ### jabber:iq:register ### JEP-0077 ###########################
def getRegInfo(disp,host,info={},sync=True):
""" Gets registration form from remote host.
You can pre-fill the info dictionary.
E.g. if you are requesting info on registering user joey then specify
info as {'username':'joey'}. See JEP-0077 for details.
'disp' must be connected dispatcher instance."""
iq=Iq('get',NS_REGISTER,to=host)
for i in info.keys(): iq.setTagData(i,info[i])
if sync:
resp=disp.SendAndWaitForResponse(iq)
_ReceivedRegInfo(disp.Dispatcher,resp, host)
return resp
else: disp.SendAndCallForResponse(iq,_ReceivedRegInfo, {'agent': host})
def _ReceivedRegInfo(con, resp, agent):
iq=Iq('get',NS_REGISTER,to=agent)
if not isResultNode(resp): return
df=resp.getTag('query',namespace=NS_REGISTER).getTag('x',namespace=NS_DATA)
if df:
con.Event(NS_REGISTER,REGISTER_DATA_RECEIVED,(agent, DataForm(node=df)))
return
df=DataForm(typ='form')
for i in resp.getQueryPayload():
if type(i)<>type(iq): pass
elif i.getName()=='instructions': df.addInstructions(i.getData())
else: df.setField(i.getName()).setValue(i.getData())
con.Event(NS_REGISTER,REGISTER_DATA_RECEIVED,(agent, df))
def register(disp,host,info):
""" Perform registration on remote server with provided info.
disp must be connected dispatcher instance.
Returns true or false depending on registration result.
If registration fails you can get additional info from the dispatcher's owner
attributes lastErrNode, lastErr and lastErrCode.
"""
iq=Iq('set',NS_REGISTER,to=host)
if type(info)<>type({}): info=info.asDict()
for i in info.keys(): iq.setTag('query').setTagData(i,info[i])
resp=disp.SendAndWaitForResponse(iq)
if isResultNode(resp): return 1
def unregister(disp,host):
""" Unregisters with host (permanently removes account).
disp must be connected and authorized dispatcher instance.
Returns true on success."""
resp=disp.SendAndWaitForResponse(Iq('set',NS_REGISTER,to=host,payload=[Node('remove')]))
if isResultNode(resp): return 1
def changePasswordTo(disp,newpassword,host=None):
""" Changes password on specified or current (if not specified) server.
disp must be connected and authorized dispatcher instance.
Returns true on success."""
if not host: host=disp._owner.Server
resp=disp.SendAndWaitForResponse(Iq('set',NS_REGISTER,to=host,payload=[Node('username',payload=[disp._owner.User]),Node('password',payload=[newpassword])]))
if isResultNode(resp): return 1
### Privacy ### jabber:iq:privacy ### draft-ietf-xmpp-im-19 ####################
#type=[jid|group|subscription]
#action=[allow|deny]
def getPrivacyLists(disp):
""" Requests privacy lists from connected server.
Returns dictionary of existing lists on success."""
try:
dict={'lists':[]}
resp=disp.SendAndWaitForResponse(Iq('get',NS_PRIVACY))
if not isResultNode(resp): return
for list in resp.getQueryPayload():
if list.getName()=='list': dict['lists'].append(list.getAttr('name'))
else: dict[list.getName()]=list.getAttr('name')
return dict
except: pass
def getPrivacyList(disp,listname):
""" Requests specific privacy list listname. Returns list of XML nodes (rules)
taken from the server responce."""
try:
resp=disp.SendAndWaitForResponse(Iq('get',NS_PRIVACY,payload=[Node('list',{'name':listname})]))
if isResultNode(resp): return resp.getQueryPayload()[0]
except: pass
def setActivePrivacyList(disp,listname=None,typ='active'):
""" Switches privacy list 'listname' to specified type.
By default the type is 'active'. Returns true on success."""
if listname: attrs={'name':listname}
else: attrs={}
resp=disp.SendAndWaitForResponse(Iq('set',NS_PRIVACY,payload=[Node(typ,attrs)]))
if isResultNode(resp): return 1
def setDefaultPrivacyList(disp,listname=None):
""" Sets the default privacy list as 'listname'. Returns true on success."""
return setActivePrivacyList(disp,listname,'default')
def setPrivacyList(disp,list):
""" Set the ruleset. 'list' should be the simpleXML node formatted
according to RFC 3921 (XMPP-IM) (I.e. Node('list',{'name':listname},payload=[...]) )
Returns true on success."""
resp=disp.SendAndWaitForResponse(Iq('set',NS_PRIVACY,payload=[list]))
if isResultNode(resp): return 1
def delPrivacyList(disp,listname):
""" Deletes privacy list 'listname'. Returns true on success."""
resp=disp.SendAndWaitForResponse(Iq('set',NS_PRIVACY,payload=[Node('list',{'name':listname})]))
if isResultNode(resp): return 1
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Low-level locale data access.
:note: The `Locale` class, which uses this module under the hood, provides a
more convenient interface for accessing the locale data.
"""
import os
import pickle
try:
import threading
except ImportError:
import dummy_threading as threading
from UserDict import DictMixin
__all__ = ['exists', 'list', 'load']
__docformat__ = 'restructuredtext en'
_cache = {}
_cache_lock = threading.RLock()
_dirname = os.path.join(os.path.dirname(__file__), 'localedata')
def exists(name):
"""Check whether locale data is available for the given locale.
:param name: the locale identifier string
:return: `True` if the locale data exists, `False` otherwise
:rtype: `bool`
"""
if name in _cache:
return True
return os.path.exists(os.path.join(_dirname, '%s.dat' % name))
def list():
"""Return a list of all locale identifiers for which locale data is
available.
:return: a list of locale identifiers (strings)
:rtype: `list`
:since: version 0.8.1
"""
return [stem for stem, extension in [
os.path.splitext(filename) for filename in os.listdir(_dirname)
] if extension == '.dat' and stem != 'root']
def load(name, merge_inherited=True):
"""Load the locale data for the given locale.
The locale data is a dictionary that contains much of the data defined by
the Common Locale Data Repository (CLDR). This data is stored as a
collection of pickle files inside the ``babel`` package.
>>> d = load('en_US')
>>> d['languages']['sv']
u'Swedish'
Note that the results are cached, and subsequent requests for the same
locale return the same dictionary:
>>> d1 = load('en_US')
>>> d2 = load('en_US')
>>> d1 is d2
True
:param name: the locale identifier string (or "root")
:param merge_inherited: whether the inherited data should be merged into
the data of the requested locale
:return: the locale data
:rtype: `dict`
:raise `IOError`: if no locale data file is found for the given locale
identifier, or one of the locales it inherits from
"""
_cache_lock.acquire()
try:
data = _cache.get(name)
if not data:
# Load inherited data
if name == 'root' or not merge_inherited:
data = {}
else:
parts = name.split('_')
if len(parts) == 1:
parent = 'root'
else:
parent = '_'.join(parts[:-1])
data = load(parent).copy()
filename = os.path.join(_dirname, '%s.dat' % name)
fileobj = open(filename, 'rb')
try:
if name != 'root' and merge_inherited:
merge(data, pickle.load(fileobj))
else:
data = pickle.load(fileobj)
_cache[name] = data
finally:
fileobj.close()
return data
finally:
_cache_lock.release()
def merge(dict1, dict2):
"""Merge the data from `dict2` into the `dict1` dictionary, making copies
of nested dictionaries.
>>> d = {1: 'foo', 3: 'baz'}
>>> merge(d, {1: 'Foo', 2: 'Bar'})
>>> items = d.items(); items.sort(); items
[(1, 'Foo'), (2, 'Bar'), (3, 'baz')]
:param dict1: the dictionary to merge into
:param dict2: the dictionary containing the data that should be merged
"""
for key, val2 in dict2.items():
if val2 is not None:
val1 = dict1.get(key)
if isinstance(val2, dict):
if val1 is None:
val1 = {}
if isinstance(val1, Alias):
val1 = (val1, val2)
elif isinstance(val1, tuple):
alias, others = val1
others = others.copy()
merge(others, val2)
val1 = (alias, others)
else:
val1 = val1.copy()
merge(val1, val2)
else:
val1 = val2
dict1[key] = val1
class Alias(object):
"""Representation of an alias in the locale data.
An alias is a value that refers to some other part of the locale data,
as specified by the `keys`.
"""
def __init__(self, keys):
self.keys = tuple(keys)
def __repr__(self):
return '<%s %r>' % (type(self).__name__, self.keys)
def resolve(self, data):
"""Resolve the alias based on the given data.
This is done recursively, so if one alias resolves to a second alias,
that second alias will also be resolved.
:param data: the locale data
:type data: `dict`
"""
base = data
for key in self.keys:
data = data[key]
if isinstance(data, Alias):
data = data.resolve(base)
elif isinstance(data, tuple):
alias, others = data
data = alias.resolve(base)
return data
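# A minimal sketch of alias resolution (the data below is hypothetical, not
# real CLDR content): an Alias stores a key path, and resolve() follows it
# from the root of the locale data, chasing further aliases as needed.
#
# >>> data = {'eras': {'abbreviated': {0: 'BC', 1: 'AD'},
# ...                  'wide': Alias(['eras', 'abbreviated'])}}
# >>> data['eras']['wide'].resolve(data)
# {0: 'BC', 1: 'AD'}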
class LocaleDataDict(DictMixin, dict):
"""Dictionary wrapper that automatically resolves aliases to the actual
values.
"""
def __init__(self, data, base=None):
dict.__init__(self, data)
if base is None:
base = data
self.base = base
def __getitem__(self, key):
orig = val = dict.__getitem__(self, key)
if isinstance(val, Alias): # resolve an alias
val = val.resolve(self.base)
if isinstance(val, tuple): # Merge a partial dict with an alias
alias, others = val
val = alias.resolve(self.base).copy()
merge(val, others)
if type(val) is dict: # Return a nested alias-resolving dict
val = LocaleDataDict(val, base=self.base)
if val is not orig:
self[key] = val
return val
def copy(self):
return LocaleDataDict(dict.copy(self), base=self.base)
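# Usage sketch: LocaleDataDict resolves Alias values and (alias, overrides)
# tuples transparently on lookup, and wraps nested plain dicts so resolution
# also happens at deeper levels.
#
# >>> data = LocaleDataDict(load('en_US'))
# >>> type(data['languages']) is LocaleDataDict
# True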
import os
# toolchains options
ARCH='arm'
CPU='cortex-m4'
CROSS_TOOL='gcc'
if os.getenv('RTT_CC'):
CROSS_TOOL = os.getenv('RTT_CC')
# cross_tool provides the cross compiler
# EXEC_PATH is the path to the compiler executables, e.g. CodeSourcery, Keil MDK, IAR
if CROSS_TOOL == 'gcc':
PLATFORM = 'gcc'
EXEC_PATH = 'C:/Program Files (x86)/GNU Tools ARM Embedded/6 2017-q2-update/bin'
elif CROSS_TOOL == 'keil':
PLATFORM = 'armcc'
EXEC_PATH = 'C:/Keil_v5'
elif CROSS_TOOL == 'iar':
PLATFORM = 'iar'
EXEC_PATH = 'C:/Program Files (x86)/IAR Systems/Embedded Workbench 7.5'
if os.getenv('RTT_EXEC_PATH'):
EXEC_PATH = os.getenv('RTT_EXEC_PATH')
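# For example (paths hypothetical), the defaults above can be overridden from
# the environment before invoking scons:
#   set RTT_CC=gcc && set RTT_EXEC_PATH=D:/gcc-arm-none-eabi/bin   (Windows)
#   export RTT_CC=gcc RTT_EXEC_PATH=/opt/gcc-arm-none-eabi/bin     (POSIX)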
BUILD = 'debug'
if PLATFORM == 'gcc':
# toolchains
PREFIX = 'arm-none-eabi-'
CC = PREFIX + 'gcc'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'g++'
TARGET_EXT = 'elf'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
DEVICE = ' -mcpu=cortex-m4 -mthumb -ffunction-sections -fdata-sections'
CFLAGS = DEVICE
AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp'
LFLAGS = DEVICE + ' -nostartfiles -Wl,--gc-sections,-Map=rtthread-nuc472.map -T ldscripts/sections.ld '
CPATH = ''
LPATH = ''
if BUILD == 'debug':
CFLAGS += ' -O0 -gdwarf-2'
AFLAGS += ' -gdwarf-2'
else:
CFLAGS += ' -O2'
POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
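# POST_ACTION is run by the RT-Thread SCons build after a successful link;
# $TARGET is substituted with the linked image, so this step emits
# rtthread.bin and prints a section-size summary.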
elif PLATFORM == 'armcc':
# toolchains
CC = 'armcc'
AS = 'armasm'
AR = 'armar'
LINK = 'armlink'
TARGET_EXT = 'axf'
DEVICE = ' --cpu=cortex-m4.fp'
CFLAGS = DEVICE + ' --apcs=interwork'
AFLAGS = DEVICE
LFLAGS = DEVICE + ' --info sizes --info totals --info unused --info veneers --list rtthread-nuc472.map --scatter nuc472_flash.sct'
CFLAGS += ' --c99'
CFLAGS += ' -I' + EXEC_PATH + '/ARM/RV31/INC'
LFLAGS += ' --libpath ' + EXEC_PATH + '/ARM/RV31/LIB'
EXEC_PATH += '/arm/bin40/'
if BUILD == 'debug':
CFLAGS += ' -g -O0'
AFLAGS += ' -g'
else:
CFLAGS += ' -O2'
POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET'
elif PLATFORM == 'iar':
# toolchains
CC = 'iccarm'
AS = 'iasmarm'
AR = 'iarchive'
LINK = 'ilinkarm'
TARGET_EXT = 'out'
DEVICE = ' '
CFLAGS = DEVICE
CFLAGS += ' --diag_suppress Pa050'
CFLAGS += ' --no_cse'
CFLAGS += ' --no_unroll'
CFLAGS += ' --no_inline'
CFLAGS += ' --no_code_motion'
CFLAGS += ' --no_tbaa'
CFLAGS += ' --no_clustering'
CFLAGS += ' --no_scheduling'
CFLAGS += ' --debug'
CFLAGS += ' --endian=little'
CFLAGS += ' --cpu=Cortex-M4'
CFLAGS += ' -e'
CFLAGS += ' --fpu=None'
CFLAGS += ' --dlib_config "' + EXEC_PATH + '/arm/INC/c/DLib_Config_Normal.h"'
CFLAGS += ' -Ol'
CFLAGS += ' --use_c++_inline'
AFLAGS = ''
AFLAGS += ' -s+'
AFLAGS += ' -w+'
AFLAGS += ' -r'
AFLAGS += ' --cpu Cortex-M4'
AFLAGS += ' --fpu None'
LFLAGS = ' --config nuc472_flash.icf'
LFLAGS += ' --redirect _Printf=_PrintfTiny'
LFLAGS += ' --redirect _Scanf=_ScanfSmall'
LFLAGS += ' --entry __iar_program_start'
EXEC_PATH = EXEC_PATH + '/arm/bin/'
POST_ACTION = ''
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from datetime import datetime
from django.test import SimpleTestCase, ignore_warnings
from django.utils import html, safestring, six
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
class TestUtilsHtml(SimpleTestCase):
def check_output(self, function, value, output=None):
"""
Check that function(value) equals output. If output is None,
check that function(value) equals value.
"""
if output is None:
output = value
self.assertEqual(function(value), output)
def test_escape(self):
f = html.escape
items = (
('&', '&amp;'),
('<', '&lt;'),
('>', '&gt;'),
('"', '&quot;'),
("'", '&#39;'),
)
# Substitution patterns for testing the above items.
patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
for value, output in items:
for pattern in patterns:
self.check_output(f, pattern % value, pattern % output)
# Check repeated values.
self.check_output(f, value * 2, output * 2)
# Verify it doesn't double replace &.
self.check_output(f, '<&', '&lt;&amp;')
def test_format_html(self):
self.assertEqual(
html.format_html("{} {} {third} {fourth}",
"< Dangerous >",
html.mark_safe("<b>safe</b>"),
third="< dangerous again",
fourth=html.mark_safe("<i>safe again</i>")
),
"&lt; Dangerous &gt; <b>safe</b> &lt; dangerous again <i>safe again</i>"
)
def test_linebreaks(self):
f = html.linebreaks
items = (
("para1\n\npara2\r\rpara3",
"<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
("para1\nsub1\rsub2\n\npara2",
"<p>para1<br />sub1<br />sub2</p>\n\n<p>para2</p>"),
("para1\r\n\r\npara2\rsub1\r\rpara4",
"<p>para1</p>\n\n<p>para2<br />sub1</p>\n\n<p>para4</p>"),
("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
)
for value, output in items:
self.check_output(f, value, output)
def test_strip_tags(self):
f = html.strip_tags
items = (
('<p>See: &#39;&eacute; is an apostrophe followed by e acute</p>',
'See: &#39;&eacute; is an apostrophe followed by e acute'),
('<adf>a', 'a'),
('</adf>a', 'a'),
('<asdf><asdf>e', 'e'),
('hi, <f x', 'hi, <f x'),
('234<235, right?', '234<235, right?'),
('a4<a5 right?', 'a4<a5 right?'),
('b7>b2!', 'b7>b2!'),
('</fe', '</fe'),
('<x>b<y>', 'b'),
('a<p onclick="alert(\'<test>\')">b</p>c', 'abc'),
('a<p a >b</p>c', 'abc'),
('d<a:b c:d>e</p>f', 'def'),
('<strong>foo</strong><a href="http://example.com">bar</a>', 'foobar'),
# caused infinite loop on Pythons not patched with
# http://bugs.python.org/issue20288
('&gotcha&#;<>', '&gotcha&#;<>'),
)
for value, output in items:
self.check_output(f, value, output)
# Some convoluted syntax for which parsing may differ between python versions
output = html.strip_tags('<sc<!-- -->ript>test<<!-- -->/script>')
self.assertNotIn('<script>', output)
self.assertIn('test', output)
output = html.strip_tags('<script>alert()</script>&h')
self.assertNotIn('<script>', output)
self.assertIn('alert()', output)
def test_escapejs(self):
f = html.escapejs
items = (
('"double quotes" and \'single quotes\'',
'\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027'),
(r'\ : backslashes, too', '\\u005C : backslashes, too'),
('and lots of whitespace: \r\n\t\v\f\b',
'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008'),
(r'<script>and this</script>',
'\\u003Cscript\\u003Eand this\\u003C/script\\u003E'),
(
'paragraph separator:\u2029and line separator:\u2028',
'paragraph separator:\\u2029and line separator:\\u2028'
),
)
for value, output in items:
self.check_output(f, value, output)
@ignore_warnings(category=RemovedInDjango110Warning)
def test_remove_tags(self):
f = html.remove_tags
items = (
("Yes", "b i", "Yes"),
("x y
", "a b", "x y
"),
)
for value, tags, output in items:
self.assertEqual(f(value, tags), output)
def test_smart_urlquote(self):
quote = html.smart_urlquote
# Ensure that IDNs are properly quoted
self.assertEqual(quote('http://öäü.com/'), 'http://xn--4ca9at.com/')
self.assertEqual(quote('http://öäü.com/öäü/'), 'http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/')
# Ensure that everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered safe as per RFC
self.assertEqual(quote('http://example.com/path/öäü/'), 'http://example.com/path/%C3%B6%C3%A4%C3%BC/')
self.assertEqual(quote('http://example.com/%C3%B6/ä/'), 'http://example.com/%C3%B6/%C3%A4/')
self.assertEqual(quote('http://example.com/?x=1&y=2+3&z='), 'http://example.com/?x=1&y=2+3&z=')
self.assertEqual(quote('http://example.com/?x=<>"\''), 'http://example.com/?x=%3C%3E%22%27')
self.assertEqual(quote('http://example.com/?q=http://example.com/?x=1%26q=django'),
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango')
self.assertEqual(quote('http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'),
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango')
def test_conditional_escape(self):
s = '<h1>interop</h1>'
self.assertEqual(html.conditional_escape(s),
'&lt;h1&gt;interop&lt;/h1&gt;')
self.assertEqual(html.conditional_escape(safestring.mark_safe(s)), s)
def test_html_safe(self):
@html.html_safe
class HtmlClass(object):
if six.PY2:
def __unicode__(self):
return "I'm a html class!
"
else:
def __str__(self):
return "I'm a html class!
"
html_obj = HtmlClass()
self.assertTrue(hasattr(HtmlClass, '__html__'))
self.assertTrue(hasattr(html_obj, '__html__'))
self.assertEqual(force_text(html_obj), html_obj.__html__())
def test_html_safe_subclass(self):
if six.PY2:
class BaseClass(object):
def __html__(self):
# defines __html__ on its own
return 'some html content'
def __unicode__(self):
return 'some non html content'
@html.html_safe
class Subclass(BaseClass):
def __unicode__(self):
# overrides __unicode__ and is marked as html_safe
return 'some html safe content'
else:
class BaseClass(object):
def __html__(self):
# defines __html__ on its own
return 'some html content'
def __str__(self):
return 'some non html content'
@html.html_safe
class Subclass(BaseClass):
def __str__(self):
# overrides __str__ and is marked as html_safe
return 'some html safe content'
subclass_obj = Subclass()
self.assertEqual(force_text(subclass_obj), subclass_obj.__html__())
def test_html_safe_defines_html_error(self):
msg = "can't apply @html_safe to HtmlClass because it defines __html__()."
with self.assertRaisesMessage(ValueError, msg):
@html.html_safe
class HtmlClass(object):
def __html__(self):
return "I'm a html class!
"
def test_html_safe_doesnt_define_str(self):
method_name = '__unicode__()' if six.PY2 else '__str__()'
msg = "can't apply @html_safe to HtmlClass because it doesn't define %s." % method_name
with self.assertRaisesMessage(ValueError, msg):
@html.html_safe
class HtmlClass(object):
pass
#!/usr/bin/python
# -*- coding: UTF-8 -*-
########################################################
# __Author__: Xueer Chen                               #
# Kaggle competition "Click-Through Rate Prediction":  #
#   https://www.kaggle.com/c/avazu-ctr-prediction      #
# Credit: Triskelion                                   #
########################################################
from datetime import datetime
from csv import DictReader
import sys
def csv_to_vw(loc_csv, loc_output, train=True):
"""
Munges a CSV file (loc_csv) to a VW file (loc_output). Set "train"
to False when munging a test set.
TODO: Too slow for a daily cron job. Try optimizing it, e.g. with Pandas or Go.
"""
start = datetime.now()
print("\nTurning %s into %s. Is_train_set? %s"%(loc_csv,loc_output,train))
with open(loc_output,"wb") as outfile:
for e, row in enumerate( DictReader(open(loc_csv)) ):
#Creating the features
numerical_features = ""
categorical_features = ""
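# NOTE: numerical_features is kept for the "|i" namespace in the output
# format below but is never populated here; only categorical values are
# collected.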
for k,v in row.items():
if k not in ["id","click"]:
if len(str(v)) > 0:
categorical_features += " %s" % v
#Creating the labels
if train: #we care about labels
if row['click'] == "1":
label = 1
else:
label = -1 #we set negative label to -1
outfile.write( "%s '%s |i%s |c%s\n" % (label,row['id'],numerical_features,categorical_features) )
else: #we dont care about labels
outfile.write( "1 '%s |i%s |c%s\n" % (row['id'],numerical_features,categorical_features) )
#Reporting progress
if e % 100000 == 0:
print("%s\t%s"%(e, str(datetime.now() - start)))
print("\n %s Task execution time:\n\t%s"%(e, str(datetime.now() - start)))
def main():
return 0
if __name__ == '__main__':
# main should return 0 for success, something else (usually 1) for error.
sys.exit(main())
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/trainOriginal.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/trainOriginal.vw", train=True)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/testOriginal.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/testOriginal.vw", train=False)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/trainProbs.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/trainProbs.vw", train=True)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/testProbs.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/testProbs.vw", train=False)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/ProbsTfidf.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/ProbsTfidf.vw", train=True)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/testProbsTfidf.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/testProbsTfidf.vw", train=False)
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib import messages
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from django.core.urlresolvers import reverse
import django.contrib.auth.views as django_auth_views
from astakos.im.util import prepare_response, get_query
from astakos.im.models import PendingThirdPartyUser
from astakos.im.forms import LoginForm, ExtendedPasswordChangeForm, \
ExtendedSetPasswordForm
from astakos.im import settings
import astakos.im.messages as astakos_messages
from astakos.im import auth_providers as auth
from astakos.im.views.decorators import cookie_fix, requires_anonymous, \
signed_terms_required, requires_auth_provider, login_required
from ratelimit.decorators import ratelimit
retries = settings.RATELIMIT_RETRIES_ALLOWED - 1
rate = str(retries) + '/m'
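# e.g. with RATELIMIT_RETRIES_ALLOWED = 3 this yields rate = '2/m', i.e. at
# most two POSTs per minute before the ratelimit decorator flags the request
# (checked below via request.limited).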
@requires_auth_provider('local')
@require_http_methods(["GET", "POST"])
@csrf_exempt
@requires_anonymous
@cookie_fix
@ratelimit(field='username', method='POST', rate=rate)
def login(request, on_failure='im/login.html'):
"""
on_failure: the template name to render on login failure
"""
if request.method == 'GET':
return HttpResponseRedirect(reverse('login'))
was_limited = getattr(request, 'limited', False)
form = LoginForm(data=request.POST,
was_limited=was_limited,
request=request)
next = get_query(request).get('next', '')
third_party_token = get_query(request).get('key', False)
provider = auth.get_provider('local')
if not form.is_valid():
if third_party_token:
messages.info(request, provider.get_login_to_add_msg)
return render_to_response(
on_failure,
{'login_form': form,
'next': next,
'key': third_party_token},
context_instance=RequestContext(request))
# get the user from the cache
user = form.user_cache
provider = auth.get_provider('local', user)
if not provider.get_login_policy:
message = provider.get_login_disabled_msg
messages.error(request, message)
return HttpResponseRedirect(reverse('login'))
message = None
if not user:
message = provider.get_authentication_failed_msg
elif not user.is_active:
message = user.get_inactive_message('local')
elif not user.has_auth_provider('local'):
# valid user logged in with no auth providers set, add local provider
# and let him log in
if not user.get_available_auth_providers():
user.add_auth_provider('local')
else:
message = _(astakos_messages.NO_LOCAL_AUTH)
if message:
messages.error(request, message)
return render_to_response(on_failure,
{'login_form': form},
context_instance=RequestContext(request))
response = prepare_response(request, user, next)
if third_party_token:
# link the account the user just authenticated with to the pending
# third-party provider account
try:
request.user.add_pending_auth_provider(third_party_token)
except PendingThirdPartyUser.DoesNotExist:
provider = auth.get_provider('local', request.user)
messages.error(request, provider.get_add_failed_msg)
provider = user.get_auth_provider('local')
messages.success(request, provider.get_login_success_msg)
response.set_cookie('astakos_last_login_method', 'local')
provider.update_last_login_at()
return response
@require_http_methods(["GET"])
@cookie_fix
def password_reset_done(request, *args, **kwargs):
messages.success(request, _(astakos_messages.PASSWORD_RESET_DONE))
return HttpResponseRedirect(reverse('index'))
@require_http_methods(["GET"])
@cookie_fix
def password_reset_confirm_done(request, *args, **kwargs):
messages.success(request, _(astakos_messages.PASSWORD_RESET_CONFIRM_DONE))
return HttpResponseRedirect(reverse('index'))
@cookie_fix
def password_reset(request, *args, **kwargs):
kwargs['post_reset_redirect'] = reverse(
'astakos.im.views.target.local.password_reset_done')
return django_auth_views.password_reset(request, *args, **kwargs)
@cookie_fix
def password_reset_confirm(request, *args, **kwargs):
kwargs['post_reset_redirect'] = reverse(
'astakos.im.views.target.local.password_reset_complete')
return django_auth_views.password_reset_confirm(request, *args, **kwargs)
@cookie_fix
def password_reset_complete(request, *args, **kwargs):
return django_auth_views.password_reset_complete(request, *args, **kwargs)
@require_http_methods(["GET", "POST"])
@signed_terms_required
@login_required
@cookie_fix
@requires_auth_provider('local', login=True)
def password_change(request,
template_name='registration/password_change_form.html',
post_change_redirect=None,
password_change_form=ExtendedPasswordChangeForm):
create_password = False
provider = auth.get_provider('local', request.user)
# a user without the local auth provider wants to create a password
if not request.user.has_auth_provider('local'):
if not provider.get_add_policy:
messages.error(request, provider.get_add_disabled_msg)
return HttpResponseRedirect(reverse('edit_profile'))
create_password = True
password_change_form = ExtendedSetPasswordForm
if post_change_redirect is None:
post_change_redirect = reverse('edit_profile')
if request.method == "POST":
form_kwargs = dict(
user=request.user,
data=request.POST,
)
if not create_password:
form_kwargs['session_key'] = request.session.session_key
form = password_change_form(**form_kwargs)
if form.is_valid():
form.save()
if create_password:
provider = auth.get_provider('local', request.user)
messages.success(request, provider.get_added_msg)
else:
messages.success(request,
astakos_messages.PASSWORD_RESET_CONFIRM_DONE)
return HttpResponseRedirect(post_change_redirect)
else:
form = password_change_form(user=request.user)
return render_to_response(template_name, {
'form': form,
}, context_instance=RequestContext(request, {'create_password':
create_password}))
# -*- coding: utf-8 -*-
from __future__ import with_statement
from djangocms_text_ckeditor.models import Text
from django.contrib.admin.sites import site
from django.contrib.admin.util import unquote
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser, Group, Permission
from django.contrib.sites.models import Site
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.test.client import RequestFactory
from django.test.utils import override_settings
from cms.api import (add_plugin, assign_user_to_page, create_page,
create_page_user, publish_page)
from cms.admin.forms import save_permissions
from cms.constants import PUBLISHER_STATE_PENDING
from cms.management.commands.subcommands.moderator import log
from cms.menu import get_visible_pages
from cms.models import Page, CMSPlugin, Title, ACCESS_PAGE
from cms.models.permissionmodels import (ACCESS_DESCENDANTS,
ACCESS_PAGE_AND_DESCENDANTS,
PagePermission,
GlobalPagePermission)
from cms.plugin_pool import plugin_pool
from cms.test_utils.testcases import (URL_CMS_PAGE_ADD, URL_CMS_PLUGIN_REMOVE,
URL_CMS_PLUGIN_ADD, CMSTestCase)
from cms.test_utils.util.context_managers import disable_logger
from cms.test_utils.util.fuzzy_int import FuzzyInt
from cms.utils.i18n import force_language
from cms.utils.page_resolver import get_page_from_path
from cms.utils.permissions import (has_page_add_permission,
has_page_change_permission,
has_generic_permission)
def fake_tree_attrs(page):
page.depth = 1
page.path = '0001'
page.numchild = 0
@override_settings(CMS_PERMISSION=True)
class PermissionModeratorTests(CMSTestCase):
"""Permissions and moderator together
The fixture contains 3 users, 1 published page and some other stuff.
Users:
1. `super`: superuser
2. `master`: user with permissions to all applications
3. `slave`: user assigned to page `slave-home`
Pages:
1. `home`:
- published page
- master can do anything on its subpages, but not on home!
2. `master`:
- published page
- created by super
- `master` can do anything on it and its descendants
- subpages:
3. `slave-home`:
- not published
- the slave user is assigned and can add/change/delete/
move/publish this page and its descendants
- the `master` user wants to moderate this page and all descendants
4. `pageA`:
- created by super
- master can add/change/delete on it and descendants
"""
#TODO: Split this test case into one that tests publish functionality, and
#TODO: one that tests permission inheritance. This is too complex.
def setUp(self):
# create super user
self.user_super = self._create_user("super", is_staff=True,
is_superuser=True)
self.user_staff = self._create_user("staff", is_staff=True,
add_default_permissions=True)
self.user_master = self._create_user("master", is_staff=True,
add_default_permissions=True)
self.user_slave = self._create_user("slave", is_staff=True,
add_default_permissions=True)
self.user_normal = self._create_user("normal", is_staff=False)
self.user_normal.user_permissions.add(
Permission.objects.get(codename='publish_page'))
with self.login_user_context(self.user_super):
self.home_page = create_page("home", "nav_playground.html", "en",
created_by=self.user_super)
# master page & master user
self.master_page = create_page("master", "nav_playground.html", "en")
# create non global, non staff user
self.user_non_global = self._create_user("nonglobal")
# assign master user under home page
assign_user_to_page(self.home_page, self.user_master,
grant_on=ACCESS_DESCENDANTS, grant_all=True)
# and to master page
assign_user_to_page(self.master_page, self.user_master,
grant_on=ACCESS_PAGE_AND_DESCENDANTS, grant_all=True)
# slave page & slave user
self.slave_page = create_page("slave-home", "col_two.html", "en",
parent=self.master_page, created_by=self.user_super)
assign_user_to_page(self.slave_page, self.user_slave, grant_all=True)
# create page_b
page_b = create_page("pageB", "nav_playground.html", "en", created_by=self.user_super)
# Normal user
# it's allowed for the normal user to view the page
assign_user_to_page(page_b, self.user_normal, can_view=True)
# create page_a - sample page from master
page_a = create_page("pageA", "nav_playground.html", "en",
created_by=self.user_super)
assign_user_to_page(page_a, self.user_master,
can_add=True, can_change=True, can_delete=True, can_publish=True,
can_move_page=True)
# publish after creating all drafts
publish_page(self.home_page, self.user_super, 'en')
publish_page(self.master_page, self.user_super, 'en')
self.page_b = publish_page(page_b, self.user_super, 'en')
def _add_plugin(self, user, page):
"""
Add a plugin using the test client to check for permissions.
"""
with self.login_user_context(user):
placeholder = page.placeholders.all()[0]
post_data = {
'plugin_language': 'en',
'plugin_parent': '',
'placeholder_id': placeholder.pk,
'plugin_type': 'TextPlugin'
}
url = URL_CMS_PLUGIN_ADD
response = self.client.post(url, post_data)
self.assertEqual(response.status_code, 200)
return response.content.decode('utf8')
def test_super_can_add_page_to_root(self):
with self.login_user_context(self.user_super):
response = self.client.get(URL_CMS_PAGE_ADD)
self.assertEqual(response.status_code, 200)
def test_master_cannot_add_page_to_root(self):
with self.login_user_context(self.user_master):
response = self.client.get(URL_CMS_PAGE_ADD)
self.assertEqual(response.status_code, 403)
def test_slave_cannot_add_page_to_root(self):
with self.login_user_context(self.user_slave):
response = self.client.get(URL_CMS_PAGE_ADD)
self.assertEqual(response.status_code, 403)
def test_slave_can_add_page_under_slave_home(self):
with self.login_user_context(self.user_slave):
# move to admin.py?
# url = URL_CMS_PAGE_ADD + "?target=%d&position=last-child" % slave_page.pk
# can he even access it over get?
# response = self.client.get(url)
# self.assertEqual(response.status_code, 200)
# add page
page = create_page("page", "nav_playground.html", "en",
parent=self.slave_page, created_by=self.user_slave)
# adds user_slave as page moderator for this page
# public model shouldn't be available yet, because of the moderation
# moderators and approval ok?
# must not have public object yet
self.assertFalse(page.publisher_public)
self.assertObjectExist(Title.objects, slug="page")
self.assertObjectDoesNotExist(Title.objects.public(), slug="page")
self.assertTrue(has_generic_permission(page.pk, self.user_slave, "publish", 1))
# publish as slave, published as user_master before
publish_page(page, self.user_slave, 'en')
# user_slave is moderator for this page
# approve / publish as user_slave
# user master should be able to approve as well
@override_settings(
CMS_PLACEHOLDER_CONF={
'col_left': {
'default_plugins': [
{
'plugin_type': 'TextPlugin',
'values': {
'body': 'Lorem ipsum dolor sit amet, consectetur adipisicing elit. Culpa, repellendus, delectus, quo quasi ullam inventore quod quam aut voluptatum aliquam voluptatibus harum officiis officia nihil minus unde accusamus dolorem repudiandae.'
},
},
]
},
},
)
def test_default_plugins(self):
with self.login_user_context(self.user_slave):
self.assertEqual(CMSPlugin.objects.count(), 0)
response = self.client.get(self.slave_page.get_absolute_url(), {'edit': 1})
self.assertEqual(response.status_code, 200)
self.assertEqual(CMSPlugin.objects.count(), 1)
def test_page_added_by_slave_can_be_published_by_user_master(self):
# add page
page = create_page("page", "nav_playground.html", "en",
parent=self.slave_page, created_by=self.user_slave)
# same as test_slave_can_add_page_under_slave_home
# must not have public object yet
self.assertFalse(page.publisher_public)
self.assertTrue(has_generic_permission(page.pk, self.user_master, "publish", page.site.pk))
# user_master should have publish permissions for children as well
publish_page(self.slave_page, self.user_master, 'en')
page = publish_page(page, self.user_master, 'en')
self.assertTrue(page.publisher_public_id)
# user_master is moderator for top level page / but can't approve descendants?
# approve / publish as user_master
# user master should be able to approve descendants
def test_super_can_add_plugin(self):
self._add_plugin(self.user_super, page=self.slave_page)
def test_master_can_add_plugin(self):
self._add_plugin(self.user_master, page=self.slave_page)
def test_slave_can_add_plugin(self):
self._add_plugin(self.user_slave, page=self.slave_page)
def test_same_order(self):
# create 4 pages
slugs = []
for i in range(0, 4):
page = create_page("page", "nav_playground.html", "en",
parent=self.home_page)
slug = page.title_set.drafts()[0].slug
slugs.append(slug)
# approve last 2 pages in reverse order
for slug in reversed(slugs[2:]):
page = self.assertObjectExist(Page.objects.drafts(), title_set__slug=slug)
page = publish_page(page, self.user_master, 'en')
self.check_published_page_attributes(page)
def test_create_copy_publish(self):
# create new page to copy
page = create_page("page", "nav_playground.html", "en",
parent=self.slave_page)
# copy it under home page...
# TODO: Use page.copy_page here
with self.login_user_context(self.user_master):
copied_page = self.copy_page(page, self.home_page)
page = publish_page(copied_page, self.user_master, 'en')
self.check_published_page_attributes(page)
def test_create_publish_copy(self):
# create new page to copy
page = create_page("page", "nav_playground.html", "en",
parent=self.home_page)
page = publish_page(page, self.user_master, 'en')
# copy it under master page...
# TODO: Use page.copy_page here
with self.login_user_context(self.user_master):
copied_page = self.copy_page(page, self.master_page)
self.check_published_page_attributes(page)
copied_page = publish_page(copied_page, self.user_master, 'en')
self.check_published_page_attributes(copied_page)
def test_subtree_needs_approval(self):
# create page under slave_page
page = create_page("parent", "nav_playground.html", "en",
parent=self.home_page)
self.assertFalse(page.publisher_public)
# create subpage under page
subpage = create_page("subpage", "nav_playground.html", "en", parent=page)
self.assertFalse(subpage.publisher_public)
# publish both of them in reverse order
subpage = publish_page(subpage, self.user_master, 'en')
# subpage should not be published, because parent is not published
# yet, should be marked as `publish when parent`
self.assertFalse(subpage.publisher_public)
# publish page (parent of subpage), so subpage must be published also
page = publish_page(page, self.user_master, 'en')
self.assertNotEqual(page.publisher_public, None)
# reload subpage, it was probably changed
subpage = self.reload(subpage)
# parent was published, so subpage must be also published..
self.assertNotEqual(subpage.publisher_public, None)
#check attributes
self.check_published_page_attributes(page)
self.check_published_page_attributes(subpage)
def test_subtree_with_super(self):
# create page under root
page = create_page("page", "nav_playground.html", "en")
self.assertFalse(page.publisher_public)
# create subpage under page
subpage = create_page("subpage", "nav_playground.html", "en",
parent=page)
self.assertFalse(subpage.publisher_public)
# tree id must be the same
self.assertEqual(page.path[0:4], subpage.path[0:4])
# publish both of them
page = self.reload(page)
page = publish_page(page, self.user_super, 'en')
# reload subpage, there was a path change
subpage = self.reload(subpage)
self.assertEqual(page.path[0:4], subpage.path[0:4])
subpage = publish_page(subpage, self.user_super, 'en')
# tree id must stay the same
self.assertEqual(page.path[0:4], subpage.path[0:4])
# published pages must also have the same root-path
self.assertEqual(page.publisher_public.path[0:4], subpage.publisher_public.path[0:4])
#check attributes
self.check_published_page_attributes(page)
self.check_published_page_attributes(subpage)
def test_super_add_page_to_root(self):
"""Create page which is not under moderation in root, and check if
some properties are correct.
"""
# create page under root
page = create_page("page", "nav_playground.html", "en")
# public must not exist
self.assertFalse(page.publisher_public)
def test_moderator_flags(self):
"""Add page under slave_home and check its flag
"""
page = create_page("page", "nav_playground.html", "en",
parent=self.slave_page)
# No public version
self.assertIsNone(page.publisher_public)
self.assertFalse(page.publisher_public_id)
# check publish box
page = publish_page(page, self.user_slave, 'en')
# public page must not exist because of parent
self.assertFalse(page.publisher_public)
# waiting for parents
self.assertEqual(page.get_publisher_state('en'), PUBLISHER_STATE_PENDING)
# publish slave page
self.slave_page = self.slave_page.reload()
slave_page = publish_page(self.slave_page, self.user_master, 'en')
self.assertFalse(page.publisher_public)
self.assertTrue(slave_page.publisher_public)
def test_plugins_get_published(self):
# create page under root
page = create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.all()[0]
add_plugin(placeholder, "TextPlugin", "en", body="test")
# public must not exist
self.assertEqual(CMSPlugin.objects.all().count(), 1)
publish_page(page, self.user_super, 'en')
self.assertEqual(CMSPlugin.objects.all().count(), 2)
def test_remove_plugin_page_under_moderation(self):
# login as slave and create page
page = create_page("page", "nav_playground.html", "en", parent=self.slave_page)
# add plugin
placeholder = page.placeholders.all()[0]
plugin = add_plugin(placeholder, "TextPlugin", "en", body="test")
# publish page
page = self.reload(page)
page = publish_page(page, self.user_slave, 'en')
# only the draft plugin should exist
self.assertEqual(CMSPlugin.objects.all().count(), 1)
# page should require approval
self.assertEqual(page.get_publisher_state('en'), PUBLISHER_STATE_PENDING)
# master approves and publishes the page
# first approve slave-home
slave_page = self.reload(self.slave_page)
publish_page(slave_page, self.user_master, 'en')
page = self.reload(page)
page = publish_page(page, self.user_master, 'en')
# draft and public plugins should now exist
self.assertEqual(CMSPlugin.objects.all().count(), 2)
# login as slave and delete the plugin - should require moderation
with self.login_user_context(self.user_slave):
plugin_data = {
'plugin_id': plugin.pk
}
remove_url = URL_CMS_PLUGIN_REMOVE + "%s/" % plugin.pk
response = self.client.post(remove_url, plugin_data)
self.assertEqual(response.status_code, 302)
# there should only be a public plugin - since the draft has been deleted
self.assertEqual(CMSPlugin.objects.all().count(), 1)
page = self.reload(page)
# login as super user and approve/publish the page
publish_page(page, self.user_super, 'en')
# there should now be 0 plugins
self.assertEqual(CMSPlugin.objects.all().count(), 0)
def test_superuser_can_view(self):
url = self.page_b.get_absolute_url(language='en')
with self.login_user_context(self.user_super):
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_staff_can_view(self):
url = self.page_b.get_absolute_url(language='en')
all_view_perms = PagePermission.objects.filter(can_view=True)
# verify that user_staff has no view permission on this page
has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b:
if perm.user == self.user_staff:
has_perm = True
self.assertEqual(has_perm, False)
login_ok = self.client.login(username=getattr(self.user_staff, get_user_model().USERNAME_FIELD),
password=getattr(self.user_staff, get_user_model().USERNAME_FIELD))
self.assertTrue(login_ok)
# really logged in
self.assertTrue('_auth_user_id' in self.client.session)
login_user_id = self.client.session.get('_auth_user_id')
user = get_user_model().objects.get(pk=self.user_staff.pk)
self.assertEqual(login_user_id, user.id)
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_user_normal_can_view(self):
url = self.page_b.get_absolute_url(language='en')
all_view_perms = PagePermission.objects.filter(can_view=True)
# verify that user_normal has access to this page
normal_has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b:
if perm.user == self.user_normal:
normal_has_perm = True
self.assertTrue(normal_has_perm)
with self.login_user_context(self.user_normal):
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
# verify that user_non_global has no access to this page
non_global_has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b:
if perm.user == self.user_non_global:
non_global_has_perm = True
self.assertFalse(non_global_has_perm)
with self.login_user_context(self.user_non_global):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
# non logged in user
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_user_globalpermission(self):
# Global user
user_global = self._create_user("global")
with self.login_user_context(self.user_super):
user_global = create_page_user(user_global, user_global)
user_global.is_staff = False
user_global.save() # Prevent is_staff permission
global_page = create_page("global", "nav_playground.html", "en",
published=True)
# Removed call since global page user doesn't have publish permission
#global_page = publish_page(global_page, user_global)
# it's allowed for the normal user to view the page
assign_user_to_page(global_page, user_global,
global_permission=True, can_view=True)
url = global_page.get_absolute_url('en')
all_view_perms = PagePermission.objects.filter(can_view=True)
has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b and perm.user == user_global:
has_perm = True
self.assertEqual(has_perm, False)
global_page_perm_q = Q(user=user_global) & Q(can_view=True)
global_view_perms = GlobalPagePermission.objects.filter(global_page_perm_q).exists()
self.assertEqual(global_view_perms, True)
# user_global
with self.login_user_context(user_global):
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
# self.non_user_global
has_perm = False
for perm in all_view_perms:
if perm.page == self.page_b and perm.user == self.user_non_global:
has_perm = True
self.assertEqual(has_perm, False)
global_page_perm_q = Q(user=self.user_non_global) & Q(can_view=True)
global_view_perms = GlobalPagePermission.objects.filter(global_page_perm_q).exists()
self.assertEqual(global_view_perms, False)
with self.login_user_context(self.user_non_global):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_anonymous_user_public_for_all(self):
url = self.page_b.get_absolute_url('en')
with self.settings(CMS_PUBLIC_FOR='all'):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_anonymous_user_public_for_none(self):
# default of when to show pages to anonymous user doesn't take
# global permissions into account
url = self.page_b.get_absolute_url('en')
with self.settings(CMS_PUBLIC_FOR=None):
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
@override_settings(CMS_PERMISSION=True)
class PatricksMoveTest(CMSTestCase):
"""
The fixture contains 3 users, 1 published page and some other stuff.
Users:
1. `super`: superuser
2. `master`: user with permissions to all applications
3. `slave`: user assigned to page `slave-home`
Pages:
1. `home`:
- published page
- master can do anything on its subpages, but not on home!
2. `master`:
- published page
- created by super
- `master` can do anything on it and its descendants
- subpages:
3. `slave-home`:
- not published
- the slave user is assigned and can add/change/delete/
move/publish/moderate this page and its descendants
- the `master` user wants to moderate this page and all descendants
4. `pageA`:
- created by super
- master can add/change/delete on it and descendants
"""
def setUp(self):
# create super user
self.user_super = self._create_user("super", True, True)
with self.login_user_context(self.user_super):
self.home_page = create_page("home", "nav_playground.html", "en",
created_by=self.user_super)
# master page & master user
self.master_page = create_page("master", "nav_playground.html", "en")
# create master user
self.user_master = self._create_user("master", True)
self.user_master.user_permissions.add(Permission.objects.get(codename='publish_page'))
#self.user_master = create_page_user(self.user_super, master, grant_all=True)
# assign master user under home page
assign_user_to_page(self.home_page, self.user_master,
grant_on=ACCESS_DESCENDANTS, grant_all=True)
# and to master page
assign_user_to_page(self.master_page, self.user_master, grant_all=True)
# slave page & slave user
self.slave_page = create_page("slave-home", "nav_playground.html", "en",
parent=self.master_page, created_by=self.user_super)
slave = self._create_user("slave", True)
self.user_slave = create_page_user(self.user_super, slave, can_add_page=True,
can_change_page=True, can_delete_page=True)
assign_user_to_page(self.slave_page, self.user_slave, grant_all=True)
# create page_a - sample page from master
page_a = create_page("pageA", "nav_playground.html", "en",
created_by=self.user_super)
assign_user_to_page(page_a, self.user_master,
can_add=True, can_change=True, can_delete=True, can_publish=True,
can_move_page=True)
# publish after creating all drafts
publish_page(self.home_page, self.user_super, 'en')
publish_page(self.master_page, self.user_super, 'en')
with self.login_user_context(self.user_slave):
# all of them are under moderation...
self.pa = create_page("pa", "nav_playground.html", "en", parent=self.slave_page)
self.pb = create_page("pb", "nav_playground.html", "en", parent=self.pa, position="right")
self.pc = create_page("pc", "nav_playground.html", "en", parent=self.pb, position="right")
self.pd = create_page("pd", "nav_playground.html", "en", parent=self.pb)
self.pe = create_page("pe", "nav_playground.html", "en", parent=self.pd, position="right")
self.pf = create_page("pf", "nav_playground.html", "en", parent=self.pe)
self.pg = create_page("pg", "nav_playground.html", "en", parent=self.pf, position="right")
self.ph = create_page("ph", "nav_playground.html", "en", parent=self.pf, position="right")
self.assertFalse(self.pg.publisher_public)
# login as master for approval
self.slave_page = self.slave_page.reload()
publish_page(self.slave_page, self.user_master, 'en')
# publish and approve them all
publish_page(self.pa, self.user_master, 'en')
publish_page(self.pb, self.user_master, 'en')
publish_page(self.pc, self.user_master, 'en')
publish_page(self.pd, self.user_master, 'en')
publish_page(self.pe, self.user_master, 'en')
publish_page(self.pf, self.user_master, 'en')
publish_page(self.pg, self.user_master, 'en')
publish_page(self.ph, self.user_master, 'en')
self.reload_pages()
def reload_pages(self):
self.pa = self.pa.reload()
self.pb = self.pb.reload()
self.pc = self.pc.reload()
self.pd = self.pd.reload()
self.pe = self.pe.reload()
self.pf = self.pf.reload()
self.pg = self.pg.reload()
self.ph = self.ph.reload()
def test_patricks_move(self):
"""
Tests permissions and moderation ("permmod") when moving trees of pages.
1. build following tree (master node is approved and published)
slave-home
/ | \
A B C
/ \
D E
/ | \
F G H
2. perform move operations:
1. move G under C
2. move E under G
slave-home
/ | \
A B C
/ \
D G
\
E
/ \
F H
3. approve nodes in following order:
1. approve H
2. approve G
3. approve E
4. approve F
"""
# TODO: this takes 5 seconds to run on my MBP. That's TOO LONG!
self.assertEqual(self.pg.parent_id, self.pe.pk)
self.assertEqual(self.pg.publisher_public.parent_id, self.pe.publisher_public_id)
# perform moves under slave...
self.move_page(self.pg, self.pc)
self.reload_pages()
# Draft page is now under PC
self.assertEqual(self.pg.parent_id, self.pc.pk)
# Public page is under PC
self.assertEqual(self.pg.publisher_public.parent_id, self.pc.publisher_public_id)
self.assertEqual(self.pg.publisher_public.parent.get_absolute_url(),
self.pc.publisher_public.get_absolute_url())
self.assertEqual(self.pg.get_absolute_url(), self.pg.publisher_public.get_absolute_url())
self.move_page(self.pe, self.pg)
self.reload_pages()
self.assertEqual(self.pe.parent_id, self.pg.pk)
self.assertEqual(self.pe.publisher_public.parent_id, self.pg.publisher_public_id)
self.ph = self.ph.reload()
# check urls - they should stay the same after the move
self.assertEqual(
self.pg.publisher_public.get_absolute_url(),
self.pg.get_absolute_url()
)
self.assertEqual(
self.ph.publisher_public.get_absolute_url(),
self.ph.get_absolute_url()
)
# public parent check after move
self.assertEqual(self.pg.publisher_public.parent.pk, self.pc.publisher_public_id)
self.assertEqual(self.pe.publisher_public.parent.pk, self.pg.publisher_public_id)
self.assertEqual(self.ph.publisher_public.parent.pk, self.pe.publisher_public_id)
# check if urls are correct after move
self.assertEqual(
self.pg.publisher_public.get_absolute_url(),
u'%smaster/slave-home/pc/pg/' % self.get_pages_root()
)
self.assertEqual(
self.ph.publisher_public.get_absolute_url(),
u'%smaster/slave-home/pc/pg/pe/ph/' % self.get_pages_root()
)
class ModeratorSwitchCommandTest(CMSTestCase):
def test_switch_moderator_on(self):
with force_language("en"):
pages_root = unquote(reverse("pages-root"))
page1 = create_page('page', 'nav_playground.html', 'en', published=True)
with disable_logger(log):
call_command('cms', 'moderator', 'on')
with force_language("en"):
path = page1.get_absolute_url()[len(pages_root):].strip('/')
page2 = get_page_from_path(path)
self.assertEqual(page1.get_absolute_url(), page2.get_absolute_url())
def test_table_name_patching(self):
"""
This tests the plugin models patching when publishing from the command line
"""
self.get_superuser()
create_page("The page!", "nav_playground.html", "en", published=True)
draft = Page.objects.drafts()[0]
draft.reverse_id = 'a_test' # we have to change *something*
draft.save()
add_plugin(draft.placeholders.get(slot=u"body"),
u"TextPlugin", u"en", body="Test content")
draft.publish('en')
add_plugin(draft.placeholders.get(slot=u"body"),
u"TextPlugin", u"en", body="Test content")
# Manually undoing table name patching
Text._meta.db_table = 'djangocms_text_ckeditor_text'
plugin_pool.patched = False
with disable_logger(log):
call_command('cms', 'moderator', 'on')
# Sanity check the database (we should have one draft and one public)
not_drafts = len(Page.objects.filter(publisher_is_draft=False))
drafts = len(Page.objects.filter(publisher_is_draft=True))
self.assertEqual(not_drafts, 1)
self.assertEqual(drafts, 1)
def test_switch_moderator_off(self):
with force_language("en"):
pages_root = unquote(reverse("pages-root"))
page1 = create_page('page', 'nav_playground.html', 'en', published=True)
path = page1.get_absolute_url()[len(pages_root):].strip('/')
page2 = get_page_from_path(path)
self.assertIsNotNone(page2)
self.assertEqual(page1.get_absolute_url(), page2.get_absolute_url())
def tearDown(self):
plugin_pool.patched = False
plugin_pool.set_plugin_meta()
class ViewPermissionBaseTests(CMSTestCase):
def setUp(self):
self.page = create_page('testpage', 'nav_playground.html', 'en')
def get_request(self, user=None):
attrs = {
'user': user or AnonymousUser(),
'REQUEST': {},
'session': {},
}
return type('Request', (object,), attrs)
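# Note: type('Request', (object,), attrs) builds a throwaway class whose
# class attributes mimic the request attributes the permission checks read;
# it is a lightweight stand-in for a real HttpRequest in these tests.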
@override_settings(
CMS_PERMISSION=False,
CMS_PUBLIC_FOR='staff',
)
class BasicViewPermissionTests(ViewPermissionBaseTests):
"""
Test functionality with CMS_PERMISSION set to false, as this is the
normal use case
"""
@override_settings(CMS_PUBLIC_FOR="all")
def test_unauth_public(self):
request = self.get_request()
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request))
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[self.page.pk])
def test_unauth_non_access(self):
request = self.get_request()
with self.assertNumQueries(0):
self.assertFalse(self.page.has_view_permission(request))
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[])
@override_settings(CMS_PUBLIC_FOR="all")
def test_staff_public_all(self):
request = self.get_request(self.get_staff_user_with_no_permissions())
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request))
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[self.page.pk])
def test_staff_public_staff(self):
request = self.get_request(self.get_staff_user_with_no_permissions())
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request))
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[self.page.pk])
@override_settings(CMS_PUBLIC_FOR="none")
def test_staff_basic_auth(self):
request = self.get_request(self.get_staff_user_with_no_permissions())
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request))
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[self.page.pk])
@override_settings(CMS_PUBLIC_FOR="none")
def test_normal_basic_auth(self):
request = self.get_request(self.get_standard_user())
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request))
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[self.page.pk])
@override_settings(
CMS_PERMISSION=True,
CMS_PUBLIC_FOR='none'
)
class UnrestrictedViewPermissionTests(ViewPermissionBaseTests):
"""
Test functionality with CMS_PERMISSION set to True but no restrictions
apply to this specific page
"""
def test_unauth_non_access(self):
request = self.get_request()
with self.assertNumQueries(1):
"""
The query is:
PagePermission query for the affected page (is the page restricted?)
"""
self.assertFalse(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertFalse(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[])
def test_global_access(self):
user = self.get_standard_user()
GlobalPagePermission.objects.create(can_view=True, user=user)
request = self.get_request(user)
with self.assertNumQueries(2):
"""The queries are:
PagePermission query for the affected page (is the page restricted?)
GlobalPagePermission query for the page site
"""
self.assertTrue(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[self.page.pk])
def test_normal_denied(self):
request = self.get_request(self.get_standard_user())
with self.assertNumQueries(4):
"""
The queries are:
PagePermission query for the affected page (is the page restricted?)
GlobalPagePermission query for the page site
User permissions query
Content type query
"""
self.assertFalse(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertFalse(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, [self.page], self.page.site),
[])
@override_settings(
CMS_PERMISSION=True,
CMS_PUBLIC_FOR='all'
)
class RestrictedViewPermissionTests(ViewPermissionBaseTests):
"""
Test functionality with CMS_PERMISSION set to True and view restrictions
apply to this specific page
"""
def setUp(self):
super(RestrictedViewPermissionTests, self).setUp()
self.group = Group.objects.create(name='testgroup')
self.pages = [self.page]
self.expected = [self.page.pk]
PagePermission.objects.create(page=self.page, group=self.group, can_view=True, grant_on=ACCESS_PAGE)
def test_unauthed(self):
request = self.get_request()
with self.assertNumQueries(1):
"""The queries are:
PagePermission query for the affected page (is the page restricted?)
"""
self.assertFalse(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertFalse(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, self.pages, self.page.site),
[])
def test_page_permissions(self):
user = self.get_standard_user()
request = self.get_request(user)
PagePermission.objects.create(can_view=True, user=user, page=self.page, grant_on=ACCESS_PAGE)
with self.assertNumQueries(3):
"""
The queries are:
PagePermission query (is this page restricted)
GlobalpagePermission query for user
PagePermission query for this user
"""
self.assertTrue(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, self.pages, self.page.site),
self.expected)
def test_page_group_permissions(self):
user = self.get_standard_user()
user.groups.add(self.group)
request = self.get_request(user)
with self.assertNumQueries(3):
self.assertTrue(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, self.pages, self.page.site),
self.expected)
def test_global_permission(self):
user = self.get_standard_user()
GlobalPagePermission.objects.create(can_view=True, user=user)
request = self.get_request(user)
with self.assertNumQueries(2):
"""
The queries are:
PagePermission query (is this page restricted)
GlobalpagePermission query for user
"""
self.assertTrue(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, self.pages, self.page.site),
self.expected)
def test_basic_perm_denied(self):
request = self.get_request(self.get_staff_user_with_no_permissions())
with self.assertNumQueries(5):
"""
The queries are:
PagePermission query (is this page restricted)
GlobalpagePermission query for user
PagePermission query for this user
Generic django permission lookup
content type lookup by permission lookup
"""
self.assertFalse(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertFalse(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, self.pages, self.page.site),
[])
def test_basic_perm(self):
user = self.get_standard_user()
user.user_permissions.add(Permission.objects.get(codename='view_page'))
request = self.get_request(user)
with self.assertNumQueries(5):
"""
The queries are:
PagePermission query (is this page restricted)
GlobalpagePermission query for user
PagePermission query for this user
Generic django permission lookup
content type lookup by permission lookup
"""
self.assertTrue(self.page.has_view_permission(request))
with self.assertNumQueries(0):
self.assertTrue(self.page.has_view_permission(request)) # test cache
self.assertEqual(get_visible_pages(request, self.pages, self.page.site),
self.expected)
class PublicViewPermissionTests(RestrictedViewPermissionTests):
""" Run the same tests as before, but on the public page instead. """
def setUp(self):
super(PublicViewPermissionTests, self).setUp()
self.page.publish('en')
self.pages = [self.page.publisher_public]
self.expected = [self.page.publisher_public_id]
class GlobalPermissionTests(CMSTestCase):
def test_sanity_check(self):
""" Because we have a new manager, we'll do some basic checks."""
# manager is still named the same.
self.assertTrue(hasattr(GlobalPagePermission, 'objects'))
self.assertEqual(0, GlobalPagePermission.objects.all().count())
# we are correctly inheriting from BasicPagePermissionManager
self.assertTrue(hasattr(GlobalPagePermission.objects, 'with_user'))
# If we're using the new manager, we have extra methods which check for
# access to this site OR all sites.
self.assertTrue(hasattr(GlobalPagePermission.objects, 'user_has_permission'))
# these are just convenience methods for the above.
self.assertTrue(hasattr(GlobalPagePermission.objects, 'user_has_add_permission'))
self.assertTrue(hasattr(GlobalPagePermission.objects, 'user_has_change_permission'))
self.assertTrue(hasattr(GlobalPagePermission.objects, 'user_has_view_permission'))
def test_emulate_admin_index(self):
""" Call methods that emulate the adminsite instance's index.
This test was basically the reason for the new manager, in light of the
problem highlighted in ticket #1120, which asserts that giving a user
no site-specific rights when creating a GlobalPagePermission should
allow access to all sites.
"""
# create and then ignore this user.
superuser = self._create_user("super", is_staff=True, is_active=True,
is_superuser=True)
superuser.set_password("super")
superuser.save()
# create 2 staff users
SITES = [
Site.objects.get(pk=1),
Site.objects.create(domain='example2.com', name='example2.com'),
]
USERS = [
self._create_user("staff", is_staff=True, is_active=True),
self._create_user("staff_2", is_staff=True, is_active=True),
]
for user in USERS:
user.set_password('staff')
# re-use the same methods the UserPage form does.
# Note that it internally calls .save(), as we've not done so.
save_permissions({
'can_add_page': True,
'can_change_page': True,
'can_delete_page': False
}, user)
GlobalPagePermission.objects.create(can_add=True, can_change=True,
can_delete=False, user=USERS[0])
# we're querying here to ensure that even though we've created two users
# above, we should have successfully filtered to just one perm.
self.assertEqual(1, GlobalPagePermission.objects.with_user(USERS[0]).count())
# this will confirm explicit permissions still work, by adding the first
# site instance to the many2many relationship 'sites'
GlobalPagePermission.objects.create(can_add=True, can_change=True,
can_delete=False,
user=USERS[1]).sites.add(SITES[0])
self.assertEqual(1, GlobalPagePermission.objects.with_user(USERS[1]).count())
homepage = create_page(title="master", template="nav_playground.html",
language="en", in_navigation=True, slug='/')
publish_page(page=homepage, user=superuser, language='en')
with self.settings(CMS_PERMISSION=True):
# for all users, they should have access to site 1
request = RequestFactory().get(path='/', data={'site__exact': 1})
# we need a session attribute for current_site(request), which is
# used by has_page_add_permission and has_page_change_permission
request.session = {}
for user in USERS:
# has_page_add_permission and has_page_change_permission both test
# for this explicitly, to see if it's a superuser.
request.user = user
# Note, the query count is inflated by doing additional lookups
# because there's a site param in the request.
with self.assertNumQueries(FuzzyInt(6,7)):
# PageAdmin swaps out the methods called for permissions
# if the setting is true, it makes use of cms.utils.permissions
self.assertTrue(has_page_add_permission(request))
self.assertTrue(has_page_change_permission(request))
# internally this calls PageAdmin.has_[add|change|delete]_permission()
self.assertEqual({'add': True, 'change': True, 'delete': False},
site._registry[Page].get_model_perms(request))
# can't use the above loop for this test, as we're testing that
# user 1 has access, but user 2 does not, as they are only assigned
# to site 1
request = RequestFactory().get('/', data={'site__exact': 2})
request.session = {}
# As before, the query count is inflated by doing additional lookups
# because there's a site param in the request
with self.assertNumQueries(FuzzyInt(11, 20)):
# this user shouldn't have access to site 2
request.user = USERS[1]
self.assertFalse(has_page_add_permission(request))
self.assertFalse(has_page_change_permission(request))
self.assertEqual({'add': False, 'change': False, 'delete': False},
site._registry[Page].get_model_perms(request))
# but, going back to the first user, they should.
request = RequestFactory().get('/', data={'site__exact': 2})
request.user = USERS[0]
self.assertTrue(has_page_add_permission(request))
self.assertTrue(has_page_change_permission(request))
self.assertEqual({'add': True, 'change': True, 'delete': False},
site._registry[Page].get_model_perms(request))
def test_has_page_add_permission_with_target(self):
page = create_page('Test', 'nav_playground.html', 'en')
user = self._create_user('user')
request = RequestFactory().get('/', data={'target': page.pk})
request.session = {}
request.user = user
has_perm = has_page_add_permission(request)
self.assertFalse(has_perm)
# Transformer/IO/_VASP.py
# -------
# Imports
# -------
import warnings;
from Transformer import Constants;
from Transformer import Structure;
from Transformer.Utilities import StructureTools;
# ---------
# Functions
# ---------
def ReadPOSCARFile(inputReader, atomicSymbolLookupTable = None):
# Variables to collect.
systemName = None;
scaleFactor = None;
latticeVectors = None;
atomTypes, atomCounts = None, None;
coordinateType, atomPositions = None, None;
# Read the system name.
systemName = next(inputReader).strip();
# Read the scale factor.
scaleFactor = float(next(inputReader).strip());
# Read the lattice vectors.
latticeVectors = [];
for i in range(0, 3):
latticeVectors.append(
[float(element) for element in next(inputReader).strip().split()][:3]
);
# Although we sliced the list returned from split(), this does not guarantee that there were at least three elements.
for latticeVector in latticeVectors:
if len(latticeVector) != 3:
raise Exception("Error: The lattice vector specification in the supplied VASP POSCAR file is invalid.");
# Read the atom types and/or atom counts.
atomTypes = [element for element in next(inputReader).strip().split()];
atomCounts = None;
if atomTypes[0].isdigit():
atomCounts = [int(item) for item in atomTypes];
atomTypes = None;
else:
atomCounts = [int(element) for element in next(inputReader).strip().split()];
# If atom types were given in the file, check the number of atom types listed is consistent with the number of atom counts.
if atomTypes is not None and len(atomTypes) != len(atomCounts):
raise Exception("Error: The atom-type and atom-count lines in the supplied VASP POSCAR file contain different numbers of entries.");
# Read the coordinate type.
coordinateType = None;
keyword = next(inputReader).strip().lower();
# Check for and skip the "selective dynamics" keyword.
if keyword[0] == "s":
keyword = next(inputReader).strip().lower();
if keyword[0] == 'd':
coordinateType = 'd';
elif keyword[0] == 'c' or keyword[0] == 'k':
coordinateType = 'c';
else:
raise Exception("Error: The coordinate-type line in the supplied VASP POSCAR file contains an unexpected keyword.");
# Read the atom positions.
totalAtomCount = 0;
for atomCount in atomCounts:
totalAtomCount = totalAtomCount + atomCount;
atomPositions = [];
for i in range(0, totalAtomCount):
elements = next(inputReader).strip().split();
atomPositions.append(
[float(element) for element in elements[:3]]
);
for atomPosition in atomPositions:
if len(atomPosition) != 3:
raise Exception("Error: One or more atomic position specifications in the supplied VASP POSCAR file is invalid.");
# If a scale factor other than 1 has been set, adjust the lattice vectors.
if scaleFactor != 1.0:
for i, vector in enumerate(latticeVectors):
latticeVectors[i] = [scaleFactor * x for x in vector];
# Build a list of atom-type numbers.
atomTypeNumbers = None;
if atomTypes is not None:
# If atom types were read from the POSCAR file, convert these to atomic numbers.
atomicSymbols = [];
for atomType, atomCount in zip(atomTypes, atomCounts):
atomicSymbols = atomicSymbols + [atomType] * atomCount;
# Convert the atomic symbols to atom-type numbers.
atomTypeNumbers = [
Structure.AtomTypeToAtomTypeNumber(symbol, atomicSymbolLookupTable = atomicSymbolLookupTable)
for symbol in atomicSymbols
];
else:
# If not, issue a warning and assign negative type numbers from -1.
warnings.warn("Structure objects returned by reading VASP 4-format POSCAR files numbers will be initialised with negative atomic numbers from -1.", UserWarning);
atomTypeNumbers = [];
for i, atomCount in enumerate(atomCounts):
atomTypeNumbers = atomTypeNumbers + [-1 * (i + 1)] * atomCount;
# If the atom positions are given in Cartesian coordinates, convert them to fractional coordinates.
if coordinateType == 'c':
atomPositions = StructureTools.CartesianToFractionalCoordinates(latticeVectors, atomPositions);
# Return a Structure object.
return Structure.Structure(latticeVectors, atomPositions, atomTypeNumbers, name = systemName);
def WritePOSCARFile(structure, outputWriter, atomicSymbolLookupTable = None):
# Write the system name; Structure.GetName() returns a sensible default value if a name is not set.
outputWriter.write("{0}\n".format(structure.GetName()));
# Write the scale factor.
outputWriter.write(" {0: >19.16f}\n".format(1.0));
# Write the lattice vectors.
for ax, ay, az in structure.GetLatticeVectors():
outputWriter.write(" {0: >21.16f} {1: >21.16f} {2: >21.16f}\n".format(ax, ay, az));
# Write the atom types and counts.
atomicSymbols, atomCounts = structure.GetAtomicSymbolsCounts(atomicSymbolLookupTable = atomicSymbolLookupTable);
for atomicSymbol in atomicSymbols:
outputWriter.write(" {0: >3}".format(atomicSymbol));
outputWriter.write("\n");
for atomCount in atomCounts:
outputWriter.write(" {0: >3}".format(atomCount));
outputWriter.write("\n");
# Write the coordinate type.
outputWriter.write("Direct\n");
# Write the atom positions.
for x, y, z in structure.GetAtomPositions():
outputWriter.write(" {0: >21.16f} {1: >21.16f} {2: >21.16f}\n".format(x, y, z));
"""SCons.Tool.gas
Tool-specific initialization for as, the Gnu assembler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/gas.py 2014/09/27 12:51:43 garyo"
as_module = __import__('as', globals(), locals(), [])
assemblers = ['as', 'gas']
def generate(env):
"""Add Builders and construction variables for as to an Environment."""
as_module.generate(env)
env['AS'] = env.Detect(assemblers) or 'as'
def exists(env):
return env.Detect(assemblers)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
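# Illustrative SConstruct sketch (an assumption, not part of SCons itself):
# requesting the tool by name runs generate(), which sets env['AS'] to the
# first assembler found on the PATH.
#
#     from SCons.Environment import Environment
#     env = Environment(tools=['gas'])
#     env.Program('hello', ['hello.s'])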
#!/usr/bin/env python
#
# Copyright 2014 The LibYuv Project Authors. All rights reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# This script is a modified copy of the src/build/gyp_chromium.py file.
# It is needed for parallel processing.
# This file is (possibly, depending on python version) imported by
# gyp_libyuv when GYP_PARALLEL=1 and it creates sub-processes
# through the multiprocessing library.
# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for
# imports that don't end in .py (and aren't directories with an
# __init__.py). This wrapper makes "import gyp_libyuv" work with
# those old versions and makes it possible to execute gyp_libyuv.py
# directly on Windows where the extension is useful.
import os
path = os.path.abspath(os.path.split(__file__)[0])
execfile(os.path.join(path, 'gyp_libyuv'))
import warnings
warnings.warn("eventlet.processes is deprecated in favor of "
"eventlet.green.subprocess, which is API-compatible with the standard "
" library subprocess module.",
DeprecationWarning, stacklevel=2)
import errno
import os
import popen2
import signal
from eventlet import api
from eventlet import pools
from eventlet import greenio
class DeadProcess(RuntimeError):
pass
def cooperative_wait(pobj, check_interval=0.01):
""" Waits for a child process to exit, returning the status
code.
Unlike ``os.wait``, :func:`cooperative_wait` does not block the entire
process, only the calling coroutine. If the child process does not die,
:func:`cooperative_wait` could wait forever.
The argument *check_interval* is the amount of time, in seconds, that
:func:`cooperative_wait` will sleep between calls to ``os.waitpid``.
"""
try:
while True:
status = pobj.poll()
if status >= 0:
return status
api.sleep(check_interval)
except OSError, e:
if e.errno == errno.ECHILD:
# no child process, this happens if the child process
# already died and has been cleaned up, or if you just
# called with a random pid value
return -1
else:
raise
class Process(object):
"""Construct Process objects, then call read, and write on them."""
process_number = 0
def __init__(self, command, args, dead_callback=lambda:None):
self.process_number = self.process_number + 1
Process.process_number = self.process_number
self.command = command
self.args = args
self._dead_callback = dead_callback
self.run()
def run(self):
self.dead = False
self.started = False
self.popen4 = None
## We use popen4 so that read() will read from either stdout or stderr
self.popen4 = popen2.Popen4([self.command] + self.args)
child_stdout_stderr = self.popen4.fromchild
child_stdin = self.popen4.tochild
self.child_stdout_stderr = greenio.GreenPipe(child_stdout_stderr, child_stdout_stderr.mode, 0)
self.child_stdin = greenio.GreenPipe(child_stdin, child_stdin.mode, 0)
self.sendall = self.child_stdin.write
self.send = self.child_stdin.write
self.recv = self.child_stdout_stderr.read
self.readline = self.child_stdout_stderr.readline
self._read_first_result = False
def wait(self):
return cooperative_wait(self.popen4)
def dead_callback(self):
self.wait()
self.dead = True
if self._dead_callback:
self._dead_callback()
def makefile(self, mode, *arg):
if mode.startswith('r'):
return self.child_stdout_stderr
if mode.startswith('w'):
return self.child_stdin
raise RuntimeError("Unknown mode", mode)
def read(self, amount=None):
"""Reads from the stdout and stderr of the child process.
The first call to read() will return a string; subsequent
calls may raise a DeadProcess when EOF occurs on the pipe.
"""
result = self.child_stdout_stderr.read(amount)
if result == '' and self._read_first_result:
# This process is dead.
self.dead_callback()
raise DeadProcess
else:
self._read_first_result = True
return result
def write(self, stuff):
written = 0
try:
written = self.child_stdin.write(stuff)
self.child_stdin.flush()
except ValueError, e:
## File was closed
assert str(e) == 'I/O operation on closed file'
if written == 0:
self.dead_callback()
raise DeadProcess
def flush(self):
self.child_stdin.flush()
def close(self):
self.child_stdout_stderr.close()
self.child_stdin.close()
self.dead_callback()
def close_stdin(self):
self.child_stdin.close()
def kill(self, sig=None):
if sig is None:
sig = signal.SIGTERM
pid = self.getpid()
os.kill(pid, sig)
def getpid(self):
return self.popen4.pid
class ProcessPool(pools.Pool):
def __init__(self, command, args=None, min_size=0, max_size=4):
"""*command*
the command to run
"""
self.command = command
if args is None:
args = []
self.args = args
pools.Pool.__init__(self, min_size, max_size)
def create(self):
"""Generate a process
"""
def dead_callback():
self.current_size -= 1
return Process(self.command, self.args, dead_callback)
def put(self, item):
if not item.dead:
if item.popen4.poll() != -1:
item.dead_callback()
else:
pools.Pool.put(self, item)
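# A minimal usage sketch (an assumption, not part of the original module):
# requires a POSIX 'cat' binary on the PATH. Demonstrates the read/write API
# of Process; cat simply echoes what is written to its stdin.
if __name__ == '__main__':
    p = Process('cat', [])
    p.write('hello\n')
    p.flush()
    print p.readline()  # prints 'hello'
    p.close_stdin()
    p.wait()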
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Created by Chris Huegle for TellApart, Inc.
class ConnectionSettingAttribute(object):
"""
Represents the ConnectionSetting segment of ELB Attributes.
"""
def __init__(self, connection=None):
self.idle_timeout = None
def __repr__(self):
return 'ConnectionSettingAttribute(%s)' % (
self.idle_timeout)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'IdleTimeout':
self.idle_timeout = int(value)
class CrossZoneLoadBalancingAttribute(object):
"""
Represents the CrossZoneLoadBalancing segment of ELB Attributes.
"""
def __init__(self, connection=None):
self.enabled = None
def __repr__(self):
return 'CrossZoneLoadBalancingAttribute(%s)' % (
self.enabled)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Enabled':
if value.lower() == 'true':
self.enabled = True
else:
self.enabled = False
class AccessLogAttribute(object):
"""
Represents the AccessLog segment of ELB attributes.
"""
def __init__(self, connection=None):
self.enabled = None
self.s3_bucket_name = None
self.s3_bucket_prefix = None
self.emit_interval = None
def __repr__(self):
return 'AccessLog(%s, %s, %s, %s)' % (
self.enabled,
self.s3_bucket_name,
self.s3_bucket_prefix,
self.emit_interval
)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Enabled':
if value.lower() == 'true':
self.enabled = True
else:
self.enabled = False
elif name == 'S3BucketName':
self.s3_bucket_name = value
elif name == 'S3BucketPrefix':
self.s3_bucket_prefix = value
elif name == 'EmitInterval':
self.emit_interval = int(value)
class ConnectionDrainingAttribute(object):
"""
Represents the ConnectionDraining segment of ELB attributes.
"""
def __init__(self, connection=None):
self.enabled = None
self.timeout = None
def __repr__(self):
return 'ConnectionDraining(%s, %s)' % (
self.enabled,
self.timeout
)
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Enabled':
if value.lower() == 'true':
self.enabled = True
else:
self.enabled = False
elif name == 'Timeout':
self.timeout = int(value)
class LbAttributes(object):
"""
Represents the Attributes of an Elastic Load Balancer.
"""
def __init__(self, connection=None):
self.connection = connection
self.cross_zone_load_balancing = CrossZoneLoadBalancingAttribute(
self.connection)
self.access_log = AccessLogAttribute(self.connection)
self.connection_draining = ConnectionDrainingAttribute(self.connection)
self.connecting_settings = ConnectionSettingAttribute(self.connection)
def __repr__(self):
return 'LbAttributes(%s, %s, %s, %s)' % (
repr(self.cross_zone_load_balancing),
repr(self.access_log),
repr(self.connection_draining),
repr(self.connecting_settings))
def startElement(self, name, attrs, connection):
if name == 'CrossZoneLoadBalancing':
return self.cross_zone_load_balancing
if name == 'AccessLog':
return self.access_log
if name == 'ConnectionDraining':
return self.connection_draining
if name == 'ConnectionSettings':
return self.connecting_settings
def endElement(self, name, value, connection):
pass
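# A small illustrative sketch (an assumption, not part of boto: its XML
# handler normally drives these objects while parsing the ELB attributes
# response). Shows the startElement/endElement contract directly.
if __name__ == '__main__':
    attributes = LbAttributes()
    czlb = attributes.startElement('CrossZoneLoadBalancing', {}, None)
    czlb.endElement('Enabled', 'true', None)
    print(attributes)  # cross_zone_load_balancing now reports enabled=True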
#=======================================================================
#
# Python Lexical Analyser
#
# Actions for use in token specifications
#
#=======================================================================
class Action(object):
def perform(self, token_stream, text):
pass # abstract
def same_as(self, other):
return self is other
class Return(Action):
"""
Internal Plex action which causes |value| to
be returned as the value of the associated token
"""
def __init__(self, value):
self.value = value
def perform(self, token_stream, text):
return self.value
def same_as(self, other):
return isinstance(other, Return) and self.value == other.value
def __repr__(self):
return "Return(%s)" % repr(self.value)
class Call(Action):
"""
Internal Plex action which causes a function to be called.
"""
def __init__(self, function):
self.function = function
def perform(self, token_stream, text):
return self.function(token_stream, text)
def __repr__(self):
return "Call(%s)" % self.function.__name__
def same_as(self, other):
return isinstance(other, Call) and self.function is other.function
class Begin(Action):
"""
Begin(state_name) is a Plex action which causes the Scanner to
enter the state |state_name|. See the docstring of Plex.Lexicon
for more information.
"""
def __init__(self, state_name):
self.state_name = state_name
def perform(self, token_stream, text):
token_stream.begin(self.state_name)
def __repr__(self):
return "Begin(%s)" % self.state_name
def same_as(self, other):
return isinstance(other, Begin) and self.state_name == other.state_name
class Ignore(Action):
"""
IGNORE is a Plex action which causes its associated token
to be ignored. See the docstring of Plex.Lexicon for more
information.
"""
def perform(self, token_stream, text):
return None
def __repr__(self):
return "IGNORE"
IGNORE = Ignore()
#IGNORE.__doc__ = Ignore.__doc__
class Text(Action):
"""
TEXT is a Plex action which causes the text of a token to
be returned as the value of the token. See the docstring of
Plex.Lexicon for more information.
"""
def perform(self, token_stream, text):
return text
def __repr__(self):
return "TEXT"
TEXT = Text()
#TEXT.__doc__ = Text.__doc__
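# A small illustrative sketch (an assumption, not part of Plex): Return,
# TEXT and IGNORE never touch the token_stream argument, so None suffices.
if __name__ == '__main__':
    assert Return(1).same_as(Return(1))
    assert Return(1).perform(None, "abc") == 1
    assert TEXT.perform(None, "abc") == "abc"
    assert IGNORE.perform(None, "abc") is None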
try:
from urllib.parse import urljoin
except ImportError:
# Python 2
from urlparse import urljoin
from django.conf import settings
# See http://docs.cksource.com/ckeditor_api/symbols/CKEDITOR.config.html
# for all settings
CKEDITOR_SETTINGS = getattr(settings, 'CKEDITOR_SETTINGS', {
'language': '{{ language }}',
'toolbar': 'CMS',
'skin': 'moono',
'toolbarCanCollapse': False,
})
INSTALLED_APPS = getattr(settings, 'INSTALLED_APPS', [])
if 'cms.plugins.picture' in INSTALLED_APPS or 'djangocms_picture' in INSTALLED_APPS:
save_function_default = 'djangocms_text_ckeditor.picture_save.create_picture_plugin'
else:
save_function_default = None
TEXT_SAVE_IMAGE_FUNCTION = getattr(settings, 'TEXT_SAVE_IMAGE_FUNCTION', save_function_default)
TEXT_ADDITIONAL_TAGS = getattr(settings, 'TEXT_ADDITIONAL_TAGS', ())
TEXT_ADDITIONAL_ATTRIBUTES = getattr(settings, 'TEXT_ADDITIONAL_ATTRIBUTES', ())
TEXT_ADDITIONAL_PROTOCOLS = getattr(settings, 'TEXT_ADDITIONAL_PROTOCOLS', ())
TEXT_CKEDITOR_CONFIGURATION = getattr(settings, 'TEXT_CKEDITOR_CONFIGURATION', None)
TEXT_HTML_SANITIZE = getattr(settings, 'TEXT_HTML_SANITIZE', True)
TEXT_CKEDITOR_BASE_PATH = getattr(settings, 'TEXT_CKEDITOR_BASE_PATH', urljoin(settings.STATIC_URL, 'djangocms_text_ckeditor/ckeditor/'))
TEXT_AUTO_HYPHENATE = getattr(settings, 'TEXT_AUTO_HYPHENATE', True)
ALLOW_TOKEN_PARSERS = (
'djangocms_text_ckeditor.attribute_parsers.DataAttributeParser',
)
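# Illustrative override sketch (an assumption, not part of this module:
# these lines belong in the Django project's settings.py). Every getattr()
# default above can be replaced the same way:
#
#     CKEDITOR_SETTINGS = {
#         'language': '{{ language }}',
#         'toolbar': 'CMS',
#         'skin': 'moono',
#         'toolbarCanCollapse': True,
#     }
#     TEXT_HTML_SANITIZE = False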
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from mock import Mock
from airflow.models import TaskInstance
from airflow.ti_deps.deps.dag_ti_slots_available_dep import DagTISlotsAvailableDep
class DagTISlotsAvailableDepTest(unittest.TestCase):
def test_concurrency_reached(self):
"""
Test concurrency reached should fail dep
"""
dag = Mock(concurrency=1, concurrency_reached=True)
task = Mock(dag=dag)
ti = TaskInstance(task, execution_date=None)
self.assertFalse(DagTISlotsAvailableDep().is_met(ti=ti))
def test_all_conditions_met(self):
"""
Test all conditions met should pass dep
"""
dag = Mock(concurrency=1, concurrency_reached=False)
task = Mock(dag=dag)
ti = TaskInstance(task, execution_date=None)
self.assertTrue(DagTISlotsAvailableDep().is_met(ti=ti))
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import sqlalchemy as sa
from twisted.trial import unittest
from buildbot.test.util import migration
from buildbot.util import sautils
class Migration(migration.MigrateTestMixin, unittest.TestCase):
def setUp(self):
return self.setUpMigrateTest()
def tearDown(self):
return self.tearDownMigrateTest()
def create_tables_thd(self, conn):
metadata = sa.MetaData()
metadata.bind = conn
patches = sautils.Table(
'patches', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('patchlevel', sa.Integer, nullable=False),
sa.Column('patch_base64', sa.Text, nullable=False),
sa.Column('patch_author', sa.Text, nullable=False),
sa.Column('patch_comment', sa.Text, nullable=False),
sa.Column('subdir', sa.Text),
)
sourcestamps = sautils.Table(
'sourcestamps', metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('ss_hash', sa.String(40), nullable=False),
sa.Column('branch', sa.String(256)),
sa.Column('revision', sa.String(256)),
sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')),
sa.Column('repository', sa.String(length=512), nullable=False,
server_default=''),
sa.Column('codebase', sa.String(256), nullable=False,
server_default=sa.DefaultClause("")),
sa.Column('project', sa.String(length=512), nullable=False,
server_default=''),
sa.Column('created_at', sa.Integer, nullable=False),
)
changes = sautils.Table(
'changes', metadata,
sa.Column('changeid', sa.Integer, primary_key=True),
sa.Column('author', sa.String(256), nullable=False),
sa.Column('comments', sa.Text, nullable=False),
sa.Column('branch', sa.String(256)),
sa.Column('revision', sa.String(256)),
sa.Column('revlink', sa.String(256)),
sa.Column('when_timestamp', sa.Integer, nullable=False),
sa.Column('category', sa.String(256)),
sa.Column('repository', sa.String(length=512), nullable=False,
server_default=''),
sa.Column('codebase', sa.String(256), nullable=False,
server_default=sa.DefaultClause("")),
sa.Column('project', sa.String(length=512), nullable=False,
server_default=''),
sa.Column('sourcestampid', sa.Integer,
sa.ForeignKey('sourcestamps.id')),
)
patches.create()
sourcestamps.create()
changes.create()
def test_update(self):
def setup_thd(conn):
self.create_tables_thd(conn)
def verify_thd(conn):
metadata = sa.MetaData()
metadata.bind = conn
changes = sautils.Table('changes', metadata, autoload=True)
self.assertIsInstance(changes.c.parent_changeids.type, sa.Integer)
return self.do_test_migration(42, 43, setup_thd, verify_thd)
#!/usr/bin/env python
import ConfigParser
import socket
import os
import json
def main():
cf = ConfigParser.ConfigParser()
cf.read("conf/oj.conf")
judge_host = cf.get('sandbox', 'judgeHost')
userid = int(cf.get('sandbox', 'userid'))
server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
if os.path.exists("/tmp/judge_root.sock"):
os.unlink("/tmp/judge_root.sock")
server.bind("/tmp/judge_root.sock")
os.system('chmod 777 /tmp/judge_root.sock')
server.listen(0)
while True:
connection, address = server.accept()
infor = json.loads(connection.recv(1024).decode())
work_dir, bin, usrout, errout, input_dir, stdin, time_limit, mem_limit = infor
cmd = "%s %s %s %s %s %s %s %s %s %s"%(judge_host, work_dir, bin, usrout, errout, input_dir, stdin, time_limit, mem_limit, userid)
print cmd
tmp = os.popen(cmd).read()
result, time_used, mem_used = [int(s) for s in tmp.split()]
success = result == 0
time_exceeded = result == 2
mem_exceeded = result == 3
connection.send(json.dumps([success, time_exceeded, mem_exceeded, time_used, mem_used]).encode())
if __name__ == '__main__':
main()
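# Client-side sketch (an assumption, not part of this script: every field
# value below is hypothetical). The daemon expects a JSON-encoded list of
# [work_dir, bin, usrout, errout, input_dir, stdin, time_limit, mem_limit]
# over the unix socket and replies with
# [success, time_exceeded, mem_exceeded, time_used, mem_used]:
#
#     import socket, json
#     c = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
#     c.connect("/tmp/judge_root.sock")
#     c.send(json.dumps(["/tmp/work", "./a.out", "user.out", "err.out",
#                        "/data/p1", "1.in", 1000, 65536]).encode())
#     print json.loads(c.recv(1024).decode())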
"""Tests for quotient rings."""
from sympy import QQ, ZZ
from sympy.abc import x, y
from sympy.polys.polyerrors import NotReversible
from sympy.utilities.pytest import raises
from sympy.core.compatibility import range
def test_QuotientRingElement():
R = QQ.old_poly_ring(x)/[x**10]
X = R.convert(x)
assert X*(X + 1) == R.convert(x**2 + x)
assert X*x == R.convert(x**2)
assert x*X == R.convert(x**2)
assert X + x == R.convert(2*x)
assert x + X == 2*X
assert X**2 == R.convert(x**2)
assert 1/(1 - X) == R.convert(sum(x**i for i in range(10)))
assert X**10 == R.zero
assert X != x
raises(NotReversible, lambda: 1/X)
def test_QuotientRing():
I = QQ.old_poly_ring(x).ideal(x**2 + 1)
R = QQ.old_poly_ring(x)/I
assert R == QQ.old_poly_ring(x)/[x**2 + 1]
assert R == QQ.old_poly_ring(x)/QQ.old_poly_ring(x).ideal(x**2 + 1)
assert R != QQ.old_poly_ring(x)
assert R.convert(1)/x == -x + I
assert -1 + I == x**2 + I
assert R.convert(ZZ(1), ZZ) == 1 + I
assert R.convert(R.convert(x), R) == R.convert(x)
X = R.convert(x)
Y = QQ.old_poly_ring(x).convert(x)
assert -1 + I == X**2 + I
assert -1 + I == Y**2 + I
assert R.to_sympy(X) == x
raises(ValueError, lambda: QQ.old_poly_ring(x)/QQ.old_poly_ring(x, y).ideal(x))
R = QQ.old_poly_ring(x, order="ilex")
I = R.ideal(x)
assert R.convert(1) + I == (R/I).convert(1)
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: NamespaceA
import flatbuffers
class SecondTableInA(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsSecondTableInA(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = SecondTableInA()
x.Init(buf, n + offset)
return x
# SecondTableInA
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# SecondTableInA
def ReferToC(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .TableInC import TableInC
obj = TableInC()
obj.Init(self._tab.Bytes, x)
return obj
return None
def SecondTableInAStart(builder): builder.StartObject(1)
def SecondTableInAAddReferToC(builder, referToC): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(referToC), 0)
def SecondTableInAEnd(builder): return builder.EndObject()
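# Illustrative usage sketch (an assumption, not part of the generated code:
# requires the flatbuffers runtime). Builds a minimal SecondTableInA and
# reads it back; no TableInC is attached, so ReferToC() returns None.
if __name__ == '__main__':
    builder = flatbuffers.Builder(0)
    SecondTableInAStart(builder)
    table = SecondTableInAEnd(builder)
    builder.Finish(table)
    buf = builder.Output()
    obj = SecondTableInA.GetRootAsSecondTableInA(buf, 0)
    assert obj.ReferToC() is None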
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Connectors wrap the details of communicating with different Bitcoin clients and implementations.
"""
import io
import logging
import time
import bitcoin.rpc
import requests
from bitcoin.core import CTransaction
from cert_schema import Chain
from pycoin.serialize import b2h
from pycoin.services import providers
from pycoin.services.blockr_io import BlockrioProvider
from pycoin.services.insight import InsightProvider
from pycoin.services.providers import service_provider_methods
from pycoin.tx import Spendable
from cert_issuer.errors import ConnectorError, BroadcastError
from cert_issuer.helpers import hexlify
from cert_issuer.helpers import unhexlify
BROADCAST_RETRY_INTERVAL = 30
try:
from urllib2 import urlopen, HTTPError
from urllib import urlencode
except ImportError:
from urllib.request import urlopen, HTTPError
from urllib.parse import urlencode
MAX_BROADCAST_ATTEMPTS = 3
def try_get(url):
"""throw error if call fails"""
response = requests.get(url)
if int(response.status_code) != 200:
error_message = 'Error! status_code={}, error={}'.format(
response.status_code, response.json()['error'])
logging.error(error_message)
raise ConnectorError(error_message)
return response
def to_hex(transaction):
s = io.BytesIO()
transaction.stream(s)
tx_as_hex = b2h(s.getvalue())
return tx_as_hex
class BlockExplorerBroadcaster(object):
def __init__(self, base_url):
self.base_url = base_url
def broadcast_tx(self, tx):
hextx = to_hex(tx)
broadcast_url = self.base_url + '/tx/send'
response = requests.post(broadcast_url, json={'rawtx': hextx})
if int(response.status_code) == 200:
tx_id = response.json().get('txid', None)
return tx_id
logging.error('Error broadcasting the transaction through the BlockExplorer API. Error msg: %s', response.text)
raise BroadcastError(response.text)
class BlockcypherBroadcaster(object):
"""
Note that this needs an API token
"""
def __init__(self, base_url, api_token):
self.base_url = base_url
self.api_token = api_token
def broadcast_tx(self, tx):
hextx = to_hex(tx)
broadcast_url = self.base_url + '/txs/push?token=' + self.api_token
response = requests.post(broadcast_url, json={'tx': hextx})
if int(response.status_code) == 200:
tx_id = response.json().get('txid', None)
return tx_id
logging.error('Error broadcasting the transaction through the Blockcypher API. Error msg: %s', response.text)
raise BroadcastError(response.text)
class BlockrIOBroadcaster(object):
def __init__(self, base_url):
self.base_url = base_url
def broadcast_tx(self, tx):
hextx = to_hex(tx)
url = self.base_url + '/tx/push'
response = requests.post(url, json={'hex': hextx})
if int(response.status_code) == 200:
tx_id = response.json().get('data', None)
return tx_id
logging.error('Error broadcasting the transaction through the Blockr.IO API. Error msg: %s', response.text)
raise BroadcastError(response.text)
class BitcoindConnector(object):
def __init__(self, netcode):
self.netcode = netcode
def broadcast_tx(self, transaction):
as_hex = transaction.as_hex()
transaction = CTransaction.deserialize(unhexlify(as_hex))
tx_id = bitcoin.rpc.Proxy().sendrawtransaction(transaction)
# reverse endianness for bitcoind
return hexlify(bytearray(tx_id)[::-1])
def spendables_for_address(self, address):
"""
Converts to pycoin Spendable type
:param address:
:return: list of Spendables
"""
unspent_outputs = bitcoin.rpc.Proxy().listunspent(addrs=[address])
logging.debug('spendables_for_address %s', address)
spendables = []
for unspent in unspent_outputs:
coin_value = unspent.get('amount', 0)
outpoint = unspent.get('outpoint')
script = unspent.get('scriptPubKey')
previous_hash = outpoint.hash
previous_index = outpoint.n
spendables.append(Spendable(coin_value, script, previous_hash, previous_index))
return spendables
class ServiceProviderConnector(object):
def __init__(self, bitcoin_chain):
self.bitcoin_chain = bitcoin_chain
def spendables_for_address(self, bitcoin_address):
for m in service_provider_methods('spendables_for_address', get_providers_for_chain(self.bitcoin_chain)):
try:
logging.debug('m=%s', m)
spendables = m(bitcoin_address)
return spendables
except Exception as e:
logging.warning(e)
pass
return []
def get_unspent_outputs(self, address):
"""
Get unspent outputs at the address
:param address:
:return:
"""
logging.debug('get_unspent_outputs for address=%s', address)
spendables = self.spendables_for_address(bitcoin_address=address)
if spendables:
return sorted(spendables, key=lambda x: hash(x.coin_value))
return None
def get_balance(self, address):
"""
Get balance available to spend at the address
:param address:
:return:
"""
spendables = self.get_unspent_outputs(address)
if not spendables:
logging.warning('address %s has a balance of 0', address)
return 0
balance = sum(s.coin_value for s in spendables)
return balance
def broadcast_tx(self, tx):
"""
Broadcast the transaction through the configured set of providers
:param tx:
:return:
"""
return ServiceProviderConnector.broadcast_tx_with_chain(tx, self.bitcoin_chain)
@staticmethod
def broadcast_tx_with_chain(tx, bitcoin_chain):
"""
Broadcast the transaction through the configured set of providers
:param tx:
:param bitcoin_chain:
:return:
"""
last_exception = None
final_tx_id = None
# Unlike other providers, we want to broadcast to all available apis
for attempt_number in range(0, MAX_BROADCAST_ATTEMPTS):
for method_provider in service_provider_methods('broadcast_tx',
get_providers_for_chain(bitcoin_chain)):
try:
tx_id = method_provider(tx)
if tx_id:
logging.info('Broadcasting succeeded with method_provider=%s, txid=%s', str(method_provider),
tx_id)
if final_tx_id and final_tx_id != tx_id:
logging.error(
'This should never happen; fail and investigate if it does. Got conflicting tx_ids=%s and %s. Hextx=%s',
final_tx_id, tx_id, tx.as_hex())
raise Exception('Got conflicting tx_ids.')
final_tx_id = tx_id
except Exception as e:
logging.warning('Caught exception trying provider %s. Trying another. Exception=%s',
str(method_provider), e)
last_exception = e
# At least 1 provider succeeded, so return
if final_tx_id:
return final_tx_id
else:
logging.warning('Broadcasting failed. Waiting before retrying. This is attempt number %d',
attempt_number)
time.sleep(BROADCAST_RETRY_INTERVAL)
logging.error('Failed broadcasting through all providers')
logging.error(last_exception, exc_info=True)
raise BroadcastError(last_exception)
PYCOIN_BTC_PROVIDERS = "blockchain.info blockexplorer.com blockcypher.com chain.so"
PYCOIN_XTN_PROVIDERS = "blockexplorer.com" # chain.so
# initialize connectors
connectors = {}
# configure mainnet providers
provider_list = providers.providers_for_config_string(PYCOIN_BTC_PROVIDERS, Chain.mainnet.netcode)
provider_list.append(BlockrIOBroadcaster('https://btc.blockr.io/api/v1'))
provider_list.append(BlockExplorerBroadcaster('https://blockexplorer.com/api'))
provider_list.append(BlockrioProvider(Chain.mainnet.netcode))
provider_list.append(InsightProvider(netcode=Chain.mainnet.netcode))
connectors[Chain.mainnet.netcode] = provider_list
# configure testnet providers
xtn_provider_list = providers.providers_for_config_string(PYCOIN_XTN_PROVIDERS, Chain.testnet.netcode)
xtn_provider_list.append(InsightProvider(base_url='https://test-insight.bitpay.com', netcode=Chain.testnet.netcode))
xtn_provider_list.append(BlockrIOBroadcaster('https://tbtc.blockr.io/api/v1'))
xtn_provider_list.append(BlockExplorerBroadcaster('https://testnet.blockexplorer.com/api'))
xtn_provider_list.append(BlockrioProvider(Chain.testnet.netcode))
connectors[Chain.testnet.netcode] = xtn_provider_list
# workaround for regtest
connectors['REG'] = [BitcoindConnector(Chain.testnet.netcode)]
def get_providers_for_chain(bitcoin_chain):
if bitcoin_chain == Chain.regtest:
return connectors['REG']
return connectors[bitcoin_chain.netcode]
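# Illustrative usage sketch (an assumption, not part of this module: it
# needs live network access to the providers configured above, and
# 'some_address' is a hypothetical Bitcoin address string):
#
#     connector = ServiceProviderConnector(Chain.mainnet)
#     print(connector.get_balance(some_address))
#     connector.broadcast_tx(tx)  # tx: a signed pycoin transaction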
__author__ = 'firatlepirate'
import socket
import threading
import Queue
import time
import errno
class WriteThread (threading.Thread):
def __init__(self, name, cSocket, address,fihrist,threadQueue, logQueue ):
threading.Thread.__init__(self)
self.name = name
self.cSocket = cSocket
self.address = address
self.lQueue = logQueue
self.tQueue = threadQueue
self.fihrist = fihrist
self.nickname=""
self.flag=False
def run(self):
self.lQueue.put("Starting " + self.name)
while True:
# this block sends messages whose turn
# has come up in the queue
if self.tQueue.qsize()>0 or self.flag:
if self.nickname=="":
self.nickname=self.tQueue.get()
self.flag=True
if self.fihrist[self.nickname].qsize()>0:
queue_message = self.fihrist[self.nickname].get()
# if the message being sent is a private message
if not queue_message[0]=="SAY" and len(queue_message)==3 and not queue_message=="QUI" :
message_to_send = "MSG "+str(queue_message[0])+":"+str(queue_message[1])+";"+str(queue_message[2])
self.cSocket.send(str(message_to_send))
# if it is a broadcast message
elif queue_message[0]=="SAY":
message_to_send = "SAY "+str(queue_message[1])+":"+str(queue_message[2])
self.cSocket.send(str(message_to_send))
print(message_to_send)
elif queue_message=="QUI":
# remove from the user directory
del self.fihrist[self.nickname]
break
# otherwise it is a system message
else:
message_to_send = "SYS "+str(queue_message[1])
self.cSocket.send(str(message_to_send))
self.lQueue.put("Exiting " + self.name)
class ReadThread (threading.Thread):
def __init__(self, name, cSocket, address,fihrist,threadQueue,logQueue):
threading.Thread.__init__(self)
self.name = name
self.cSocket = cSocket
self.address = address
self.lQueue = logQueue
self.fihrist = fihrist
self.tQueue = threadQueue
self.nickname=""
def parser(self, data):
#data = data.strip()
# if the client has not logged in yet
if not self.nickname and not data[0:3] == "USR":
response="ERL"
self.cSocket.send(response)
else:
# if the data is malformed
if len(data)<3:
response = "ERR"
self.cSocket.send(response)
return 0
if data[0:3] == "USR":
if len(data)>4 and data[3]==" " and not data[3:len(data)]==" ":
self.nickname = data[4:]
if not self.nickname in self.fihrist:
# if the user does not exist yet
response = "HEL " + self.nickname
self.cSocket.send(response)
self.fihrist[self.nickname]=Queue.Queue(10)
# update the user directory
#self.fihrist.update(...)
self.lQueue.put(self.nickname + " has joined.")
self.tQueue.put(self.nickname)
queue_message = ("SYS", self.nickname)
for items in self.fihrist.keys():
self.fihrist[items].put(queue_message)
return 0
else:
# the user will be rejected
response = "REJ " + self.nickname
self.cSocket.send(response)
# close the connection
# self.cSocket.close()
return 1
else:
response = "ERR"
self.cSocket.send(response)
elif data[0:3] == "QUI":
response = "BYE " + self.nickname
self.cSocket.send(response)
queue_message="QUI"
self.fihrist[self.nickname].put(queue_message)
# send a log entry
self.lQueue.put(self.nickname + " has left.")
# drop the connection
self.cSocket.close()
return queue_message
elif data[0:3] == "LSQ":
a=" "
for i in self.fihrist.keys():
a=a+i+":"
response="LSA"+a[:-1]
self.cSocket.send(response)
elif data[0:3] == "TIC":
response="TOC"
self.cSocket.send(response)
elif data[0:3] == "SAY":
if len(data)>4 and data[3]==" " and not data[4:]==" ":
message=data[4:]
queue_message = ("SAY", self.nickname, message)
for items in self.fihrist.keys():
self.fihrist[items].put(queue_message)
response="SOK"
self.cSocket.send(response)
elif data[0:3] == "MSG":
c=":"
if not data[4:]==" " and c in data[4:]:
to_nickname=data[4:data.index(":")]
message=data[data.index(":")+1:]
if not to_nickname in self.fihrist.keys():
response = "MNO"
else:
queue_message = (to_nickname, self.nickname, message)
# look up the recipient's queue in the directory and write into it
self.fihrist[to_nickname].put(queue_message)
response = "MOK"
self.cSocket.send(response)
else:
# if nothing matched, report a protocol error
response = "ERR"
self.cSocket.send(response)
def run(self):
self.lQueue.put("Starting " + self.name)
while True:
try:
incoming_data=self.cSocket.recv(1024)
except socket.error, e:
err=e.args[0]
if err == errno.EAGAIN or err == errno.EWOULDBLOCK:
time.sleep(1)
print 'No data available'
continue
else:
print("ERROR"+str(e))
queue_message = self.parser(incoming_data)
if(queue_message)=="QUI":
break
self.lQueue.put("Exiting " + self.name)
print(threading.activeCount())
class LoggerThread (threading.Thread):
def __init__(self, name, logQueue, logFileName):
threading.Thread.__init__(self)
self.name = name
self.lQueue = logQueue
self.fileName=logFileName
# open the log file in append mode
self.fid = open(self.fileName, "a")
def log(self,message):
# write the incoming message together with a timestamp
t = time.ctime()
self.fid.write(t+":"+" "+ message+"\n")
self.fid.flush()
def run(self):
self.log("Starting " + self.name)
while True:
if self.lQueue.qsize() > 0:
# if there is a new message in lQueue,
# call the self.log() method
to_be_logged = self.lQueue.get()
self.log(to_be_logged)
self.log("Exiting" + self.name)
self.fid.close()
userList={}
loggerQueue= Queue.Queue()
thread3=LoggerThread("LoggerThread",loggerQueue,"log.txt")
thread3.start()
s = socket.socket()
#host = socket.gethostname()
host="127.0.0.1"
print("host"+host)
port = 12345
s.bind((host, port))
s.listen(5)
threadCounter=0
threadCounter2=0
while True:
loggerQueue.put("Waiting for connection")
print "Waiting for connection"
c, addr = s.accept()
workQueue = Queue.Queue()
loggerQueue.put("Got a connection from " + str(addr))
print "Got a connection from ", addr
threadCounter += 1
thread = ReadThread("ReadThread"+str(threadCounter), c, addr,userList,workQueue,loggerQueue)
threadCounter2 += 1
thread2 = WriteThread("WriteThread"+str(threadCounter2), c, addr,userList,workQueue,loggerQueue)
thread.start()
thread2.start()
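# Client-side protocol sketch (an assumption, not part of this server;
# 'alice' is a hypothetical nickname). Commands handled by
# ReadThread.parser(): USR <nick>, SAY <message>, MSG <nick>:<message>,
# LSQ, TIC and QUI.
#
#     import socket
#     c = socket.socket()
#     c.connect(("127.0.0.1", 12345))
#     c.send("USR alice")      # server replies "HEL alice" (or "REJ alice")
#     print c.recv(1024)
#     c.send("SAY hello all")  # server replies "SOK" and broadcasts
#     print c.recv(1024)
#     c.send("QUI")            # server replies "BYE alice"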
# -*- coding: utf-8 -*-
"PDF Template Helper for FPDF.py"
__author__ = "Mariano Reingart "
__copyright__ = "Copyright (C) 2010 Mariano Reingart"
__license__ = "LGPL 3.0"
#PyFPDF-cover-test:format=PDF
#PyFPDF-cover-test:fn=invoice.pdf
#PyFPDF-cover-test:hash=5844bbebe3e33b0ac9cc15ac39327a81
#PyFPDF-cover-test:res=invoice.csv
import common
from fpdf import Template
import os
class randomfake:
RINT1_10 = [8, 5, 7, 9, 10, 8, 1, 9, 1, 7, 6, 2, 3, 7, 8, 4, 6, 5, 7, 2, \
5, 8, 6, 5, 5, 8, 7, 7, 6]
RINT65_90 = [67, 67, 87, 78, 84, 67, 86, 75, 86, 89, 81, 69, 72, 71, 84, \
80, 71, 86, 82, 70, 84, 69, 70]
RFLT = [0.820710198665, 0.342854771472, 0.0238515965298, 0.177658111957, \
0.422301628067, 0.701867781693, 0.168650983171, 0.329723498664, \
0.490481106182, 0.892634029991, 0.994758791625, 0.998243714035, \
0.596244312914, 0.318601111178, 0.321593673214, 0.203486335469]
def __init__(self):
self.icnt1_10 = 0
self.icnt65_90 = 0
self.fcnt = 0
def randint(self, beg, end):
if beg == 1 and end == 10:
self.icnt1_10 += 1
if self.icnt1_10 > len(self.RINT1_10):
self.icnt1_10 = 1
return self.RINT1_10[self.icnt1_10 - 1]
if beg == 65 and end == 90:
self.icnt65_90 += 1
if self.icnt65_90 > len(self.RINT65_90):
self.icnt65_90 = 1
return self.RINT65_90[self.icnt65_90 - 1]
raise Exception("Not implemented")
def random(self):
self.fcnt += 1
if self.fcnt > len(self.RFLT):
self.fcnt = 1
return self.RFLT[self.fcnt - 1]
@common.add_unittest
def dotest(outputname, nostamp):
# generate sample invoice (according to Argentina's regulations)
from decimal import Decimal
f = Template(format="A4",
title="Sample Invoice", author="Sample Company",
subject="Sample Customer", keywords="Electronic TAX Invoice")
if nostamp:
f.pdf._putinfo = lambda: common.test_putinfo(f.pdf)
random = randomfake()
else:
import random
csvpath = os.path.join(common.basepath, "invoice.csv")
f.parse_csv(infile=csvpath, delimiter=";", decimal_sep=",")
detail = "Lorem ipsum dolor sit amet, consectetur. " * 30
items = []
for i in range(1, 30):
ds = "Sample product %s" % i
qty = random.randint(1,10)
price = round(random.random()*100,3)
code = "%s%s%02d" % (chr(random.randint(65,90)), chr(random.randint(65,90)),i)
items.append(dict(code=code, unit='u',
qty=qty, price=price,
amount=qty*price,
ds="%s: %s" % (i,ds)))
# divide and count lines
lines = 0
li_items = []
for it in items:
qty = it['qty']
code = it['code']
unit = it['unit']
for ds in f.split_multicell(it['ds'], 'item_description01'):
# add item description line (without price nor amount)
li_items.append(dict(code=code, ds=ds, qty=qty, unit=unit, price=None, amount=None))
# clean qty and code (show only at first)
unit = qty = code = None
# set last item line price and amount
li_items[-1].update(amount = it['amount'],
price = it['price'])
obs="\nDetail:\n\n" + detail
for ds in f.split_multicell(obs, 'item_description01'):
li_items.append(dict(code=code, ds=ds, qty=qty, unit=unit, price=None, amount=None))
# calculate pages:
lines = len(li_items)
max_lines_per_page = 24
pages = int(lines / (max_lines_per_page - 1))
if lines % (max_lines_per_page - 1): pages = pages + 1
# fill in the fields page by page
for page in range(1, int(pages)+1):
f.add_page()
f['page'] = 'Page %s of %s' % (page, pages)
if pages > 1 and page < pages:
f['item_description%02d' % max_lines_per_page] = 'Continues on page %s' % (page + 1)
total = Decimal("0.00")
li = 0
k = 0
for it in li_items:
k = k + 1
if k > page * (max_lines_per_page - 1):
break
if it['amount']:
total += Decimal("%.6f" % it['amount'])
if k > (page - 1) * (max_lines_per_page - 1):
li += 1
if it['qty'] is not None:
f['item_quantity%02d' % li] = it['qty']
if it['code'] is not None:
f['item_code%02d' % li] = it['code']
if it['unit'] is not None:
f['item_unit%02d' % li] = it['unit']
f['item_description%02d' % li] = it['ds']
if it['price'] is not None:
f['item_price%02d' % li] = "%0.3f" % it['price']
if it['amount'] is not None:
f['item_amount%02d' % li] = "%0.2f" % it['amount']
if pages == page:
f['net'] = "%0.2f" % (total/Decimal("1.21"))
f['vat'] = "%0.2f" % (total*(1-1/Decimal("1.21")))
f['total_label'] = 'Total:'
else:
f['total_label'] = 'SubTotal:'
f['total'] = "%0.2f" % total
f.render(outputname)
if __name__ == "__main__":
common.testmain(__file__, dotest)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
#
# FreeType high-level python API - Copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
#
# -----------------------------------------------------------------------------
'''
Show how to access glyph outline description.
'''
from freetype import *
if __name__ == '__main__':
import numpy
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches
face = Face(b'./Vera.ttf')
face.set_char_size( 32*64 )
face.load_char('g')
slot = face.glyph
bitmap = face.glyph.bitmap
width = face.glyph.bitmap.width
rows = face.glyph.bitmap.rows
pitch = face.glyph.bitmap.pitch
data = []
for i in range(rows):
data.extend(bitmap.buffer[i*pitch:i*pitch+width])
Z = numpy.array(data,dtype=numpy.ubyte).reshape(rows, width)
outline = slot.outline
points = numpy.array(outline.points, dtype=[('x',float), ('y',float)])
x, y = points['x'], points['y']
figure = plt.figure(figsize=(8,10))
axis = figure.add_subplot(111)
#axis.scatter(points['x'], points['y'], alpha=.25)
start, end = 0, 0
VERTS, CODES = [], []
# Iterate over each contour
for i in range(len(outline.contours)):
end = outline.contours[i]
points = outline.points[start:end+1]
points.append(points[0])
tags = outline.tags[start:end+1]
tags.append(tags[0])
segments = [ [points[0],], ]
for j in range(1, len(points) ):
segments[-1].append(points[j])
if tags[j] & (1 << 0) and j < (len(points)-1):
segments.append( [points[j],] )
verts = [points[0], ]
codes = [Path.MOVETO,]
for segment in segments:
if len(segment) == 2:
verts.extend(segment[1:])
codes.extend([Path.LINETO])
elif len(segment) == 3:
verts.extend(segment[1:])
codes.extend([Path.CURVE3, Path.CURVE3])
else:
verts.append(segment[1])
codes.append(Path.CURVE3)
for i in range(1,len(segment)-2):
A,B = segment[i], segment[i+1]
C = ((A[0]+B[0])/2.0, (A[1]+B[1])/2.0)
verts.extend([ C, B ])
codes.extend([ Path.CURVE3, Path.CURVE3])
verts.append(segment[-1])
codes.append(Path.CURVE3)
VERTS.extend(verts)
CODES.extend(codes)
start = end+1
# Draw glyph
path = Path(VERTS, CODES)
glyph = patches.PathPatch(path, fill = True, facecolor=(0.8,0.5,0.8), alpha=.25, lw=0)
glyph_outline = patches.PathPatch(path, fill = False, edgecolor='black', lw=3)
plt.imshow(Z, extent=[x.min(), x.max(),y.min(), y.max()],
interpolation='nearest', cmap = plt.cm.gray_r, vmin=0, vmax=400)
plt.xticks(numpy.linspace(x.min(), x.max(), Z.shape[1]+1), ())
plt.yticks(numpy.linspace(y.min(), y.max(), Z.shape[0]+1), ())
plt.grid(color='k', linewidth=1, linestyle='-')
axis.add_patch(glyph)
axis.add_patch(glyph_outline)
axis.set_xlim(x.min(), x.max())
axis.set_ylim(y.min(), y.max())
plt.savefig('test.pdf')
plt.show()
#!/usr/bin/env python2.7
from __future__ import absolute_import, unicode_literals, print_function, division
from sys import argv
from os import environ, stat, remove as _delete_file
from os.path import isfile, dirname, basename, abspath
from hashlib import sha256
from subprocess import check_call as run
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
NEED_TO_UPLOAD_MARKER = '.need-to-upload'
BYTES_PER_MB = 1024 * 1024
try:
BUCKET_NAME = environ['TWBS_S3_BUCKET']
except KeyError:
raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
def _sha256_of_file(filename):
hasher = sha256()
with open(filename, 'rb') as input_file:
hasher.update(input_file.read())
file_hash = hasher.hexdigest()
print('sha256({}) = {}'.format(filename, file_hash))
return file_hash
def _delete_file_quietly(filename):
try:
_delete_file(filename)
except (OSError, IOError):
pass
def _tarball_size(directory):
mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
return "{} MiB".format(mib)
def _tarball_filename_for(directory):
return abspath('./{}.tar.gz'.format(basename(directory)))
def _create_tarball(directory):
print("Creating tarball of {}...".format(directory))
run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)])
def _extract_tarball(directory):
print("Extracting tarball of {}...".format(directory))
run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)])
def download(directory):
_delete_file_quietly(NEED_TO_UPLOAD_MARKER)
try:
print("Downloading {} tarball from S3...".format(friendly_name))
key.get_contents_to_filename(_tarball_filename_for(directory))
except S3ResponseError as err:
open(NEED_TO_UPLOAD_MARKER, 'a').close()
print(err)
raise SystemExit("Cached {} download failed!".format(friendly_name))
print("Downloaded {}.".format(_tarball_size(directory)))
_extract_tarball(directory)
print("{} successfully installed from cache.".format(friendly_name))
def upload(directory):
_create_tarball(directory)
print("Uploading {} tarball to S3... ({})".format(friendly_name, _tarball_size(directory)))
key.set_contents_from_filename(_tarball_filename_for(directory))
print("{} cache successfully updated.".format(friendly_name))
_delete_file_quietly(NEED_TO_UPLOAD_MARKER)
if __name__ == '__main__':
# Uses environment variables:
# AWS_ACCESS_KEY_ID -- AWS Access Key ID
# AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
argv.pop(0)
if len(argv) != 4:
raise SystemExit("USAGE: s3_cache.py ")
mode, friendly_name, dependencies_file, directory = argv
conn = S3Connection()
bucket = conn.lookup(BUCKET_NAME, validate=False)
if bucket is None:
raise SystemExit("Could not access bucket!")
dependencies_file_hash = _sha256_of_file(dependencies_file)
key = Key(bucket, dependencies_file_hash)
key.storage_class = 'REDUCED_REDUNDANCY'
if mode == 'download':
download(directory)
elif mode == 'upload':
if isfile(NEED_TO_UPLOAD_MARKER): # FIXME
upload(directory)
else:
print("No need to upload anything.")
else:
raise SystemExit("Unrecognized mode {!r}".format(mode))
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import hmac
import httplib2
from neutronclient.v2_0 import client
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
import six
import six.moves.urllib.parse as urlparse
import webob
from neutron.agent.linux import utils as agent_utils
from neutron.agent.metadata import config
from neutron.agent import rpc as agent_rpc
from neutron.common import constants as n_const
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron.i18n import _LE, _LW
from neutron.openstack.common.cache import cache
LOG = logging.getLogger(__name__)
MODE_MAP = {
config.USER_MODE: 0o644,
config.GROUP_MODE: 0o664,
config.ALL_MODE: 0o666,
}
class MetadataPluginAPI(object):
"""Agent-side RPC for metadata agent-to-plugin interaction.
This class implements the client side of an rpc interface used by the
metadata service to make calls back into the Neutron plugin. The server
side is defined in
neutron.api.rpc.handlers.metadata_rpc.MetadataRpcCallback. For more
information about changing rpc interfaces, see
doc/source/devref/rpc_api.rst.
API version history:
1.0 - Initial version.
"""
def __init__(self, topic):
target = oslo_messaging.Target(
topic=topic,
namespace=n_const.RPC_NAMESPACE_METADATA,
version='1.0')
self.client = n_rpc.get_client(target)
def get_ports(self, context, filters):
cctxt = self.client.prepare()
return cctxt.call(context, 'get_ports', filters=filters)
class MetadataProxyHandler(object):
def __init__(self, conf):
self.conf = conf
self.auth_info = {}
if self.conf.cache_url:
self._cache = cache.get_cache(self.conf.cache_url)
else:
self._cache = False
self.plugin_rpc = MetadataPluginAPI(topics.PLUGIN)
self.context = context.get_admin_context_without_session()
# Use RPC by default
self.use_rpc = True
def _get_neutron_client(self):
params = {
'username': self.conf.admin_user,
'password': self.conf.admin_password,
'tenant_name': self.conf.admin_tenant_name,
'auth_url': self.conf.auth_url,
'auth_strategy': self.conf.auth_strategy,
'region_name': self.conf.auth_region,
'token': self.auth_info.get('auth_token'),
'insecure': self.conf.auth_insecure,
'ca_cert': self.conf.auth_ca_cert,
}
if self.conf.endpoint_url:
params['endpoint_url'] = self.conf.endpoint_url
else:
params['endpoint_url'] = self.auth_info.get('endpoint_url')
params['endpoint_type'] = self.conf.endpoint_type
return client.Client(**params)
@webob.dec.wsgify(RequestClass=webob.Request)
def __call__(self, req):
try:
LOG.debug("Request: %s", req)
instance_id, tenant_id = self._get_instance_and_tenant_id(req)
if instance_id:
return self._proxy_request(instance_id, tenant_id, req)
else:
return webob.exc.HTTPNotFound()
except Exception:
LOG.exception(_LE("Unexpected error."))
msg = _('An unknown error has occurred. '
'Please try your request again.')
explanation = six.text_type(msg)
return webob.exc.HTTPInternalServerError(explanation=explanation)
def _get_ports_from_server(self, router_id=None, ip_address=None,
networks=None):
"""Either get ports from server by RPC or fallback to neutron client"""
filters = self._get_port_filters(router_id, ip_address, networks)
if self.use_rpc:
try:
return self.plugin_rpc.get_ports(self.context, filters)
except (oslo_messaging.MessagingException, AttributeError):
# TODO(obondarev): remove fallback once RPC is proven
# to work fine with metadata agent (K or L release at most)
LOG.warning(_LW('Server does not support metadata RPC, '
'fallback to using neutron client'))
self.use_rpc = False
return self._get_ports_using_client(filters)
def _get_port_filters(self, router_id=None, ip_address=None,
networks=None):
filters = {}
if router_id:
filters['device_id'] = [router_id]
filters['device_owner'] = n_const.ROUTER_INTERFACE_OWNERS
if ip_address:
filters['fixed_ips'] = {'ip_address': [ip_address]}
if networks:
filters['network_id'] = networks
return filters
@utils.cache_method_results
def _get_router_networks(self, router_id):
"""Find all networks connected to given router."""
internal_ports = self._get_ports_from_server(router_id=router_id)
return tuple(p['network_id'] for p in internal_ports)
@utils.cache_method_results
def _get_ports_for_remote_address(self, remote_address, networks):
"""Get list of ports that has given ip address and are part of
given networks.
:param networks: list of networks in which the ip address will be
searched for
"""
return self._get_ports_from_server(networks=networks,
ip_address=remote_address)
def _get_ports_using_client(self, filters):
# reformat filters for neutron client
if 'device_id' in filters:
filters['device_id'] = filters['device_id'][0]
if 'fixed_ips' in filters:
filters['fixed_ips'] = [
'ip_address=%s' % filters['fixed_ips']['ip_address'][0]]
client = self._get_neutron_client()
ports = client.list_ports(**filters)
self.auth_info = client.get_auth_info()
return ports['ports']
def _get_ports(self, remote_address, network_id=None, router_id=None):
"""Search for all ports that contain passed ip address and belongs to
given network.
If no network is passed ports are searched on all networks connected to
given router. Either one of network_id or router_id must be passed.
"""
if network_id:
networks = (network_id,)
elif router_id:
networks = self._get_router_networks(router_id)
else:
raise TypeError(_("Either one of parameter network_id or router_id"
" must be passed to _get_ports method."))
return self._get_ports_for_remote_address(remote_address, networks)
def _get_instance_and_tenant_id(self, req):
remote_address = req.headers.get('X-Forwarded-For')
network_id = req.headers.get('X-Neutron-Network-ID')
router_id = req.headers.get('X-Neutron-Router-ID')
ports = self._get_ports(remote_address, network_id, router_id)
if len(ports) == 1:
return ports[0]['device_id'], ports[0]['tenant_id']
return None, None
def _proxy_request(self, instance_id, tenant_id, req):
headers = {
'X-Forwarded-For': req.headers.get('X-Forwarded-For'),
'X-Instance-ID': instance_id,
'X-Tenant-ID': tenant_id,
'X-Instance-ID-Signature': self._sign_instance_id(instance_id)
}
nova_ip_port = '%s:%s' % (self.conf.nova_metadata_ip,
self.conf.nova_metadata_port)
url = urlparse.urlunsplit((
self.conf.nova_metadata_protocol,
nova_ip_port,
req.path_info,
req.query_string,
''))
h = httplib2.Http(
ca_certs=self.conf.auth_ca_cert,
disable_ssl_certificate_validation=self.conf.nova_metadata_insecure
)
if self.conf.nova_client_cert and self.conf.nova_client_priv_key:
h.add_certificate(self.conf.nova_client_priv_key,
self.conf.nova_client_cert,
nova_ip_port)
resp, content = h.request(url, method=req.method, headers=headers,
body=req.body)
if resp.status == 200:
LOG.debug(str(resp))
req.response.content_type = resp['content-type']
req.response.body = content
return req.response
elif resp.status == 403:
LOG.warn(_LW(
'The remote metadata server responded with Forbidden. This '
'response usually occurs when shared secrets do not match.'
))
return webob.exc.HTTPForbidden()
elif resp.status == 400:
return webob.exc.HTTPBadRequest()
elif resp.status == 404:
return webob.exc.HTTPNotFound()
elif resp.status == 409:
return webob.exc.HTTPConflict()
elif resp.status == 500:
msg = _(
'Remote metadata server experienced an internal server error.'
)
LOG.warn(msg)
explanation = six.text_type(msg)
return webob.exc.HTTPInternalServerError(explanation=explanation)
else:
raise Exception(_('Unexpected response code: %s') % resp.status)
def _sign_instance_id(self, instance_id):
secret = self.conf.metadata_proxy_shared_secret
if isinstance(secret, six.text_type):
secret = secret.encode('utf-8')
if isinstance(instance_id, six.text_type):
instance_id = instance_id.encode('utf-8')
return hmac.new(secret, instance_id, hashlib.sha256).hexdigest()
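# A minimal verification sketch (not part of the original agent) for the
# X-Instance-ID-Signature header produced by _sign_instance_id() above; the
# function name and its arguments are illustrative assumptions.
def _verify_instance_id_signature(shared_secret, instance_id, signature):
    # Recompute the HMAC-SHA256 hex digest and compare against the header.
    if isinstance(shared_secret, six.text_type):
        shared_secret = shared_secret.encode('utf-8')
    if isinstance(instance_id, six.text_type):
        instance_id = instance_id.encode('utf-8')
    expected = hmac.new(shared_secret, instance_id, hashlib.sha256).hexdigest()
    # compare_digest gives a constant-time comparison.
    return hmac.compare_digest(expected, signature)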
class UnixDomainMetadataProxy(object):
def __init__(self, conf):
self.conf = conf
agent_utils.ensure_directory_exists_without_file(
cfg.CONF.metadata_proxy_socket)
self._init_state_reporting()
def _init_state_reporting(self):
self.context = context.get_admin_context_without_session()
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.PLUGIN)
self.agent_state = {
'binary': 'neutron-metadata-agent',
'host': cfg.CONF.host,
'topic': 'N/A',
'configurations': {
'metadata_proxy_socket': cfg.CONF.metadata_proxy_socket,
'nova_metadata_ip': cfg.CONF.nova_metadata_ip,
'nova_metadata_port': cfg.CONF.nova_metadata_port,
'log_agent_heartbeats': cfg.CONF.AGENT.log_agent_heartbeats,
},
'start_flag': True,
'agent_type': n_const.AGENT_TYPE_METADATA}
report_interval = cfg.CONF.AGENT.report_interval
if report_interval:
self.heartbeat = loopingcall.FixedIntervalLoopingCall(
self._report_state)
self.heartbeat.start(interval=report_interval)
def _report_state(self):
try:
self.state_rpc.report_state(
self.context,
self.agent_state,
use_call=self.agent_state.get('start_flag'))
except AttributeError:
# This means the server does not support report_state
LOG.warn(_LW('Neutron server does not support state report.'
' State report for this agent will be disabled.'))
self.heartbeat.stop()
return
except Exception:
LOG.exception(_LE("Failed reporting state!"))
return
self.agent_state.pop('start_flag', None)
def _get_socket_mode(self):
mode = self.conf.metadata_proxy_socket_mode
if mode == config.DEDUCE_MODE:
user = self.conf.metadata_proxy_user
if (not user or user == '0' or user == 'root'
or agent_utils.is_effective_user(user)):
# user is agent effective user or root => USER_MODE
mode = config.USER_MODE
else:
group = self.conf.metadata_proxy_group
if not group or agent_utils.is_effective_group(group):
# group is agent effective group => GROUP_MODE
mode = config.GROUP_MODE
else:
# otherwise => ALL_MODE
mode = config.ALL_MODE
return MODE_MAP[mode]
def run(self):
server = agent_utils.UnixDomainWSGIServer('neutron-metadata-agent')
server.start(MetadataProxyHandler(self.conf),
self.conf.metadata_proxy_socket,
workers=self.conf.metadata_workers,
backlog=self.conf.metadata_backlog,
mode=self._get_socket_mode())
server.wait()
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsFieldModel
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '14/11/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import qgis # NOQA
from qgis.core import (QgsField,
QgsFields,
QgsVectorLayer,
QgsFieldModel,
QgsFieldProxyModel,
QgsEditorWidgetSetup,
QgsProject,
QgsVectorLayerJoinInfo,
QgsFieldConstraints)
from qgis.PyQt.QtCore import QVariant, Qt, QModelIndex
from qgis.testing import start_app, unittest
start_app()
def create_layer():
layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
"addfeat", "memory")
layer.setEditorWidgetSetup(0, QgsEditorWidgetSetup('Hidden', {}))
layer.setEditorWidgetSetup(1, QgsEditorWidgetSetup('ValueMap', {}))
assert layer.isValid()
return layer
def create_model():
l = create_layer()
m = QgsFieldModel()
m.setLayer(l)
return l, m
class TestQgsFieldModel(unittest.TestCase):
def testGettersSetters(self):
""" test model getters/setters """
l = create_layer()
m = QgsFieldModel()
self.assertFalse(m.layer())
m.setLayer(l)
self.assertEqual(m.layer(), l)
m.setAllowExpression(True)
self.assertTrue(m.allowExpression())
m.setAllowExpression(False)
self.assertFalse(m.allowExpression())
m.setAllowEmptyFieldName(True)
self.assertTrue(m.allowEmptyFieldName())
m.setAllowEmptyFieldName(False)
self.assertFalse(m.allowEmptyFieldName())
fields = QgsFields()
fields.append(QgsField('test1', QVariant.String))
fields.append(QgsField('test2', QVariant.String))
m.setFields(fields)
self.assertIsNone(m.layer())
self.assertEqual(m.fields(), fields)
def testIndexFromName(self):
l, m = create_model()
i = m.indexFromName('fldtxt')
self.assertTrue(i.isValid())
self.assertEqual(i.row(), 0)
i = m.indexFromName('fldint')
self.assertTrue(i.isValid())
self.assertEqual(i.row(), 1)
i = m.indexFromName('not a field')
self.assertFalse(i.isValid())
# test with alias
i = m.indexFromName('text field')
self.assertFalse(i.isValid())
l.setFieldAlias(0, 'text field')
i = m.indexFromName('text field')
self.assertTrue(i.isValid())
self.assertEqual(i.row(), 0)
i = m.indexFromName('int field')
self.assertFalse(i.isValid())
l.setFieldAlias(1, 'int field')
i = m.indexFromName('int field')
self.assertTrue(i.isValid())
self.assertEqual(i.row(), 1)
# should be case insensitive
i = m.indexFromName('FLDTXT')
self.assertTrue(i.isValid())
self.assertEqual(i.row(), 0)
i = m.indexFromName('FLDINT')
self.assertTrue(i.isValid())
self.assertEqual(i.row(), 1)
# try with expression
m.setAllowExpression(True)
i = m.indexFromName('not a field')
# still not valid - needs expression set first
self.assertFalse(i.isValid())
m.setExpression('not a field')
i = m.indexFromName('not a field')
self.assertTrue(i.isValid())
self.assertEqual(i.row(), 2)
# try with null
i = m.indexFromName(None)
self.assertFalse(i.isValid())
m.setAllowEmptyFieldName(True)
i = m.indexFromName(None)
self.assertTrue(i.isValid())
self.assertEqual(i.row(), 0)
# when null is shown, all other rows should be offset
self.assertEqual(m.indexFromName('fldtxt').row(), 1)
self.assertEqual(m.indexFromName('fldint').row(), 2)
self.assertEqual(m.indexFromName('not a field').row(), 3)
self.assertEqual(m.indexFromName('FLDTXT').row(), 1)
self.assertEqual(m.indexFromName('FLDINT').row(), 2)
def testIsField(self):
l, m = create_model()
self.assertTrue(m.isField('fldtxt'))
self.assertTrue(m.isField('fldint'))
self.assertFalse(m.isField(None))
self.assertFalse(m.isField('an expression'))
def testRowCount(self):
l, m = create_model()
self.assertEqual(m.rowCount(), 2)
m.setAllowEmptyFieldName(True)
self.assertEqual(m.rowCount(), 3)
m.setAllowExpression(True)
m.setExpression('not a field')
self.assertEqual(m.rowCount(), 4)
m.setExpression('not a field')
self.assertEqual(m.rowCount(), 4)
m.setExpression('not a field 2')
self.assertEqual(m.rowCount(), 4)
m.removeExpression()
self.assertEqual(m.rowCount(), 3)
def testFieldNameRole(self):
l, m = create_model()
self.assertEqual(m.data(m.indexFromName('fldtxt'), QgsFieldModel.FieldNameRole), 'fldtxt')
self.assertEqual(m.data(m.indexFromName('fldint'), QgsFieldModel.FieldNameRole), 'fldint')
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldNameRole))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldNameRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldNameRole))
m.setAllowEmptyFieldName(True)
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldNameRole))
def testExpressionRole(self):
l, m = create_model()
self.assertEqual(m.data(m.indexFromName('fldtxt'), QgsFieldModel.ExpressionRole), 'fldtxt')
self.assertEqual(m.data(m.indexFromName('fldint'), QgsFieldModel.ExpressionRole), 'fldint')
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.ExpressionRole))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.ExpressionRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertEqual(m.data(m.indexFromName('an expression'), QgsFieldModel.ExpressionRole), 'an expression')
m.setAllowEmptyFieldName(True)
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.ExpressionRole))
def testFieldIndexRole(self):
l, m = create_model()
self.assertEqual(m.data(m.indexFromName('fldtxt'), QgsFieldModel.FieldIndexRole), 0)
self.assertEqual(m.data(m.indexFromName('fldint'), QgsFieldModel.FieldIndexRole), 1)
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldIndexRole))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldIndexRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldIndexRole))
m.setAllowEmptyFieldName(True)
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldIndexRole))
def testIsExpressionRole(self):
l, m = create_model()
self.assertFalse(m.data(m.indexFromName('fldtxt'), QgsFieldModel.IsExpressionRole))
self.assertFalse(m.data(m.indexFromName('fldint'), QgsFieldModel.IsExpressionRole))
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.IsExpressionRole))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.IsExpressionRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertTrue(m.data(m.indexFromName('an expression'), QgsFieldModel.IsExpressionRole))
m.setAllowEmptyFieldName(True)
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.IsExpressionRole))
def testExpressionValidityRole(self):
l, m = create_model()
self.assertTrue(m.data(m.indexFromName('fldtxt'), QgsFieldModel.ExpressionValidityRole))
self.assertTrue(m.data(m.indexFromName('fldint'), QgsFieldModel.ExpressionValidityRole))
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.ExpressionValidityRole))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.ExpressionValidityRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.ExpressionValidityRole))
m.setAllowEmptyFieldName(True)
self.assertTrue(m.data(m.indexFromName(None), QgsFieldModel.ExpressionValidityRole))
def testFieldTypeRole(self):
l, m = create_model()
self.assertEqual(m.data(m.indexFromName('fldtxt'), QgsFieldModel.FieldTypeRole), QVariant.String)
self.assertEqual(m.data(m.indexFromName('fldint'), QgsFieldModel.FieldTypeRole), QVariant.Int)
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldTypeRole))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldTypeRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldTypeRole))
m.setAllowEmptyFieldName(True)
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldTypeRole))
def testFieldOriginRole(self):
l, m = create_model()
self.assertEqual(m.data(m.indexFromName('fldtxt'), QgsFieldModel.FieldOriginRole), QgsFields.OriginProvider)
self.assertEqual(m.data(m.indexFromName('fldint'), QgsFieldModel.FieldOriginRole), QgsFields.OriginProvider)
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldOriginRole))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldOriginRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldOriginRole))
m.setAllowEmptyFieldName(True)
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldOriginRole))
def testIsEmptyRole(self):
l, m = create_model()
self.assertFalse(m.data(m.indexFromName('fldtxt'), QgsFieldModel.IsEmptyRole))
self.assertFalse(m.data(m.indexFromName('fldint'), QgsFieldModel.IsEmptyRole))
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.IsEmptyRole))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.IsEmptyRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.IsEmptyRole))
m.setAllowEmptyFieldName(True)
self.assertTrue(m.data(m.indexFromName(None), QgsFieldModel.IsEmptyRole))
def testDisplayRole(self):
l, m = create_model()
self.assertEqual(m.data(m.indexFromName('fldtxt'), Qt.DisplayRole), 'fldtxt')
self.assertEqual(m.data(m.indexFromName('fldint'), Qt.DisplayRole), 'fldint')
self.assertFalse(m.data(m.indexFromName('an expression'), Qt.DisplayRole))
self.assertFalse(m.data(m.indexFromName(None), Qt.DisplayRole))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertEqual(m.data(m.indexFromName('an expression'), Qt.DisplayRole), 'an expression')
m.setAllowEmptyFieldName(True)
self.assertFalse(m.data(m.indexFromName(None), Qt.DisplayRole))
def testManualFields(self):
_, m = create_model()
fields = QgsFields()
fields.append(QgsField('f1', QVariant.String))
fields.append(QgsField('f2', QVariant.String))
m.setFields(fields)
self.assertEqual(m.rowCount(), 2)
self.assertEqual(m.data(m.index(0, 0, QModelIndex()), Qt.DisplayRole), 'f1')
self.assertEqual(m.data(m.index(1, 0, QModelIndex()), Qt.DisplayRole), 'f2')
def testEditorWidgetTypeRole(self):
l, m = create_model()
self.assertEqual(m.data(m.indexFromName('fldtxt'), QgsFieldModel.EditorWidgetType), 'Hidden')
self.assertEqual(m.data(m.indexFromName('fldint'), QgsFieldModel.EditorWidgetType), 'ValueMap')
self.assertIsNone(m.data(m.indexFromName('an expression'), QgsFieldModel.EditorWidgetType))
self.assertIsNone(m.data(m.indexFromName(None), QgsFieldModel.EditorWidgetType))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertIsNone(m.data(m.indexFromName('an expression'), QgsFieldModel.EditorWidgetType))
m.setAllowEmptyFieldName(True)
self.assertIsNone(m.data(m.indexFromName(None), QgsFieldModel.EditorWidgetType))
def testJoinedFieldIsEditableRole(self):
layer = QgsVectorLayer("Point?field=id_a:integer",
"addfeat", "memory")
layer2 = QgsVectorLayer("Point?field=id_b:integer&field=value_b",
"addfeat", "memory")
QgsProject.instance().addMapLayers([layer, layer2])
# editable join
join_info = QgsVectorLayerJoinInfo()
join_info.setTargetFieldName("id_a")
join_info.setJoinLayer(layer2)
join_info.setJoinFieldName("id_b")
join_info.setPrefix("B_")
join_info.setEditable(True)
join_info.setUpsertOnEdit(True)
layer.addJoin(join_info)
m = QgsFieldModel()
m.setLayer(layer)
self.assertIsNone(m.data(m.indexFromName('id_a'), QgsFieldModel.JoinedFieldIsEditable))
self.assertTrue(m.data(m.indexFromName('B_value_b'), QgsFieldModel.JoinedFieldIsEditable))
self.assertIsNone(m.data(m.indexFromName('an expression'), QgsFieldModel.JoinedFieldIsEditable))
self.assertIsNone(m.data(m.indexFromName(None), QgsFieldModel.JoinedFieldIsEditable))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertIsNone(m.data(m.indexFromName('an expression'), QgsFieldModel.JoinedFieldIsEditable))
m.setAllowEmptyFieldName(True)
self.assertIsNone(m.data(m.indexFromName(None), QgsFieldModel.JoinedFieldIsEditable))
proxy_m = QgsFieldProxyModel()
proxy_m.setFilters(QgsFieldProxyModel.AllTypes | QgsFieldProxyModel.HideReadOnly)
proxy_m.sourceFieldModel().setLayer(layer)
self.assertEqual(proxy_m.rowCount(), 2)
self.assertEqual(proxy_m.data(proxy_m.index(0, 0)), 'id_a')
self.assertEqual(proxy_m.data(proxy_m.index(1, 0)), 'B_value_b')
# not editable join
layer3 = QgsVectorLayer("Point?field=id_a:integer",
"addfeat", "memory")
QgsProject.instance().addMapLayers([layer3])
join_info = QgsVectorLayerJoinInfo()
join_info.setTargetFieldName("id_a")
join_info.setJoinLayer(layer2)
join_info.setJoinFieldName("id_b")
join_info.setPrefix("B_")
join_info.setEditable(False)
layer3.addJoin(join_info)
m = QgsFieldModel()
m.setLayer(layer3)
self.assertIsNone(m.data(m.indexFromName('id_a'), QgsFieldModel.JoinedFieldIsEditable))
self.assertFalse(m.data(m.indexFromName('B_value_b'), QgsFieldModel.JoinedFieldIsEditable))
self.assertIsNone(m.data(m.indexFromName('an expression'), QgsFieldModel.JoinedFieldIsEditable))
self.assertIsNone(m.data(m.indexFromName(None), QgsFieldModel.JoinedFieldIsEditable))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertIsNone(m.data(m.indexFromName('an expression'), QgsFieldModel.JoinedFieldIsEditable))
m.setAllowEmptyFieldName(True)
self.assertIsNone(m.data(m.indexFromName(None), QgsFieldModel.JoinedFieldIsEditable))
proxy_m = QgsFieldProxyModel()
proxy_m.sourceFieldModel().setLayer(layer3)
proxy_m.setFilters(QgsFieldProxyModel.AllTypes | QgsFieldProxyModel.HideReadOnly)
self.assertEqual(proxy_m.rowCount(), 1)
self.assertEqual(proxy_m.data(proxy_m.index(0, 0)), 'id_a')
def testFieldIsWidgetEditableRole(self):
l, m = create_model()
self.assertTrue(m.data(m.indexFromName('fldtxt'), QgsFieldModel.FieldIsWidgetEditable))
self.assertTrue(m.data(m.indexFromName('fldint'), QgsFieldModel.FieldIsWidgetEditable))
self.assertFalse(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldIsWidgetEditable))
self.assertFalse(m.data(m.indexFromName(None), QgsFieldModel.FieldIsWidgetEditable))
m.setAllowExpression(True)
m.setExpression('an expression')
self.assertTrue(m.data(m.indexFromName('an expression'), QgsFieldModel.FieldIsWidgetEditable))
m.setAllowEmptyFieldName(True)
self.assertTrue(m.data(m.indexFromName(None), QgsFieldModel.FieldIsWidgetEditable))
editFormConfig = l.editFormConfig()
idx = l.fields().indexOf('fldtxt')
# Make fldtxt readOnly
editFormConfig.setReadOnly(idx, True)
l.setEditFormConfig(editFormConfig)
# It's read only, so the widget is NOT editable
self.assertFalse(m.data(m.indexFromName('fldtxt'), QgsFieldModel.FieldIsWidgetEditable))
def testFieldTooltip(self):
f = QgsField('my_string', QVariant.String, 'string')
self.assertEqual(QgsFieldModel.fieldToolTip(f), "my_string
string NULL")
f.setAlias('my alias')
self.assertEqual(QgsFieldModel.fieldToolTip(f), "my alias (my_string)
string NULL")
f.setLength(20)
self.assertEqual(QgsFieldModel.fieldToolTip(f), "my alias (my_string)
string(20) NULL")
f = QgsField('my_real', QVariant.Double, 'real', 8, 3)
self.assertEqual(QgsFieldModel.fieldToolTip(f), "my_real
real(8, 3) NULL")
f.setComment('Comment text')
self.assertEqual(QgsFieldModel.fieldToolTip(f), "my_real
real(8, 3) NULL
Comment text")
def testFieldTooltipExtended(self):
layer = QgsVectorLayer("Point?", "tooltip", "memory")
f = QgsField('my_real', QVariant.Double, 'real', 8, 3, 'Comment text')
layer.addExpressionField('1+1', f)
layer.updateFields()
self.assertEqual(QgsFieldModel.fieldToolTipExtended(QgsField('my_string', QVariant.String, 'string'), layer), '')
self.assertEqual(QgsFieldModel.fieldToolTipExtended(f, layer), "my_real
real(8, 3) NULL
Comment text
1+1")
f.setAlias('my alias')
constraints = f.constraints()
constraints.setConstraint(QgsFieldConstraints.ConstraintUnique)
f.setConstraints(constraints)
self.assertEqual(QgsFieldModel.fieldToolTipExtended(f, layer), "my alias (my_real)
real(8, 3) NULL UNIQUE
Comment text
1+1")
if __name__ == '__main__':
unittest.main()
# -*- test-case-name: twisted.internet.test.test_sigchld -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module is used to integrate child process termination into a
reactor event loop. This is a challenging feature to provide because
most platforms indicate process termination via SIGCHLD and do not
provide a way to wait for that signal and arbitrary I/O events at the
same time. The naive implementation involves installing a Python
SIGCHLD handler; unfortunately this leads to other syscalls being
interrupted (whenever SIGCHLD is received) and failing with EINTR
(which almost no one is prepared to handle). This interruption can be
disabled via siginterrupt(2) (or one of the equivalent mechanisms);
however, if the SIGCHLD is delivered by the platform to a non-main
thread (not a common occurrence, but difficult to prove impossible),
the main thread (waiting on select() or another event notification
API) may not wake up leading to an arbitrary delay before the child
termination is noticed.
The basic solution to all these issues involves enabling SA_RESTART
(ie, disabling system call interruption) and registering a C signal
handler which writes a byte to a pipe. The other end of the pipe is
registered with the event loop, allowing it to wake up shortly after
SIGCHLD is received. See L{twisted.internet.posixbase._SIGCHLDWaker}
for the implementation of the event loop side of this solution. The
use of a pipe this way is known as the U{self-pipe
trick<http://cr.yp.to/docs/selfpipe.html>}.
From Python version 2.6, C{signal.siginterrupt} and C{signal.set_wakeup_fd}
provide the necessary C signal handler which writes to the pipe to be
registered with C{SA_RESTART}.
"""
from __future__ import division, absolute_import
import signal
def installHandler(fd):
"""
Install a signal handler which will write a byte to C{fd} when
I{SIGCHLD} is received.
This is implemented by installing a SIGCHLD handler that does nothing,
setting the I{SIGCHLD} handler as not allowed to interrupt system calls,
and using L{signal.set_wakeup_fd} to do the actual writing.
@param fd: The file descriptor to which to write when I{SIGCHLD} is
received.
@type fd: C{int}
"""
if fd == -1:
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
else:
def noopSignalHandler(*args):
pass
signal.signal(signal.SIGCHLD, noopSignalHandler)
signal.siginterrupt(signal.SIGCHLD, False)
return signal.set_wakeup_fd(fd)
def isDefaultHandler():
"""
Determine whether the I{SIGCHLD} handler is the default or not.
"""
return signal.getsignal(signal.SIGCHLD) == signal.SIG_DFL
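# A minimal usage sketch (commented out; not part of this module) of the
# self-pipe trick described in the module docstring. The reactor performs
# this wiring itself, so everything below is an illustrative assumption.
#
#   import os, fcntl, select
#   r, w = os.pipe()
#   for pipe_fd in (r, w):
#       flags = fcntl.fcntl(pipe_fd, fcntl.F_GETFL)
#       fcntl.fcntl(pipe_fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
#   installHandler(w)           # a byte lands on w whenever SIGCHLD arrives
#   select.select([r], [], [])  # wakes shortly after a child terminates
#   os.read(r, 4096)            # drain the pipe, then reap via os.waitpid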
import os
import platform
from twisted.internet import defer
from .. import data, helper
from p2pool.util import pack
P2P_PREFIX = 'fcc1b7dc'.decode('hex')
P2P_PORT = 19333
ADDRESS_VERSION = 111
RPC_PORT = 19332
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
'litecoinaddress' in (yield bitcoind.rpc_help()) and
(yield bitcoind.rpc_getinfo())['testnet']
))
SUBSIDY_FUNC = lambda height: 50*100000000 >> (height + 1)//840000
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('ltc_scrypt').getPoWHash(data))
BLOCK_PERIOD = 150 # s
SYMBOL = 'tLTC'
CONF_FILE_FUNC = lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'Litecoin') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/Litecoin/') if platform.system() == 'Darwin' else os.path.expanduser('~/.litecoin'), 'litecoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://nonexistent-litecoin-testnet-explorer/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://nonexistent-litecoin-testnet-explorer/address/'
TX_EXPLORER_URL_PREFIX = 'http://nonexistent-litecoin-testnet-explorer/tx/'
SANE_TARGET_RANGE = (2**256//1000000000 - 1, 2**256 - 1)
DUMB_SCRYPT_DIFF = 2**16
DUST_THRESHOLD = 1e8
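# Quick illustration (not part of the original file) of the halving schedule
# encoded in SUBSIDY_FUNC: the right shift grows by one every 840000 blocks,
# halving the subsidy each time, e.g.:
#
#   SUBSIDY_FUNC(0)        == 50*100000000       # 50 tLTC
#   SUBSIDY_FUNC(2*840000) == 50*100000000 >> 2  # 12.5 tLTC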
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script computes the number of concurrent links we want to run in the build
# as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP.
import optparse
import os
import re
import subprocess
import sys
def _GetTotalMemoryInBytes():
if sys.platform in ('win32', 'cygwin'):
import ctypes
class MEMORYSTATUSEX(ctypes.Structure):
_fields_ = [
("dwLength", ctypes.c_ulong),
("dwMemoryLoad", ctypes.c_ulong),
("ullTotalPhys", ctypes.c_ulonglong),
("ullAvailPhys", ctypes.c_ulonglong),
("ullTotalPageFile", ctypes.c_ulonglong),
("ullAvailPageFile", ctypes.c_ulonglong),
("ullTotalVirtual", ctypes.c_ulonglong),
("ullAvailVirtual", ctypes.c_ulonglong),
("sullAvailExtendedVirtual", ctypes.c_ulonglong),
]
stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
return stat.ullTotalPhys
elif sys.platform.startswith('linux'):
if os.path.exists("/proc/meminfo"):
with open("/proc/meminfo") as meminfo:
memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
for line in meminfo:
match = memtotal_re.match(line)
if not match:
continue
return float(match.group(1)) * 2**10
elif sys.platform == 'darwin':
try:
return int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
except Exception:
return 0
# TODO(scottmg): Implement this for other platforms.
return 0
def _GetDefaultConcurrentLinks(mem_per_link_gb, reserve_mem_gb):
# Inherit the legacy environment variable for people that have set it in GYP.
pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
if pool_size:
return pool_size
mem_total_bytes = _GetTotalMemoryInBytes()
mem_total_bytes = max(0, mem_total_bytes - reserve_mem_gb * 2**30)
num_concurrent_links = int(max(1, mem_total_bytes / mem_per_link_gb / 2**30))
hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
return min(num_concurrent_links, hard_cap)
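# Worked example (illustrative figures, not from the original source): on a
# 32 GiB machine with mem_per_link_gb=8 and reserve_mem_gb=0 this yields
# int(max(1, 32 * 2**30 / 8 / 2**30)) == 4 concurrent links, subject to the
# GYP_LINK_CONCURRENCY and GYP_LINK_CONCURRENCY_MAX overrides above.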
def main():
parser = optparse.OptionParser()
parser.add_option('--mem_per_link_gb', action="store", type="int", default=8)
parser.add_option('--reserve_mem_gb', action="store", type="int", default=0)
parser.disable_interspersed_args()
options, _ = parser.parse_args()
print _GetDefaultConcurrentLinks(options.mem_per_link_gb,
options.reserve_mem_gb)
return 0
if __name__ == '__main__':
sys.exit(main())
from __future__ import print_function, division, absolute_import
try:
from .core import (DataFrame, Series, Index, _Frame, map_partitions,
repartition, to_datetime, to_timedelta)
from .groupby import Aggregation
from .io import (from_array, from_pandas, from_bcolz,
from_dask_array, read_hdf, read_sql_table,
from_delayed, read_csv, to_csv, read_table,
demo, to_hdf, to_records, to_bag, read_json, to_json)
from .optimize import optimize
from .multi import merge, concat
from . import rolling
from ..base import compute
from .reshape import get_dummies, pivot_table, melt
from .utils import assert_eq
from .io.orc import read_orc
try:
from .io import read_parquet, to_parquet
except ImportError:
pass
try:
from .core import isna
except ImportError:
pass
except ImportError as e:
msg = ("Dask dataframe requirements are not installed.\n\n"
"Please either conda or pip install as follows:\n\n"
" conda install dask # either conda install\n"
" pip install dask[dataframe] --upgrade # or pip install")
raise ImportError(str(e) + '\n\n' + msg)
# Copyright (c) Victor van den Elzen
# Released under the Expat license, see LICENSE file for details
from struct import pack, unpack, calcsize
from collections import OrderedDict
def getbytes(s, n):
b = s.read(n)
assert len(b) == n, "Unexpected EOF"
return b
def getbyte(s):
return getbytes(s, 1)
class Seek(object):
def __init__(self, s, *args, **kwargs):
self.old_pos = None
self.s = s
self.args = args
self.kwargs = kwargs
def __enter__(self):
self.old_pos = self.s.tell()
self.s.seek(*self.args, **self.kwargs)
def __exit__(self, exc_type, exc_value, traceback):
self.s.seek(self.old_pos)
class FakeWriteStream(object):
def __init__(self, offset=0):
self.offset = offset
def seek(self, offset):
self.offset = offset
def tell(self):
return self.offset
def write(self, data):
self.offset += len(data)
return len(data)
class BaseField(object):
def unpack(self, s):
self.data = self.unpack_data(s)
def unpack_data(self, s):
raise NotImplementedError(self)
def pack(self, s):
self.pack_data(s, self.data)
def pack_data(self, s, data):
raise NotImplementedError(self)
def full_pack(self, s):
new_data = self.data
while True:
old_data = new_data
self.pack(FakeWriteStream(s.tell()))
new_data = self.data
if old_data == new_data:
break
self.pack(s)
def serialize(self):
return self.data
class ContainerField(BaseField):
def __getitem__(self, key):
return self.field[key]
def __setitem__(self, key, value):
self.field[key] = value
def __delitem__(self, key):
del self.field[key]
def __len__(self):
return len(self.field)
def __iter__(self):
return iter(self.field)
def __contains__(self, key):
return key in self.field
def serialize(self):
return self.field.serialize()
class Struct(ContainerField):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def add_field(self, name, f):
assert name not in self, name
self[name] = f
input_type, v = self.input
if input_type == "data":
f.data = v.get(name, None)
elif input_type == "stream":
f.unpack(v)
else:
assert False, input_type
return f
def F(self, name, f):
return self.add_field(name, f)
def unpack(self, s):
self.field = OrderedDict()
self.input = ("stream", s)
self.fields(*self.args, **self.kwargs)
del self.input
def pack(self, s):
for name, f in self.field.items():
f.pack(s)
@property
def data(self):
data = OrderedDict()
for k, v in self.field.items():
data[k] = v.data
return data
@data.setter
def data(self, v):
self.field = OrderedDict()
self.input = ("data", v)
self.fields(*self.args, **self.kwargs)
del self.input
def serialize(self):
data = OrderedDict()
for k, v in self.field.items():
if self.should_serialize(k, v):
data[k] = v.serialize()
return data
def should_serialize(self, k, v):
return True
def fields(self):
raise NotImplementedError(self)
class Magic(BaseField):
def __init__(self, magic):
if isinstance(magic, str):
magic = magic.encode()
self.magic = magic
def unpack(self, s):
data = getbytes(s, len(self.magic))
assert data == self.magic
def pack(self, s):
s.write(self.magic)
@property
def data(self):
return self.magic.decode()
@data.setter
def data(self, v):
assert v == self.magic or v is None, v
class Format(BaseField):
def __init__(self, fmt):
if fmt[0] in "@=<>!":
bosa = fmt[0]
fmt = fmt[1:]
else:
bosa = "<"
self.bosa = bosa
self.fmt = fmt
self.single = len(fmt) == 1
def unpack_data(self, s):
fmt = self.bosa + self.fmt
size = calcsize(fmt)
b = getbytes(s, size)
data = unpack(fmt, b)
if self.single:
assert len(data) == 1
data = data[0]
return data
def pack_data(self, s, data):
if self.single:
data = (data,)
s.write(pack(self.bosa + self.fmt, *data))
class BaseArray(ContainerField):
def __init__(self, field_maker=None, field_function=None):
if field_function is None:
field_function = lambda i, f: field_maker()
self.field_fun = field_function
self._dict = None
def unpack(self, s):
self.field = [self.field_fun(i, self) for i in range(self.size)]
for f in self:
f.unpack(s)
def pack(self, s):
for f in self:
f.pack(s)
@property
def data(self):
return [f.data for f in self]
def index(self, field):
if self._dict is None:
self._dict = {}
for i in range(len(self.field)):
self._dict[self.field[i]] = i
return self._dict[field]
@data.setter
def data(self, v):
self.field = [self.field_fun(i, self) for i in range(len(v))]
for f, fv in zip(self.field, v):
f.data = fv
self._dict = None
def serialize(self):
return [f.serialize() for f in self]
def append_data(self, v):
idx = len(self.field)
f = self.field_fun(idx, self)
self.field.append(f)
f.data = v
if self._dict is not None:
self._dict[f] = idx
class Array(BaseArray):
def __init__(self, size, *args, **kwargs):
self.size = size
BaseArray.__init__(self, *args, **kwargs)
class PrefixedArray(BaseArray):
def __init__(self, prefix_field, *args, **kwargs):
self.prefix_field = prefix_field
BaseArray.__init__(self, *args, **kwargs)
@property
def size(self):
return self.prefix_field.data
def unpack(self, s):
self.prefix_field.unpack(s)
BaseArray.unpack(self, s)
def pack(self, s):
self.prefix_field.data = len(self)
self.prefix_field.pack(s)
BaseArray.pack(self, s)
class BaseBlob(BaseField):
def unpack_data(self, s):
return getbytes(s, self.size)
def pack_data(self, s, data):
s.write(data)
class Blob(BaseBlob):
def __init__(self, size):
self.size = size
def serialize(self):
return None
class PrefixedBlob(BaseBlob):
def __init__(self, prefix_field, *args, **kwargs):
self.prefix_field = prefix_field
BaseBlob.__init__(self, *args, **kwargs)
@property
def size(self):
return self.prefix_field.data
def unpack(self, s):
self.prefix_field.unpack(s)
BaseBlob.unpack(self, s)
def pack(self, s):
self.prefix_field.data = len(self.data)
self.prefix_field.pack(s)
BaseBlob.pack(self, s)
class String(BaseField):
def unpack_data(self, s):
lc = []
c = getbyte(s)
while c != b"\0":
lc.append(c)
c = getbyte(s)
return b"".join(lc).decode()
def pack_data(self, s, data):
s.write(data.encode())
s.write(b"\0")
class FixedString(BaseField):
def __init__(self, size):
self.size = size
def unpack_data(self, s):
data = getbytes(s, self.size)
data = data.rstrip(b"\0").decode()
return data
def pack_data(self, s, data):
data = data.encode().ljust(self.size, b"\0")
s.write(data)
class Index(BaseField):
def __init__(self, array, index_field):
self.array = array
self.index_field = index_field
def unpack_data(self, s):
self.index_field.unpack(s)
return self.array[self.index_field.data].data
def pack_data(self, s, data):
try:
index = self.array.data.index(data)
except ValueError:
index = len(self.array)
self.array.append_data(data)
self.index_field.data = index
self.index_field.pack(s)
class Offset(BaseField):
def unpack_data(self, s):
return s.tell()
def pack_data(self, s, data):
self.data = s.tell()
class Pointer(ContainerField):
def __init__(self, offset, field):
self.offset = offset
self.field = field
def unpack(self, s):
with Seek(s, self.offset):
self.field.unpack(s)
@property
def data(self):
return self.field.data
@data.setter
def data(self, v):
self.field.data = v
def pack_data(self, s, data):
pass
class DataPointer(ContainerField):
def __init__(self, offset_field, field):
self.offset_field = offset_field
self.field = field
def unpack(self, s):
self.offset_field.unpack(s)
with Seek(s, self.offset_field.data):
self.field.unpack(s)
@property
def data(self):
return self.field.data
@data.setter
def data(self, v):
self.field.data = v
class Mapping(BaseField):
def __init__(self, field, mapping):
self.field = field
self.mapping = mapping
def unpack_data(self, s):
data = self.field.unpack_data(s)
return self.mapping[data]
class Flags(BaseField):
def __init__(self, field, flags):
self.field = field
self.flags = flags
def unpack_data(self, s):
data = self.field.unpack_data(s)
flag_data = []
for mask, name in self.flags:
if mask & data:
flag_data.append(name)
return flag_data
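# A minimal end-to-end sketch (not part of the original library) of how the
# field classes above compose; the _DemoHeader layout is an assumption made
# up purely for illustration.
import io

class _DemoHeader(Struct):
    def fields(self):
        self.F("magic", Magic(b"DEMO"))
        self.F("count", Format("I"))

_demo = _DemoHeader()
_demo.unpack(io.BytesIO(b"DEMO\x02\x00\x00\x00"))
assert _demo["count"].data == 2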
# (c) 2015, Marius Gedminas
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import shlex
from ansible.module_utils.six import PY3
from ansible.module_utils._text import to_bytes, to_text
if PY3:
# shlex.split() wants Unicode (i.e. ``str``) input on Python 3
shlex_split = shlex.split
else:
# shlex.split() wants bytes (i.e. ``str``) input on Python 2
def shlex_split(s, comments=False, posix=True):
return map(to_text, shlex.split(to_bytes(s), comments, posix))
shlex_split.__doc__ = shlex.split.__doc__
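# Illustrative behaviour (not part of the original module): on both Python 2
# and Python 3 the wrapper accepts and returns text, e.g.
#
#   list(shlex_split(u'a "b c"')) == [u'a', u'b c']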
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_sshkeypair
short_description: Manages SSH keys on Apache CloudStack based clouds.
description:
- Create, register and remove SSH keys.
- If no key was found and no public key was provided, a new SSH
private/public key pair will be created and the private key will be returned.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
name:
description:
- Name of public key.
required: true
domain:
description:
- Domain the public key is related to.
required: false
default: null
account:
description:
- Account the public key is related to.
required: false
default: null
project:
description:
- Name of the project the public key to be registered in.
required: false
default: null
state:
description:
- State of the public key.
required: false
default: 'present'
choices: [ 'present', 'absent' ]
public_key:
description:
- String of the public key.
required: false
default: null
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# create a new private / public key pair:
- cs_sshkeypair:
name: linus@example.com
delegate_to: localhost
register: key
- debug:
msg: 'Private key is {{ key.private_key }}'
# remove a public key by its name:
- cs_sshkeypair:
name: linus@example.com
state: absent
delegate_to: localhost
# register your existing local public key:
- cs_sshkeypair:
name: linus@example.com
public_key: "{{ lookup('file', '~/.ssh/id_rsa.pub') }}"
delegate_to: localhost
'''
RETURN = '''
---
id:
description: UUID of the SSH public key.
returned: success
type: string
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
name:
description: Name of the SSH public key.
returned: success
type: string
sample: linus@example.com
fingerprint:
description: Fingerprint of the SSH public key.
returned: success
type: string
sample: "86:5e:a3:e8:bd:95:7b:07:7c:c2:5c:f7:ad:8b:09:28"
private_key:
description: Private key of generated SSH keypair.
returned: changed
type: string
sample: "-----BEGIN RSA PRIVATE KEY-----\nMII...8tO\n-----END RSA PRIVATE KEY-----\n"
'''
try:
import sshpubkeys
HAS_LIB_SSHPUBKEYS = True
except ImportError:
HAS_LIB_SSHPUBKEYS = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
CloudStackException,
cs_required_together,
cs_argument_spec
)
class AnsibleCloudStackSshKey(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackSshKey, self).__init__(module)
self.returns = {
'privatekey': 'private_key',
'fingerprint': 'fingerprint',
}
self.ssh_key = None
def register_ssh_key(self, public_key):
ssh_key = self.get_ssh_key()
args = self._get_common_args()
name = self.module.params.get('name')
res = None
if not ssh_key:
self.result['changed'] = True
args['publickey'] = public_key
if not self.module.check_mode:
args['name'] = name
res = self.cs.registerSSHKeyPair(**args)
else:
fingerprint = self._get_ssh_fingerprint(public_key)
if ssh_key['fingerprint'] != fingerprint:
self.result['changed'] = True
if not self.module.check_mode:
# delete the ssh key with matching name but wrong fingerprint
args['name'] = name
self.cs.deleteSSHKeyPair(**args)
elif ssh_key['name'].lower() != name.lower():
self.result['changed'] = True
if not self.module.check_mode:
# delete the ssh key with matching fingerprint but wrong name
args['name'] = ssh_key['name']
self.cs.deleteSSHKeyPair(**args)
# The first match for key retrieval is the fingerprint.
# We need to make another lookup if there is a key with an identical name.
self.ssh_key = None
ssh_key = self.get_ssh_key()
if ssh_key['fingerprint'] != fingerprint:
args['name'] = name
self.cs.deleteSSHKeyPair(**args)
if not self.module.check_mode and self.result['changed']:
args['publickey'] = public_key
args['name'] = name
res = self.cs.registerSSHKeyPair(**args)
if res and 'keypair' in res:
ssh_key = res['keypair']
return ssh_key
def create_ssh_key(self):
ssh_key = self.get_ssh_key()
if not ssh_key:
self.result['changed'] = True
args = self._get_common_args()
args['name'] = self.module.params.get('name')
if not self.module.check_mode:
res = self.cs.createSSHKeyPair(**args)
ssh_key = res['keypair']
return ssh_key
def remove_ssh_key(self, name=None):
ssh_key = self.get_ssh_key()
if ssh_key:
self.result['changed'] = True
args = self._get_common_args()
args['name'] = name or self.module.params.get('name')
if not self.module.check_mode:
self.cs.deleteSSHKeyPair(**args)
return ssh_key
def _get_common_args(self):
return {
'domainid': self.get_domain('id'),
'account': self.get_account('name'),
'projectid': self.get_project('id')
}
def get_ssh_key(self):
if not self.ssh_key:
public_key = self.module.params.get('public_key')
if public_key:
# Query by fingerprint of the public key
args_fingerprint = self._get_common_args()
args_fingerprint['fingerprint'] = self._get_ssh_fingerprint(public_key)
ssh_keys = self.cs.listSSHKeyPairs(**args_fingerprint)
if ssh_keys and 'sshkeypair' in ssh_keys:
self.ssh_key = ssh_keys['sshkeypair'][0]
# When key has not been found by fingerprint, use the name
if not self.ssh_key:
args_name = self._get_common_args()
args_name['name'] = self.module.params.get('name')
ssh_keys = self.cs.listSSHKeyPairs(**args_name)
if ssh_keys and 'sshkeypair' in ssh_keys:
self.ssh_key = ssh_keys['sshkeypair'][0]
return self.ssh_key
def _get_ssh_fingerprint(self, public_key):
key = sshpubkeys.SSHKey(public_key)
if hasattr(key, 'hash_md5'):
return key.hash_md5().replace(to_native('MD5:'), to_native(''))
return key.hash()
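# Note on the fallback above (editorial observation, not original code):
# newer sshpubkeys releases return the digest from hash_md5() prefixed with
# 'MD5:' (OpenSSH 6.8+ style), hence the strip, while older releases only
# provide hash(), which returns the bare colon-separated digest.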
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name=dict(required=True),
public_key=dict(),
domain=dict(),
account=dict(),
project=dict(),
state=dict(choices=['present', 'absent'], default='present'),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
if not HAS_LIB_SSHPUBKEYS:
module.fail_json(msg="python library sshpubkeys required: pip install sshpubkeys")
try:
acs_sshkey = AnsibleCloudStackSshKey(module)
state = module.params.get('state')
if state in ['absent']:
ssh_key = acs_sshkey.remove_ssh_key()
else:
public_key = module.params.get('public_key')
if public_key:
ssh_key = acs_sshkey.register_ssh_key(public_key)
else:
ssh_key = acs_sshkey.create_ssh_key()
result = acs_sshkey.get_result(ssh_key)
except CloudStackException as e:
module.fail_json(msg='CloudStackException: %s' % str(e))
module.exit_json(**result)
if __name__ == '__main__':
main()
from django.views.generic import TemplateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404
from django.http import JsonResponse
from Hindlebook.models import Author
class FriendsView(TemplateView):
template_name = 'friends/friends.html'
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
uuid = kwargs.get('authorUUID', self.request.user.author.uuid)
self.author = get_object_or_404(Author, uuid=uuid)
return super(FriendsView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(FriendsView, self).get_context_data(**kwargs)
context['author'] = self.author
return context
class NotificationsView(TemplateView):
template_name = 'friends/notifications.html'
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
self.author = self.request.user.author
return super(NotificationsView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(NotificationsView, self).get_context_data(**kwargs)
context['author'] = self.author
return context
"""
Russian-specific forms helpers
"""
from __future__ import absolute_import
import re
from django.contrib.localflavor.ru.ru_regions import RU_COUNTY_CHOICES, RU_REGIONS_CHOICES
from django.forms.fields import RegexField, Select
from django.utils.translation import ugettext_lazy as _
phone_digits_re = re.compile(r'^(?:[78]-?)?(\d{3})[-\.]?(\d{3})[-\.]?(\d{4})$')
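# For reference (examples are assumptions, not from the original source):
# this pattern accepts e.g. '8-495-123-4567', '74951234567' and
# '495.123.4567', capturing ('495', '123', '4567') in each case.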
class RUCountySelect(Select):
"""
A Select widget that uses a list of Russian Counties as its choices.
"""
def __init__(self, attrs=None):
super(RUCountySelect, self).__init__(attrs, choices=RU_COUNTY_CHOICES)
class RURegionSelect(Select):
"""
A Select widget that uses a list of Russian Regions as its choices.
"""
def __init__(self, attrs=None):
super(RURegionSelect, self).__init__(attrs, choices=RU_REGIONS_CHOICES)
class RUPostalCodeField(RegexField):
"""
Russian Postal code field.
Format: XXXXXX, where X is any digit, and first digit is not zero.
"""
default_error_messages = {
'invalid': _(u'Enter a postal code in the format XXXXXX.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(RUPostalCodeField, self).__init__(r'^\d{6}$',
max_length, min_length, *args, **kwargs)
class RUPassportNumberField(RegexField):
"""
Russian internal passport number format:
XXXX XXXXXX where X - any digit.
"""
default_error_messages = {
'invalid': _(u'Enter a passport number in the format XXXX XXXXXX.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(RUPassportNumberField, self).__init__(r'^\d{4} \d{6}$',
max_length, min_length, *args, **kwargs)
class RUAlienPassportNumberField(RegexField):
"""
Russian alien's passport number format:
XX XXXXXXX where X - any digit.
"""
default_error_messages = {
'invalid': _(u'Enter a passport number in the format XX XXXXXXX.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(RUAlienPassportNumberField, self).__init__(r'^\d{2} \d{7}$',
max_length, min_length, *args, **kwargs)
#!/usr/bin/env python
# Copyright 2008-2013 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module implementing the command line entry point for the `Libdoc` tool.
This module can be executed from the command line using the following
approaches::
python -m robot.libdoc
python path/to/robot/libdoc.py
Instead of ``python`` it is possible to use also other Python interpreters.
This module also provides :func:`libdoc` and :func:`libdoc_cli` functions
that can be used programmatically. Other code is for internal usage.
Libdoc itself is implemented in the :mod:`~robot.libdocpkg` package.
"""
USAGE = """robot.libdoc -- Robot Framework library documentation generator
Version:  <VERSION>
Usage: python -m robot.libdoc [options] library output_file
or: python -m robot.libdoc [options] library list|show|version [names]
Libdoc tool can generate keyword documentation in HTML and XML formats both
for test libraries and resource files. HTML format is suitable for humans and
XML specs for RIDE and other tools. Libdoc also has a few special commands to
show library or resource information on the console.
Libdoc supports all library and resource types and also earlier generated XML
specs can be used as input. If a library needs arguments, they must be given
as part of the library name and separated by two colons, for example, like
`LibraryName::arg1::arg2`.
Options
=======
-f --format HTML|XML Specifies whether to generate HTML or XML output.
If this option is not used, the format is taken
from the extension of the output file.
-F --docformat ROBOT|HTML|TEXT|REST
Specifies the source documentation format. Possible
values are Robot Framework's documentation format,
HTML, plain text, and reStructuredText. The default
value can be specified in test library source code
and the initial default value is `ROBOT`.
New in Robot Framework 2.7.5.
-n --name newname Sets the name of the documented library or resource.
-v --version newversion Sets the version of the documented library or
resource.
-P --pythonpath path * Additional locations where to search for libraries
and resources.
-E --escape what:with * Escapes characters which are problematic in console.
'what' is the name of the character to escape and
'with' is the string to escape it with.
<-------------------ESCAPES------------------------>
-h -? --help Print this help.
Creating documentation
======================
When creating documentation in HTML or XML format, the output file must
be specified as a second argument after the library/resource name or path.
Output format is taken automatically from the extension but can also be set
with the `--format` option.
Examples:
python -m robot.libdoc src/MyLib.py doc/MyLib.html
jython -m robot.libdoc MyJavaLibrary.java MyJavaLibrary.html
python -m robot.libdoc --name MyLib Remote::10.0.0.42:8270 MyLib.xml
Viewing information on console
==============================
Libdoc has three special commands to show information on the console. These
commands are used instead of the name of the output file, and they can also
take additional arguments.
list: List names of the keywords the library/resource contains. Can be
limited to show only certain keywords by passing optional patterns as
arguments. Keyword is listed if its name contains any given pattern.
show: Show library/resource documentation. Can be limited to show only
certain keywords by passing names as arguments. Keyword is shown if
its name matches any given name. Special argument `intro` will show
the library introduction and importing sections.
version: Show library version
Optional patterns given to `list` and `show` are case and space insensitive.
Both also accept `*` and `?` as wildcards.
Examples:
python -m robot.libdoc Dialogs list
python -m robot.libdoc Selenium2Library list browser
python -m robot.libdoc Remote::10.0.0.42:8270 show
python -m robot.libdoc Dialogs show PauseExecution execute*
python -m robot.libdoc Selenium2Library show intro
python -m robot.libdoc Selenium2Library version
Alternative execution
=====================
Libdoc works with all interpreters supported by Robot Framework (Python,
Jython and IronPython). In the examples above libdoc is executed as an
installed module, but it can also be executed as a script like
`python path/robot/libdoc.py`.
For more information see libdoc section in Robot Framework User Guide at
http://code.google.com/p/robotframework/wiki/UserGuide
"""
import sys
import os
# Allows running as a script. __name__ check needed with multiprocessing:
# http://code.google.com/p/robotframework/issues/detail?id=1137
if 'robot' not in sys.modules and __name__ == '__main__':
import pythonpathsetter
from robot.utils import Application, seq2str
from robot.errors import DataError
from robot.libdocpkg import LibraryDocumentation, ConsoleViewer
class LibDoc(Application):
def __init__(self):
Application.__init__(self, USAGE, arg_limits=(2,), auto_version=False)
def validate(self, options, arguments):
if ConsoleViewer.handles(arguments[1]):
ConsoleViewer.validate_command(arguments[1], arguments[2:])
elif len(arguments) > 2:
raise DataError('Only two arguments allowed when writing output.')
return options, arguments
def main(self, args, name='', version='', format=None, docformat=None):
lib_or_res, output = args[:2]
libdoc = LibraryDocumentation(lib_or_res, name, version,
self._get_doc_format(docformat))
if ConsoleViewer.handles(output):
ConsoleViewer(libdoc).view(output, *args[2:])
else:
libdoc.save(output, self._get_output_format(format, output))
self.console(os.path.abspath(output))
def _get_doc_format(self, format):
if not format:
return None
return self._verify_format('Doc format', format,
['ROBOT', 'TEXT', 'HTML', 'REST'])
def _get_output_format(self, format, output):
default = os.path.splitext(output)[1][1:]
return self._verify_format('Format', format or default, ['HTML', 'XML'])
def _verify_format(self, type, format, valid):
format = format.upper()
if format not in valid:
raise DataError("%s must be %s, got '%s'."
% (type, seq2str(valid, lastsep=' or '), format))
return format
def libdoc_cli(arguments):
"""Executes Libdoc similarly as from the command line.
:param arguments: Command line arguments as a list of strings.
For programmatic usage the :func:`libdoc` function is typically better. It
has a better API for that usage and does not call :func:`sys.exit` like
this function.
Example::
from robot.libdoc import libdoc_cli
libdoc_cli(['--version', '1.0', 'MyLibrary.py', 'MyLibraryDoc.html'])
"""
LibDoc().execute_cli(arguments)