Dataset schema: repo_name (string, 5-100 chars) | path (string, 4-299 chars) | copies (string, 990 classes) | size (string, 4-7 chars) | content (string, 666-1.03M chars) | license (string, 15 classes) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class)
repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---|
sqlalchemy/sqlalchemy | test/orm/test_rel_fn.py | 3 | 45031 | from sqlalchemy import and_
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import exc
from sqlalchemy import ForeignKey
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy import func
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy.orm import foreign
from sqlalchemy.orm import relationship
from sqlalchemy.orm import relationships
from sqlalchemy.orm import remote
from sqlalchemy.orm.interfaces import MANYTOONE
from sqlalchemy.orm.interfaces import ONETOMANY
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import mock
class _JoinFixtures(object):
@classmethod
def setup_test_class(cls):
m = MetaData()
cls.left = Table(
"lft",
m,
Column("id", Integer, primary_key=True),
Column("x", Integer),
Column("y", Integer),
)
cls.right = Table(
"rgt",
m,
Column("id", Integer, primary_key=True),
Column("lid", Integer, ForeignKey("lft.id")),
Column("x", Integer),
Column("y", Integer),
)
cls.right_multi_fk = Table(
"rgt_multi_fk",
m,
Column("id", Integer, primary_key=True),
Column("lid1", Integer, ForeignKey("lft.id")),
Column("lid2", Integer, ForeignKey("lft.id")),
)
cls.selfref = Table(
"selfref",
m,
Column("id", Integer, primary_key=True),
Column("sid", Integer, ForeignKey("selfref.id")),
)
cls.composite_selfref = Table(
"composite_selfref",
m,
Column("id", Integer, primary_key=True),
Column("group_id", Integer, primary_key=True),
Column("parent_id", Integer),
ForeignKeyConstraint(
["parent_id", "group_id"],
["composite_selfref.id", "composite_selfref.group_id"],
),
)
cls.m2mleft = Table(
"m2mlft", m, Column("id", Integer, primary_key=True)
)
cls.m2mright = Table(
"m2mrgt", m, Column("id", Integer, primary_key=True)
)
cls.m2msecondary = Table(
"m2msecondary",
m,
Column("lid", Integer, ForeignKey("m2mlft.id"), primary_key=True),
Column("rid", Integer, ForeignKey("m2mrgt.id"), primary_key=True),
)
cls.m2msecondary_no_fks = Table(
"m2msecondary_no_fks",
m,
Column("lid", Integer, primary_key=True),
Column("rid", Integer, primary_key=True),
)
cls.m2msecondary_ambig_fks = Table(
"m2msecondary_ambig_fks",
m,
Column("lid1", Integer, ForeignKey("m2mlft.id"), primary_key=True),
Column("rid1", Integer, ForeignKey("m2mrgt.id"), primary_key=True),
Column("lid2", Integer, ForeignKey("m2mlft.id"), primary_key=True),
Column("rid2", Integer, ForeignKey("m2mrgt.id"), primary_key=True),
)
cls.base_w_sub_rel = Table(
"base_w_sub_rel",
m,
Column("id", Integer, primary_key=True),
Column("sub_id", Integer, ForeignKey("rel_sub.id")),
)
cls.rel_sub = Table(
"rel_sub",
m,
Column(
"id",
Integer,
ForeignKey("base_w_sub_rel.id"),
primary_key=True,
),
)
cls.base = Table(
"base",
m,
Column("id", Integer, primary_key=True),
Column("flag", Boolean),
)
cls.sub = Table(
"sub",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
)
cls.sub_w_base_rel = Table(
"sub_w_base_rel",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
Column("base_id", Integer, ForeignKey("base.id")),
)
cls.sub_w_sub_rel = Table(
"sub_w_sub_rel",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
Column("sub_id", Integer, ForeignKey("sub.id")),
)
cls.right_w_base_rel = Table(
"right_w_base_rel",
m,
Column("id", Integer, primary_key=True),
Column("base_id", Integer, ForeignKey("base.id")),
)
cls.three_tab_a = Table(
"three_tab_a", m, Column("id", Integer, primary_key=True)
)
cls.three_tab_b = Table(
"three_tab_b",
m,
Column("id", Integer, primary_key=True),
Column("aid", Integer, ForeignKey("three_tab_a.id")),
)
cls.three_tab_c = Table(
"three_tab_c",
m,
Column("id", Integer, primary_key=True),
Column("aid", Integer, ForeignKey("three_tab_a.id")),
Column("bid", Integer, ForeignKey("three_tab_b.id")),
)
cls.composite_target = Table(
"composite_target",
m,
Column("uid", Integer, primary_key=True),
Column("oid", Integer, primary_key=True),
)
cls.composite_multi_ref = Table(
"composite_multi_ref",
m,
Column("uid1", Integer),
Column("uid2", Integer),
Column("oid", Integer),
ForeignKeyConstraint(
("uid1", "oid"),
("composite_target.uid", "composite_target.oid"),
),
ForeignKeyConstraint(
("uid2", "oid"),
("composite_target.uid", "composite_target.oid"),
),
)
cls.purely_single_col = Table(
"purely_single_col", m, Column("path", String)
)
def _join_fixture_overlapping_three_tables(self, **kw):
def _can_sync(*cols):
for c in cols:
if self.three_tab_c.c.contains_column(c):
return False
else:
return True
return relationships.JoinCondition(
self.three_tab_a,
self.three_tab_b,
self.three_tab_a,
self.three_tab_b,
support_sync=False,
can_be_synced_fn=_can_sync,
primaryjoin=and_(
self.three_tab_a.c.id == self.three_tab_b.c.aid,
self.three_tab_c.c.bid == self.three_tab_b.c.id,
self.three_tab_c.c.aid == self.three_tab_a.c.id,
),
)
def _join_fixture_m2m(self, **kw):
return relationships.JoinCondition(
self.m2mleft,
self.m2mright,
self.m2mleft,
self.m2mright,
secondary=self.m2msecondary,
**kw
)
def _join_fixture_m2m_backref(self, **kw):
"""return JoinCondition in the same way RelationshipProperty
calls it for a backref on an m2m.
"""
j1 = self._join_fixture_m2m()
return (
j1,
relationships.JoinCondition(
self.m2mright,
self.m2mleft,
self.m2mright,
self.m2mleft,
secondary=self.m2msecondary,
primaryjoin=j1.secondaryjoin_minus_local,
secondaryjoin=j1.primaryjoin_minus_local,
),
)
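    # A hedged sketch (the mapping names here are invented, not part of
    # this fixture) of the ORM-level declaration whose backref would drive
    # the two JoinConditions built above; relationship() constructs the
    # reverse condition by swapping the forward condition's primaryjoin and
    # secondaryjoin, which _join_fixture_m2m_backref() reproduces by hand:
    #
    #     rights = relationship(
    #         "Right", secondary=m2msecondary, backref="lefts"
    #     )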
def _join_fixture_o2m(self, **kw):
return relationships.JoinCondition(
self.left, self.right, self.left, self.right, **kw
)
def _join_fixture_m2o(self, **kw):
return relationships.JoinCondition(
self.right, self.left, self.right, self.left, **kw
)
def _join_fixture_o2m_selfref(self, **kw):
return relationships.JoinCondition(
self.selfref, self.selfref, self.selfref, self.selfref, **kw
)
def _join_fixture_m2o_selfref(self, **kw):
return relationships.JoinCondition(
self.selfref,
self.selfref,
self.selfref,
self.selfref,
remote_side=set([self.selfref.c.id]),
**kw
)
def _join_fixture_o2m_composite_selfref(self, **kw):
return relationships.JoinCondition(
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
**kw
)
def _join_fixture_m2o_composite_selfref(self, **kw):
return relationships.JoinCondition(
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
remote_side=set(
[
self.composite_selfref.c.id,
self.composite_selfref.c.group_id,
]
),
**kw
)
def _join_fixture_o2m_composite_selfref_func(self, **kw):
return relationships.JoinCondition(
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
primaryjoin=and_(
self.composite_selfref.c.group_id
== func.foo(self.composite_selfref.c.group_id),
self.composite_selfref.c.parent_id
== self.composite_selfref.c.id,
),
**kw
)
def _join_fixture_o2m_composite_selfref_func_remote_side(self, **kw):
return relationships.JoinCondition(
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
primaryjoin=and_(
self.composite_selfref.c.group_id
== func.foo(self.composite_selfref.c.group_id),
self.composite_selfref.c.parent_id
== self.composite_selfref.c.id,
),
remote_side=set([self.composite_selfref.c.parent_id]),
**kw
)
def _join_fixture_o2m_composite_selfref_func_annotated(self, **kw):
return relationships.JoinCondition(
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
primaryjoin=and_(
remote(self.composite_selfref.c.group_id)
== func.foo(self.composite_selfref.c.group_id),
remote(self.composite_selfref.c.parent_id)
== self.composite_selfref.c.id,
),
**kw
)
def _join_fixture_compound_expression_1(self, **kw):
return relationships.JoinCondition(
self.left,
self.right,
self.left,
self.right,
primaryjoin=(self.left.c.x + self.left.c.y)
== relationships.remote(
relationships.foreign(self.right.c.x * self.right.c.y)
),
**kw
)
def _join_fixture_compound_expression_2(self, **kw):
return relationships.JoinCondition(
self.left,
self.right,
self.left,
self.right,
primaryjoin=(self.left.c.x + self.left.c.y)
== relationships.foreign(self.right.c.x * self.right.c.y),
**kw
)
def _join_fixture_compound_expression_1_non_annotated(self, **kw):
return relationships.JoinCondition(
self.left,
self.right,
self.left,
self.right,
primaryjoin=(self.left.c.x + self.left.c.y)
== (self.right.c.x * self.right.c.y),
**kw
)
def _join_fixture_base_to_joined_sub(self, **kw):
# see test/orm/inheritance/test_abc_inheritance:TestaTobM2O
# and others there
right = self.base_w_sub_rel.join(
self.rel_sub, self.base_w_sub_rel.c.id == self.rel_sub.c.id
)
return relationships.JoinCondition(
self.base_w_sub_rel,
right,
self.base_w_sub_rel,
self.rel_sub,
primaryjoin=self.base_w_sub_rel.c.sub_id == self.rel_sub.c.id,
**kw
)
def _join_fixture_o2m_joined_sub_to_base(self, **kw):
left = self.base.join(
self.sub_w_base_rel, self.base.c.id == self.sub_w_base_rel.c.id
)
return relationships.JoinCondition(
left,
self.base,
self.sub_w_base_rel,
self.base,
primaryjoin=self.sub_w_base_rel.c.base_id == self.base.c.id,
)
def _join_fixture_m2o_joined_sub_to_sub_on_base(self, **kw):
# this is a late add - a variant of the test case
# in #2491 where we join on the base cols instead. only
# m2o has a problem at the time of this test.
left = self.base.join(self.sub, self.base.c.id == self.sub.c.id)
right = self.base.join(
self.sub_w_base_rel, self.base.c.id == self.sub_w_base_rel.c.id
)
return relationships.JoinCondition(
left,
right,
self.sub,
self.sub_w_base_rel,
primaryjoin=self.sub_w_base_rel.c.base_id == self.base.c.id,
)
def _join_fixture_o2m_joined_sub_to_sub(self, **kw):
left = self.base.join(self.sub, self.base.c.id == self.sub.c.id)
right = self.base.join(
self.sub_w_sub_rel, self.base.c.id == self.sub_w_sub_rel.c.id
)
return relationships.JoinCondition(
left,
right,
self.sub,
self.sub_w_sub_rel,
primaryjoin=self.sub.c.id == self.sub_w_sub_rel.c.sub_id,
)
def _join_fixture_m2o_sub_to_joined_sub(self, **kw):
# see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
right = self.base.join(
self.right_w_base_rel, self.base.c.id == self.right_w_base_rel.c.id
)
return relationships.JoinCondition(
self.right_w_base_rel,
right,
self.right_w_base_rel,
self.right_w_base_rel,
)
def _join_fixture_m2o_sub_to_joined_sub_func(self, **kw):
# see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
right = self.base.join(
self.right_w_base_rel, self.base.c.id == self.right_w_base_rel.c.id
)
return relationships.JoinCondition(
self.right_w_base_rel,
right,
self.right_w_base_rel,
self.right_w_base_rel,
primaryjoin=self.right_w_base_rel.c.base_id
== func.foo(self.base.c.id),
)
def _join_fixture_o2o_joined_sub_to_base(self, **kw):
left = self.base.join(self.sub, self.base.c.id == self.sub.c.id)
# see test_relationships->AmbiguousJoinInterpretedAsSelfRef
return relationships.JoinCondition(left, self.sub, left, self.sub)
def _join_fixture_o2m_to_annotated_func(self, **kw):
return relationships.JoinCondition(
self.left,
self.right,
self.left,
self.right,
primaryjoin=self.left.c.id == foreign(func.foo(self.right.c.lid)),
**kw
)
def _join_fixture_o2m_to_oldstyle_func(self, **kw):
return relationships.JoinCondition(
self.left,
self.right,
self.left,
self.right,
primaryjoin=self.left.c.id == func.foo(self.right.c.lid),
consider_as_foreign_keys={self.right.c.lid},
**kw
)
def _join_fixture_overlapping_composite_fks(self, **kw):
return relationships.JoinCondition(
self.composite_target,
self.composite_multi_ref,
self.composite_target,
self.composite_multi_ref,
consider_as_foreign_keys={
self.composite_multi_ref.c.uid2,
self.composite_multi_ref.c.oid,
},
**kw
)
def _join_fixture_o2m_o_side_none(self, **kw):
return relationships.JoinCondition(
self.left,
self.right,
self.left,
self.right,
primaryjoin=and_(
self.left.c.id == self.right.c.lid, self.left.c.x == 5
),
**kw
)
def _join_fixture_purely_single_o2m(self, **kw):
return relationships.JoinCondition(
self.purely_single_col,
self.purely_single_col,
self.purely_single_col,
self.purely_single_col,
support_sync=False,
primaryjoin=self.purely_single_col.c.path.like(
remote(foreign(self.purely_single_col.c.path.concat("%")))
),
)
def _join_fixture_purely_single_m2o(self, **kw):
return relationships.JoinCondition(
self.purely_single_col,
self.purely_single_col,
self.purely_single_col,
self.purely_single_col,
support_sync=False,
primaryjoin=remote(self.purely_single_col.c.path).like(
foreign(self.purely_single_col.c.path.concat("%"))
),
)
def _join_fixture_remote_local_multiple_ref(self, **kw):
def fn(a, b):
return (a == b) | (b == a)
return relationships.JoinCondition(
self.selfref,
self.selfref,
self.selfref,
self.selfref,
support_sync=False,
primaryjoin=fn(
# we're putting a do-nothing annotation on
# "a" so that the left/right is preserved;
# annotation vs. non seems to affect __eq__ behavior
self.selfref.c.sid._annotate({"foo": "bar"}),
foreign(remote(self.selfref.c.sid)),
),
)
def _join_fixture_inh_selfref_w_entity(self, **kw):
fake_logger = mock.Mock(info=lambda *arg, **kw: None)
prop = mock.Mock(
parent=mock.Mock(), mapper=mock.Mock(), logger=fake_logger
)
local_selectable = self.base.join(self.sub)
remote_selectable = self.base.join(self.sub_w_sub_rel)
# note this test requires that "parentmapper" annotation is
# present in the columns ahead of time
sub_w_sub_rel__sub_id = self.sub_w_sub_rel.c.sub_id._annotate(
{"parentmapper": prop.mapper}
)
sub__id = self.sub.c.id._annotate({"parentmapper": prop.parent})
sub_w_sub_rel__flag = self.base.c.flag._annotate(
{"parentmapper": prop.mapper}
)
return relationships.JoinCondition(
local_selectable,
remote_selectable,
local_selectable,
remote_selectable,
primaryjoin=and_(
sub_w_sub_rel__sub_id == sub__id,
sub_w_sub_rel__flag == True, # noqa
),
prop=prop,
)
def _assert_non_simple_warning(self, fn):
assert_raises_message(
exc.SAWarning,
"Non-simple column elements in "
"primary join condition for property "
r"None - consider using remote\(\) "
"annotations to mark the remote side.",
fn,
)
def _assert_raises_no_relevant_fks(
self, fn, expr, relname, primary, *arg, **kw
):
assert_raises_message(
exc.ArgumentError,
r"Could not locate any relevant foreign key columns "
r"for %s join condition '%s' on relationship %s. "
r"Ensure that referencing columns are associated with "
r"a ForeignKey or ForeignKeyConstraint, or are annotated "
r"in the join condition with the foreign\(\) annotation."
% (primary, expr, relname),
fn,
*arg,
**kw
)
def _assert_raises_no_equality(
self, fn, expr, relname, primary, *arg, **kw
):
assert_raises_message(
exc.ArgumentError,
"Could not locate any simple equality expressions "
"involving locally mapped foreign key columns for %s join "
"condition '%s' on relationship %s. "
"Ensure that referencing columns are associated with a "
"ForeignKey or ForeignKeyConstraint, or are annotated in "
r"the join condition with the foreign\(\) annotation. "
"To allow comparison operators other than '==', "
"the relationship can be marked as viewonly=True."
% (primary, expr, relname),
fn,
*arg,
**kw
)
def _assert_raises_ambig_join(
self, fn, relname, secondary_arg, *arg, **kw
):
if secondary_arg is not None:
assert_raises_message(
exc.AmbiguousForeignKeysError,
"Could not determine join condition between "
"parent/child tables on relationship %s - "
"there are multiple foreign key paths linking the "
"tables via secondary table '%s'. "
"Specify the 'foreign_keys' argument, providing a list "
"of those columns which should be counted as "
"containing a foreign key reference from the "
"secondary table to each of the parent and child tables."
% (relname, secondary_arg),
fn,
*arg,
**kw
)
else:
assert_raises_message(
exc.AmbiguousForeignKeysError,
"Could not determine join condition between "
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables. "
% (relname,),
fn,
*arg,
**kw
)
def _assert_raises_no_join(self, fn, relname, secondary_arg, *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.NoForeignKeysError,
"Could not determine join condition between "
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables "
"via secondary table '%s'. "
"Ensure that referencing columns are associated "
"with a ForeignKey "
"or ForeignKeyConstraint, or specify 'primaryjoin' and "
"'secondaryjoin' expressions" % (relname, secondary_arg),
fn,
*arg,
**kw
)
else:
assert_raises_message(
exc.NoForeignKeysError,
"Could not determine join condition between "
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables. "
"Ensure that referencing columns are associated "
"with a ForeignKey "
"or ForeignKeyConstraint, or specify a 'primaryjoin' "
"expression." % (relname,),
fn,
*arg,
**kw
)
class ColumnCollectionsTest(
_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
):
def test_determine_local_remote_pairs_o2o_joined_sub_to_base(self):
joincond = self._join_fixture_o2o_joined_sub_to_base()
eq_(joincond.local_remote_pairs, [(self.base.c.id, self.sub.c.id)])
def test_determine_synchronize_pairs_o2m_to_annotated_func(self):
joincond = self._join_fixture_o2m_to_annotated_func()
eq_(joincond.synchronize_pairs, [(self.left.c.id, self.right.c.lid)])
def test_determine_synchronize_pairs_o2m_to_oldstyle_func(self):
joincond = self._join_fixture_o2m_to_oldstyle_func()
eq_(joincond.synchronize_pairs, [(self.left.c.id, self.right.c.lid)])
    def test_determine_local_remote_m2o_joined_sub_to_sub_on_base(self):
joincond = self._join_fixture_m2o_joined_sub_to_sub_on_base()
eq_(
joincond.local_remote_pairs,
[(self.base.c.id, self.sub_w_base_rel.c.base_id)],
)
def test_determine_local_remote_base_to_joined_sub(self):
joincond = self._join_fixture_base_to_joined_sub()
eq_(
joincond.local_remote_pairs,
[(self.base_w_sub_rel.c.sub_id, self.rel_sub.c.id)],
)
def test_determine_local_remote_o2m_joined_sub_to_base(self):
joincond = self._join_fixture_o2m_joined_sub_to_base()
eq_(
joincond.local_remote_pairs,
[(self.sub_w_base_rel.c.base_id, self.base.c.id)],
)
def test_determine_local_remote_m2o_sub_to_joined_sub(self):
joincond = self._join_fixture_m2o_sub_to_joined_sub()
eq_(
joincond.local_remote_pairs,
[(self.right_w_base_rel.c.base_id, self.base.c.id)],
)
def test_determine_remote_columns_o2m_joined_sub_to_sub(self):
joincond = self._join_fixture_o2m_joined_sub_to_sub()
eq_(
joincond.local_remote_pairs,
[(self.sub.c.id, self.sub_w_sub_rel.c.sub_id)],
)
def test_determine_remote_columns_compound_1(self):
joincond = self._join_fixture_compound_expression_1(support_sync=False)
eq_(joincond.remote_columns, set([self.right.c.x, self.right.c.y]))
def test_determine_local_remote_compound_1(self):
joincond = self._join_fixture_compound_expression_1(support_sync=False)
eq_(
joincond.local_remote_pairs,
[
(self.left.c.x, self.right.c.x),
(self.left.c.x, self.right.c.y),
(self.left.c.y, self.right.c.x),
(self.left.c.y, self.right.c.y),
],
)
def test_determine_local_remote_compound_2(self):
joincond = self._join_fixture_compound_expression_2(support_sync=False)
eq_(
joincond.local_remote_pairs,
[
(self.left.c.x, self.right.c.x),
(self.left.c.x, self.right.c.y),
(self.left.c.y, self.right.c.x),
(self.left.c.y, self.right.c.y),
],
)
def test_determine_local_remote_compound_3(self):
joincond = self._join_fixture_compound_expression_1()
eq_(
joincond.local_remote_pairs,
[
(self.left.c.x, self.right.c.x),
(self.left.c.x, self.right.c.y),
(self.left.c.y, self.right.c.x),
(self.left.c.y, self.right.c.y),
],
)
def test_err_local_remote_compound_1(self):
self._assert_raises_no_relevant_fks(
self._join_fixture_compound_expression_1_non_annotated,
r"lft.x \+ lft.y = rgt.x \* rgt.y",
"None",
"primary",
)
def test_determine_remote_columns_compound_2(self):
joincond = self._join_fixture_compound_expression_2(support_sync=False)
eq_(joincond.remote_columns, set([self.right.c.x, self.right.c.y]))
def test_determine_remote_columns_o2m(self):
joincond = self._join_fixture_o2m()
eq_(joincond.remote_columns, set([self.right.c.lid]))
def test_determine_remote_columns_o2m_selfref(self):
joincond = self._join_fixture_o2m_selfref()
eq_(joincond.remote_columns, set([self.selfref.c.sid]))
def test_determine_local_remote_pairs_o2m_composite_selfref(self):
joincond = self._join_fixture_o2m_composite_selfref()
eq_(
joincond.local_remote_pairs,
[
(
self.composite_selfref.c.group_id,
self.composite_selfref.c.group_id,
),
(
self.composite_selfref.c.id,
self.composite_selfref.c.parent_id,
),
],
)
def test_determine_local_remote_pairs_o2m_composite_selfref_func_warning(
self,
):
self._assert_non_simple_warning(
self._join_fixture_o2m_composite_selfref_func
)
def test_determine_local_remote_pairs_o2m_composite_selfref_func_rs(self):
# no warning
self._join_fixture_o2m_composite_selfref_func_remote_side()
def test_determine_local_remote_pairs_o2m_overlap_func_warning(self):
self._assert_non_simple_warning(
self._join_fixture_m2o_sub_to_joined_sub_func
)
def test_determine_local_remote_pairs_o2m_composite_selfref_func_annotated(
self,
):
joincond = self._join_fixture_o2m_composite_selfref_func_annotated()
eq_(
joincond.local_remote_pairs,
[
(
self.composite_selfref.c.group_id,
self.composite_selfref.c.group_id,
),
(
self.composite_selfref.c.id,
self.composite_selfref.c.parent_id,
),
],
)
def test_determine_remote_columns_m2o_composite_selfref(self):
joincond = self._join_fixture_m2o_composite_selfref()
eq_(
joincond.remote_columns,
set(
[
self.composite_selfref.c.id,
self.composite_selfref.c.group_id,
]
),
)
def test_determine_remote_columns_m2o(self):
joincond = self._join_fixture_m2o()
eq_(joincond.remote_columns, set([self.left.c.id]))
def test_determine_local_remote_pairs_o2m(self):
joincond = self._join_fixture_o2m()
eq_(joincond.local_remote_pairs, [(self.left.c.id, self.right.c.lid)])
def test_determine_synchronize_pairs_m2m(self):
joincond = self._join_fixture_m2m()
eq_(
joincond.synchronize_pairs,
[(self.m2mleft.c.id, self.m2msecondary.c.lid)],
)
eq_(
joincond.secondary_synchronize_pairs,
[(self.m2mright.c.id, self.m2msecondary.c.rid)],
)
def test_determine_local_remote_pairs_o2m_backref(self):
joincond = self._join_fixture_o2m()
joincond2 = self._join_fixture_m2o(
primaryjoin=joincond.primaryjoin_reverse_remote
)
eq_(joincond2.local_remote_pairs, [(self.right.c.lid, self.left.c.id)])
def test_determine_local_remote_pairs_m2m(self):
joincond = self._join_fixture_m2m()
eq_(
joincond.local_remote_pairs,
[
(self.m2mleft.c.id, self.m2msecondary.c.lid),
(self.m2mright.c.id, self.m2msecondary.c.rid),
],
)
def test_determine_local_remote_pairs_m2m_backref(self):
j1, j2 = self._join_fixture_m2m_backref()
eq_(
j1.local_remote_pairs,
[
(self.m2mleft.c.id, self.m2msecondary.c.lid),
(self.m2mright.c.id, self.m2msecondary.c.rid),
],
)
eq_(
j2.local_remote_pairs,
[
(self.m2mright.c.id, self.m2msecondary.c.rid),
(self.m2mleft.c.id, self.m2msecondary.c.lid),
],
)
def test_determine_local_columns_m2m_backref(self):
j1, j2 = self._join_fixture_m2m_backref()
eq_(j1.local_columns, set([self.m2mleft.c.id]))
eq_(j2.local_columns, set([self.m2mright.c.id]))
def test_determine_remote_columns_m2m_backref(self):
j1, j2 = self._join_fixture_m2m_backref()
eq_(
j1.remote_columns,
set([self.m2msecondary.c.lid, self.m2msecondary.c.rid]),
)
eq_(
j2.remote_columns,
set([self.m2msecondary.c.lid, self.m2msecondary.c.rid]),
)
def test_determine_remote_columns_m2o_selfref(self):
joincond = self._join_fixture_m2o_selfref()
eq_(joincond.remote_columns, set([self.selfref.c.id]))
def test_determine_local_remote_cols_three_tab_viewonly(self):
joincond = self._join_fixture_overlapping_three_tables()
eq_(
joincond.local_remote_pairs,
[(self.three_tab_a.c.id, self.three_tab_b.c.aid)],
)
eq_(
joincond.remote_columns,
set([self.three_tab_b.c.id, self.three_tab_b.c.aid]),
)
def test_determine_local_remote_overlapping_composite_fks(self):
joincond = self._join_fixture_overlapping_composite_fks()
eq_(
joincond.local_remote_pairs,
[
(self.composite_target.c.uid, self.composite_multi_ref.c.uid2),
(self.composite_target.c.oid, self.composite_multi_ref.c.oid),
],
)
def test_determine_local_remote_pairs_purely_single_col_o2m(self):
joincond = self._join_fixture_purely_single_o2m()
eq_(
joincond.local_remote_pairs,
[(self.purely_single_col.c.path, self.purely_single_col.c.path)],
)
def test_determine_local_remote_pairs_inh_selfref_w_entities(self):
joincond = self._join_fixture_inh_selfref_w_entity()
eq_(
joincond.local_remote_pairs,
[(self.sub.c.id, self.sub_w_sub_rel.c.sub_id)],
)
eq_(
joincond.remote_columns,
set([self.base.c.flag, self.sub_w_sub_rel.c.sub_id]),
)
class DirectionTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
def test_determine_direction_compound_2(self):
joincond = self._join_fixture_compound_expression_2(support_sync=False)
is_(joincond.direction, ONETOMANY)
def test_determine_direction_o2m(self):
joincond = self._join_fixture_o2m()
is_(joincond.direction, ONETOMANY)
def test_determine_direction_o2m_selfref(self):
joincond = self._join_fixture_o2m_selfref()
is_(joincond.direction, ONETOMANY)
def test_determine_direction_m2o_selfref(self):
joincond = self._join_fixture_m2o_selfref()
is_(joincond.direction, MANYTOONE)
def test_determine_direction_o2m_composite_selfref(self):
joincond = self._join_fixture_o2m_composite_selfref()
is_(joincond.direction, ONETOMANY)
def test_determine_direction_m2o_composite_selfref(self):
joincond = self._join_fixture_m2o_composite_selfref()
is_(joincond.direction, MANYTOONE)
def test_determine_direction_m2o(self):
joincond = self._join_fixture_m2o()
is_(joincond.direction, MANYTOONE)
def test_determine_direction_purely_single_o2m(self):
joincond = self._join_fixture_purely_single_o2m()
is_(joincond.direction, ONETOMANY)
def test_determine_direction_purely_single_m2o(self):
joincond = self._join_fixture_purely_single_m2o()
is_(joincond.direction, MANYTOONE)
class DetermineJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
def test_determine_join_o2m(self):
joincond = self._join_fixture_o2m()
self.assert_compile(joincond.primaryjoin, "lft.id = rgt.lid")
def test_determine_join_o2m_selfref(self):
joincond = self._join_fixture_o2m_selfref()
self.assert_compile(joincond.primaryjoin, "selfref.id = selfref.sid")
def test_determine_join_m2o_selfref(self):
joincond = self._join_fixture_m2o_selfref()
self.assert_compile(joincond.primaryjoin, "selfref.id = selfref.sid")
def test_determine_join_o2m_composite_selfref(self):
joincond = self._join_fixture_o2m_composite_selfref()
self.assert_compile(
joincond.primaryjoin,
"composite_selfref.group_id = composite_selfref.group_id "
"AND composite_selfref.id = composite_selfref.parent_id",
)
def test_determine_join_m2o_composite_selfref(self):
joincond = self._join_fixture_m2o_composite_selfref()
self.assert_compile(
joincond.primaryjoin,
"composite_selfref.group_id = composite_selfref.group_id "
"AND composite_selfref.id = composite_selfref.parent_id",
)
def test_determine_join_m2o(self):
joincond = self._join_fixture_m2o()
self.assert_compile(joincond.primaryjoin, "lft.id = rgt.lid")
def test_determine_join_ambiguous_fks_o2m(self):
assert_raises_message(
exc.AmbiguousForeignKeysError,
"Could not determine join condition between "
"parent/child tables on relationship None - "
"there are multiple foreign key paths linking "
"the tables. Specify the 'foreign_keys' argument, "
"providing a list of those columns which "
"should be counted as containing a foreign "
"key reference to the parent table.",
relationships.JoinCondition,
self.left,
self.right_multi_fk,
self.left,
self.right_multi_fk,
)
def test_determine_join_no_fks_o2m(self):
self._assert_raises_no_join(
relationships.JoinCondition,
"None",
None,
self.left,
self.selfref,
self.left,
self.selfref,
)
def test_determine_join_ambiguous_fks_m2m(self):
self._assert_raises_ambig_join(
relationships.JoinCondition,
"None",
self.m2msecondary_ambig_fks,
self.m2mleft,
self.m2mright,
self.m2mleft,
self.m2mright,
secondary=self.m2msecondary_ambig_fks,
)
def test_determine_join_no_fks_m2m(self):
self._assert_raises_no_join(
relationships.JoinCondition,
"None",
self.m2msecondary_no_fks,
self.m2mleft,
self.m2mright,
self.m2mleft,
self.m2mright,
secondary=self.m2msecondary_no_fks,
)
def _join_fixture_fks_ambig_m2m(self):
return relationships.JoinCondition(
self.m2mleft,
self.m2mright,
self.m2mleft,
self.m2mright,
secondary=self.m2msecondary_ambig_fks,
consider_as_foreign_keys={
self.m2msecondary_ambig_fks.c.lid1,
self.m2msecondary_ambig_fks.c.rid1,
},
)
def test_determine_join_w_fks_ambig_m2m(self):
joincond = self._join_fixture_fks_ambig_m2m()
self.assert_compile(
joincond.primaryjoin, "m2mlft.id = m2msecondary_ambig_fks.lid1"
)
self.assert_compile(
joincond.secondaryjoin, "m2mrgt.id = m2msecondary_ambig_fks.rid1"
)
class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
def test_join_targets_o2m_selfref(self):
joincond = self._join_fixture_o2m_selfref()
left = select(joincond.parent_persist_selectable).alias("pj")
pj, sj, sec, adapter, ds = joincond.join_targets(
left, joincond.child_persist_selectable, True
)
self.assert_compile(pj, "pj.id = selfref.sid")
self.assert_compile(pj, "pj.id = selfref.sid")
right = select(joincond.child_persist_selectable).alias("pj")
pj, sj, sec, adapter, ds = joincond.join_targets(
joincond.parent_persist_selectable, right, True
)
self.assert_compile(pj, "selfref.id = pj.sid")
self.assert_compile(pj, "selfref.id = pj.sid")
def test_join_targets_o2m_plain(self):
joincond = self._join_fixture_o2m()
pj, sj, sec, adapter, ds = joincond.join_targets(
joincond.parent_persist_selectable,
joincond.child_persist_selectable,
False,
)
self.assert_compile(pj, "lft.id = rgt.lid")
self.assert_compile(pj, "lft.id = rgt.lid")
def test_join_targets_o2m_left_aliased(self):
joincond = self._join_fixture_o2m()
left = select(joincond.parent_persist_selectable).alias("pj")
pj, sj, sec, adapter, ds = joincond.join_targets(
left, joincond.child_persist_selectable, True
)
self.assert_compile(pj, "pj.id = rgt.lid")
self.assert_compile(pj, "pj.id = rgt.lid")
def test_join_targets_o2m_right_aliased(self):
joincond = self._join_fixture_o2m()
right = select(joincond.child_persist_selectable).alias("pj")
pj, sj, sec, adapter, ds = joincond.join_targets(
joincond.parent_persist_selectable, right, True
)
self.assert_compile(pj, "lft.id = pj.lid")
self.assert_compile(pj, "lft.id = pj.lid")
def test_join_targets_o2m_composite_selfref(self):
joincond = self._join_fixture_o2m_composite_selfref()
right = select(joincond.child_persist_selectable).alias("pj")
pj, sj, sec, adapter, ds = joincond.join_targets(
joincond.parent_persist_selectable, right, True
)
self.assert_compile(
pj,
"pj.group_id = composite_selfref.group_id "
"AND composite_selfref.id = pj.parent_id",
)
def test_join_targets_m2o_composite_selfref(self):
joincond = self._join_fixture_m2o_composite_selfref()
right = select(joincond.child_persist_selectable).alias("pj")
pj, sj, sec, adapter, ds = joincond.join_targets(
joincond.parent_persist_selectable, right, True
)
self.assert_compile(
pj,
"pj.group_id = composite_selfref.group_id "
"AND pj.id = composite_selfref.parent_id",
)
class LazyClauseTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
def test_lazy_clause_o2m(self):
joincond = self._join_fixture_o2m()
lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause()
self.assert_compile(lazywhere, ":param_1 = rgt.lid")
def test_lazy_clause_o2m_reverse(self):
joincond = self._join_fixture_o2m()
lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause(
reverse_direction=True
)
self.assert_compile(lazywhere, "lft.id = :param_1")
def test_lazy_clause_o2m_o_side_none(self):
# test for #2948. When the join is "o.id == m.oid
# AND o.something == something",
# we don't want 'o' brought into the lazy load for 'm'
joincond = self._join_fixture_o2m_o_side_none()
lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause()
self.assert_compile(
lazywhere,
":param_1 = rgt.lid AND :param_2 = :x_1",
checkparams={"param_1": None, "param_2": None, "x_1": 5},
)
def test_lazy_clause_o2m_o_side_none_reverse(self):
# continued test for #2948.
joincond = self._join_fixture_o2m_o_side_none()
lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause(
reverse_direction=True
)
self.assert_compile(
lazywhere,
"lft.id = :param_1 AND lft.x = :x_1",
checkparams={"param_1": None, "x_1": 5},
)
def test_lazy_clause_remote_local_multiple_ref(self):
joincond = self._join_fixture_remote_local_multiple_ref()
lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause()
self.assert_compile(
lazywhere,
":param_1 = selfref.sid OR selfref.sid = :param_1",
checkparams={"param_1": None},
)
class DeannotateCorrectlyTest(fixtures.TestBase):
def test_pj_deannotates(self):
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class A(Base):
__tablename__ = "a"
id = Column(Integer, primary_key=True)
class B(Base):
__tablename__ = "b"
id = Column(Integer, primary_key=True)
a_id = Column(ForeignKey(A.id))
a = relationship(A)
eq_(
B.a.property.primaryjoin.left._annotations,
{"parentmapper": A.__mapper__, "remote": True},
)
eq_(
B.a.property.primaryjoin.right._annotations,
{"foreign": True, "local": True, "parentmapper": B.__mapper__},
)
| mit | -7,629,121,777,916,075,000 | 34.653998 | 79 | 0.554907 | false |
igor-rangel7l/novoigorrangel.repository | plugin.video.SportsDevil/service/asn1crypto/keys.py | 13 | 35048 | # coding: utf-8
"""
ASN.1 type classes for public and private keys. Exports the following items:
- DSAPrivateKey()
- ECPrivateKey()
- EncryptedPrivateKeyInfo()
- PrivateKeyInfo()
- PublicKeyInfo()
- RSAPrivateKey()
- RSAPublicKey()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
import hashlib
import math
from ._elliptic_curve import (
SECP192R1_BASE_POINT,
SECP224R1_BASE_POINT,
SECP256R1_BASE_POINT,
SECP384R1_BASE_POINT,
SECP521R1_BASE_POINT,
PrimeCurve,
PrimePoint,
)
from ._errors import unwrap
from ._types import type_name, str_cls, byte_cls
from .algos import _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm
from .core import (
Any,
Asn1Value,
BitString,
Choice,
Integer,
IntegerOctetString,
Null,
ObjectIdentifier,
OctetBitString,
OctetString,
ParsableOctetString,
ParsableOctetBitString,
Sequence,
SequenceOf,
SetOf,
)
from .util import int_from_bytes, int_to_bytes
class OtherPrimeInfo(Sequence):
"""
Source: https://tools.ietf.org/html/rfc3447#page-46
"""
_fields = [
('prime', Integer),
('exponent', Integer),
('coefficient', Integer),
]
class OtherPrimeInfos(SequenceOf):
"""
Source: https://tools.ietf.org/html/rfc3447#page-46
"""
_child_spec = OtherPrimeInfo
class RSAPrivateKeyVersion(Integer):
"""
Original Name: Version
Source: https://tools.ietf.org/html/rfc3447#page-45
"""
_map = {
0: 'two-prime',
1: 'multi',
}
class RSAPrivateKey(Sequence):
"""
Source: https://tools.ietf.org/html/rfc3447#page-45
"""
_fields = [
('version', RSAPrivateKeyVersion),
('modulus', Integer),
('public_exponent', Integer),
('private_exponent', Integer),
('prime1', Integer),
('prime2', Integer),
('exponent1', Integer),
('exponent2', Integer),
('coefficient', Integer),
('other_prime_infos', OtherPrimeInfos, {'optional': True})
]
class RSAPublicKey(Sequence):
"""
Source: https://tools.ietf.org/html/rfc3447#page-44
"""
_fields = [
('modulus', Integer),
('public_exponent', Integer)
]
class DSAPrivateKey(Sequence):
"""
The ASN.1 structure that OpenSSL uses to store a DSA private key that is
    not part of a PKCS#8 structure. Reverse engineered from the
    English-language description on the linked OpenSSL documentation page.
Original Name: None
Source: https://www.openssl.org/docs/apps/dsa.html
"""
_fields = [
('version', Integer),
('p', Integer),
('q', Integer),
('g', Integer),
('public_key', Integer),
('private_key', Integer),
]
class _ECPoint():
"""
In both PublicKeyInfo and PrivateKeyInfo, the EC public key is a byte
string that is encoded as a bit string. This class adds convenience
methods for converting to and from the byte string to a pair of integers
that are the X and Y coordinates.
"""
@classmethod
def from_coords(cls, x, y):
"""
Creates an ECPoint object from the X and Y integer coordinates of the
point
:param x:
The X coordinate, as an integer
:param y:
The Y coordinate, as an integer
:return:
An ECPoint object
"""
x_bytes = int(math.ceil(math.log(x, 2) / 8.0))
y_bytes = int(math.ceil(math.log(y, 2) / 8.0))
num_bytes = max(x_bytes, y_bytes)
byte_string = b'\x04'
byte_string += int_to_bytes(x, width=num_bytes)
byte_string += int_to_bytes(y, width=num_bytes)
return cls(byte_string)
def to_coords(self):
"""
Returns the X and Y coordinates for this EC point, as native Python
integers
:return:
A 2-element tuple containing integers (X, Y)
"""
data = self.native
first_byte = data[0:1]
# Uncompressed
if first_byte == b'\x04':
remaining = data[1:]
field_len = len(remaining) // 2
x = int_from_bytes(remaining[0:field_len])
y = int_from_bytes(remaining[field_len:])
return (x, y)
if first_byte not in set([b'\x02', b'\x03']):
raise ValueError(unwrap(
'''
Invalid EC public key - first byte is incorrect
'''
))
raise ValueError(unwrap(
'''
Compressed representations of EC public keys are not supported due
to patent US6252960
'''
))
class ECPoint(OctetString, _ECPoint):
pass
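# Round-trip sketch for the _ECPoint helpers above (values are
# illustrative): an uncompressed SEC 1 point is b'\x04' + X + Y with
# equal-width big-endian coordinates, so from_coords() and to_coords()
# are inverses of each other.
#
#     point = ECPoint.from_coords(0x1234, 0x5678)
#     point.native        # b'\x04\x124Vx' == b'\x04\x12\x34\x56\x78'
#     point.to_coords()   # (0x1234, 0x5678)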
class ECPointBitString(OctetBitString, _ECPoint):
pass
class SpecifiedECDomainVersion(Integer):
"""
Source: http://www.secg.org/sec1-v2.pdf page 104
"""
_map = {
1: 'ecdpVer1',
2: 'ecdpVer2',
3: 'ecdpVer3',
}
class FieldType(ObjectIdentifier):
"""
Original Name: None
Source: http://www.secg.org/sec1-v2.pdf page 101
"""
_map = {
'1.2.840.10045.1.1': 'prime_field',
'1.2.840.10045.1.2': 'characteristic_two_field',
}
class CharacteristicTwoBasis(ObjectIdentifier):
"""
Original Name: None
Source: http://www.secg.org/sec1-v2.pdf page 102
"""
_map = {
'1.2.840.10045.1.2.1.1': 'gn_basis',
'1.2.840.10045.1.2.1.2': 'tp_basis',
'1.2.840.10045.1.2.1.3': 'pp_basis',
}
class Pentanomial(Sequence):
"""
Source: http://www.secg.org/sec1-v2.pdf page 102
"""
_fields = [
('k1', Integer),
('k2', Integer),
('k3', Integer),
]
class CharacteristicTwo(Sequence):
"""
Original Name: Characteristic-two
Source: http://www.secg.org/sec1-v2.pdf page 101
"""
_fields = [
('m', Integer),
('basis', CharacteristicTwoBasis),
('parameters', Any),
]
_oid_pair = ('basis', 'parameters')
_oid_specs = {
'gn_basis': Null,
'tp_basis': Integer,
'pp_basis': Pentanomial,
}
class FieldID(Sequence):
"""
Source: http://www.secg.org/sec1-v2.pdf page 100
"""
_fields = [
('field_type', FieldType),
('parameters', Any),
]
_oid_pair = ('field_type', 'parameters')
_oid_specs = {
'prime_field': Integer,
'characteristic_two_field': CharacteristicTwo,
}
class Curve(Sequence):
"""
Source: http://www.secg.org/sec1-v2.pdf page 104
"""
_fields = [
('a', OctetString),
('b', OctetString),
('seed', OctetBitString, {'optional': True}),
]
class SpecifiedECDomain(Sequence):
"""
Source: http://www.secg.org/sec1-v2.pdf page 103
"""
_fields = [
('version', SpecifiedECDomainVersion),
('field_id', FieldID),
('curve', Curve),
('base', ECPoint),
('order', Integer),
('cofactor', Integer, {'optional': True}),
('hash', DigestAlgorithm, {'optional': True}),
]
class NamedCurve(ObjectIdentifier):
"""
Various named curves
Original Name: None
Source: https://tools.ietf.org/html/rfc3279#page-23,
https://tools.ietf.org/html/rfc5480#page-5
"""
_map = {
# https://tools.ietf.org/html/rfc3279#page-23
'1.2.840.10045.3.0.1': 'c2pnb163v1',
'1.2.840.10045.3.0.2': 'c2pnb163v2',
'1.2.840.10045.3.0.3': 'c2pnb163v3',
'1.2.840.10045.3.0.4': 'c2pnb176w1',
'1.2.840.10045.3.0.5': 'c2tnb191v1',
'1.2.840.10045.3.0.6': 'c2tnb191v2',
'1.2.840.10045.3.0.7': 'c2tnb191v3',
'1.2.840.10045.3.0.8': 'c2onb191v4',
'1.2.840.10045.3.0.9': 'c2onb191v5',
'1.2.840.10045.3.0.10': 'c2pnb208w1',
'1.2.840.10045.3.0.11': 'c2tnb239v1',
'1.2.840.10045.3.0.12': 'c2tnb239v2',
'1.2.840.10045.3.0.13': 'c2tnb239v3',
'1.2.840.10045.3.0.14': 'c2onb239v4',
'1.2.840.10045.3.0.15': 'c2onb239v5',
'1.2.840.10045.3.0.16': 'c2pnb272w1',
'1.2.840.10045.3.0.17': 'c2pnb304w1',
'1.2.840.10045.3.0.18': 'c2tnb359v1',
'1.2.840.10045.3.0.19': 'c2pnb368w1',
'1.2.840.10045.3.0.20': 'c2tnb431r1',
'1.2.840.10045.3.1.2': 'prime192v2',
'1.2.840.10045.3.1.3': 'prime192v3',
'1.2.840.10045.3.1.4': 'prime239v1',
'1.2.840.10045.3.1.5': 'prime239v2',
'1.2.840.10045.3.1.6': 'prime239v3',
# https://tools.ietf.org/html/rfc5480#page-5
'1.3.132.0.1': 'sect163k1',
'1.3.132.0.15': 'sect163r2',
'1.2.840.10045.3.1.1': 'secp192r1',
'1.3.132.0.33': 'secp224r1',
'1.3.132.0.26': 'sect233k1',
'1.2.840.10045.3.1.7': 'secp256r1',
'1.3.132.0.27': 'sect233r1',
'1.3.132.0.16': 'sect283k1',
'1.3.132.0.17': 'sect283r1',
'1.3.132.0.34': 'secp384r1',
'1.3.132.0.36': 'sect409k1',
'1.3.132.0.37': 'sect409r1',
'1.3.132.0.35': 'secp521r1',
'1.3.132.0.38': 'sect571k1',
'1.3.132.0.39': 'sect571r1',
}
class ECDomainParameters(Choice):
"""
Source: http://www.secg.org/sec1-v2.pdf page 102
"""
_alternatives = [
('specified', SpecifiedECDomain),
('named', NamedCurve),
('implicit_ca', Null),
]
class ECPrivateKeyVersion(Integer):
"""
Original Name: None
Source: http://www.secg.org/sec1-v2.pdf page 108
"""
_map = {
1: 'ecPrivkeyVer1',
}
class ECPrivateKey(Sequence):
"""
Source: http://www.secg.org/sec1-v2.pdf page 108
"""
_fields = [
('version', ECPrivateKeyVersion),
('private_key', IntegerOctetString),
('parameters', ECDomainParameters, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
('public_key', ECPointBitString, {'tag_type': 'explicit', 'tag': 1, 'optional': True}),
]
class DSAParams(Sequence):
"""
Parameters for a DSA public or private key
Original Name: Dss-Parms
Source: https://tools.ietf.org/html/rfc3279#page-9
"""
_fields = [
('p', Integer),
('q', Integer),
('g', Integer),
]
class Attribute(Sequence):
"""
Source: https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.501-198811-S!!PDF-E&type=items page 8
"""
_fields = [
('type', ObjectIdentifier),
('values', SetOf, {'spec': Any}),
]
class Attributes(SetOf):
"""
Source: https://tools.ietf.org/html/rfc5208#page-3
"""
_child_spec = Attribute
class PrivateKeyAlgorithmId(ObjectIdentifier):
"""
These OIDs for various public keys are reused when storing private keys
inside of a PKCS#8 structure
Original Name: None
Source: https://tools.ietf.org/html/rfc3279
"""
_map = {
# https://tools.ietf.org/html/rfc3279#page-19
'1.2.840.113549.1.1.1': 'rsa',
# https://tools.ietf.org/html/rfc3279#page-18
'1.2.840.10040.4.1': 'dsa',
# https://tools.ietf.org/html/rfc3279#page-13
'1.2.840.10045.2.1': 'ec',
}
class PrivateKeyAlgorithm(_ForceNullParameters, Sequence):
"""
Original Name: PrivateKeyAlgorithmIdentifier
Source: https://tools.ietf.org/html/rfc5208#page-3
"""
_fields = [
('algorithm', PrivateKeyAlgorithmId),
('parameters', Any, {'optional': True}),
]
_oid_pair = ('algorithm', 'parameters')
_oid_specs = {
'dsa': DSAParams,
'ec': ECDomainParameters,
}
class PrivateKeyInfo(Sequence):
"""
Source: https://tools.ietf.org/html/rfc5208#page-3
"""
_fields = [
('version', Integer),
('private_key_algorithm', PrivateKeyAlgorithm),
('private_key', ParsableOctetString),
('attributes', Attributes, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
]
def _private_key_spec(self):
algorithm = self['private_key_algorithm']['algorithm'].native
return {
'rsa': RSAPrivateKey,
'dsa': Integer,
'ec': ECPrivateKey,
}[algorithm]
_spec_callbacks = {
'private_key': _private_key_spec
}
_algorithm = None
_bit_size = None
_public_key = None
_fingerprint = None
@classmethod
def wrap(cls, private_key, algorithm):
"""
Wraps a private key in a PrivateKeyInfo structure
:param private_key:
A byte string or Asn1Value object of the private key
:param algorithm:
A unicode string of "rsa", "dsa" or "ec"
:return:
A PrivateKeyInfo object
"""
if not isinstance(private_key, byte_cls) and not isinstance(private_key, Asn1Value):
raise TypeError(unwrap(
'''
private_key must be a byte string or Asn1Value, not %s
''',
type_name(private_key)
))
if algorithm == 'rsa':
if not isinstance(private_key, RSAPrivateKey):
private_key = RSAPrivateKey.load(private_key)
params = Null()
elif algorithm == 'dsa':
if not isinstance(private_key, DSAPrivateKey):
private_key = DSAPrivateKey.load(private_key)
params = DSAParams()
params['p'] = private_key['p']
params['q'] = private_key['q']
params['g'] = private_key['g']
public_key = private_key['public_key']
private_key = private_key['private_key']
elif algorithm == 'ec':
if not isinstance(private_key, ECPrivateKey):
private_key = ECPrivateKey.load(private_key)
else:
private_key = private_key.copy()
params = private_key['parameters']
del private_key['parameters']
else:
raise ValueError(unwrap(
'''
algorithm must be one of "rsa", "dsa", "ec", not %s
''',
repr(algorithm)
))
private_key_algo = PrivateKeyAlgorithm()
private_key_algo['algorithm'] = PrivateKeyAlgorithmId(algorithm)
private_key_algo['parameters'] = params
container = cls()
container._algorithm = algorithm
container['version'] = Integer(0)
container['private_key_algorithm'] = private_key_algo
container['private_key'] = private_key
# Here we save the DSA public key if possible since it is not contained
# within the PKCS#8 structure for a DSA key
if algorithm == 'dsa':
container._public_key = public_key
return container
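    # Minimal usage sketch for wrap() (the DER bytes and file name are
    # assumptions for illustration):
    #
    #     with open('key.der', 'rb') as f:
    #         info = PrivateKeyInfo.wrap(f.read(), 'rsa')
    #     pkcs8_der = info.dump()  # DER-encoded PKCS#8 structure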
def _compute_public_key(self):
"""
Computes the public key corresponding to the current private key.
:return:
For RSA keys, an RSAPublicKey object. For DSA keys, an Integer
object. For EC keys, an ECPointBitString.
"""
if self.algorithm == 'dsa':
params = self['private_key_algorithm']['parameters']
return Integer(pow(
params['g'].native,
self['private_key'].parsed.native,
params['p'].native
))
if self.algorithm == 'rsa':
key = self['private_key'].parsed
return RSAPublicKey({
'modulus': key['modulus'],
'public_exponent': key['public_exponent'],
})
if self.algorithm == 'ec':
curve_type, details = self.curve
if curve_type == 'implicit_ca':
raise ValueError(unwrap(
'''
Unable to compute public key for EC key using Implicit CA
parameters
'''
))
if curve_type == 'specified':
if details['field_id']['field_type'] == 'characteristic_two_field':
raise ValueError(unwrap(
'''
Unable to compute public key for EC key over a
characteristic two field
'''
))
curve = PrimeCurve(
details['field_id']['parameters'],
int_from_bytes(details['curve']['a']),
int_from_bytes(details['curve']['b'])
)
base_x, base_y = self['private_key_algorithm']['parameters'].chosen['base'].to_coords()
base_point = PrimePoint(curve, base_x, base_y)
elif curve_type == 'named':
if details not in ('secp192r1', 'secp224r1', 'secp256r1', 'secp384r1', 'secp521r1'):
raise ValueError(unwrap(
'''
Unable to compute public key for EC named curve %s,
parameters not currently included
''',
details
))
base_point = {
'secp192r1': SECP192R1_BASE_POINT,
'secp224r1': SECP224R1_BASE_POINT,
'secp256r1': SECP256R1_BASE_POINT,
'secp384r1': SECP384R1_BASE_POINT,
'secp521r1': SECP521R1_BASE_POINT,
}[details]
public_point = base_point * self['private_key'].parsed['private_key'].native
return ECPointBitString.from_coords(public_point.x, public_point.y)
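    # Toy check of the DSA branch above (numbers are illustrative and far
    # too small for real use): with p=23, g=5 and private key x=6, the
    # public key is y = g**x mod p = 5**6 mod 23 = 8.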
def unwrap(self):
"""
Unwraps the private key into an RSAPrivateKey, DSAPrivateKey or
ECPrivateKey object
:return:
An RSAPrivateKey, DSAPrivateKey or ECPrivateKey object
"""
if self.algorithm == 'rsa':
return self['private_key'].parsed
if self.algorithm == 'dsa':
params = self['private_key_algorithm']['parameters']
return DSAPrivateKey({
'version': 0,
'p': params['p'],
'q': params['q'],
'g': params['g'],
'public_key': self.public_key,
'private_key': self['private_key'].parsed,
})
if self.algorithm == 'ec':
output = self['private_key'].parsed
output['parameters'] = self['private_key_algorithm']['parameters']
output['public_key'] = self.public_key
return output
@property
def curve(self):
"""
Returns information about the curve used for an EC key
:raises:
ValueError - when the key is not an EC key
:return:
A two-element tuple, with the first element being a unicode string
of "implicit_ca", "specified" or "named". If the first element is
"implicit_ca", the second is None. If "specified", the second is
an OrderedDict that is the native version of SpecifiedECDomain. If
"named", the second is a unicode string of the curve name.
"""
if self.algorithm != 'ec':
raise ValueError(unwrap(
'''
Only EC keys have a curve, this key is %s
''',
self.algorithm.upper()
))
params = self['private_key_algorithm']['parameters']
chosen = params.chosen
if params.name == 'implicit_ca':
value = None
else:
value = chosen.native
return (params.name, value)
@property
def hash_algo(self):
"""
Returns the name of the family of hash algorithms used to generate a
DSA key
:raises:
ValueError - when the key is not a DSA key
:return:
A unicode string of "sha1" or "sha2"
"""
if self.algorithm != 'dsa':
raise ValueError(unwrap(
'''
Only DSA keys are generated using a hash algorithm, this key is
%s
''',
self.algorithm.upper()
))
byte_len = math.log(self['private_key_algorithm']['parameters']['q'].native, 2) / 8
return 'sha1' if byte_len <= 20 else 'sha2'
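    # Worked example for the cutoff above: a 160-bit q gives
    # log2(q)/8 ~= 19.9 <= 20, so "sha1"; a 256-bit q gives ~= 31.9,
    # so "sha2".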
@property
def algorithm(self):
"""
:return:
A unicode string of "rsa", "dsa" or "ec"
"""
if self._algorithm is None:
self._algorithm = self['private_key_algorithm']['algorithm'].native
return self._algorithm
@property
def bit_size(self):
"""
:return:
The bit size of the private key, as an integer
"""
if self._bit_size is None:
if self.algorithm == 'rsa':
prime = self['private_key'].parsed['modulus'].native
elif self.algorithm == 'dsa':
prime = self['private_key_algorithm']['parameters']['p'].native
elif self.algorithm == 'ec':
prime = self['private_key'].parsed['private_key'].native
self._bit_size = int(math.ceil(math.log(prime, 2)))
modulus = self._bit_size % 8
if modulus != 0:
self._bit_size += 8 - modulus
return self._bit_size
@property
def byte_size(self):
"""
:return:
The byte size of the private key, as an integer
"""
return int(math.ceil(self.bit_size / 8))
@property
def public_key(self):
"""
:return:
If an RSA key, an RSAPublicKey object. If a DSA key, an Integer
object. If an EC key, an ECPointBitString object.
"""
if self._public_key is None:
if self.algorithm == 'ec':
key = self['private_key'].parsed
if key['public_key']:
self._public_key = key['public_key'].untag()
else:
self._public_key = self._compute_public_key()
else:
self._public_key = self._compute_public_key()
return self._public_key
@property
def public_key_info(self):
"""
:return:
A PublicKeyInfo object derived from this private key.
"""
return PublicKeyInfo({
'algorithm': {
'algorithm': self.algorithm,
'parameters': self['private_key_algorithm']['parameters']
},
'public_key': self.public_key
})
@property
def fingerprint(self):
"""
Creates a fingerprint that can be compared with a public key to see if
the two form a pair.
        This fingerprint is not compatible with fingerprints generated by any
other software.
:return:
A byte string that is a sha256 hash of selected components (based
on the key type)
"""
if self._fingerprint is None:
params = self['private_key_algorithm']['parameters']
key = self['private_key'].parsed
if self.algorithm == 'rsa':
to_hash = '%d:%d' % (
key['modulus'].native,
key['public_exponent'].native,
)
elif self.algorithm == 'dsa':
public_key = self.public_key
to_hash = '%d:%d:%d:%d' % (
params['p'].native,
params['q'].native,
params['g'].native,
public_key.native,
)
elif self.algorithm == 'ec':
public_key = key['public_key'].native
if public_key is None:
public_key = self.public_key.native
if params.name == 'named':
to_hash = '%s:' % params.chosen.native
to_hash = to_hash.encode('utf-8')
to_hash += public_key
elif params.name == 'implicit_ca':
to_hash = public_key
elif params.name == 'specified':
to_hash = '%s:' % params.chosen['field_id']['parameters'].native
to_hash = to_hash.encode('utf-8')
to_hash += b':' + params.chosen['curve']['a'].native
to_hash += b':' + params.chosen['curve']['b'].native
to_hash += public_key
if isinstance(to_hash, str_cls):
to_hash = to_hash.encode('utf-8')
self._fingerprint = hashlib.sha256(to_hash).digest()
return self._fingerprint
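# Pairing sketch using the fingerprint property above (the DER byte
# strings are assumptions for illustration):
#
#     priv = PrivateKeyInfo.load(private_der)
#     pub = PublicKeyInfo.load(public_der)
#     if priv.fingerprint == pub.fingerprint:
#         pass  # the two keys form a pair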
class EncryptedPrivateKeyInfo(Sequence):
"""
Source: https://tools.ietf.org/html/rfc5208#page-4
"""
_fields = [
('encryption_algorithm', EncryptionAlgorithm),
('encrypted_data', OctetString),
]
# These structures are from https://tools.ietf.org/html/rfc3279
class ValidationParms(Sequence):
"""
Source: https://tools.ietf.org/html/rfc3279#page-10
"""
_fields = [
('seed', BitString),
('pgen_counter', Integer),
]
class DomainParameters(Sequence):
"""
Source: https://tools.ietf.org/html/rfc3279#page-10
"""
_fields = [
('p', Integer),
('g', Integer),
('q', Integer),
('j', Integer, {'optional': True}),
('validation_params', ValidationParms, {'optional': True}),
]
class PublicKeyAlgorithmId(ObjectIdentifier):
"""
Original Name: None
Source: https://tools.ietf.org/html/rfc3279
"""
_map = {
# https://tools.ietf.org/html/rfc3279#page-19
'1.2.840.113549.1.1.1': 'rsa',
# https://tools.ietf.org/html/rfc3279#page-18
'1.2.840.10040.4.1': 'dsa',
# https://tools.ietf.org/html/rfc3279#page-13
'1.2.840.10045.2.1': 'ec',
# https://tools.ietf.org/html/rfc3279#page-10
'1.2.840.10046.2.1': 'dh',
}
class PublicKeyAlgorithm(_ForceNullParameters, Sequence):
"""
Original Name: AlgorithmIdentifier
Source: https://tools.ietf.org/html/rfc5280#page-18
"""
_fields = [
('algorithm', PublicKeyAlgorithmId),
('parameters', Any, {'optional': True}),
]
_oid_pair = ('algorithm', 'parameters')
_oid_specs = {
'dsa': DSAParams,
'ec': ECDomainParameters,
'dh': DomainParameters,
}
class PublicKeyInfo(Sequence):
"""
Original Name: SubjectPublicKeyInfo
Source: https://tools.ietf.org/html/rfc5280#page-17
"""
_fields = [
('algorithm', PublicKeyAlgorithm),
('public_key', ParsableOctetBitString),
]
def _public_key_spec(self):
algorithm = self['algorithm']['algorithm'].native
return {
'rsa': RSAPublicKey,
'dsa': Integer,
# We override the field spec with ECPoint so that users can easily
# decompose the byte string into the constituent X and Y coords
'ec': (ECPointBitString, None),
'dh': Integer,
}[algorithm]
_spec_callbacks = {
'public_key': _public_key_spec
}
_algorithm = None
_bit_size = None
_fingerprint = None
_sha1 = None
_sha256 = None
@classmethod
def wrap(cls, public_key, algorithm):
"""
Wraps a public key in a PublicKeyInfo structure
:param public_key:
A byte string or Asn1Value object of the public key
:param algorithm:
A unicode string of "rsa"
:return:
A PublicKeyInfo object
"""
if not isinstance(public_key, byte_cls) and not isinstance(public_key, Asn1Value):
raise TypeError(unwrap(
'''
public_key must be a byte string or Asn1Value, not %s
''',
type_name(public_key)
))
if algorithm != 'rsa':
raise ValueError(unwrap(
'''
                algorithm must be "rsa", not %s
''',
repr(algorithm)
))
algo = PublicKeyAlgorithm()
algo['algorithm'] = PublicKeyAlgorithmId(algorithm)
algo['parameters'] = Null()
container = cls()
container['algorithm'] = algo
if isinstance(public_key, Asn1Value):
public_key = public_key.untag().dump()
container['public_key'] = ParsableOctetBitString(public_key)
return container
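    # Minimal usage sketch for wrap() (modulus n and exponent e are
    # assumed integers):
    #
    #     rsa_pub = RSAPublicKey({'modulus': n, 'public_exponent': e})
    #     info = PublicKeyInfo.wrap(rsa_pub, 'rsa')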
def unwrap(self):
"""
Unwraps an RSA public key into an RSAPublicKey object. Does not support
DSA or EC public keys since they do not have an unwrapped form.
:return:
An RSAPublicKey object
"""
if self.algorithm == 'rsa':
return self['public_key'].parsed
key_type = self.algorithm.upper()
a_an = 'an' if key_type == 'EC' else 'a'
raise ValueError(unwrap(
'''
Only RSA public keys may be unwrapped - this key is %s %s public
key
''',
a_an,
key_type
))
@property
def curve(self):
"""
Returns information about the curve used for an EC key
:raises:
ValueError - when the key is not an EC key
:return:
A two-element tuple, with the first element being a unicode string
of "implicit_ca", "specified" or "named". If the first element is
"implicit_ca", the second is None. If "specified", the second is
an OrderedDict that is the native version of SpecifiedECDomain. If
"named", the second is a unicode string of the curve name.
"""
if self.algorithm != 'ec':
raise ValueError(unwrap(
'''
Only EC keys have a curve, this key is %s
''',
self.algorithm.upper()
))
params = self['algorithm']['parameters']
chosen = params.chosen
if params.name == 'implicit_ca':
value = None
else:
value = chosen.native
return (params.name, value)
@property
def hash_algo(self):
"""
Returns the name of the family of hash algorithms used to generate a
DSA key
:raises:
ValueError - when the key is not a DSA key
:return:
A unicode string of "sha1" or "sha2" or None if no parameters are
present
"""
if self.algorithm != 'dsa':
raise ValueError(unwrap(
'''
Only DSA keys are generated using a hash algorithm, this key is
%s
''',
self.algorithm.upper()
))
parameters = self['algorithm']['parameters']
if parameters.native is None:
return None
byte_len = math.log(parameters['q'].native, 2) / 8
return 'sha1' if byte_len <= 20 else 'sha2'
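    # Worked example of the rule above: a 160-bit q gives
    # log(q, 2) / 8 == 20 bytes -> 'sha1'; a 256-bit q gives 32 -> 'sha2'.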
@property
def algorithm(self):
"""
:return:
A unicode string of "rsa", "dsa" or "ec"
"""
if self._algorithm is None:
self._algorithm = self['algorithm']['algorithm'].native
return self._algorithm
@property
def bit_size(self):
"""
:return:
The bit size of the public key, as an integer
"""
if self._bit_size is None:
if self.algorithm == 'ec':
self._bit_size = ((len(self['public_key'].native) - 1) / 2) * 8
else:
if self.algorithm == 'rsa':
prime = self['public_key'].parsed['modulus'].native
elif self.algorithm == 'dsa':
prime = self['algorithm']['parameters']['p'].native
self._bit_size = int(math.ceil(math.log(prime, 2)))
modulus = self._bit_size % 8
if modulus != 0:
self._bit_size += 8 - modulus
return self._bit_size
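    # Worked example (illustrative): an uncompressed P-256 EC point is 65
    # bytes (0x04 prefix plus two 32-byte coordinates), so
    # ((65 - 1) / 2) * 8 == 256 bits. For RSA/DSA the bit length of the
    # modulus/prime is rounded up to the next multiple of 8 above.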
@property
def byte_size(self):
"""
:return:
The byte size of the public key, as an integer
"""
return int(math.ceil(self.bit_size / 8))
@property
def sha1(self):
"""
:return:
The SHA1 hash of the DER-encoded bytes of this public key info
"""
if self._sha1 is None:
self._sha1 = hashlib.sha1(byte_cls(self['public_key'])).digest()
return self._sha1
@property
def sha256(self):
"""
:return:
The SHA-256 hash of the DER-encoded bytes of this public key info
"""
if self._sha256 is None:
self._sha256 = hashlib.sha256(byte_cls(self['public_key'])).digest()
return self._sha256
@property
def fingerprint(self):
"""
Creates a fingerprint that can be compared with a private key to see if
the two form a pair.
        This fingerprint is not compatible with fingerprints generated by any
other software.
:return:
A byte string that is a sha256 hash of selected components (based
on the key type)
"""
if self._fingerprint is None:
key_type = self['algorithm']['algorithm'].native
params = self['algorithm']['parameters']
if key_type == 'rsa':
key = self['public_key'].parsed
to_hash = '%d:%d' % (
key['modulus'].native,
key['public_exponent'].native,
)
elif key_type == 'dsa':
key = self['public_key'].parsed
to_hash = '%d:%d:%d:%d' % (
params['p'].native,
params['q'].native,
params['g'].native,
key.native,
)
elif key_type == 'ec':
key = self['public_key']
if params.name == 'named':
to_hash = '%s:' % params.chosen.native
to_hash = to_hash.encode('utf-8')
to_hash += key.native
elif params.name == 'implicit_ca':
to_hash = key.native
elif params.name == 'specified':
to_hash = '%s:' % params.chosen['field_id']['parameters'].native
to_hash = to_hash.encode('utf-8')
to_hash += b':' + params.chosen['curve']['a'].native
to_hash += b':' + params.chosen['curve']['b'].native
to_hash += key.native
if isinstance(to_hash, str_cls):
to_hash = to_hash.encode('utf-8')
self._fingerprint = hashlib.sha256(to_hash).digest()
return self._fingerprint
| gpl-2.0 | -8,525,391,529,607,395,000 | 27.151004 | 107 | 0.524738 | false |
bhansa/fireball | pyvenv/Lib/site-packages/pip/vcs/bazaar.py | 514 | 3803 | from __future__ import absolute_import
import logging
import os
import tempfile
# TODO: Get this into six.moves.urllib.parse
try:
from urllib import parse as urllib_parse
except ImportError:
import urlparse as urllib_parse
from pip.utils import rmtree, display_path
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
logger = logging.getLogger(__name__)
class Bazaar(VersionControl):
name = 'bzr'
dirname = '.bzr'
repo_name = 'branch'
schemes = (
'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
'bzr+lp',
)
def __init__(self, url=None, *args, **kwargs):
super(Bazaar, self).__init__(url, *args, **kwargs)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
# Register lp but do not expose as a scheme to support bzr+lp.
if getattr(urllib_parse, 'uses_fragment', None):
urllib_parse.uses_fragment.extend(['lp'])
urllib_parse.non_hierarchical.extend(['lp'])
def export(self, location):
"""
Export the Bazaar repository at the url to the destination location
"""
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
if os.path.exists(location):
# Remove the location to make sure Bazaar can export it correctly
rmtree(location)
try:
self.run_command(['export', location], cwd=temp_dir,
show_stdout=False)
finally:
rmtree(temp_dir)
def switch(self, dest, url, rev_options):
self.run_command(['switch', url], cwd=dest)
def update(self, dest, rev_options):
self.run_command(['pull', '-q'] + rev_options, cwd=dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
if rev:
rev_options = ['-r', rev]
rev_display = ' (to revision %s)' % rev
else:
rev_options = []
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
self.run_command(['branch', '-q'] + rev_options + [url, dest])
def get_url_rev(self):
        # Hotfix the URL scheme: the parent strips 'bzr+' from 'bzr+ssh://', so re-add it here
url, rev = super(Bazaar, self).get_url_rev()
if url.startswith('ssh://'):
url = 'bzr+' + url
return url, rev
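    # Illustrative sketch (hypothetical URL): per the comment above, the
    # parent get_url_rev() strips the 'bzr+' prefix, so 'bzr+ssh://host/branch'
    # arrives here as 'ssh://host/branch' and is restored to
    # 'bzr+ssh://host/branch'.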
def get_url(self, location):
urls = self.run_command(['info'], show_stdout=False, cwd=location)
for line in urls.splitlines():
line = line.strip()
for x in ('checkout of branch: ',
'parent branch: '):
if line.startswith(x):
repo = line.split(x)[1]
if self._is_local_repository(repo):
return path_to_url(repo)
return repo
return None
def get_revision(self, location):
revision = self.run_command(
['revno'], show_stdout=False, cwd=location)
return revision.splitlines()[-1]
def get_src_requirement(self, dist, location):
repo = self.get_url(location)
if not repo:
return None
if not repo.lower().startswith('bzr:'):
repo = 'bzr+' + repo
egg_project_name = dist.egg_name().split('-', 1)[0]
current_rev = self.get_revision(location)
return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
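    # Illustrative result (hypothetical values): a branch whose parent URL is
    # 'http://host/proj', at revno 42, with egg name 'proj', yields
    # 'bzr+http://host/proj@42#egg=proj'.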
def check_version(self, dest, rev_options):
"""Always assume the versions don't match"""
return False
vcs.register(Bazaar)
| gpl-3.0 | 5,999,897,277,662,634,000 | 31.784483 | 77 | 0.55614 | false |
codingjoe/viewflow | tests/integration/tests/test_flow_func.py | 2 | 2022 | from django.test import TestCase
from viewflow import flow
from viewflow.base import Flow, this
from .. import integration_test
def create_test_flow(activation):
activation.prepare()
activation.done()
return activation
@flow.flow_func(task_loader=lambda flow_task, process: process.get_task(FunctionFlow.func_task))
def function_task(activation, process):
activation.prepare()
activation.done()
def handler(activation):
pass
class FunctionFlow(Flow):
start = flow.StartFunction(create_test_flow).Next(this.func_task)
default_start = flow.StartFunction().Next(this.func_task)
inline_start = flow.StartFunction().Next(this.func_task)
func_task = flow.Function(function_task).Next(this.handler_task)
handler_task = flow.Handler(handler).Next(this.end)
end = flow.End()
def inline_start_func(self, activation):
activation.prepare()
activation.done()
self.inline_start_func_called = True
return activation
@integration_test
class TestFunctionFlow(TestCase):
def test_function_flow(self):
act = FunctionFlow.start.run()
FunctionFlow.func_task.run(act.process)
tasks = act.process.task_set.all()
self.assertEqual(4, tasks.count())
self.assertTrue(all(task.finished is not None for task in tasks))
def test_function_default_start(self):
act = FunctionFlow.default_start.run()
FunctionFlow.func_task.run(act.process)
tasks = act.process.task_set.all()
self.assertEqual(4, tasks.count())
self.assertTrue(all(task.finished is not None for task in tasks))
def test_function_inline_start(self):
act = FunctionFlow.inline_start.run()
self.assertTrue(getattr(FunctionFlow.instance, 'inline_start_func_called', False))
FunctionFlow.func_task.run(act.process)
tasks = act.process.task_set.all()
self.assertEqual(4, tasks.count())
self.assertTrue(all(task.finished is not None for task in tasks))
| agpl-3.0 | 5,030,956,545,812,475,000 | 29.636364 | 96 | 0.691889 | false |
noroutine/ansible | lib/ansible/playbook/role_include.py | 23 | 5507 |
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from os.path import basename
from ansible.errors import AnsibleParserError
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role import Role
from ansible.playbook.role.include import RoleInclude
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['IncludeRole']
class IncludeRole(TaskInclude):
"""
A Role include is derived from a regular role to handle the special
    circumstances related to the `- include_role: ...` entry.
"""
BASE = ('name', 'role') # directly assigned
FROM_ARGS = ('tasks_from', 'vars_from', 'defaults_from') # used to populate from dict in role
OTHER_ARGS = ('private', 'allow_duplicates') # assigned to matching property
VALID_ARGS = tuple(frozenset(BASE + FROM_ARGS + OTHER_ARGS)) # all valid args
_inheritable = False
# =================================================================================
# ATTRIBUTES
# private as this is a 'module options' vs a task property
_allow_duplicates = FieldAttribute(isa='bool', default=True, private=True)
_private = FieldAttribute(isa='bool', default=None, private=True)
def __init__(self, block=None, role=None, task_include=None):
super(IncludeRole, self).__init__(block=block, role=role, task_include=task_include)
self._from_files = {}
self._parent_role = role
self._role_name = None
self._role_path = None
def get_block_list(self, play=None, variable_manager=None, loader=None):
# only need play passed in when dynamic
if play is None:
myplay = self._parent._play
else:
myplay = play
ri = RoleInclude.load(self._role_name, play=myplay, variable_manager=variable_manager, loader=loader)
ri.vars.update(self.vars)
# build role
actual_role = Role.load(ri, myplay, parent_role=self._parent_role, from_files=self._from_files)
actual_role._metadata.allow_duplicates = self.allow_duplicates
# save this for later use
self._role_path = actual_role._role_path
# compile role with parent roles as dependencies to ensure they inherit
# variables
if not self._parent_role:
dep_chain = []
else:
dep_chain = list(self._parent_role._parents)
dep_chain.append(self._parent_role)
blocks = actual_role.compile(play=myplay, dep_chain=dep_chain)
for b in blocks:
b._parent = self
# updated available handlers in play
handlers = actual_role.get_handler_blocks(play=myplay)
myplay.handlers = myplay.handlers + handlers
return blocks, handlers
@staticmethod
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
ir = IncludeRole(block, role, task_include=task_include).load_data(data, variable_manager=variable_manager, loader=loader)
# Validate options
my_arg_names = frozenset(ir.args.keys())
# name is needed, or use role as alias
ir._role_name = ir.args.get('name', ir.args.get('role'))
if ir._role_name is None:
raise AnsibleParserError("'name' is a required field for include_role.")
# validate bad args, otherwise we silently ignore
bad_opts = my_arg_names.difference(IncludeRole.VALID_ARGS)
if bad_opts:
raise AnsibleParserError('Invalid options for include_role: %s' % ','.join(list(bad_opts)))
# build options for role includes
for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
from_key = key.replace('_from', '')
ir._from_files[from_key] = basename(ir.args.get(key))
# manual list as otherwise the options would set other task parameters we don't want.
for option in my_arg_names.intersection(IncludeRole.OTHER_ARGS):
setattr(ir, option, ir.args.get(option))
return ir
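    # Illustrative playbook entry this loader accepts (hypothetical role and
    # file names):
    #
    #   - include_role:
    #       name: myrole
    #       tasks_from: setup
    #
    # 'tasks_from: setup' lands in ir._from_files as {'tasks': 'setup'}.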
def copy(self, exclude_parent=False, exclude_tasks=False):
new_me = super(IncludeRole, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
new_me.statically_loaded = self.statically_loaded
new_me._from_files = self._from_files.copy()
new_me._parent_role = self._parent_role
new_me._role_name = self._role_name
new_me._role_path = self._role_path
return new_me
def get_include_params(self):
v = super(IncludeRole, self).get_include_params()
if self._parent_role:
v.update(self._parent_role.get_role_params())
return v
| gpl-3.0 | -1,847,802,811,969,058,000 | 36.209459 | 130 | 0.654803 | false |
hnousiainen/pghoard | pghoard/pgutil.py | 2 | 3543 | # Copied from https://github.com/ohmu/ohmu_common_py ohmu_common_py/pgutil.py version 0.0.1-0-unknown-fa54b44
"""
pghoard - postgresql utility functions
Copyright (c) 2015 Ohmu Ltd
See LICENSE for details
"""
try:
from urllib.parse import urlparse, parse_qs # pylint: disable=no-name-in-module, import-error
except ImportError:
from urlparse import urlparse, parse_qs # pylint: disable=no-name-in-module, import-error
def create_connection_string(connection_info):
return " ".join("{}='{}'".format(k, str(v).replace("'", "\\'"))
for k, v in sorted(connection_info.items()))
def mask_connection_info(info):
masked_info = get_connection_info(info)
password = masked_info.pop("password", None)
return "{0}; {1} password".format(
create_connection_string(masked_info),
"no" if password is None else "hidden")
def get_connection_info_from_config_line(line):
_, value = line.split("=", 1)
value = value.strip()[1:-1].replace("''", "'")
return get_connection_info(value)
def get_connection_info(info):
"""turn a connection info object into a dict or return it if it was a
dict already. supports both the traditional libpq format and the new
url format"""
if isinstance(info, dict):
return info.copy()
elif info.startswith("postgres://") or info.startswith("postgresql://"):
return parse_connection_string_url(info)
else:
return parse_connection_string_libpq(info)
def parse_connection_string_url(url):
# drop scheme from the url as some versions of urlparse don't handle
# query and path properly for urls with a non-http scheme
schemeless_url = url.split(":", 1)[1]
p = urlparse(schemeless_url)
fields = {}
if p.hostname:
fields["host"] = p.hostname
if p.port:
fields["port"] = str(p.port)
if p.username:
fields["user"] = p.username
if p.password is not None:
fields["password"] = p.password
if p.path and p.path != "/":
fields["dbname"] = p.path[1:]
for k, v in parse_qs(p.query).items():
fields[k] = v[-1]
return fields
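# Illustrative example (hypothetical values):
#   parse_connection_string_url(
#       'postgres://user:pass@host:5432/db?sslmode=require')
#   -> {'user': 'user', 'password': 'pass', 'host': 'host', 'port': '5432',
#       'dbname': 'db', 'sslmode': 'require'}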
def parse_connection_string_libpq(connection_string):
"""parse a postgresql connection string as defined in
http://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING"""
fields = {}
while True:
connection_string = connection_string.strip()
if not connection_string:
break
if "=" not in connection_string:
raise ValueError("expecting key=value format in connection_string fragment {!r}".format(connection_string))
key, rem = connection_string.split("=", 1)
if rem.startswith("'"):
asis, value = False, ""
for i in range(1, len(rem)):
if asis:
value += rem[i]
asis = False
elif rem[i] == "'":
break # end of entry
elif rem[i] == "\\":
asis = True
else:
value += rem[i]
else:
raise ValueError("invalid connection_string fragment {!r}".format(rem))
connection_string = rem[i + 1:] # pylint: disable=undefined-loop-variable
else:
res = rem.split(None, 1)
if len(res) > 1:
value, connection_string = res
else:
value, connection_string = rem, ""
fields[key] = value
return fields
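# Illustrative example (hypothetical values), showing the backslash escaping
# handled above (note the double backslash in the Python literal, which passes
# a single literal backslash to the parser):
#   parse_connection_string_libpq("host=localhost password='it\\'s'")
#   -> {'host': 'localhost', 'password': "it's"}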
| apache-2.0 | 4,727,380,614,604,684,000 | 34.43 | 119 | 0.594129 | false |
vgkholla/ambry | ambry-tools/scripts/layout-analyzer.py | 7 | 7025 | #!/usr/bin/python2.7
import argparse
import json
import sys
from collections import defaultdict
class Node(object):
def __init__(self, node, datacenter):
self.node = node
self.datacenter = datacenter
self.partitions = set()
@property
def hostname(self):
return self.node["hostname"]
@property
def port(self):
return self.node["port"]
@property
def rack_id(self):
if "rackId" in self.node:
return self.node["rackId"]
return -1
@property
def datacenter_name(self):
return self.datacenter["name"]
def add_partition(self, partition):
self.partitions.add(partition)
def __repr__(self):
return "[hostname: {}, port: {}, dc: {}]".format(
self.hostname, self.port, self.datacenter_name)
__str__ = __repr__
class Partition(object):
def __init__(self, partition):
self.partition = partition
self.nodes_by_datacenter = defaultdict(set)
@property
def id(self):
return self.partition["id"]
def add_node(self, node):
self.nodes_by_datacenter[node.datacenter_name].add(node)
node.add_partition(self)
def racks_used(self, datacenter_name):
return {node.rack_id for node in self.nodes_by_datacenter[datacenter_name]}
def __repr__(self):
return "[id: {}]".format(self.id)
__str__ = __repr__
class Layout(object):
BALANCE_THRESHOLD = 4.0
def __init__(self, hardware_layout_filename, partition_layout_filename):
with open(hardware_layout_filename) as f:
self.hardware_layout = json.load(f)
with open(partition_layout_filename) as f:
self.partition_layout = json.load(f)
self.setup()
def setup(self):
self.node_map = {}
self.partition_map = {}
self.dc_node_combo_map = defaultdict(lambda: defaultdict(set))
for datacenter_struct in self.hardware_layout["datacenters"]:
for node_struct in datacenter_struct["dataNodes"]:
k = (node_struct["hostname"], node_struct["port"])
self.node_map[k] = Node(node_struct, datacenter_struct)
for partition_struct in self.partition_layout["partitions"]:
partition = Partition(partition_struct)
if len(partition_struct["replicas"]) == 0:
raise Exception("No replicas assigned to partition {}".format(partition.id))
for replica_struct in partition_struct["replicas"]:
k = (replica_struct["hostname"], replica_struct["port"])
node = self.node_map[k]
partition.add_node(node)
for dc, nodes in partition.nodes_by_datacenter.items():
self.dc_node_combo_map[dc][frozenset(nodes)].add(partition)
self.partition_map[partition_struct["id"]] = partition
def rack_id(self, node_host, node_port):
k = (node_host, node_port)
if k in self.node_map:
return self.node_map[k].rack_id
raise Exception("Node {}:{} not found".format(node_host, node_port))
def racks_used(self, partition_id, datacenter_name):
return self.partition_map[partition_id].racks_used(datacenter_name)
def shared_partitions(self, *nodes):
return set.intersection(
*(self.node_map[node].partitions for node in nodes)
)
def print_report(self):
for dc, node_combo_map in self.dc_node_combo_map.items():
print("In datacenter: {}".format(dc))
max_combo = max(node_combo_map,
key=lambda k: len(node_combo_map[k]))
avg_per_combo = sum(len(partitions) for partitions in node_combo_map.values()) / float(len(node_combo_map))
max_per_combo = len(node_combo_map[max_combo])
print("Num node combos used: {}".format(len(node_combo_map)))
print("Average partitions sharing a node combo: {}".format(avg_per_combo))
print("Max partitions sharing a node combo: {} on the following nodes:".format(max_per_combo))
for node in max_combo:
print(node)
if (float(max_per_combo) / avg_per_combo) > self.BALANCE_THRESHOLD:
print("The ratio of max to average number of partitions sharing a node combo "
+ "exceeds the threshold: {} on this datacenter".format(self.BALANCE_THRESHOLD))
sum_racks, n_partitions, min_racks = 0, 0, sys.maxsize
for partition in self.partition_map.values():
num_racks = len(partition.racks_used(dc))
n_partitions += 1
sum_racks += num_racks
if num_racks < min_racks:
min_racks = num_racks
print("Min racks used: {}".format(min_racks))
print("Average racks used: {}".format(
float(sum_racks) / n_partitions))
partitions_per_node = [len(node.partitions) for node in self.node_map.values()
if node.datacenter_name == dc]
print("")
def interactive(self):
while True:
cmd = raw_input(">> ").split()
try:
if len(cmd) == 0:
continue
elif cmd[0] == "report":
self.print_report()
elif cmd[0] == "rack_id":
print("Node {}:{} is on rack {}".format(
cmd[1], cmd[2], self.rack_id(cmd[1], int(cmd[2]))))
elif cmd[0] == "racks_used":
print("Partition {} in datacenter {} uses the following racks: {}".format(
cmd[1], cmd[2], self.racks_used(int(cmd[1]), cmd[2])))
elif cmd[0] == "shared_partitions":
args = [(cmd[i + 1], int(cmd[i + 2])) for i in range(0, len(cmd) - 1, 2)]
print("The following nodes:")
for hostname, port in args:
print(" {}:{}".format(hostname, port))
print("share the following partitions:")
print(self.shared_partitions(*args))
else:
print("Command not recognized")
except Exception:
print("Invalid input")
print("")
def main():
parser = argparse.ArgumentParser(
description='Analyze node distribution in a partition layout')
parser.add_argument("--interactive", "-i", action="store_true")
parser.add_argument('hardware_layout',
help='the path to the hardware layout file')
parser.add_argument('partition_layout',
help='the path to the partition layout file')
args = parser.parse_args()
layout = Layout(args.hardware_layout, args.partition_layout)
if args.interactive:
layout.interactive()
else:
layout.print_report()
if __name__ == "__main__":
main()
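# Illustrative invocations (hypothetical file names):
#   ./layout-analyzer.py hardware.json partitions.json       # one-shot report
#   ./layout-analyzer.py -i hardware.json partitions.json    # interactive mode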
| apache-2.0 | -2,986,022,109,325,995,000 | 36.768817 | 119 | 0.560142 | false |
mlperf/training_results_v0.7 | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/topi/python/topi/testing/roi_align_python.py | 2 | 3126 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, too-many-nested-blocks
"Roi align in python"
import math
import numpy as np
def roi_align_nchw_python(a_np, rois_np, pooled_size, spatial_scale, sample_ratio):
"""Roi align in python"""
_, channel, height, width = a_np.shape
num_roi = rois_np.shape[0]
b_np = np.zeros((num_roi, channel, pooled_size, pooled_size), dtype=a_np.dtype)
if isinstance(pooled_size, int):
pooled_size_h = pooled_size_w = pooled_size
else:
pooled_size_h, pooled_size_w = pooled_size
def _bilinear(b, c, y, x):
if y < -1 or y > height or x < -1 or x > width:
return 0
y = max(y, 0.0)
x = max(x, 0.0)
y_low = int(y)
x_low = int(x)
y_high = min(y_low + 1, height - 1)
x_high = min(x_low + 1, width - 1)
ly = y - y_low
lx = x - x_low
return (1 - ly) * (1 - lx) * a_np[b, c, y_low, x_low] + \
(1 - ly) * lx * a_np[b, c, y_low, x_high] + \
ly * (1 - lx) * a_np[b, c, y_high, x_low] + \
ly * lx * a_np[b, c, y_high, x_high]
for i in range(num_roi):
roi = rois_np[i]
batch_index = int(roi[0])
roi_start_w, roi_start_h, roi_end_w, roi_end_h = roi[1:] * spatial_scale
roi_h = max(roi_end_h - roi_start_h, 1.0)
roi_w = max(roi_end_w - roi_start_w, 1.0)
bin_h = roi_h / pooled_size_h
bin_w = roi_w / pooled_size_w
if sample_ratio > 0:
roi_bin_grid_h = roi_bin_grid_w = int(sample_ratio)
else:
            roi_bin_grid_h = int(math.ceil(roi_h / pooled_size_h))
            roi_bin_grid_w = int(math.ceil(roi_w / pooled_size_w))
count = roi_bin_grid_h * roi_bin_grid_w
for c in range(channel):
for ph in range(pooled_size_h):
for pw in range(pooled_size_w):
total = 0.
for iy in range(roi_bin_grid_h):
for ix in range(roi_bin_grid_w):
y = roi_start_h + ph * bin_h + (iy + 0.5) * bin_h / roi_bin_grid_h
x = roi_start_w + pw * bin_w + (ix + 0.5) * bin_w / roi_bin_grid_w
total += _bilinear(batch_index, c, y, x)
b_np[i, c, ph, pw] = total / count
return b_np
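# Illustrative call (hypothetical shapes): a single-channel 8x8 feature map
# and one ROI covering its top-left quarter, pooled to 2x2:
#
#   a = np.random.rand(1, 1, 8, 8).astype('float32')
#   rois = np.array([[0, 0, 0, 4, 4]], dtype='float32')  # [batch, x1, y1, x2, y2]
#   out = roi_align_nchw_python(a, rois, 2, 1.0, 2)
#   out.shape  # -> (1, 1, 2, 2)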
| apache-2.0 | 9,151,436,490,313,603,000 | 38.56962 | 94 | 0.559181 | false |
zhouxiao-coder/Paddle | demo/introduction/trainer_config.py | 3 | 1344 | # Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.trainer_config_helpers import *
# 1. read data. Suppose you saved above python code as dataprovider.py
data_file = 'empty.list'
with open(data_file, 'w') as f:
f.writelines(' ')
define_py_data_sources2(
train_list=data_file,
test_list=None,
module='dataprovider',
obj='process',
args={})
# 2. learning algorithm
settings(batch_size=12, learning_rate=1e-3, learning_method=MomentumOptimizer())
# 3. Network configuration
x = data_layer(name='x', size=1)
y = data_layer(name='y', size=1)
y_predict = fc_layer(
input=x,
param_attr=ParamAttr(name='w'),
size=1,
act=LinearActivation(),
bias_attr=ParamAttr(name='b'))
cost = regression_cost(input=y_predict, label=y)
outputs(cost)
| apache-2.0 | 7,014,155,936,608,804,000 | 31.780488 | 80 | 0.720238 | false |
caesar2164/edx-platform | common/djangoapps/track/transformers.py | 51 | 16677 | """
EventTransformers are data structures that represents events, and modify those
events to match the format desired for the tracking logs. They are registered
by name (or name prefix) in the EventTransformerRegistry, which is used to
apply them to the appropriate events.
"""
import json
import logging
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import UsageKey
log = logging.getLogger(__name__)
class DottedPathMapping(object):
"""
    Dictionary-like object keyed by dotted paths.
    If a key is registered that ends with a dot, it is treated as a path
    prefix: any lookup key that starts with that prefix resolves to the
    prefix's value, and the most specific (longest) matching prefix wins.
"""
# TODO: The current implementation of the prefix registry requires
# O(number of prefix event transformers) to access an event. If we get a
# large number of EventTransformers, it may be worth writing a tree-based
# map structure where each node is a segment of the match key, which would
# reduce access time to O(len(match.key.split('.'))), or essentially constant
# time.
def __init__(self, registry=None):
self._match_registry = {}
self._prefix_registry = {}
self.update(registry or {})
def __contains__(self, key):
try:
_ = self[key]
return True
except KeyError:
return False
def __getitem__(self, key):
if key in self._match_registry:
return self._match_registry[key]
if isinstance(key, basestring):
# Reverse-sort the keys to find the longest matching prefix.
for prefix in sorted(self._prefix_registry, reverse=True):
if key.startswith(prefix):
return self._prefix_registry[prefix]
raise KeyError('Key {} not found in {}'.format(key, type(self)))
def __setitem__(self, key, value):
if key.endswith('.'):
self._prefix_registry[key] = value
else:
self._match_registry[key] = value
def __delitem__(self, key):
if key.endswith('.'):
del self._prefix_registry[key]
else:
del self._match_registry[key]
def get(self, key, default=None):
"""
        Return `self[key]` if it exists; otherwise return `default` (which
        defaults to `None`).
        """
        try:
            return self[key]
        except KeyError:
            return default
def update(self, dict_):
"""
Update the mapping with the values in the supplied `dict`.
"""
        for key, value in dict_.items():
self[key] = value
def keys(self):
"""
Return the keys of the mapping, including both exact matches and
prefix matches.
"""
return self._match_registry.keys() + self._prefix_registry.keys()
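# Illustrative behavior sketch (hypothetical event names) - the longest
# registered prefix wins:
#
#   m = DottedPathMapping()
#   m['edx.ui.lms.'] = 'lms'
#   m['edx.ui.lms.sequence.'] = 'sequence'
#   m['edx.ui.lms.sequence.tab_selected']  # -> 'sequence'
#   m['edx.ui.lms.problem.checked']        # -> 'lms'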
class EventTransformerRegistry(object):
"""
Registry to track which EventTransformers handle which events. The
EventTransformer must define a `match_key` attribute which contains the
name or prefix of the event names it tracks. Any `match_key` that ends
with a `.` will match all events that share its prefix. A transformer name
without a trailing dot only processes exact matches.
"""
mapping = DottedPathMapping()
@classmethod
def register(cls, transformer):
"""
Decorator to register an EventTransformer. It must have a `match_key`
class attribute defined.
"""
cls.mapping[transformer.match_key] = transformer
return transformer
@classmethod
def create_transformer(cls, event):
"""
Create an EventTransformer of the given event.
If no transformer is registered to handle the event, this raises a
KeyError.
"""
name = event.get(u'name')
return cls.mapping[name](event)
class EventTransformer(dict):
"""
Creates a transformer to modify analytics events based on event type.
To use the transformer, instantiate it using the
    `EventTransformerRegistry.create_transformer()` classmethod with the event
dictionary as the sole argument, and then call `transformer.transform()` on
the created object to modify the event to the format required for output.
Custom transformers will want to define some or all of the following values
Attributes:
match_key:
This is the name of the event you want to transform. If the name
ends with a `'.'`, it will be treated as a *prefix transformer*.
All other names denote *exact transformers*.
A *prefix transformer* will handle any event whose name begins with
the name of the prefix transformer. Only the most specific match
will be used, so if a transformer exists with a name of
`'edx.ui.lms.'` and another transformer has the name
`'edx.ui.lms.sequence.'` then an event called
`'edx.ui.lms.sequence.tab_selected'` will be handled by the
`'edx.ui.lms.sequence.'` transformer.
An *exact transformer* will only handle events whose name matches
name of the transformer exactly.
Exact transformers always take precedence over prefix transformers.
Transformers without a name will not be added to the registry, and
            cannot be accessed via the `EventTransformerRegistry.create_transformer()`
classmethod.
is_legacy_event:
If an event is a legacy event, it needs to set event_type to the
legacy name for the event, and may need to set certain event fields
            to maintain backward compatibility. If an event needs to provide
legacy support in some contexts, `is_legacy_event` can be defined
as a property to add dynamic behavior.
Default: False
legacy_event_type:
If the event is or can be a legacy event, it should define
the legacy value for the event_type field here.
Processing methods. Override these to provide the behavior needed for your
particular EventTransformer:
self.process_legacy_fields():
This method should modify the event payload in any way necessary to
support legacy event types. It will only be run if
`is_legacy_event` returns a True value.
self.process_event()
This method modifies the event payload unconditionally. It will
always be run.
"""
def __init__(self, *args, **kwargs):
super(EventTransformer, self).__init__(*args, **kwargs)
self.load_payload()
# Properties to be overridden
is_legacy_event = False
@property
def legacy_event_type(self):
"""
Override this as an attribute or property to provide the value for
the event's `event_type`, if it does not match the event's `name`.
"""
raise NotImplementedError
# Convenience properties
@property
def name(self):
"""
Returns the event's name.
"""
return self[u'name']
@property
def context(self):
"""
Returns the event's context dict.
"""
return self.get(u'context', {})
# Transform methods
def load_payload(self):
"""
Create a data version of self[u'event'] at self.event
"""
if u'event' in self:
if isinstance(self[u'event'], basestring):
self.event = json.loads(self[u'event'])
else:
self.event = self[u'event']
def dump_payload(self):
"""
Write self.event back to self[u'event'].
Keep the same format we were originally given.
"""
if isinstance(self.get(u'event'), basestring):
self[u'event'] = json.dumps(self.event)
else:
self[u'event'] = self.event
def transform(self):
"""
Transform the event with legacy fields and other necessary
modifications.
"""
if self.is_legacy_event:
self._set_legacy_event_type()
self.process_legacy_fields()
self.process_event()
self.dump_payload()
def _set_legacy_event_type(self):
"""
Update the event's `event_type` to the value specified by
`self.legacy_event_type`.
"""
self['event_type'] = self.legacy_event_type
def process_legacy_fields(self):
"""
Override this method to specify how to update event fields to maintain
compatibility with legacy events.
"""
pass
def process_event(self):
"""
Override this method to make unconditional modifications to event
fields.
"""
pass
@EventTransformerRegistry.register
class SequenceTabSelectedEventTransformer(EventTransformer):
"""
    Transformer to maintain backward compatibility with seq_goto events.
"""
match_key = u'edx.ui.lms.sequence.tab_selected'
is_legacy_event = True
legacy_event_type = u'seq_goto'
def process_legacy_fields(self):
self.event[u'old'] = self.event[u'current_tab']
self.event[u'new'] = self.event[u'target_tab']
class _BaseLinearSequenceEventTransformer(EventTransformer):
"""
Common functionality for transforming
`edx.ui.lms.sequence.{next,previous}_selected` events.
"""
offset = None
@property
def is_legacy_event(self):
"""
Linear sequence events are legacy events if the origin and target lie
within the same sequence.
"""
return not self.crosses_boundary()
def process_legacy_fields(self):
"""
Set legacy payload fields:
old: equal to the new current_tab field
new: the tab to which the user is navigating
"""
self.event[u'old'] = self.event[u'current_tab']
self.event[u'new'] = self.event[u'current_tab'] + self.offset
def crosses_boundary(self):
"""
Returns true if the navigation takes the focus out of the current
sequence.
"""
raise NotImplementedError
@EventTransformerRegistry.register
class NextSelectedEventTransformer(_BaseLinearSequenceEventTransformer):
"""
    Transformer to maintain backward compatibility with seq_next events.
"""
match_key = u'edx.ui.lms.sequence.next_selected'
offset = 1
legacy_event_type = u'seq_next'
def crosses_boundary(self):
"""
Returns true if the navigation moves the focus to the next sequence.
"""
return self.event[u'current_tab'] == self.event[u'tab_count']
@EventTransformerRegistry.register
class PreviousSelectedEventTransformer(_BaseLinearSequenceEventTransformer):
"""
    Transformer to maintain backward compatibility with seq_prev events.
"""
match_key = u'edx.ui.lms.sequence.previous_selected'
offset = -1
legacy_event_type = u'seq_prev'
def crosses_boundary(self):
"""
Returns true if the navigation moves the focus to the previous
sequence.
"""
return self.event[u'current_tab'] == 1
@EventTransformerRegistry.register
class VideoEventTransformer(EventTransformer):
"""
Converts new format video events into the legacy video event format.
Mobile devices cannot actually emit events that exactly match their
counterparts emitted by the LMS javascript video player. Instead of
attempting to get them to do that, we instead insert a transformer here
    that takes the events they *can* easily emit and converts them into the
    legacy format.
"""
match_key = u'edx.video.'
name_to_event_type_map = {
u'edx.video.played': u'play_video',
u'edx.video.paused': u'pause_video',
u'edx.video.stopped': u'stop_video',
u'edx.video.loaded': u'load_video',
u'edx.video.position.changed': u'seek_video',
u'edx.video.seeked': u'seek_video',
u'edx.video.transcript.shown': u'show_transcript',
u'edx.video.transcript.hidden': u'hide_transcript',
u'edx.video.language_menu.shown': u'video_show_cc_menu',
u'edx.video.language_menu.hidden': u'video_hide_cc_menu',
}
is_legacy_event = True
@property
def legacy_event_type(self):
"""
Return the legacy event_type of the current event
"""
return self.name_to_event_type_map[self.name]
def transform(self):
"""
Transform the event with necessary modifications if it is one of the
expected types of events.
"""
if self.name in self.name_to_event_type_map:
super(VideoEventTransformer, self).transform()
def process_event(self):
"""
Modify event fields.
"""
# Convert edx.video.seeked to edx.video.position.changed because edx.video.seeked was not intended to actually
# ever be emitted.
if self.name == "edx.video.seeked":
self['name'] = "edx.video.position.changed"
if not self.event:
return
self.set_id_to_usage_key()
self.capcase_current_time()
self.convert_seek_type()
self.disambiguate_skip_and_seek()
self.set_page_to_browser_url()
self.handle_ios_seek_bug()
def set_id_to_usage_key(self):
"""
Validate that the module_id is a valid usage key, and set the id field
accordingly.
"""
if 'module_id' in self.event:
module_id = self.event['module_id']
try:
usage_key = UsageKey.from_string(module_id)
except InvalidKeyError:
log.warning('Unable to parse module_id "%s"', module_id, exc_info=True)
else:
self.event['id'] = usage_key.html_id()
del self.event['module_id']
def capcase_current_time(self):
"""
Convert the current_time field to currentTime.
"""
if 'current_time' in self.event:
self.event['currentTime'] = self.event.pop('current_time')
def convert_seek_type(self):
"""
Converts seek_type to seek and converts skip|slide to
onSlideSeek|onSkipSeek.
"""
if 'seek_type' in self.event:
seek_type = self.event['seek_type']
if seek_type == 'slide':
self.event['type'] = "onSlideSeek"
elif seek_type == 'skip':
self.event['type'] = "onSkipSeek"
del self.event['seek_type']
def disambiguate_skip_and_seek(self):
"""
For the Android build that isn't distinguishing between skip/seek.
"""
if 'requested_skip_interval' in self.event:
if abs(self.event['requested_skip_interval']) != 30:
if 'type' in self.event:
self.event['type'] = 'onSlideSeek'
def set_page_to_browser_url(self):
"""
If `open_in_browser_url` is specified, set the page to the base of the
specified url.
"""
if 'open_in_browser_url' in self.context:
self['page'] = self.context.pop('open_in_browser_url').rpartition('/')[0]
def handle_ios_seek_bug(self):
"""
Handle seek bug in iOS.
iOS build 1.0.02 has a bug where it returns a +30 second skip when
it should be returning -30.
"""
if self._build_requests_plus_30_for_minus_30():
if self._user_requested_plus_30_skip():
self.event[u'requested_skip_interval'] = -30
def _build_requests_plus_30_for_minus_30(self):
"""
Returns True if this build contains the seek bug
"""
if u'application' in self.context:
if all(key in self.context[u'application'] for key in (u'version', u'name')):
app_version = self.context[u'application'][u'version']
app_name = self.context[u'application'][u'name']
return app_version == u'1.0.02' and app_name == u'edx.mobileapp.iOS'
return False
def _user_requested_plus_30_skip(self):
"""
If the user requested a +30 second skip, return True.
"""
if u'requested_skip_interval' in self.event and u'type' in self.event:
interval = self.event[u'requested_skip_interval']
action = self.event[u'type']
return interval == 30 and action == u'onSkipSeek'
else:
return False
| agpl-3.0 | -4,993,183,082,950,215,000 | 32.089286 | 118 | 0.611261 | false |
luci/luci-py | appengine/swarming/server/task_request_test.py | 2 | 86434 | #!/usr/bin/env vpython
# Copyright 2014 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import datetime
import logging
import random
import string
import sys
import unittest
import test_env
test_env.setup_test_env()
from google.protobuf import duration_pb2
from google.protobuf import timestamp_pb2
from google.appengine.api import datastore_errors
from google.appengine.ext import ndb
from components import auth_testing
from components import utils
from test_support import test_case
from proto.api import swarming_pb2
from server import bq_state
from server import config
from server import pools_config
from server import task_pack
from server import task_request
# pylint: disable=W0212
def _gen_cipd_input(**kwargs):
"""Creates a CipdInput."""
args = {
u'client_package':
task_request.CipdPackage(
package_name=u'infra/tools/cipd/${platform}',
version=u'git_revision:deadbeef'),
u'packages': [
task_request.CipdPackage(
package_name=u'rm', path=u'bin',
version=u'git_revision:deadbeef'),
],
u'server':
u'https://chrome-infra-packages.appspot.com'
}
args.update(kwargs)
return task_request.CipdInput(**args)
def _gen_properties(**kwargs):
"""Creates a TaskProperties."""
args = {
u'cipd_input':
_gen_cipd_input(),
u'command': [u'command1', u'arg1'],
u'containment': {
u'lower_priority': False,
u'containment_type': None,
u'limit_processes': None,
u'limit_total_committed_memory': None,
},
u'dimensions': {
u'OS': [u'Windows-3.1.1'],
u'hostname': [u'localhost'],
u'pool': [u'default'],
},
u'env': {
u'foo': u'bar',
u'joe': u'2'
},
u'env_prefixes': {
u'PATH': [u'local/path']
},
u'execution_timeout_secs':
30,
u'grace_period_secs':
30,
u'idempotent':
False,
u'inputs_ref':
task_request.FilesRef(
isolatedserver=u'https://isolateserver.appspot.com',
namespace=u'default-gzip'),
u'io_timeout_secs':
None,
u'has_secret_bytes':
False,
}
args.update(kwargs)
args[u'dimensions_data'] = args.pop(u'dimensions')
return task_request.TaskProperties(**args)
def _gen_request_slices(**kwargs):
"""Creates a TaskRequest."""
template_apply = kwargs.pop('_template_apply', task_request.TEMPLATE_AUTO)
now = utils.utcnow()
args = {
u'created_ts': now,
u'manual_tags': [u'tag:1'],
u'name': u'Request name',
u'priority': 50,
u'task_slices': [
task_request.TaskSlice(
expiration_secs=30, properties=_gen_properties()),
],
u'user': u'Jesus',
u'bot_ping_tolerance_secs': 120,
}
args.update(kwargs)
# Note that ndb model constructor accepts dicts for structured properties.
req = task_request.TaskRequest(**args)
task_request.init_new_request(req, True, template_apply)
return req
def _gen_request(properties=None, **kwargs):
"""Creates a TaskRequest with a single TaskSlice."""
return _gen_request_slices(
task_slices=[
task_request.TaskSlice(
expiration_secs=30, properties=properties or _gen_properties()),
],
**kwargs)
def _gen_secret(req, secret_bytes):
assert req.key
sb = task_request.SecretBytes(secret_bytes=secret_bytes)
sb.key = req.secret_bytes_key
return sb
def _gen_task_template(cache=None, cipd_package=None, env=None):
"""Builds an unverified pools_config.TaskTemplate for use with
_set_pool_config_with_templates.
Args:
cache (None|dict{name: path}) - cache entries to set.
cipd_package (None|dict{(path, pkg): version}) - cipd packages to set.
env (None|dict{var: value|(value, prefix)|(value, prefix, soft)}) -
envvars to set. The key is always the envvar to set, and the value may
be:
* the envvar value as a string (prefix=() and soft=False)
* A (value, prefix) tuple (soft=False)
* A (value, prefix, soft) tuple
Returns constructed pools_config.TaskTemplate.
"""
def env_value(var, combo_value):
prefix, soft = (), False
if isinstance(combo_value, tuple):
assert len(combo_value) in (2, 3), ('unexpected tuple length: %r' %
combo_value)
if len(combo_value) == 2:
value, prefix = combo_value
else:
value, prefix, soft = combo_value
else:
value = unicode(combo_value)
return pools_config.Env(var, value, tuple(map(unicode, prefix)), soft)
return pools_config.TaskTemplate(
cache=sorted(
pools_config.CacheEntry(unicode(name), unicode(path))
for name, path in (cache or {}).items()),
cipd_package=sorted(
pools_config.CipdPackage(
unicode(path), unicode(pkg), unicode(version))
for (path, pkg), version in (cipd_package or {}).items()),
env=sorted(
env_value(unicode(var), value) for var, value in (env or {}).items()),
inclusions=(),
)
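# Illustrative call (hypothetical values), using the env formats described in
# the docstring above:
#
#   _gen_task_template(
#       cache={'git': 'cache/git'},
#       cipd_package={('bin', 'infra/tools/foo'): 'latest'},
#       env={'PATH': ('1', ['bin'], True)})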
class Prop(object):
_name = 'foo'
class TestCase(test_case.TestCase):
def setUp(self):
super(TestCase, self).setUp()
auth_testing.mock_get_current_identity(self)
class TaskRequestPrivateTest(TestCase):
def test_validate_task_run_id(self):
self.assertEqual(
'1d69b9f088008811',
task_request._validate_task_run_id(Prop(), '1d69b9f088008811'))
self.assertEqual(None, task_request._validate_task_run_id(Prop(), ''))
with self.assertRaises(ValueError):
task_request._validate_task_run_id(Prop(), '1')
def test_validate_cas_instance(self):
valid_cas_instance = 'projects/chromium-swarm/instances/default_instance'
self.assertEqual(
valid_cas_instance,
task_request._validate_cas_instance(Prop(), valid_cas_instance))
self.assertEqual(None, task_request._validate_cas_instance(Prop(), ''))
with self.assertRaises(datastore_errors.BadValueError):
task_request._validate_cas_instance(Prop(), 'invalid')
def test_apply_template_simple(self):
tt = _gen_task_template(
cache={'cache': 'c'},
cipd_package={('cipd', 'some/pkg'): 'latest'},
env={'ENV': ('1', ['a'])},
)
p = task_request.TaskProperties()
task_request._apply_task_template(tt, p)
self.assertEqual(
p,
task_request.TaskProperties(
env={u'ENV': u'1'},
env_prefixes={u'ENV': [u'a']},
caches=[task_request.CacheEntry(name=u'cache', path=u'c')],
cipd_input=task_request.CipdInput(packages=[
task_request.CipdPackage(
package_name=u'some/pkg', path=u'cipd', version=u'latest')
])))
def test_apply_template_env_set_error(self):
tt = _gen_task_template(env={'ENV': ('1', ['a'])})
p = task_request.TaskProperties(env={u'ENV': u'10'})
with self.assertRaises(ValueError) as ex:
task_request._apply_task_template(tt, p)
self.assertEqual(ex.exception.message,
"request.env[u'ENV'] conflicts with pool's template")
def test_apply_template_env_prefix_set_error(self):
tt = _gen_task_template(env={'ENV': ('1', ['a'])})
p = task_request.TaskProperties(env_prefixes={u'ENV': [u'b']})
with self.assertRaises(ValueError) as ex:
task_request._apply_task_template(tt, p)
self.assertEqual(
ex.exception.message,
"request.env_prefixes[u'ENV'] conflicts with pool's template")
def test_apply_template_env_override_soft(self):
tt = _gen_task_template(env={'ENV': ('1', ['a'], True)})
p = task_request.TaskProperties(env={u'ENV': u'2'})
task_request._apply_task_template(tt, p)
self.assertEqual(
p,
task_request.TaskProperties(
env={u'ENV': u'2'},
env_prefixes={u'ENV': [u'a']},
))
def test_apply_template_env_prefixes_append_soft(self):
tt = _gen_task_template(env={'ENV': ('1', ['a'], True)})
p = task_request.TaskProperties(env_prefixes={u'ENV': [u'b']})
task_request._apply_task_template(tt, p)
self.assertEqual(
p,
task_request.TaskProperties(
env={u'ENV': u'1'},
env_prefixes={u'ENV': [u'a', u'b']},
))
def test_apply_template_conflicting_cache(self):
tt = _gen_task_template(cache={'c': 'C'})
p = task_request.TaskProperties(
caches=[task_request.CacheEntry(name='c', path='B')])
with self.assertRaises(ValueError) as ex:
task_request._apply_task_template(tt, p)
self.assertEqual(ex.exception.message,
"request.cache['c'] conflicts with pool's template")
def test_apply_template_conflicting_cache_path(self):
tt = _gen_task_template(cache={'c': 'C'})
p = task_request.TaskProperties(
caches=[task_request.CacheEntry(name='other', path='C')])
with self.assertRaises(ValueError) as ex:
task_request._apply_task_template(tt, p)
self.assertEqual(
ex.exception.message,
"u'C': directory has conflicting owners: task cache 'other' "
"and task template cache u'c'")
def test_apply_template_conflicting_cache_cipd_path(self):
tt = _gen_task_template(cache={'c': 'C'})
p = task_request.TaskProperties(
cipd_input=task_request.CipdInput(packages=[
task_request.CipdPackage(
path='C', package_name='pkg', version='latest')
]))
with self.assertRaises(ValueError) as ex:
task_request._apply_task_template(tt, p)
self.assertEqual(
ex.exception.message,
"u'C': directory has conflicting owners: task cipd['pkg:latest'] "
"and task template cache u'c'")
def test_apply_template_conflicting_cipd_package(self):
tt = _gen_task_template(cipd_package={('C', 'pkg'): 'latest'})
p = task_request.TaskProperties(
cipd_input=task_request.CipdInput(packages=[
task_request.CipdPackage(
path='C', package_name='other', version='latest')
]))
with self.assertRaises(ValueError) as ex:
task_request._apply_task_template(tt, p)
self.assertEqual(
ex.exception.message,
"u'C': directory has conflicting owners: task cipd['other:latest'] "
"and task template cipd[u'pkg:latest']")
def test_apply_template_conflicting_cipd_cache_path(self):
tt = _gen_task_template(cipd_package={('C', 'pkg'): 'latest'})
p = task_request.TaskProperties(
caches=[task_request.CacheEntry(name='other', path='C')])
with self.assertRaises(ValueError) as ex:
task_request._apply_task_template(tt, p)
self.assertEqual(
ex.exception.message,
"u'C': directory has conflicting owners: task cache 'other' "
"and task template cipd[u'pkg:latest']")
class TaskRequestApiTest(TestCase):
def setUp(self):
super(TaskRequestApiTest, self).setUp()
# pool_configs is a mapping of pool name -> pools_config.PoolConfig. Tests
# can modify this to have pools_config.get_pool_config return the
# appropriate data.
self._pool_configs = {}
self.mock(pools_config, 'get_pool_config', self._pool_configs.get)
self._enqueue_calls = []
self._enqueue_orig = self.mock(utils, 'enqueue_task', self._enqueue)
def tearDown(self):
try:
self.assertFalse(self._enqueue_calls)
finally:
super(TaskRequestApiTest, self).tearDown()
def _enqueue(self, *args, **kwargs):
self._enqueue_calls.append((args, kwargs))
return self._enqueue_orig(*args, use_dedicated_module=False, **kwargs)
def test_all_apis_are_tested(self):
# Ensures there's a test for each public API.
module = task_request
expected = frozenset(
i for i in dir(module)
if i[0] != '_' and hasattr(getattr(module, i), 'func_name'))
missing = expected - frozenset(
i[5:] for i in dir(self) if i.startswith('test_'))
self.assertFalse(missing)
def test_get_automatic_tags(self):
req = _gen_request()
expected = set((u'hostname:localhost', u'OS:Windows-3.1.1', u'pool:default',
u'priority:50', u'service_account:none', u'user:Jesus',
u'use_isolate_1143123:1', u'use_cas_1143123:0'))
self.assertEqual(expected, task_request.get_automatic_tags(req, 0))
with self.assertRaises(IndexError):
task_request.get_automatic_tags(req, 1)
def test_get_automatic_tags_slices(self):
# Repeated TaskSlice.
slices = [
task_request.TaskSlice(
expiration_secs=60,
properties=_gen_properties(dimensions={
u'gpu': [u'1234:5678'],
u'pool': [u'GPU']
})),
task_request.TaskSlice(
expiration_secs=60,
properties=_gen_properties(dimensions={
u'gpu': [u'none'],
u'pool': [u'GPU']
})),
]
req = _gen_request_slices(task_slices=slices)
expected = set(
(u'gpu:1234:5678', u'pool:GPU', u'priority:50', u'service_account:none',
u'user:Jesus', u'use_isolate_1143123:1', u'use_cas_1143123:0'))
self.assertEqual(expected, task_request.get_automatic_tags(req, 0))
expected = set(
(u'gpu:none', u'pool:GPU', u'priority:50', u'service_account:none',
u'user:Jesus', u'use_isolate_1143123:1', u'use_cas_1143123:0'))
self.assertEqual(expected, task_request.get_automatic_tags(req, 1))
with self.assertRaises(IndexError):
task_request.get_automatic_tags(req, 2)
def test_get_automatic_tags_or_dim(self):
slices = [
task_request.TaskSlice(
expiration_secs=60,
properties=_gen_properties(dimensions={
u'gpu': [u'nv|amd'],
u'pool': [u'foo']
})),
task_request.TaskSlice(
expiration_secs=60,
properties=_gen_properties(dimensions={
u'os': [u'linux|mac|win'],
u'pool': [u'bar']
})),
]
req = _gen_request_slices(task_slices=slices)
expected = set((u'gpu:nv', u'gpu:amd', u'pool:foo', u'priority:50',
u'service_account:none', u'user:Jesus',
u'use_isolate_1143123:1', u'use_cas_1143123:0'))
self.assertEqual(expected, task_request.get_automatic_tags(req, 0))
expected = set((u'os:linux', u'os:mac', u'os:win', u'pool:bar',
u'priority:50', u'service_account:none', u'user:Jesus',
u'use_isolate_1143123:1', u'use_cas_1143123:0'))
self.assertEqual(expected, task_request.get_automatic_tags(req, 1))
def test_create_termination_task(self):
request = task_request.create_termination_task(
u'some-bot', wait_for_capacity=True)
self.assertTrue(request.task_slice(0).properties.is_terminate)
def test_new_request_key(self):
for _ in range(3):
delta = utils.utcnow() - task_request._BEGINING_OF_THE_WORLD
now = int(round(delta.total_seconds() * 1000.))
key = task_request.new_request_key()
# Remove the XOR.
key_id = key.integer_id() ^ task_pack.TASK_REQUEST_KEY_ID_MASK
timestamp = key_id >> 20
randomness = (key_id >> 4) & 0xFFFF
version = key_id & 0xF
self.assertLess(abs(timestamp - now), 1000)
self.assertEqual(1, version)
if randomness:
break
else:
self.fail('Failed to find randomness')
def test_new_request_key_zero(self):
def getrandbits(i):
self.assertEqual(i, 16)
return 0x7766
self.mock(random, 'getrandbits', getrandbits)
self.mock_now(task_request._BEGINING_OF_THE_WORLD)
key = task_request.new_request_key()
# Remove the XOR.
key_id = key.integer_id() ^ task_pack.TASK_REQUEST_KEY_ID_MASK
# 00000000000 7766 1
# ^ ^ ^
# | | |
# since 2010 | schema version
# |
# rand
self.assertEqual('0x0000000000077661', '0x%016x' % key_id)
def test_new_request_key_end(self):
def getrandbits(i):
self.assertEqual(i, 16)
return 0x7766
self.mock(random, 'getrandbits', getrandbits)
days_until_end_of_the_world = 2**43 / 24. / 60. / 60. / 1000.
num_days = int(days_until_end_of_the_world)
# Remove 1ms to not overflow.
num_seconds = ((days_until_end_of_the_world - num_days) * 24. * 60. * 60. -
0.001)
self.assertEqual(101806, num_days)
self.assertEqual(278, int(num_days / 365.3))
now = (
task_request._BEGINING_OF_THE_WORLD +
datetime.timedelta(days=num_days, seconds=num_seconds))
self.mock_now(now)
key = task_request.new_request_key()
# Remove the XOR.
key_id = key.integer_id() ^ task_pack.TASK_REQUEST_KEY_ID_MASK
# 7ffffffffff 7766 1
# ^ ^ ^
# | | |
# since 2010 | schema version
# |
# rand
self.assertEqual('0x7ffffffffff77661', '0x%016x' % key_id)
def test_validate_request_key(self):
task_request.validate_request_key(task_pack.unpack_request_key('11'))
with self.assertRaises(ValueError):
task_request.validate_request_key(ndb.Key('TaskRequest', 1))
def test_init_new_request(self):
parent = _gen_request()
# Parent entity must have a valid key id and be stored.
parent.key = task_request.new_request_key()
parent.put()
# The reference is to the TaskRunResult.
parent_id = task_pack.pack_request_key(parent.key) + u'1'
req = _gen_request(
properties=_gen_properties(
idempotent=True, relative_cwd=u'deeep', has_secret_bytes=True),
parent_task_id=parent_id)
# TaskRequest with secret must have a valid key.
req.key = task_request.new_request_key()
# Needed for the get() call below.
req.put()
sb = _gen_secret(req, 'I am a banana')
# Needed for properties_hash() call.
sb.put()
expected_properties = {
'caches': [],
'cipd_input': {
'client_package': {
'package_name': u'infra/tools/cipd/${platform}',
'path': None,
'version': u'git_revision:deadbeef',
},
'packages': [{
'package_name': u'rm',
'path': u'bin',
'version': u'git_revision:deadbeef',
}],
'server': u'https://chrome-infra-packages.appspot.com'
},
'command': [u'command1', u'arg1'],
'containment': {
u'lower_priority': False,
u'containment_type': None,
u'limit_processes': None,
u'limit_total_committed_memory': None,
},
'relative_cwd': u'deeep',
'dimensions': {
u'OS': [u'Windows-3.1.1'],
u'hostname': [u'localhost'],
u'pool': [u'default'],
},
'env': {
u'foo': u'bar',
u'joe': u'2'
},
'env_prefixes': {
u'PATH': [u'local/path']
},
'extra_args': [],
'execution_timeout_secs': 30,
'grace_period_secs': 30,
'has_secret_bytes': True,
'idempotent': True,
'inputs_ref': {
'isolated': None,
'isolatedserver': u'https://isolateserver.appspot.com',
'namespace': u'default-gzip',
},
'cas_input_root': None,
'io_timeout_secs': None,
'outputs': [],
}
expected_request = {
'authenticated': auth_testing.DEFAULT_MOCKED_IDENTITY,
'name': u'Request name',
'parent_task_id': unicode(parent_id),
'priority': 50,
'pubsub_topic': None,
'pubsub_userdata': None,
'service_account': u'none',
'tags': [
u'OS:Windows-3.1.1',
u'authenticated:user:[email protected]',
u'hostname:localhost',
u'parent_task_id:%s' % parent_id,
u'pool:default',
u'priority:50',
u'realm:none',
u'service_account:none',
u'swarming.pool.template:no_config',
u'tag:1',
u'use_cas_1143123:0',
u'use_isolate_1143123:1',
u'user:Jesus',
],
'task_slices': [{
'expiration_secs': 30,
'properties': expected_properties,
'wait_for_capacity': False,
},],
'user': u'Jesus',
'realm': None,
'realms_enabled': False,
'bot_ping_tolerance_secs': 120,
'resultdb': None,
}
actual = req.to_dict()
actual.pop('created_ts')
actual.pop('expiration_ts')
self.assertEqual(expected_request, actual)
self.assertEqual(30, req.expiration_secs)
# Intentionally hard code the hash value since it has to be deterministic.
# Other unit tests should use the calculated value.
self.assertEqual(
'c262bae20e9b1a265fa5937d67aa36f690612b0e28c8af7e38b347dd6746da65',
req.task_slice(0).properties_hash(req).encode('hex'))
# TODO(crbug.com/1115778): remove after RBE-CAS migration.
def test_init_new_request_isolated(self):
parent = _gen_request(
properties=_gen_properties(
command=[u'command1', u'arg1'],
inputs_ref={
'isolated': '0123456789012345678901234567890123456789',
'isolatedserver': 'http://localhost:1',
'namespace': 'default-gzip',
}))
# Parent entity must have a valid key id and be stored.
parent.key = task_request.new_request_key()
parent.put()
# The reference is to the TaskRunResult.
parent_id = task_pack.pack_request_key(parent.key) + u'1'
req = _gen_request(
properties=_gen_properties(idempotent=True, has_secret_bytes=True),
parent_task_id=parent_id)
# TaskRequest with secret must have a valid key.
req.key = task_request.new_request_key()
# Needed for the get() call below.
req.put()
sb = _gen_secret(req, 'I am not a banana')
# Needed for properties_hash() call.
sb.put()
expected_properties = {
'caches': [],
'cipd_input': {
'client_package': {
'package_name': u'infra/tools/cipd/${platform}',
'path': None,
'version': u'git_revision:deadbeef',
},
'packages': [{
'package_name': u'rm',
'path': u'bin',
'version': u'git_revision:deadbeef',
}],
'server': u'https://chrome-infra-packages.appspot.com'
},
'command': [u'command1', u'arg1'],
'containment': {
u'lower_priority': False,
u'containment_type': None,
u'limit_processes': None,
u'limit_total_committed_memory': None,
},
'relative_cwd': None,
'dimensions': {
u'OS': [u'Windows-3.1.1'],
u'hostname': [u'localhost'],
u'pool': [u'default'],
},
'env': {
u'foo': u'bar',
u'joe': u'2'
},
'env_prefixes': {
u'PATH': [u'local/path']
},
'extra_args': [],
'execution_timeout_secs': 30,
'grace_period_secs': 30,
'idempotent': True,
'inputs_ref': {
'isolated': None,
'isolatedserver': u'https://isolateserver.appspot.com',
'namespace': u'default-gzip',
},
'cas_input_root': None,
'io_timeout_secs': None,
'outputs': [],
'has_secret_bytes': True,
}
expected_request = {
'authenticated': auth_testing.DEFAULT_MOCKED_IDENTITY,
'name': u'Request name',
'parent_task_id': unicode(parent_id),
'priority': 50,
'pubsub_topic': None,
'pubsub_userdata': None,
'service_account': u'none',
'tags': [
u'OS:Windows-3.1.1',
u'authenticated:user:[email protected]',
u'hostname:localhost',
u'parent_task_id:%s' % parent_id,
u'pool:default',
u'priority:50',
u'realm:none',
u'service_account:none',
u'swarming.pool.template:no_config',
u'tag:1',
u'use_cas_1143123:0',
u'use_isolate_1143123:1',
u'user:Jesus',
],
'task_slices': [{
'expiration_secs': 30,
'properties': expected_properties,
'wait_for_capacity': False,
},],
'user': u'Jesus',
'realm': None,
'realms_enabled': False,
'bot_ping_tolerance_secs': 120,
'resultdb': None,
}
actual = req.to_dict()
# expiration_ts - created_ts == scheduling_expiration_secs.
actual.pop('created_ts')
actual.pop('expiration_ts')
self.assertEqual(expected_request, actual)
self.assertEqual(30, req.expiration_secs)
# Intentionally hard code the hash value since it has to be deterministic.
# Other unit tests should use the calculated value.
self.assertEqual(
'f9254eae480e442121919c503c685319ab3a903c2d7b76eac79a947afd09d425',
req.task_slice(0).properties_hash(req).encode('hex'))
def test_init_new_request_cas_input(self):
parent = _gen_request()
# Parent entity must have a valid key id and be stored.
parent.key = task_request.new_request_key()
parent.put()
# The reference is to the TaskRunResult.
parent_id = task_pack.pack_request_key(parent.key) + u'1'
cas_input_root = {
'cas_instance': u'projects/test/instances/default',
'digest': {
'hash': u'12345',
'size_bytes': 1,
}
}
req = _gen_request(
parent_task_id=parent_id,
properties=_gen_properties(
idempotent=True,
has_secret_bytes=True,
inputs_ref=None,
cas_input_root=cas_input_root,
))
# TaskRequest with secret must have a valid key.
req.key = task_request.new_request_key()
# Needed for the get() call below.
req.put()
sb = _gen_secret(req, 'I am not a banana')
# Needed for properties_hash() call.
sb.put()
expected_properties = {
'caches': [],
'cipd_input': {
'client_package': {
'package_name': u'infra/tools/cipd/${platform}',
'path': None,
'version': u'git_revision:deadbeef',
},
'packages': [{
'package_name': u'rm',
'path': u'bin',
'version': u'git_revision:deadbeef',
}],
'server': u'https://chrome-infra-packages.appspot.com'
},
'command': [u'command1', u'arg1'],
'containment': {
u'lower_priority': False,
u'containment_type': None,
u'limit_processes': None,
u'limit_total_committed_memory': None,
},
'relative_cwd': None,
'dimensions': {
u'OS': [u'Windows-3.1.1'],
u'hostname': [u'localhost'],
u'pool': [u'default'],
},
'env': {
u'foo': u'bar',
u'joe': u'2'
},
'env_prefixes': {
u'PATH': [u'local/path']
},
'extra_args': [],
'execution_timeout_secs': 30,
'grace_period_secs': 30,
'idempotent': True,
'inputs_ref': None,
'cas_input_root': cas_input_root,
'io_timeout_secs': None,
'outputs': [],
'has_secret_bytes': True,
}
expected_request = {
'authenticated': auth_testing.DEFAULT_MOCKED_IDENTITY,
'name': u'Request name',
'parent_task_id': parent_id,
'priority': 50,
'pubsub_topic': None,
'pubsub_userdata': None,
'service_account': u'none',
'tags': [
u'OS:Windows-3.1.1',
u'authenticated:user:[email protected]',
u'hostname:localhost',
u'parent_task_id:%s' % parent_id,
u'pool:default',
u'priority:50',
u'realm:none',
u'service_account:none',
u'swarming.pool.template:no_config',
u'tag:1',
u'use_cas_1143123:1',
u'use_isolate_1143123:0',
u'user:Jesus',
],
'task_slices': [{
'expiration_secs': 30,
'properties': expected_properties,
'wait_for_capacity': False,
},],
'user': u'Jesus',
'realm': None,
'realms_enabled': False,
'bot_ping_tolerance_secs': 120,
'resultdb': None,
}
actual = req.to_dict()
# expiration_ts - created_ts == scheduling_expiration_secs.
actual.pop('created_ts')
actual.pop('expiration_ts')
self.assertEqual(expected_request, actual)
self.assertEqual(30, req.expiration_secs)
# Intentionally hard code the hash value since it has to be deterministic.
# Other unit tests should use the calculated value.
self.assertEqual(
'9e1b99c20a5c523ea1ade51276230781f9ddfd3ae396e66c810612a1c5c8062a',
req.task_slice(0).properties_hash(req).encode('hex'))
def test_init_new_request_parent(self):
parent = _gen_request()
# Parent entity must have a valid key id and be stored.
parent.key = task_request.new_request_key()
parent.put()
# The reference is to the TaskRunResult.
parent_id = task_pack.pack_request_key(parent.key) + '1'
child = _gen_request(parent_task_id=parent_id)
self.assertEqual(parent_id, child.parent_task_id)
def test_init_new_request_invalid_parent_id(self):
    # Must end with '1' or '2', not '0'.
with self.assertRaises(ValueError):
_gen_request(parent_task_id='1d69b9f088008810')
def test_init_new_request_missing_name(self):
req = _gen_request(name=None)
with self.assertRaisesRegexp(datastore_errors.BadValueError,
'^name is missing$'):
req.put()
def test_init_new_request_idempotent(self):
request = _gen_request(properties=_gen_properties(idempotent=True))
as_dict = request.to_dict()
self.assertEqual(True,
as_dict['task_slices'][0]['properties']['idempotent'])
# Intentionally hard code the hash value since it has to be deterministic.
# Other unit tests should use the calculated value.
# Ensure the algorithm is deterministic.
self.assertEqual(
'b1230281cc4bcc8d9458dab0810c86fcfaf8e4124351f4d39517833eb9541465',
request.task_slice(0).properties_hash(request).encode('hex'))
def test_init_new_request_bot_service_account(self):
request = _gen_request(service_account='bot')
request.put()
as_dict = request.to_dict()
self.assertEqual('bot', as_dict['service_account'])
self.assertIn(u'service_account:bot', as_dict['tags'])
def test_init_new_request_RBE_CAS(self):
request = _gen_request(
properties=_gen_properties(
inputs_ref=task_request.FilesRef(
isolated='dead' * (64 / 4),
isolatedserver='astuce-service',
namespace='sha256-GCP')))
request.put()
as_dict = request.to_dict()
expected = {
'isolated':
u'deaddeaddeaddeaddeaddeaddeaddeaddeaddeaddeaddeaddeaddeaddeaddead',
'isolatedserver':
u'astuce-service',
'namespace':
u'sha256-GCP',
}
self.assertEqual(expected,
as_dict['task_slices'][0]['properties']['inputs_ref'])
def _set_pool_config_with_templates(self,
prod=None,
canary=None,
canary_chance=None,
pool_name=u'default'):
"""Builds a new pools_config.PoolConfig populated with the given
pools_config.TaskTemplate objects and assigns it into the mocked
    `pools_config.get_pool_config()` method.
If prod is None, this omits the TaskTemplateDeployment entirely.
canary_chance may be supplied as >9999 (normally illegal) in order to force
the selection of canary."""
deployment = None
if prod is not None:
deployment = pools_config.TaskTemplateDeployment(
prod=prod, canary=canary, canary_chance=canary_chance)
self._pool_configs[pool_name] = pools_config.init_pool_config(
name=pool_name,
rev=u'testVersion1',
task_template_deployment=deployment,
)
def test_init_new_request_skip_template(self):
self._set_pool_config_with_templates(_gen_task_template(env={'hi': 'prod'}))
request = _gen_request(_template_apply=task_request.TEMPLATE_SKIP)
as_dict = request.to_dict()
self.assertIn(u'swarming.pool.version:testVersion1', as_dict['tags'])
self.assertIn(u'swarming.pool.template:skip', as_dict['tags'])
def test_init_new_request_missing_template(self):
self._set_pool_config_with_templates()
request = _gen_request()
as_dict = request.to_dict()
self.assertIn(u'swarming.pool.version:testVersion1', as_dict['tags'])
self.assertIn(u'swarming.pool.template:none', as_dict['tags'])
def test_init_new_request_prod_template(self):
self._set_pool_config_with_templates(
_gen_task_template(env={'hi': 'prod'}),
canary=None,
canary_chance=0, # always prefer prod serverside
)
request = _gen_request()
as_dict = request.to_dict()
self.assertIn(u'swarming.pool.version:testVersion1', as_dict['tags'])
self.assertIn(u'swarming.pool.template:prod', as_dict['tags'])
self.assertEqual(as_dict['task_slices'][0]['properties']['env']['hi'],
'prod')
def test_init_new_request_canary_template(self):
self._set_pool_config_with_templates(
_gen_task_template(env={'hi': 'prod'}),
_gen_task_template(env={'hi': 'canary'}),
canary_chance=10000, # always prefer canary serverside
)
request = _gen_request()
as_dict = request.to_dict()
self.assertIn(u'swarming.pool.version:testVersion1', as_dict['tags'])
self.assertIn(u'swarming.pool.template:canary', as_dict['tags'])
self.assertEqual(as_dict['task_slices'][0]['properties']['env']['hi'],
'canary')
def test_init_new_request_canary_never_template(self):
self._set_pool_config_with_templates(
_gen_task_template(env={'hi': 'prod'}),
_gen_task_template(env={'hi': 'canary'}),
canary_chance=10000, # always prefer canary serverside
)
request = _gen_request(_template_apply=task_request.TEMPLATE_CANARY_NEVER)
as_dict = request.to_dict()
self.assertIn(u'swarming.pool.version:testVersion1', as_dict['tags'])
self.assertIn(u'swarming.pool.template:prod', as_dict['tags'])
self.assertEqual(as_dict['task_slices'][0]['properties']['env']['hi'],
'prod')
def test_init_new_request_canary_prefer_template(self):
self._set_pool_config_with_templates(
_gen_task_template(env={'hi': 'prod'}),
_gen_task_template(env={'hi': 'canary'}),
canary_chance=0, # always prefer prod serverside
)
request = _gen_request(_template_apply=task_request.TEMPLATE_CANARY_PREFER)
as_dict = request.to_dict()
self.assertIn(u'swarming.pool.version:testVersion1', as_dict['tags'])
self.assertIn(u'swarming.pool.template:canary', as_dict['tags'])
self.assertEqual(as_dict['task_slices'][0]['properties']['env']['hi'],
'canary')
def test_init_new_request_canary_prefer_prod_template(self):
self._set_pool_config_with_templates(
_gen_task_template(env={'hi': 'prod'}),
# No canary defined, even though caller would prefer it, if available.
)
request = _gen_request(_template_apply=task_request.TEMPLATE_CANARY_PREFER)
as_dict = request.to_dict()
self.assertIn(u'swarming.pool.version:testVersion1', as_dict['tags'])
self.assertIn(u'swarming.pool.template:prod', as_dict['tags'])
self.assertEqual(as_dict['task_slices'][0]['properties']['env']['hi'],
'prod')
def test_duped(self):
    # Two TaskRequests with the same properties.
request_1 = _gen_request(properties=_gen_properties(idempotent=True))
now = utils.utcnow()
request_2 = _gen_request_slices(
name='Other',
user='Other',
priority=201,
created_ts=now,
manual_tags=['tag:2'],
task_slices=[
task_request.TaskSlice(
expiration_secs=129,
properties=_gen_properties(idempotent=True)),
])
self.assertEqual(
request_1.task_slice(0).properties_hash(request_1),
request_2.task_slice(0).properties_hash(request_2))
self.assertTrue(request_1.task_slice(0).properties_hash(request_1))
def test_different(self):
    # Two TaskRequests with different properties.
request_1 = _gen_request(
properties=_gen_properties(execution_timeout_secs=30, idempotent=True))
request_2 = _gen_request(
properties=_gen_properties(execution_timeout_secs=129, idempotent=True))
self.assertNotEqual(
request_1.task_slice(0).properties_hash(request_1),
request_2.task_slice(0).properties_hash(request_2))
# TODO(crbug.com/1115778): remove after RBE-CAS migration.
def test_TaskRequest_to_proto_isolated(self):
    # Try to set as many things as possible to exercise most code paths.
def getrandbits(i):
self.assertEqual(i, 16)
return 0x7766
self.mock(random, 'getrandbits', getrandbits)
self.mock_now(task_request._BEGINING_OF_THE_WORLD)
# Parent entity must have a valid key id and be stored.
# This task uses user:Jesus, which will be inherited automatically.
parent = _gen_request()
parent.key = task_request.new_request_key()
parent.put()
# The reference is to the TaskRunResult.
parent_id = task_pack.pack_request_key(parent.key) + u'0'
parent_run_id = task_pack.pack_request_key(parent.key) + u'1'
self.mock_now(task_request._BEGINING_OF_THE_WORLD, 2)
request_props = _gen_properties(
inputs_ref={
'isolated': '0123456789012345678901234567890123456789',
'isolatedserver': 'http://localhost:1',
'namespace': 'default-gzip',
},
relative_cwd=u'subdir',
caches=[
task_request.CacheEntry(name=u'git_chromium', path=u'git_cache'),
],
cipd_input=_gen_cipd_input(
packages=[
task_request.CipdPackage(
package_name=u'foo', path=u'tool', version=u'git:12345'),
],),
idempotent=True,
outputs=[u'foo'],
has_secret_bytes=True,
containment=task_request.Containment(
lower_priority=True,
containment_type=task_request.ContainmentType.JOB_OBJECT,
limit_processes=1000,
limit_total_committed_memory=1024**3,
),
)
request = _gen_request_slices(
task_slices=[
task_request.TaskSlice(
expiration_secs=30,
properties=request_props,
wait_for_capacity=True,
),
],
# The user is ignored; the value is overridden by the parent task's
# user.
user=u'Joe',
parent_task_id=parent_run_id,
service_account=u'[email protected]',
pubsub_topic=u'projects/a/topics/abc',
pubsub_auth_token=u'sekret',
pubsub_userdata=u'obscure_reference',
)
# Necessary to have a valid task_id:
request.key = task_request.new_request_key()
# Necessary to attach a secret to the request:
request.put()
_gen_secret(request, 'I am a banana').put()
expected_props = swarming_pb2.TaskProperties(
cas_inputs=swarming_pb2.CASTree(
digest=u'0123456789012345678901234567890123456789',
server=u'http://localhost:1',
namespace=u'default-gzip',
),
cipd_inputs=[
swarming_pb2.CIPDPackage(
package_name=u'foo', version=u'git:12345', dest_path=u'tool'),
],
named_caches=[
swarming_pb2.NamedCacheEntry(
name=u'git_chromium', dest_path=u'git_cache'),
],
containment=swarming_pb2.Containment(
lower_priority=True,
containment_type=swarming_pb2.Containment.JOB_OBJECT,
limit_processes=1000,
limit_total_committed_memory=1024**3,
),
command=[u'command1', u'arg1'],
relative_cwd=u'subdir',
# extra_args cannot be specified with command.
# secret_bytes cannot be retrieved, but is included in properties_hash.
has_secret_bytes=True,
dimensions=[
swarming_pb2.StringListPair(key=u'OS', values=[u'Windows-3.1.1']),
swarming_pb2.StringListPair(key=u'hostname', values=[u'localhost']),
swarming_pb2.StringListPair(key=u'pool', values=[u'default']),
],
env=[
swarming_pb2.StringPair(key=u'foo', value=u'bar'),
swarming_pb2.StringPair(key=u'joe', value=u'2'),
],
env_paths=[
swarming_pb2.StringListPair(key=u'PATH', values=[u'local/path']),
],
execution_timeout=duration_pb2.Duration(seconds=30),
grace_period=duration_pb2.Duration(seconds=30),
idempotent=True,
outputs=[u'foo'],
)
# To be updated every time the schema changes.
props_h = 'e8718f59959d2c17d9ab1084b6fc9b3ee63e998a704de579543dd84bc1ef603a'
expected = swarming_pb2.TaskRequest(
# Scheduling.
task_slices=[
swarming_pb2.TaskSlice(
properties=expected_props,
expiration=duration_pb2.Duration(seconds=30),
wait_for_capacity=True,
properties_hash=props_h,
),
],
priority=50,
service_account=u'[email protected]',
# Information.
create_time=timestamp_pb2.Timestamp(seconds=1262304002),
name=u'Request name',
authenticated='user:[email protected]',
tags=[
u'OS:Windows-3.1.1',
u"authenticated:user:[email protected]",
u'hostname:localhost',
u'parent_task_id:%s' % parent_run_id,
u'pool:default',
u'priority:50',
u'realm:none',
u'service_account:[email protected]',
u'swarming.pool.template:no_config',
u'tag:1',
u"use_cas_1143123:0",
u"use_isolate_1143123:1",
u'user:Jesus',
],
user=u'Jesus',
# Hierarchy.
task_id=u'7d0776610',
parent_task_id=parent_id,
parent_run_id=parent_run_id,
# Notification. auth_token cannot be retrieved.
pubsub_notification=swarming_pb2.PubSub(
topic=u'projects/a/topics/abc', userdata=u'obscure_reference'),
)
actual = swarming_pb2.TaskRequest()
request.to_proto(actual)
self.assertEqual(unicode(expected), unicode(actual))
    # With append_root_ids=True.
actual = swarming_pb2.TaskRequest()
request.to_proto(actual, append_root_ids=True)
expected.root_task_id = parent_id
expected.root_run_id = parent_run_id
self.assertEqual(unicode(expected), unicode(actual))
def test_TaskRequest_to_proto(self):
    # Try to set as many things as possible to exercise most code paths.
def getrandbits(i):
self.assertEqual(i, 16)
return 0x7766
self.mock(random, 'getrandbits', getrandbits)
self.mock_now(task_request._BEGINING_OF_THE_WORLD)
    # The grandparent entity must have a valid key id and be stored.
# This task uses user:Jesus, which will be inherited automatically.
grand_parent = _gen_request()
grand_parent.key = task_request.new_request_key()
grand_parent.put()
# Parent entity must have a valid key id and be stored.
self.mock_now(task_request._BEGINING_OF_THE_WORLD, 1)
parent = _gen_request(parent_task_id=grand_parent.task_id[:-1] + u'1')
parent.key = task_request.new_request_key()
parent.put()
parent_run_id = parent.task_id[:-1] + u'1'
self.mock_now(task_request._BEGINING_OF_THE_WORLD, 2)
request_props = _gen_properties(
inputs_ref=None,
cas_input_root={
'cas_instance': u'projects/test/instances/default',
'digest': {
'hash': u'12345',
'size_bytes': 1,
}
},
relative_cwd=u'subdir',
caches=[
task_request.CacheEntry(name=u'git_chromium', path=u'git_cache'),
],
cipd_input=_gen_cipd_input(
packages=[
task_request.CipdPackage(
package_name=u'foo', path=u'tool', version=u'git:12345'),
],),
idempotent=True,
outputs=[u'foo'],
has_secret_bytes=True,
containment=task_request.Containment(
lower_priority=True,
containment_type=task_request.ContainmentType.JOB_OBJECT,
limit_processes=1000,
limit_total_committed_memory=1024**3,
),
)
request = _gen_request_slices(
task_slices=[
task_request.TaskSlice(
expiration_secs=30,
properties=request_props,
wait_for_capacity=True,
),
],
# The user is ignored; the value is overridden by the parent task's
# user.
user=u'Joe',
parent_task_id=parent.task_id[:-1] + u'1',
service_account=u'[email protected]',
pubsub_topic=u'projects/a/topics/abc',
pubsub_auth_token=u'sekret',
pubsub_userdata=u'obscure_reference',
)
# Necessary to have a valid task_id:
request.key = task_request.new_request_key()
# Necessary to attach a secret to the request:
request.put()
_gen_secret(request, 'I am a banana').put()
expected_props = swarming_pb2.TaskProperties(
cas_input_root=swarming_pb2.CASReference(
cas_instance='projects/test/instances/default',
digest=swarming_pb2.Digest(
hash='12345', size_bytes=1),
),
cipd_inputs=[
swarming_pb2.CIPDPackage(
package_name=u'foo', version=u'git:12345', dest_path=u'tool'),
],
named_caches=[
swarming_pb2.NamedCacheEntry(
name=u'git_chromium', dest_path=u'git_cache'),
],
containment=swarming_pb2.Containment(
lower_priority=True,
containment_type=swarming_pb2.Containment.JOB_OBJECT,
limit_processes=1000,
limit_total_committed_memory=1024**3,
),
command=[u'command1', u'arg1'],
relative_cwd=u'subdir',
# extra_args cannot be specified with command.
# secret_bytes cannot be retrieved, but is included in properties_hash.
has_secret_bytes=True,
dimensions=[
swarming_pb2.StringListPair(key=u'OS', values=[u'Windows-3.1.1']),
swarming_pb2.StringListPair(key=u'hostname', values=[u'localhost']),
swarming_pb2.StringListPair(key=u'pool', values=[u'default']),
],
env=[
swarming_pb2.StringPair(key=u'foo', value=u'bar'),
swarming_pb2.StringPair(key=u'joe', value=u'2'),
],
env_paths=[
swarming_pb2.StringListPair(key=u'PATH', values=[u'local/path']),
],
execution_timeout=duration_pb2.Duration(seconds=30),
grace_period=duration_pb2.Duration(seconds=30),
idempotent=True,
outputs=[u'foo'],
)
# To be updated every time the schema changes.
props_h = '516b5f86592b0e5e3bdd9fbf715305ee6f7ddad36320775d5a945e60df67c360'
expected = swarming_pb2.TaskRequest(
# Scheduling.
task_slices=[
swarming_pb2.TaskSlice(
properties=expected_props,
expiration=duration_pb2.Duration(seconds=30),
wait_for_capacity=True,
properties_hash=props_h,
),
],
priority=50,
service_account=u'[email protected]',
# Information.
create_time=timestamp_pb2.Timestamp(seconds=1262304002),
name=u'Request name',
authenticated='user:[email protected]',
tags=[
u'OS:Windows-3.1.1',
u"authenticated:user:[email protected]",
u'hostname:localhost',
u'parent_task_id:%s' % parent_run_id,
u'pool:default',
u'priority:50',
u'realm:none',
u'service_account:[email protected]',
u'swarming.pool.template:no_config',
u'tag:1',
u"use_cas_1143123:1",
u"use_isolate_1143123:0",
u'user:Jesus',
],
user=u'Jesus',
# Hierarchy.
task_id=u'7d0776610',
parent_task_id=parent.task_id,
parent_run_id=parent_run_id,
# Notification. auth_token cannot be retrieved.
pubsub_notification=swarming_pb2.PubSub(
topic=u'projects/a/topics/abc', userdata=u'obscure_reference'),
)
actual = swarming_pb2.TaskRequest()
request.to_proto(actual)
self.assertEqual(unicode(expected), unicode(actual))
# With append_root_ids=True.
actual = swarming_pb2.TaskRequest()
request.to_proto(actual, append_root_ids=True)
expected.root_task_id = grand_parent.task_id
expected.root_run_id = grand_parent.task_id[:-1] + u'1'
self.assertEqual(unicode(expected), unicode(actual))
def test_TaskRequest_to_proto_empty(self):
# Assert that it doesn't throw on empty entity.
actual = swarming_pb2.TaskRequest()
task_request.TaskRequest().to_proto(actual)
self.assertEqual(swarming_pb2.TaskRequest(), actual)
def test_TaskSlice_to_proto_empty(self):
# Assert that it doesn't throw on empty entity.
request = task_request.TaskRequest()
actual = swarming_pb2.TaskSlice()
task_request.TaskSlice().to_proto(actual, request)
self.assertEqual(swarming_pb2.TaskSlice(), actual)
def test_TaskProperties_to_proto_empty(self):
# Assert that it doesn't throw on empty entity.
actual = swarming_pb2.TaskProperties()
task_request.TaskProperties().to_proto(actual)
expected = swarming_pb2.TaskProperties()
expected.grace_period.seconds = 30
self.assertEqual(expected, actual)
def test_request_bad_values(self):
with self.assertRaises(AttributeError):
_gen_request(properties=_gen_properties(foo='bar'))
def test_request_bad_values_stale_style(self):
# Old TaskRequest.properties style.
    # Hack the call a bit to force the old, incorrect style.
req = _gen_request_slices(
task_slices=[],
expiration_ts=utils.utcnow() + datetime.timedelta(hours=1),
properties_old=_gen_properties())
with self.assertRaises(datastore_errors.BadValueError):
req.put()
def test_request_bad_values_task_slices(self):
with self.assertRaises(ValueError):
# No TaskSlice
_gen_request_slices(task_slices=[])
def _gen_slice(**props):
return task_request.TaskSlice(
expiration_secs=60, properties=_gen_properties(**props))
slices = [_gen_slice(dimensions={u'pool': [u'GPU']})]
_gen_request_slices(task_slices=slices).put()
# Limit on the maximum number of TaskSlice in a TaskRequest.
slices = [
_gen_slice(dimensions={
u'pool': [u'GPU'],
u'v': [unicode(i)]
}) for i in range(8)
]
_gen_request_slices(task_slices=slices).put()
slices = [
_gen_slice(dimensions={
u'pool': [u'GPU'],
u'v': [unicode(i)]
}) for i in range(9)
]
req = _gen_request_slices(task_slices=slices)
with self.assertRaises(datastore_errors.BadValueError):
req.put()
# Different pools.
slices = [
task_request.TaskSlice(
expiration_secs=60,
properties=_gen_properties(dimensions={u'pool': [u'GPU']})),
task_request.TaskSlice(
expiration_secs=60,
properties=_gen_properties(dimensions={u'pool': [u'other']})),
]
req = _gen_request_slices(task_slices=slices)
with self.assertRaises(datastore_errors.BadValueError):
req.put()
def test_request_bad_command(self):
req = _gen_request(properties=_gen_properties(command=[], inputs_ref=None))
with self.assertRaises(datastore_errors.BadValueError):
req.put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(properties=_gen_properties(command={'a': 'b'}))
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(properties=_gen_properties(command='python'))
_gen_request(properties=_gen_properties(command=['python'])).put()
_gen_request(properties=_gen_properties(command=[u'python'])).put()
_gen_request(properties=_gen_properties(command=[u'python'] * 128)).put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(properties=_gen_properties(command=[u'python'] * 129)).put()
def test_request_extra_args(self):
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(
properties=_gen_properties(
command=[],
extra_args=[u'python'],
inputs_ref=task_request.FilesRef(
isolated='deadbeefdeadbeefdeadbeefdeadbeefdeadbeef',
isolatedserver='http://localhost:1',
namespace='default-gzip'))).put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(
properties=_gen_properties(
command=[u'python'],
extra_args=[u'python'],
inputs_ref=task_request.FilesRef(
isolated='deadbeefdeadbeefdeadbeefdeadbeefdeadbeef',
isolatedserver='http://localhost:1',
namespace='default-gzip'))).put()
def test_request_bad_cipd_input(self):
def mkcipdreq(idempotent=False, **cipd_input):
return _gen_request(
properties=_gen_properties(
idempotent=idempotent, cipd_input=_gen_cipd_input(**cipd_input)))
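    # An empty package is invalid.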
req = mkcipdreq(packages=[{}])
with self.assertRaises(datastore_errors.BadValueError):
req.put()
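    # '|' is not a valid character in a package name.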
with self.assertRaises(datastore_errors.BadValueError):
mkcipdreq(packages=[
task_request.CipdPackage(
package_name='infra|rm', path='.', version='latest'),
])
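    # A package must have a version.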
req = mkcipdreq(
packages=[task_request.CipdPackage(package_name='rm', path='.')])
with self.assertRaises(datastore_errors.BadValueError):
req.put()
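    # A package must have a path.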
req = mkcipdreq(packages=[
task_request.CipdPackage(package_name='rm', version='latest'),
])
with self.assertRaises(datastore_errors.BadValueError):
req.put()
with self.assertRaises(datastore_errors.BadValueError):
mkcipdreq(packages=[
task_request.CipdPackage(
package_name='rm', path='/', version='latest'),
])
with self.assertRaises(datastore_errors.BadValueError):
mkcipdreq(packages=[
task_request.CipdPackage(
package_name='rm', path='/a', version='latest'),
])
with self.assertRaises(datastore_errors.BadValueError):
mkcipdreq(packages=[
task_request.CipdPackage(
package_name='rm', path='a/..', version='latest'),
])
with self.assertRaises(datastore_errors.BadValueError):
mkcipdreq(packages=[
task_request.CipdPackage(
package_name='rm', path='a/./b', version='latest'),
])
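    # Two packages cannot be mapped to the same path.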
req = mkcipdreq(packages=[
task_request.CipdPackage(package_name='rm', path='.', version='latest'),
task_request.CipdPackage(package_name='rm', path='.', version='canary'),
])
with self.assertRaises(datastore_errors.BadValueError):
req.put()
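    # An idempotent task needs reproducible inputs, so an unpinned version
    # like 'latest' is rejected.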
req = mkcipdreq(
idempotent=True,
packages=[
task_request.CipdPackage(
package_name='rm', path='.', version='latest'),
])
with self.assertRaises(datastore_errors.BadValueError):
req.put()
with self.assertRaises(datastore_errors.BadValueError):
mkcipdreq(server='abc')
with self.assertRaises(datastore_errors.BadValueError):
mkcipdreq(
client_package=task_request.CipdPackage(
package_name='--bad package--'))
mkcipdreq().put()
mkcipdreq(packages=[
task_request.CipdPackage(package_name='rm', path='.', version='latest'),
]).put()
mkcipdreq(
client_package=task_request.CipdPackage(
package_name='infra/tools/cipd/${platform}',
version='git_revision:daedbeef'),
packages=[
task_request.CipdPackage(
package_name='rm', path='.', version='latest'),
],
server='https://chrome-infra-packages.appspot.com').put()
def test_request_bad_named_cache(self):
    def mkcachereq(*caches):
      return _gen_request(properties=_gen_properties(caches=caches)).put()
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(task_request.CacheEntry(name='', path='git_cache'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(task_request.CacheEntry(name='git_chromium', path=''))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(
task_request.CacheEntry(name='git_chromium', path='git_cache'),
task_request.CacheEntry(name='git_v8', path='git_cache'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(
task_request.CacheEntry(name='git_chromium', path='git_cache'),
task_request.CacheEntry(name='git_chromium', path='git_cache2'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(
task_request.CacheEntry(name='git_chromium', path='/git_cache'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(
task_request.CacheEntry(name='git_chromium', path='../git_cache'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(
task_request.CacheEntry(
name='git_chromium', path='git_cache/../../a'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(
task_request.CacheEntry(name='git_chromium', path='../git_cache'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(
task_request.CacheEntry(name='git_chromium', path='git_cache//a'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(
task_request.CacheEntry(name='git_chromium', path='a/./git_cache'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(task_request.CacheEntry(name='has space', path='git_cache'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(task_request.CacheEntry(name='CAPITAL', path='git_cache'))
mkcachereq()
mkcachereq(task_request.CacheEntry(name='git_chromium', path='git_cache'))
mkcachereq(
task_request.CacheEntry(name='git_chromium', path='git_cache'),
task_request.CacheEntry(name='build_chromium', path='out'))
mkcachereq(task_request.CacheEntry(name=u'g' * 128, path=u'git_cache'))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(task_request.CacheEntry(name=u'g' * 129, path=u'git_cache'))
mkcachereq(task_request.CacheEntry(name=u'g', path=u'p' * 256))
with self.assertRaises(datastore_errors.BadValueError):
mkcachereq(task_request.CacheEntry(name=u'g', path=u'p' * 257))
# Too many.
c = [
task_request.CacheEntry(name=unicode(i), path=unicode(i))
for i in range(32)
]
_gen_request(properties=_gen_properties(caches=c)).put()
with self.assertRaises(datastore_errors.BadValueError):
c = [
task_request.CacheEntry(name=unicode(i), path=unicode(i))
for i in range(33)
]
_gen_request(properties=_gen_properties(caches=c)).put()
def test_request_bad_named_cache_and_cipd_input(self):
# A CIPD package and named caches cannot be mapped to the same path.
req = _gen_request(
properties=_gen_properties(
caches=[
task_request.CacheEntry(name='git_chromium', path='git_cache'),
],
cipd_input=_gen_cipd_input(packages=[
task_request.CipdPackage(
package_name='foo', path='git_cache', version='latest'),
])))
with self.assertRaises(datastore_errors.BadValueError):
req.put()
req = _gen_request(
properties=_gen_properties(
caches=[
task_request.CacheEntry(name='git_chromium', path='git_cache1'),
],
cipd_input=_gen_cipd_input(packages=[
task_request.CipdPackage(
package_name='foo', path='git_cache2', version='latest'),
]))).put()
def test_request_bad_dimensions(self):
# Type error.
with self.assertRaises(TypeError):
_gen_request(properties=_gen_properties(dimensions=[]))
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(properties=_gen_properties(dimensions={}))
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(
properties=_gen_properties(dimensions={
u'id': u'b',
u'a:': u'b'
}))
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(
properties=_gen_properties(dimensions={
u'id': u'b',
u'a.': u'b'
}))
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(
properties=_gen_properties(dimensions={
u'id': u'b',
u'a': [u'b']
}))
# >1 value for id.
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(properties=_gen_properties(dimensions={u'id': [u'a', u'b']}))
# >1 value for pool.
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(
properties=_gen_properties(dimensions={u'pool': [u'b', u'b']}))
_gen_request(
properties=_gen_properties(dimensions={
u'id': [u'b'],
u'pool': [u'b']
})).put()
_gen_request(
properties=_gen_properties(dimensions={
u'id': [u'b'],
u'pool': [u'b'],
u'a.': [u'c']
})).put()
_gen_request(
properties=_gen_properties(dimensions={
u'pool': [u'b'],
u'a.': [u'b', u'c']
})).put()
def test_request_bad_dimensions_key(self):
# Max # keys.
d = {u'a%s' % string.ascii_letters[i]: [unicode(i)] for i in range(31)}
d[u'pool'] = [u'a']
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaises(datastore_errors.BadValueError):
d = {u'a%s' % string.ascii_letters[i]: [unicode(i)] for i in range(32)}
d[u'pool'] = [u'a']
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaises(datastore_errors.BadValueError):
# Key regexp.
d = {u'pool': [u'default'], u'1': [u'value']}
_gen_request(properties=_gen_properties(dimensions=d)).put()
# Key length.
d = {
u'pool': [u'default'],
u'v' * config.DIMENSION_KEY_LENGTH: [u'v'],
}
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaises(datastore_errors.BadValueError):
d = {
u'pool': [u'default'],
u'v' * (config.DIMENSION_KEY_LENGTH + 1): [u'value'],
}
_gen_request(properties=_gen_properties(dimensions=d)).put()
def test_request_bad_dimensions_value(self):
# Max # values.
d = {u'pool': [u'b'], u'a.': [unicode(i) for i in range(16)]}
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaises(datastore_errors.BadValueError):
d = {u'pool': [u'b'], u'a.': [unicode(i) for i in range(17)]}
_gen_request(properties=_gen_properties(dimensions=d)).put()
# Value length.
d = {
u'pool': [u'default'],
u'v': [u'v' * config.DIMENSION_VALUE_LENGTH],
}
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaises(datastore_errors.BadValueError):
d = {
u'pool': [u'default'],
u'v': [u'v' * (config.DIMENSION_VALUE_LENGTH + 1)],
}
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaises(datastore_errors.BadValueError):
# Value with space.
d = {u'pool': [u'default'], u'v': [u'v ']}
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaises(datastore_errors.BadValueError):
# Duplicate value.
d = {u'pool': [u'default'], u'v': [u'v', u'v']}
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaisesRegexp(
datastore_errors.BadValueError,
"^dimension key u'v' has invalid value u'v||c'$"):
# Empty 'or' dimension value.
d = {u'pool': [u'default'], u'v': [u'v||c']}
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaisesRegexp(
datastore_errors.BadValueError,
"^'pool' cannot be specified more than once in dimensions "
"\[u'default|non-default'\]$"):
# Use 'or' dimension in pool.
d = {u'pool': [u'default|non-default'], u'v': [u'v']}
_gen_request(properties=_gen_properties(dimensions=d)).put()
with self.assertRaisesRegexp(
datastore_errors.BadValueError,
'possible dimension subset for \'or\' dimensions '
'should not be more than 8, but 9'):
# Too many combinations for 'or'
d = {
u'pool': [u'default'],
u'x': [u'1|2|3'],
u'y': [u'1|2|3'],
}
_gen_request(properties=_gen_properties(dimensions=d)).put()
d = {
u'pool': [u'default'],
u'x': [u'1|2'],
u'y': [u'1|2'],
u'z': [u'1|2'],
}
_gen_request(properties=_gen_properties(dimensions=d)).put()
def test_request_bad_env(self):
# Type error.
with self.assertRaises(TypeError):
_gen_request(properties=_gen_properties(env=[]))
with self.assertRaises(TypeError):
_gen_request(properties=_gen_properties(env={u'a': 1}))
_gen_request(properties=_gen_properties(env={})).put()
e = {u'k': u'v'}
_gen_request(properties=_gen_properties(env=e)).put()
# Key length.
e = {u'k' * 64: u'v'}
_gen_request(properties=_gen_properties(env=e)).put()
with self.assertRaises(datastore_errors.BadValueError):
e = {u'k' * 65: u'v'}
_gen_request(properties=_gen_properties(env=e)).put()
    # Maximum number of keys.
e = {u'k%s' % i: u'v' for i in range(64)}
_gen_request(properties=_gen_properties(env=e)).put()
with self.assertRaises(datastore_errors.BadValueError):
e = {u'k%s' % i: u'v' for i in range(65)}
_gen_request(properties=_gen_properties(env=e)).put()
# Value length.
e = {u'k': u'v' * 1024}
_gen_request(properties=_gen_properties(env=e)).put()
with self.assertRaises(datastore_errors.BadValueError):
e = {u'k': u'v' * 1025}
_gen_request(properties=_gen_properties(env=e)).put()
def test_request_bad_env_prefixes(self):
# Type error.
with self.assertRaises(TypeError):
_gen_request(properties=_gen_properties(env_prefixes=[]))
with self.assertRaises(TypeError):
_gen_request(properties=_gen_properties(env_prefixes={u'a': 1}))
_gen_request(properties=_gen_properties(env_prefixes={})).put()
e = {u'k': [u'v']}
_gen_request(properties=_gen_properties(env_prefixes=e)).put()
# Key length.
e = {u'k' * 64: [u'v']}
_gen_request(properties=_gen_properties(env_prefixes=e)).put()
with self.assertRaises(datastore_errors.BadValueError):
e = {u'k' * 65: [u'v']}
_gen_request(properties=_gen_properties(env_prefixes=e)).put()
    # Maximum number of keys.
e = {u'k%s' % i: [u'v'] for i in range(64)}
_gen_request(properties=_gen_properties(env_prefixes=e)).put()
with self.assertRaises(datastore_errors.BadValueError):
e = {u'k%s' % i: [u'v'] for i in range(65)}
_gen_request(properties=_gen_properties(env_prefixes=e)).put()
# Value length.
e = {u'k': [u'v' * 1024]}
_gen_request(properties=_gen_properties(env_prefixes=e)).put()
with self.assertRaises(datastore_errors.BadValueError):
e = {u'k': [u'v' * 1025]}
_gen_request(properties=_gen_properties(env_prefixes=e)).put()
def test_request_bad_priority(self):
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(priority=task_request.MAXIMUM_PRIORITY + 1)
_gen_request(priority=task_request.MAXIMUM_PRIORITY).put()
  def test_request_bad_bot_ping_tolerance(self):
    with self.assertRaises(datastore_errors.BadValueError):
      _gen_request(
          bot_ping_tolerance_secs=task_request._MAX_BOT_PING_TOLERANCE_SECS + 1)
    with self.assertRaises(datastore_errors.BadValueError):
      _gen_request(
          bot_ping_tolerance_secs=task_request._MIN_BOT_PING_TOLERANCE_SECS - 1)
def test_request_bad_execution_timeout(self):
# When used locally, it is set to 1, which means it's impossible to test
# below _MIN_TIMEOUT_SECS but above 0.
self.mock(task_request, '_MIN_TIMEOUT_SECS', 30)
p = _gen_request(properties=_gen_properties(execution_timeout_secs=0))
with self.assertRaises(datastore_errors.BadValueError):
# Only termination task may have 0.
p.put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(
properties=_gen_properties(
execution_timeout_secs=task_request._MIN_TIMEOUT_SECS - 1))
_gen_request(
properties=_gen_properties(
execution_timeout_secs=task_request._MIN_TIMEOUT_SECS))
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(
properties=_gen_properties(
execution_timeout_secs=task_request.MAX_TIMEOUT_SECS + 1))
_gen_request(
properties=_gen_properties(
execution_timeout_secs=task_request.MAX_TIMEOUT_SECS)).put()
def test_request_bad_expiration(self):
now = utils.utcnow()
with self.assertRaises(ValueError):
_gen_request_slices(
created_ts=now,
task_slices=[
task_request.TaskSlice(
expiration_secs=None, properties=_gen_properties()),
])
with self.assertRaises(datastore_errors.BadValueError):
_gen_request_slices(
created_ts=now,
task_slices=[
task_request.TaskSlice(
expiration_secs=task_request._MIN_TIMEOUT_SECS - 1,
properties=_gen_properties()),
])
with self.assertRaises(datastore_errors.BadValueError):
_gen_request_slices(
created_ts=now,
task_slices=[
task_request.TaskSlice(
expiration_secs=task_request.MAX_EXPIRATION_SECS + 1,
properties=_gen_properties()),
])
_gen_request_slices(
created_ts=now,
task_slices=[
task_request.TaskSlice(
expiration_secs=task_request._MIN_TIMEOUT_SECS,
properties=_gen_properties()),
]).put()
_gen_request_slices(
created_ts=now,
task_slices=[
task_request.TaskSlice(
expiration_secs=task_request.MAX_EXPIRATION_SECS,
properties=_gen_properties()),
]).put()
def test_request_bad_inputs_ref(self):
    # Having both command and inputs_ref.isolated is valid.
_gen_request(
properties=_gen_properties(
command=['python'],
inputs_ref=task_request.FilesRef(
isolated='deadbeefdeadbeefdeadbeefdeadbeefdeadbeef',
isolatedserver='http://localhost:1',
namespace='default-gzip'))).put()
# Bad digest.
req = _gen_request(
properties=_gen_properties(
command=['see', 'spot', 'run'],
inputs_ref=task_request.FilesRef(
isolated='deadbeef',
isolatedserver='http://localhost:1',
namespace='default-gzip')))
with self.assertRaises(datastore_errors.BadValueError):
req.put()
# inputs_ref without server/namespace.
req = _gen_request(
properties=_gen_properties(inputs_ref=task_request.FilesRef()))
with self.assertRaises(datastore_errors.BadValueError):
req.put()
    # Without a digest or a command.
req = _gen_request(
properties=_gen_properties(
command=[],
inputs_ref=task_request.FilesRef(
isolatedserver='https://isolateserver.appspot.com',
namespace='default-gzip^^^')))
with self.assertRaises(datastore_errors.BadValueError):
req.put()
# For 'sha256-GCP', the length must be 64.
req = _gen_request(
properties=_gen_properties(
command=[],
inputs_ref=task_request.FilesRef(
isolated='deadbeefdeadbeefdeadbeefdeadbeefdeadbeef',
isolatedserver='foo-bar',
namespace='sha256-GCP')))
with self.assertRaises(datastore_errors.BadValueError):
req.put()
# For 'sha256-GCP', the isolatedserver value must not contain '://'.
req = _gen_request(
properties=_gen_properties(
command=[],
inputs_ref=task_request.FilesRef(
isolated='dead' * (64 / 4),
isolatedserver='foo://bar',
namespace='sha256-GCP')))
with self.assertRaises(datastore_errors.BadValueError):
req.put()
def test_request_bad_cas_input_root(self):
def _gen_request_with_cas_input_root(cas_instance, digest):
return _gen_request(
properties=_gen_properties(
inputs_ref=None, # inputs_ref can't be set with cas_input_root.
cas_input_root=task_request.CASReference(
cas_instance=cas_instance, digest=digest)))
valid_cas_instance = 'projects/test/instances/default'
valid_digest = task_request.Digest(hash='12345', size_bytes=1)
# TaskRequest with a valid cas_input_root.
_gen_request_with_cas_input_root(
cas_instance=valid_cas_instance, digest=valid_digest).put()
# Missing cas_instance.
with self.assertRaises(datastore_errors.BadValueError):
_gen_request_with_cas_input_root(
cas_instance=None, digest=valid_digest).put()
# Invalid cas_instance.
with self.assertRaises(datastore_errors.BadValueError):
_gen_request_with_cas_input_root(
cas_instance='invalid_instance_name', digest=valid_digest).put()
# Missing digest.
with self.assertRaises(datastore_errors.BadValueError):
_gen_request_with_cas_input_root(
cas_instance=valid_cas_instance, digest=None).put()
# Missing digest.hash.
with self.assertRaises(datastore_errors.BadValueError):
_gen_request_with_cas_input_root(
cas_instance=valid_cas_instance,
digest=task_request.Digest(hash=None, size_bytes=1)).put()
# Missing digest.size_bytes.
with self.assertRaises(datastore_errors.BadValueError):
_gen_request_with_cas_input_root(
cas_instance=valid_cas_instance,
digest=task_request.Digest(hash='12345', size_bytes=None)).put()
def test_request_conflict_inputs(self):
req = _gen_request(
properties=_gen_properties(
inputs_ref=task_request.FilesRef(
isolated='0123456789012345678901234567890123456789',
isolatedserver=u'https://isolateserver.appspot.com',
namespace=u'default-gzip'),
cas_input_root=task_request.CASReference(
cas_instance='projects/test/instances/default',
digest=task_request.Digest(hash='12345', size_bytes=1)),
))
with self.assertRaises(datastore_errors.BadValueError) as e:
req.put()
self.assertEqual(e.exception.message,
"can't set both inputs_ref and cas_input_root")
def test_request_bad_pubsub(self):
_gen_request(pubsub_topic=u'projects/a/topics/abc').put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(pubsub_topic=u'a')
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(pubsub_topic=u'projects/a/topics/ab').put()
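    # 'projects/' (9) + 1004 characters + '/topics/abc' (11) is exactly 1024
    # characters, which appears to be the limit.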
_gen_request(pubsub_topic=u'projects/' + u'a' * 1004 + u'/topics/abc').put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(pubsub_topic=u'projects/' + u'a' * 1005 +
u'/topics/abc').put()
def test_request_bad_service_account(self):
_gen_request(service_account=u'none').put()
_gen_request(service_account=u'bot').put()
_gen_request(service_account=u'joe@localhost').put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(service_account=u'joe').put()
_gen_request(service_account=u'joe@' + u'l' * 124).put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(service_account=u'joe@' + u'l' * 125).put()
def test_request_bad_tags(self):
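    # A tag must be in 'key:value' form.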
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(manual_tags=['a']).put()
def test_request_bad_tags_too_many(self):
_gen_request(manual_tags=['a:b'] * 256).put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(manual_tags=['a:b'] * 257).put()
def test_request_bad_tags_too_long(self):
# Minus 2 for the 'a:' prefix.
l = task_request._TAG_LENGTH - 2
_gen_request(manual_tags=['a:' + 'b' * l]).put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(manual_tags=['a:' + 'a' * (l + 1)]).put()
def test_request_bad_realm(self):
_gen_request(realm=None).put()
_gen_request(realm='test:realm').put()
with self.assertRaises(datastore_errors.BadValueError):
_gen_request(realm='invalid_realm').put()
def test_resultdb_enable(self):
request = _gen_request(resultdb=task_request.ResultDBCfg(enable=True))
actual = swarming_pb2.TaskRequest()
request.to_proto(actual)
self.assertTrue(actual.resultdb.enable)
def test_execution_deadline(self):
self.mock_now(datetime.datetime(2020, 1, 2, 3, 4, 5))
request = _gen_request()
self.assertEqual(request.execution_deadline,
datetime.datetime(2020, 1, 2, 3, 5, 35))
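    # 90 seconds after creation, which matches expiration (30s) + execution
    # timeout (30s) + grace period (30s) in the defaults.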
def test_validate_priority(self):
with self.assertRaises(TypeError):
task_request.validate_priority(None)
with self.assertRaises(TypeError):
task_request.validate_priority('1')
with self.assertRaises(datastore_errors.BadValueError):
task_request.validate_priority(-1)
with self.assertRaises(datastore_errors.BadValueError):
task_request.validate_priority(task_request.MAXIMUM_PRIORITY + 1)
task_request.validate_priority(0)
task_request.validate_priority(1)
task_request.validate_priority(task_request.MAXIMUM_PRIORITY)
def test_datetime_to_request_base_id(self):
now = datetime.datetime(2012, 1, 2, 3, 4, 5, 123456)
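    # 63169445123 ms since _BEGINING_OF_THE_WORLD, shifted left 20 bits to
    # leave room for 16 bits of randomness and 4 bits of schema version.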
self.assertEqual(0xeb5313d0300000,
task_request.datetime_to_request_base_id(now))
def test_convert_to_request_key(self):
"""Indirectly tested by API."""
now = datetime.datetime(2012, 1, 2, 3, 4, 5, 123456)
key = task_request.convert_to_request_key(now)
self.assertEqual(9157134072765480958, key.id())
def test_request_key_to_datetime(self):
key = ndb.Key(task_request.TaskRequest, 0x7f14acec2fcfffff)
# Resolution is only kept at millisecond level compared to
# datetime_to_request_base_id() by design.
self.assertEqual(
datetime.datetime(2012, 1, 2, 3, 4, 5, 123000),
task_request.request_key_to_datetime(key))
def test_request_id_to_key(self):
# Simple XOR.
self.assertEqual(
ndb.Key(task_request.TaskRequest, 0x7f14acec2fcfffff),
task_request.request_id_to_key(0xeb5313d0300000))
def test_secret_bytes(self):
task_request.SecretBytes(secret_bytes='a' * (20 * 1024)).put()
with self.assertRaises(datastore_errors.BadValueError):
task_request.SecretBytes(secret_bytes='a' * (20 * 1024 + 1)).put()
def test_cron_delete_old_task_requests(self):
# Creating 1000 tasks would make this test significantly slower.
self.mock(task_request, '_TASKS_DELETE_CHUNK_SIZE', 5)
now = utils.utcnow()
task_ids = []
for i in range(14):
self.mock_now(now, i)
request = _gen_request_slices()
request.key = task_request.new_request_key()
request.put()
task_ids.append(task_pack.pack_request_key(request.key))
    # Use an 11 second offset so that entities 12 and 13 are not deleted. The
    # 12 deletions are enqueued as 3 GAE tasks of at most 5 items each.
self.mock_now(now + task_request._OLD_TASK_REQUEST_CUT_OFF, 11)
self.assertEqual(12, task_request.cron_delete_old_task_requests())
expected = [
(
('/internal/taskqueue/cleanup/tasks/delete', 'delete-tasks'),
{
'payload': utils.encode_to_json({u'task_ids': task_ids[0:5]})
},
),
(
('/internal/taskqueue/cleanup/tasks/delete', 'delete-tasks'),
{
'payload': utils.encode_to_json({u'task_ids': task_ids[5:10]})
},
),
(
('/internal/taskqueue/cleanup/tasks/delete', 'delete-tasks'),
{
'payload': utils.encode_to_json({u'task_ids': task_ids[10:12]})
},
),
]
# task_ids[12:14] are not touched.
self.assertEqual(expected, self._enqueue_calls)
self._enqueue_calls = []
def test_task_delete_tasks(self):
# The data here should be the same as what is passed to the task queue in
# test_cron_delete_old_task_requests.
class Foo(ndb.Model):
pass
task_ids = []
for _ in range(5):
request = _gen_request_slices()
request.key = task_request.new_request_key()
request.put()
# Create a dummy child entity to ensure it's deleted too.
Foo(parent=request.key, id=1).put()
task_ids.append(task_pack.pack_request_key(request.key))
self.assertEqual(5, task_request.task_delete_tasks(task_ids))
self.assertEqual(0, task_request.TaskRequest.query().count())
self.assertEqual(0, Foo.query().count())
def test_task_bq_empty(self):
# Empty, nothing is done.
start = utils.utcnow()
end = start + datetime.timedelta(seconds=60)
self.assertEqual(0, task_request.task_bq(start, end))
def test_task_bq(self):
def getrandbits(i):
self.assertEqual(i, 16)
return 0x7766
self.mock(random, 'getrandbits', getrandbits)
payloads = []
def send_to_bq(table_name, rows):
self.assertEqual('task_requests', table_name)
payloads.append(rows)
self.mock(bq_state, 'send_to_bq', send_to_bq)
    # Generate two task requests.
now = datetime.datetime(2014, 1, 2, 3, 4, 5, 6)
start = self.mock_now(now, 10)
request_1 = _gen_request()
request_1.key = task_request.new_request_key()
run_1_id = request_1.task_id[:-1] + '1'
request_1.put()
self.mock_now(now, 20)
request_2 = _gen_request(parent_task_id=run_1_id)
request_2.key = task_request.new_request_key()
request_2.put()
end = self.mock_now(now, 30)
self.assertEqual(2, task_request.task_bq(start, end))
self.assertEqual(1, len(payloads), payloads)
actual_rows = payloads[0]
self.assertEqual(2, len(actual_rows))
expected_ids = [
# No root IDs on task 1.
(request_1.task_id, '', ''),
# Task 1 is the root of Task 2.
(request_2.task_id, request_1.task_id, run_1_id),
]
self.assertEqual(
expected_ids,
[(t.task_id, t.root_task_id, t.root_run_id) for _, t in actual_rows])
def test_yield_request_keys_by_parent_task_id(self):
parent_request = _gen_request()
parent_request.key = task_request.new_request_key()
parent_request.put()
parent_summary_key = task_pack.request_key_to_result_summary_key(
parent_request.key)
parent_summary_id = task_pack.pack_result_summary_key(parent_summary_key)
parent_run_key = task_pack.result_summary_key_to_run_result_key(
parent_summary_key, 1)
parent_run_id = task_pack.pack_run_result_key(parent_run_key)
child_request_1_key = _gen_request(parent_task_id=parent_run_id).put()
child_request_2_key = _gen_request(parent_task_id=parent_run_id).put()
it = task_request.yield_request_keys_by_parent_task_id(parent_summary_id)
expected = [child_request_1_key, child_request_2_key]
self.assertEqual(sorted(expected), sorted([k for k in it]))
def test_normalize_or_dimensions(self):
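    # Values inside a '|' group are sorted, so logically equivalent 'or'
    # dimensions compare equal.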
dim1 = _gen_request(
properties=_gen_properties(dimensions={u'foo':
[u'a|c|b', u'xyz']})).task_slice(0).properties.dimensions
dim2 = _gen_request(
properties=_gen_properties(dimensions={u'foo':
[u'xyz', u'c|b|a']})).task_slice(0).properties.dimensions
expected = {u'foo': [u'a|b|c', u'xyz']}
self.assertEqual(dim1, expected)
self.assertEqual(dim1, dim2)
if __name__ == '__main__':
if '-v' in sys.argv:
unittest.TestCase.maxDiff = None
logging.basicConfig(
level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
unittest.main()
| apache-2.0 | -3,309,384,925,261,021,000 | 36.843257 | 80 | 0.60541 | false |
Clemson-DPA/dpa-pipe | dpa/app/session.py | 1 | 9786 |
from abc import ABCMeta, abstractmethod, abstractproperty
import importlib
import os
import shlex
import subprocess
import socket
import time
import rpyc
from dpa.app.entity import EntityRegistry
from dpa.env.vars import DpaVars
from dpa.ptask.area import PTaskArea
from dpa.ptask import PTaskError, PTask
from dpa.singleton import Singleton
# -----------------------------------------------------------------------------
class SessionRegistry(Singleton):
# -------------------------------------------------------------------------
def init(self):
self._registry = {}
# -------------------------------------------------------------------------
def current(self):
for registered_cls in self._registry.values():
if registered_cls.current():
return registered_cls()
return None
# -------------------------------------------------------------------------
def register(self, cls):
self._registry[cls.app_name] = cls
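    # Hypothetical usage sketch: a concrete Session subclass registers itself
    # at import time, then callers look up the active session, e.g.:
    #
    #   SessionRegistry().register(MayaSession)
    #   session = SessionRegistry().current()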
# -----------------------------------------------------------------------------
class Session(object):
__metaclass__ = ABCMeta
app_name = None
# -------------------------------------------------------------------------
@classmethod
def current(cls):
return None
# -------------------------------------------------------------------------
def __init__(self):
pass
# -------------------------------------------------------------------------
@abstractmethod
def close(self):
"""Close the current file."""
# -------------------------------------------------------------------------
def list_entities(self, categories=None):
"""List entities in the session."""
entities = []
entity_classes = EntityRegistry().get_entity_classes(
self.__class__.app_name)
for entity_class in entity_classes:
entities.extend(entity_class.list(self))
if categories:
filtered = [e for e in entities if e.category in categories]
else:
filtered = entities
return filtered
# -------------------------------------------------------------------------
@classmethod
    def open_file(cls, filepath):
"""Open a new session with the supplied file."""
# -------------------------------------------------------------------------
@abstractmethod
def save(self, filepath=None):
"""Save the current session. Save to the file path if provided."""
# -------------------------------------------------------------------------
@abstractproperty
def in_session(self):
"""Returns True if inside a current app session."""
# -------------------------------------------------------------------------
def init_module(self, module_path):
_module = None
if self.in_session:
try:
_module = importlib.import_module(module_path)
except ImportError:
pass # will raise below
if not _module:
raise SessionError(
"Failed to initialize session. " + \
"'{mod}' module could not be imported.".format(mod=module_path)
)
return _module
# -------------------------------------------------------------------------
def require_executable(self, executable):
"""Returns the full path for the supplied executable name."""
(path, file_name) = os.path.split(executable)
# path already included
if path:
if not os.path.isfile(executable):
raise SessionError("Unable to locate executable: " + executable)
elif not os.access(executable, os.X_OK):
raise SessionError("File is not executable: " + executable)
else:
return executable
else:
bin_paths = DpaVars.path()
bin_paths.get()
for path in bin_paths.list:
executable_path = os.path.join(path, executable)
if (os.path.isfile(executable_path) and
os.access(executable_path, os.X_OK)):
return executable_path
raise SessionError("Unable to locate executable: " + executable)
# -------------------------------------------------------------------------
@property
def app_name(self):
return self.__class__.app_name
# -------------------------------------------------------------------------
@property
def ptask_area(self):
"""Return the current ptask area for this session."""
if not hasattr(self, '_ptask_area'):
self._ptask_area = PTaskArea.current()
return self._ptask_area
# -------------------------------------------------------------------------
@property
def ptask(self):
if not hasattr(self, '_ptask'):
ptask_area = self.ptask_area
if not ptask_area.spec:
self._ptask = None
else:
try:
self._ptask = PTask.get(ptask_area.spec)
            except PTaskError as e:
                raise SessionError("Unable to determine ptask: " + str(e))
return self._ptask
# -------------------------------------------------------------------------
@property
def ptask_version(self):
"""Return the current ptask version for this session."""
if not hasattr(self, '_ptask_version'):
ptask = self.ptask
if not ptask:
self._ptask_version = None
else:
self._ptask_version = ptask.latest_version
return self._ptask_version
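# Usage sketch (illustrative only; ``MayaSession`` is a hypothetical subclass,
# not part of this module): a concrete session sets ``app_name``, implements
# the abstract members and registers itself so SessionRegistry can find it.
#
#     class MayaSession(Session):
#         app_name = 'maya'
#         def close(self): pass
#         def save(self, filepath=None): pass
#         @property
#         def in_session(self): return True
#
#     SessionRegistry().register(MayaSession)
#     session = SessionRegistry().current()  # MayaSession() if current() is true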
# -----------------------------------------------------------------------------
class RemoteMixin(object):
__metaclass__ = ABCMeta
# -------------------------------------------------------------------------
def __init__(self, remote=False):
self._remote = remote
# -------------------------------------------------------------------------
def __del__(self):
self.shutdown()
# -------------------------------------------------------------------------
def __enter__(self):
return self
# -------------------------------------------------------------------------
def __exit__(self, exc_type, exc_value, traceback):
self.shutdown()
# -------------------------------------------------------------------------
@staticmethod
def _get_port():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(("",0))
port = sock.getsockname()[1]
sock.close()
return port
# -------------------------------------------------------------------------
@property
def remote(self):
"""Returns True if in a session, False otherwise."""
return self._remote
# -------------------------------------------------------------------------
@property
def remote_connection(self):
if not hasattr(self, '_remote_connection'):
self._remote_connection = self._connect_remote()
return self._remote_connection
# -------------------------------------------------------------------------
@abstractproperty
def server_executable(self):
"""The executable for starting the remote app server."""
# -------------------------------------------------------------------------
def shutdown(self):
if hasattr(self, '_remote_connection'):
try:
self._remote_connection.root.shutdown()
except EOFError:
# this is the expected error on shutdown
pass
else:
self._remote_connection = None
# -------------------------------------------------------------------------
def init_module(self, module_path):
_module = None
if self.remote:
# need to give time for standalone app to import properly
tries = 0
            while not _module and tries < 30:
try:
self.remote_connection.execute("import " + module_path)
_module = getattr(
self.remote_connection.modules, module_path)
break
except ImportError:
tries += 1
time.sleep(1)
if not _module:
self.shutdown()
elif self.in_session:
try:
_module = importlib.import_module(module_path)
except ImportError:
pass # will raise below
if not _module:
raise SessionError(
"Failed to initialize session. " + \
"'{mod}' module could not be imported.".format(mod=module_path)
)
return _module
# -------------------------------------------------------------------------
def _connect_remote(self):
port = self._get_port()
cmd = "{cmd} {port}".format(cmd=self.server_executable, port=port)
args = shlex.split(cmd)
subprocess.Popen(args)
connection = None
tries = 0
        while not connection and tries < 30:
try:
connection = rpyc.classic.connect("localhost", port)
break
except socket.error:
tries += 1
time.sleep(1)
if not connection:
raise SessionError("Unable connect to remote session.")
return connection
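# Usage sketch (illustrative; ``SomeAppSession`` is a hypothetical class that
# mixes in RemoteMixin and provides ``server_executable``). The context
# manager protocol above guarantees the spawned rpyc server is shut down:
#
#     with SomeAppSession(remote=True) as session:
#         mod = session.init_module("some.module")  # imported in the remote app
#     # __exit__() calls shutdown(), which stops the remote server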
# -----------------------------------------------------------------------------
class SessionError(Exception):
pass
| mit | 835,993,178,283,954,700 | 30.365385 | 80 | 0.41263 | false |
h2non/paco | paco/wait.py | 1 | 3118 | # -*- coding: utf-8 -*-
import asyncio
from .assertions import isiter
from .concurrent import ConcurrentExecutor
@asyncio.coroutine
def wait(*coros_or_futures, limit=0, timeout=None, loop=None,
return_exceptions=False, return_when='ALL_COMPLETED'):
"""
Wait for the Futures and coroutine objects given by the sequence
futures to complete, with optional concurrency limit.
Coroutines will be wrapped in Tasks.
``timeout`` can be used to control the maximum number of seconds to
wait before returning. timeout can be an int or float.
If timeout is not specified or None, there is no limit to the wait time.
If ``return_exceptions`` is True, exceptions in the tasks are treated the
same as successful results, and gathered in the result list; otherwise,
the first raised exception will be immediately propagated to the
returned future.
``return_when`` indicates when this function should return.
    It must be one of the FIRST_COMPLETED, FIRST_EXCEPTION or ALL_COMPLETED
    constants of the concurrent.futures module.
All futures must share the same event loop.
This functions is mostly compatible with Python standard
``asyncio.wait()``.
Arguments:
*coros_or_futures (iter|list):
an iterable collection yielding coroutines functions.
limit (int):
optional concurrency execution limit. Use ``0`` for no limit.
timeout (int/float):
maximum number of seconds to wait before returning.
return_exceptions (bool):
exceptions in the tasks are treated the same as successful results,
instead of raising them.
return_when (str):
indicates when this function should return.
loop (asyncio.BaseEventLoop):
optional event loop to use.
Returns:
tuple: Returns two sets of Future: (done, pending).
Raises:
TypeError: in case of invalid coroutine object.
ValueError: in case of empty set of coroutines or futures.
TimeoutError: if execution takes more than expected.
Usage::
async def sum(x, y):
return x + y
done, pending = await paco.wait(
sum(1, 2),
sum(3, 4))
[task.result() for task in done]
# => [3, 7]
"""
# Support iterable as first argument for better interoperability
if len(coros_or_futures) == 1 and isiter(coros_or_futures[0]):
coros_or_futures = coros_or_futures[0]
    # If no coroutines/futures were given, raise ValueError.
    # Mimics asyncio.wait() behaviour.
if len(coros_or_futures) == 0:
raise ValueError('paco: set of coroutines/futures is empty')
# Create concurrent executor
pool = ConcurrentExecutor(limit=limit, loop=loop,
coros=coros_or_futures)
    # Wait until all the tasks finish
return (yield from pool.run(timeout=timeout,
return_when=return_when,
return_exceptions=return_exceptions))
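# Illustrative sketch of the keyword options (not part of the library source):
#
#     import asyncio, paco
#
#     async def div(x, y):
#         return x / y
#
#     async def main():
#         done, pending = await paco.wait(
#             div(4, 2), div(1, 0),
#             limit=2, timeout=5,
#             return_exceptions=True,        # ZeroDivisionError is collected
#             return_when='ALL_COMPLETED')
#
#     asyncio.get_event_loop().run_until_complete(main())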
| mit | 1,045,240,907,255,369,900 | 36.119048 | 79 | 0.650096 | false |
BehavioralInsightsTeam/edx-platform | lms/djangoapps/grades/apps.py | 16 | 1502 | """
Grades Application Configuration
Signal handlers are connected here.
"""
from django.apps import AppConfig
from django.conf import settings
from edx_proctoring.runtime import set_runtime_service
from openedx.core.djangoapps.plugins.constants import ProjectType, SettingsType, PluginURLs, PluginSettings
class GradesConfig(AppConfig):
"""
Application Configuration for Grades.
"""
name = u'lms.djangoapps.grades'
plugin_app = {
PluginURLs.CONFIG: {
ProjectType.LMS: {
PluginURLs.NAMESPACE: u'grades_api',
PluginURLs.REGEX: u'api/grades/',
PluginURLs.RELATIVE_PATH: u'api.urls',
}
},
PluginSettings.CONFIG: {
ProjectType.LMS: {
SettingsType.AWS: {PluginSettings.RELATIVE_PATH: u'settings.aws'},
SettingsType.COMMON: {PluginSettings.RELATIVE_PATH: u'settings.common'},
SettingsType.TEST: {PluginSettings.RELATIVE_PATH: u'settings.test'},
}
}
}
def ready(self):
"""
Connect handlers to recalculate grades.
"""
# Can't import models at module level in AppConfigs, and models get
# included from the signal handlers
from .signals import handlers # pylint: disable=unused-variable
if settings.FEATURES.get('ENABLE_SPECIAL_EXAMS'):
from .services import GradesService
set_runtime_service('grades', GradesService())
| agpl-3.0 | -5,582,814,633,632,640,000 | 32.377778 | 107 | 0.633156 | false |
akopich/spark | python/pyspark/java_gateway.py | 56 | 5851 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import atexit
import os
import sys
import select
import signal
import shlex
import socket
import platform
from subprocess import Popen, PIPE
if sys.version >= '3':
xrange = range
from py4j.java_gateway import java_import, JavaGateway, GatewayClient
from pyspark.find_spark_home import _find_spark_home
from pyspark.serializers import read_int
def launch_gateway(conf=None):
"""
launch jvm gateway
:param conf: spark configuration passed to spark-submit
:return:
"""
if "PYSPARK_GATEWAY_PORT" in os.environ:
gateway_port = int(os.environ["PYSPARK_GATEWAY_PORT"])
else:
SPARK_HOME = _find_spark_home()
# Launch the Py4j gateway using Spark's run command so that we pick up the
# proper classpath and settings from spark-env.sh
on_windows = platform.system() == "Windows"
script = "./bin/spark-submit.cmd" if on_windows else "./bin/spark-submit"
command = [os.path.join(SPARK_HOME, script)]
if conf:
for k, v in conf.getAll():
command += ['--conf', '%s=%s' % (k, v)]
submit_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
if os.environ.get("SPARK_TESTING"):
submit_args = ' '.join([
"--conf spark.ui.enabled=false",
submit_args
])
command = command + shlex.split(submit_args)
# Start a socket that will be used by PythonGatewayServer to communicate its port to us
callback_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
callback_socket.bind(('127.0.0.1', 0))
callback_socket.listen(1)
callback_host, callback_port = callback_socket.getsockname()
env = dict(os.environ)
env['_PYSPARK_DRIVER_CALLBACK_HOST'] = callback_host
env['_PYSPARK_DRIVER_CALLBACK_PORT'] = str(callback_port)
# Launch the Java gateway.
# We open a pipe to stdin so that the Java gateway can die when the pipe is broken
if not on_windows:
# Don't send ctrl-c / SIGINT to the Java gateway:
def preexec_func():
signal.signal(signal.SIGINT, signal.SIG_IGN)
proc = Popen(command, stdin=PIPE, preexec_fn=preexec_func, env=env)
else:
# preexec_fn not supported on Windows
proc = Popen(command, stdin=PIPE, env=env)
gateway_port = None
# We use select() here in order to avoid blocking indefinitely if the subprocess dies
# before connecting
while gateway_port is None and proc.poll() is None:
timeout = 1 # (seconds)
readable, _, _ = select.select([callback_socket], [], [], timeout)
if callback_socket in readable:
gateway_connection = callback_socket.accept()[0]
# Determine which ephemeral port the server started on:
gateway_port = read_int(gateway_connection.makefile(mode="rb"))
gateway_connection.close()
callback_socket.close()
if gateway_port is None:
raise Exception("Java gateway process exited before sending the driver its port number")
# In Windows, ensure the Java child processes do not linger after Python has exited.
# In UNIX-based systems, the child process can kill itself on broken pipe (i.e. when
# the parent process' stdin sends an EOF). In Windows, however, this is not possible
# because java.lang.Process reads directly from the parent process' stdin, contending
# with any opportunity to read an EOF from the parent. Note that this is only best
# effort and will not take effect if the python process is violently terminated.
if on_windows:
# In Windows, the child process here is "spark-submit.cmd", not the JVM itself
# (because the UNIX "exec" command is not available). This means we cannot simply
# call proc.kill(), which kills only the "spark-submit.cmd" process but not the
# JVMs. Instead, we use "taskkill" with the tree-kill option "/t" to terminate all
# child processes in the tree (http://technet.microsoft.com/en-us/library/bb491009.aspx)
def killChild():
Popen(["cmd", "/c", "taskkill", "/f", "/t", "/pid", str(proc.pid)])
atexit.register(killChild)
# Connect to the gateway
gateway = JavaGateway(GatewayClient(port=gateway_port), auto_convert=True)
# Import the classes used by PySpark
java_import(gateway.jvm, "org.apache.spark.SparkConf")
java_import(gateway.jvm, "org.apache.spark.api.java.*")
java_import(gateway.jvm, "org.apache.spark.api.python.*")
java_import(gateway.jvm, "org.apache.spark.ml.python.*")
java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
# TODO(davies): move into sql
java_import(gateway.jvm, "org.apache.spark.sql.*")
java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
java_import(gateway.jvm, "scala.Tuple2")
return gateway
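# Usage sketch (illustrative only): PySpark normally calls launch_gateway()
# itself when a SparkContext is created; invoked directly it looks like this.
#
#     gateway = launch_gateway()
#     conf = gateway.jvm.org.apache.spark.SparkConf()  # via the imports above
#     conf.setAppName("demo")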
| apache-2.0 | 2,350,691,943,521,753,000 | 45.070866 | 100 | 0.655785 | false |
MatriX-Coder/wsploit | modules/wjoomlacomponents.py | 1 | 8322 | #!/usr/bin/python
# Wsploit Project
'''
this is simple joomla
components scanner
'''
try:
import urllib2, Queue
except:
print 'You need urllib2 and Queue librarys installed.'
try:
from threading import Thread
except:
print 'You need threading library installed.'
try:
from time import sleep
except:
print 'You need time library installed.'
paths = [
'/components/com_tag',
'/components/com_virtuemart',
'/components/com_jvehicles',
'/components/com_s5clanroster',
'/components/com_fireboard',
'/components/com_fabrik',
'/components/com_jinc',
'/components/com_xcloner-backupandrestore',
'/components/com_dshop',
'/components/com_ponygallery',
'/components/com_bearleague',
'/components/com_obsuggest',
'/components/com_alameda',
'/components/com_estateagent',
'/components/com_collector',
'/components/com_qcontacts',
'/components/com_niceajaxpoll',
'/components/com_xmap',
'/components/com_team',
'/components/com_joomnik',
'/components/com_question',
'/components/com_jmsfileseller',
'/components/com_rsfiles',
'/components/com_versioning',
'/components/com_hello',
'/components/com_calcbuilder',
'/components/com_xmovie',
'/components/com_people',
'/components/com_idoblog',
'/components/com_adsmanager',
'/components/com_xgallery',
'/components/com_alfurqan15x',
'/components/com_alfurqan',
'/components/com_billyportfolio',
'/components/com_jimtawl',
'/components/com_content',
'/components/com_jfuploader',
'/components/com_kunena',
'/components/com_jooproperty',
'/components/com_jsupport',
'/components/com_markt',
'/components/com_img',
'/components/com_clanlist',
'/components/com_clan',
'/components/com_ckforms',
'/components/com_dcnews',
'/components/com_connect',
'/components/com_rsappt_pro2',
'/components/com_techfolio',
'/components/com_zcalendar',
'/components/com_tpjobs',
'/components/com_simpleshop',
'/components/com_sef',
'/components/com_searchlog',
'/components/com_contact',
'/components/com_enmasse',
'/components/com_elite_experts',
'/components/com_ezautos',
'/components/com_jgen',
'/components/com_jphone',
'/components/com_mosets',
'/components/com_jefaqpro',
'/components/com_picsell',
'/components/com_ongallery',
'/components/com_equipment',
'/components/com_zoomportfolio',
'/components/com_amblog',
'/components/com_joltcard',
'/components/com_jp_jobs',
'/components/com_bfquiztrial',
'/components/com_qpersonel',
'/components/com_pandafminigames',
'/components/com_golfcourseguid',
'/components/com_jejob',
'/components/com_jeajaxeventcalendar',
'/components/com_jradio',
'/components/com_spidercatalog',
'/components/com_commedia',
'/components/com_fss',
'/components/com_icagenda',
'/components/com_spidercalendar',
'/components/com_joomgalaxy',
'/components/com_ornekek',
'/components/com_weblinks',
'/components/com_rokmodule',
'/components/com_discussions',
'/components/com_hmcommunity',
'/components/com_eslamiat',
'/components/com_listing',
'/components/com_jeemasms',
'/components/com_yjcontactus',
'/components/com_timereturns',
'/components/com_jce',
'/components/com_joomtouch',
'/components/com_jdirectory',
'/components/com_jesubmit',
'/components/com_sobi2',
'/components/com_acooldebate',
'/components/com_booklibrary',
'/components/com_acymailing',
'/components/com_doqment',
'/components/com_allcinevid',
'/components/com_jotloader',
'/components/com_jeauto',
'/components/com_ccboard',
'/components/com_ccinvoices',
'/components/com_flipwall',
'/components/com_sponsorwall',
'/components/com_cbe',
'/components/com_jscalendar',
'/components/com_restaurantguide',
'/components/com_nkc',
'/components/com_aardvertiser',
'/components/com_clantools',
'/components/com_remository',
'/components/com_dateconverter',
'/components/com_wmtpic',
'/components/com_donateprocess',
'/components/com_gamesbox',
'/components/com_jcafe',
'/components/com_awd_song',
'/components/com_picasa2gallery',
'/components/com_ybggal',
'/components/com_joomdocs',
'/components/com_answers',
'/components/com_galleryxml',
'/components/com_oziogallery2',
'/components/com_listbingo',
'/components/com_easygb',
'/components/com_jtickets',
'/components/com_jesectionfinder',
'/components/com_realtyna',
'/components/com_community',
'/components/com_jomestate',
'/components/com_cinema',
'/components/com_jstore',
'/components/com_annonces',
'/components/com_lead',
'/components/com_sar_news',
'/components/com_chronocontact',
'/components/com_chronoconnectivity',
'/components/com_djartgallery',
'/components/com_quran',
'/components/com_g2bridge',
'/components/com_reservations',
'/components/com_jepoll',
'/components/com_mycar',
'/components/com_mediqna',
'/components/com_zelig',
'/components/com_bookmarks',
'/components/com_hotproperty',
'/components/com_jombib',
'/components/com_store',
'/components/com_mosforms',
'/components/com_comprofiler',
'/components/com_crowdsource',
'/components/com_camp',
'/components/com_mscomment',
'/components/com_extcalendar',
'/components/com_imoti',
'/components/com_product',
'/components/com_event',
'/components/com_simpledownload',
'/components/com_news',
'/components/com_article',
'/components/com_jequoteform',
'/components/com_konsultasi',
'/components/com_sebercart',
'/components/com_php',
'/components/com_mytube',
'/components/com_jbudgetsmagic',
'/components/com_surveymanager',
'/components/com_jreservation',
'/components/com_foobla_suggestions',
'/components/com_djcatalog',
'/components/com_turtushout',
'/components/com_alphauserpoints',
'/components/com_lucygames',
'/components/com_bfsurvey_profree',
'/components/com_tpdugg',
'/components/com_joomloc',
'/components/com_joomlub',
'/components/com_artportal',
'/components/com_agora',
'/components/com_gameserver',
'/components/com_digifolio',
'/components/com_bca-rss-syndicator',
'/components/com_expose',
'/components/com_equotes',
'/components/com_media',
'/components/com_misterestate',
'/components/com_wrapper',
'/components/com_mailto',
'/components/com_autartimonial',
'/components/com_artforms',
'/components/com_redshop',
'/components/com_staticxt',
'/components/com_spa',
'/components/com_jomtube',
'/components/com_golfcourseguide',
'/components/com_huruhelpdesk',
'/components/com_joomdle',
'/components/com_youtube',
'/components/com_joomla-visites',
'/components/com_ttvideo',
'/components/com_appointinator',
'/components/com_photomapgallery',
'/components/com_spielothek',
'/components/com_pbbooking',
'/components/com_beamospetition',
'/components/com_neorecruit',
'/components/com_cgtestimonial',
'/components/com_jgrid',
'/components/com_zina',
'/components/com_pro_desk',
'/components/com_user',
'/components/com_k2',
'/components/com_rsbook_15',
'/components/com_gk3_photoslide',
'/components/com_jvideodirect',
'/components/com_jcalpro',
'/components/com_banners',
'/components/com_datsogallery',
'/components/com_joomradio',
'/components/com_jfbconnect',
'/components/com_myblog',
'/components/com_phocamaps',
'/components/com_contact_enhanced',
'/components/com_aicontactsafe',
'/components/com_poll']
def one():
def test(target,path):
if 'http://' not in target:
target = 'http://'+target
bb = target+path
try:
a = urllib2.urlopen(bb)
c = a.getcode()
if c == 200:
print 'Found ---> '+path[12:]
except urllib2.URLError:
pass
thrdlst = []
target = raw_input('\nEnter site : ')
for path in paths:
t = Thread(target=test , args=(target,path))
t.start()
thrdlst.append(t)
sleep(0.009)
for b in thrdlst:
b.join()
def lista():
path = raw_input('\nEnter List Path : ')
sites = open(path,'r')
sites = sites.readlines()
print '\n'
for site in sites:
if 'http://' not in site:
site = 'http://'+site
site = site.strip()
print '\n[*] Target : %s\n' % site
for path in paths:
bb = site+path
try:
a = urllib2.urlopen(bb)
c = a.getcode()
if c == 200:
print 'Found ---> '+path[12:]
except urllib2.URLError:
pass
def init():
print '\n[1]-Single URL'
print '[2]-List Of URLs\n'
line_1 = "Enter Option : "
choose = raw_input(line_1)
if choose.isdigit():
choose = int(choose)
pass
else :
print "Choose From List Bro"
exit()
if choose == 1:
one()
if choose == 2:
lista()
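# Optional entry point (assumption: inside the wsploit framework init() is
# invoked by the module loader; this guard only helps standalone runs).
if __name__ == '__main__':
    init()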
| artistic-2.0 | 5,440,260,959,330,500,000 | 24.685185 | 55 | 0.727469 | false |
anentropic/django-oscar | tests/integration/offer/availability_tests.py | 40 | 1143 | from django.test import TestCase
from oscar.apps.offer import models
from oscar.test.factories import (
create_order, OrderDiscountFactory, UserFactory)
class TestAPerUserConditionalOffer(TestCase):
def setUp(self):
self.offer = models.ConditionalOffer(max_user_applications=1)
self.user = UserFactory()
def test_is_available_with_no_applications(self):
self.assertTrue(self.offer.is_available())
def test_max_applications_is_correct_when_no_applications(self):
self.assertEqual(1, self.offer.get_max_applications(self.user))
def test_max_applications_is_correct_when_equal_applications(self):
order = create_order(user=self.user)
OrderDiscountFactory(
order=order, offer_id=self.offer.id, frequency=1)
self.assertEqual(0, self.offer.get_max_applications(self.user))
def test_max_applications_is_correct_when_more_applications(self):
order = create_order(user=self.user)
OrderDiscountFactory(
order=order, offer_id=self.offer.id, frequency=5)
self.assertEqual(0, self.offer.get_max_applications(self.user))
| bsd-3-clause | -853,793,773,920,993,200 | 37.1 | 71 | 0.712161 | false |
uskudnik/ggrc-core | src/ggrc/migrations/versions/20130910232450_53ef72c8a867_remove_old_audit_tab.py | 2 | 13296 | """Remove old audit tables
Revision ID: 53ef72c8a867
Revises: 526117e15ce4
Create Date: 2013-09-10 23:24:50.751098
"""
# revision identifiers, used by Alembic.
revision = '53ef72c8a867'
down_revision = '526117e15ce4'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
NOT_NULL_COLS = [
('control_assessments', 'pbc_list_id'),
('control_assessments', 'control_id'),
('system_controls', 'system_id'),
('system_controls', 'control_id'),
('responses', 'request_id'),
('responses', 'system_id'),
]
EXPLICIT_INDEXES = [
('control_assessments', 'control_id', 'controls', 'control_assessments_ibfk_1'),
('control_assessments', 'pbc_list_id', 'pbc_lists', 'control_assessments_ibfk_2'),
('system_controls', 'system_id', 'systems', 'system_controls_ibfk_3'),
('system_controls', 'control_id', 'controls', 'system_controls_ibfk_1'),
('responses', 'request_id', 'requests', 'responses_ibfk_1'),
('responses', 'system_id', 'systems', 'responses_ibfk_2'),
]
UNIQUE_CONSTRAINTS = [('control_assessments', ['pbc_list_id', 'control_id']),
('system_controls', ['system_id', 'control_id']),
('responses', ['request_id', 'system_id']),
]
def create_explicit_index(table, column, referred_table, constraint_name):
" Explicit indexes need to be created to work around http://bugs.mysql.com/bug.php?id=21395 "
op.drop_constraint(constraint_name, table, type_='foreignkey')
op.create_index('ix_' + column, table, [column])
op.create_foreign_key(constraint_name, table, referred_table, [column], ['id'])
def drop_explicit_index(table, column, referred_table, constraint_name):
op.drop_constraint(constraint_name, table, type_='foreignkey')
op.drop_index('ix_' + column, table)
op.create_foreign_key(constraint_name, table, referred_table, [column], ['id'])
def upgrade():
op.drop_table(u'system_controls')
op.drop_table(u'meetings')
op.drop_table(u'population_samples')
op.drop_table(u'responses')
op.drop_table(u'requests')
op.drop_table(u'control_assessments')
op.drop_table(u'pbc_lists')
op.drop_table(u'cycles')
op.drop_table(u'transactions')
def downgrade():
op.create_table(u'transactions',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'description', sa.TEXT(), nullable=True),
sa.Column(u'title', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'system_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_transactions_contexts'),
sa.ForeignKeyConstraint(['system_id'], [u'systems.id'], name=u'transactions_ibfk_1'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
op.create_table(u'cycles',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'description', sa.TEXT(), nullable=True),
sa.Column(u'start_at', sa.DATE(), nullable=True),
sa.Column(u'complete', sa.BOOLEAN, nullable=False),
sa.Column(u'title', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'audit_firm', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'audit_lead', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'status', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'notes', sa.TEXT(), nullable=True),
sa.Column(u'end_at', sa.DATE(), nullable=True),
sa.Column(u'program_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'report_due_at', sa.DATE(), nullable=True),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_cycles_contexts'),
sa.ForeignKeyConstraint(['program_id'], [u'programs.id'], name=u'cycles_ibfk_1'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
op.create_table(u'pbc_lists',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'audit_cycle_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['audit_cycle_id'], [u'cycles.id'], name=u'pbc_lists_ibfk_1'),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_pbc_lists_contexts'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
op.create_table(u'control_assessments',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'pbc_list_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'control_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'control_version', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'internal_tod', sa.BOOLEAN, nullable=True),
sa.Column(u'internal_toe', sa.BOOLEAN, nullable=True),
sa.Column(u'external_tod', sa.BOOLEAN, nullable=True),
sa.Column(u'external_toe', sa.BOOLEAN, nullable=True),
sa.Column(u'notes', sa.TEXT(), nullable=True),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_control_assessments_contexts'),
sa.ForeignKeyConstraint(['control_id'], [u'controls.id'], name=u'control_assessments_ibfk_1'),
sa.ForeignKeyConstraint(['pbc_list_id'], [u'pbc_lists.id'], name=u'control_assessments_ibfk_2'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
op.create_table(u'requests',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'pbc_list_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'type_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'pbc_control_code', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'pbc_control_desc', sa.TEXT(), nullable=True),
sa.Column(u'request', sa.TEXT(), nullable=True),
sa.Column(u'test', sa.TEXT(), nullable=True),
sa.Column(u'notes', sa.TEXT(), nullable=True),
sa.Column(u'company_responsible', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'auditor_responsible', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'date_requested', sa.DATETIME(), nullable=True),
sa.Column(u'status', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'control_assessment_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'response_due_at', sa.DATE(), nullable=True),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_requests_contexts'),
sa.ForeignKeyConstraint(['control_assessment_id'], [u'control_assessments.id'], name=u'requests_ibfk_1'),
sa.ForeignKeyConstraint(['pbc_list_id'], [u'pbc_lists.id'], name=u'requests_ibfk_2'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
op.create_table(u'responses',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'request_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'system_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'status', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_responses_contexts'),
sa.ForeignKeyConstraint(['request_id'], [u'requests.id'], name=u'responses_ibfk_1'),
sa.ForeignKeyConstraint(['system_id'], [u'systems.id'], name=u'responses_ibfk_2'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
op.create_table(u'population_samples',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'response_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'population_document_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'population', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'sample_worksheet_document_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'samples', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'sample_evidence_document_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_population_samples_contexts'),
sa.ForeignKeyConstraint(['population_document_id'], [u'documents.id'], name=u'population_samples_ibfk_1'),
sa.ForeignKeyConstraint(['response_id'], [u'responses.id'], name=u'population_samples_ibfk_2'),
sa.ForeignKeyConstraint(['sample_evidence_document_id'], [u'documents.id'], name=u'population_samples_ibfk_3'),
sa.ForeignKeyConstraint(['sample_worksheet_document_id'], [u'documents.id'], name=u'population_samples_ibfk_4'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
op.create_table(u'meetings',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'response_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'start_at', sa.DATETIME(), nullable=True),
sa.Column(u'calendar_url', sa.VARCHAR(length=250), nullable=True),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_meetings_contexts'),
sa.ForeignKeyConstraint(['response_id'], [u'responses.id'], name=u'meetings_ibfk_1'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
op.create_table(u'system_controls',
sa.Column(u'id', sa.INTEGER(), nullable=False),
sa.Column(u'modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'created_at', sa.DATETIME(), nullable=True),
sa.Column(u'updated_at', sa.DATETIME(), nullable=True),
sa.Column(u'system_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'control_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'state', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column(u'cycle_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(u'context_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'], name=u'fk_system_controls_contexts'),
sa.ForeignKeyConstraint(['control_id'], [u'controls.id'], name=u'system_controls_ibfk_1'),
sa.ForeignKeyConstraint(['cycle_id'], [u'cycles.id'], name=u'system_controls_ibfk_2'),
sa.ForeignKeyConstraint(['system_id'], [u'systems.id'], name=u'system_controls_ibfk_3'),
sa.PrimaryKeyConstraint(u'id'),
mysql_default_charset=u'utf8',
mysql_engine=u'InnoDB'
)
for table, column in NOT_NULL_COLS:
op.alter_column(table, column, nullable=False, existing_type = sa.INTEGER)
for table, column, referred_table, constraint_name in EXPLICIT_INDEXES:
create_explicit_index(table, column, referred_table, constraint_name)
for table, columns in UNIQUE_CONSTRAINTS:
op.create_unique_constraint('uq_' + table, table, columns)
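# Invocation sketch (assuming a standard alembic setup for this repository):
#
#     alembic upgrade 53ef72c8a867     # drop the old audit tables
#     alembic downgrade 526117e15ce4   # recreate them via downgrade()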
| apache-2.0 | -4,135,967,233,158,390,000 | 54.865546 | 116 | 0.675842 | false |
ryokochang/Slab-GCS | ExtLibs/Mavlink/pymavlink/generator/lib/genxmlif/xmlifApi.py | 12 | 54395 | #
# genxmlif, Release 0.9.0
# file: xmlifapi.py
#
# API (interface) classes for generic interface package
#
# history:
# 2007-06-29 rl created, classes extracted from xmlifbase.py
#
# Copyright (c) 2005-2008 by Roland Leuthe. All rights reserved.
#
# --------------------------------------------------------------------
# The generic XML interface is
#
# Copyright (c) 2005-2008 by Roland Leuthe
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
__author__ = "Roland Leuthe <[email protected]>"
__date__ = "08. August 2008"
__version__ = "0.9.0"
import string
import os
import re
import copy
from types import TupleType, StringTypes
from xml.dom import EMPTY_PREFIX, EMPTY_NAMESPACE
from xmlifUtils import processWhitespaceAction, NsNameTupleFactory, splitQName, nsNameToQName, escapeCdata, escapeAttribute
########################################
# XML interface base class
# All not implemented methods have to be overloaded by the derived class!!
#
class XmlInterfaceBase:
"""XML interface base class.
All not implemented methods have to be overloaded by the derived class!!
"""
def __init__(self, verbose, useCaching, processXInclude):
"""Constructor of class XmlInterfaceBase.
Input parameter:
'verbose': 0 or 1: controls verbose print output for module genxmlif
'useCaching': 0 or 1: controls usage of caching for module genxmlif
'processXInclude': 0 or 1: controls XInclude processing during parsing
"""
self.verbose = verbose
self.useCaching = useCaching
self.processXInclude = processXInclude
# set default wrapper classes
self.setTreeWrapperClass (XmlTreeWrapper)
self.setElementWrapperClass (XmlElementWrapper)
def createXmlTree (self, namespace, xmlRootTagName, attributeDict={}, publicId=None, systemId=None):
"""Create a new XML TreeWrapper object (wrapper for DOM document or elementtree).
Input parameter:
'namespace': not yet handled (for future use)
'xmlRootTagName': specifies the tag name of the root element
'attributeDict': contains the attributes of the root node (optional)
'publicId': forwarded to contained DOM tree (unused for elementtree)
'systemId': forwarded to contained DOM tree (unused for elementtree)
Returns the created XML tree wrapper object.
Method has to be implemented by derived classes!
"""
raise NotImplementedError
def parse (self, filePath, baseUrl="", ownerDoc=None):
"""Call the XML parser for 'file'.
Input parameter:
'filePath': a file path or an URI
'baseUrl': if specified, it is used e.g. as base path for schema files referenced inside the XML file.
'ownerDoc': only used in case of 4DOM (forwarded to 4DOM parser).
Returns the respective XML tree wrapper object for the parsed XML file.
Method has to be implemented by derived classes!
"""
raise NotImplementedError
def parseString (self, text, baseUrl="", ownerDoc=None):
"""Call the XML parser for 'text'.
Input parameter:
'text': contains the XML string to be parsed
'baseUrl': if specified, it is used e.g. as base path for schema files referenced inside the XML string.
'ownerDoc': only used in case of 4DOM (forwarded to 4DOM parser).
Returns the respective XML tree wrapper object for the parsed XML 'text' string.
Method has to be implemented by derived classes!
"""
raise NotImplementedError
def setTreeWrapperClass (self, treeWrapperClass):
"""Set the tree wrapper class which shall be used by this interface.
Input parameter:
treeWrapperClass: tree wrapper class
"""
self.treeWrapperClass = treeWrapperClass
def setElementWrapperClass (self, elementWrapperClass):
"""Set the element wrapper classes which shall be used by this interface.
Input parameter:
elementWrapperClass: element wrapper class
"""
self.elementWrapperClass = elementWrapperClass
def getXmlIfType (self):
"""Retrieve the type of the XML interface."""
return self.xmlIfType
########################################
# Tree wrapper API (interface class)
#
class XmlTreeWrapper:
"""XML tree wrapper API.
Contains a DOM tree or an elementtree (depending on used XML parser)
"""
def __init__(self, xmlIf, tree, useCaching):
"""Constructor of wrapper class XmlTreeWrapper.
Input parameter:
'xmlIf': used XML interface class
'tree': DOM tree or elementtree which is wrapped by this object
'useCaching': 1 if caching shall be used inside genxmlif, otherwise 0
"""
self.xmlIf = xmlIf
self.__tree = tree
self.__useCaching = useCaching
def createElement (self, tupleOrLocalName, attributeDict=None, curNs=[]):
"""Create an ElementWrapper object.
Input parameter:
tupleOrLocalName: tag name of element node to be created
(tuple of namespace and localName or only localName if no namespace is used)
attributeDict: attributes for this elements
curNs: namespaces for scope of this element
Returns an ElementWrapper object containing the created element node.
"""
nsName = NsNameTupleFactory(tupleOrLocalName)
elementNode = self.__tree.xmlIfExtCreateElement(nsName, attributeDict, curNs)
return self.xmlIf.elementWrapperClass(elementNode, self, curNs)
def cloneTree (self):
"""Creates a copy of a whole XML DOM tree."""
rootElementWrapperCopy = self.getRootNode().cloneNode(deep=1)
treeWrapperCopy = self.__class__(self.xmlIf,
self.__tree.xmlIfExtCloneTree(rootElementWrapperCopy.element),
self.__useCaching)
for elementWrapper in rootElementWrapperCopy.getIterator():
elementWrapper.treeWrapper = treeWrapperCopy
return treeWrapperCopy
def getRootNode (self):
"""Retrieve the wrapper object of the root element of the contained XML tree.
Returns the ElementWrapper object of the root element.
"""
return self.__tree.xmlIfExtGetRootNode().xmlIfExtElementWrapper
def getTree (self):
"""Retrieve the contained XML tree.
Returns the contained XML tree object (internal DOM tree wrapper or elementtree).
"""
return self.__tree
def printTree (self, prettyPrint=0, printElementValue=1, encoding=None):
"""Return the string representation of the contained XML tree.
Input parameter:
'prettyPrint': aligns the columns of the attributes of childNodes
'printElementValue': controls if the lement values are printed or not.
Returns a string with the string representation of the whole XML tree.
"""
if not encoding:
encoding = "utf-8"
if encoding != "utf-8" and encoding != "us-ascii":
text = "<?xml version='1.0' encoding='%s'?>\n" % encoding
else:
text = ""
return text + self.getRootNode().printNode(deep=1, prettyPrint=prettyPrint, printElementValue=printElementValue, encoding=encoding)
def useCaching (self):
"""Return 1 if caching should be used for the contained XML tree."""
return self.__useCaching
def setExternalCacheUsage (self, used):
"""Set external cache usage for the whole tree
unlink commands are ignored if used by an external cache
Input parameter:
used: 0 or 1 (used by external cache)
"""
self.getRootNode().setExternalCacheUsage (used, deep=1)
def unlink (self):
"""Break circular references of the complete XML tree.
To be called if the XML tree is not longer used => garbage collection!
"""
self.getRootNode().unlink()
def __str__ (self):
"""Return the string representation of the contained XML tree."""
return self.printTree()
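# Usage sketch (illustrative; ``SomeXmlInterface`` stands for any concrete
# implementation of XmlInterfaceBase shipped with genxmlif):
#
#     xmlIf = SomeXmlInterface(verbose=0, useCaching=1, processXInclude=0)
#     tree = xmlIf.createXmlTree(None, "root", {"version": "1.0"})
#     child = tree.getRootNode().appendChild("item", {"id": "1"})
#     print tree.printTree(prettyPrint=1)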
########################################
# Element wrapper API (interface class)
#
class XmlElementWrapper:
"""XML element wrapper API.
Contains a XML element node
All not implemented methods have to be overloaded by the derived class!!
"""
def __init__(self, element, treeWrapper, curNs=[], initAttrSeq=1):
"""Constructor of wrapper class XmlElementWrapper.
Input parameter:
element: XML element node which is wrapped by this object
treeWrapper: XML tree wrapper class the current element belongs to
curNs: namespaces for scope of this element
"""
self.element = element
self.element.xmlIfExtElementWrapper = self
self.treeWrapper = treeWrapper
self.nodeUsedByExternalCache = 0
if self.__useCaching():
self.__childrenCache = {}
self.__firstChildCache = {}
self.__qNameAttrCache = {}
self.baseUrl = None
self.absUrl = None
self.filePath = None
self.startLineNumber = None
self.endLineNumber = None
self.curNs = curNs[:]
self.attributeSequence = []
if initAttrSeq:
self.attributeSequence = self.getAttributeDict().keys()
def unlink (self):
"""Break circular references of this element and its children."""
for childWrapper in self.getChildren():
childWrapper.unlink()
if not self.isUsedByExternalCache():
self.element.xmlIfExtUnlink()
def cloneNode (self, deep, cloneCallback=None):
"""Create a copy of the current element wrapper.
The reference to the parent node is set to None!"""
elementCopy = self.element.xmlIfExtCloneNode()
elementWrapperCopy = self.__class__(elementCopy, self.treeWrapper, initAttrSeq=0)
elementWrapperCopy.treeWrapper = None
elementWrapperCopy.baseUrl = self.baseUrl
elementWrapperCopy.absUrl = self.absUrl
elementWrapperCopy.filePath = self.filePath
elementWrapperCopy.startLineNumber = self.startLineNumber
elementWrapperCopy.endLineNumber = self.endLineNumber
elementWrapperCopy.curNs = self.curNs[:]
elementWrapperCopy.attributeSequence = self.attributeSequence[:]
if cloneCallback: cloneCallback(elementWrapperCopy)
if deep:
for childElement in self.element.xmlIfExtGetChildren():
childWrapperElementCopy = childElement.xmlIfExtElementWrapper.cloneNode(deep, cloneCallback)
childWrapperElementCopy.element.xmlIfExtSetParentNode(elementWrapperCopy.element)
elementWrapperCopy.element.xmlIfExtAppendChild(childWrapperElementCopy.element)
return elementWrapperCopy
def clearNodeCache (self):
"""Clear all caches used by this element wrapper which contains element wrapper references."""
self.__clearChildrenCache()
def isUsedByExternalCache (self):
"""Check if this node is used by an external cache.
unlink commands are ignored if used by an external cache"""
return self.nodeUsedByExternalCache
def setExternalCacheUsage (self, used, deep=1):
"""Set external cache usage for this node and its children
unlink commands are ignored if used by an external cache
Input parameter:
used: 0 or 1 (used by external cache)
deep: 0 or 1: controls if the child elements are also marked as used by external cache
"""
self.nodeUsedByExternalCache = used
if deep:
for childWrapper in self.getChildren():
childWrapper.setExternalCacheUsage (used, deep)
##########################################################
# attributes of the current node can be accessed via key operator
def __getitem__(self, tupleOrAttrName):
"""Attributes of the contained element node can be accessed via key operator.
Input parameter:
tupleOrAttrName: name of the attribute (tuple of namespace and attributeName or only attributeName)
Returns the attribute value.
"""
attrValue = self.getAttribute (tupleOrAttrName)
if attrValue != None:
return attrValue
else:
raise AttributeError, "Attribute %s not found!" %(repr(tupleOrAttrName))
def __setitem__(self, tupleOrAttrName, attributeValue):
"""Attributes of the contained element node can be accessed via key operator.
Input parameter:
tupleOrAttrName: name of the attribute (tuple of namespace and attributeName or only attributeName)
attributeValue: attribute value to be set
"""
self.setAttribute (tupleOrAttrName, attributeValue)
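    # Example sketch of the key operator access defined above:
    #
    #     value = elementWrapper[("http://some/ns", "attr")]  # with namespace
    #     elementWrapper["attr"] = "newValue"                 # without namespace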
#++++++++++++ methods concerning the tag name ++++++++++++++++++++++++
def getTagName (self):
"""Retrieve the (complete) tag name of the contained element node
Returns the (complete) tag name of the contained element node
"""
return self.element.xmlIfExtGetTagName()
def getLocalName (self):
"""Retrieve the local name (without namespace) of the contained element node
Returns the local name (without namespace) of the contained element node
"""
try:
return self.__localNameCache
except:
prefix, localName = splitQName (self.getTagName())
if self.__useCaching():
self.__localNameCache = localName
return localName
def getNamespaceURI (self):
"""Retrieve the namespace URI of the contained element node
Returns the namespace URI of the contained element node (None if no namespace is used).
"""
try:
return self.__nsUriCache
except:
prefix = self.element.xmlIfExtGetNamespaceURI()
if self.__useCaching():
self.__nsUriCache = prefix
return prefix
def getNsName (self):
"""Retrieve a tuple (namespace, localName) of the contained element node
Returns a tuple (namespace, localName) of the contained element node (namespace is None if no namespace is used).
"""
try:
return self.__nsNameCache
except:
nsName = NsNameTupleFactory( (self.getNamespaceURI(), self.getLocalName()) )
if self.__useCaching():
self.__nsNameCache = nsName
return nsName
def getQName (self):
"""Retrieve a string prefix and localName of the contained element node
Returns a string "prefix:localName" of the contained element node
"""
return self.nsName2QName(self.getNsName())
def getPrefix (self):
"""Retrieve the namespace prefix of the contained element node
Returns the namespace prefix of the contained element node (None if no namespace is used).
"""
return self.getNsPrefix(self.getNsName())
#++++++++++++ methods concerning print support ++++++++++++++++++++++++
def __str__ (self):
"""Retrieve the textual representation of the contained element node."""
return self.printNode()
def printNode (self, indent="", deep=0, prettyPrint=0, attrMaxLengthDict={}, printElementValue=1, encoding=None):
"""Retrieve the textual representation of the contained element node.
Input parameter:
indent: indentation to be used for string representation
deep: 0 or 1: controls if the child element nodes are also printed
prettyPrint: aligns the columns of the attributes of childNodes
attrMaxLengthDict: dictionary containing the length of the attribute values (used for prettyprint)
printElementValue: 0 or 1: controls if the element value is printed
Returns the string representation
"""
patternXmlTagShort = '''\
%(indent)s<%(qName)s%(attributeString)s/>%(tailText)s%(lf)s'''
patternXmlTagLong = '''\
%(indent)s<%(qName)s%(attributeString)s>%(elementValueString)s\
%(lf)s%(subTreeString)s\
%(indent)s</%(qName)s>%(tailText)s%(lf)s'''
subTreeStringList = []
tailText = ""
addIndent = ""
lf = ""
if deep:
childAttrMaxLengthDict = {}
if prettyPrint:
for childNode in self.getChildren():
childNode.__updateAttrMaxLengthDict(childAttrMaxLengthDict)
lf = "\n"
addIndent = " "
for childNode in self.getChildren():
                subTreeStringList.append (childNode.printNode(indent + addIndent, deep, prettyPrint, childAttrMaxLengthDict, printElementValue, encoding))
tailText = escapeCdata(self.element.xmlIfExtGetElementTailText(), encoding)
attributeStringList = []
for attrName in self.getAttributeList():
attrValue = escapeAttribute(self.getAttribute(attrName), encoding)
if prettyPrint:
try:
align = attrMaxLengthDict[attrName]
except:
align = len(attrValue)
else:
align = len(attrValue)
qName = self.nsName2QName(attrName)
attributeStringList.append (' %s="%s"%*s' %(qName, attrValue, align - len(attrValue), ""))
attributeString = string.join (attributeStringList, "")
qName = self.getQName()
if printElementValue:
if deep:
elementValueString = escapeCdata(self.element.xmlIfExtGetElementText(), encoding)
else:
elementValueString = escapeCdata(self.getElementValue(ignoreEmtpyStringFragments=1), encoding)
else:
elementValueString = ""
if subTreeStringList == [] and elementValueString == "":
printPattern = patternXmlTagShort
else:
if subTreeStringList != []:
subTreeString = string.join (subTreeStringList, "")
else:
subTreeString = ""
printPattern = patternXmlTagLong
return printPattern % vars()
#++++++++++++ methods concerning the parent of the current node ++++++++++++++++++++++++
def getParentNode (self):
"""Retrieve the ElementWrapper object of the parent element node.
Returns the ElementWrapper object of the parent element node.
"""
parent = self.element.xmlIfExtGetParentNode()
if parent != None:
return parent.xmlIfExtElementWrapper
else:
return None
#++++++++++++ methods concerning the children of the current node ++++++++++++++++++++++++
def getChildren (self, tagFilter=None):
"""Retrieve the ElementWrapper objects of the children element nodes.
Input parameter:
tagFilter: retrieve only the children with this tag name ('*' or None returns all children)
Returns all children of this element node which match 'tagFilter' (list)
"""
if tagFilter in (None, '*', (None, '*')):
children = self.element.xmlIfExtGetChildren()
elif tagFilter[1] == '*':
# handle (namespace, '*')
children = filter(lambda child:child.xmlIfExtElementWrapper.getNamespaceURI() == tagFilter[0],
self.element.xmlIfExtGetChildren())
else:
nsNameFilter = NsNameTupleFactory(tagFilter)
try:
children = self.__childrenCache[nsNameFilter]
except:
children = self.element.xmlIfExtGetChildren(nsNameFilter)
if self.__useCaching():
self.__childrenCache[nsNameFilter] = children
return map(lambda child: child.xmlIfExtElementWrapper, children)
def getChildrenNS (self, namespaceURI, tagFilter=None):
"""Retrieve the ElementWrapper objects of the children element nodes using a namespace.
Input parameter:
namespaceURI: the namespace URI of the children or None
tagFilter: retrieve only the children with this localName ('*' or None returns all children)
Returns all children of this element node which match 'namespaceURI' and 'tagFilter' (list)
"""
return self.getChildren((namespaceURI, tagFilter))
def getChildrenWithKey (self, tagFilter=None, keyAttr=None, keyValue=None):
"""Retrieve the ElementWrapper objects of the children element nodes.
Input parameter:
tagFilter: retrieve only the children with this tag name ('*' or None returns all children)
keyAttr: name of the key attribute
keyValue: value of the key
Returns all children of this element node which match 'tagFilter' (list)
"""
children = self.getChildren(tagFilter)
return filter(lambda child:child[keyAttr]==keyValue, children)
def getFirstChild (self, tagFilter=None):
"""Retrieve the ElementWrapper objects of the first child element node.
Input parameter:
tagFilter: retrieve only the first child with this tag name ('*' or None: no filter)
Returns the first child of this element node which match 'tagFilter'
or None if no suitable child element was found
"""
if tagFilter in (None, '*', (None, '*')):
element = self.element.xmlIfExtGetFirstChild()
elif tagFilter[1] == '*':
# handle (namespace, '*')
children = filter(lambda child:child.xmlIfExtElementWrapper.getNamespaceURI() == tagFilter[0],
self.element.xmlIfExtGetChildren())
try:
element = children[0]
except:
element = None
else:
nsNameFilter = NsNameTupleFactory(tagFilter)
try:
element = self.__firstChildCache[nsNameFilter]
except:
element = self.element.xmlIfExtGetFirstChild(nsNameFilter)
if self.__useCaching():
self.__firstChildCache[nsNameFilter] = element
if element != None:
return element.xmlIfExtElementWrapper
else:
return None
def getFirstChildNS (self, namespaceURI, tagFilter=None):
"""Retrieve the ElementWrapper objects of the first child element node using a namespace.
Input parameter:
namespaceURI: the namespace URI of the children or None
tagFilter: retrieve only the first child with this localName ('*' or None: no filter)
Returns the first child of this element node which match 'namespaceURI' and 'tagFilter'
or None if no suitable child element was found
"""
return self.getFirstChild ((namespaceURI, tagFilter))
def getFirstChildWithKey (self, tagFilter=None, keyAttr=None, keyValue=None):
"""Retrieve the ElementWrapper objects of the children element nodes.
Input parameter:
tagFilter: retrieve only the children with this tag name ('*' or None returns all children)
keyAttr: name of the key attribute
keyValue: value of the key
        Returns the first child of this element node which matches 'tagFilter' and whose attribute 'keyAttr' equals 'keyValue', or None if no suitable child was found
"""
children = self.getChildren(tagFilter)
childrenWithKey = filter(lambda child:child[keyAttr]==keyValue, children)
        if childrenWithKey:
return childrenWithKey[0]
else:
return None
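    # Illustrative usage sketch (the 'item' tag and 'id' key attribute are
    # hypothetical):
    #
    #     item42 = wrapper.getFirstChildWithKey('item', 'id', '42')
    #     if item42 is not None:
    #         print item42.getElementValue()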
def getElementsByTagName (self, tagFilter=None):
"""Retrieve all descendant ElementWrapper object of current node whose tag name match 'tagFilter'.
Input parameter:
tagFilter: retrieve only the children with this tag name ('*' or None returns all descendants)
Returns all descendants of this element node which match 'tagFilter' (list)
"""
if tagFilter in (None, '*', (None, '*'), (None, None)):
descendants = self.element.xmlIfExtGetElementsByTagName()
elif tagFilter[1] == '*':
# handle (namespace, '*')
descendants = filter(lambda desc:desc.xmlIfExtElementWrapper.getNamespaceURI() == tagFilter[0],
self.element.xmlIfExtGetElementsByTagName())
else:
nsNameFilter = NsNameTupleFactory(tagFilter)
descendants = self.element.xmlIfExtGetElementsByTagName(nsNameFilter)
return map(lambda descendant: descendant.xmlIfExtElementWrapper, descendants)
def getElementsByTagNameNS (self, namespaceURI, tagFilter=None):
"""Retrieve all descendant ElementWrapper object of current node whose tag name match 'namespaceURI' and 'tagFilter'.
Input parameter:
namespaceURI: the namespace URI of the descendants or None
tagFilter: retrieve only the descendants with this localName ('*' or None returns all descendants)
Returns all descendants of this element node which match 'namespaceURI' and 'tagFilter' (list)
"""
return self.getElementsByTagName((namespaceURI, tagFilter))
def getIterator (self, tagFilter=None):
"""Creates a tree iterator. The iterator loops over this element
and all subelements, in document order, and returns all elements
whose tag name match 'tagFilter'.
Input parameter:
tagFilter: retrieve only the children with this tag name ('*' or None returns all descendants)
Returns all element nodes which match 'tagFilter' (list)
"""
if tagFilter in (None, '*', (None, '*'), (None, None)):
matchingElements = self.element.xmlIfExtGetIterator()
elif tagFilter[1] == '*':
# handle (namespace, '*')
matchingElements = filter(lambda desc:desc.xmlIfExtElementWrapper.getNamespaceURI() == tagFilter[0],
self.element.xmlIfExtGetIterator())
else:
nsNameFilter = NsNameTupleFactory(tagFilter)
matchingElements = self.element.xmlIfExtGetIterator(nsNameFilter)
return map(lambda e: e.xmlIfExtElementWrapper, matchingElements)
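    # Illustrative usage sketch: walk the element and all of its subelements in
    # document order (the tag name 'step' is hypothetical):
    #
    #     for elem in wrapper.getIterator('step'):
    #         print elem.getLocation(), elem.getElementValue()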
def appendChild (self, tupleOrLocalNameOrElement, attributeDict={}):
"""Append an element node to the children of the current node.
Input parameter:
tupleOrLocalNameOrElement: (namespace, localName) or tagName or ElementWrapper object of the new child
attributeDict: attribute dictionary containing the attributes of the new child (optional)
        If an ElementWrapper object is not given, a new ElementWrapper object is created from tupleOrLocalNameOrElement
Returns the ElementWrapper object of the new child.
"""
if not isinstance(tupleOrLocalNameOrElement, self.__class__):
childElementWrapper = self.__createElement (tupleOrLocalNameOrElement, attributeDict)
else:
childElementWrapper = tupleOrLocalNameOrElement
self.element.xmlIfExtAppendChild (childElementWrapper.element)
self.__clearChildrenCache(childElementWrapper.getNsName())
return childElementWrapper
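    # Illustrative usage sketch (tag and attribute names are hypothetical):
    #
    #     newChild = wrapper.appendChild(('urn:example', 'item'), {'id': '43'})
    #     newChild.setElementValue('payload')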
def insertBefore (self, tupleOrLocalNameOrElement, refChild, attributeDict={}):
"""Insert an child element node before the given reference child of the current node.
Input parameter:
tupleOrLocalNameOrElement: (namespace, localName) or tagName or ElementWrapper object of the new child
refChild: reference child ElementWrapper object
attributeDict: attribute dictionary containing the attributes of the new child (optional)
        If an ElementWrapper object is not given, a new ElementWrapper object is created from tupleOrLocalNameOrElement
Returns the ElementWrapper object of the new child.
"""
if not isinstance(tupleOrLocalNameOrElement, self.__class__):
childElementWrapper = self.__createElement (tupleOrLocalNameOrElement, attributeDict)
else:
childElementWrapper = tupleOrLocalNameOrElement
        if refChild is None:
self.appendChild (childElementWrapper)
else:
self.element.xmlIfExtInsertBefore(childElementWrapper.element, refChild.element)
self.__clearChildrenCache(childElementWrapper.getNsName())
return childElementWrapper
def removeChild (self, childElementWrapper):
"""Remove the given child element node from the children of the current node.
Input parameter:
childElementWrapper: ElementWrapper object to be removed
"""
self.element.xmlIfExtRemoveChild(childElementWrapper.element)
self.__clearChildrenCache(childElementWrapper.getNsName())
def insertSubtree (self, refChildWrapper, subTreeWrapper, insertSubTreeRootNode=1):
"""Insert the given subtree before 'refChildWrapper' ('refChildWrapper' is not removed!)
Input parameter:
refChildWrapper: reference child ElementWrapper object
subTreeWrapper: subtree wrapper object which contains the subtree to be inserted
insertSubTreeRootNode: if 1, root node of subtree is inserted into parent tree, otherwise not
"""
        if refChildWrapper is not None:
self.element.xmlIfExtInsertSubtree (refChildWrapper.element, subTreeWrapper.getTree(), insertSubTreeRootNode)
else:
self.element.xmlIfExtInsertSubtree (None, subTreeWrapper.getTree(), insertSubTreeRootNode)
self.__clearChildrenCache()
def replaceChildBySubtree (self, childElementWrapper, subTreeWrapper, insertSubTreeRootNode=1):
"""Replace child element node by XML subtree (e.g. expanding included XML files)
Input parameter:
childElementWrapper: ElementWrapper object to be replaced
subTreeWrapper: XML subtree wrapper object to be inserted
insertSubTreeRootNode: if 1, root node of subtree is inserted into parent tree, otherwise not
"""
self.insertSubtree (childElementWrapper, subTreeWrapper, insertSubTreeRootNode)
self.removeChild(childElementWrapper)
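    # Illustrative usage sketch for expanding an include element in place
    # (assumes 'includeElem' is a child ElementWrapper and 'includedTree' is the
    # tree wrapper of the parsed included file):
    #
    #     wrapper.replaceChildBySubtree(includeElem, includedTree,
    #                                   insertSubTreeRootNode=0)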
#++++++++++++ methods concerning the attributes of the current node ++++++++++++++++++++++++
def getAttributeDict (self):
"""Retrieve a dictionary containing all attributes of the current element node.
Returns a dictionary (copy) containing all attributes of the current element node.
"""
return self.element.xmlIfExtGetAttributeDict()
def getAttributeList (self):
"""Retrieve a list containing all attributes of the current element node
in the sequence specified in the input XML file.
Returns a list (copy) containing all attributes of the current element node
        in the sequence specified in the input XML file (TODO: currently does not work for the 4DOM/pyXML interface).
"""
attrList = map(lambda a: NsNameTupleFactory(a), self.attributeSequence)
return attrList
def getAttribute (self, tupleOrAttrName):
"""Retrieve an attribute value of the current element node.
Input parameter:
tupleOrAttrName: tuple '(namespace, attributeName)' or 'attributeName' if no namespace is used
Returns the value of the specified attribute.
"""
nsName = NsNameTupleFactory(tupleOrAttrName)
return self.element.xmlIfExtGetAttribute(nsName)
def getAttributeOrDefault (self, tupleOrAttrName, defaultValue):
"""Retrieve an attribute value of the current element node or the given default value if the attribute doesn't exist.
Input parameter:
tupleOrAttrName: tuple '(namespace, attributeName)' or 'attributeName' if no namespace is used
Returns the value of the specified attribute or the given default value if the attribute doesn't exist.
"""
attributeValue = self.getAttribute (tupleOrAttrName)
        if attributeValue is None:
attributeValue = defaultValue
return attributeValue
def getQNameAttribute (self, tupleOrAttrName):
"""Retrieve a QName attribute value of the current element node.
Input parameter:
tupleOrAttrName: tuple '(namespace, attributeName)' or 'attributeName' if no namespace is used
Returns the value of the specified QName attribute as tuple (namespace, localName),
i.e. the prefix is converted into the corresponding namespace value.
"""
nsNameAttrName = NsNameTupleFactory(tupleOrAttrName)
try:
return self.__qNameAttrCache[nsNameAttrName]
        except KeyError:
qNameValue = self.getAttribute (nsNameAttrName)
nsNameValue = self.qName2NsName(qNameValue, useDefaultNs=1)
if self.__useCaching():
self.__qNameAttrCache[nsNameAttrName] = nsNameValue
return nsNameValue
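    # Illustrative usage sketch: given the hypothetical markup
    # <elem xmlns:ex="urn:example" ref="ex:foo"/>, the prefix inside the
    # attribute value is resolved into its namespace:
    #
    #     wrapper.getQNameAttribute('ref')   # -> ('urn:example', 'foo')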
def hasAttribute (self, tupleOrAttrName):
"""Checks if the requested attribute exist for the current element node.
Returns 1 if the attribute exists, otherwise 0.
"""
nsName = NsNameTupleFactory(tupleOrAttrName)
attrValue = self.element.xmlIfExtGetAttribute(nsName)
        if attrValue is not None:
return 1
else:
return 0
def setAttribute (self, tupleOrAttrName, attributeValue):
"""Sets an attribute value of the current element node.
If the attribute does not yet exist, it will be created.
Input parameter:
tupleOrAttrName: tuple '(namespace, attributeName)' or 'attributeName' if no namespace is used
attributeValue: attribute value to be set
"""
if not isinstance(attributeValue, StringTypes):
raise TypeError, "%s (attribute %s) must be a string!" %(repr(attributeValue), repr(tupleOrAttrName))
nsNameAttrName = NsNameTupleFactory(tupleOrAttrName)
if nsNameAttrName not in self.attributeSequence:
self.attributeSequence.append(nsNameAttrName)
if self.__useCaching():
if self.__qNameAttrCache.has_key(nsNameAttrName):
del self.__qNameAttrCache[nsNameAttrName]
self.element.xmlIfExtSetAttribute(nsNameAttrName, attributeValue, self.getCurrentNamespaces())
def setAttributeDefault (self, tupleOrAttrName, defaultValue):
"""Create attribute and set value to default if it does not yet exist for the current element node.
        If the attribute already exists, nothing is done.
Input parameter:
tupleOrAttrName: tuple '(namespace, attributeName)' or 'attributeName' if no namespace is used
defaultValue: default attribute value to be set
"""
if not self.hasAttribute(tupleOrAttrName):
self.setAttribute(tupleOrAttrName, defaultValue)
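    # Illustrative usage sketch (attribute names and values are hypothetical):
    #
    #     wrapper.setAttribute('state', 'active')          # create or overwrite
    #     wrapper.setAttributeDefault('mode', 'standard')  # only set if absent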
def removeAttribute (self, tupleOrAttrName):
"""Removes an attribute from the current element node.
No exception is raised if there is no matching attribute.
Input parameter:
tupleOrAttrName: tuple '(namespace, attributeName)' or 'attributeName' if no namespace is used
"""
nsNameAttrName = NsNameTupleFactory(tupleOrAttrName)
if self.__useCaching():
if self.__qNameAttrCache.has_key(nsNameAttrName):
del self.__qNameAttrCache[nsNameAttrName]
self.element.xmlIfExtRemoveAttribute(nsNameAttrName)
def processWsAttribute (self, tupleOrAttrName, wsAction):
"""Process white space action for the specified attribute according to requested 'wsAction'.
Input parameter:
tupleOrAttrName: tuple '(namespace, attributeName)' or 'attributeName' if no namespace is used
wsAction: 'collapse': substitute multiple whitespace characters by a single ' '
                         'replace':  substitute each whitespace character by a single ' '
        Returns the processed attribute value.
        """
attributeValue = self.getAttribute(tupleOrAttrName)
newValue = processWhitespaceAction (attributeValue, wsAction)
if newValue != attributeValue:
self.setAttribute(tupleOrAttrName, newValue)
return newValue
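    # Illustrative sketch of the two whitespace actions described above (the
    # attribute name is hypothetical):
    #
    #     # 'replace':  each whitespace character becomes a single ' '
    #     # 'collapse': each run of whitespace characters becomes a single ' '
    #     wrapper.processWsAttribute('name', 'collapse')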
#++++++++++++ methods concerning the content of the current node ++++++++++++++++++++++++
def getElementValue (self, ignoreEmtpyStringFragments=0):
"""Retrieve the content of the current element node.
Returns the content of the current element node as string.
The content of multiple text nodes / CDATA nodes are concatenated to one string.
Input parameter:
ignoreEmtpyStringFragments: if 1, text nodes containing only whitespaces are ignored
"""
return "".join (self.getElementValueFragments(ignoreEmtpyStringFragments))
def getElementValueFragments (self, ignoreEmtpyStringFragments=0):
"""Retrieve the content of the current element node as value fragment list.
Returns the content of the current element node as list of string fragments.
Each list element represents one text nodes / CDATA node.
Input parameter:
ignoreEmtpyStringFragments: if 1, text nodes containing only whitespaces are ignored
Method has to be implemented by derived classes!
"""
return self.element.xmlIfExtGetElementValueFragments (ignoreEmtpyStringFragments)
def setElementValue (self, elementValue):
"""Set the content of the current element node.
Input parameter:
elementValue: string containing the new element value
        If multiple text nodes / CDATA nodes exist, 'elementValue' is set
        on the first text node / CDATA node; all other text nodes / CDATA nodes are set to ''.
"""
self.element.xmlIfExtSetElementValue(elementValue)
def processWsElementValue (self, wsAction):
"""Process white space action for the content of the current element node according to requested 'wsAction'.
Input parameter:
wsAction: 'collapse': substitute multiple whitespace characters by a single ' '
                         'replace':  substitute each whitespace character by a single ' '
        Returns the processed element value.
        """
self.element.xmlIfExtProcessWsElementValue(wsAction)
return self.getElementValue()
#++++++++++++ methods concerning the info about the current node in the XML file ++++++++++++++++++++
def getStartLineNumber (self):
"""Retrieve the start line number of the current element node.
Returns the start line number of the current element node in the XML file
"""
return self.startLineNumber
def getEndLineNumber (self):
"""Retrieve the end line number of the current element node.
Returns the end line number of the current element node in the XML file
"""
return self.endLineNumber
def getAbsUrl (self):
"""Retrieve the absolute URL of the XML file the current element node belongs to.
Returns the absolute URL of the XML file the current element node belongs to.
"""
return self.absUrl
def getBaseUrl (self):
"""Retrieve the base URL of the XML file the current element node belongs to.
Returns the base URL of the XML file the current element node belongs to.
"""
return self.baseUrl
def getFilePath (self):
"""Retrieve the file path of the XML file the current element node belongs to.
Returns the file path of the XML file the current element node belongs to.
"""
return self.filePath
def getLocation (self, end=0, fullpath=0):
"""Retrieve a string containing file name and line number of the current element node.
Input parameter:
end: 1 if end line number shall be shown, 0 for start line number
fullpath: 1 if the full path of the XML file shall be shown, 0 for only the file name
Returns a string containing file name and line number of the current element node.
(e.g. to be used for traces or error messages)
"""
lineMethod = (self.getStartLineNumber, self.getEndLineNumber)
pathFunc = (os.path.basename, os.path.abspath)
return "%s, %d" % (pathFunc[fullpath](self.getFilePath()), lineMethod[end]())
#++++++++++++ miscellaneous methods concerning namespaces ++++++++++++++++++++
def getCurrentNamespaces (self):
"""Retrieve the namespace prefixes visible for the current element node
Returns a list of the namespace prefixes visible for the current node.
"""
return self.curNs
def qName2NsName (self, qName, useDefaultNs):
"""Convert a qName 'prefix:localName' to a tuple '(namespace, localName)'.
Input parameter:
qName: qName to be converted
useDefaultNs: 1 if default namespace shall be used
Returns the corresponding tuple '(namespace, localName)' for 'qName'.
"""
        if qName is not None:
qNamePrefix, qNameLocalName = splitQName (qName)
for prefix, namespaceURI in self.getCurrentNamespaces():
if qNamePrefix == prefix:
if prefix != EMPTY_PREFIX or useDefaultNs:
nsName = (namespaceURI, qNameLocalName)
break
else:
                if qNamePrefix is None:
nsName = (EMPTY_NAMESPACE, qNameLocalName)
else:
raise ValueError, "Namespace prefix '%s' not bound to a namespace!" % (qNamePrefix)
else:
nsName = (None, None)
return NsNameTupleFactory(nsName)
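    # Illustrative usage sketch (assumes the hypothetical binding
    # xmlns:ex="urn:example" is in scope for this element):
    #
    #     wrapper.qName2NsName('ex:item', useDefaultNs=1)  # -> ('urn:example', 'item')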
def nsName2QName (self, nsLocalName):
"""Convert a tuple '(namespace, localName)' to a string 'prefix:localName'
Input parameter:
nsLocalName: tuple '(namespace, localName)' to be converted
Returns the corresponding string 'prefix:localName' for 'nsLocalName'.
"""
qName = nsNameToQName (nsLocalName, self.getCurrentNamespaces())
if qName == "xmlns:None": qName = "xmlns"
return qName
def getNamespace (self, qName):
"""Retrieve namespace for a qName 'prefix:localName'.
Input parameter:
qName: qName 'prefix:localName'
Returns the corresponding namespace for the prefix of 'qName'.
"""
        if qName is not None:
qNamePrefix, qNameLocalName = splitQName (qName)
for prefix, namespaceURI in self.getCurrentNamespaces():
if qNamePrefix == prefix:
namespace = namespaceURI
break
else:
                if qNamePrefix is None:
namespace = EMPTY_NAMESPACE
else:
raise LookupError, "Namespace for QName '%s' not found!" % (qName)
else:
namespace = EMPTY_NAMESPACE
return namespace
def getNsPrefix (self, nsLocalName):
"""Retrieve prefix for a tuple '(namespace, localName)'.
Input parameter:
nsLocalName: tuple '(namespace, localName)'
Returns the corresponding prefix for the namespace of 'nsLocalName'.
"""
ns = nsLocalName[0]
for prefix, namespace in self.getCurrentNamespaces():
if ns == namespace:
return prefix
else:
            if ns is None:
return None
else:
raise LookupError, "Prefix for namespaceURI '%s' not found!" % (ns)
#++++++++++++ limited XPath support ++++++++++++++++++++
def getXPath (self, xPath, namespaceRef=None, useDefaultNs=1, attrIgnoreList=[]):
"""Retrieve node list or attribute list for specified XPath
Input parameter:
xPath: string containing xPath specification
namespaceRef: scope for namespaces (default is own element node)
useDefaultNs: 1, if default namespace shall be used if no prefix is available
attrIgnoreList: list of attributes to be ignored if wildcard is specified for attributes
Returns all nodes which match xPath specification or
list of attribute values if xPath specifies an attribute
"""
return self.getXPathList(xPath, namespaceRef, useDefaultNs, attrIgnoreList)[0]
def getXPathList (self, xPath, namespaceRef=None, useDefaultNs=1, attrIgnoreList=[]):
"""Retrieve node list or attribute list for specified XPath
Input parameter:
xPath: string containing xPath specification
namespaceRef: scope for namespaces (default is own element node)
useDefaultNs: 1, if default namespace shall be used if no prefix is available
attrIgnoreList: list of attributes to be ignored if wildcard is specified for attributes
Returns tuple (completeChildList, attrNodeList, attrNsNameFirst).
completeChildList: contains all child node which match xPath specification or
list of attribute values if xPath specifies an attribute
attrNodeList: contains all child nodes where the specified attribute was found
attrNsNameFirst: contains the name of the first attribute which was found
TODO: Re-design namespace and attribute handling of this method
"""
reChild = re.compile('child *::')
reAttribute = re.compile('attribute *::')
        if namespaceRef is None: namespaceRef = self
xPath = reChild.sub('./', xPath)
xPath = reAttribute.sub('@', xPath)
xPathList = string.split (xPath, "|")
completeChildDict = {}
completeChildList = []
attrNodeList = []
attrNsNameFirst = None
for xRelPath in xPathList:
xRelPath = string.strip(xRelPath)
descendantOrSelf = 0
if xRelPath[:3] == ".//":
descendantOrSelf = 1
xRelPath = xRelPath[3:]
xPathLocalStepList = string.split (xRelPath, "/")
childList = [self, ]
for localStep in xPathLocalStepList:
localStep = string.strip(localStep)
stepChildList = []
if localStep == "":
raise IOError ("Invalid xPath '%s'!" %(xRelPath))
elif localStep == ".":
continue
elif localStep[0] == '@':
if len(localStep) == 1:
raise ValueError ("Attribute name is missing in xPath!")
if descendantOrSelf:
childList = self.getElementsByTagName()
attrName = localStep[1:]
for childNode in childList:
if attrName == '*':
attrNodeList.append (childNode)
attrDict = childNode.getAttributeDict()
for attrIgnore in attrIgnoreList:
if attrDict.has_key(attrIgnore):
del attrDict[attrIgnore]
stepChildList.extend(attrDict.values())
try:
attrNsNameFirst = attrDict.keys()[0]
                            except IndexError:
pass
else:
attrNsName = namespaceRef.qName2NsName (attrName, useDefaultNs=0)
if attrNsName[1] == '*':
for attr in childNode.getAttributeDict().keys():
if attr[0] == attrNsName[0]:
                                        if not attrNodeList:
attrNsNameFirst = attrNsName
attrNodeList.append (childNode)
stepChildList.append (childNode.getAttribute(attr))
elif childNode.hasAttribute(attrNsName):
                                if not attrNodeList:
attrNsNameFirst = attrNsName
attrNodeList.append (childNode)
stepChildList.append (childNode.getAttribute(attrNsName))
childList = stepChildList
else:
nsLocalName = namespaceRef.qName2NsName (localStep, useDefaultNs=useDefaultNs)
if descendantOrSelf:
descendantOrSelf = 0
if localStep == "*":
stepChildList = self.getElementsByTagName()
else:
stepChildList = self.getElementsByTagName(nsLocalName)
else:
for childNode in childList:
if localStep == "*":
stepChildList.extend (childNode.getChildren())
else:
stepChildList.extend (childNode.getChildrenNS(nsLocalName[0], nsLocalName[1]))
childList = stepChildList
            # filter duplicated children
for child in childList:
try:
childKey = child.element
                except AttributeError:
childKey = child
if not completeChildDict.has_key(childKey):
completeChildList.append(child)
completeChildDict[childKey] = 1
return completeChildList, attrNodeList, attrNsNameFirst
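    # Illustrative usage sketch of the limited XPath support (prefixes, element
    # and attribute names are hypothetical):
    #
    #     items = wrapper.getXPath('./ex:group/ex:item')   # child element nodes
    #     ids   = wrapper.getXPath('.//ex:item/@id')       # attribute values
    #     both  = wrapper.getXPath('ex:a | ex:b')          # union of two paths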
###############################################################
# PRIVATE methods
###############################################################
def __createElement (self, tupleOrLocalName, attributeDict):
"""Create a new ElementWrapper object.
Input parameter:
tupleOrLocalName: tuple '(namespace, localName)' or 'localName' if no namespace is used
attributeDict: dictionary which contains the attributes and their values of the element node to be created
Returns the created ElementWrapper object
"""
        childElementWrapper = self.treeWrapper.createElement (tupleOrLocalName, attributeDict, self.curNs[:])    # TODO: when to be adapted?
childElementWrapper.element.xmlIfExtSetParentNode(self.element)
return childElementWrapper
def __updateAttrMaxLengthDict (self, attrMaxLengthDict):
"""Update dictionary which contains the maximum length of node attributes.
Used for pretty print to align the attributes of child nodes.
attrMaxLengthDict is in/out parameter.
"""
for attrName, attrValue in self.getAttributeDict().items():
attrLength = len(attrValue)
if not attrMaxLengthDict.has_key(attrName):
attrMaxLengthDict[attrName] = attrLength
else:
attrMaxLengthDict[attrName] = max(attrMaxLengthDict[attrName], attrLength)
def __clearChildrenCache (self, childNsName=None):
"""Clear children cache.
"""
if self.__useCaching():
            if childNsName is not None:
if self.__childrenCache.has_key(childNsName):
del self.__childrenCache[childNsName]
if self.__firstChildCache.has_key(childNsName):
del self.__firstChildCache[childNsName]
else:
self.__childrenCache.clear()
self.__firstChildCache.clear()
def __useCaching(self):
return self.treeWrapper.useCaching()
| gpl-3.0 | 7,730,973,759,348,299,000 | 41.134005 | 144 | 0.616803 | false |
shaistaansari/django | tests/admin_widgets/tests.py | 55 | 57921 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gettext
import os
from datetime import datetime, timedelta
from importlib import import_module
from unittest import skipIf
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.db.models import CharField, DateField
from django.test import SimpleTestCase, TestCase, override_settings
from django.utils import six, translation
from . import models
from .widgetadmin import site as widget_admin_site
try:
import pytz
except ImportError:
pytz = None
class TestDataMixin(object):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
cls.u2 = User.objects.create(
pk=101, username='testser', first_name='Add', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=False,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagon', model='Passat')
models.Car.objects.create(id=2, owner=cls.u2, make='BMW', model='M3')
class SeleniumDataMixin(object):
def setUp(self):
self.u1 = User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
class AdminFormfieldForDBFieldTests(SimpleTestCase):
"""
Tests for correct behavior of ModelAdmin.formfield_for_dbfield
"""
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
"""
Helper to call formfield_for_dbfield for a given model and field name
and verify that the returned formfield is appropriate.
"""
# Override any settings on the model admin
class MyModelAdmin(admin.ModelAdmin):
pass
for k in admin_overrides:
setattr(MyModelAdmin, k, admin_overrides[k])
# Construct the admin, and ask it for a formfield
ma = MyModelAdmin(model, admin.site)
ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
# "unwrap" the widget wrapper, if needed
if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
widget = ff.widget.widget
else:
widget = ff.widget
# Check that we got a field of the right type
self.assertTrue(
isinstance(widget, widgetclass),
"Wrong widget for %s.%s: expected %s, got %s" % (
model.__class__.__name__,
fieldname,
widgetclass,
type(widget),
)
)
# Return the formfield so that other tests can continue
return ff
def test_DateField(self):
self.assertFormfield(models.Event, 'start_date', widgets.AdminDateWidget)
def test_DateTimeField(self):
self.assertFormfield(models.Member, 'birthdate', widgets.AdminSplitDateTime)
def test_TimeField(self):
self.assertFormfield(models.Event, 'start_time', widgets.AdminTimeWidget)
def test_TextField(self):
self.assertFormfield(models.Event, 'description', widgets.AdminTextareaWidget)
def test_URLField(self):
self.assertFormfield(models.Event, 'link', widgets.AdminURLFieldWidget)
def test_IntegerField(self):
self.assertFormfield(models.Event, 'min_age', widgets.AdminIntegerFieldWidget)
def test_CharField(self):
self.assertFormfield(models.Member, 'name', widgets.AdminTextInputWidget)
def test_EmailField(self):
self.assertFormfield(models.Member, 'email', widgets.AdminEmailInputWidget)
def test_FileField(self):
self.assertFormfield(models.Album, 'cover_art', widgets.AdminFileWidget)
def test_ForeignKey(self):
self.assertFormfield(models.Event, 'main_band', forms.Select)
def test_raw_id_ForeignKey(self):
self.assertFormfield(models.Event, 'main_band', widgets.ForeignKeyRawIdWidget,
raw_id_fields=['main_band'])
def test_radio_fields_ForeignKey(self):
ff = self.assertFormfield(models.Event, 'main_band', widgets.AdminRadioSelect,
radio_fields={'main_band': admin.VERTICAL})
self.assertEqual(ff.empty_label, None)
def test_many_to_many(self):
self.assertFormfield(models.Band, 'members', forms.SelectMultiple)
def test_raw_id_many_to_many(self):
self.assertFormfield(models.Band, 'members', widgets.ManyToManyRawIdWidget,
raw_id_fields=['members'])
def test_filtered_many_to_many(self):
self.assertFormfield(models.Band, 'members', widgets.FilteredSelectMultiple,
filter_vertical=['members'])
def test_formfield_overrides(self):
self.assertFormfield(models.Event, 'start_date', forms.TextInput,
formfield_overrides={DateField: {'widget': forms.TextInput}})
def test_formfield_overrides_widget_instances(self):
"""
Test that widget instances in formfield_overrides are not shared between
different fields. (#19423)
"""
class BandAdmin(admin.ModelAdmin):
formfield_overrides = {
CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
}
ma = BandAdmin(models.Band, admin.site)
f1 = ma.formfield_for_dbfield(models.Band._meta.get_field('name'), request=None)
f2 = ma.formfield_for_dbfield(models.Band._meta.get_field('style'), request=None)
self.assertNotEqual(f1.widget, f2.widget)
self.assertEqual(f1.widget.attrs['maxlength'], '100')
self.assertEqual(f2.widget.attrs['maxlength'], '20')
self.assertEqual(f2.widget.attrs['size'], '10')
def test_field_with_choices(self):
self.assertFormfield(models.Member, 'gender', forms.Select)
def test_choices_with_radio_fields(self):
self.assertFormfield(models.Member, 'gender', widgets.AdminRadioSelect,
radio_fields={'gender': admin.VERTICAL})
def test_inheritance(self):
self.assertFormfield(models.Album, 'backside_art', widgets.AdminFileWidget)
def test_m2m_widgets(self):
"""m2m fields help text as it applies to admin app (#9321)."""
class AdvisorAdmin(admin.ModelAdmin):
filter_vertical = ['companies']
self.assertFormfield(models.Advisor, 'companies', widgets.FilteredSelectMultiple,
filter_vertical=['companies'])
ma = AdvisorAdmin(models.Advisor, admin.site)
f = ma.formfield_for_dbfield(models.Advisor._meta.get_field('companies'), request=None)
self.assertEqual(six.text_type(f.help_text), 'Hold down "Control", or "Command" on a Mac, to select more than one.')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
def test_filter_choices_by_request_user(self):
"""
Ensure the user can only see their own cars in the foreign key dropdown.
"""
self.client.login(username="super", password="secret")
response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
self.assertNotContains(response, "BMW M3")
self.assertContains(response, "Volkswagon Passat")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase):
def setUp(self):
self.client.login(username="super", password="secret")
def test_changelist_ForeignKey(self):
response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
self.assertContains(response, '/auth/user/add/')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase):
def setUp(self):
self.client.login(username="super", password="secret")
def test_nonexistent_target_id(self):
band = models.Band.objects.create(name='Bogey Blues')
pk = band.pk
band.delete()
post_data = {
"main_band": '%s' % pk,
}
# Try posting with a non-existent pk in a raw id field: this
# should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
self.assertContains(response,
'Select a valid choice. That choice is not one of the available choices.')
def test_invalid_target_id(self):
for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
# This should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'),
{"main_band": test_str})
self.assertContains(response,
'Select a valid choice. That choice is not one of the available choices.')
def test_url_params_from_lookup_dict_any_iterable(self):
lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
self.assertEqual(lookup1, {'color__in': 'red,blue'})
self.assertEqual(lookup1, lookup2)
def test_url_params_from_lookup_dict_callable(self):
def my_callable():
return 'works'
lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
self.assertEqual(lookup1, lookup2)
class FilteredSelectMultipleWidgetTest(SimpleTestCase):
def test_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', False)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple="multiple" name="test" class="selectfilter">\n</select>'
'<script type="text/javascript">addEvent(window, "load", function(e) '
'{SelectFilter.init("id_test", "test\\u005C", 0); });</script>\n'
)
def test_stacked_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', True)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple="multiple" name="test" class="selectfilterstacked">\n</select>'
'<script type="text/javascript">addEvent(window, "load", function(e) '
'{SelectFilter.init("id_test", "test\\u005C", 1); });</script>\n'
)
class AdminDateWidgetTest(SimpleTestCase):
def test_attrs(self):
"""
Ensure that user-supplied attrs are used.
Refs #12073.
"""
w = widgets.AdminDateWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="vDateField" name="test" size="10" />',
)
# pass attrs to widget
w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="myDateField" name="test" size="20" />',
)
class AdminTimeWidgetTest(SimpleTestCase):
def test_attrs(self):
"""
Ensure that user-supplied attrs are used.
Refs #12073.
"""
w = widgets.AdminTimeWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="vTimeField" name="test" size="8" />',
)
# pass attrs to widget
w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="myTimeField" name="test" size="20" />',
)
class AdminSplitDateTimeWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminSplitDateTime()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">Date: <input value="2007-12-01" type="text" class="vDateField" name="test_0" size="10" /><br />Time: <input value="09:30:00" type="text" class="vTimeField" name="test_1" size="8" /></p>',
)
def test_localization(self):
w = widgets.AdminSplitDateTime()
with self.settings(USE_L10N=True), translation.override('de-at'):
w.is_localized = True
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">Datum: <input value="01.12.2007" type="text" class="vDateField" name="test_0" size="10" /><br />Zeit: <input value="09:30:00" type="text" class="vTimeField" name="test_1" size="8" /></p>',
)
class AdminURLWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', ''),
'<input class="vURLField" name="test" type="url" />'
)
self.assertHTMLEqual(
w.render('test', 'http://example.com'),
'<p class="url">Currently:<a href="http://example.com">http://example.com</a><br />Change:<input class="vURLField" name="test" type="url" value="http://example.com" /></p>'
)
def test_render_idn(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', 'http://example-äüö.com'),
'<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">http://example-äüö.com</a><br />Change:<input class="vURLField" name="test" type="url" value="http://example-äüö.com" /></p>'
)
def test_render_quoting(self):
# WARNING: Don't use assertHTMLEqual in that testcase!
# assertHTMLEqual will get rid of some escapes which are tested here!
w = widgets.AdminURLFieldWidget()
self.assertEqual(
w.render('test', 'http://example.com/<sometag>some text</sometag>'),
'<p class="url">Currently: <a href="http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example.com/<sometag>some text</sometag></a><br />Change: <input class="vURLField" name="test" type="url" value="http://example.com/<sometag>some text</sometag>" /></p>'
)
self.assertEqual(
w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>'),
'<p class="url">Currently: <a href="http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example-äüö.com/<sometag>some text</sometag></a><br />Change: <input class="vURLField" name="test" type="url" value="http://example-äüö.com/<sometag>some text</sometag>" /></p>'
)
self.assertEqual(
w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"'),
'<p class="url">Currently: <a href="http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22">http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"</a><br />Change: <input class="vURLField" name="test" type="url" value="http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"" /></p>'
)
@override_settings(
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls',
)
class AdminFileWidgetTests(TestDataMixin, TestCase):
@classmethod
def setUpTestData(cls):
super(AdminFileWidgetTests, cls).setUpTestData()
band = models.Band.objects.create(name='Linkin Park')
cls.album = band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
def test_render(self):
w = widgets.AdminFileWidget()
self.assertHTMLEqual(
w.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id" /> '
'<label for="test-clear_id">Clear</label></span><br />'
'Change: <input type="file" name="test" /></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
self.assertHTMLEqual(
w.render('test', SimpleUploadedFile('test', b'content')),
'<input type="file" name="test" />',
)
def test_readonly_fields(self):
"""
File widgets should render as a link when they're marked "read only."
"""
self.client.login(username="super", password="secret")
response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
self.assertContains(
response,
'<p><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
'albums\hybrid_theory.jpg</a></p>' % {'STORAGE_URL': default_storage.url('')},
html=True,
)
self.assertNotContains(
response,
'<input type="file" name="cover_art" id="id_cover_art" />',
html=True,
)
response = self.client.get(reverse('admin:admin_widgets_album_add'))
self.assertContains(
response,
'<p></p>',
html=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(TestCase):
def test_render(self):
band = models.Band.objects.create(name='Linkin Park')
band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
rel = models.Album._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', band.pk, attrs={}), (
'<input type="text" name="test" value="%(bandpk)s" class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong>Linkin Park</strong>'
) % {'bandpk': band.pk}
)
def test_relations_to_non_primary_key(self):
# Check that ForeignKeyRawIdWidget works with fields which aren't
# related to the model's primary key.
apple = models.Inventory.objects.create(barcode=86, name='Apple')
models.Inventory.objects.create(barcode=22, name='Pear')
core = models.Inventory.objects.create(
barcode=87, name='Core', parent=apple
)
rel = models.Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', core.parent_id, attrs={}), (
'<input type="text" name="test" value="86" class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/inventory/?_to_field=barcode" class="related-lookup" id="lookup_id_test" title="Lookup">'
'</a> <strong>Apple</strong>'
)
)
def test_fk_related_model_not_in_admin(self):
# FK to a model not registered with admin site. Raw ID widget should
# have no magnifying glass link. See #16542
big_honeycomb = models.Honeycomb.objects.create(location='Old tree')
big_honeycomb.bee_set.create()
rel = models.Bee._meta.get_field('honeycomb').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
'<input type="text" name="honeycomb_widget" value="%(hcombpk)s" /> <strong>Honeycomb object</strong>' % {'hcombpk': big_honeycomb.pk}
)
def test_fk_to_self_model_not_in_admin(self):
# FK to self, not registered with admin site. Raw ID widget should have
# no magnifying glass link. See #16542
subject1 = models.Individual.objects.create(name='Subject #1')
models.Individual.objects.create(name='Child', parent=subject1)
rel = models.Individual._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('individual_widget', subject1.pk, attrs={}),
'<input type="text" name="individual_widget" value="%(subj1pk)s" /> <strong>Individual object</strong>' % {'subj1pk': subject1.pk}
)
def test_proper_manager_for_label_lookup(self):
# see #9258
rel = models.Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
hidden = models.Inventory.objects.create(
barcode=93, name='Hidden', hidden=True
)
child_of_hidden = models.Inventory.objects.create(
barcode=94, name='Child of hidden', parent=hidden
)
self.assertHTMLEqual(
w.render('test', child_of_hidden.parent_id, attrs={}), (
'<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/inventory/?_to_field=barcode" class="related-lookup" id="lookup_id_test" title="Lookup">'
'</a> <strong>Hidden</strong>'
)
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(TestCase):
def test_render(self):
band = models.Band.objects.create(name='Linkin Park')
m1 = models.Member.objects.create(name='Chester')
m2 = models.Member.objects.create(name='Mike')
band.members.add(m1, m2)
rel = models.Band._meta.get_field('members').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', [m1.pk, m2.pk], attrs={}), (
'<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField" />'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % dict(m1pk=m1.pk, m2pk=m2.pk)
)
self.assertHTMLEqual(
w.render('test', [m1.pk]), (
'<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % dict(m1pk=m1.pk)
)
def test_m2m_related_model_not_in_admin(self):
# M2M relationship with model not registered with admin site. Raw ID
# widget should have no magnifying glass link. See #16542
consultor1 = models.Advisor.objects.create(name='Rockstar Techie')
c1 = models.Company.objects.create(name='Doodle')
c2 = models.Company.objects.create(name='Pear')
consultor1.companies.add(c1, c2)
rel = models.Advisor._meta.get_field('companies').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('company_widget1', [c1.pk, c2.pk], attrs={}),
'<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s" />' % {'c1pk': c1.pk, 'c2pk': c2.pk}
)
self.assertHTMLEqual(
w.render('company_widget2', [c1.pk]),
'<input type="text" name="company_widget2" value="%(c1pk)s" />' % {'c1pk': c1.pk}
)
class RelatedFieldWidgetWrapperTests(SimpleTestCase):
def test_no_can_add_related(self):
rel = models.Individual._meta.get_field('parent').remote_field
w = widgets.AdminRadioSelect()
# Used to fail with a name error.
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site)
self.assertFalse(w.can_add_related)
def test_select_multiple_widget_cant_change_delete_related(self):
rel = models.Individual._meta.get_field('parent').remote_field
widget = forms.SelectMultiple()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertFalse(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_on_delete_cascade_rel_cant_delete_related(self):
rel = models.Individual._meta.get_field('soulmate').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertTrue(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_show_hide_date_time_picker_widgets(self):
"""
Ensure that pressing the ESC key closes the date and time picker
widgets.
Refs #17064.
"""
from selenium.webdriver.common.keys import Keys
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# First, with the date picker widget ---------------------------------
# Check that the date picker is hidden
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'none')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Check that the date picker is visible
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'block')
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# Check that the date picker is hidden again
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'none')
# Then, with the time picker widget ----------------------------------
# Check that the time picker is hidden
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'none')
# Click the time icon
self.selenium.find_element_by_id('clocklink0').click()
# Check that the time picker is visible
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'block')
self.assertEqual(
[
x.text for x in
self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
],
['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
)
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# Check that the time picker is hidden again
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'none')
def test_calendar_nonday_class(self):
"""
Ensure cells that are not days of the month have the `nonday` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# make sure the first and last 6 cells have class nonday
for td in tds[:6] + tds[-6:]:
self.assertEqual(td.get_attribute('class'), 'nonday')
def test_calendar_selected_class(self):
"""
Ensure cell for the day in the input has the `selected` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify the selected cell
selected = tds[6]
self.assertEqual(selected.get_attribute('class'), 'selected')
self.assertEqual(selected.text, '1')
def test_calendar_no_selected_class(self):
"""
Ensure no cells are given the selected class when the field is empty.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Open a page that has a date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify there are no cells with the selected class
selected = [td for td in tds if td.get_attribute('class') == 'selected']
self.assertEqual(len(selected), 0)
def test_calendar_show_date_from_input(self):
"""
Ensure that the calendar show the date from the input field for every
locale supported by django.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Enter test data
member = models.Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
# Get month names translations for every locales
month_string = 'January February March April May June July August September October November December'
path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
for language_code, language_name in settings.LANGUAGES:
try:
catalog = gettext.translation('djangojs', path, [language_code])
except IOError:
continue
if month_string in catalog._catalog:
month_names = catalog._catalog[month_string]
else:
month_names = month_string
# Get the expected caption
may_translation = month_names.split(' ')[4]
expected_caption = '{0:s} {1:d}'.format(may_translation, 1984)
# Test with every locale
with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
# Open a page that has a date picker widget
self.selenium.get('{}{}'.format(self.live_server_url,
reverse('admin:admin_widgets_member_change', args=(member.pk,))))
# Click on the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Get the calendar caption
calendar0 = self.selenium.find_element_by_id('calendarin0')
caption = calendar0.find_element_by_tag_name('caption')
# Make sure that the right month and year are displayed
self.assertEqual(caption.text, expected_caption)
class DateTimePickerSeleniumChromeTests(DateTimePickerSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class DateTimePickerSeleniumIETests(DateTimePickerSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@skipIf(pytz is None, "this test requires pytz")
@override_settings(TIME_ZONE='Asia/Singapore')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerShortcutsSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_date_time_picker_shortcuts(self):
"""
Ensure that date/time/datetime picker shortcuts work in the current time zone.
Refs #20663.
This test case is fairly tricky, it relies on selenium still running the browser
in the default time zone "America/Chicago" despite `override_settings` changing
the time zone to "Asia/Singapore".
"""
self.admin_login(username='super', password='secret', login_url='/')
error_margin = timedelta(seconds=10)
# If we are neighbouring a DST, we add an hour of error margin.
tz = pytz.timezone('America/Chicago')
utc_now = datetime.now(pytz.utc)
tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
if tz_yesterday != tz_tomorrow:
error_margin += timedelta(hours=1)
now = datetime.now()
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
self.selenium.find_element_by_id('id_name').send_keys('test')
# Click on the "today" and "now" shortcuts.
shortcuts = self.selenium.find_elements_by_css_selector(
'.field-birthdate .datetimeshortcuts')
for shortcut in shortcuts:
shortcut.find_element_by_tag_name('a').click()
# Check that there is a time zone mismatch warning.
# Warning: This would effectively fail if the TIME_ZONE defined in the
# settings has the same UTC offset as "Asia/Singapore" because the
# mismatch warning would be rightfully missing from the page.
self.selenium.find_elements_by_css_selector(
'.field-birthdate .timezonewarning')
# Submit the form.
self.selenium.find_element_by_tag_name('form').submit()
self.wait_page_loaded()
# Make sure that "now" in javascript is within 10 seconds
# from "now" on the server side.
member = models.Member.objects.get(name='test')
self.assertGreater(member.birthdate, now - error_margin)
self.assertLess(member.birthdate, now + error_margin)
class DateTimePickerShortcutsSeleniumChromeTests(DateTimePickerShortcutsSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class DateTimePickerShortcutsSeleniumIETests(DateTimePickerShortcutsSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class HorizontalVerticalFilterSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
super(HorizontalVerticalFilterSeleniumFirefoxTests, self).setUp()
self.lisa = models.Student.objects.create(name='Lisa')
self.john = models.Student.objects.create(name='John')
self.bob = models.Student.objects.create(name='Bob')
self.peter = models.Student.objects.create(name='Peter')
self.jenny = models.Student.objects.create(name='Jenny')
self.jason = models.Student.objects.create(name='Jason')
self.cliff = models.Student.objects.create(name='Cliff')
self.arthur = models.Student.objects.create(name='Arthur')
self.school = models.School.objects.create(name='School of Awesome')
def assertActiveButtons(self, mode, field_name, choose, remove,
choose_all=None, remove_all=None):
choose_link = '#id_%s_add_link' % field_name
choose_all_link = '#id_%s_add_all_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
remove_all_link = '#id_%s_remove_all_link' % field_name
self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
if mode == 'horizontal':
self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)
def execute_basic_operations(self, mode, field_name):
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
choose_all_link = 'id_%s_add_all_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
remove_all_link = 'id_%s_remove_all_link' % field_name
# Initial positions ---------------------------------------------------
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id)])
self.assertActiveButtons(mode, field_name, False, False, True, True)
# Click 'Choose all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(choose_all_link).click()
elif mode == 'vertical':
            # There's no 'Choose all' button in vertical mode, so individually
# select all options and click 'Choose'.
for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
option.click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertActiveButtons(mode, field_name, False, False, False, True)
# Click 'Remove all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(remove_all_link).click()
elif mode == 'vertical':
            # There's no 'Remove all' button in vertical mode, so individually
# select all options and click 'Remove'.
for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
option.click()
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box,
[str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertSelectOptions(to_box, [])
self.assertActiveButtons(mode, field_name, False, False, True, False)
# Choose some options ------------------------------------------------
from_lisa_select_option = self.get_select_option(from_box, str(self.lisa.id))
        # Check the title attribute is there for tooltips: ticket #20821
self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
from_lisa_select_option.click()
self.get_select_option(from_box, str(self.jason.id)).click()
self.get_select_option(from_box, str(self.bob.id)).click()
self.get_select_option(from_box, str(self.john.id)).click()
self.assertActiveButtons(mode, field_name, True, False, True, False)
self.selenium.find_element_by_id(choose_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.bob.id),
str(self.jason.id), str(self.john.id)])
# Check the tooltip is still there after moving: ticket #20821
to_lisa_select_option = self.get_select_option(to_box, str(self.lisa.id))
self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
# Remove some options -------------------------------------------------
self.get_select_option(to_box, str(self.lisa.id)).click()
self.get_select_option(to_box, str(self.bob.id)).click()
self.assertActiveButtons(mode, field_name, False, True, True, True)
self.selenium.find_element_by_id(remove_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)])
self.assertSelectOptions(to_box,
[str(self.jason.id), str(self.john.id)])
# Choose some more options --------------------------------------------
self.get_select_option(from_box, str(self.arthur.id)).click()
self.get_select_option(from_box, str(self.cliff.id)).click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)])
self.assertSelectOptions(to_box,
[str(self.jason.id), str(self.john.id),
str(self.arthur.id), str(self.cliff.id)])
def test_basic(self):
self.school.students = [self.lisa, self.peter]
self.school.alumni = [self.lisa, self.peter]
self.school.save()
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get('%s%s' % (
self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
self.wait_page_loaded()
self.execute_basic_operations('vertical', 'students')
self.execute_basic_operations('horizontal', 'alumni')
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.school = models.School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()),
[self.arthur, self.cliff, self.jason, self.john])
self.assertEqual(list(self.school.alumni.all()),
[self.arthur, self.cliff, self.jason, self.john])
def test_filter(self):
"""
Ensure that typing in the search box filters out options displayed in
the 'from' box.
"""
from selenium.webdriver.common.keys import Keys
self.school.students = [self.lisa, self.peter]
self.school.alumni = [self.lisa, self.peter]
self.school.save()
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
for field_name in ['students', 'alumni']:
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = '#id_%s_add_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
input = self.selenium.find_element_by_css_selector('#id_%s_input' % field_name)
# Initial values
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
# Typing in some characters filters out non-matching options
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys('R')
self.assertSelectOptions(from_box, [str(self.arthur.id)])
# Clearing the text box makes the other options reappear
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
# -----------------------------------------------------------------
# Check that choosing a filtered option sends it properly to the
# 'to' box.
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
self.get_select_option(from_box, str(self.jason.id)).click()
self.selenium.find_element_by_css_selector(choose_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id),
str(self.jason.id)])
self.get_select_option(to_box, str(self.lisa.id)).click()
self.selenium.find_element_by_css_selector(remove_link).click()
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.lisa.id)])
self.assertSelectOptions(to_box,
[str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE]) # Clear text box
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jenny.id),
str(self.john.id), str(self.lisa.id)])
self.assertSelectOptions(to_box,
[str(self.peter.id), str(self.jason.id)])
# -----------------------------------------------------------------
# Check that pressing enter on a filtered option sends it properly
# to the 'to' box.
self.get_select_option(to_box, str(self.jason.id)).click()
self.selenium.find_element_by_css_selector(remove_link).click()
input.send_keys('ja')
self.assertSelectOptions(from_box, [str(self.jason.id)])
input.send_keys([Keys.ENTER])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.school = models.School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()),
[self.jason, self.peter])
self.assertEqual(list(self.school.alumni.all()),
[self.jason, self.peter])
class HorizontalVerticalFilterSeleniumChromeTests(HorizontalVerticalFilterSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class HorizontalVerticalFilterSeleniumIETests(HorizontalVerticalFilterSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminRawIdWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
super(AdminRawIdWidgetSeleniumFirefoxTests, self).setUp()
models.Band.objects.create(id=42, name='Bogey Blues')
models.Band.objects.create(id=98, name='Green Potatoes')
def test_ForeignKey(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(
self.selenium.find_element_by_id('id_main_band').get_attribute('value'),
'')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.selenium.switch_to.window('id_main_band')
self.wait_page_loaded()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.selenium.switch_to.window('id_main_band')
self.wait_page_loaded()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the other selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '98')
def test_many_to_many(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(
self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'),
'')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.selenium.switch_to.window('id_supporting_bands')
self.wait_page_loaded()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.selenium.switch_to.window('id_supporting_bands')
self.wait_page_loaded()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the two selected bands' ids
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42,98')
class AdminRawIdWidgetSeleniumChromeTests(AdminRawIdWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class AdminRawIdWidgetSeleniumIETests(AdminRawIdWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_ForeignKey_using_to_field(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get('%s%s' % (
self.live_server_url,
reverse('admin:admin_widgets_profile_add')))
main_window = self.selenium.current_window_handle
        # Click the Add User button to add a new user
self.selenium.find_element_by_id('add_id_user').click()
self.selenium.switch_to.window('id_user')
self.wait_for('#id_password')
password_field = self.selenium.find_element_by_id('id_password')
password_field.send_keys('password')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'newuser'
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# The field now contains the new user
self.wait_for('#id_user option[value="newuser"]')
# Click the Change User button to change it
self.selenium.find_element_by_id('change_id_user').click()
        self.selenium.switch_to.window('id_user')
self.wait_page_loaded()
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'changednewuser'
username_field.clear()
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
        self.selenium.switch_to.window(main_window)
# Wait up to 2 seconds for the new option to show up after clicking save in the popup.
self.selenium.implicitly_wait(2)
self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
self.selenium.implicitly_wait(0)
# Go ahead and submit the form to make sure it works
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.wait_for_text('li.success', 'The profile "changednewuser" was added successfully.')
profiles = models.Profile.objects.all()
self.assertEqual(len(profiles), 1)
self.assertEqual(profiles[0].user.username, username_value)
class RelatedFieldWidgetSeleniumChromeTests(RelatedFieldWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class RelatedFieldWidgetSeleniumIETests(RelatedFieldWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| bsd-3-clause | -1,057,694,303,072,237,600 | 44.949206 | 406 | 0.623635 | false |
stefelisabeth/dplace | dplace_app/api_views.py | 1 | 21716 | import json
import re
import datetime
from itertools import groupby
import logging
from django.db import connection
from django.db.models import Prefetch, Q, Count
from django.shortcuts import get_object_or_404
from django.http import Http404
from rest_framework import viewsets
from rest_framework.pagination import PageNumberPagination
from rest_framework.decorators import api_view, permission_classes, renderer_classes
from rest_framework.permissions import AllowAny
from rest_framework.views import Response
from rest_framework.renderers import JSONRenderer
from dplace_app.filters import GeographicRegionFilter
from dplace_app.renderers import DPLACECSVRenderer
from dplace_app import serializers
from dplace_app import models
from dplace_app.tree import update_newick
log = logging.getLogger('profile')
class VariableViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.VariableSerializer
filter_fields = ('label', 'name', 'index_categories', 'niche_categories', 'source')
queryset = models.Variable.objects\
.prefetch_related('index_categories', 'niche_categories')
# Override retrieve to use the detail serializer, which includes categories
def retrieve(self, request, *args, **kwargs):
self.object = self.get_object()
serializer = serializers.VariableDetailSerializer(self.object)
return Response(serializer.data)
class CategoryViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.CategorySerializer
filter_fields = ('name', 'type', 'index_variables', 'niche_variables')
queryset = models.Category.objects.all()
# Override retrieve to use the detail serializer, which includes variables
def retrieve(self, request, *args, **kwargs):
self.object = self.get_object()
serializer = serializers.CategoryDetailSerializer(self.object)
return Response(serializer.data)
class CodeDescriptionViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.CodeDescriptionSerializer
filter_fields = ('variable',)
queryset = models.CodeDescription.objects.all()
class ValueViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.ValueSerializer
filter_fields = ('variable', 'coded_value', 'code', 'society',)
# Avoid additional database trips by select_related for foreign keys
queryset = models.Value.objects.filter(variable__type='cultural')\
.select_related('variable', 'code', 'source').all()
class SocietyViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.SocietySerializer
queryset = models.Society.objects.all().select_related(
'source', 'language__iso_code', 'language__family')
lookup_field = 'ext_id'
def detail(self, request, society_id):
# block spider attacks
if len(request.GET) > 0 and request.path.startswith('/society'):
raise Http404
society = get_object_or_404(models.Society, ext_id=society_id)
# gets the society's location for inset map
location = {}
if society.location:
location = {
'lat': society.location['coordinates'][1],
'lng': society.location['coordinates'][0]
}
        # gets other societies in the database with the same xd_id
xd_id = models.Society.objects.filter(
xd_id=society.xd_id).exclude(ext_id=society_id)
if society.hraf_link and '(' in society.hraf_link:
            hraf_link = society.hraf_link.split('(')[-1]
else:
hraf_link = ''
environmentals = society.get_environmental_data()
cultural_traits = society.get_cultural_trait_data()
references = society.get_data_references()
language_classification = None
if society.language:
# just glottolog at the moment
language_classification = models.LanguageFamily.objects\
.filter(name=society.language.family.name)
return Response(
{
'society': society,
                'hraf_link': hraf_link[:-1],
'xd_id': xd_id,
'location': location,
'language_classification': language_classification,
'environmentals': dict(environmentals),
'cultural_traits': dict(cultural_traits),
'references': references
},
template_name='society.html'
)
class ISOCodeViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.ISOCodeSerializer
filter_fields = ('iso_code',)
queryset = models.ISOCode.objects.all()
class LargeResultsSetPagination(PageNumberPagination):
page_size = 1000
page_size_query_param = 'page_size'
max_page_size = 1000
class LanguageViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.LanguageSerializerWithSocieties
filter_fields = ('name', 'iso_code', 'societies', 'family',)
queryset = models.Language.objects.all()\
.select_related('family', 'iso_code')\
.prefetch_related(Prefetch(
'societies',
queryset=models.Society.objects.exclude(value__isnull=True)
))
pagination_class = LargeResultsSetPagination
class LanguageFamilyViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.LanguageFamilySerializer
filter_fields = ('name',)
queryset = models.LanguageFamily.objects.all()\
.annotate(language_count=Count('language__societies'))\
.order_by('name')
pagination_class = LargeResultsSetPagination
class TreeResultsSetPagination(PageNumberPagination):
"""
Since trees may have *many* languages, which are serialized as well, we limit the
page size to just 1.
"""
page_size = 1
page_size_query_param = 'page_size'
max_page_size = 10
class LanguageTreeViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.LanguageTreeSerializer
filter_fields = ('name',)
queryset = models.LanguageTree.objects.all()
pagination_class = TreeResultsSetPagination
class LanguageTreeLabelsViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.LanguageTreeLabelsSerializer
filter_fields = ('label',)
queryset = models.LanguageTreeLabels.objects.all()
pagination_class = LargeResultsSetPagination
class SourceViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.SourceSerializer
filter_fields = ('author', 'name')
queryset = models.Source.objects.all()
def get_query_from_json(request):
query_string = request.query_params.get('query')
if query_string is None:
raise Http404('missing query parameter')
try:
query_dict = json.loads(query_string)
except ValueError:
raise Http404('malformed query parameter')
if not isinstance(query_dict, dict):
raise Http404('malformed query parameter')
return query_dict
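# A minimal usage sketch (hypothetical request; the endpoint and id are made
# up for illustration): clients pass the criteria JSON-encoded in a single
# 'query' parameter, e.g. GET /min_and_max?query={"environmental_id": 317},
# which get_query_from_json() parses into {'environmental_id': 317}; missing
# or malformed JSON raises Http404 instead of a server error.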
def result_set_from_query_dict(query_dict):
from time import time
_s = time()
log.info('enter result_set_from_query_dict')
result_set = serializers.SocietyResultSet()
sql_joins, sql_where = [], []
def id_array(l):
return '(%s)' % ','.join('%s' % int(i) for i in l)
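    # e.g. id_array([1, 2, 3]) -> '(1,2,3)'; values are coerced to int so the
    # literal can be embedded in the raw SQL built below.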
if 'l' in query_dict:
sql_joins.append(('language', 'l', 'l.id = s.language_id'))
sql_where.append('l.id IN ' + id_array(query_dict['l']))
for lang in models.Language.objects.filter(id__in=query_dict['l']):
result_set.languages.add(lang)
if 'c' in query_dict:
variables = {
v.id: v for v in models.Variable.objects
.filter(id__in=[int(x.split('-')[0]) for x in query_dict['c']])
.prefetch_related(Prefetch(
'codes',
queryset=models.CodeDescription.objects
.filter(id__in=[int(x.split('-')[1]) for x in query_dict['c'] if len(x.split('-')) == 2])))
}
for variable, codes in groupby(
sorted(query_dict['c'], key=lambda c: int(c.split('-')[0])),
key=lambda x: int(str(x).split('-')[0])
):
variable = variables[variable]
codes = [{
'id': None if (len(c.split('-')) > 2 or len(c.split('-')) == 1) else int(c.split('-')[1]),
'min': None if len(c.split('-')) < 3 else float(c.split('-')[1]),
'max': None if len(c.split('-')) < 3 else float(c.split('-')[2])
} for c in list(codes)]
alias = 'cv%s' % variable.id
sql_joins.append((
"value",
alias,
"{0}.society_id = s.id AND {0}.variable_id = {1}".format(alias, variable.id)
))
if variable.data_type and variable.data_type == 'Continuous':
include_NA = not all((c['min'] is not None) for c in codes)
ors = [
"({0}.coded_value_float >= %(min)f AND {0}.coded_value_float <= %(max)f)".format(alias) % c
for c in codes if ('min' in c and c['min'] is not None)]
if include_NA:
ors.append("%s.coded_value = 'NA'" % alias)
sql_where.append("(%s)" % ' OR '.join(ors))
if not include_NA:
sql_where.append("{0}.coded_value != 'NA'".format(alias))
else:
assert all('id' in c for c in codes)
sql_where.append("{0}.code_id IN %s".format(alias) % id_array([x['id'] for x in codes]))
result_set.variable_descriptions.add(serializers.VariableCode(variable.codes, variable))
if 'e' in query_dict:
# There can be multiple filters, so we must aggregate the results.
for varid, criteria in groupby(
sorted(query_dict['e'], key=lambda c: c[0]),
key=lambda x: x[0]
):
alias = 'ev%s' % varid
sql_joins.append((
"value",
alias,
"{0}.society_id = s.id AND {0}.variable_id = {1}".format(alias, int(varid))))
for varid, operator, params in criteria:
params = map(float, params)
if operator == 'inrange':
sql_where.append("{0}.coded_value_float >= {1:f} AND {0}.coded_value_float <= {2:f}".format(alias, params[0], params[1]))
elif operator == 'outrange':
sql_where.append("{0}.coded_value_float >= {1:f} AND {0}.coded_value_float <= {2:f}".format(alias, params[1], params[0]))
elif operator == 'gt':
sql_where.append("{0}.coded_value_float >= {1:f}".format(alias, params[0]))
elif operator == 'lt':
sql_where.append("{0}.coded_value_float <= {1:f}".format(alias, params[0]))
for variable in models.Variable.objects.filter(id__in=[x[0] for x in query_dict['e']]):
result_set.environmental_variables.add(variable)
if 'p' in query_dict:
sql_joins.append(('geographicregion', 'r', 'r.id = s.region_id'))
sql_where.append('r.id IN %s' % id_array(query_dict['p']))
for region in models.GeographicRegion.objects.filter(id__in=query_dict['p']):
result_set.geographic_regions.add(region)
if sql_where:
cursor = connection.cursor()
sql = "select distinct s.id from dplace_app_society as s %s where %s" % (
' '.join('join dplace_app_%s as %s on %s' % t for t in sql_joins),
' AND '.join(sql_where))
cursor.execute(sql)
soc_ids = [r[0] for r in cursor.fetchall()]
else:
soc_ids = []
soc_query = models.Society.objects.filter(id__in=soc_ids)\
.select_related('source', 'language__family', 'language__iso_code', 'region')
if result_set.geographic_regions:
soc_query = soc_query.select_related('region')
if result_set.variable_descriptions:
soc_query = soc_query.prefetch_related(Prefetch(
'value_set',
to_attr='selected_cvalues',
queryset=models.Value.objects
# FIXME: this selects possibly too many values, in case there are multiple
# values for the same variable, not all of them matching the criteria.
.filter(variable_id__in=[v.variable.id for v in result_set.variable_descriptions])
.select_related('code')
.prefetch_related('references')))
if result_set.environmental_variables:
soc_query = soc_query.prefetch_related(Prefetch(
'value_set',
to_attr='selected_evalues',
queryset=models.Value.objects
.filter(variable_id__in=[v.id for v in result_set.environmental_variables])
.prefetch_related('references')))
for i, soc in enumerate(soc_query):
soc_result = serializers.SocietyResult(soc)
if result_set.variable_descriptions:
for cval in soc.selected_cvalues:
soc_result.variable_coded_values.add(cval)
if result_set.environmental_variables:
            for ev in soc.selected_evalues:
                soc_result.environmental_values.add(ev)
result_set.societies.add(soc_result)
log.info('mid 1: %s' % (time() - _s,))
# Filter the results to those that matched all criteria
#result_set.finalize(criteria)
log.info('mid 2: %s' % (time() - _s,))
return result_set
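# The query_dict keys are one-letter codes: 'l' holds language ids, 'c' holds
# cultural variable/code strings ('42-3' for a coded value, '42-0.5-1.5' for a
# continuous range), 'e' holds (variable_id, operator, params) triples and 'p'
# holds geographic region ids; see find_societies() below for the request side.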
@api_view(['GET'])
@permission_classes((AllowAny,))
def trees_from_societies(request):
language_trees = []
for k, v in request.query_params.lists():
soc_ids = v
labels = models.LanguageTreeLabels.objects.filter(societies__id__in=soc_ids).all()
global_tree = None
global_newick = []
global_isolates = []
for t in models.LanguageTree.objects\
.filter(taxa__societies__id__in=soc_ids)\
.prefetch_related(
'taxa__languagetreelabelssequence_set__labels',
'taxa__languagetreelabelssequence_set__society',
)\
.distinct():
if 'global' in t.name:
global_tree = t
# TODO ask @Bibiko once the isolates are in the db under global.tree as string: isol1,isol2,isol3,...
# global_isolates.extend(t.newick_string.split(','))
global_isolates.extend(['alse1251','amas1236','bana1292','calu1239','chim1301','chit1248','chon1248','coah1252','coos1249','furr1244','gaga1251','guai1237','guat1253','hadz1240','high1242','kara1289','karo1304','klam1254','kute1249','lara1258','mull1237','natc1249','nort2938','paez1247','pume1238','pura1257','pure1242','sali1253','sand1273','seri1257','shom1245','sius1254','sout1439','take1257','ticu1245','timu1245','tiwi1244','toll1241','trum1247','uruu1244','wara1303','wash1253','yama1264','yuch1247','zuni1245'])
else:
if update_newick(t, labels):
language_trees.append(t)
if 'glotto' in t.name:
                    # remove the trailing ';' so the trees can be joined
global_newick.append(t.newick_string[:-1])
if global_tree:
langs_in_tree = [str(l.label) for l in labels]
            # add isolates if present in the current selection
            for isolate in global_isolates:
                if isolate in langs_in_tree:
                    global_newick.append('(' + isolate + ':1)')
            # join all pruned Glottolog trees into the global one
global_tree.newick_string = '(' + ','.join(global_newick) + ');'
language_trees.append(global_tree)
return Response(serializers.LanguageTreeSerializer(language_trees, many=True).data)
@api_view(['GET'])
@permission_classes((AllowAny,))
def find_societies(request):
"""
View to find the societies that match an input request. Currently expects
{ language_filters: [{language_ids: [1,2,3]}], variable_codes: [4,5,6...],
environmental_filters: [{id: 1, operator: 'gt', params: [0.0]},
{id:3, operator 'inrange', params: [10.0,20.0] }] }
Returns serialized collection of SocietyResult objects
"""
from time import time
from django.db import connection
s = time()
log.info('%s find_societies 1: %s queries' % (time() - s, len(connection.queries)))
query = {}
if 'name' in request.query_params:
result_set = serializers.SocietyResultSet()
q = request.query_params['name']
if q:
soc = models.Society.objects.filter(
Q(name__icontains=q) | Q(alternate_names__unaccent__icontains=q))
            for society in soc:
                if society.value_set.count():
                    result_set.societies.add(serializers.SocietyResult(society))
return Response(serializers.SocietyResultSetSerializer(result_set).data)
for k, v in request.query_params.lists():
if str(k) == 'c':
query[k] = v
else:
query[k] = [json.loads(vv) for vv in v]
result_set = result_set_from_query_dict(query)
log.info('%s find_societies 2: %s queries' % (time() - s, len(connection.queries)))
d = serializers.SocietyResultSetSerializer(result_set).data
log.info('%s find_societies 3: %s queries' % (time() - s, len(connection.queries)))
for i, q in enumerate(
sorted(connection.queries, key=lambda q: q['time'], reverse=True)):
if 10 < i < 20: # pragma: no cover
log.info('%s for %s' % (q['time'], q['sql'][:500]))
return Response(d)
@api_view(['GET'])
@permission_classes((AllowAny,))
def get_categories(request):
"""
Filters categories for sources, as some categories are empty for some sources
"""
query_dict = get_query_from_json(request)
categories = models.Category.objects.filter(type='cultural')
source_categories = []
if 'source' in query_dict:
source = models.Source.objects.filter(id=query_dict['source'])
variables = models.Variable.objects.filter(source=source)
for c in categories:
if variables.filter(index_categories=c.id):
source_categories.append(c)
return Response(
serializers.CategorySerializer(source_categories, many=True).data)
return Response(serializers.CategorySerializer(categories, many=True).data)
@api_view(['GET'])
@permission_classes((AllowAny,))
def get_dataset_sources(request):
return Response(
serializers.SourceSerializer(
models.Source.objects.all().exclude(name=""), many=True).data)
class GeographicRegionViewSet(viewsets.ReadOnlyModelViewSet):
serializer_class = serializers.GeographicRegionSerializer
model = models.GeographicRegion
filter_class = GeographicRegionFilter
queryset = models.GeographicRegion.objects.all()
@api_view(['GET'])
@permission_classes((AllowAny,))
@renderer_classes((JSONRenderer,))
def get_min_and_max(request):
res = {}
varid = get_query_from_json(request).get('environmental_id')
if varid:
values = [
v.coded_value_float for v in models.Value.objects.filter(variable__id=varid)
if v.coded_value_float is not None]
vmin = min(values) if values else 0.0
vmax = max(values) if values else 0.0
res = {'min': format(vmin, '.4f'), 'max': format(vmax, '.4f')}
return Response(res)
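# For example, for a temperature variable the response might look like
# {'min': '-17.8000', 'max': '38.6000'} (values formatted to four decimal
# places); a request without 'environmental_id' yields an empty dict.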
@api_view(['GET'])
@permission_classes((AllowAny,))
@renderer_classes((JSONRenderer,))
def bin_cont_data(request): # MAKE THIS GENERIC
bf_id = get_query_from_json(request).get('bf_id')
bins = []
if bf_id:
values = models.Value.objects.filter(variable__id=bf_id)
min_value = None
max_value = 0.0
missing_data_option = False
for v in values:
if re.search('[a-zA-Z]', v.coded_value):
if not missing_data_option:
bins.append({
'code': v.coded_value,
'description': v.code.description,
'variable': bf_id,
})
missing_data_option = True
continue
else:
v.coded_value = v.coded_value.replace(',', '')
            value_f = float(v.coded_value)
            if min_value is None:
                min_value = value_f
                max_value = value_f
            else:
                if value_f < min_value:
                    min_value = value_f
                if value_f > max_value:
                    max_value = value_f
min_value = min_value or 0.0 # This is the case when there are no values!
data_range = max_value - min_value
bin_size = data_range / 5
min_bin = min_value
for x in range(0, 5):
            bin_min = min_bin
            bin_max = min_bin + bin_size
            bins.append({
                'code': x,
                'description': str(bin_min) + ' - ' + str(bin_max),
                'min': bin_min,
                'max': bin_max,
                'variable': bf_id,
            })
min_bin = min_bin + bin_size + 1
return Response(bins)
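# A worked sketch of the binning above (hypothetical numbers): with min_value=0
# and max_value=100, bin_size is 20 and the five bins cover 0-20, 21-41, 42-62,
# 63-83 and 84-104; note the +1 step between bins, which assumes integer-coded
# values.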
@api_view(['GET'])
@permission_classes((AllowAny,))
@renderer_classes((DPLACECSVRenderer,))
def csv_download(request):
query_dict = get_query_from_json(request)
result_set = result_set_from_query_dict(query_dict)
response = Response(serializers.SocietyResultSetSerializer(result_set).data)
filename = "dplace-societies-%s.csv" % datetime.datetime.now().strftime("%Y-%m-%d")
response['Content-Disposition'] = 'attachment; filename="%s"' % filename
return response
| mit | 6,217,632,777,301,787,000 | 39.819549 | 536 | 0.611945 | false |
vjmac15/Lyilis | lib/youtube_dl/extractor/aparat.py | 15 | 2195 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
int_or_none,
mimetype2ext,
)
class AparatIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?aparat\.com/(?:v/|video/video/embed/videohash/)(?P<id>[a-zA-Z0-9]+)'
_TEST = {
'url': 'http://www.aparat.com/v/wP8On',
'md5': '131aca2e14fe7c4dcb3c4877ba300c89',
'info_dict': {
'id': 'wP8On',
'ext': 'mp4',
'title': 'تیم گلکسی 11 - زومیت',
'age_limit': 0,
},
# 'skip': 'Extremely unreliable',
}
def _real_extract(self, url):
video_id = self._match_id(url)
# Note: There is an easier-to-parse configuration at
# http://www.aparat.com/video/video/config/videohash/%video_id
# but the URL in there does not work
webpage = self._download_webpage(
'http://www.aparat.com/video/video/embed/vt/frame/showvideo/yes/videohash/' + video_id,
video_id)
title = self._search_regex(r'\s+title:\s*"([^"]+)"', webpage, 'title')
file_list = self._parse_json(
self._search_regex(
r'fileList\s*=\s*JSON\.parse\(\'([^\']+)\'\)', webpage,
'file list'),
video_id)
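        # Assumed shape of file_list (inferred from the parsing below, not from
        # any documented API): a list whose first element is a list of dicts
        # such as {'file': <url>, 'type': <mime type>, 'label': '480p'}.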
formats = []
for item in file_list[0]:
file_url = item.get('file')
if not file_url:
continue
ext = mimetype2ext(item.get('type'))
label = item.get('label')
formats.append({
'url': file_url,
'ext': ext,
'format_id': label or ext,
'height': int_or_none(self._search_regex(
r'(\d+)[pP]', label or '', 'height', default=None)),
})
self._sort_formats(formats)
thumbnail = self._search_regex(
r'image:\s*"([^"]+)"', webpage, 'thumbnail', fatal=False)
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'age_limit': self._family_friendly_search(webpage),
'formats': formats,
}
| gpl-3.0 | -1,589,022,954,366,422,800 | 30.623188 | 106 | 0.499542 | false |
denys-duchier/django | tests/file_uploads/tests.py | 45 | 23742 | import base64
import hashlib
import os
import shutil
import sys
import tempfile as sys_tempfile
import unittest
from io import BytesIO, StringIO
from urllib.parse import quote
from django.core.files import temp as tempfile
from django.core.files.uploadedfile import SimpleUploadedFile
from django.http.multipartparser import MultiPartParser, parse_header
from django.test import SimpleTestCase, TestCase, client, override_settings
from django.utils.encoding import force_bytes
from . import uploadhandler
from .models import FileModel
UNICODE_FILENAME = 'test-0123456789_中文_Orléans.jpg'
MEDIA_ROOT = sys_tempfile.mkdtemp()
UPLOAD_TO = os.path.join(MEDIA_ROOT, 'test_upload')
@override_settings(MEDIA_ROOT=MEDIA_ROOT, ROOT_URLCONF='file_uploads.urls', MIDDLEWARE=[])
class FileUploadTests(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
if not os.path.isdir(MEDIA_ROOT):
os.makedirs(MEDIA_ROOT)
@classmethod
def tearDownClass(cls):
shutil.rmtree(MEDIA_ROOT)
super().tearDownClass()
def test_simple_upload(self):
with open(__file__, 'rb') as fp:
post_data = {
'name': 'Ringo',
'file_field': fp,
}
response = self.client.post('/upload/', post_data)
self.assertEqual(response.status_code, 200)
def test_large_upload(self):
file = tempfile.NamedTemporaryFile
with file(suffix=".file1") as file1, file(suffix=".file2") as file2:
file1.write(b'a' * (2 ** 21))
file1.seek(0)
file2.write(b'a' * (10 * 2 ** 20))
file2.seek(0)
post_data = {
'name': 'Ringo',
'file_field1': file1,
'file_field2': file2,
}
for key in list(post_data):
try:
post_data[key + '_hash'] = hashlib.sha1(post_data[key].read()).hexdigest()
post_data[key].seek(0)
except AttributeError:
post_data[key + '_hash'] = hashlib.sha1(force_bytes(post_data[key])).hexdigest()
response = self.client.post('/verify/', post_data)
self.assertEqual(response.status_code, 200)
def _test_base64_upload(self, content, encode=base64.b64encode):
payload = client.FakePayload("\r\n".join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file"; filename="test.txt"',
'Content-Type: application/octet-stream',
'Content-Transfer-Encoding: base64',
'']))
payload.write(b"\r\n" + encode(force_bytes(content)) + b"\r\n")
payload.write('--' + client.BOUNDARY + '--\r\n')
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/echo_content/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.json()['file'], content)
def test_base64_upload(self):
self._test_base64_upload("This data will be transmitted base64-encoded.")
def test_big_base64_upload(self):
self._test_base64_upload("Big data" * 68000) # > 512Kb
def test_big_base64_newlines_upload(self):
self._test_base64_upload("Big data" * 68000, encode=base64.encodebytes)
def test_unicode_file_name(self):
with sys_tempfile.TemporaryDirectory() as temp_dir:
# This file contains Chinese symbols and an accented char in the name.
with open(os.path.join(temp_dir, UNICODE_FILENAME), 'w+b') as file1:
file1.write(b'b' * (2 ** 10))
file1.seek(0)
response = self.client.post('/unicode_name/', {'file_unicode': file1})
self.assertEqual(response.status_code, 200)
def test_unicode_file_name_rfc2231(self):
"""
Test receiving file upload when filename is encoded with RFC2231
(#22971).
"""
payload = client.FakePayload()
payload.write('\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file_unicode"; filename*=UTF-8\'\'%s' % quote(UNICODE_FILENAME),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n',
'\r\n--' + client.BOUNDARY + '--\r\n'
]))
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/unicode_name/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_unicode_name_rfc2231(self):
"""
Test receiving file upload when filename is encoded with RFC2231
(#22971).
"""
payload = client.FakePayload()
payload.write(
'\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name*=UTF-8\'\'file_unicode; filename*=UTF-8\'\'%s' % quote(
UNICODE_FILENAME
),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n',
'\r\n--' + client.BOUNDARY + '--\r\n'
])
)
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/unicode_name/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_blank_filenames(self):
"""
Receiving file upload when filename is blank (before and after
sanitization) should be okay.
"""
# The second value is normalized to an empty name by
# MultiPartParser.IE_sanitize()
filenames = ['', 'C:\\Windows\\']
payload = client.FakePayload()
for i, name in enumerate(filenames):
payload.write('\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n'
]))
payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/echo/',
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
# Empty filenames should be ignored
received = response.json()
for i, name in enumerate(filenames):
self.assertIsNone(received.get('file%s' % i))
def test_dangerous_file_names(self):
"""Uploaded file names should be sanitized before ever reaching the view."""
# This test simulates possible directory traversal attacks by a
        # malicious uploader. We have to do some monkeybusiness here to construct
# a malicious payload with an invalid file name (containing os.sep or
        # os.pardir). This is similar to what an attacker would need to do when
# trying such an attack.
scary_file_names = [
"/tmp/hax0rd.txt", # Absolute path, *nix-style.
"C:\\Windows\\hax0rd.txt", # Absolute path, win-style.
"C:/Windows/hax0rd.txt", # Absolute path, broken-style.
"\\tmp\\hax0rd.txt", # Absolute path, broken in a different way.
"/tmp\\hax0rd.txt", # Absolute path, broken by mixing.
"subdir/hax0rd.txt", # Descendant path, *nix-style.
"subdir\\hax0rd.txt", # Descendant path, win-style.
"sub/dir\\hax0rd.txt", # Descendant path, mixed.
"../../hax0rd.txt", # Relative path, *nix-style.
"..\\..\\hax0rd.txt", # Relative path, win-style.
"../..\\hax0rd.txt" # Relative path, mixed.
]
payload = client.FakePayload()
for i, name in enumerate(scary_file_names):
payload.write('\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n'
]))
payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/echo/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
# The filenames should have been sanitized by the time it got to the view.
received = response.json()
for i, name in enumerate(scary_file_names):
got = received["file%s" % i]
self.assertEqual(got, "hax0rd.txt")
def test_filename_overflow(self):
"""File names over 256 characters (dangerous on some platforms) get fixed up."""
long_str = 'f' * 300
cases = [
# field name, filename, expected
('long_filename', '%s.txt' % long_str, '%s.txt' % long_str[:251]),
('long_extension', 'foo.%s' % long_str, '.%s' % long_str[:254]),
('no_extension', long_str, long_str[:255]),
('no_filename', '.%s' % long_str, '.%s' % long_str[:254]),
('long_everything', '%s.%s' % (long_str, long_str), '.%s' % long_str[:254]),
]
payload = client.FakePayload()
for name, filename, _ in cases:
payload.write("\r\n".join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="{}"; filename="{}"',
'Content-Type: application/octet-stream',
'',
'Oops.',
''
]).format(name, filename))
payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/echo/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
result = response.json()
for name, _, expected in cases:
got = result[name]
self.assertEqual(expected, got, 'Mismatch for {}'.format(name))
self.assertLess(len(got), 256,
"Got a long file name (%s characters)." % len(got))
def test_file_content(self):
file = tempfile.NamedTemporaryFile
with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file:
no_content_type.write(b'no content')
no_content_type.seek(0)
simple_file.write(b'text content')
simple_file.seek(0)
simple_file.content_type = 'text/plain'
string_io = StringIO('string content')
bytes_io = BytesIO(b'binary content')
response = self.client.post('/echo_content/', {
'no_content_type': no_content_type,
'simple_file': simple_file,
'string': string_io,
'binary': bytes_io,
})
received = response.json()
self.assertEqual(received['no_content_type'], 'no content')
self.assertEqual(received['simple_file'], 'text content')
self.assertEqual(received['string'], 'string content')
self.assertEqual(received['binary'], 'binary content')
def test_content_type_extra(self):
"""Uploaded files may have content type parameters available."""
file = tempfile.NamedTemporaryFile
with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file:
no_content_type.write(b'something')
no_content_type.seek(0)
simple_file.write(b'something')
simple_file.seek(0)
simple_file.content_type = 'text/plain; test-key=test_value'
response = self.client.post('/echo_content_type_extra/', {
'no_content_type': no_content_type,
'simple_file': simple_file,
})
received = response.json()
self.assertEqual(received['no_content_type'], {})
self.assertEqual(received['simple_file'], {'test-key': 'test_value'})
def test_truncated_multipart_handled_gracefully(self):
"""
If passed an incomplete multipart message, MultiPartParser does not
attempt to read beyond the end of the stream, and simply will handle
the part that can be parsed gracefully.
"""
payload_str = "\r\n".join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file"; filename="foo.txt"',
'Content-Type: application/octet-stream',
'',
'file contents'
'--' + client.BOUNDARY + '--',
'',
])
payload = client.FakePayload(payload_str[:-10])
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/echo/',
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
self.assertEqual(self.client.request(**r).json(), {})
def test_empty_multipart_handled_gracefully(self):
"""
If passed an empty multipart message, MultiPartParser will return
an empty QueryDict.
"""
r = {
'CONTENT_LENGTH': 0,
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/echo/',
'REQUEST_METHOD': 'POST',
'wsgi.input': client.FakePayload(b''),
}
self.assertEqual(self.client.request(**r).json(), {})
def test_custom_upload_handler(self):
file = tempfile.NamedTemporaryFile
with file() as smallfile, file() as bigfile:
# A small file (under the 5M quota)
smallfile.write(b'a' * (2 ** 21))
smallfile.seek(0)
# A big file (over the quota)
bigfile.write(b'a' * (10 * 2 ** 20))
bigfile.seek(0)
# Small file posting should work.
self.assertIn('f', self.client.post('/quota/', {'f': smallfile}).json())
# Large files don't go through.
self.assertNotIn('f', self.client.post("/quota/", {'f': bigfile}).json())
def test_broken_custom_upload_handler(self):
with tempfile.NamedTemporaryFile() as file:
file.write(b'a' * (2 ** 21))
file.seek(0)
# AttributeError: You cannot alter upload handlers after the upload has been processed.
with self.assertRaises(AttributeError):
self.client.post('/quota/broken/', {'f': file})
def test_fileupload_getlist(self):
file = tempfile.NamedTemporaryFile
with file() as file1, file() as file2, file() as file2a:
file1.write(b'a' * (2 ** 23))
file1.seek(0)
file2.write(b'a' * (2 * 2 ** 18))
file2.seek(0)
file2a.write(b'a' * (5 * 2 ** 20))
file2a.seek(0)
response = self.client.post('/getlist_count/', {
'file1': file1,
'field1': 'test',
'field2': 'test3',
'field3': 'test5',
'field4': 'test6',
'field5': 'test7',
'file2': (file2, file2a)
})
got = response.json()
self.assertEqual(got.get('file1'), 1)
self.assertEqual(got.get('file2'), 2)
def test_fileuploads_closed_at_request_end(self):
file = tempfile.NamedTemporaryFile
with file() as f1, file() as f2a, file() as f2b:
response = self.client.post('/fd_closing/t/', {
'file': f1,
'file2': (f2a, f2b),
})
request = response.wsgi_request
# The files were parsed.
self.assertTrue(hasattr(request, '_files'))
file = request._files['file']
self.assertTrue(file.closed)
files = request._files.getlist('file2')
self.assertTrue(files[0].closed)
self.assertTrue(files[1].closed)
def test_no_parsing_triggered_by_fd_closing(self):
file = tempfile.NamedTemporaryFile
with file() as f1, file() as f2a, file() as f2b:
response = self.client.post('/fd_closing/f/', {
'file': f1,
'file2': (f2a, f2b),
})
request = response.wsgi_request
# The fd closing logic doesn't trigger parsing of the stream
self.assertFalse(hasattr(request, '_files'))
def test_file_error_blocking(self):
"""
The server should not block when there are upload errors (bug #8622).
This can happen if something -- i.e. an exception handler -- tries to
access POST while handling an error in parsing POST. This shouldn't
cause an infinite loop!
"""
class POSTAccessingHandler(client.ClientHandler):
"""A handler that'll access POST during an exception."""
def handle_uncaught_exception(self, request, resolver, exc_info):
ret = super().handle_uncaught_exception(request, resolver, exc_info)
request.POST # evaluate
return ret
        # Maybe this is a little more complicated than it needs to be; but if
# the django.test.client.FakePayload.read() implementation changes then
# this test would fail. So we need to know exactly what kind of error
# it raises when there is an attempt to read more than the available bytes:
try:
client.FakePayload(b'a').read(2)
except Exception as err:
reference_error = err
# install the custom handler that tries to access request.POST
self.client.handler = POSTAccessingHandler()
with open(__file__, 'rb') as fp:
post_data = {
'name': 'Ringo',
'file_field': fp,
}
try:
self.client.post('/upload_errors/', post_data)
except reference_error.__class__ as err:
self.assertFalse(
str(err) == str(reference_error),
"Caught a repeated exception that'll cause an infinite loop in file uploads."
)
except Exception as err:
# CustomUploadError is the error that should have been raised
self.assertEqual(err.__class__, uploadhandler.CustomUploadError)
def test_filename_case_preservation(self):
"""
The storage backend shouldn't mess with the case of the filenames
uploaded.
"""
# Synthesize the contents of a file upload with a mixed case filename
# so we don't have to carry such a file in the Django tests source code
# tree.
vars = {'boundary': 'oUrBoUnDaRyStRiNg'}
post_data = [
'--%(boundary)s',
'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"',
'Content-Type: application/octet-stream',
'',
'file contents\n'
'',
'--%(boundary)s--\r\n',
]
response = self.client.post(
'/filename_case/',
'\r\n'.join(post_data) % vars,
'multipart/form-data; boundary=%(boundary)s' % vars
)
self.assertEqual(response.status_code, 200)
id = int(response.content)
obj = FileModel.objects.get(pk=id)
# The name of the file uploaded and the file stored in the server-side
# shouldn't differ.
self.assertEqual(os.path.basename(obj.testfile.path), 'MiXeD_cAsE.txt')
@override_settings(MEDIA_ROOT=MEDIA_ROOT)
class DirectoryCreationTests(SimpleTestCase):
"""
Tests for error handling during directory creation
via _save_FIELD_file (ticket #6450)
"""
@classmethod
def setUpClass(cls):
super().setUpClass()
if not os.path.isdir(MEDIA_ROOT):
os.makedirs(MEDIA_ROOT)
@classmethod
def tearDownClass(cls):
shutil.rmtree(MEDIA_ROOT)
super().tearDownClass()
def setUp(self):
self.obj = FileModel()
@unittest.skipIf(sys.platform == 'win32', "Python on Windows doesn't have working os.chmod().")
def test_readonly_root(self):
"""Permission errors are not swallowed"""
os.chmod(MEDIA_ROOT, 0o500)
self.addCleanup(os.chmod, MEDIA_ROOT, 0o700)
with self.assertRaises(PermissionError):
self.obj.testfile.save('foo.txt', SimpleUploadedFile('foo.txt', b'x'), save=False)
def test_not_a_directory(self):
"""The correct IOError is raised when the upload directory name exists but isn't a directory"""
# Create a file with the upload directory name
open(UPLOAD_TO, 'wb').close()
self.addCleanup(os.remove, UPLOAD_TO)
with self.assertRaises(IOError) as exc_info:
with SimpleUploadedFile('foo.txt', b'x') as file:
self.obj.testfile.save('foo.txt', file, save=False)
# The test needs to be done on a specific string as IOError
# is raised even without the patch (just not early enough)
self.assertEqual(exc_info.exception.args[0], "%s exists and is not a directory." % UPLOAD_TO)
class MultiParserTests(unittest.TestCase):
def test_empty_upload_handlers(self):
# We're not actually parsing here; just checking if the parser properly
# instantiates with empty upload handlers.
MultiPartParser({
'CONTENT_TYPE': 'multipart/form-data; boundary=_foo',
'CONTENT_LENGTH': '1'
}, StringIO('x'), [], 'utf-8')
def test_rfc2231_parsing(self):
test_data = (
(b"Content-Type: application/x-stuff; title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A",
"This is ***fun***"),
(b"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html",
"foo-ä.html"),
(b"Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html",
"foo-ä.html"),
)
for raw_line, expected_title in test_data:
parsed = parse_header(raw_line)
self.assertEqual(parsed[1]['title'], expected_title)
def test_rfc2231_wrong_title(self):
"""
Test wrongly formatted RFC 2231 headers (missing double single quotes).
Parsing should not crash (#24209).
"""
test_data = (
(b"Content-Type: application/x-stuff; title*='This%20is%20%2A%2A%2Afun%2A%2A%2A",
b"'This%20is%20%2A%2A%2Afun%2A%2A%2A"),
(b"Content-Type: application/x-stuff; title*='foo.html",
b"'foo.html"),
(b"Content-Type: application/x-stuff; title*=bar.html",
b"bar.html"),
)
for raw_line, expected_title in test_data:
parsed = parse_header(raw_line)
self.assertEqual(parsed[1]['title'], expected_title)
| bsd-3-clause | -218,260,698,169,522,560 | 38.558333 | 115 | 0.554624 | false |
onyxfish/agate | agate/table/bar_chart.py | 3 | 1303 | #!/usr/bin/env python
# pylint: disable=W0212
import leather
from agate import utils
def bar_chart(self, label=0, value=1, path=None, width=None, height=None):
"""
Render a bar chart using :class:`leather.Chart`.
:param label:
The name or index of a column to plot as the labels of the chart.
Defaults to the first column in the table.
:param value:
The name or index of a column to plot as the values of the chart.
Defaults to the second column in the table.
:param path:
If specified, the resulting SVG will be saved to this location. If
:code:`None` and running in IPython, then the SVG will be rendered
inline. Otherwise, the SVG data will be returned as a string.
:param width:
The width of the output SVG.
:param height:
The height of the output SVG.
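    Example (a sketch, assuming a table with 'group' and 'amount' columns):
        table.bar_chart(label='group', value='amount', path='bars.svg')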
"""
if type(label) is int:
label_name = self.column_names[label]
else:
label_name = label
if type(value) is int:
value_name = self.column_names[value]
else:
value_name = value
chart = leather.Chart()
chart.add_x_axis(name=value_name)
chart.add_y_axis(name=label_name)
chart.add_bars(self, x=value, y=label)
return chart.to_svg(path=path, width=width, height=height)
| mit | 4,765,904,349,357,894,000 | 29.302326 | 74 | 0.646201 | false |
heiscsy/evolutus_ros_src | evolutus_filter/build/catkin_generated/installspace/_setup_util.py | 4 | 12275 | #!/usr/bin/python
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
'''This file generates shell code for the setup.SHELL scripts to set environment variables'''
from __future__ import print_function
import argparse
import copy
import errno
import os
import platform
import sys
CATKIN_MARKER_FILE = '.catkin'
system = platform.system()
IS_DARWIN = (system == 'Darwin')
IS_WINDOWS = (system == 'Windows')
# subfolder of workspace prepended to CMAKE_PREFIX_PATH
ENV_VAR_SUBFOLDERS = {
'CMAKE_PREFIX_PATH': '',
'CPATH': 'include',
'LD_LIBRARY_PATH' if not IS_DARWIN else 'DYLD_LIBRARY_PATH': ['lib', os.path.join('lib', 'x86_64-linux-gnu')],
'PATH': 'bin',
'PKG_CONFIG_PATH': [os.path.join('lib', 'pkgconfig'), os.path.join('lib', 'x86_64-linux-gnu', 'pkgconfig')],
'PYTHONPATH': 'lib/python2.7/dist-packages',
}
def rollback_env_variables(environ, env_var_subfolders):
'''
Generate shell code to reset environment variables
by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH.
This does not cover modifications performed by environment hooks.
'''
lines = []
unmodified_environ = copy.copy(environ)
for key in sorted(env_var_subfolders.keys()):
subfolders = env_var_subfolders[key]
if not isinstance(subfolders, list):
subfolders = [subfolders]
for subfolder in subfolders:
value = _rollback_env_variable(unmodified_environ, key, subfolder)
if value is not None:
environ[key] = value
lines.append(assignment(key, value))
if lines:
lines.insert(0, comment('reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH'))
return lines
def _rollback_env_variable(environ, name, subfolder):
'''
For each catkin workspace in CMAKE_PREFIX_PATH remove the first entry from env[NAME] matching workspace + subfolder.
    :param subfolder: str '' or subfolder name that may start with '/'
:returns: the updated value of the environment variable.
'''
value = environ[name] if name in environ else ''
env_paths = [path for path in value.split(os.pathsep) if path]
value_modified = False
if subfolder:
if subfolder.startswith(os.path.sep) or (os.path.altsep and subfolder.startswith(os.path.altsep)):
subfolder = subfolder[1:]
if subfolder.endswith(os.path.sep) or (os.path.altsep and subfolder.endswith(os.path.altsep)):
subfolder = subfolder[:-1]
for ws_path in _get_workspaces(environ, include_fuerte=True, include_non_existing=True):
path_to_find = os.path.join(ws_path, subfolder) if subfolder else ws_path
path_to_remove = None
for env_path in env_paths:
env_path_clean = env_path[:-1] if env_path and env_path[-1] in [os.path.sep, os.path.altsep] else env_path
if env_path_clean == path_to_find:
path_to_remove = env_path
break
if path_to_remove:
env_paths.remove(path_to_remove)
value_modified = True
new_value = os.pathsep.join(env_paths)
return new_value if value_modified else None
def _get_workspaces(environ, include_fuerte=False, include_non_existing=False):
'''
Based on CMAKE_PREFIX_PATH return all catkin workspaces.
:param include_fuerte: The flag if paths starting with '/opt/ros/fuerte' should be considered workspaces, ``bool``
'''
# get all cmake prefix paths
env_name = 'CMAKE_PREFIX_PATH'
value = environ[env_name] if env_name in environ else ''
paths = [path for path in value.split(os.pathsep) if path]
# remove non-workspace paths
workspaces = [path for path in paths if os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE)) or (include_fuerte and path.startswith('/opt/ros/fuerte')) or (include_non_existing and not os.path.exists(path))]
return workspaces
def prepend_env_variables(environ, env_var_subfolders, workspaces):
'''
Generate shell code to prepend environment variables
    for all the workspaces.
'''
lines = []
lines.append(comment('prepend folders of workspaces to environment variables'))
paths = [path for path in workspaces.split(os.pathsep) if path]
prefix = _prefix_env_variable(environ, 'CMAKE_PREFIX_PATH', paths, '')
lines.append(prepend(environ, 'CMAKE_PREFIX_PATH', prefix))
for key in sorted([key for key in env_var_subfolders.keys() if key != 'CMAKE_PREFIX_PATH']):
subfolder = env_var_subfolders[key]
prefix = _prefix_env_variable(environ, key, paths, subfolder)
lines.append(prepend(environ, key, prefix))
return lines
def _prefix_env_variable(environ, name, paths, subfolders):
'''
    Return the prefix to prepend to the environment variable NAME, adding any path in PATHS without creating duplicate or empty items.
'''
value = environ[name] if name in environ else ''
environ_paths = [path for path in value.split(os.pathsep) if path]
checked_paths = []
for path in paths:
if not isinstance(subfolders, list):
subfolders = [subfolders]
for subfolder in subfolders:
path_tmp = path
if subfolder:
path_tmp = os.path.join(path_tmp, subfolder)
# exclude any path already in env and any path we already added
if path_tmp not in environ_paths and path_tmp not in checked_paths:
checked_paths.append(path_tmp)
prefix_str = os.pathsep.join(checked_paths)
if prefix_str != '' and environ_paths:
prefix_str += os.pathsep
return prefix_str
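# Illustrative sketch, not part of the original file: with a hypothetical
# environment {'PATH': '/usr/bin'} the helper above returns the new entries
# plus a trailing separator, ready to be prepended:
#   _prefix_env_variable({'PATH': '/usr/bin'}, 'PATH', ['/opt/ros/indigo'], 'bin')
#   -> '/opt/ros/indigo/bin' + os.pathsep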
def assignment(key, value):
if not IS_WINDOWS:
return 'export %s="%s"' % (key, value)
else:
return 'set %s=%s' % (key, value)
def comment(msg):
if not IS_WINDOWS:
return '# %s' % msg
else:
return 'REM %s' % msg
def prepend(environ, key, prefix):
if key not in environ or not environ[key]:
return assignment(key, prefix)
if not IS_WINDOWS:
return 'export %s="%s$%s"' % (key, prefix, key)
else:
return 'set %s=%s%%%s%%' % (key, prefix, key)
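# Illustrative sketch, not part of the original file: sample shell lines the
# two helpers above would emit for a hypothetical prefix:
#   prepend({'PATH': '/usr/bin'}, 'PATH', '/opt/ros/indigo/bin' + os.pathsep)
#   -> 'export PATH="/opt/ros/indigo/bin:$PATH"'   (POSIX)
#   -> 'set PATH=/opt/ros/indigo/bin;%PATH%'       (Windows)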
def find_env_hooks(environ, cmake_prefix_path):
'''
Generate shell code with found environment hooks
    for all the workspaces.
'''
lines = []
lines.append(comment('found environment hooks in workspaces'))
generic_env_hooks = []
generic_env_hooks_workspace = []
specific_env_hooks = []
specific_env_hooks_workspace = []
generic_env_hooks_by_filename = {}
specific_env_hooks_by_filename = {}
generic_env_hook_ext = 'bat' if IS_WINDOWS else 'sh'
specific_env_hook_ext = environ['CATKIN_SHELL'] if not IS_WINDOWS and 'CATKIN_SHELL' in environ and environ['CATKIN_SHELL'] else None
# remove non-workspace paths
workspaces = [path for path in cmake_prefix_path.split(os.pathsep) if path and os.path.isfile(os.path.join(path, CATKIN_MARKER_FILE))]
for workspace in reversed(workspaces):
env_hook_dir = os.path.join(workspace, 'etc', 'catkin', 'profile.d')
if os.path.isdir(env_hook_dir):
for filename in sorted(os.listdir(env_hook_dir)):
if filename.endswith('.%s' % generic_env_hook_ext):
# remove previous env hook with same name if present
if filename in generic_env_hooks_by_filename:
i = generic_env_hooks.index(generic_env_hooks_by_filename[filename])
generic_env_hooks.pop(i)
generic_env_hooks_workspace.pop(i)
# append env hook
generic_env_hooks.append(os.path.join(env_hook_dir, filename))
generic_env_hooks_workspace.append(workspace)
generic_env_hooks_by_filename[filename] = generic_env_hooks[-1]
elif specific_env_hook_ext is not None and filename.endswith('.%s' % specific_env_hook_ext):
# remove previous env hook with same name if present
if filename in specific_env_hooks_by_filename:
i = specific_env_hooks.index(specific_env_hooks_by_filename[filename])
specific_env_hooks.pop(i)
specific_env_hooks_workspace.pop(i)
# append env hook
specific_env_hooks.append(os.path.join(env_hook_dir, filename))
specific_env_hooks_workspace.append(workspace)
specific_env_hooks_by_filename[filename] = specific_env_hooks[-1]
env_hooks = generic_env_hooks + specific_env_hooks
env_hooks_workspace = generic_env_hooks_workspace + specific_env_hooks_workspace
count = len(env_hooks)
lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_COUNT', count))
for i in range(count):
lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_%d' % i, env_hooks[i]))
lines.append(assignment('_CATKIN_ENVIRONMENT_HOOKS_%d_WORKSPACE' % i, env_hooks_workspace[i]))
return lines
def _parse_arguments(args=None):
parser = argparse.ArgumentParser(description='Generates code blocks for the setup.SHELL script.')
parser.add_argument('--extend', action='store_true', help='Skip unsetting previous environment variables to extend context')
return parser.parse_known_args(args=args)[0]
if __name__ == '__main__':
try:
try:
args = _parse_arguments()
except Exception as e:
print(e, file=sys.stderr)
sys.exit(1)
# environment at generation time
CMAKE_PREFIX_PATH = '/home/yuhan/catkin_ws/devel;/opt/ros/indigo'.split(';')
# prepend current workspace if not already part of CPP
base_path = os.path.dirname(__file__)
if base_path not in CMAKE_PREFIX_PATH:
CMAKE_PREFIX_PATH.insert(0, base_path)
CMAKE_PREFIX_PATH = os.pathsep.join(CMAKE_PREFIX_PATH)
environ = dict(os.environ)
lines = []
if not args.extend:
lines += rollback_env_variables(environ, ENV_VAR_SUBFOLDERS)
lines += prepend_env_variables(environ, ENV_VAR_SUBFOLDERS, CMAKE_PREFIX_PATH)
lines += find_env_hooks(environ, CMAKE_PREFIX_PATH)
print('\n'.join(lines))
# need to explicitly flush the output
sys.stdout.flush()
except IOError as e:
        # and catch potential "broken pipe" if stdout is not writable
# which can happen when piping the output to a file but the disk is full
if e.errno == errno.EPIPE:
print(e, file=sys.stderr)
sys.exit(2)
raise
sys.exit(0)
| gpl-2.0 | 4,278,920,657,189,255,000 | 41.770035 | 213 | 0.657515 | false |
robhudson/kuma | vendor/packages/translate/storage/wordfast.py | 25 | 16043 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007-2010 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Manage the Wordfast Translation Memory format
Wordfast TM format is the Translation Memory format used by the
`Wordfast <http://www.wordfast.net/>`_ computer aided translation tool.
It is a bilingual base class derived format with :class:`WordfastTMFile`
and :class:`WordfastUnit` providing file and unit level access.
Wordfast is a computer aided translation tool. It is an application
built on top of Microsoft Word and is implemented as a rather
sophisticated set of macros. Understanding that helps us understand
many of the seemingly strange choices around this format including:
encoding, escaping and file naming.
Implementation
The implementation covers the full requirements of a Wordfast TM file.
The files are simple Tab Separated Value (TSV) files that can be read
by Microsoft Excel and other spreadsheet programs. They use the .txt
extension which does make it more difficult to automatically identify
such files.
The dialect of the TSV files is specified by :class:`WordfastDialect`.
Encoding
The files are UTF-16 or ISO-8859-1 (Latin1) encoded. These choices
are most likely because Microsoft Word is the base editing tool for
Wordfast.
The format is tab separated so we are able to detect UTF-16 vs Latin-1
   by searching for the occurrence of a UTF-16 tab character and then
continuing with the parsing.
Timestamps
:class:`WordfastTime` allows for the correct management of the Wordfast
YYYYMMDD~HHMMSS timestamps. However, timestamps on individual units are
not updated when edited.
Header
:class:`WordfastHeader` provides header management support. The header
functionality is fully implemented through observing the behaviour of the
files in real use cases, input from the Wordfast programmers and
public documentation.
Escaping
Wordfast TM implements a form of escaping that covers two aspects:
   1. Placeable: bold, formatting, etc. These are left as is and ignored. It
is up to the editor and future placeable implementation to manage these.
2. Escapes: items that may confuse Excel or translators are escaped as
``&'XX;``. These are fully implemented and are converted to and from
      Unicode. By observing behaviour and reading documentation we were able
to observe all possible escapes. Unfortunately the escaping differs
slightly between Windows and Mac version. This might cause errors in
future. Functions allow for ``<_wf_to_char>`` and back to Wordfast
escape (``<_char_to_wf>``).
Extended Attributes
The last 4 columns allow users to define and manage extended attributes.
   These are left as is and are not directly managed by our implementation.
"""
import csv
import time
from translate.storage import base
WF_TIMEFORMAT = "%Y%m%d~%H%M%S"
"""Time format used by Wordfast"""
WF_FIELDNAMES_HEADER = ["date", "userlist", "tucount", "src-lang", "version",
"target-lang", "license", "attr1list", "attr2list",
"attr3list", "attr4list", "attr5list"]
"""Field names for the Wordfast header"""
WF_FIELDNAMES = ["date", "user", "reuse", "src-lang", "source", "target-lang",
"target", "attr1", "attr2", "attr3", "attr4"]
"""Field names for a Wordfast TU"""
WF_FIELDNAMES_HEADER_DEFAULTS = {
"date": "%19000101~121212",
"userlist": "%User ID,TT,TT Translate-Toolkit",
"tucount": "%TU=00000001",
"src-lang": "%EN-US",
"version": "%Wordfast TM v.5.51w9/00",
"target-lang": "",
"license": "%---00000001",
"attr1list": "",
"attr2list": "",
"attr3list": "",
"attr4list": "",
}
"""Default or minimum header entries for a Wordfast file"""
# TODO Needs validation. The following need to be checked against a WF TM file
# to ensure that the correct Unicode values have been chosen for the characters.
# For now these look correct and have been taken from Windows CP1252 and
# Macintosh code points found for the respective character sets on Linux.
WF_ESCAPE_MAP = (
("&'26;", u"\u0026"), # & - Ampersand (must be first to prevent
# escaping of escapes)
("&'82;", u"\u201A"), # ‚ - Single low-9 quotation mark
("&'85;", u"\u2026"), # … - Elippsis
("&'91;", u"\u2018"), # ‘ - left single quotation mark
("&'92;", u"\u2019"), # ’ - right single quotation mark
("&'93;", u"\u201C"), # “ - left double quotation mark
("&'94;", u"\u201D"), # ” - right double quotation mark
("&'96;", u"\u2013"), # – - en dash (validate)
("&'97;", u"\u2014"), # — - em dash (validate)
("&'99;", u"\u2122"), # ™ - Trade mark
# Windows only
("&'A0;", u"\u00A0"), # - Non breaking space
("&'A9;", u"\u00A9"), # © - Copyright
("&'AE;", u"\u00AE"), # ® - Registered
("&'BC;", u"\u00BC"), # ¼
("&'BD;", u"\u00BD"), # ½
("&'BE;", u"\u00BE"), # ¾
# Mac only
("&'A8;", u"\u00AE"), # ® - Registered
("&'AA;", u"\u2122"), # ™ - Trade mark
("&'C7;", u"\u00AB"), # « - Left-pointing double angle quotation mark
("&'C8;", u"\u00BB"), # » - Right-pointing double angle quotation mark
("&'C9;", u"\u2026"), # … - Horizontal Elippsis
("&'CA;", u"\u00A0"), # - Non breaking space
("&'D0;", u"\u2013"), # – - en dash (validate)
("&'D1;", u"\u2014"), # — - em dash (validate)
("&'D2;", u"\u201C"), # “ - left double quotation mark
("&'D3;", u"\u201D"), # ” - right double quotation mark
("&'D4;", u"\u2018"), # ‘ - left single quotation mark
("&'D5;", u"\u2019"), # ’ - right single quotation mark
("&'E2;", u"\u201A"), # ‚ - Single low-9 quotation mark
("&'E3;", u"\u201E"), # „ - Double low-9 quotation mark
# Other markers
#("&'B;", u"\n"), # Soft-break - XXX creates a problem with
# roundtripping could also be represented
# by \u2028
)
"""Mapping of Wordfast &'XX; escapes to correct Unicode characters"""
TAB_UTF16 = "\x00\x09"
"""The tab \\t character as it would appear in UTF-16 encoding"""
def _char_to_wf(string):
"""Char -> Wordfast &'XX; escapes
Full roundtripping is not possible because of the escaping of
NEWLINE \\n and TAB \\t"""
# FIXME there is no platform check to ensure that we use Mac encodings
# when running on a Mac
if string:
for code, char in WF_ESCAPE_MAP:
string = string.replace(char.encode('utf-8'), code)
string = string.replace("\n", "\\n").replace("\t", "\\t")
return string
def _wf_to_char(string):
"""Wordfast &'XX; escapes -> Char"""
if string:
for code, char in WF_ESCAPE_MAP:
string = string.replace(code, char.encode('utf-8'))
string = string.replace("\\n", "\n").replace("\\t", "\t")
return string
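# Illustrative round-trip sketch, not part of the original module (assumes
# UTF-8 byte strings, as the two helpers above expect):
#   _char_to_wf(u"\u2026".encode('utf-8'))  # -> "&'85;" (ellipsis)
#   _wf_to_char("&'85;")                    # -> u"\u2026".encode('utf-8')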
class WordfastDialect(csv.Dialect):
"""Describe the properties of a Wordfast generated TAB-delimited file."""
delimiter = "\t"
lineterminator = "\r\n"
quoting = csv.QUOTE_NONE
csv.register_dialect("wordfast", WordfastDialect)
class WordfastTime(object):
"""Manages time stamps in the Wordfast format of YYYYMMDD~hhmmss"""
def __init__(self, newtime=None):
self._time = None
if not newtime:
self.time = None
elif isinstance(newtime, basestring):
self.timestring = newtime
elif isinstance(newtime, time.struct_time):
self.time = newtime
def get_timestring(self):
"""Get the time in the Wordfast time format"""
if not self._time:
return None
else:
return time.strftime(WF_TIMEFORMAT, self._time)
def set_timestring(self, timestring):
"""Set the time_sturct object using a Wordfast time formated string
:param timestring: A Wordfast time string (YYYMMDD~hhmmss)
:type timestring: String
"""
self._time = time.strptime(timestring, WF_TIMEFORMAT)
timestring = property(get_timestring, set_timestring)
def get_time(self):
"""Get the time_struct object"""
return self._time
def set_time(self, newtime):
"""Set the time_struct object
:param newtime: a new time object
:type newtime: time.time_struct
"""
if newtime and isinstance(newtime, time.struct_time):
self._time = newtime
else:
self._time = None
time = property(get_time, set_time)
def __str__(self):
if not self.timestring:
return ""
else:
return self.timestring
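# Illustrative usage sketch, not part of the original module:
#   wftime = WordfastTime("20070101~130000")
#   wftime.time.tm_year                      # -> 2007
#   str(WordfastTime(time.localtime()))      # -> e.g. "20240101~120000"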
class WordfastHeader(object):
"""A wordfast translation memory header"""
def __init__(self, header=None):
self._header_dict = []
if not header:
self.header = self._create_default_header()
elif isinstance(header, dict):
self.header = header
def _create_default_header(self):
"""Create a default Wordfast header with the date set to the current
time"""
defaultheader = WF_FIELDNAMES_HEADER_DEFAULTS
defaultheader['date'] = '%%%s' % WordfastTime(time.localtime()).timestring
return defaultheader
def getheader(self):
"""Get the header dictionary"""
return self._header_dict
def setheader(self, newheader):
self._header_dict = newheader
header = property(getheader, setheader)
def settargetlang(self, newlang):
self._header_dict['target-lang'] = '%%%s' % newlang
targetlang = property(None, settargetlang)
def settucount(self, count):
self._header_dict['tucount'] = '%%TU=%08d' % count
tucount = property(None, settucount)
class WordfastUnit(base.TranslationUnit):
"""A Wordfast translation memory unit"""
def __init__(self, source=None):
self._dict = {}
if source:
self.source = source
super(WordfastUnit, self).__init__(source)
def _update_timestamp(self):
"""Refresh the timestamp for the unit"""
self._dict['date'] = WordfastTime(time.localtime()).timestring
def getdict(self):
"""Get the dictionary of values for a Wordfast line"""
return self._dict
def setdict(self, newdict):
"""Set the dictionary of values for a Wordfast line
:param newdict: a new dictionary with Wordfast line elements
:type newdict: Dict
"""
# TODO First check that the values are OK
self._dict = newdict
dict = property(getdict, setdict)
def _get_source_or_target(self, key):
if self._dict.get(key, None) is None:
return None
elif self._dict[key]:
return _wf_to_char(self._dict[key]).decode('utf-8')
else:
return ""
def _set_source_or_target(self, key, newvalue):
if newvalue is None:
            self._dict[key] = None
            return
if isinstance(newvalue, unicode):
newvalue = newvalue.encode('utf-8')
newvalue = _char_to_wf(newvalue)
        if key not in self._dict or newvalue != self._dict[key]:
self._dict[key] = newvalue
self._update_timestamp()
def getsource(self):
return self._get_source_or_target('source')
def setsource(self, newsource):
self._rich_source = None
return self._set_source_or_target('source', newsource)
source = property(getsource, setsource)
def gettarget(self):
return self._get_source_or_target('target')
def settarget(self, newtarget):
self._rich_target = None
return self._set_source_or_target('target', newtarget)
target = property(gettarget, settarget)
def settargetlang(self, newlang):
self._dict['target-lang'] = newlang
targetlang = property(None, settargetlang)
def __str__(self):
return str(self._dict)
def istranslated(self):
if not self._dict.get('source', None):
return False
return bool(self._dict.get('target', None))
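# Illustrative unit sketch, not part of the original module:
#   unit = WordfastUnit()
#   unit.source = u"File"
#   unit.target = u"Fichier"
#   unit.istranslated()     # -> True; unit.dict['date'] is also refreshed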
class WordfastTMFile(base.TranslationStore):
"""A Wordfast translation memory file"""
Name = "Wordfast Translation Memory"
Mimetypes = ["application/x-wordfast"]
Extensions = ["txt"]
def __init__(self, inputfile=None, unitclass=WordfastUnit):
"""construct a Wordfast TM, optionally reading in from inputfile."""
self.UnitClass = unitclass
base.TranslationStore.__init__(self, unitclass=unitclass)
self.filename = ''
self.header = WordfastHeader()
self._encoding = 'iso-8859-1'
if inputfile is not None:
self.parse(inputfile)
def parse(self, input):
"""parsese the given file or file source string"""
if hasattr(input, 'name'):
self.filename = input.name
elif not getattr(self, 'filename', ''):
self.filename = ''
if hasattr(input, "read"):
tmsrc = input.read()
input.close()
input = tmsrc
if TAB_UTF16 in input.split("\n")[0]:
self._encoding = 'utf-16'
else:
self._encoding = 'iso-8859-1'
try:
input = input.decode(self._encoding).encode('utf-8')
except:
raise ValueError("Wordfast files are either UTF-16 (UCS2) or ISO-8859-1 encoded")
for header in csv.DictReader(input.split("\n")[:1],
fieldnames=WF_FIELDNAMES_HEADER,
dialect="wordfast"):
self.header = WordfastHeader(header)
lines = csv.DictReader(input.split("\n")[1:],
fieldnames=WF_FIELDNAMES,
dialect="wordfast")
for line in lines:
newunit = WordfastUnit()
newunit.dict = line
self.addunit(newunit)
def __str__(self):
output = csv.StringIO()
header_output = csv.StringIO()
writer = csv.DictWriter(output, fieldnames=WF_FIELDNAMES,
dialect="wordfast")
unit_count = 0
for unit in self.units:
if unit.istranslated():
unit_count += 1
writer.writerow(unit.dict)
if unit_count == 0:
return ""
output.reset()
self.header.tucount = unit_count
outheader = csv.DictWriter(header_output,
fieldnames=WF_FIELDNAMES_HEADER,
dialect="wordfast")
outheader.writerow(self.header.header)
header_output.reset()
decoded = "".join(header_output.readlines() + output.readlines()).decode('utf-8')
try:
return decoded.encode(self._encoding)
except UnicodeEncodeError:
return decoded.encode('utf-16')
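# Illustrative store usage sketch, not part of the original module (assumes
# a Wordfast TM file named "memory.txt"):
#   store = WordfastTMFile(open("memory.txt", "rb"))
#   for unit in store.units:
#       print unit.source, "->", unit.target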
| mpl-2.0 | -3,540,750,116,507,062,000 | 36.71934 | 93 | 0.598887 | false |
cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools | third_party/pylint/checkers/similar.py | 64 | 14174 | # pylint: disable=W0622
# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:[email protected]
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""a similarities / code duplication command line tool and pylint checker
"""
from __future__ import print_function
import sys
from collections import defaultdict
from logilab.common.ureports import Table
from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker, table_lines_from_stats
import six
from six.moves import zip
class Similar(object):
"""finds copy-pasted lines of code in a project"""
def __init__(self, min_lines=4, ignore_comments=False,
ignore_docstrings=False, ignore_imports=False):
self.min_lines = min_lines
self.ignore_comments = ignore_comments
self.ignore_docstrings = ignore_docstrings
self.ignore_imports = ignore_imports
self.linesets = []
def append_stream(self, streamid, stream, encoding=None):
"""append a file to search for similarities"""
if encoding is None:
readlines = stream.readlines
else:
readlines = lambda: [line.decode(encoding) for line in stream]
try:
self.linesets.append(LineSet(streamid,
readlines(),
self.ignore_comments,
self.ignore_docstrings,
self.ignore_imports))
except UnicodeDecodeError:
pass
def run(self):
"""start looking for similarities and display results on stdout"""
self._display_sims(self._compute_sims())
def _compute_sims(self):
"""compute similarities in appended files"""
no_duplicates = defaultdict(list)
for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
duplicate = no_duplicates[num]
for couples in duplicate:
if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
couples.add((lineset1, idx1))
couples.add((lineset2, idx2))
break
else:
duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
sims = []
for num, ensembles in six.iteritems(no_duplicates):
for couples in ensembles:
sims.append((num, couples))
sims.sort()
sims.reverse()
return sims
def _display_sims(self, sims):
"""display computed similarities on stdout"""
nb_lignes_dupliquees = 0
for num, couples in sims:
print()
print(num, "similar lines in", len(couples), "files")
couples = sorted(couples)
for lineset, idx in couples:
print("==%s:%s" % (lineset.name, idx))
# pylint: disable=W0631
for line in lineset._real_lines[idx:idx+num]:
print(" ", line.rstrip())
nb_lignes_dupliquees += num * (len(couples)-1)
nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
print("TOTAL lines=%s duplicates=%s percent=%.2f" \
% (nb_total_lignes, nb_lignes_dupliquees,
nb_lignes_dupliquees*100. / nb_total_lignes))
def _find_common(self, lineset1, lineset2):
"""find similarities in the two given linesets"""
lines1 = lineset1.enumerate_stripped
lines2 = lineset2.enumerate_stripped
find = lineset2.find
index1 = 0
min_lines = self.min_lines
while index1 < len(lineset1):
skip = 1
num = 0
for index2 in find(lineset1[index1]):
non_blank = 0
for num, ((_, line1), (_, line2)) in enumerate(
zip(lines1(index1), lines2(index2))):
if line1 != line2:
if non_blank > min_lines:
yield num, lineset1, index1, lineset2, index2
skip = max(skip, num)
break
if line1:
non_blank += 1
else:
                    # we may have reached the end
num += 1
if non_blank > min_lines:
yield num, lineset1, index1, lineset2, index2
skip = max(skip, num)
index1 += skip
def _iter_sims(self):
"""iterate on similarities among all files, by making a cartesian
product
"""
for idx, lineset in enumerate(self.linesets[:-1]):
for lineset2 in self.linesets[idx+1:]:
for sim in self._find_common(lineset, lineset2):
yield sim
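# Illustrative usage sketch, not part of the original module:
#   sim = Similar(min_lines=4, ignore_comments=True)
#   for name in ('a.py', 'b.py'):           # hypothetical input files
#       with open(name) as stream:
#           sim.append_stream(name, stream)
#   sim.run()   # prints duplicated blocks plus a TOTAL summary line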
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
"""return lines with leading/trailing whitespace and any ignored code
features removed
"""
strippedlines = []
docstring = None
for line in lines:
line = line.strip()
if ignore_docstrings:
if not docstring and \
(line.startswith('"""') or line.startswith("'''")):
docstring = line[:3]
line = line[3:]
if docstring:
if line.endswith(docstring):
docstring = None
line = ''
if ignore_imports:
if line.startswith("import ") or line.startswith("from "):
line = ''
if ignore_comments:
# XXX should use regex in checkers/format to avoid cutting
# at a "#" in a string
line = line.split('#', 1)[0].strip()
strippedlines.append(line)
return strippedlines
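# Illustrative sketch, not part of the original module:
#   stripped_lines(['import os\n', 'x = 1  # init\n'],
#                  ignore_comments=True, ignore_docstrings=True,
#                  ignore_imports=True)
#   -> ['', 'x = 1']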
class LineSet(object):
"""Holds and indexes all the lines of a single source file"""
def __init__(self, name, lines, ignore_comments=False,
ignore_docstrings=False, ignore_imports=False):
self.name = name
self._real_lines = lines
self._stripped_lines = stripped_lines(lines, ignore_comments,
ignore_docstrings,
ignore_imports)
self._index = self._mk_index()
def __str__(self):
return '<Lineset for %s>' % self.name
def __len__(self):
return len(self._real_lines)
def __getitem__(self, index):
return self._stripped_lines[index]
def __lt__(self, other):
return self.name < other.name
def __hash__(self):
return id(self)
def enumerate_stripped(self, start_at=0):
"""return an iterator on stripped lines, starting from a given index
if specified, else 0
"""
idx = start_at
if start_at:
lines = self._stripped_lines[start_at:]
else:
lines = self._stripped_lines
for line in lines:
#if line:
yield idx, line
idx += 1
def find(self, stripped_line):
"""return positions of the given stripped line in this set"""
return self._index.get(stripped_line, ())
def _mk_index(self):
"""create the index for this set"""
index = defaultdict(list)
for line_no, line in enumerate(self._stripped_lines):
if line:
index[line].append(line_no)
return index
MSGS = {'R0801': ('Similar lines in %s files\n%s',
'duplicate-code',
'Indicates that a set of similar lines has been detected \
              among multiple files. This usually means that the code should \
be refactored to avoid this duplication.')}
def report_similarities(sect, stats, old_stats):
"""make a layout with some stats about duplication"""
lines = ['', 'now', 'previous', 'difference']
lines += table_lines_from_stats(stats, old_stats,
('nb_duplicated_lines',
'percent_duplicated_lines'))
sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
# wrapper to get a pylint checker from the similar class
class SimilarChecker(BaseChecker, Similar):
"""checks for similarities and duplicated code. This computation may be
    memory / CPU intensive, so you should disable it if you experience some
problems.
"""
__implements__ = (IRawChecker,)
# configuration section name
name = 'similarities'
# messages
msgs = MSGS
# configuration options
# for available dict keys/values see the optik parser 'add_option' method
options = (('min-similarity-lines',
{'default' : 4, 'type' : "int", 'metavar' : '<int>',
'help' : 'Minimum lines number of a similarity.'}),
('ignore-comments',
{'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore comments when computing similarities.'}
),
('ignore-docstrings',
{'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore docstrings when computing similarities.'}
),
('ignore-imports',
{'default' : False, 'type' : 'yn', 'metavar' : '<y or n>',
'help': 'Ignore imports when computing similarities.'}
),
)
# reports
reports = (('RP0801', 'Duplication', report_similarities),)
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
Similar.__init__(self, min_lines=4,
ignore_comments=True, ignore_docstrings=True)
self.stats = None
def set_option(self, optname, value, action=None, optdict=None):
"""method called to set an option (registered in the options list)
overridden to report options setting to Similar
"""
BaseChecker.set_option(self, optname, value, action, optdict)
if optname == 'min-similarity-lines':
self.min_lines = self.config.min_similarity_lines
elif optname == 'ignore-comments':
self.ignore_comments = self.config.ignore_comments
elif optname == 'ignore-docstrings':
self.ignore_docstrings = self.config.ignore_docstrings
elif optname == 'ignore-imports':
self.ignore_imports = self.config.ignore_imports
def open(self):
"""init the checkers: reset linesets and statistics information"""
self.linesets = []
self.stats = self.linter.add_stats(nb_duplicated_lines=0,
percent_duplicated_lines=0)
def process_module(self, node):
"""process a module
the module's content is accessible via the stream object
stream must implement the readlines method
"""
with node.stream() as stream:
self.append_stream(self.linter.current_name,
stream,
node.file_encoding)
def close(self):
"""compute and display similarities on closing (i.e. end of parsing)"""
total = sum([len(lineset) for lineset in self.linesets])
duplicated = 0
stats = self.stats
for num, couples in self._compute_sims():
msg = []
for lineset, idx in couples:
msg.append("==%s:%s" % (lineset.name, idx))
msg.sort()
# pylint: disable=W0631
for line in lineset._real_lines[idx:idx+num]:
msg.append(line.rstrip())
self.add_message('R0801', args=(len(couples), '\n'.join(msg)))
duplicated += num * (len(couples) - 1)
stats['nb_duplicated_lines'] = duplicated
stats['percent_duplicated_lines'] = total and duplicated * 100. / total
def register(linter):
"""required method to auto register this checker """
linter.register_checker(SimilarChecker(linter))
def usage(status=0):
"""display command line usage information"""
print("finds copy pasted blocks in a set of files")
print()
print('Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...')
sys.exit(status)
def Run(argv=None):
"""standalone command line access point"""
if argv is None:
argv = sys.argv[1:]
from getopt import getopt
s_opts = 'hdi'
l_opts = ('help', 'duplicates=', 'ignore-comments', 'ignore-imports',
'ignore-docstrings')
min_lines = 4
ignore_comments = False
ignore_docstrings = False
ignore_imports = False
opts, args = getopt(argv, s_opts, l_opts)
for opt, val in opts:
if opt in ('-d', '--duplicates'):
min_lines = int(val)
elif opt in ('-h', '--help'):
usage()
elif opt in ('-i', '--ignore-comments'):
ignore_comments = True
elif opt in ('--ignore-docstrings',):
ignore_docstrings = True
elif opt in ('--ignore-imports',):
ignore_imports = True
if not args:
usage(1)
sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
for filename in args:
with open(filename) as stream:
sim.append_stream(filename, stream)
sim.run()
sys.exit(0)
if __name__ == '__main__':
Run()
| bsd-3-clause | 1,616,317,361,231,289,600 | 37.102151 | 80 | 0.564484 | false |
wnesl/gnuradio-IA | gnuradio-core/src/python/gnuradio/gr/qa_hier_block2.py | 18 | 13570 | #!/usr/bin/env python
from gnuradio import gr, gr_unittest
class test_hier_block2(gr_unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
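    # Illustrative wiring pattern exercised throughout these tests
    # (sketch, not an actual test case):
    #   hb = gr.hier_block2("block", in_sig, out_sig)
    #   hb.connect(hb, inner_block)    # external input  -> inner block
    #   hb.connect(inner_block, hb)    # inner block     -> external output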
def test_001_make(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
self.assertEqual("test_block", hblock.name())
self.assertEqual(1, hblock.input_signature().max_streams())
self.assertEqual(1, hblock.output_signature().min_streams())
self.assertEqual(1, hblock.output_signature().max_streams())
self.assertEqual(gr.sizeof_int, hblock.output_signature().sizeof_stream_item(0))
def test_002_connect_input(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
hblock.connect(hblock, nop1)
def test_004_connect_output(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
hblock.connect(nop1, hblock)
def test_005_connect_output_in_use(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
nop2 = gr.nop(gr.sizeof_int)
hblock.connect(nop1, hblock)
self.assertRaises(ValueError,
lambda: hblock.connect(nop2, hblock))
def test_006_connect_invalid_src_port_neg(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
self.assertRaises(ValueError,
lambda: hblock.connect((hblock, -1), nop1))
def test_005_connect_invalid_src_port_exceeds(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
self.assertRaises(ValueError,
lambda: hblock.connect((hblock, 1), nop1))
def test_007_connect_invalid_dst_port_neg(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
nop2 = gr.nop(gr.sizeof_int)
self.assertRaises(ValueError,
lambda: hblock.connect(nop1, (nop2, -1)))
def test_008_connect_invalid_dst_port_exceeds(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.null_sink(gr.sizeof_int)
nop2 = gr.null_sink(gr.sizeof_int)
self.assertRaises(ValueError,
lambda: hblock.connect(nop1, (nop2, 1)))
def test_009_check_topology(self):
hblock = gr.top_block("test_block")
hblock.check_topology(0, 0)
def test_010_run(self):
expected = (1.0, 2.0, 3.0, 4.0)
hblock = gr.top_block("test_block")
src = gr.vector_source_f(expected, False)
sink1 = gr.vector_sink_f()
sink2 = gr.vector_sink_f()
hblock.connect(src, sink1)
hblock.connect(src, sink2)
hblock.run()
actual1 = sink1.data()
actual2 = sink2.data()
self.assertEquals(expected, actual1)
self.assertEquals(expected, actual2)
def test_012_disconnect_input(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
hblock.connect(hblock, nop1)
hblock.disconnect(hblock, nop1)
def test_013_disconnect_input_not_connected(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
nop2 = gr.nop(gr.sizeof_int)
hblock.connect(hblock, nop1)
self.assertRaises(ValueError,
lambda: hblock.disconnect(hblock, nop2))
def test_014_disconnect_input_neg(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
hblock.connect(hblock, nop1)
self.assertRaises(ValueError,
lambda: hblock.disconnect((hblock, -1), nop1))
def test_015_disconnect_input_exceeds(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
hblock.connect(hblock, nop1)
self.assertRaises(ValueError,
lambda: hblock.disconnect((hblock, 1), nop1))
def test_016_disconnect_output(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
hblock.connect(nop1, hblock)
hblock.disconnect(nop1, hblock)
def test_017_disconnect_output_not_connected(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
nop2 = gr.nop(gr.sizeof_int)
hblock.connect(nop1, hblock)
self.assertRaises(ValueError,
lambda: hblock.disconnect(nop2, hblock))
def test_018_disconnect_output_neg(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
hblock.connect(hblock, nop1)
self.assertRaises(ValueError,
lambda: hblock.disconnect(nop1, (hblock, -1)))
def test_019_disconnect_output_exceeds(self):
hblock = gr.hier_block2("test_block",
gr.io_signature(1,1,gr.sizeof_int),
gr.io_signature(1,1,gr.sizeof_int))
nop1 = gr.nop(gr.sizeof_int)
hblock.connect(nop1, hblock)
self.assertRaises(ValueError,
lambda: hblock.disconnect(nop1, (hblock, 1)))
def test_020_run(self):
hblock = gr.top_block("test_block")
data = (1.0, 2.0, 3.0, 4.0)
src = gr.vector_source_f(data, False)
dst = gr.vector_sink_f()
hblock.connect(src, dst)
hblock.run()
self.assertEquals(data, dst.data())
def test_021_connect_single(self):
hblock = gr.top_block("test_block")
blk = gr.hier_block2("block",
gr.io_signature(0, 0, 0),
gr.io_signature(0, 0, 0))
hblock.connect(blk)
def test_022_connect_single_with_ports(self):
hblock = gr.top_block("test_block")
blk = gr.hier_block2("block",
gr.io_signature(1, 1, 1),
gr.io_signature(1, 1, 1))
self.assertRaises(ValueError,
lambda: hblock.connect(blk))
def test_023_connect_single_twice(self):
hblock = gr.top_block("test_block")
blk = gr.hier_block2("block",
gr.io_signature(0, 0, 0),
gr.io_signature(0, 0, 0))
hblock.connect(blk)
self.assertRaises(ValueError,
lambda: hblock.connect(blk))
def test_024_disconnect_single(self):
hblock = gr.top_block("test_block")
blk = gr.hier_block2("block",
gr.io_signature(0, 0, 0),
gr.io_signature(0, 0, 0))
hblock.connect(blk)
hblock.disconnect(blk)
def test_025_disconnect_single_not_connected(self):
hblock = gr.top_block("test_block")
blk = gr.hier_block2("block",
gr.io_signature(0, 0, 0),
gr.io_signature(0, 0, 0))
self.assertRaises(ValueError,
lambda: hblock.disconnect(blk))
def test_026_run_single(self):
expected_data = (1.0,)
tb = gr.top_block("top_block")
hb = gr.hier_block2("block",
gr.io_signature(0, 0, 0),
gr.io_signature(0, 0, 0))
src = gr.vector_source_f(expected_data)
dst = gr.vector_sink_f()
hb.connect(src, dst)
tb.connect(hb)
tb.run()
self.assertEquals(expected_data, dst.data())
def test_027a_internally_unconnected_input(self):
tb = gr.top_block()
hb = gr.hier_block2("block",
gr.io_signature(1, 1, 1),
gr.io_signature(1, 1, 1))
hsrc = gr.vector_source_b([1,])
hb.connect(hsrc, hb) # wire output internally
src = gr.vector_source_b([1, ])
dst = gr.vector_sink_b()
tb.connect(src, hb, dst) # hb's input is not connected internally
self.assertRaises(RuntimeError,
lambda: tb.run())
def test_027b_internally_unconnected_output(self):
tb = gr.top_block()
hb = gr.hier_block2("block",
gr.io_signature(1, 1, 1),
gr.io_signature(1, 1, 1))
hdst = gr.vector_sink_b()
hb.connect(hb, hdst) # wire input internally
src = gr.vector_source_b([1, ])
dst = gr.vector_sink_b()
tb.connect(src, hb, dst) # hb's output is not connected internally
self.assertRaises(RuntimeError,
lambda: tb.run())
def test_027c_fully_unconnected_output(self):
tb = gr.top_block()
hb = gr.hier_block2("block",
gr.io_signature(1, 1, 1),
gr.io_signature(1, 1, 1))
hsrc = gr.vector_sink_b()
hb.connect(hb, hsrc) # wire input internally
src = gr.vector_source_b([1, ])
dst = gr.vector_sink_b()
tb.connect(src, hb) # hb's output is not connected internally or externally
self.assertRaises(RuntimeError,
lambda: tb.run())
def test_027d_fully_unconnected_input(self):
tb = gr.top_block()
hb = gr.hier_block2("block",
gr.io_signature(1, 1, 1),
gr.io_signature(1, 1, 1))
hdst = gr.vector_source_b([1,])
hb.connect(hdst, hb) # wire output internally
dst = gr.vector_sink_b()
tb.connect(hb, dst) # hb's input is not connected internally or externally
self.assertRaises(RuntimeError,
lambda: tb.run())
def test_028_singleton_reconfigure(self):
tb = gr.top_block()
hb = gr.hier_block2("block",
gr.io_signature(0, 0, 0), gr.io_signature(0, 0, 0))
src = gr.vector_source_b([1, ])
dst = gr.vector_sink_b()
hb.connect(src, dst)
tb.connect(hb) # Singleton connect
tb.lock()
tb.disconnect_all()
tb.connect(src, dst)
tb.unlock()
def test_029_singleton_disconnect(self):
tb = gr.top_block()
src = gr.vector_source_b([1, ])
dst = gr.vector_sink_b()
tb.connect(src, dst)
tb.disconnect(src) # Singleton disconnect
tb.connect(src, dst)
tb.run()
self.assertEquals(dst.data(), (1,))
def test_030_nested_input(self):
tb = gr.top_block()
src = gr.vector_source_b([1,])
hb1 = gr.hier_block2("hb1",
gr.io_signature(1, 1, gr.sizeof_char),
gr.io_signature(0, 0, 0))
hb2 = gr.hier_block2("hb2",
gr.io_signature(1, 1, gr.sizeof_char),
gr.io_signature(0, 0, 0))
dst = gr.vector_sink_b()
tb.connect(src, hb1)
hb1.connect(hb1, hb2)
hb2.connect(hb2, gr.kludge_copy(gr.sizeof_char), dst)
tb.run()
self.assertEquals(dst.data(), (1,))
def test_031_multiple_internal_inputs(self):
tb = gr.top_block()
src = gr.vector_source_f([1.0,])
hb = gr.hier_block2("hb",
gr.io_signature(1, 1, gr.sizeof_float),
gr.io_signature(1, 1, gr.sizeof_float))
m1 = gr.multiply_const_ff(1.0)
m2 = gr.multiply_const_ff(2.0)
add = gr.add_ff()
hb.connect(hb, m1) # m1 is connected to hb external input #0
hb.connect(hb, m2) # m2 is also connected to hb external input #0
hb.connect(m1, (add, 0))
hb.connect(m2, (add, 1))
hb.connect(add, hb) # add is connected to hb external output #0
dst = gr.vector_sink_f()
tb.connect(src, hb, dst)
tb.run()
self.assertEquals(dst.data(), (3.0,))
def test_032_nested_multiple_internal_inputs(self):
tb = gr.top_block()
src = gr.vector_source_f([1.0,])
hb = gr.hier_block2("hb",
gr.io_signature(1, 1, gr.sizeof_float),
gr.io_signature(1, 1, gr.sizeof_float))
hb2 = gr.hier_block2("hb",
gr.io_signature(1, 1, gr.sizeof_float),
gr.io_signature(1, 1, gr.sizeof_float))
m1 = gr.multiply_const_ff(1.0)
m2 = gr.multiply_const_ff(2.0)
add = gr.add_ff()
hb2.connect(hb2, m1) # m1 is connected to hb2 external input #0
hb2.connect(hb2, m2) # m2 is also connected to hb2 external input #0
hb2.connect(m1, (add, 0))
hb2.connect(m2, (add, 1))
hb2.connect(add, hb2) # add is connected to hb2 external output #0
hb.connect(hb, hb2, hb) # hb as hb2 as nested internal block
dst = gr.vector_sink_f()
tb.connect(src, hb, dst)
tb.run()
self.assertEquals(dst.data(), (3.0,))
if __name__ == "__main__":
gr_unittest.run(test_hier_block2, "test_hier_block2.xml")
| gpl-3.0 | 8,499,547,956,467,680,000 | 35.775068 | 83 | 0.576197 | false |
chubbymaggie/claripy | claripy/vsa/bool_result.py | 2 | 5012 | from ..backend_object import BackendObject
class BoolResult(BackendObject):
def __init__(self, op=None, args=None):
self._op = op
self._args = args
def value(self):
raise NotImplementedError()
def __len__(self):
        raise BackendError()
def __eq__(self, other):
raise NotImplementedError()
def __and__(self, other):
raise NotImplementedError()
def __invert__(self):
raise NotImplementedError()
def __or__(self, other):
raise NotImplementedError()
def identical(self, other):
if self.value != other.value:
return False
if self._op != other._op:
return False
if self._args != other._args:
return False
return True
def union(self, other):
raise NotImplementedError()
def size(self): #pylint:disable=no-self-use
return None
@staticmethod
def is_maybe(o):
if isinstance(o, Base):
raise ClaripyValueError("BoolResult can't handle AST objects directly")
return isinstance(o, MaybeResult)
@staticmethod
def has_true(o):
if isinstance(o, Base):
raise ClaripyValueError("BoolResult can't handle AST objects directly")
return o is True or (isinstance(o, BoolResult) and True in o.value)
@staticmethod
def has_false(o):
if isinstance(o, Base):
raise ClaripyValueError("BoolResult can't handle AST objects directly")
return o is False or (isinstance(o, BoolResult) and False in o.value)
@staticmethod
def is_true(o):
if isinstance(o, Base):
raise ClaripyValueError("BoolResult can't handle AST objects directly")
return o is True or (isinstance(o, TrueResult))
@staticmethod
def is_false(o):
if isinstance(o, Base):
raise ClaripyValueError("BoolResult can't handle AST objects directly")
return o is False or (isinstance(o, FalseResult))
class TrueResult(BoolResult):
cardinality = 1
@property
def value(self):
return (True, )
def identical(self, other):
return isinstance(other, TrueResult)
def __eq__(self, other):
if isinstance(other, FalseResult):
return FalseResult()
elif isinstance(other, TrueResult):
return TrueResult()
else:
return MaybeResult()
def __invert__(self):
return FalseResult()
def __or__(self, other):
return TrueResult()
def __and__(self, other):
if BoolResult.is_maybe(other):
return MaybeResult()
elif BoolResult.is_false(other):
return FalseResult()
else:
return TrueResult()
def union(self, other):
if other is True or type(other) is TrueResult:
return TrueResult()
elif other is False or type(other) is FalseResult:
return MaybeResult()
elif type(other) is MaybeResult:
return MaybeResult()
else:
return NotImplemented
def __repr__(self):
return '<True>'
class FalseResult(BoolResult):
cardinality = 1
@property
def value(self):
return (False, )
def identical(self, other):
return isinstance(other, FalseResult)
def __eq__(self, other):
if isinstance(other, FalseResult):
return TrueResult()
elif isinstance(other, TrueResult):
return FalseResult()
else:
return MaybeResult()
def __invert__(self):
return TrueResult()
def __and__(self, other):
return FalseResult()
def __or__(self, other):
return other
def __repr__(self):
return '<False>'
def union(self, other):
if other is True or type(other) is TrueResult:
return MaybeResult()
elif other is False or type(other) is FalseResult:
return FalseResult()
elif type(other) is MaybeResult:
return MaybeResult()
else:
return NotImplemented
class MaybeResult(BoolResult):
cardinality = 2
@property
def value(self):
return (True, False)
def identical(self, other):
return isinstance(other, MaybeResult)
def __eq__(self, other):
return MaybeResult()
def __invert__(self):
return MaybeResult()
def __and__(self, other):
if BoolResult.is_false(other):
return FalseResult()
else:
return MaybeResult()
def union(self, other):
return MaybeResult()
def __or__(self, other):
if BoolResult.is_true(other):
return TrueResult()
else:
return self
def __repr__(self):
if self._op is None:
return '<Maybe>'
else:
return '<Maybe(%s, %s)>' % (self._op, self._args)
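# Illustrative three-valued sketch, not part of the original module:
#   TrueResult() & MaybeResult()        # -> MaybeResult()
#   FalseResult() & MaybeResult()       # -> FalseResult()
#   ~MaybeResult()                      # -> MaybeResult()
#   TrueResult().union(FalseResult())   # -> MaybeResult()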
from ..errors import BackendError, ClaripyValueError
from ..ast.base import Base
| bsd-2-clause | 7,863,521,839,149,455,000 | 23.935323 | 83 | 0.582203 | false |
emilyvon/titanium_mobile | support/android/android.py | 33 | 15397 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Appcelerator Titanium Mobile
# Copyright (c) 2011-2013 by Appcelerator, Inc. All Rights Reserved.
# Licensed under the terms of the Apache Public License
# Please see the LICENSE included with this distribution for details.
#
# Android Application Script
#
import os, sys, shutil, platform, zipfile
import string, subprocess, re
from xml.etree.ElementTree import ElementTree
from StringIO import StringIO
from os.path import join, splitext, split, exists
from shutil import copyfile
from androidsdk import AndroidSDK
from compiler import Compiler
import bindings
template_dir = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
module_dir = os.path.join(os.path.dirname(template_dir), 'module')
common_dir = os.path.join(os.path.dirname(template_dir), 'common')
sys.path.extend([os.path.dirname(template_dir), module_dir, common_dir])
from mako.template import Template
from tiapp import TiAppXML, touch_tiapp_xml
from manifest import Manifest
from module import ModuleDetector
import simplejson
ignoreFiles = ['.gitignore', '.cvsignore', '.DS_Store'];
ignoreDirs = ['.git','.svn','_svn', 'CVS'];
def run(args):
return subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE).communicate()[0]
def pipe(args1,args2):
p1 = subprocess.Popen(args1, stdout=subprocess.PIPE)
p2 = subprocess.Popen(args2, stdin=p1.stdout, stdout=subprocess.PIPE)
return p2.communicate()[0]
def copy_resources(source, target):
if not os.path.exists(os.path.expanduser(target)):
os.mkdir(os.path.expanduser(target))
for root, dirs, files in os.walk(source, True, None, True):
for name in ignoreDirs:
if name in dirs:
dirs.remove(name) # don't visit ignored directories
for file in files:
if file in ignoreFiles:
continue
from_ = join(root, file)
to_ = os.path.expanduser(from_.replace(source, target, 1))
to_directory = os.path.expanduser(split(to_)[0])
if not exists(to_directory):
os.makedirs(to_directory)
print "[TRACE] copying: %s to: %s" % (from_,to_)
copyfile(from_, to_)
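# Illustrative call sketch, not part of the original file (paths hypothetical):
#   copy_resources('/proj/Resources', '~/build/assets/Resources')
# copies every file except the VCS directories and OS metadata listed above.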
class Android(object):
def __init__(self, name, myid, sdk, deploy_type, java):
self.name = name
# android requires at least one dot in packageid
if len(re.findall(r'\.',myid))==0:
myid = 'com.%s' % myid
self.id = myid
self.sdk = sdk
# Used in templating
self.config = {
'appid': self.id,
'appname' : self.name,
'appversion' : '1',
'apiversion' : '7', #Android 2.1
'deploy_type': deploy_type,
'compile_js': False
}
self.config['classname'] = Android.strip_classname(self.name)
self.deploy_type = deploy_type
self.java = java
@classmethod
def strip_classname(cls, name):
classname = ''.join([str.capitalize() for str in re.split('[^A-Za-z0-9_]', name)])
if re.search("^[0-9]", classname) != None:
classname = "_" + classname
return classname
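	# Illustrative sketch, not part of the original file:
	#   Android.strip_classname("My App!")  # -> "MyApp"
	#   Android.strip_classname("2cool")    # -> "_2cool"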
def newdir(self, *segments):
path = os.path.join(*segments)
if not os.path.exists(path):
os.makedirs(path)
return path
def copyfile(self, file, src, dest):
shutil.copy(os.path.join(src, file), os.path.join(dest, file))
def load_template(self, template):
return Template(filename=template, output_encoding='utf-8', encoding_errors='replace')
def render_android_manifest(self):
template_dir = os.path.dirname(sys._getframe(0).f_code.co_filename)
tmpl = self.load_template(os.path.join(template_dir, 'templates', 'AndroidManifest.xml'))
return tmpl.render(config = self.config)
def render(self, template_dir, template_file, dest, dest_file, **kwargs):
tmpl = self.load_template(os.path.join(template_dir, 'templates', template_file))
f = None
try:
print "[TRACE] Generating %s" % os.path.join(dest, dest_file)
f = open(os.path.join(dest, dest_file), "w")
f.write(tmpl.render(config = self.config, **kwargs))
finally:
			if f is not None: f.close()
def build_app_info(self, project_dir):
tiapp = ElementTree()
assets_tiappxml = os.path.join(project_dir, 'build', 'android', 'bin', 'assets', 'tiapp.xml')
self.app_info = {'fullscreen':'false','navbar-hidden':'false','deploy-type':self.deploy_type}
self.app_properties = {}
if not os.path.exists(assets_tiappxml):
shutil.copy(os.path.join(project_dir, 'tiapp.xml'), assets_tiappxml)
tiapp.parse(open(assets_tiappxml, 'r'))
for key in ['id', 'name', 'version', 'publisher', 'url', 'copyright',
'description', 'icon', 'analytics', 'guid', 'navbar-hidden', 'fullscreen']:
el = tiapp.find(key)
if el != None:
self.app_info[key] = el.text
for property_el in tiapp.findall("property"):
name = property_el.get("name")
type = property_el.get("type")
value = property_el.text
if name == None: continue
if type == None: type = "string"
if value == None: value = ""
self.app_properties[name] = {"type": type, "value": value}
def generate_activities(self, app_package_dir):
if not 'activities' in self.tiapp.android: return
for key in self.tiapp.android['activities'].keys():
activity = self.tiapp.android['activities'][key]
print '[DEBUG] generating activity class: ' + activity['classname']
self.render(template_dir, 'JSActivity.java', app_package_dir, activity['classname']+'.java', activity=activity)
def generate_services(self, app_package_dir):
if not 'services' in self.tiapp.android: return
for key in self.tiapp.android['services'].keys():
service = self.tiapp.android['services'][key]
service_type = service['service_type']
print '[DEBUG] generating service type "%s", class "%s"' %(service_type, service['classname'])
if service_type == 'interval':
self.render(template_dir, 'JSIntervalService.java', app_package_dir, service['classname']+'.java', service=service)
else:
self.render(template_dir, 'JSService.java', app_package_dir, service['classname']+'.java', service=service)
def build_modules_info(self, resources_dir, app_bin_dir, include_all_ti_modules=False):
self.app_modules = []
(modules, external_child_modules) = bindings.get_all_module_bindings()
compiler = Compiler(self.tiapp, resources_dir, self.java, app_bin_dir,
None, os.path.dirname(app_bin_dir),
include_all_modules=include_all_ti_modules)
compiler.compile(compile_bytecode=False, info_message=None)
for module in compiler.modules:
module_bindings = []
# TODO: we should also detect module properties
for method in compiler.module_methods:
if method.lower().startswith(module+'.') and '.' not in method:
module_bindings.append(method[len(module)+1:])
module_onAppCreate = None
module_class = None
module_apiName = None
for m in modules.keys():
if modules[m]['fullAPIName'].lower() == module:
module_class = m
module_apiName = modules[m]['fullAPIName']
if 'onAppCreate' in modules[m]:
module_onAppCreate = modules[m]['onAppCreate']
break
if module_apiName == None: continue # module wasn't found
ext_modules = []
if module_class in external_child_modules:
for child_module in external_child_modules[module_class]:
if child_module['fullAPIName'].lower() in compiler.modules:
ext_modules.append(child_module)
self.app_modules.append({
'api_name': module_apiName,
'class_name': module_class,
'bindings': module_bindings,
'external_child_modules': ext_modules,
'on_app_create': module_onAppCreate
})
# discover app modules
detector = ModuleDetector(self.project_dir)
missing, detected_modules = detector.find_app_modules(self.tiapp, 'android', self.deploy_type)
for missing_module in missing: print '[WARN] Couldn\'t find app module: %s' % missing_module['id']
self.custom_modules = []
for module in detected_modules:
if module.jar == None: continue
module_jar = zipfile.ZipFile(module.jar)
module_bindings = bindings.get_module_bindings(module_jar)
if module_bindings is None: continue
for module_class in module_bindings['modules'].keys():
module_apiName = module_bindings['modules'][module_class]['apiName']
module_proxy = module_bindings['proxies'][module_class]
module_id = module_proxy['proxyAttrs']['id']
module_proxy_class_name = module_proxy['proxyClassName']
module_onAppCreate = None
if 'onAppCreate' in module_proxy:
module_onAppCreate = module_proxy['onAppCreate']
print '[DEBUG] module_id = %s' % module_id
if module_id == module.manifest.moduleid:
# make sure that the module was not built before 1.8.0.1
try:
module_api_version = int(module.manifest.apiversion)
if module_api_version < 2:
print "[ERROR] The 'apiversion' for '%s' in the module manifest is less than version 2. The module was likely built against a Titanium SDK pre 1.8.0.1. Please use a version of the module that has 'apiversion' 2 or greater" % module_id
touch_tiapp_xml(os.path.join(self.project_dir, 'tiapp.xml'))
sys.exit(1)
except(TypeError, ValueError):
print "[ERROR] The 'apiversion' for '%s' in the module manifest is not a valid value. Please use a version of the module that has an 'apiversion' value of 2 or greater set in it's manifest file" % module_id
touch_tiapp_xml(os.path.join(self.project_dir, 'tiapp.xml'))
sys.exit(1)
is_native_js_module = (hasattr(module.manifest, 'commonjs') and module.manifest.commonjs)
print '[DEBUG] appending module: %s' % module_class
self.custom_modules.append({
'module_id': module_id,
'module_apiName': module_apiName,
'proxy_name': module_proxy_class_name,
'class_name': module_class,
'manifest': module.manifest,
'on_app_create': module_onAppCreate,
'is_native_js_module': is_native_js_module
})
if is_native_js_module:
# Need to look at the app modules used in this external js module
metadata_file = os.path.join(module.path, "metadata.json")
metadata = None
try:
f = open(metadata_file, "r")
metadata = f.read()
finally:
f.close()
if metadata:
metadata = simplejson.loads(metadata)
if metadata.has_key("exports"):
exported_module_ids = metadata["exports"]
already_included_module_ids = [m["api_name"].lower() for m in self.app_modules]
need_to_add = [m for m in exported_module_ids if m not in already_included_module_ids]
if need_to_add:
for to_add in need_to_add:
module_onAppCreate = None
module_class = None
module_apiName = None
for m in modules.keys():
if modules[m]['fullAPIName'].lower() == to_add:
module_class = m
module_apiName = modules[m]['fullAPIName']
if 'onAppCreate' in modules[m]:
module_onAppCreate = modules[m]['onAppCreate']
break
										if module_apiName is None: continue # module wasn't found
ext_modules = []
if module_class in external_child_modules:
for child_module in external_child_modules[module_class]:
if child_module['fullAPIName'].lower() in compiler.modules:
ext_modules.append(child_module)
self.app_modules.append({
'api_name': module_apiName,
'class_name': module_class,
'bindings': [],
'external_child_modules': ext_modules,
'on_app_create': module_onAppCreate
})
def create(self, dir, build_time=False, project_dir=None, include_all_ti_modules=False):
template_dir = os.path.dirname(sys._getframe(0).f_code.co_filename)
# Build up output directory tree
if project_dir is None:
project_dir = self.newdir(dir, self.name)
self.project_dir = project_dir
# Paths to Titanium assets that need to be linked into eclipse structure
self.config['ti_tiapp_xml'] = os.path.join(project_dir, 'tiapp.xml')
self.tiapp = TiAppXML(self.config['ti_tiapp_xml'])
resource_dir = os.path.join(project_dir, 'Resources')
self.config['ti_resources_dir'] = resource_dir
json_contents = open(os.path.join(template_dir,'dependency.json')).read()
depends_map = simplejson.loads(json_contents)
app_build_dir = self.newdir(project_dir, 'build')
app_dir = self.newdir(app_build_dir, 'android')
#if os.path.exists(os.path.join(app_dir,'bin')):
# shutil.rmtree(os.path.join(app_dir,'bin'))
if os.path.exists(os.path.join(app_dir,'src')):
shutil.rmtree(os.path.join(app_dir,'src'))
if os.path.exists(os.path.join(app_dir,'res')):
shutil.rmtree(os.path.join(app_dir,'res'))
app_bin_dir = self.newdir(app_dir, 'bin')
app_lib_dir = self.newdir(app_dir, 'lib')
app_src_dir = self.newdir(app_dir, 'src')
app_res_dir = self.newdir(app_dir, 'res')
app_gen_dir = self.newdir(app_dir, 'gen')
app_bin_classes_dir = self.newdir(app_bin_dir, 'classes')
app_res_drawable_dir = self.newdir(app_res_dir, 'drawable')
app_assets_dir = self.newdir(app_dir, 'assets')
app_package_dir = self.newdir(app_gen_dir, *self.id.split('.'))
app_bin_assets_dir = self.newdir(app_bin_dir, 'assets')
app_bin_assets_resources_dir = self.newdir(app_bin_assets_dir, 'Resources')
self.build_app_info(project_dir)
self.build_modules_info(app_bin_assets_resources_dir, app_bin_dir, include_all_ti_modules=include_all_ti_modules)
# Create android source
self.render(template_dir, 'AppInfo.java', app_package_dir, self.config['classname'] + 'AppInfo.java',
app_properties = self.app_properties, app_info = self.app_info)
self.render(template_dir, 'AndroidManifest.xml', app_dir, 'AndroidManifest.xml')
self.render(template_dir, 'App.java', app_package_dir, self.config['classname'] + 'Application.java',
app_modules = self.app_modules, custom_modules = self.custom_modules)
self.render(template_dir, 'Activity.java', app_package_dir, self.config['classname'] + 'Activity.java')
self.generate_activities(app_package_dir)
self.generate_services(app_package_dir)
self.render(template_dir, 'classpath', app_dir, '.classpath')
self.render(template_dir, 'project', app_dir, '.project')
self.render(template_dir, 'default.properties', app_dir, 'default.properties')
print "[TRACE] Generating app.json"
f = None
try:
f = open(os.path.join(app_bin_assets_dir, "app.json"), "w")
f.write(simplejson.dumps({"app_modules":self.app_modules}))
finally:
if f is not None:
f.close()
# Don't override a pre-existing .gitignore in case users have their own preferences
# for what should be in it. (LH #2446)
if not os.path.exists(os.path.join(app_dir, '.gitignore')):
self.render(template_dir, 'gitignore', app_dir, '.gitignore')
else:
print "[TRACE] Skipping copying gitignore -> .gitignore because already exists"
android_project_resources = os.path.join(project_dir,'Resources','android')
if build_time==False and os.path.exists(android_project_resources):
shutil.rmtree(android_project_resources)
if not os.path.exists(android_project_resources):
copy_resources(os.path.join(template_dir,'resources'),android_project_resources)
if __name__ == '__main__':
# this is for testing only for the time being
if len(sys.argv) != 5 or sys.argv[1]=='--help':
print "Usage: %s <name> <id> <directory> <sdk>" % os.path.basename(sys.argv[0])
sys.exit(1)
sdk = AndroidSDK(sys.argv[4])
android = Android(sys.argv[1], sys.argv[2], sdk, None, 'java')
android.create(sys.argv[3])
| apache-2.0 | -1,732,317,656,061,341,000 | 39.096354 | 243 | 0.681302 | false |
will4906/PatentCrawler | config/crawler_config.py | 1 | 8899 | # -*- coding: utf-8 -*-
"""
Created on 2018/3/14
@author: will4906
Defines the content to be collected and how it is parsed.
"""
from bs4 import BeautifulSoup
from controller.url_config import url_search, url_detail, url_related_info, url_full_text
from crawler.items import DataItem
from entity.crawler_item import BaseItem, ResultItem
class PatentId(BaseItem):
is_required = True
crawler_id = url_search.get('crawler_id')
english = 'patent_id'
chinese = ['专利标志', '专利id', '专利ID', '专利Id']
@classmethod
def parse(cls, raw, item, process=None):
if process is not None:
patent_id = process.find(attrs={'name': 'idHidden'}).get('value')
item.patent_id = ResultItem(title=cls.title, value=str(patent_id))
return item
class PatentName(BaseItem):
is_required = True
crawler_id = url_detail.get('crawler_id')
english = ['patent_name', 'invention_name']
chinese = '专利名称'
@classmethod
def parse(cls, raw, item, process=None):
if process is not None:
patent_name = process.get('abstractInfoDTO').get('tioIndex').get('value')
item.patent_name = ResultItem(title=cls.title, value=str(patent_name))
return item
class Abstract(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = 'abstract'
chinese = '摘要'
@classmethod
def parse(cls, raw, item, process=None):
if process is not None:
abstract = BeautifulSoup(process.get('abstractInfoDTO').get('abIndexList')[0].get('value'),
'lxml').text.replace('\n', '').strip()
item.abstract = ResultItem(title=cls.title, value=abstract)
return item
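# Each BaseItem subclass above follows the same pattern: bind to a crawler
# step via crawler_id, declare english/chinese name aliases, and implement
# parse() to copy one field of the fetched JSON into the DataItem. The
# detail-page items below all delegate to the push_item() helper defined next.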
def push_item(json_list, item: DataItem, title, name):
"""
    Parse function for the detail page.
    :param json_list: JSON dict fetched for the detail page
    :param item: the DataItem instance to populate
    :param title: attribute name to set on the item
    :param name: Chinese index name to match in the abstract item list
    :return: the populated DataItem
"""
if json_list is not None:
aitem_list = json_list.get('abstractInfoDTO').get('abstractItemList')
for a_item in aitem_list:
if a_item.get('indexCnName').find(name) != -1:
item.__setattr__(title, ResultItem(title=name, value=a_item.get('value')))
break
if not hasattr(item, title):
item.__setattr__(title, ResultItem(title=name, value=""))
return item
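# A minimal usage sketch of push_item (the JSON shape is inferred from the
# keys accessed above, not taken from a captured API response):
#
#   process = {'abstractInfoDTO': {'abstractItemList': [
#       {'indexCnName': '申请号', 'value': 'CN201810000001.X'}]}}
#   item = push_item(process, DataItem(), 'request_number', '申请号')
#   # item.request_number == ResultItem(title='申请号', value='CN201810000001.X')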
class RequestNumber(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = ['request_number', 'application_number']
chinese = '申请号'
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'request_number', '申请号')
class RequestDate(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = 'request_date'
chinese = '申请日'
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'request_date', '申请日')
class PublishNumber(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = 'publish_number'
chinese = ['公开号', '公布号', '公开(公告)号']
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'publish_number', '公开(公告)号')
class PublishDate(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = 'publish_date'
chinese = ['公开日', '公布日', '公开(公告)日']
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'publish_date', '公开(公告)日')
class IpcClassificationNumber(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = ['ipc_class_number', 'IPC', 'ipc', 'Ipc']
chinese = 'IPC分类号'
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'ipc_class_number', 'IPC分类号')
class Applicant(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = ['Applicant', 'applicant', 'assignee', 'Assignee', 'proposer']
chinese = ['申请人', '专利权人', '专利人', '申请(专利权)人']
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'applicant', '申请(专利权)人')
class Inventor(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = ['Inventor', 'inventor']
chinese = '发明人'
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'inventor', '发明人')
class PriorityNumber(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = 'priority_number'
chinese = '优先权号'
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'priority_number', '优先权号')
class PriorityDate(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = 'priority_date'
chinese = '优先权日'
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'priority_date', '优先权日')
class AddressOfApplicant(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = ['proposer_address', 'address_of_the_Applicant', 'applicant_address']
chinese = '申请人地址'
@classmethod
def parse(cls, raw, item, process=None):
if process is not None:
item = push_item(process, item, 'proposer_address', '申请人地址')
return item
class ZipCodeOfTheApplicant(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = ['proposer_post_code', 'zip_code_of_the_applicant', 'proposer_zip_code']
chinese = '申请人邮编'
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'proposer_zip_code', '申请人邮编')
class CountryOfTheApplicant(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = ['proposer_location', 'country_of_the_applicant', 'country_of_the_assignee']
chinese = ['申请人所在国(省)', '申请人所在地']
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'proposer_location', '申请人所在国(省)')
class CpcClassificationNumber(BaseItem):
crawler_id = url_detail.get('crawler_id')
english = ['cpc_class_number', 'cpc', 'CPC', 'Cpc']
chinese = 'CPC分类号'
@classmethod
def parse(cls, raw, item, process=None):
return push_item(process, item, 'cpc_class_number', 'CPC分类号')
class Cognation(BaseItem):
crawler_id = url_related_info.get('crawler_id')
table_name = 'cognation'
english = 'cognation_list'
chinese = '同族表'
title = '同族'
@classmethod
def parse(cls, raw, item, process=None):
if process is not None:
cognation_list = process.get('cognationList')
# print('cognation', cognation_list)
if cognation_list is not None:
pn_list = []
for cog in cognation_list:
pn_list.append(cog.get('pn'))
item.cognation_list = ResultItem(table=cls.table_name, title=cls.title, value=pn_list)
return item
class LawStateList(BaseItem):
crawler_id = url_related_info.get('crawler_id')
table_name = 'law_state'
english = 'law_state_list'
chinese = '法律状态表'
title = ['法律状态', '法律状态时间']
@classmethod
def set_title(cls, title):
if title == cls.english:
cls.title = ['law_status', 'law_status_date']
elif title == cls.chinese:
cls.title = ['法律状态', '法律状态时间']
@classmethod
def parse(cls, raw, item, process=None):
if process is not None:
law_state_list = process.get('lawStateList')
if law_state_list is not None:
tmp_list = []
for law in law_state_list:
mean = law.get('lawStateCNMeaning')
law_date = law.get('prsDate')
part = (ResultItem(table=cls.table_name, title=cls.title[0], value=mean),
ResultItem(table=cls.table_name, title=cls.title[1], value=law_date))
tmp_list.append(part)
item.law_state_list = tmp_list
return item
class FullText(BaseItem):
crawler_id = url_full_text.get('crawler_id')
english = ['full_text', 'whole_text']
chinese = ['全文文本', '全文']
@classmethod
def parse(cls, raw, item, process=None):
if process is not None:
item.full_text = ResultItem(table=cls.table_name, title=cls.title,
value=BeautifulSoup(str(process.get('fullTextDTO').get('literaInfohtml')), 'lxml')
.get_text().replace("'", '"').replace(';', ','))
return item
| apache-2.0 | -7,134,658,395,125,342,000 | 30.177122 | 122 | 0.609066 | false |
w1ll1am23/home-assistant | tests/components/minio/common.py | 27 | 1804 | """Minio Test event."""
TEST_EVENT = {
"Records": [
{
"eventVersion": "2.0",
"eventSource": "minio:s3",
"awsRegion": "",
"eventTime": "2019-05-02T11:05:07Z",
"eventName": "s3:ObjectCreated:Put",
"userIdentity": {"principalId": "SO9KNO6YT9OGE39PQCZW"},
"requestParameters": {
"accessKey": "SO9KNO6YT9OGE39PQCZW",
"region": "",
"sourceIPAddress": "172.27.0.1",
},
"responseElements": {
"x-amz-request-id": "159AD8E6F6805783",
"x-minio-deployment-id": "90b265b8-bac5-413a-b12a-8915469fd769",
"x-minio-origin-endpoint": "http://172.27.0.2:9000",
},
"s3": {
"s3SchemaVersion": "1.0",
"configurationId": "Config",
"bucket": {
"name": "test",
"ownerIdentity": {"principalId": "SO9KNO6YT9OGE39PQCZW"},
"arn": "arn:aws:s3:::test",
},
"object": {
"key": "5jJkTAo.jpg",
"size": 108368,
"eTag": "1af324731637228cbbb0b2e8c07d4e50",
"contentType": "image/jpeg",
"userMetadata": {"content-type": "image/jpeg"},
"versionId": "1",
"sequencer": "159AD8E6F76DD9C4",
},
},
"source": {
"host": "",
"port": "",
"userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) "
"AppleWebKit/605.1.15 (KHTML, like Gecko) "
"Version/12.0.3 Safari/605.1.15",
},
}
]
}
| apache-2.0 | -2,185,058,942,565,162,800 | 36.583333 | 80 | 0.413525 | false |
sunlianqiang/kbengine | kbe/src/lib/python/Lib/distutils/tests/test_config.py | 90 | 3148 | """Tests for distutils.pypirc.pypirc."""
import sys
import os
import unittest
import tempfile
from distutils.core import PyPIRCCommand
from distutils.core import Distribution
from distutils.log import set_threshold
from distutils.log import WARN
from distutils.tests import support
from test.support import run_unittest
PYPIRC = """\
[distutils]
index-servers =
server1
server2
[server1]
username:me
password:secret
[server2]
username:meagain
password: secret
realm:acme
repository:http://another.pypi/
"""
PYPIRC_OLD = """\
[server-login]
username:tarek
password:secret
"""
WANTED = """\
[distutils]
index-servers =
pypi
[pypi]
username:tarek
password:xxx
"""
class PyPIRCCommandTestCase(support.TempdirManager,
support.LoggingSilencer,
support.EnvironGuard,
unittest.TestCase):
def setUp(self):
"""Patches the environment."""
super(PyPIRCCommandTestCase, self).setUp()
self.tmp_dir = self.mkdtemp()
os.environ['HOME'] = self.tmp_dir
self.rc = os.path.join(self.tmp_dir, '.pypirc')
self.dist = Distribution()
class command(PyPIRCCommand):
def __init__(self, dist):
PyPIRCCommand.__init__(self, dist)
def initialize_options(self):
pass
finalize_options = initialize_options
self._cmd = command
self.old_threshold = set_threshold(WARN)
def tearDown(self):
"""Removes the patch."""
set_threshold(self.old_threshold)
super(PyPIRCCommandTestCase, self).tearDown()
def test_server_registration(self):
# This test makes sure PyPIRCCommand knows how to:
# 1. handle several sections in .pypirc
# 2. handle the old format
# new format
self.write_file(self.rc, PYPIRC)
cmd = self._cmd(self.dist)
config = cmd._read_pypirc()
config = list(sorted(config.items()))
waited = [('password', 'secret'), ('realm', 'pypi'),
('repository', 'https://pypi.python.org/pypi'),
('server', 'server1'), ('username', 'me')]
self.assertEqual(config, waited)
# old format
self.write_file(self.rc, PYPIRC_OLD)
config = cmd._read_pypirc()
config = list(sorted(config.items()))
waited = [('password', 'secret'), ('realm', 'pypi'),
('repository', 'https://pypi.python.org/pypi'),
('server', 'server-login'), ('username', 'tarek')]
self.assertEqual(config, waited)
def test_server_empty_registration(self):
cmd = self._cmd(self.dist)
rc = cmd._get_rc_file()
self.assertFalse(os.path.exists(rc))
cmd._store_pypirc('tarek', 'xxx')
self.assertTrue(os.path.exists(rc))
f = open(rc)
try:
content = f.read()
self.assertEqual(content, WANTED)
finally:
f.close()
def test_suite():
return unittest.makeSuite(PyPIRCCommandTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| lgpl-3.0 | 53,671,800,436,507,030 | 25.233333 | 68 | 0.594981 | false |
moccu/django-cruditor | tests/test_views.py | 1 | 15414 | import pytest
from django.contrib.auth.models import AnonymousUser
from django.contrib.messages import SUCCESS as SUCCESS_LEVEL
from django.contrib.messages import get_messages
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.urls import reverse
from cruditor.views import Cruditor403View, Cruditor404View, CruditorListView
from examples.minimal.views import DemoView
from examples.store.models import Person
from .factories import PersonFactory, RelatedPersonFactory
@pytest.mark.django_db
class TestBasicView:
@pytest.fixture(autouse=True)
def setup(self, rf, admin_user):
self.request = rf.get('/')
self.request.user = admin_user
self.view = DemoView()
self.view.request = self.request
self.view.required_permission = 'accounts.some_permission'
def test_not_logged_in(self, rf):
self.request.user = AnonymousUser()
response = self.view.dispatch(self.request)
assert response.status_code == 200
assert 'breadcrumb' not in response.context_data['cruditor']
assert response.template_name[0] == DemoView.login_template_name
def test_no_permission(self, admin_user):
self.request.user.is_superuser = False
with pytest.raises(PermissionDenied):
self.view.dispatch(self.request)
def test_no_permission_required(self, admin_user):
self.view.required_permission = None
self.request.user.is_superuser = False
response = self.view.dispatch(self.request)
assert response.status_code == 200
def test_permission_granted(self, admin_user):
response = self.view.dispatch(self.request)
assert response.status_code == 200
assert response.template_name[0] == 'minimal/demo.html'
def test_cruditor_context(self):
assert self.view.get_cruditor_context() == {
'breadcrumb': [
{'title': 'Additional breadcrumb', 'url': '/'},
{'title': 'Disabled item'},
{'title': 'Demo view', 'url': None}
],
'constants': {
'change_password_url': '/change-password/',
'extrahead_template_name': 'cruditor/includes/extrahead.html',
'index_url': '/',
'logout_url': '/logout/',
'menu_template_name': 'menu.html',
'menu_title': 'Examples Demo'
},
'title': 'Demo view',
'titlebuttons': None
}
def test_title(self):
assert self.view.get_title() == 'Demo view'
def test_model_verbose_name_explicit(self):
self.view.model_verbose_name = 'Foo'
assert self.view.get_model_verbose_name() == 'Foo'
def test_model_verbose_name_from_meta(self):
self.view.model = Person
assert self.view.get_model_verbose_name() == 'Person'
def test_model_verbose_name_fallback(self):
assert self.view.get_model_verbose_name() == 'Item'
def test_not_found_view(rf):
response = Cruditor404View.as_view()(rf.get('/'))
assert response.status_code == 404
assert response.template_name[0] == 'cruditor/404.html'
def test_forbidden_view(rf):
response = Cruditor403View.as_view()(rf.get('/'))
assert response.status_code == 403
assert response.template_name[0] == 'cruditor/403.html'
@pytest.mark.django_db
class TestListView:
def setup(self):
self.person1 = PersonFactory.create(approved=True)
self.person2 = PersonFactory.create(approved=False)
def test_get_without_filter(self, admin_client):
response = admin_client.get(reverse('collection:list'))
assert response.status_code == 200
assert response.context['table'].data.data.count() == 2
assert response.context['filter_form'] is None
def test_get_with_filter(self, admin_client):
response = admin_client.get(reverse('collection:filter'))
assert response.status_code == 200
assert response.context['table'].data.data.count() == 2
assert response.context['filter_form'] is not None
assert not response.context['filter_form'].data
def test_get_with_filter_active(self, admin_client):
response = admin_client.get(
reverse('collection:filter'), data={'approved': '2'})
assert response.status_code == 200
assert response.context['table'].data.data.count() == 1
assert response.context['filter_form'] is not None
assert response.context['filter_form'].data
def test_get_queryset_model(self):
class DummyListView(CruditorListView):
model = Person
assert list(DummyListView().get_queryset()) == list(Person.objects.all())
def test_get_queryset_qset(self):
class DummyListView(CruditorListView):
queryset = Person.objects.filter(approved=True)
assert DummyListView().get_queryset().get() == self.person1
def test_get_queryset_fallback(self):
class DummyListView(CruditorListView):
pass
assert DummyListView().get_queryset() == []
def test_get_table_class(self):
class DummyListView(CruditorListView):
table_class = object
assert DummyListView().get_table_class()
def test_get_table_class_invalid(self):
class DummyListView(CruditorListView):
pass
with pytest.raises(ImproperlyConfigured):
DummyListView().get_table_class()
@pytest.mark.django_db
class TestAddView:
def test_get(self, admin_client):
response = admin_client.get(reverse('collection:add'))
assert response.status_code == 200
assert response.context['cruditor']['title'] == 'Add Person'
def test_post_valid(self, admin_client):
response = admin_client.post(
reverse('collection:add'),
data={
'first_name': 'John',
'last_name': 'Doe',
'country': 'Germany',
'reminder_0': '2018-05-25',
'reminder_1': '09:00:00',
'stars': '2',
}
)
assert response.status_code == 302
assert response['Location'] == reverse('collection:list')
messages = list(get_messages(response.wsgi_request))
assert len(messages) == 1
assert messages[0].level == SUCCESS_LEVEL
assert Person.objects.get().first_name == 'John'
def test_post_invalid(self, admin_client):
response = admin_client.post(reverse('collection:add'), data={})
assert response.status_code == 200
assert response.context['form'].is_valid() is False
assert Person.objects.exists() is False
@pytest.mark.django_db
class TestChangeView:
def setup(self):
self.person = PersonFactory.create(first_name='Sally')
def test_get(self, admin_client):
response = admin_client.get(
reverse('collection:change', args=(self.person.pk,)))
assert response.status_code == 200
assert response.context['form'].instance == self.person
def test_post_valid(self, admin_client):
response = admin_client.post(
reverse('collection:change', args=(self.person.pk,)),
data={
'first_name': 'John',
'last_name': 'Doe',
'country': 'Germany',
'reminder_0': '2018-05-25',
'reminder_1': '09:00:00',
'stars': '2',
}
)
assert response.status_code == 302
assert response['Location'] == reverse('collection:list')
messages = list(get_messages(response.wsgi_request))
assert len(messages) == 0
self.person.refresh_from_db()
assert self.person.first_name == 'John'
def test_post_invalid(self, admin_client):
response = admin_client.post(
reverse('collection:change', args=(self.person.pk,)), data={})
assert response.status_code == 200
assert response.context['form'].is_valid() is False
assert Person.objects.get().first_name == 'Sally'
@pytest.mark.django_db
class TestDeleteView:
def setup(self):
self.person = PersonFactory.create(first_name='Sally')
def test_get(self, admin_client):
response = admin_client.get(
reverse('collection:delete', args=(self.person.pk,)))
assert response.status_code == 200
assert Person.objects.exists() is True
def test_post(self, admin_client):
response = admin_client.post(
reverse('collection:delete', args=(self.person.pk,)))
assert response.status_code == 302
assert response['Location'] == reverse('collection:list')
messages = list(get_messages(response.wsgi_request))
assert len(messages) == 1
assert messages[0].level == SUCCESS_LEVEL
assert Person.objects.exists() is False
def test_post_protected(self, admin_client):
related = RelatedPersonFactory(person=self.person)
response = admin_client.post(
reverse('collection:delete', args=(self.person.pk,)))
assert response.status_code == 200
assert response.context['linked_objects'] == [
'Related person: {}'.format(str(related)),
]
assert Person.objects.exists() is True
def test_custom_button_label(self, admin_client):
response = admin_client.get(
reverse('collection:delete', args=(self.person.pk,)))
assert response.context['form_save_button_label'] == 'Delete this person'
assert 'Delete this person' in response.content.decode(response.charset)
@pytest.mark.django_db
class TestFormsetView:
def setup(self):
self.person = PersonFactory.create()
self.related_persons = RelatedPersonFactory.create_batch(2, person=self.person)
def test_get(self, admin_client):
response = admin_client.get(reverse('formset:change', args=(self.person.pk,)))
assert response.status_code == 200
assert response.context['form'].instance == self.person
instances = [
form.instance
for form in response.context['formsets']['related_persons'].forms
]
assert self.related_persons[0] in instances
assert self.related_persons[1] in instances
def test_post_valid(self, admin_client):
response = admin_client.post(
reverse('formset:change', args=(self.person.pk,)),
data={
'first_name': 'John',
'last_name': 'Doe',
'country': 'Germany',
'reminder_0': '2018-05-25',
'reminder_1': '09:00:00',
'stars': '2',
'relatedperson_set-INITIAL_FORMS': '1',
'relatedperson_set-MAX_NUM_FORMS': '1000',
'relatedperson_set-MIN_NUM_FORMS': '0',
'relatedperson_set-TOTAL_FORMS': '1',
'relatedperson_set-0-DELETE': '',
'relatedperson_set-0-id': '1',
'relatedperson_set-0-first_name': 'Sally',
'relatedperson_set-0-last_name': 'Mary',
'relatedperson_set-0-person': '1',
}
)
assert response.status_code == 302
assert response['Location'] == reverse('formset:list')
self.person.refresh_from_db()
assert self.person.first_name == 'John'
self.related_persons[0].refresh_from_db()
assert self.related_persons[0].first_name == 'Sally'
def test_post_invalid_formset(self, admin_client):
response = admin_client.post(
reverse('formset:change', args=(self.person.pk,)),
data={
'first_name': 'John',
'last_name': 'Doe',
'country': 'Germany',
'reminder_0': '2018-05-25',
'reminder_1': '09:00:00',
'stars': '2',
'relatedperson_set-INITIAL_FORMS': '1',
'relatedperson_set-MAX_NUM_FORMS': '1000',
'relatedperson_set-MIN_NUM_FORMS': '0',
'relatedperson_set-TOTAL_FORMS': '1',
'relatedperson_set-0-DELETE': '',
'relatedperson_set-0-id': '1',
'relatedperson_set-0-first_name': '',
'relatedperson_set-0-last_name': '',
'relatedperson_set-0-person': '1',
}
)
assert response.status_code == 200
assert response.context['form'].is_valid() is True
assert response.context['formsets']['related_persons'].is_valid() is False
def test_post_invalid_form(self, admin_client):
response = admin_client.post(
reverse('formset:change', args=(self.person.pk,)),
data={
'first_name': '',
'last_name': '',
'country': 'Germany',
'reminder_0': '2018-05-25',
'reminder_1': '09:00:00',
'stars': '2',
'relatedperson_set-INITIAL_FORMS': '1',
'relatedperson_set-MAX_NUM_FORMS': '1000',
'relatedperson_set-MIN_NUM_FORMS': '0',
'relatedperson_set-TOTAL_FORMS': '1',
'relatedperson_set-0-DELETE': '',
'relatedperson_set-0-id': '1',
'relatedperson_set-0-first_name': 'Sally',
'relatedperson_set-0-last_name': 'Mary',
'relatedperson_set-0-person': '1',
}
)
assert response.status_code == 200
assert response.context['form'].is_valid() is False
assert response.context['formsets']['related_persons'].is_valid() is True
class TestChangePasswordView:
def test_get(self, admin_client):
response = admin_client.get(reverse('change-password'))
assert response.status_code == 200
assert list(response.context['form'].fields) == ['new_password1', 'new_password2']
def test_post_invalid(self, admin_user, admin_client):
response = admin_client.post(reverse('change-password'), data={
'new_password1': 'Secret',
'new_password2': 'Secret2'
})
assert response.status_code == 200
assert response.context['form'].is_valid() is False
admin_user.refresh_from_db()
assert admin_user.check_password('password') is True
def test_post_valid(self, admin_user, admin_client):
response = admin_client.post(reverse('change-password'), data={
'new_password1': 'Secret',
'new_password2': 'Secret'
})
assert response.status_code == 302
assert response['Location'] == reverse('change-password')
admin_user.refresh_from_db()
assert admin_user.check_password('Secret') is True
def test_logout_view(admin_client):
response = admin_client.get(reverse('minimal:demo'))
assert response.status_code == 200
assert response.template_name[0] == 'minimal/demo.html'
response = admin_client.get(reverse('logout'))
assert response.status_code == 200
assert response.template_name[0] == 'cruditor/logout.html'
response = admin_client.get(reverse('minimal:demo'))
assert response.status_code == 200
assert response.template_name[0] == 'cruditor/login.html'
| mit | 4,224,671,266,427,966,500 | 35.964029 | 90 | 0.597898 | false |
MoritzS/django | tests/urlpatterns_reverse/included_namespace_urls.py | 72 | 1059 | from django.conf.urls import include, url
from .utils import URLObject
from .views import empty_view, view_class_instance
testobj3 = URLObject('testapp', 'test-ns3')
testobj4 = URLObject('testapp', 'test-ns4')
app_name = 'included_namespace_urls'
urlpatterns = [
url(r'^normal/$', empty_view, name='inc-normal-view'),
url(r'^normal/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', empty_view, name='inc-normal-view'),
url(r'^\+\\\$\*/$', empty_view, name='inc-special-view'),
url(r'^mixed_args/([0-9]+)/(?P<arg2>[0-9]+)/$', empty_view, name='inc-mixed-args'),
url(r'^no_kwargs/([0-9]+)/([0-9]+)/$', empty_view, name='inc-no-kwargs'),
url(r'^view_class/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', view_class_instance, name='inc-view-class'),
url(r'^test3/', include(*testobj3.urls)),
url(r'^test4/', include(*testobj4.urls)),
url(r'^ns-included3/', include(('urlpatterns_reverse.included_urls', 'included_urls'), namespace='inc-ns3')),
url(r'^ns-included4/', include('urlpatterns_reverse.namespace_urls', namespace='inc-ns4')),
]
| bsd-3-clause | 1,291,412,455,333,850,000 | 41.36 | 113 | 0.629839 | false |
iwconfig/svtplay-dl | lib/svtplay_dl/fetcher/rtmp.py | 4 | 1391 | # ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import subprocess
import shlex
from svtplay_dl.log import log
from svtplay_dl.utils import is_py2
from svtplay_dl.fetcher import VideoRetriever
from svtplay_dl.output import output
class RTMP(VideoRetriever):
def name(self):
return "rtmp"
def download(self):
""" Get the stream from RTMP """
args = []
if self.options.live:
args.append("-v")
if self.options.resume:
args.append("-e")
file_d = output(self.options, "flv", False)
if file_d is None:
return
args += ["-o", self.options.output]
if self.options.silent or self.options.output == "-":
args.append("-q")
if self.options.other:
if is_py2:
args += shlex.split(self.options.other.encode("utf-8"))
else:
args += shlex.split(self.options.other)
if self.options.verbose:
args.append("-V")
command = ["rtmpdump", "-r", self.url] + args
log.debug("Running: %s", " ".join(command))
try:
subprocess.call(command)
except OSError as e:
log.error("Could not execute rtmpdump: " + e.strerror)
return
self.finished = True
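# For illustration: with options.resume set and an output file of "clip.flv",
# the subprocess call above roughly amounts to running
#
#   rtmpdump -r rtmp://example.com/live/stream -e -o clip.flv
#
# (the URL and file name are made-up placeholders).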
| mit | -336,394,785,929,697,400 | 26.82 | 71 | 0.562185 | false |
ryfx/modrana | modules/mod_options/mod_options.py | 1 | 66705 | # -*- coding: utf-8 -*-
#----------------------------------------------------------------------------
# Handle option menus
#----------------------------------------------------------------------------
# Copyright 2008, Oliver White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------------
from modules.base_module import RanaModule
from core import utils
from core.backports import six
from core import constants
# identifies item as a group
GROUP_IDENTIFIER = "groupIdentifier"
def getModule(*args, **kwargs):
return Options(*args, **kwargs)
class Options(RanaModule):
"""Handle options"""
def __init__(self, *args, **kwargs):
RanaModule.__init__(self, *args, **kwargs)
self.options = {}
# for fast searching defaults for corresponding keys
self.keyDefault = {}
# profile folder
self.profileFolderPath = self.modrana.paths.getProfilePath()
# check the profile path and create the folders if necessary
utils.createFolderPath(self.profileFolderPath)
# items menu cache
self.itemMenus = {}
# item tools special menu name
self.keyStateListGroupID = None
# option content variables
self.monavPackList = []
def _getCategoryID(self, catId):
return "opt_cat_%s" % catId # get a standardized id
def addCategory(self, name, inId, icon, actionPrefix="", actionSuffix=""):
"""this method should be run only after menu module instance
is available in self.menuModule and the options menu is cleared,
eq. has at least the escape button"""
# first we add the category button to the options
catId = self._getCategoryID(inId) # get a standardized id
action = "%sset:menu:%s%s" % (actionPrefix, catId, actionSuffix)
self.menuModule.addItem('options', name, icon, action)
# initialize menu for the new category menu
self.menuModule.clearMenu(catId, "set:menu:options")
# as a convenience, return the id
return inId
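    # A hypothetical usage sketch (category name, id and icon are made up):
    #
    #   catId = self.addCategory("Network", "network", "generic")
    #   # registers an "opt_cat_network" menu and returns "network"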
def _getGroupId(self, catId, groupId):
parentId = self._getCategoryID(catId)
return "%s_opt_group_%s" % (parentId, groupId)
def addGroup(self, name, groupId, parentId, icon, actionPrefix="",
actionSuffix="", registerToMenu=True, backButtonAction=None):
"""this method ads a new (empty) options group to category specified by
catId, as a convenience feature, the id of the new group is returned"""
catId = self._getCategoryID(parentId)
groupId = self._getGroupId(parentId, groupId) # get a standardized id
# handle empty parent ids - such ids can be valid because the menu switching is handled
# handled entirely by the pre and post actions
if not parentId:
action = "%s%s" % (actionPrefix, actionSuffix)
else:
action = "%sset:menu:options#%s%s" % (actionPrefix, groupId, actionSuffix)
if registerToMenu: # add to options menu structure ?
self.menuModule.addItem(catId, name, icon, action)
if backButtonAction is not None:
self.options[groupId] = [backButtonAction, 0, []]
else:
self.options[groupId] = ["set:menu:%s" % catId, 0, []]
return groupId
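    # Continuing the hypothetical addCategory sketch above:
    #
    #   groupId = self.addGroup("Connection", "connection", catId, "generic")
    #   # returns "opt_cat_network_opt_group_connection", the id that the
    #   # add*Option helpers expect as their group argument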
def setGroupParent(self, groupID, parentID):
"""set the parent id of a given group id"""
if groupID in self.options:
self.options[groupID][0] = "set:menu:options#%s" % parentID
else:
self.log.error("can't set group parent - group %s not found", groupID)
def getGroupParent(self, groupID):
"""set the parent id of a given group id"""
if groupID in self.options:
return self.options[groupID][0]
else:
self.log.error("can't get group parent - group %s not found", groupID)
def clearGroup(self, groupID):
"""clear a given group from any options,
preserving its parent setting"""
self.options[groupID][1] = 0
self.options[groupID][2] = []
def addBoolOption(self, title, variable, group, default=None, action=None):
on = constants.PANGO_ON
off = constants.PANGO_OFF
if action:
states = ((False, off, action), (True, on, action))
else:
states = ((False, off), (True, on))
data = {"type": "toggle",
"states": states}
self.addOption(title, variable, data, group, default)
def addToggleOption(self, title, variable, choices, group, default=None):
data = {"type": "toggle",
"states": choices}
self.addOption(title, variable, data, group, default)
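    # Toggle options cycle through (value, label[, action]) states, e.g.
    # (mirroring how initOptionsMenu below registers the real "unitType"
    # option; groupId here stands for any id returned by addGroup):
    #
    #   self.addToggleOption("Units", "unitType",
    #                        [("km", "use kilometers"), ("mile", "use miles")],
    #                        groupId, "km")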
def addEditOption(self, title, variable, group, label="Edit variable", description=None):
choices = {"type": "showAndEditVariable",
"label": label,
"description": description
}
self.addOption(title, variable, choices, group, None)
def _generateNestedItems(self, inputList, variable, backAction, fakeMode=None):
"""generate item tuples for nested item menus"""
menuItems = [] # an ordered list of all the menu items
itemDict = {} # for easily assigning keys to labels
index = 1 # id 0 is the escape button
for value, name, icon, action in inputList:
if value == "groupIdentifier": # this item is a group
item = self.menuModule.generateItem("#%s" % name, "generic", action)
else: # this item is a button that sets a value
if fakeMode is None: # just use the current mode
item = self.menuModule.generateItem("#%s" % name, "generic",
"setWithCurrentMode:%s:%s|%s" % (variable, value, backAction))
            else:  # use a fake mode (used for per mode option state list)
item = self.menuModule.generateItem("#%s" % name, "generic",
"setWithMode:%s:%s:%s|%s" % (
fakeMode, variable, value, backAction))
menuItems.append(item)
itemDict[value] = (name, index)
index += 1
return menuItems, itemDict
def _generateItems(self, valueNameList, variable, backAction, fakeMode=None):
menuItems = [] # an ordered list of all the menu items
itemDict = {} # for easily assigning keys to labels
index = 1 # id 0 is the escape button
for value, name in valueNameList:
if fakeMode is None: # just use the current mode
item = self.menuModule.generateItem("#%s" % name, "generic",
"setWithCurrentMode:%s:%s|%s" % (variable, value, backAction))
            else:  # use a fake mode (used for per mode option state list)
item = self.menuModule.generateItem("#%s" % name, "generic",
"setWithMode:%s:%s:%s|%s" % (fakeMode, variable, value, backAction))
menuItems.append(item)
itemDict[value] = (name, index)
index += 1
return menuItems, itemDict
def addItemsOption(self, title, variable, items, group, default=None, fakeMode=None, preAction=None):
"""add an option, that opens and item selection menu"""
#NOTE: for the value - name mapping to work correctly, the value must be a string
# -> this is caused by the value being sent as a string once one of the items is clicked
        # -> if the value is not a string, just the raw value will be shown
# Example:
# (100, "100%") will show 100
# ('100', "100%") will show 100%
        # the back action returns to the group
backAction = "set:menu:options#%s" % group
# create and add the menu
menu = self.menuModule.getClearedMenu(backAction)
menuItems, itemDict = self._generateItems(items, variable, backAction, fakeMode=fakeMode)
# load all items to the menu
menu = self.menuModule.addItemsToThisMenu(menu, menuItems)
# store the menu in the menu module
# NOTE: for the returning back to the group to work correctly,
# the menu is stored under a key combined from the variable and group names
storageKey = self._getItemsOptionStorageKey(group, variable, fakeMode=fakeMode)
# add the Item menu entry button
self.menuModule.addItemMenu(storageKey, menu, wideButtons=True)
# also store in the local options structure
choices = {"type": "selectOneItem",
'label': "",
'description': "",
'default': default,
'items': items,
'itemDict': itemDict,
'storageKey': storageKey,
'preAction': preAction # send this message before entering the menu
}
# this means we are probably showing the option in the per mode state list
if fakeMode is not None:
choices['mode'] = fakeMode
choices['noToolsIcon'] = True # disable the tools in the per mode state list
self.addOption(title, variable, choices, group, default)
def addNestedItemsOption(self, title, variable, items, group, default=None, fakeMode=None, preAction=None):
"""add an option, that opens and item selection menu with groups"""
#NOTE: for the value - name mapping to work correctly, the value must be a string
# -> this is caused by the value being sent as a string once one of the items is clicked
        # -> if the value is not a string, just the raw value will be shown
# Example:
# (100, "100%") will show 100
# ('100', "100%") will show 100%
        # the back action returns to the group from which the menu was opened
backAction = "set:menu:options#%s" % group
# create submenus for the groups & a toplevel menu
# NOTE: for the returning back to the group to work correctly,
# the menu is stored under a key combined from the variable and group names
storageKey = self._getItemsOptionStorageKey(group, variable, fakeMode=fakeMode)
groupIndex = 1 # 0 is the back button
topLevel = []
itemDict = {}
tempItemDict = {}
for item in items:
value, name, icon, action = item["item"]
if item.get('group', None): # this is a group
subMenuItems = item.get('group', [])
# create per-group submenu
menuItems, tempItemDict = self._generateNestedItems(subMenuItems, variable, backAction,
fakeMode=fakeMode)
groupStorageKey = "%s_%d" % (storageKey, groupIndex)
groupBackAction = "set:menu:%s" % storageKey
self._setItemsAsMenu(groupStorageKey, menuItems, groupBackAction)
# update value -> name mapping with correct group and subgroup IDs
for layerKey in tempItemDict.keys():
label = tempItemDict[layerKey][0]
subId = tempItemDict[layerKey][1]
# label, toplevel id, group id
# NOTE: group level highlighting is not yet implemented
tempItemDict[layerKey] = (label, groupIndex, subId)
# TODO: highlighting inside groups
itemDict.update(tempItemDict)
# override the action for the toplevel group button
# to point to the group menu
action = "set:menu:%s" % groupStorageKey
groupIndex += 1
# add the toplevel button
topLevel.append((value, name, icon, action))
# add the toplevel menu
menuItems, tempItemDict = self._generateNestedItems(topLevel, variable, backAction, fakeMode=fakeMode)
self._setItemsAsMenu(storageKey, menuItems, backAction)
# update value -> name mapping for tomplevel buttons
itemDict.update(tempItemDict)
# also store in the local options structure
choices = {"type": "selectOneItem",
'label': "",
'description': "",
'default': default,
'items': items,
'itemDict': itemDict,
'storageKey': storageKey,
'preAction': preAction # send this message before entering the menu
}
# this means we are probably showing the option in the per mode state list
if fakeMode is not None:
choices['mode'] = fakeMode
choices['noToolsIcon'] = True # disable the tools in the per mode state list
self.addOption(title, variable, choices, group, default)
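    # The items argument of addNestedItemsOption is a list of dicts; a grouped
    # entry looks like
    #   {'item': (GROUP_IDENTIFIER, "Group label", "icon", action),
    #    'group': [(value, label, icon, action), ...]}
    # while an ungrouped entry is just {'item': (value, label, icon, action)}
    # -- see how initOptionsMenu builds layerStructure below.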
def _setItemsAsMenu(self, storageKey, menuItems, backAction, wideButtons=True):
"""create a new item menu (or overwrite an existing one) and register it in the
menu Module"""
menu = self.menuModule.getClearedMenu(backAction)
menu = self.menuModule.addItemsToThisMenu(menu, menuItems)
self.menuModule.addItemMenu(storageKey, menu, wideButtons=wideButtons)
def _getItemsOptionStorageKey(self, group, variable, fakeMode=None):
"""return menu name for the special item selection itemized menu
"""
if fakeMode is None:
return "options1Item*%s*%s" % (group, variable)
else:
return "options1Item*%s*%s*%s" % (group, variable, fakeMode)
def _highlightActiveItem(self, menu, variable):
"""highlight currently active item in the item selection menu"""
# test if the key was initialized
if self.optionsKeyExists(variable):
pass
# text, icon, action, type, timedAction
else: # not initialized, no need to highlight anything
return menu
def addOption(self, title, variable, choices, group, default=None):
"""add an option item"""
# add group name to choices,
# this is needed for the item tools menu to know where to return
choices['groupName'] = group
newOption = [title, variable, choices, group, default]
if group in self.options:
self.options[group][2].append(newOption)
self.keyDefault[variable] = default
else:
self.log.error("group %s does not exist, call addGroup to create it first", group)
def addRawOption(self, optionData):
"""add a raw option to options
NOTE: the options contains its group ID"""
(title, variable, choices, group, default) = optionData
        # as some options have side effects when they are created,
        # we need to check the option type and replicate those effects as needed
optionType = choices['type']
choices = dict(choices)
if optionType == 'selectOneItem':
if 'mode' in choices:
fakeMode = choices['mode']
else:
fakeMode = None
items = choices['items']
self.addItemsOption(title, variable, items, group, default, fakeMode=fakeMode)
else: # no side effects, just add the raw option
if group in self.options:
self.options[group][2].append(optionData)
self.keyDefault[variable] = default
else:
self.log.error("group %s does not exist, can't add a raw option to it", group)
def removeOption(self, categoryId, groupId, variable):
"""remove an option given by group and variable name"""
group = self._getGroupId(categoryId, groupId)
if group in self.options:
remove = lambda x: x[1] == variable
self.options[group][2][:] = [x for x in self.options[group][2] if not remove(x)]
if variable in self.keyDefault:
del self.keyDefault[variable]
else:
self.log.error("group %s does not exist, so option with variable %s can not be removed", group, variable)
def getOption(self, groupID, index):
"""get a options item from a given group by its index"""
if groupID in self.options:
try:
return self.options[groupID][2][index]
except IndexError:
self.log.error("group %s has no index %d, so this option can not be returned", groupID, index)
return False
else:
self.log.error("group %s does not exist, so option with index %d can not be returned", groupID, index)
return False
def getKeyDefault(self, key, default=None):
"""get default value for a given key"""
return self.keyDefault.get(key, default)
def firstTime(self):
# initialize the options menu
self.initOptionsMenu()
def initOptionsMenu(self):
"""Create the options menu structure.
You can add your own options in here,
or alternatively create them at runtime from your module's firstTime()
function by calling addOption. That would be best if your module is
        only occasionally used; this function is best if the option is likely
to be needed in all installations"""
self.menuModule = self.m.get("menu", None)
self.menuModule.clearMenu("options")
# shortcuts
addCat = self.addCategory
addGroup = self.addGroup
addOpt = self.addToggleOption
addBoolOpt = self.addBoolOption
addItems = self.addItemsOption
# useful notifications
notifyRestartNeeded = "ml:notification:m:restart modRana to apply this change;3"
# * the Map category *
catMap = addCat("Map", "map", "map")
# ** map layers
optionGroup = addGroup("Map layers", "map_layers", catMap, "generic")
defaultBA = "set:menu:options" # default back action
mapLayers = self.m.get('mapLayers', None)
layerStructure = []
if mapLayers:
groups = mapLayers.getGroupList()
# sort the groups in alphabetical order by label
groups = sorted(groups, key=lambda group: group.label)
# assign layers to groups
for group in groups:
name = group.label
icon = group.icon
# if no icon is specified, use the generic icon
if icon is None:
icon = "generic"
# list all layers for this group
#groupLayers = filter(lambda x: layers[x].get("group", None) == key, layers.keys())
groupLayers = group.layers
# layer keys to list of layers
# groupLayers = map(lambda x:
# (x, layers[x]['label'], layers[x]['icon'], defaultBA)
# ,groupLayers)
# sort them alphabetically by label
groupLayers.sort(key=lambda layer: layer.label)
# create (layerId, label, icon) tuples,
# reuse the variable:
groupLayers = list(map(lambda x: (x.id, x.label, x.icon, defaultBA), groupLayers))
# append their counter to the group name
name = "%s (%d)" % (name, len(groupLayers))
layerStructure.append({"item": (GROUP_IDENTIFIER, name, icon, defaultBA),
"group": groupLayers})
# append layers without group right after groups in the list
nonGroupLayers = mapLayers.getLayersWithoutGroup()
# sort the groups in alphabetical order by label
nonGroupLayers = sorted(nonGroupLayers, key=lambda group: group.label)
# convert to option format
nonGroupLayers = map(
lambda x: {'item': (x, x.label, x.icon, defaultBA)}, nonGroupLayers)
layerStructure.extend(nonGroupLayers)
# add empty layer
layerStructure.append({'item': (None, "Empty layer", "generic", defaultBA)})
# add the option
self.addNestedItemsOption("Main map", "layer", layerStructure, optionGroup, "mapnik")
# ** Overlay
group = addGroup("Map overlay", "map_overlay", catMap, "generic")
addBoolOpt("Map as overlay", "overlay", group, False)
self.addNestedItemsOption("Main map", "layer", layerStructure, group, "mapnik")
self.addNestedItemsOption("Background map", "layer2", layerStructure, group, "cycle")
addOpt("Transparency ratio", "transpRatio",
[("0.25,1", "overlay:25%"),
("0.5,1", "overlay:50%"),
("0.75,1", "overlay:75%"),
("1,1", "overlay:100%")],
group,
"0.5,1")
# ** Rotation
group = addGroup("Rotation", "map_rotation", catMap, "generic")
addBoolOpt("Rotate map in direction of travel", "rotateMap", group, False)
# ** Scaling
group = addGroup("Scaling", "map_scaling", catMap, "generic")
addOpt("Map scale", "mapScale",
[(1, "1X"),
(2, "2X"),
(4, "4X")],
group,
1)
# ** centering
group = addGroup("Centering", "centering", catMap, "generic")
addBoolOpt("Centre map", "centred", group, True)
addOpt("Centering shift", "posShiftDirection",
[("down", "shift down"),
("up", "shift up"),
("left", "shift left"),
("right", "shift right"),
(False, "don't shift")],
group,
"down")
addOpt("Centering shift amount", "posShiftAmount",
[(0.25, "25%"),
(0.5, "50%"),
(0.75, "75%"),
(1.0, "edge of the screen")],
group,
0.75)
changedMsg = "mapView:centeringDisableThresholdChanged"
addOpt("Disable by dragging", "centeringDisableThreshold",
[(2048, "normal drag - <i>default</i>", changedMsg),
(15000, "long drag", changedMsg),
(40000, "really long drag", changedMsg),
(80000, "extremely long drag", changedMsg),
(False, constants.PANGO_OFF, changedMsg)],
group,
2048)
# ** dragging
group = addGroup("Dragging", "dragging", catMap, "generic")
# check if we are on a powerful device or not and set the default accordingly
if self.dmod.simpleMapDragging():
defaultMode = "staticMapDrag"
else:
defaultMode = "default"
addOpt("Map dragging", "mapDraggingMode",
[("default", "full redraw - <i>default</i>", "mapView:dragModeChanged"),
("staticMapDrag", "drag visible map - <i>fastest</i>", "mapView:dragModeChanged")],
group,
defaultMode)
# ** map filtering
group = addGroup("Filters", "map_filtering", catMap, "generic")
addOpt("Negative", "invertMapTiles",
[(False, "disabled"),
(True, "enabled"),
('withNightTheme', "with night theme")],
group,
False)
# ** map grid
group = addGroup("Grid", "map_grid", catMap, "generic")
addBoolOpt("Show grid", "drawMapGrid", group, False)
addOpt("Grid color", "mapGridColor",
[("white", '<span color="white">white</span>'),
("black", '<span color="black">black</span>'),
("red", '<span color="red">red</span>'),
("green", '<span color="green">green</span>'),
("blue", '<span color="blue">blue</span>')],
group,
"blue")
addBoolOpt("Labels", "mapGridLabels", group, True)
# ** tile storage
group = addGroup("Tile storage", "tile_storage", catMap, "generic")
addOpt("Tile storage", "tileStorageType",
[('files', "files (default, more space used)"),
('sqlite', "sqlite (new, less space used)")],
group,
self.modrana.dmod.defaultTileStorageType)
addBoolOpt("Store downloaded tiles", "storeDownloadedTiles", group, True)
addOpt("Sqlite tile db commit interval", "sqliteTileDatabaseCommitInterval",
[(1, "1 second", notifyRestartNeeded),
(2, "2 seconds", notifyRestartNeeded),
(5, "5 seconds (default)", notifyRestartNeeded),
(10, "10 seconds", notifyRestartNeeded),
(30, "30 seconds", notifyRestartNeeded),
(60, "1 minute", notifyRestartNeeded)],
group,
               5)
# * the view category *
catView = addCat("View", "view", "view")
# ** GUI
group = addGroup("GUI", "gui", catView, "generic")
addOpt("Hide main buttons", "hideDelay",
[("never", "never hide buttons"),
("5", "hide buttons after 5 seconds"),
("10", "hide buttons after 10 seconds"),
("15", "hide buttons after 15 seconds"),
("30", "hide buttons after 30 seconds"),
("60", "hide buttons after 1 minute"),
("120", "hide buttons after 2 minutes")],
group,
"10")
addOpt("GUI Rotation", "rotationMode",
[("auto", "automatic", "device:modeChanged"),
("landscape", "landscape", "device:modeChanged"),
("portrait", "portrait", "device:modeChanged")],
group,
"auto")
# if non-fullscreen mode is supported on the device, add a button
# that enables starting in fullscreen
# (requested by N900 users)
if not self.modrana.dmod.fullscreenOnly():
addBoolOpt("Start in fullscreen", "startInFullscreen", group, False)
# if the quit button should be shown even if it is not needed on the
# given platform
if not self.modrana.gui.showQuitButton():
addBoolOpt("Show Quit button", "showQuitButton", group, False)
# ** screen
# only add if supported on device
display = self.m.get('display', None)
if display:
if display.screenBlankingControlSupported():
group = addGroup("Screen", "screen", catView, "generic")
addOpt("Keep display ON", "screenBlankingMode",
[("always", "always", "display:blankingModeChanged"),
("centred", "while centred", "display:blankingModeChanged"),
("moving", "while moving", "display:blankingModeChanged"),
("movingInFullscreen", "while moving in fullscreen", "display:blankingModeChanged"),
("fullscreen", "while in fullscreen", "display:blankingModeChanged"),
("gpsFix", "while there is a GPS fix", "display:blankingModeChanged"),
#TODO: while there is actually a GPS lock
("never", "never", "display:blankingModeChanged")],
group,
"always")
if display.usesDashboard():
addBoolOpt("Redraw when on dashboard", "redrawOnDashboard", group, False)
# ** themes
theme = self.m.get('theme', None)
if theme:
group = addGroup("Themes", "themes", catView, "generic")
defaultTheme = constants.DEFAULT_THEME_ID
themeList = theme.getAvailableThemeIds()
# check if current theme as set in options exists
currentTheme = self.get('currentTheme', None)
if currentTheme is not None:
if currentTheme not in themeList:
self.log.error("theme with id %s is not available,\nswitching "
"back to default theme", currentTheme)
# theme not valid, reset to default
self.set('currentTheme', defaultTheme)
themeChangedMessage = "icons:themeChanged"
nameValueList = map(lambda x: (x, x, themeChangedMessage), themeList)
addOpt("Current theme", "currentTheme",
nameValueList,
group,
defaultTheme
)
# ** units
group = addGroup("formats#Units and", "units", catView, "generic")
addOpt("Units", "unitType",
[("km", "use kilometers"),
("mile", "use miles")],
group,
"km")
addOpt("Time format", "currentTimeFormat",
[("24h", "24 hours"),
("12h", "12 hours")],
group,
"24h")
addOpt("Small imperial units", "unitTypeImperialSmall",
[("yards", "yards"),
("feet", "feet")],
group,
"yards")
# ** menus
group = addGroup("Menus", "menus", catView, "generic")
addOpt("Listable menu rows", "listableMenuRows",
[(2, "2 rows"),
(3, "3 rows"),
(4, "4 rows"),
(5, "5 rows"),
(6, "6 rows")],
group,
4)
if self.dmod.hasButtons():
# TODO: change this once there are more options for key shortcuts
# * the Keys category *
catKeys = addCat("Keys", "keys", "keys")
# * Device buttons
group = addGroup("Device buttons", "device_buttons", catKeys, "n900")
if self.dmod.hasVolumeKeys():
addBoolOpt("Use volume keys for zooming", "useVolumeKeys", group, True, "device:updateKeys")
# * the Navigation category
catNavigation = addCat("Navigation", "navigation", "navigation")
# * navigation language
group = addGroup("Language", "tbt_language", catNavigation, "generic")
# in the first string: first one goes to espeak, the second part goes to Google
directionsLanguages = [('ca ca', 'Catalan'),
('zh-yue zh-TW', 'Chinese(Cantonese)'),
('zh zh-CN', 'Chinese(Mandarin)'),
('hr hr', 'Croatian'),
('cs cs', 'Czech'),
('nl nl', 'Dutch'),
('en en', 'English'),
('fi fi', 'Finnish'),
('fr fr', 'French'),
('de de', 'German'),
('el el', 'Greek'),
('hi hi', 'Hindi'),
('hu hu', 'Hungarian'),
('id id', 'Indonesian'),
('it it', 'Italian'),
('lv lv', 'Latvian'),
('no no', 'Norwegian'),
('pl pl', 'Polish'),
('pt pt-BR', 'Portuguese(Brazil)'),
('pt-pt pt-PT', 'Portuguese(European)'),
('ro ro', 'Romanian'),
('ru ru', 'Russian'),
('sr sr', 'Serbian'),
('sk sk', 'Slovak'),
('es es', 'Spanish'),
('ta ta', 'Tamil'),
('tr tr', 'Turkish'),
('vi vi', 'Vietnamese')]
addItems("Language for directions", 'directionsLanguage',
directionsLanguages,
group,
"en en") # TODO: use locale for default language ?
addOpt("read Cyrillic with", "voiceNavigationCyrillicVoice",
[('ru', "Russian voice"),
(False, "current voice")],
group,
'ru')
# ** online routing submenu
group = addGroup("Routing", "routing", catNavigation, "generic")
addOpt("Routing provider", "routingProvider",
[(constants.ROUTING_PROVIDER_GOOGLE, "Google - <b>online</b>"),
(constants.ROUTING_PROVIDER_MONAV, "Monav - <b>on device</b>")],
group,
constants.DEFAULT_ROUTING_PROVIDER)
addBoolOpt("Avoid major highways", "routingAvoidHighways", group, False)
addBoolOpt("Avoid toll roads", "routingAvoidToll", group, False)
# ** routing data submenu
group = addGroup("data#Routing", "routing_data", catNavigation, "generic")
self._reloadMonavPackList()
#TODO: on demand reloading
addItems("Monav data pack", 'preferredMonavDataPack',
self.monavPackList,
group,
"no preferred pack"
)
# * turn by turn navigation
group = addGroup("Turn by turn", "turn_by_turn", catNavigation, "generic")
addOpt("Autostart navigation", "autostartNavigationDefaultOnAutoselectTurn",
[('disabled', constants.PANGO_OFF),
('enabled', constants.PANGO_ON)],
group,
'enabled')
addOpt("Make final turn announcement at", "pointReachedDistance",
[(10, "10 m"),
(20, "20 m"),
(30, "30 m"),
(60, "60 m"),
(100, "100 m"),
(200, "200 m"),
(300, "300 m"),
(400, "400 m"),
(500, "500 m")],
group,
30)
addOpt("Announce turns at least this far ahead", "minAnnounceDistance",
[(10, "10 m"),
(20, "20 m"),
(30, "30 m"),
(60, "60 m"),
(100, "100 m"),
(200, "200 m"),
(300, "300 m"),
(500, "500 m")],
group,
100)
addOpt("Announce turns at least this long ahead", "minAnnounceTime",
[(5, "5 s"),
(10, "10 s"),
(20, "20 s"),
(30, "30 s"),
(45, "45 s"),
(60, "60 s"),
(90, "90 s")],
group,
10)
# Note: actual values are in m/s, accurate to 2 decimal places. We
# store them as strings so lookup will work reliably.
addOpt("Increase turn announcement time above", "minAnnounceSpeed",
[("5.56", "20 km/h (12 mph)"),
("8.33", "30 km/h (20 mph)"),
("11.11", "40 km/h (25 mph)"),
("13.89", "50 km/h (30 mph)"),
("22.22", "80 km/h (50 mph)"),
("27.78", "100 km/h (60 mph)")],
group,
"13.89")
# Note: actual values are in m/s, accurate to 2 decimal places. We
# store them as strings so lookup will work reliably.
addOpt("Constant turn announcement time above", "maxAnnounceSpeed",
[("13.89", "50 km/h (30 mph)"),
("22.22", "80 km/h (50 mph)"),
("27.78", "100 km/h (60 mph)"),
("33.33", "120 km/h (75 mph)"),
("44.44", "160 km/h (100 mph)")],
group,
"27.78")
addOpt("Maximum turn announcement time", "maxAnnounceTime",
[(20, "20 s"),
(30, "30 s"),
(45, "45 s"),
(60, "60 s"),
(90, "90 s"),
(120, "120 s")],
group,
60)
# Note: these are exponents, stored as strings so lookup will work reliably.
addOpt("Announcement time increase type", "announcePower",
[("1.0", "Linear with speed"),
("0.5", "Very quickly, then linear"),
("0.75", "Quickly, then linear"),
("1.5", "Slowly, then linear"),
("2.0", "Quite slowly, then linear"),
("4.0", "Very slowly, then quite fast")],
group,
"2.0")
# ** rerouting submenu
group = addGroup("Rerouting", "rerouting", catNavigation, "generic")
addItems("Rerouting trigger distance", "reroutingThreshold",
[(None, "<b>disabled</b>"),
("10", "10 m"),
("20", "20 m"),
("30", "30 m (default)"),
("40", "40 m"),
("50", "50 m"),
("75", "75 m"),
("100", "100 m"),
("200", "200 m"),
("500", "500 m"),
("1000", "1000 m")],
group,
"30")
        # for some reason, the items menu doesn't work correctly for
        # non-string values (e.g. 10 won't work correctly but "10" would)
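        # For example (hypothetical option, illustrating the note above):
        #   addItems("Foo", "foo", [(10, "10 m")], group, 10)      # lookup breaks
        #   addItems("Foo", "foo", [("10", "10 m")], group, "10")  # works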
# * the POI category
catPOI = addCat("POI", "poi", "poi")
        # ** POI markers
group = addGroup("Markers", "poi_markers", catPOI, "generic")
addOpt("Show captions", "hideMarkerCaptionsBelowZl",
[(-1, "always"),
(5, "below zoomlevel 5"),
(7, "below zoomlevel 7"),
(10, "below zoomlevel 10"),
(11, "below zoomlevel 11"),
(12, "below zoomlevel 12"),
(13, "below zoomlevel 13"),
(14, "below zoomlevel 14"),
(15, "below zoomlevel 15"),
(16, "below zoomlevel 16"),
(17, "below zoomlevel 17"),
(18, "below zoomlevel 18"),
(65535, "never"),
],
group,
13)
# ** POI storage
group = addGroup("POI storage", "poi_storage", catPOI, "generic")
addOpt("POI database", "POIDBFilename",
[("poi.db", "shared with Mappero (EXPERIMENTAL)", "storePOI:reconnectToDb"),
("modrana_poi.db", "modRana only (default)", "storePOI:reconnectToDb")],
group,
"modrana_poi.db")
# ExportPOIDatabaseToCSV is just a dummy value,
# we just need to send a dump message to storePOI
addOpt("Export POI Database to CSV", "EportPOIDatabaseToCSV",
[("dump", "click to export", "storePOI:dumpToCSV"),
("dump", "click to export", "storePOI:dumpToCSV")],
group,
"dump")
# ** online POI search
group = addGroup("Local search", "poi_online", catPOI, "generic")
addOpt("Local search ordering", "GLSOrdering",
[("default", "ordering from provider"),
("distance", "order by distance")
],
group,
"default")
addOpt("Local search radius", "localSearchRadius",
[(500, "500 m"),
(1000, "1 km"),
(5000, "5 km"),
(10000, "10 km"),
(25000, "25 km"),
(50000, "50 km")],
group,
10000)
# addOpt("Google local search results", "GLSResults",
# [("8", "max 8 results"),
# ("16", "max 16 results"),
# ("32", "max 32 results")],
# group,
# "8")
addOpt("Local search captions", "drawGLSResultCaptions",
[("True", "draw captions"),
("False", "dont draw captions")],
group,
"True")
# * the Location category *
catLocation = addCat("Location", "location", "gps_satellite")
# ** GPS
group = addGroup("GPS", "gps", catLocation, "generic")
addBoolOpt("GPS", "GPSEnabled", group, True, "location:checkGPSEnabled")
if self.dmod.getLocationType() == 'gpsd':
knots = "knots per second"
meters = "meters per second"
if self.dmod.getDeviceIDString() == 'neo':
knots = "knots per second (old SHR)"
meters = "meters per second (new SHR)"
addOpt("GPSD reports speed in", "gpsdSpeedUnit",
[('knotsPerSecond', knots),
('metersPerSecond', meters)],
group,
'knotsPerSecond')
# * the Network category *
catNetwork = addCat("Network", "network", "network")
# * network *
group = addGroup("Network usage", "network_usage", catNetwork, "generic")
addOpt("Network", "network",
# [("off","No use of network"), #TODO: implement this :)
[("minimal", "Don't Download Map Tiles"),
("full", "Unlimited use of network")],
group,
"full")
addOpt("Auto tile download thread count", "maxAutoDownloadThreads2",
[(1, "1 (serial)", notifyRestartNeeded),
(5, "5", notifyRestartNeeded),
(10, "10 (default)", notifyRestartNeeded),
(20, "20", notifyRestartNeeded),
(30, "30", notifyRestartNeeded),
(40, "40", notifyRestartNeeded),
(50, "50", notifyRestartNeeded)],
group,
10)
addOpt("Batch tile download thread count", "maxDlThreads",
[(1, "1 (serial)"),
(2, "2"),
(3, "3"),
(5, "5 (default)"),
(10, "10"),
(20, "20")],
group,
10)
# * the Sound category *
catSound = addCat("Sound", "sound", "sound")
# * sound output
group = addGroup("Sound output", "sound_output", catSound, "sound")
addBoolOpt("Application wide sound output", "soundEnabled", group, True)
# * espeak group
group = addGroup("Voice", "voice_out", catSound, "espeak")
addOpt("Test voice output", "voiceTest",
[("test", "<b>press to start test</b>", "voice:voiceTest"),
("test", "<b>press to start test</b>", "voice:voiceTest")],
group,
"test")
addItems("Voice volume", "voiceVolume",
[('0', "0% - silent"),
('20', "20%"),
('50', "50%"),
('100', "100% (default)"),
('200', "200%"),
('300', "300%"),
('400', "400%"),
('500', "500%"),
('600', "600%"),
('700', "700%"),
('1000', "1000%"),
('1100', "1100%"),
('1200', "1200%"),
('1300', "1300% (might be distorted)"),
('1400', "1400% (might be distorted)"),
('1500', "1500% (might be distorted)")],
group,
100)
addOpt("Voice parameters", "voiceParameters",
[("auto", "<b>automatic</b>", "ms:options:espeakParams:auto"),
("manual", "<b>manual</b>", "ms:options:espeakParams:manual")],
group,
"auto")
if self.get('voiceParameters', None) == "manual":
self._updateVoiceManual('add')
# *** special group for a list per mode item states ***
self.keyStateListGroupID = addGroup("specialTools", 'specialGroup', 'specialParent',
"generic", registerToMenu=False,
backButtonAction="set:menu:optionsItemTools")
# addOpt("Network", "threadedDownload",
## [("off","No use of network"),
# [("True", "Use threads for download"),
# ("False", "Don't use threads for download")],
# "network",
# "on")
# addBoolOpt("Logging", "logging", "logging", True)
# options = []
# for i in (1,2,5,10,20,40,60):
# options.append((i, "%d sec" % i))
# addOpt("Frequency", "log_period", options, "logging", 2)
# addBoolOpt("Vector maps", "vmap", "map", True)
# [("0.5,0.5","over:50%,back:50%"),
# ("0.25,0.75","over:25%,back:75%"),
# ("0.75,0.25","over:75%,back:50%")],
# "map",
# "0.5,0.5")
# addBoolOpt("Old tracklogs", "old_tracklogs", "map", False)
# addBoolOpt("Latest tracklog", "tracklog", "map", True)
# * the Debug category
catDebug = addCat("Debug", "debug", "debug")
# ** redraw
group = addGroup("Redrawing", "redrawing", catDebug, "generic")
addBoolOpt("Print redraw time to terminal", "showRedrawTime", group, False, "display:checkShowRedrawTime")
# ** logging
group = addGroup("Logging", "logging", catDebug, "generic")
addBoolOpt("Log to file", "loggingStatus", group, False)
addBoolOpt("Log file compression", "compressLogFile", group, False)
# ** tiles
group = addGroup("Tiles", "tiles", catDebug, "generic")
addBoolOpt("Print tile cache status to terminal", "reportTileCacheStatus", group, False)
addBoolOpt("Tile loading status to terminal", "tileLoadingDebug", group, False)
addBoolOpt("Redraw screen once a new tile is loaded", "tileLoadedRedraw", group, True)
addOpt("In memory tile cache size", "memoryTileCacheSize",
[(20, "20", notifyRestartNeeded),
(50, "50", notifyRestartNeeded),
(100, "100", notifyRestartNeeded),
(150, "150 (default)", notifyRestartNeeded),
(200, "200", notifyRestartNeeded),
(300, "300", notifyRestartNeeded),
(500, "500", notifyRestartNeeded),
(1000, "1000", notifyRestartNeeded)],
group,
150)
addOpt("Auto tile download queue size", "autoDownloadQueueSize",
[(1, "1", notifyRestartNeeded),
(10, "10", notifyRestartNeeded),
(20, "20", notifyRestartNeeded),
(50, "50", notifyRestartNeeded),
(100, "100 (default)", notifyRestartNeeded),
(200, "200", notifyRestartNeeded),
(500, "500", notifyRestartNeeded),
(1000, "1000", notifyRestartNeeded)],
group,
100)
addBoolOpt("Remove dups before batch dl", "checkTiles", group, False)
# ** tracklog drawing
group = addGroup("Tracklogs", "tracklogs", catDebug, "generic")
addBoolOpt("Debug circles", "debugCircles", group, False)
addBoolOpt("Debug squares", "debugSquares", group, False)
# ** navigation
group = self.addGroup("Navigation", "navigation", catDebug, "generic")
addBoolOpt("Print Turn-By-Turn triggers", "debugTbT", group, False)
# ** gps
group = self.addGroup("GPS", "gps", catDebug, "generic")
# only show relevant
locationType = self.dmod.getLocationType()
if locationType == 'liblocation':
addBoolOpt("Show N900 GPS-fix", "n900GPSDebug", group, False)
addBoolOpt("GPS debug messages", "gpsDebugEnabled", group, False)
# addOpt("Tracklogs", "showTracklog",
# [(False, "Don't draw tracklogs"),
# ("simple", "Draw simple tracklogs")],
# "view",
# False)
self._setUndefinedToDefault()
def _setUndefinedToDefault(self):
# Set all undefined options to default values
for category, options in self.options.items():
for option in options[2]:
(title, variable, choices, category, default) = option
if default is not None:
                    if variable not in self.d:
self.set(variable, default)
def _removeNonPersistent(self, inputDict):
"""keys that begin with # are not saved
(as they mostly contain data that is either time sensitive or is
reloaded on startup)
ASSUMPTION: keys are strings of length>=1"""
try:
return dict((k, v) for k, v in six.iteritems(inputDict) if k[0] != '#')
except Exception:
self.log.exception("error while filtering options\nsome nonpersistent keys might have been left in\nNOTE: keys should be strings of length>=1")
return self.d
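    # Illustrative example of the filtering above:
    #   self._removeNonPersistent({'#fix': 2, 'units': 'km'}) -> {'units': 'km'}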
def _reloadKeyStateList(self, groupID, index, key):
"""reload the key state list to represent currently selected option"""
# clear the group
self.clearGroup(self.keyStateListGroupID)
# for each mode show the current key state
        modes = sorted(self.modrana.getModes().keys())
# get data for the given option
optionData = self.getOption(groupID, index)
# modify the option
for mode in modes:
optionD = list(optionData) # make a copy
            # modify the choices dictionary
            # NOTE: we have to work on a copy instead of modifying the dict
            # stored in the option, otherwise every added option would end up
            # with the same mode value
d = dict(optionD[2])
d['noToolsIcon'] = True # disable the tools icon
d['mode'] = mode # force a mode
optionD[2] = d
optionD[3] = self.keyStateListGroupID # set the group to the state list
self.addRawOption(optionD)
def handleMessage(self, message, messageType, args):
if messageType == "ml" and message == "scroll":
(direction, menuName) = args
index = self.options[menuName][1]
maxIndex = len(self.options[menuName][2]) - 1
if direction == "up" and index > 0:
newIndex = index - 1
self.options[menuName][1] = newIndex
elif direction == "down" and index < maxIndex:
newIndex = index + 1
self.options[menuName][1] = newIndex
elif message == "save":
self.modrana._saveOptions()
elif messageType == 'ml' and message == "go2ItemToolsMenu":
(groupID, index, key) = args
index = int(index)
# reload the tools menu
menus = self.m.get('menu', None)
if menus:
menuName = 'optionsItemTools'
reset = 'ms:options:resetKey:%s' % key
notify = "ml:notification:m:Item has been reset to default;3"
resetAction = "%s|%s|set:menu:options#%s" % (reset, notify, groupID)
menus.clearMenu(menuName, 'set:menu:options#%s' % groupID)
menus.addItem(menuName, 'state list#per mode', 'generic',
'ml:options:go2ItemStateListMenu:%s;%d;%s' % (groupID, index, key)
)
menus.addItem(menuName, 'default#reset to', 'generic', resetAction)
self.set('menu', menuName)
elif messageType == 'ml' and message == "go2ItemStateListMenu":
(groupID, index, key) = args
index = int(index)
# reload the option key state list for the given key
self._reloadKeyStateList(groupID, index, key)
# go to the menu
self.set('menu', 'options#%s' % self.keyStateListGroupID)
elif messageType == 'ms' and message == 'resetKey':
# reset a given options item to default, including any key modifiers
key = args
self.modrana.purgeKey(key)
default = self.getKeyDefault(key)
self.set(key, default)
elif messageType == 'ml' and message == 'addKeyModifier':
# make the value of a key mode specific
(key, mode) = args
self.modrana.addKeyModifier(key, mode=mode)
elif messageType == 'ml' and message == 'removeKeyModifier':
(key, mode) = args
# make the value of a key mode unspecific
self.modrana.removeKeyModifier(key, mode=mode)
elif messageType == "ms" and message == "espeakParams":
# switch between espeak parameter modes
if args == "manual":
self._updateVoiceManual("remove")
elif args == "auto":
self._updateVoiceManual("add")
elif messageType == "ml" and message == "editVariable":
(variable, label, description) = args
initialText = self.get(variable, "")
entry = self.m.get('textEntry', None)
if entry:
key = "editVariable_%s" % variable
entry.entryBox(self, key, label, initialText, description)
        # messages to toggle a key being mode specific or unspecific.
        # Mode specific keys: the options key can have a different value
        # depending on the current mode, thus enabling better customization
elif messageType == "ms" and message == "makeKeyModeSpecific":
self.modrana.addKeyModifier(args)
elif messageType == "ms" and message == "makeKeyModeUnSpecific":
self.modrana.removeKeyModifier(args)
elif messageType == 'ml' and message == 'update':
if len(args) >= 1:
target = args[0]
if target == 'packListMonav':
self._reloadMonavPackList()
else:
self.log.error('update target not specified')
def _reloadMonavPackList(self):
route = self.m.get('route', None)
if route:
self.log.info('reloading Monav data pack list')
            # we need a list of (name, key) tuples
            self.monavPackList = [(x, x) for x in route.getAvailableMonavDataPacks()]
def _updateVoiceManual(self, action):
"""add or remove custom voice parameters option items"""
if action == "add":
groupId = self._getGroupId("sound", "voice_out")
description = "<b>Note:</b> <tt>%language%</tt> will be replaced by current language code, <tt>%volume%</tt> with current voice volume, <tt>%message%</tt> with the message and <tt>%qmessage%</tt> will be replaced by the message in quotes"
self.addEditOption("Edit voice string", "voiceString", groupId, "Edit voice string",
description=description)
message = "ms:voice:resetStringToDefault:espeak"
self.addToggleOption("Reset voice string with <b>Espeak</b> default", "placeholder",
[("foo", "<i>click to use this default</i>", message)],
groupId,
"foo")
elif action == "remove":
self.removeOption("sound", "voice_out", "voiceString")
self.removeOption("sound", "voice_out", "placeholder")
def handleTextEntryResult(self, key, result):
(optionType, variable) = key.split("_", 1)
if optionType == "editVariable":
self.log.info("editing variable: %s with: %s", variable, result)
self.set(variable, result)
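    # Example: a key of "editVariable_units" splits into optionType
    # "editVariable" and variable "units", so the entered text is stored
    # under the "units" option key.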
def drawMenu(self, cr, menuName, args=None):
# custom options list drawing
clickHandler = self.m.get('clickHandler', None)
if self.menuModule and clickHandler:
# elements allocation
(e1, e2, e3, e4, alloc) = self.menuModule.threePlusOneMenuCoords()
(x1, y1) = e1
(x2, y2) = e2
(x3, y3) = e3
(x4, y4) = e4
(w1, h1, dx, dy) = alloc
(cancelButtonAction, firstItemIndex, options) = self.options[menuName]
# Top row:
# * parent menu
timeout = self.modrana.gui.msLongPress
self.menuModule.drawButton(cr, x1, y1, dx, dy, "", "back", cancelButtonAction,
timedAction=(timeout, "set:menu:None"))
# * scroll up
self.menuModule.drawButton(cr, x2, y2, dx, dy, "", "up_list", "ml:options:scroll:up;%s" % menuName)
# * scroll down
self.menuModule.drawButton(cr, x3, y3, dx, dy, "", "down_list", "ml:options:scroll:down;%s" % menuName)
# One option per row
for row in (0, 1, 2):
index = firstItemIndex + row
numItems = len(options)
cAction = None
if 0 <= index < numItems:
(title, variable, choices, group, default) = options[index]
# What's it set to currently?
if 'mode' in choices:
mode = choices['mode']
fakeMode = mode
else:
mode = self.get('mode', 'car')
fakeMode = None
value = self.get(variable, None, mode=mode)
# if the key has a modifier in this mode,
# append the mode label to the title
if 'mode' in choices:
# this currently means we are in the option state list
if self.get('mode', 'car') == choices['mode']:
# current mode
title = "%s: <small><sup><b>[%s]</b></sup></small>" % (
title, self.modrana.getModeLabel(mode))
else:
title = "%s: <small><sup>[%s]</sup></small>" % (
title, self.modrana.getModeLabel(mode))
else:
# normal option display
if self.modrana.hasKeyModifierInMode(variable, mode):
title = "%s: <small><sup><b>[%s]</b></sup></small>" % (
title, self.modrana.getModeLabel(mode))
else:
title = "%s:" % title
# Lookup the description of the currently-selected choice.
# (if any, use str(value) if it doesn't match any defined options)
# Also lookup the _next_ choice in the list, because that's what
# we will set the option to if it's clicked
optionType = choices["type"]
if optionType == "showAndEditVariable":
label = choices["label"]
description = choices["description"]
# show and edit the exact value of a variable manually
valueDescription = self.get(variable, "variable is not set yet")
valueDescription = "<tt><b>%s</b></tt>" % valueDescription
payload = "%s;%s;%s" % (variable, label, description)
onClick = "ml:options:editVariable:%s|set:needRedraw:True" % payload
elif optionType == "selectOneItem":
#show multiple items and make it possible to select one of them
# get current value
default = choices['default']
value = self.get(variable, default, mode=mode)
# show label for the given value
highlight = choices['itemDict'].get(value, (value, None))
valueDescription, highlightId = highlight[0], highlight[1]
# if no description is found, just display the value
valueDescription = "<tt><b>%s</b></tt>" % valueDescription
preAction = ""
                        # add any pre-actions (actions that should run before the
                        # menu is entered, e.g. updating data, etc.)
pre = choices.get('preAction', "")
if pre:
preAction += "%s|" % pre
#assure highlighting
if highlightId is not None:
# add an action before switching to the next menu that
# assures that items in the next menu are properly highlighted
# according to the state of the corresponding variable
preAction += "ml:menu:highlightItem:%s;%d|" % (choices['storageKey'], highlightId)
if fakeMode is None:
onClick = "%sset:menu:options1Item*%s*%s" % (preAction, group, variable)
else:
onClick = "%sset:menu:options1Item*%s*%s*%s" % (preAction, group, variable, fakeMode)
# the fake mode is used for listing and setting options for other mode than the current one
elif optionType == 'toggle':
states = choices['states']
nextChoice = states[0]
valueDescription = str(value)
useNext = False
for c in states:
(cVal, cName) = (c[0], c[1])
if useNext:
nextChoice = c
useNext = False
if str(value) == str(cVal):
valueDescription = cName
useNext = True
if len(c) == 3:
cAction = c[2]
# What should happen if this option is clicked -
# set the associated option to the next value in sequence
onClick = "setWithMode:%s:%s:%s" % (mode, variable, str(nextChoice[0]))
if cAction:
onClick += "|%s" % cAction
onClick += "|options:save"
onClick += "|set:needRedraw:1"
y = y4 + row * dy
if w1 > h1: # landscape
dx = (x4 - x1)
w = w1 - dx
else: # portrait
dx = x2
w = w1
smallButtonW = dx / 2.0
smallButtonH = dy / 2.0
# Draw the option button and make it clickable
self.menuModule.drawButton(cr,
x4,
y,
w,
dy,
None,
"generic", # background for a 3x1 icon
onClick)
# draw mode specific combined toggle & indicator
if self.modrana.hasKeyModifierInMode(variable, mode):
toggleText = constants.PANGO_ON + '#per Mode'
modeSpecToggleAction = "ml:options:removeKeyModifier:%s;%s" % (variable, mode)
else:
toggleText = "OFF#per Mode"
modeSpecToggleAction = "ml:options:addKeyModifier:%s;%s" % (variable, mode)
self.menuModule.drawButton(cr,
x4 + w - smallButtonW,
y,
smallButtonW,
smallButtonH,
toggleText,
"generic",
modeSpecToggleAction,
layer=1)
groupName = choices['groupName']
if 'noToolsIcon' not in choices:
# draw tools button
self.menuModule.drawButton(cr,
x4 + w - smallButtonW,
y + smallButtonH,
smallButtonW,
smallButtonH,
None,
"tools", # tools icon
"ml:options:go2ItemToolsMenu:%s;%d;%s" %
(groupName, index, variable),
layer=1)
border = 20
# 1st line: option name
self.menuModule.showText(cr, title, x4 + border, y + border, w * 0.95 - smallButtonW - border)
# 2nd line: current value
self.menuModule.showText(cr, valueDescription, x4 + 0.15 * w, y + 0.6 * dy,
w * 0.85 - smallButtonW - border)
# in corner: row number
indexX = x4 + w * 0.90 - smallButtonW
self.menuModule.showText(cr, "%d/%d" % (index + 1, numItems), indexX, y + dy * 0.07,
w * 0.10 - border, 20)
| gpl-3.0 | -762,559,210,851,248,400 | 43.559118 | 250 | 0.507908 | false |
wetek-enigma/enigma2 | lib/python/Screens/ChannelSelection.py | 2 | 110011 | # -*- coding: utf-8 -*-
from boxbranding import getMachineBuild, getMachineBrand, getMachineName
import os
from Tools.Profile import profile
from Screen import Screen
import Screens.InfoBar
import Components.ParentalControl
from Components.Button import Button
from Components.ServiceList import ServiceList, refreshServiceList
from Components.ActionMap import NumberActionMap, ActionMap, HelpableActionMap
from Components.MenuList import MenuList
from Components.ServiceEventTracker import ServiceEventTracker, InfoBarBase
from Components.Sources.List import List
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredTimerPath
from Components.Renderer.Picon import getPiconName
from Screens.TimerEdit import TimerSanityConflict
profile("ChannelSelection.py 1")
from EpgSelection import EPGSelection
from enigma import eActionMap, eServiceReference, eEPGCache, eServiceCenter, eRCInput, eTimer, ePoint, eDVBDB, iPlayableService, iServiceInformation, getPrevAsciiCode, eEnv, loadPNG
from Components.config import config, configfile, ConfigSubsection, ConfigText, ConfigYesNo
from Tools.NumericalTextInput import NumericalTextInput
profile("ChannelSelection.py 2")
from Components.NimManager import nimmanager
profile("ChannelSelection.py 2.1")
from Components.Sources.RdsDecoder import RdsDecoder
profile("ChannelSelection.py 2.2")
from Components.Sources.ServiceEvent import ServiceEvent
from Components.Sources.Event import Event
profile("ChannelSelection.py 2.3")
from Components.Input import Input
profile("ChannelSelection.py 3")
from Components.ChoiceList import ChoiceList, ChoiceEntryComponent
from RecordTimer import RecordTimerEntry, AFTEREVENT
from TimerEntry import TimerEntry, InstantRecordTimerEntry
from Screens.InputBox import InputBox, PinInput
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.ServiceInfo import ServiceInfo
from Screens.ButtonSetup import InfoBarButtonSetup, ButtonSetupActionMap, getButtonSetupFunctions
profile("ChannelSelection.py 4")
from Screens.PictureInPicture import PictureInPicture
from Screens.RdsDisplay import RassInteractive
from ServiceReference import ServiceReference
from Tools.BoundFunction import boundFunction
from Tools import Notifications
from Plugins.Plugin import PluginDescriptor
from Components.PluginComponent import plugins
from time import localtime, time
try:
from Plugins.SystemPlugins.PiPServiceRelation.plugin import getRelationDict
plugin_PiPServiceRelation_installed = True
except:
plugin_PiPServiceRelation_installed = False
profile("ChannelSelection.py after imports")
FLAG_SERVICE_NEW_FOUND = 64
FLAG_IS_DEDICATED_3D = 128 # defined in lib/dvb/idvb.h as dxNewFound = 64 and dxIsDedicated3D = 128
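# These flags form a bitmask stored per service; membership is tested with a
# bitwise AND, as done later in this file:
#   eDVBDB.getInstance().getFlag(eServiceReference(ref)) & FLAG_IS_DEDICATED_3D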
class BouquetSelector(Screen):
def __init__(self, session, bouquets, selectedFunc, enableWrapAround=True):
Screen.__init__(self, session)
Screen.setTitle(self, _("Choose Bouquet"))
self.selectedFunc=selectedFunc
self["actions"] = ActionMap(["OkCancelActions"],
{
"ok": self.okbuttonClick,
"cancel": self.cancelClick
})
		entries = [(x[0], x[1]) for x in bouquets]
		self["menu"] = MenuList(entries, enableWrapAround)
def getCurrent(self):
cur = self["menu"].getCurrent()
return cur and cur[1]
def okbuttonClick(self):
self.selectedFunc(self.getCurrent())
def up(self):
self["menu"].up()
def down(self):
self["menu"].down()
def cancelClick(self):
self.close(False)
class EpgBouquetSelector(BouquetSelector):
def __init__(self, session, bouquets, selectedFunc, enableWrapAround=False):
		BouquetSelector.__init__(self, session, bouquets, selectedFunc, enableWrapAround=enableWrapAround)
		self.skinName = "BouquetSelector"
		self.bouquets = bouquets
def okbuttonClick(self):
self.selectedFunc(self.getCurrent(),self.bouquets)
class SilentBouquetSelector:
def __init__(self, bouquets, enableWrapAround=False, current=0):
self.bouquets = [b[1] for b in bouquets]
self.pos = current
self.count = len(bouquets)
self.enableWrapAround = enableWrapAround
def up(self):
if self.pos > 0 or self.enableWrapAround:
self.pos = (self.pos - 1) % self.count
def down(self):
if self.pos < (self.count - 1) or self.enableWrapAround:
self.pos = (self.pos + 1) % self.count
def getCurrent(self):
return self.bouquets[self.pos]
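# Example: with enableWrapAround=True and three bouquets, up() from position 0
# wraps to position 2, because (0 - 1) % 3 == 2 in Python.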
# csel.bouquet_mark_edit values
OFF = 0
EDIT_BOUQUET = 1
EDIT_ALTERNATIVES = 2
def append_when_current_valid(current, menu, args, level=0, key=""):
if current and current.valid() and level <= config.usage.setup_level.index:
menu.append(ChoiceEntryComponent(key, args))
def removed_userbouquets_available():
for file in os.listdir("/etc/enigma2/"):
if file.startswith("userbouquet") and file.endswith(".del"):
return True
return False
class ChannelContextMenu(Screen):
def __init__(self, session, csel):
Screen.__init__(self, session)
Screen.setTitle(self, _("Channel list context menu"))
#raise Exception("we need a better summary screen here")
self.csel = csel
self.bsel = None
if self.isProtected():
self.onFirstExecBegin.append(boundFunction(self.session.openWithCallback, self.protectResult, PinInput, pinList=[x.value for x in config.ParentalControl.servicepin], triesEntry=config.ParentalControl.retries.servicepin, title=_("Please enter the correct pin code"), windowTitle=_("Enter pin code")))
self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "NumberActions", "MenuActions"],
{
"ok": self.okbuttonClick,
"cancel": self.cancelClick,
"blue": self.showServiceInPiP,
"red": self.playMain,
"menu": self.openSetup,
"2": self.renameEntry,
"3": self.findCurrentlyPlayed,
"5": self.addServiceToBouquetOrAlternative,
"6": self.toggleMoveModeSelect,
"8": self.removeEntry
})
menu = [ ]
self.removeFunction = False
self.addFunction = False
self.pipAvailable = False
current = csel.getCurrentSelection()
current_root = csel.getRoot()
current_sel_path = current.getPath()
current_sel_flags = current.flags
inBouquetRootList = current_root and 'FROM BOUQUET "bouquets.' in current_root.getPath() #FIXME HACK
inAlternativeList = current_root and 'FROM BOUQUET "alternatives' in current_root.getPath()
self.inBouquet = csel.getMutableList() is not None
haveBouquets = config.usage.multibouquet.value
from Components.ParentalControl import parentalControl
self.parentalControl = parentalControl
self.parentalControlEnabled = config.ParentalControl.servicepinactive.value
menu.append(ChoiceEntryComponent(text = (_("Settings..."), boundFunction(self.openSetup))))
if not (current_sel_path or current_sel_flags & (eServiceReference.isDirectory|eServiceReference.isMarker)):
append_when_current_valid(current, menu, (_("show transponder info"), self.showServiceInformations), level=2)
if csel.bouquet_mark_edit == OFF and not csel.entry_marked:
if not inBouquetRootList:
isPlayable = not (current_sel_flags & (eServiceReference.isMarker|eServiceReference.isDirectory))
if isPlayable:
for p in plugins.getPlugins(PluginDescriptor.WHERE_CHANNEL_CONTEXT_MENU):
append_when_current_valid(current, menu, (p.name, boundFunction(self.runPlugin, p)), key="bullet")
if config.servicelist.startupservice.value == self.csel.getCurrentSelection().toString():
append_when_current_valid(current, menu, (_("stop using as startup service"), self.unsetStartupService), level=0)
else:
append_when_current_valid(current, menu, (_("set as startup service"), self.setStartupService), level=0)
if self.parentalControlEnabled:
if self.parentalControl.getProtectionLevel(csel.getCurrentSelection().toCompareString()) == -1:
append_when_current_valid(current, menu, (_("add to parental protection"), boundFunction(self.addParentalProtection, csel.getCurrentSelection())), level=0)
else:
append_when_current_valid(current, menu, (_("remove from parental protection"), boundFunction(self.removeParentalProtection, csel.getCurrentSelection())), level=0)
if config.ParentalControl.hideBlacklist.value and not parentalControl.sessionPinCached and config.ParentalControl.storeservicepin.value != "never":
append_when_current_valid(current, menu, (_("Unhide parental control services"), boundFunction(self.unhideParentalServices)), level=0)
if SystemInfo["3DMode"]:
if eDVBDB.getInstance().getFlag(eServiceReference(current.toString())) & FLAG_IS_DEDICATED_3D:
append_when_current_valid(current, menu, (_("Unmark service as dedicated 3D service"), self.removeDedicated3DFlag), level=0)
else:
append_when_current_valid(current, menu, (_("Mark service as dedicated 3D service"), self.addDedicated3DFlag), level=0)
if haveBouquets:
bouquets = self.csel.getBouquetList()
if bouquets is None:
bouquetCnt = 0
else:
bouquetCnt = len(bouquets)
if not self.inBouquet or bouquetCnt > 1:
append_when_current_valid(current, menu, (_("add service to bouquet"), self.addServiceToBouquetSelected), level=0, key="5")
self.addFunction = self.addServiceToBouquetSelected
if not self.inBouquet:
append_when_current_valid(current, menu, (_("remove entry"), self.removeEntry), level = 0, key="8")
self.removeFunction = self.removeSatelliteService
else:
if not self.inBouquet:
append_when_current_valid(current, menu, (_("add service to favourites"), self.addServiceToBouquetSelected), level=0, key="5")
self.addFunction = self.addServiceToBouquetSelected
if SystemInfo["PIPAvailable"]:
if not self.parentalControlEnabled or self.parentalControl.getProtectionLevel(csel.getCurrentSelection().toCompareString()) == -1:
if self.csel.dopipzap:
append_when_current_valid(current, menu, (_("play in mainwindow"), self.playMain), level=0, key="red")
else:
append_when_current_valid(current, menu, (_("play as picture in picture"), self.showServiceInPiP), level=0, key="blue")
append_when_current_valid(current, menu, (_("find currently played service"), self.findCurrentlyPlayed), level=0, key="3")
else:
if 'FROM SATELLITES' in current_root.getPath() and current and _("Services") in eServiceCenter.getInstance().info(current).getName(current):
unsigned_orbpos = current.getUnsignedData(4) >> 16
if unsigned_orbpos == 0xFFFF:
append_when_current_valid(current, menu, (_("remove cable services"), self.removeSatelliteServices), level = 0)
elif unsigned_orbpos == 0xEEEE:
append_when_current_valid(current, menu, (_("remove terrestrial services"), self.removeSatelliteServices), level = 0)
else:
append_when_current_valid(current, menu, (_("remove selected satellite"), self.removeSatelliteServices), level = 0)
if haveBouquets:
if not self.inBouquet and not "PROVIDERS" in current_sel_path:
append_when_current_valid(current, menu, (_("copy to bouquets"), self.copyCurrentToBouquetList), level=0)
if ("flags == %d" %(FLAG_SERVICE_NEW_FOUND)) in current_sel_path:
append_when_current_valid(current, menu, (_("remove all new found flags"), self.removeAllNewFoundFlags), level=0)
if self.inBouquet:
append_when_current_valid(current, menu, (_("rename entry"), self.renameEntry), level=0, key="2")
if not inAlternativeList:
append_when_current_valid(current, menu, (_("remove entry"), self.removeEntry), level=0, key="8")
self.removeFunction = self.removeCurrentService
if current_root and ("flags == %d" %(FLAG_SERVICE_NEW_FOUND)) in current_root.getPath():
append_when_current_valid(current, menu, (_("remove new found flag"), self.removeNewFoundFlag), level=0)
else:
if self.parentalControlEnabled:
if self.parentalControl.getProtectionLevel(csel.getCurrentSelection().toCompareString()) == -1:
append_when_current_valid(current, menu, (_("add bouquet to parental protection"), boundFunction(self.addParentalProtection, csel.getCurrentSelection())), level=0)
else:
append_when_current_valid(current, menu, (_("remove bouquet from parental protection"), boundFunction(self.removeParentalProtection, csel.getCurrentSelection())), level=0)
menu.append(ChoiceEntryComponent(text=(_("add bouquet"), self.showBouquetInputBox)))
append_when_current_valid(current, menu, (_("rename entry"), self.renameEntry), level=0, key="2")
append_when_current_valid(current, menu, (_("remove entry"), self.removeEntry), level=0, key="8")
self.removeFunction = self.removeBouquet
if removed_userbouquets_available():
append_when_current_valid(current, menu, (_("purge deleted userbouquets"), self.purgeDeletedBouquets), level=0)
append_when_current_valid(current, menu, (_("restore deleted userbouquets"), self.restoreDeletedBouquets), level=0)
if self.inBouquet: # current list is editable?
if csel.bouquet_mark_edit == OFF:
if csel.movemode:
append_when_current_valid(current, menu, (_("disable move mode"), self.toggleMoveMode), level=0, key="6")
else:
append_when_current_valid(current, menu, (_("enable move mode"), self.toggleMoveMode), level=1, key="6")
if not csel.entry_marked and not inBouquetRootList and current_root and not (current_root.flags & eServiceReference.isGroup):
if current.type != -1:
menu.append(ChoiceEntryComponent(text=(_("add marker"), self.showMarkerInputBox)))
if not csel.movemode:
if haveBouquets:
append_when_current_valid(current, menu, (_("enable bouquet edit"), self.bouquetMarkStart), level=0)
else:
append_when_current_valid(current, menu, (_("enable favourite edit"), self.bouquetMarkStart), level=0)
if current_sel_flags & eServiceReference.isGroup:
append_when_current_valid(current, menu, (_("edit alternatives"), self.editAlternativeServices), level=2)
append_when_current_valid(current, menu, (_("show alternatives"), self.showAlternativeServices), level=2)
append_when_current_valid(current, menu, (_("remove all alternatives"), self.removeAlternativeServices), level=2)
elif not current_sel_flags & eServiceReference.isMarker:
append_when_current_valid(current, menu, (_("add alternatives"), self.addAlternativeServices), level=2)
else:
if csel.bouquet_mark_edit == EDIT_BOUQUET:
if haveBouquets:
append_when_current_valid(current, menu, (_("end bouquet edit"), self.bouquetMarkEnd), level=0)
append_when_current_valid(current, menu, (_("abort bouquet edit"), self.bouquetMarkAbort), level=0)
else:
append_when_current_valid(current, menu, (_("end favourites edit"), self.bouquetMarkEnd), level=0)
append_when_current_valid(current, menu, (_("abort favourites edit"), self.bouquetMarkAbort), level=0)
if current_sel_flags & eServiceReference.isMarker:
append_when_current_valid(current, menu, (_("rename entry"), self.renameEntry), level=0, key="2")
append_when_current_valid(current, menu, (_("remove entry"), self.removeEntry), level=0, key="8")
self.removeFunction = self.removeCurrentService
else:
append_when_current_valid(current, menu, (_("end alternatives edit"), self.bouquetMarkEnd), level=0)
append_when_current_valid(current, menu, (_("abort alternatives edit"), self.bouquetMarkAbort), level=0)
menu.append(ChoiceEntryComponent(text = (_("Reload Services"), self.reloadServices)))
self["menu"] = ChoiceList(menu)
def set3DMode(self, value):
if config.osd.threeDmode.value == "auto" and self.session.nav.currentlyPlayingServiceReference == self.csel.getCurrentSelection():
from Screens.VideoMode import applySettings
applySettings(value and "sidebyside" or config.osd.threeDmode.value)
def addDedicated3DFlag(self):
eDVBDB.getInstance().addFlag(eServiceReference(self.csel.getCurrentSelection().toString()), FLAG_IS_DEDICATED_3D)
eDVBDB.getInstance().reloadBouquets()
self.set3DMode(True)
self.close()
def removeDedicated3DFlag(self):
eDVBDB.getInstance().removeFlag(eServiceReference(self.csel.getCurrentSelection().toString()), FLAG_IS_DEDICATED_3D)
eDVBDB.getInstance().reloadBouquets()
self.set3DMode(False)
self.close()
def isProtected(self):
return self.csel.protectContextMenu and config.ParentalControl.setuppinactive.value and config.ParentalControl.config_sections.context_menus.value
def protectResult(self, answer):
if answer:
self.csel.protectContextMenu = False
elif answer is not None:
self.session.openWithCallback(self.close, MessageBox, _("The pin code you entered is wrong."), MessageBox.TYPE_ERROR)
else:
self.close()
def addServiceToBouquetOrAlternative(self):
if self.addFunction:
self.addFunction()
else:
return 0
def getCurrentSelectionName(self):
cur = self.csel.getCurrentSelection()
if cur and cur.valid():
name = eServiceCenter.getInstance().info(cur).getName(cur) or ServiceReference(cur).getServiceName() or ""
name = name.replace('\xc2\x86', '').replace('\xc2\x87', '')
return name
return ""
def removeEntry(self):
ref = self.csel.servicelist.getCurrent()
if self.removeFunction and ref and ref.valid():
if self.csel.confirmRemove:
list = [(_("yes"), True), (_("no"), False), (_("yes") + ", " + _("and never ask again this session again"), "never")]
self.session.openWithCallback(self.removeFunction, MessageBox, _("Are you sure to remove this entry?") + "\n%s" % self.getCurrentSelectionName(), list=list)
else:
self.removeFunction(True)
else:
return 0
def removeCurrentService(self, answer):
if answer:
if answer == "never":
self.csel.confirmRemove = False
self.csel.removeCurrentService()
self.close()
def removeSatelliteService(self, answer):
if answer:
if answer == "never":
self.csel.confirmRemove = False
self.csel.removeSatelliteService()
self.close()
def removeBouquet(self, answer):
if answer:
self.csel.removeBouquet()
eDVBDB.getInstance().reloadBouquets()
self.close()
def purgeDeletedBouquets(self):
		self.session.openWithCallback(self.purgeDeletedBouquetsCallback, MessageBox, _("Are you sure you want to purge all deleted userbouquets?"))
def purgeDeletedBouquetsCallback(self, answer):
if answer:
for file in os.listdir("/etc/enigma2/"):
if file.startswith("userbouquet") and file.endswith(".del"):
file = "/etc/enigma2/" + file
print "permantly remove file ", file
os.remove(file)
self.close()
def restoreDeletedBouquets(self):
for file in os.listdir("/etc/enigma2/"):
if file.startswith("userbouquet") and file.endswith(".del"):
file = "/etc/enigma2/" + file
print "restore file ", file[:-4]
os.rename(file, file[:-4])
eDVBDBInstance = eDVBDB.getInstance()
eDVBDBInstance.setLoadUnlinkedUserbouquets(True)
eDVBDBInstance.reloadBouquets()
eDVBDBInstance.setLoadUnlinkedUserbouquets(config.misc.load_unlinked_userbouquets.value)
refreshServiceList()
self.csel.showFavourites()
self.close()
def playMain(self):
sel = self.csel.getCurrentSelection()
if sel and sel.valid() and self.csel.dopipzap and (not self.parentalControlEnabled or self.parentalControl.getProtectionLevel(self.csel.getCurrentSelection().toCompareString()) == -1):
self.csel.zap()
self.csel.setCurrentSelection(sel)
self.close(True)
else:
return 0
def okbuttonClick(self):
self["menu"].getCurrent()[0][1]()
def openSetup(self):
from Screens.Setup import Setup
self.session.openWithCallback(self.cancelClick, Setup, "channelselection")
def cancelClick(self, dummy=False):
self.close(False)
def reloadServices(self):
eDVBDB.getInstance().reloadBouquets()
eDVBDB.getInstance().reloadServicelist()
self.session.openWithCallback(self.close, MessageBox, _("The servicelist is reloaded."), MessageBox.TYPE_INFO, timeout = 5)
def showServiceInformations(self):
self.session.open( ServiceInfo, self.csel.getCurrentSelection() )
def setStartupService(self):
self.session.openWithCallback(self.setStartupServiceCallback, MessageBox, _("Set startup service"), list = [(_("Only on startup"), "startup"), (_("Also on standby"), "standby")])
def setStartupServiceCallback(self, answer):
if answer:
config.servicelist.startupservice.value = self.csel.getCurrentSelection().toString()
path = ';'.join([i.toString() for i in self.csel.servicePath])
config.servicelist.startuproot.value = path
config.servicelist.startupmode.value = config.servicelist.lastmode.value
config.servicelist.startupservice_onstandby.value = answer == "standby"
config.servicelist.save()
configfile.save()
self.close()
def unsetStartupService(self):
config.servicelist.startupservice.value = ''
config.servicelist.startupservice_onstandby.value = False
config.servicelist.save()
configfile.save()
self.close()
def setStartupServiceStandby(self):
config.servicelist.startupservice_standby.value = self.csel.getCurrentSelection().toString()
config.servicelist.save()
configfile.save()
self.close()
def unsetStartupServiceStandby(self):
config.servicelist.startupservice_standby.value = ''
config.servicelist.save()
configfile.save()
self.close()
def showBouquetInputBox(self):
self.session.openWithCallback(self.bouquetInputCallback, VirtualKeyBoard, title=_("Please enter a name for the new bouquet"), text="bouquetname", maxSize=False, visible_width=56, type=Input.TEXT)
def bouquetInputCallback(self, bouquet):
if bouquet is not None:
self.csel.addBouquet(bouquet, None)
self.close()
def addParentalProtection(self, service):
self.parentalControl.protectService(service.toCompareString())
if config.ParentalControl.hideBlacklist.value and not self.parentalControl.sessionPinCached:
self.csel.servicelist.resetRoot()
self.close()
def removeParentalProtection(self, service):
self.session.openWithCallback(boundFunction(self.pinEntered, service.toCompareString()), PinInput, pinList=[config.ParentalControl.servicepin[0].value], triesEntry=config.ParentalControl.retries.servicepin, title=_("Enter the service pin"), windowTitle=_("Enter pin code"))
def pinEntered(self, service, answer):
if answer:
self.parentalControl.unProtectService(service)
self.close()
elif answer is not None:
self.session.openWithCallback(self.close, MessageBox, _("The pin code you entered is wrong."), MessageBox.TYPE_ERROR)
else:
self.close()
def unhideParentalServices(self):
if self.csel.protectContextMenu:
self.session.openWithCallback(self.unhideParentalServicesCallback, PinInput, pinList=[config.ParentalControl.servicepin[0].value], triesEntry=config.ParentalControl.retries.servicepin, title=_("Enter the service pin"), windowTitle=_("Enter pin code"))
else:
self.unhideParentalServicesCallback(True)
def unhideParentalServicesCallback(self, answer):
if answer:
service = self.csel.servicelist.getCurrent()
self.parentalControl.setSessionPinCached()
self.parentalControl.hideBlacklist()
self.csel.servicelist.resetRoot()
self.csel.servicelist.setCurrent(service)
self.close()
elif answer is not None:
self.session.openWithCallback(self.close, MessageBox, _("The pin code you entered is wrong."), MessageBox.TYPE_ERROR)
else:
self.close()
def showServiceInPiP(self):
if self.csel.dopipzap or (self.parentalControlEnabled and not self.parentalControl.getProtectionLevel(self.csel.getCurrentSelection().toCompareString()) == -1):
return 0
service = self.session.nav.getCurrentService()
info = service and service.info()
xres = str(info.getInfo(iServiceInformation.sVideoWidth))
		if int(xres) <= 720 or getMachineBuild() != 'blackbox7405':
if self.session.pipshown:
del self.session.pip
if SystemInfo["LCDMiniTV"] and int(config.lcd.modepip.value) >= 1:
print '[LCDMiniTV] disable PIP'
f = open("/proc/stb/lcd/mode", "w")
f.write(config.lcd.minitvmode.value)
f.close()
self.session.pip = self.session.instantiateDialog(PictureInPicture)
self.session.pip.setAnimationMode(0)
self.session.pip.show()
newservice = self.csel.servicelist.getCurrent()
currentBouquet = self.csel.servicelist and self.csel.servicelist.getRoot()
if newservice and newservice.valid():
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = self.csel.getCurrentServicePath()
self.session.pip.servicePath[1] = currentBouquet
if SystemInfo["LCDMiniTV"] and int(config.lcd.modepip.value) >= 1:
print '[LCDMiniTV] enable PIP'
f = open("/proc/stb/lcd/mode", "w")
f.write(config.lcd.minitvpipmode.value)
f.close()
f = open("/proc/stb/vmpeg/1/dst_width", "w")
f.write("0")
f.close()
f = open("/proc/stb/vmpeg/1/dst_height", "w")
f.write("0")
f.close()
f = open("/proc/stb/vmpeg/1/dst_apply", "w")
f.write("1")
f.close()
self.close(True)
else:
self.session.pipshown = False
del self.session.pip
if SystemInfo["LCDMiniTV"] and int(config.lcd.modepip.value) >= 1:
print '[LCDMiniTV] disable PIP'
f = open("/proc/stb/lcd/mode", "w")
f.write(config.lcd.minitvmode.value)
f.close()
self.session.openWithCallback(self.close, MessageBox, _("Could not open Picture in Picture"), MessageBox.TYPE_ERROR)
else:
self.session.open(MessageBox, _("Your %s %s does not support PiP HD") % (getMachineBrand(), getMachineName()), type = MessageBox.TYPE_INFO,timeout = 5 )
def addServiceToBouquetSelected(self):
bouquets = self.csel.getBouquetList()
if bouquets is None:
cnt = 0
else:
cnt = len(bouquets)
if cnt > 1: # show bouquet list
self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, bouquets, self.addCurrentServiceToBouquet)
elif cnt == 1: # add to only one existing bouquet
self.addCurrentServiceToBouquet(bouquets[0][1], closeBouquetSelection=False)
def bouquetSelClosed(self, recursive):
self.bsel = None
if recursive:
self.close(False)
def removeSatelliteServices(self):
self.csel.removeSatelliteServices()
self.close()
def copyCurrentToBouquetList(self):
self.csel.copyCurrentToBouquetList()
self.close()
def showMarkerInputBox(self):
self.session.openWithCallback(self.markerInputCallback, VirtualKeyBoard, title=_("Please enter a name for the new marker"), text="markername", maxSize=False, visible_width=56, type=Input.TEXT)
def markerInputCallback(self, marker):
if marker is not None:
self.csel.addMarker(marker)
self.close()
def addCurrentServiceToBouquet(self, dest, closeBouquetSelection=True):
self.csel.addServiceToBouquet(dest)
if self.bsel is not None:
self.bsel.close(True)
else:
self.close(closeBouquetSelection) # close bouquet selection
def renameEntry(self):
if self.inBouquet and self.csel.servicelist.getCurrent() and self.csel.servicelist.getCurrent().valid() and not self.csel.entry_marked:
self.csel.renameEntry()
self.close()
else:
return 0
def toggleMoveMode(self):
if self.inBouquet and self.csel.servicelist.getCurrent() and self.csel.servicelist.getCurrent().valid():
self.csel.toggleMoveMode()
self.close()
else:
return 0
def toggleMoveModeSelect(self):
if self.inBouquet and self.csel.servicelist.getCurrent() and self.csel.servicelist.getCurrent().valid():
self.csel.toggleMoveMode(True)
self.close()
else:
return 0
def bouquetMarkStart(self):
self.csel.startMarkedEdit(EDIT_BOUQUET)
self.close()
def bouquetMarkEnd(self):
self.csel.endMarkedEdit(abort=False)
self.close()
def bouquetMarkAbort(self):
self.csel.endMarkedEdit(abort=True)
self.close()
def removeNewFoundFlag(self):
eDVBDB.getInstance().removeFlag(self.csel.getCurrentSelection(), FLAG_SERVICE_NEW_FOUND)
self.close()
def removeAllNewFoundFlags(self):
curpath = self.csel.getCurrentSelection().getPath()
idx = curpath.find("satellitePosition == ")
if idx != -1:
tmp = curpath[idx+21:]
idx = tmp.find(')')
if idx != -1:
satpos = int(tmp[:idx])
eDVBDB.getInstance().removeFlags(FLAG_SERVICE_NEW_FOUND, -1, -1, -1, satpos)
self.close()
def editAlternativeServices(self):
self.csel.startMarkedEdit(EDIT_ALTERNATIVES)
self.close()
def showAlternativeServices(self):
self.csel["Service"].editmode = True
self.csel.enterPath(self.csel.getCurrentSelection())
self.close()
def removeAlternativeServices(self):
self.csel.removeAlternativeServices()
self.close()
def addAlternativeServices(self):
self.csel.addAlternativeServices()
self.csel.startMarkedEdit(EDIT_ALTERNATIVES)
self.close()
def findCurrentlyPlayed(self):
sel = self.csel.getCurrentSelection()
if sel and sel.valid() and not self.csel.entry_marked:
currentPlayingService = (hasattr(self.csel, "dopipzap") and self.csel.dopipzap) and self.session.pip.getCurrentService() or self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.csel.servicelist.setCurrent(currentPlayingService, adjust=False)
if self.csel.getCurrentSelection() != currentPlayingService:
self.csel.setCurrentSelection(sel)
self.close()
else:
return 0
def runPlugin(self, plugin):
plugin(session=self.session, service=self.csel.getCurrentSelection())
self.close()
class SelectionEventInfo:
def __init__(self):
self["Service"] = self["ServiceEvent"] = ServiceEvent()
self["Event"] = Event()
self.servicelist.connectSelChanged(self.__selectionChanged)
self.timer = eTimer()
self.timer.callback.append(self.updateEventInfo)
self.onShown.append(self.__selectionChanged)
def __selectionChanged(self):
if self.execing:
self.timer.start(100, True)
def updateEventInfo(self):
cur = self.getCurrentSelection()
service = self["Service"]
service.newService(cur)
self["Event"].newEvent(service.event)
def parseCurrentEvent(list):
	if len(list) > 0:
list = list[0]
begin = list[2] - (config.recording.margin_before.value * 60)
end = list[2] + list[3] + (config.recording.margin_after.value * 60)
name = list[1]
description = list[5]
eit = list[0]
return begin, end, name, description, eit
return False
def parseNextEvent(list):
	if len(list) > 1:
list = list[1]
begin = list[2] - (config.recording.margin_before.value * 60)
end = list[2] + list[3] + (config.recording.margin_after.value * 60)
name = list[1]
description = list[5]
eit = list[0]
return begin, end, name, description, eit
return False
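# Both parsers above assume each epgcache row is laid out following the
# 'ITBDSECX' selector used below, i.e. index 0 is the EIT event id, 1 the
# title, 2 the begin time, 3 the duration and 5 the description that ends up
# on the timer.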
class ChannelSelectionEPG(InfoBarButtonSetup):
def __init__(self):
self.ChoiceBoxDialog = None
self.RemoveTimerDialog = None
self.hotkeys = [("Info (EPG)", "info", "Infobar/openEventView"),
("Info (EPG)" + " " + _("long"), "info_long", "Infobar/showEventInfoPlugins"),
("Epg/Guide", "epg", "Infobar/EPGPressed/1"),
("Epg/Guide" + " " + _("long"), "epg_long", "Infobar/showEventInfoPlugins")]
self["ChannelSelectEPGActions"] = ButtonSetupActionMap(["ChannelSelectEPGActions"], dict((x[1], self.ButtonSetupGlobal) for x in self.hotkeys))
self.currentSavedPath = []
self.onExecBegin.append(self.clearLongkeyPressed)
self["ChannelSelectEPGActions"] = ActionMap(["ChannelSelectEPGActions"],
{
"showEPGList": self.showEPGList,
})
self["recordingactions"] = HelpableActionMap(self, "InfobarInstantRecord",
{
"ShortRecord": (self.RecordTimerQuestion, _("Add a record timer")),
'LongRecord': (self.doZapTimer, _('Add a zap timer for next event'))
},-1)
self['dialogactions'] = ActionMap(['SetupActions'],
{
'cancel': self.closeChoiceBoxDialog,
})
self['dialogactions'].execEnd()
def getKeyFunctions(self, key):
selection = eval("config.misc.ButtonSetup." + key + ".value.split(',')")
selected = []
for x in selection:
function = list(function for function in getButtonSetupFunctions() if function[1] == x and function[2] == "EPG")
if function:
selected.append(function[0])
return selected
def RecordTimerQuestion(self):
serviceref = ServiceReference(self.getCurrentSelection())
refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
self.epgcache = eEPGCache.getInstance()
test = [ 'ITBDSECX', (refstr, 1, -1, 12*60) ] # search next 12 hours
self.list = [] if self.epgcache is None else self.epgcache.lookupEvent(test)
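		# 'ITBDSECX' is an eEPGCache field-selector string; as read here it
		# requests event Id, Title, Begin, Duration, Short and Extended
		# description, Current time, and at least one row per service (X) --
		# this matches how the result tuples are indexed below.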
if len(self.list) < 1:
return
eventid = self.list[0][0]
if len(self.list) == 1:
eventidnext = None
else:
eventidnext = self.list[1][0]
eventname = str(self.list[0][1])
if eventid is None:
return
indx = int(self.servicelist.getCurrentIndex())
selx = self.servicelist.instance.size().width()
while indx+1 > config.usage.serviceitems_per_page.value:
indx = indx - config.usage.serviceitems_per_page.value
pos = self.servicelist.instance.position().y()
sely = int(pos)+(int(self.servicelist.ItemHeight)*int(indx))
temp = int(self.servicelist.instance.position().y())+int(self.servicelist.instance.size().height())
if int(sely) >= temp:
sely = int(sely) - int(self.listHeight)
menu1 = _("Record now")
menu2 = _("Record next")
for timer in self.session.nav.RecordTimer.timer_list:
if timer.eit == eventid and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
menu1 = _("Stop recording now")
elif eventidnext is not None:
if timer.eit == eventidnext and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
menu2 = _("Change next timer")
if eventidnext is not None:
menu = [(menu1, 'CALLFUNC', self.ChoiceBoxCB, self.doRecordCurrentTimer), (menu2, 'CALLFUNC', self.ChoiceBoxCB, self.doRecordNextTimer)]
else:
menu = [(menu1, 'CALLFUNC', self.ChoiceBoxCB, self.doRecordCurrentTimer)]
self.ChoiceBoxDialog = self.session.instantiateDialog(ChoiceBox, list=menu, keys=['red', 'green'], skin_name="RecordTimerQuestion")
self.ChoiceBoxDialog.instance.move(ePoint(selx-self.ChoiceBoxDialog.instance.size().width(),self.instance.position().y()+sely))
self.showChoiceBoxDialog()
def ChoiceBoxCB(self, choice):
self.closeChoiceBoxDialog()
if choice:
			try:
				choice()
			except:
				# choice may be a plain value rather than a callable; ignore it
				pass
def RemoveTimerDialogCB(self, choice):
self.closeChoiceBoxDialog()
if choice:
choice(self)
def showChoiceBoxDialog(self):
self['actions'].setEnabled(False)
self['recordingactions'].setEnabled(False)
self['ChannelSelectEPGActions'].setEnabled(False)
self["ChannelSelectBaseActions"].setEnabled(False)
self['dialogactions'].execBegin()
self.ChoiceBoxDialog['actions'].execBegin()
self.ChoiceBoxDialog.show()
def closeChoiceBoxDialog(self):
self['dialogactions'].execEnd()
if self.ChoiceBoxDialog:
self.ChoiceBoxDialog['actions'].execEnd()
self.session.deleteDialog(self.ChoiceBoxDialog)
self['actions'].setEnabled(True)
self['recordingactions'].setEnabled(True)
self['ChannelSelectEPGActions'].setEnabled(True)
self["ChannelSelectBaseActions"].setEnabled(True)
def doRecordCurrentTimer(self):
		self.doInstantTimer(0, parseCurrentEvent)
def doRecordNextTimer(self):
self.doInstantTimer(0, parseNextEvent, True)
def doZapTimer(self):
self.doInstantTimer(1, parseNextEvent)
def editTimer(self, timer):
self.session.open(TimerEntry, timer)
def doInstantTimer(self, zap, parseEvent, next=False):
serviceref = ServiceReference(self.getCurrentSelection())
refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
self.epgcache = eEPGCache.getInstance()
test = [ 'ITBDSECX', (refstr, 1, -1, 12*60) ] # search next 12 hours
self.list = [] if self.epgcache is None else self.epgcache.lookupEvent(test)
if self.list is None:
return
if not next:
eventid = self.list[0][0]
eventname = str(self.list[0][1])
else:
if len(self.list) < 2:
return
eventid = self.list[1][0]
eventname = str(self.list[1][1])
if eventid is None:
return
indx = int(self.servicelist.getCurrentIndex())
selx = self.servicelist.instance.size().width()
while indx+1 > config.usage.serviceitems_per_page.value:
indx = indx - config.usage.serviceitems_per_page.value
pos = self.servicelist.instance.position().y()
sely = int(pos)+(int(self.servicelist.ItemHeight)*int(indx))
temp = int(self.servicelist.instance.position().y())+int(self.servicelist.instance.size().height())
if int(sely) >= temp:
sely = int(sely) - int(self.listHeight)
for timer in self.session.nav.RecordTimer.timer_list:
if timer.eit == eventid and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
if not next:
cb_func = lambda ret: self.removeTimer(timer)
menu = [(_("Yes"), 'CALLFUNC', cb_func), (_("No"), 'CALLFUNC', self.ChoiceBoxCB)]
self.ChoiceBoxDialog = self.session.instantiateDialog(MessageBox, text=_('Do you really want to remove the timer for %s?') % eventname, list=menu, skin_name="RemoveTimerQuestion", picon=False)
else:
cb_func1 = lambda ret: self.removeTimer(timer)
cb_func2 = lambda ret: self.editTimer(timer)
menu = [(_("Delete timer"), 'CALLFUNC', self.RemoveTimerDialogCB, cb_func1), (_("Edit timer"), 'CALLFUNC', self.RemoveTimerDialogCB, cb_func2)]
self.ChoiceBoxDialog = self.session.instantiateDialog(ChoiceBox, title=_("Select action for timer %s:") % eventname, list=menu, keys=['green', 'blue'], skin_name="RecordTimerQuestion")
self.ChoiceBoxDialog.instance.move(ePoint(selx-self.ChoiceBoxDialog.instance.size().width(),self.instance.position().y()+sely))
self.showChoiceBoxDialog()
break
else:
newEntry = RecordTimerEntry(serviceref, checkOldTimers = True, dirname = preferredTimerPath(), *parseEvent(self.list))
if not newEntry:
return
self.InstantRecordDialog = self.session.instantiateDialog(InstantRecordTimerEntry, newEntry, zap)
retval = [True, self.InstantRecordDialog.retval()]
self.session.deleteDialogWithCallback(self.finishedAdd, self.InstantRecordDialog, retval)
def finishedAdd(self, answer):
# print "finished add"
if answer[0]:
entry = answer[1]
simulTimerList = self.session.nav.RecordTimer.record(entry)
if simulTimerList is not None:
for x in simulTimerList:
if x.setAutoincreaseEnd(entry):
self.session.nav.RecordTimer.timeChanged(x)
simulTimerList = self.session.nav.RecordTimer.record(entry)
if simulTimerList is not None:
if not entry.repeated and not config.recording.margin_before.value and not config.recording.margin_after.value and len(simulTimerList) > 1:
change_time = False
conflict_begin = simulTimerList[1].begin
conflict_end = simulTimerList[1].end
if conflict_begin == entry.end:
entry.end -= 30
change_time = True
elif entry.begin == conflict_end:
entry.begin += 30
change_time = True
if change_time:
simulTimerList = self.session.nav.RecordTimer.record(entry)
if simulTimerList is not None:
self.session.openWithCallback(self.finishSanityCorrection, TimerSanityConflict, simulTimerList)
def finishSanityCorrection(self, answer):
self.finishedAdd(answer)
def removeTimer(self, timer):
timer.afterEvent = AFTEREVENT.NONE
self.session.nav.RecordTimer.removeEntry(timer)
self.closeChoiceBoxDialog()
def showEPGList(self):
ref=self.getCurrentSelection()
if ref:
self.savedService = ref
self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, serviceChangeCB=self.changeServiceCB, EPGtype="single")
def SingleServiceEPGClosed(self, ret=False):
if ret:
service = self.getCurrentSelection()
if service is not None:
self.saveChannel(service)
self.addToHistory(service)
self.close()
else:
self.setCurrentSelection(self.savedService)
def changeServiceCB(self, direction, epg):
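# Step the selection in the requested direction, skipping over marker
# entries (unless we arrive back where we started), then feed the newly
# selected service to the single-service EPG.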
beg = self.getCurrentSelection()
while True:
if direction > 0:
self.moveDown()
else:
self.moveUp()
cur = self.getCurrentSelection()
if cur == beg or not (cur.flags & eServiceReference.isMarker):
break
epg.setService(ServiceReference(self.getCurrentSelection()))
def zapToService(self, service, preview=False, zapback=False):
if self.startServiceRef is None:
self.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if service is not None:
if self.servicelist.getRoot() != self.epg_bouquet:
self.servicelist.clearPath()
if self.servicelist.bouquet_root != self.epg_bouquet:
self.servicelist.enterPath(self.servicelist.bouquet_root)
self.servicelist.enterPath(self.epg_bouquet)
self.servicelist.setCurrent(service)
if not zapback or preview:
self.zap(enable_pipzap=True)
if (self.dopipzap or zapback) and not preview:
self.zapBack()
if not preview:
self.startServiceRef = None
self.startRoot = None
class ChannelSelectionEdit:
def __init__(self):
self.entry_marked = False
self.bouquet_mark_edit = OFF
self.mutableList = None
self.__marked = [ ]
self.saved_title = None
self.saved_root = None
self.current_ref = None
self.editMode = False
self.confirmRemove = True
class ChannelSelectionEditActionMap(ActionMap):
def __init__(self, csel, contexts=None, actions=None, prio=0):
if not contexts: contexts = []
if not actions: actions = {}
ActionMap.__init__(self, contexts, actions, prio)
self.csel = csel
def action(self, contexts, action):
if action == "cancel":
self.csel.handleEditCancel()
return 0 # fall-through
elif action == "ok":
return 0 # fall-through
else:
return ActionMap.action(self, contexts, action)
self["ChannelSelectEditActions"] = ChannelSelectionEditActionMap(self, ["ChannelSelectEditActions", "OkCancelActions"],
{
"contextMenu": self.doContext,
})
def getMutableList(self, root=eServiceReference()):
if self.mutableList is not None:
return self.mutableList
serviceHandler = eServiceCenter.getInstance()
if not root.valid():
root=self.getRoot()
list = root and serviceHandler.list(root)
if list is not None:
return list.startEdit()
return None
def buildBouquetID(self, str):
tmp = str.lower()
name = ''
for c in tmp:
if ('a' <= c <= 'z') or ('0' <= c <= '9'):
name += c
else:
name += '_'
return name
def renameEntry(self):
self.editMode = True
cur = self.getCurrentSelection()
if cur and cur.valid():
name = eServiceCenter.getInstance().info(cur).getName(cur) or ServiceReference(cur).getServiceName() or ""
name = name.replace('\xc2\x86', '').replace('\xc2\x87', '')
if name:
self.session.openWithCallback(self.renameEntryCallback, VirtualKeyBoard, title=_("Please enter new name:"), text=name)
else:
return 0
def renameEntryCallback(self, name):
if name:
mutableList = self.getMutableList()
if mutableList:
current = self.servicelist.getCurrent()
current.setName(name)
index = self.servicelist.getCurrentIndex()
mutableList.removeService(current, False)
mutableList.addService(current)
mutableList.moveService(current, index)
mutableList.flushChanges()
self.servicelist.addService(current, True)
self.servicelist.removeCurrent()
if not self.servicelist.atEnd():
self.servicelist.moveUp()
def addMarker(self, name):
current = self.servicelist.getCurrent()
mutableList = self.getMutableList()
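# Service type 64 denotes a marker; cnt is incremented until the bouquet
# accepts the generated reference as unique.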
cnt = 0
while mutableList:
str = '1:64:%d:0:0:0:0:0:0:0::%s'%(cnt, name)
ref = eServiceReference(str)
if current and current.valid():
if not mutableList.addService(ref, current):
self.servicelist.addService(ref, True)
mutableList.flushChanges()
break
elif not mutableList.addService(ref):
self.servicelist.addService(ref, True)
mutableList.flushChanges()
break
cnt+=1
def addAlternativeServices(self):
cur_service = ServiceReference(self.getCurrentSelection())
root = self.getRoot()
cur_root = root and ServiceReference(root)
mutableBouquet = cur_root.list().startEdit()
if mutableBouquet:
name = cur_service.getServiceName()
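# Type 134 references an alternatives group: a sub-bouquet listing
# interchangeable services for one logical channel. The current service is
# replaced by the group and then re-added as its first member.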
if self.mode == MODE_TV:
str = '1:134:1:0:0:0:0:0:0:0:FROM BOUQUET \"alternatives.%s.tv\" ORDER BY bouquet'%(self.buildBouquetID(name))
else:
str = '1:134:2:0:0:0:0:0:0:0:FROM BOUQUET \"alternatives.%s.radio\" ORDER BY bouquet'%(self.buildBouquetID(name))
new_ref = ServiceReference(str)
if not mutableBouquet.addService(new_ref.ref, cur_service.ref):
mutableBouquet.removeService(cur_service.ref)
mutableBouquet.flushChanges()
eDVBDB.getInstance().reloadBouquets()
mutableAlternatives = new_ref.list().startEdit()
if mutableAlternatives:
mutableAlternatives.setListName(name)
if mutableAlternatives.addService(cur_service.ref):
print "add", cur_service.ref.toString(), "to new alternatives failed"
mutableAlternatives.flushChanges()
self.servicelist.addService(new_ref.ref, True)
self.servicelist.removeCurrent()
if not self.atEnd():
self.servicelist.moveUp()
if cur_service.ref.toString() == self.lastservice.value:
self.saveChannel(new_ref.ref)
if self.startServiceRef and cur_service.ref == self.startServiceRef:
self.startServiceRef = new_ref.ref
else:
print "get mutable list for new created alternatives failed"
else:
print "add", str, "to", cur_root.getServiceName(), "failed"
else:
print "bouquetlist is not editable"
def addBouquet(self, bName, services):
serviceHandler = eServiceCenter.getInstance()
mutableBouquetList = serviceHandler.list(self.bouquet_root).startEdit()
if mutableBouquetList:
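# User bouquets are themselves services of type 7; the reference embeds the
# bouquet file name derived from the sanitized display name.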
if self.mode == MODE_TV:
bName += _(" (TV)")
str = '1:7:1:0:0:0:0:0:0:0:FROM BOUQUET \"userbouquet.%s.tv\" ORDER BY bouquet'%(self.buildBouquetID(bName))
else:
bName += _(" (Radio)")
str = '1:7:2:0:0:0:0:0:0:0:FROM BOUQUET \"userbouquet.%s.radio\" ORDER BY bouquet'%(self.buildBouquetID(bName))
new_bouquet_ref = eServiceReference(str)
if not mutableBouquetList.addService(new_bouquet_ref):
mutableBouquetList.flushChanges()
eDVBDB.getInstance().reloadBouquets()
mutableBouquet = serviceHandler.list(new_bouquet_ref).startEdit()
if mutableBouquet:
mutableBouquet.setListName(bName)
if services is not None:
for service in services:
if mutableBouquet.addService(service):
print "add", service.toString(), "to new bouquet failed"
mutableBouquet.flushChanges()
else:
print "get mutable list for new created bouquet failed"
# do some voodoo to check if current_root is equal to bouquet_root
cur_root = self.getRoot()
str1 = cur_root and cur_root.toString()
pos1 = str1 and str1.find("FROM BOUQUET") or -1
pos2 = self.bouquet_rootstr.find("FROM BOUQUET")
if pos1 != -1 and pos2 != -1 and str1[pos1:] == self.bouquet_rootstr[pos2:]:
self.servicelist.addService(new_bouquet_ref)
self.servicelist.resetRoot()
else:
print "add", str, "to bouquets failed"
else:
print "bouquetlist is not editable"
def copyCurrentToBouquetList(self):
provider = ServiceReference(self.getCurrentSelection())
providerName = provider.getServiceName()
serviceHandler = eServiceCenter.getInstance()
services = serviceHandler.list(provider.ref)
self.addBouquet(providerName, services and services.getContent('R', True))
def removeAlternativeServices(self):
cur_service = ServiceReference(self.getCurrentSelection())
end = self.atEnd()
root = self.getRoot()
cur_root = root and ServiceReference(root)
list = cur_service.list()
first_in_alternative = list and list.getNext()
if first_in_alternative:
edit_root = cur_root and cur_root.list().startEdit()
if edit_root:
if not edit_root.addService(first_in_alternative, cur_service.ref):
self.servicelist.addService(first_in_alternative, True)
if cur_service.ref.toString() == self.lastservice.value:
self.saveChannel(first_in_alternative)
if self.startServiceRef and cur_service.ref == self.startServiceRef:
self.startServiceRef = first_in_alternative
else:
print "couldn't add first alternative service to current root"
else:
print "couldn't edit current root!!"
else:
print "remove empty alternative list !!"
self.removeBouquet()
if not end:
self.servicelist.moveUp()
def removeBouquet(self):
refstr = self.getCurrentSelection().toString()
pos = refstr.find('FROM BOUQUET "')
filename = None
self.removeCurrentService(bouquet=True)
def removeSatelliteService(self):
current = self.getCurrentSelection()
eDVBDB.getInstance().removeService(current)
refreshServiceList()
if not self.atEnd():
self.servicelist.moveUp()
def removeSatelliteServices(self):
current = self.getCurrentSelection()
unsigned_orbpos = current.getUnsignedData(4) >> 16
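# The upper 16 bits of data word 4 hold the orbital position; 0xFFFF and
# 0xEEEE are pseudo-positions reserved for cable and terrestrial services.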
if unsigned_orbpos == 0xFFFF:
messageText = _("Are you sure to remove all cable services?")
elif unsigned_orbpos == 0xEEEE:
messageText = _("Are you sure to remove all terrestrial services?")
else:
if unsigned_orbpos > 1800:
unsigned_orbpos = 3600 - unsigned_orbpos
direction = _("W")
else:
direction = _("E")
messageText = _("Are you sure to remove all %d.%d%s%s services?") % (unsigned_orbpos/10, unsigned_orbpos%10, "\xc2\xb0", direction)
self.session.openWithCallback(self.removeSatelliteServicesCallback, MessageBox, messageText)
def removeSatelliteServicesCallback(self, answer):
if answer:
currentIndex = self.servicelist.getCurrentIndex()
current = self.getCurrentSelection()
unsigned_orbpos = current.getUnsignedData(4) >> 16
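# eDVBDB.removeServices() appears to take the DVB namespace as a signed
# 32 bit value, hence the "- 0x100000000" conversion of the unsigned
# constants below (assumed from the API's use of signed integers).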
if unsigned_orbpos == 0xFFFF:
eDVBDB.getInstance().removeServices(int("0xFFFF0000", 16) - 0x100000000)
elif unsigned_orbpos == 0xEEEE:
eDVBDB.getInstance().removeServices(int("0xEEEE0000", 16) - 0x100000000)
else:
curpath = current.getPath()
idx = curpath.find("satellitePosition == ")
if idx != -1:
tmp = curpath[idx + 21:]
idx = tmp.find(')')
if idx != -1:
satpos = int(tmp[:idx])
eDVBDB.getInstance().removeServices(-1, -1, -1, satpos)
refreshServiceList()
if hasattr(self, 'showSatellites'):
self.showSatellites()
self.servicelist.moveToIndex(currentIndex)
if currentIndex != self.servicelist.getCurrentIndex():
self.servicelist.instance.moveSelection(self.servicelist.instance.moveEnd)
# multiple marked entry stuff ( edit mode, later multiepg selection )
def startMarkedEdit(self, type):
self.savedPath = self.servicePath[:]
if type == EDIT_ALTERNATIVES:
self.current_ref = self.getCurrentSelection()
self.enterPath(self.current_ref)
self.mutableList = self.getMutableList()
# add all services from the current list to internal marked set in listboxservicecontent
self.clearMarks() # this clears the internal marked set in the listboxservicecontent
self.saved_title = self.getTitle()
pos = self.saved_title.find(')')
new_title = self.saved_title[:pos+1]
if type == EDIT_ALTERNATIVES:
self.bouquet_mark_edit = EDIT_ALTERNATIVES
new_title += ' ' + _("[alternative edit]")
else:
self.bouquet_mark_edit = EDIT_BOUQUET
if config.usage.multibouquet.value:
new_title += ' ' + _("[bouquet edit]")
else:
new_title += ' ' + _("[favourite edit]")
self.setTitle(new_title)
self.__marked = self.servicelist.getRootServices()
for x in self.__marked:
self.servicelist.addMarked(eServiceReference(x))
self["Service"].editmode = True
def endMarkedEdit(self, abort):
if not abort and self.mutableList is not None:
new_marked = set(self.servicelist.getMarked())
old_marked = set(self.__marked)
removed = old_marked - new_marked
added = new_marked - old_marked
changed = False
for x in removed:
changed = True
self.mutableList.removeService(eServiceReference(x))
for x in added:
changed = True
self.mutableList.addService(eServiceReference(x))
if changed:
if self.bouquet_mark_edit == EDIT_ALTERNATIVES and not new_marked and self.__marked:
self.mutableList.addService(eServiceReference(self.__marked[0]))
self.mutableList.flushChanges()
self.__marked = []
self.clearMarks()
self.bouquet_mark_edit = OFF
self.mutableList = None
self.setTitle(self.saved_title)
self.saved_title = None
# self.servicePath is just a reference to servicePathTv or Radio...
# so we never ever use the assignment operator on self.servicePath
del self.servicePath[:] # remove all elements
self.servicePath += self.savedPath # add saved elements
del self.savedPath
self.setRoot(self.servicePath[-1])
if self.current_ref:
self.setCurrentSelection(self.current_ref)
self.current_ref = None
def clearMarks(self):
self.servicelist.clearMarks()
def doMark(self):
ref = self.servicelist.getCurrent()
if self.servicelist.isMarked(ref):
self.servicelist.removeMarked(ref)
else:
self.servicelist.addMarked(ref)
def removeCurrentEntry(self, bouquet=False):
if self.confirmRemove:
list = [(_("yes"), True), (_("no"), False), (_("yes") + ", " + _("and never ask again this session again"), "never")]
self.session.openWithCallback(boundFunction(self.removeCurrentEntryCallback, bouquet), MessageBox, _("Are you sure to remove this entry?"), list=list)
else:
self.removeCurrentEntryCallback(bouquet, True)
def removeCurrentEntryCallback(self, bouquet, answer):
if answer:
if answer == "never":
self.confirmRemove = False
if bouquet:
self.removeBouquet()
else:
self.removeCurrentService()
def removeCurrentService(self, bouquet=False):
self.editMode = True
ref = self.servicelist.getCurrent()
mutableList = self.getMutableList()
if ref.valid() and mutableList is not None:
if not mutableList.removeService(ref):
mutableList.flushChanges() #FIXME don't flush on each single removed service
self.servicelist.removeCurrent()
self.servicelist.resetRoot()
playingref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if not bouquet and playingref and ref == playingref:
try:
doClose = not config.usage.servicelistpreview_mode.value or ref == self.session.nav.getCurrentlyPlayingServiceOrGroup()
except:
doClose = False
if self.startServiceRef is None and not doClose:
self.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
ref = self.getCurrentSelection()
if self.movemode and (self.isBasePathEqual(self.bouquet_root) or "userbouquet." in ref.toString()):
self.toggleMoveMarked()
elif (ref.flags & eServiceReference.flagDirectory) == eServiceReference.flagDirectory:
if Components.ParentalControl.parentalControl.isServicePlayable(ref, self.bouquetParentalControlCallback, self.session):
self.enterPath(ref)
self.gotoCurrentServiceOrProvider(ref)
elif self.bouquet_mark_edit != OFF:
if not (self.bouquet_mark_edit == EDIT_ALTERNATIVES and ref.flags & eServiceReference.isGroup):
self.doMark()
elif not (ref.flags & eServiceReference.isMarker or ref.type == -1):
root = self.getRoot()
if not root or not (root.flags & eServiceReference.isGroup):
self.zap(enable_pipzap=doClose, preview_zap=not doClose)
self.asciiOff()
def addServiceToBouquet(self, dest, service=None):
mutableList = self.getMutableList(dest)
if mutableList is not None:
if service is None: #use current selected service
service = self.servicelist.getCurrent()
if not mutableList.addService(service):
mutableList.flushChanges()
# do some voodoo to check if current_root is equal to dest
cur_root = self.getRoot()
str1 = cur_root and cur_root.toString() or '' # empty string keeps find() safe when there is no root
str2 = dest.toString()
pos1 = str1.find("FROM BOUQUET")
pos2 = str2.find("FROM BOUQUET")
if pos1 != -1 and pos2 != -1 and str1[pos1:] == str2[pos2:]:
self.servicelist.addService(service)
self.servicelist.resetRoot()
def toggleMoveMode(self, select=False):
self.editMode = True
if self.movemode:
if self.entry_marked:
self.toggleMoveMarked() # unmark current entry
self.movemode = False
self.mutableList.flushChanges() # FIXME add check if changes was made
self.mutableList = None
self.setTitle(self.saved_title)
self.saved_title = None
self.servicelist.resetRoot()
self.servicelist.setCurrent(self.servicelist.getCurrent())
else:
self.mutableList = self.getMutableList()
self.movemode = True
select and self.toggleMoveMarked()
self.saved_title = self.getTitle()
pos = self.saved_title.find(')')
self.setTitle(self.saved_title[:pos+1] + ' ' + _("[move mode]") + self.saved_title[pos+1:])
self.servicelist.setCurrent(self.servicelist.getCurrent())
self["Service"].editmode = True
def handleEditCancel(self):
if self.movemode: #movemode active?
self.toggleMoveMode() # disable move mode
elif self.bouquet_mark_edit != OFF:
self.endMarkedEdit(True) # abort edit mode
def toggleMoveMarked(self):
if self.entry_marked:
self.servicelist.setCurrentMarked(False)
self.entry_marked = False
self.pathChangeDisabled = False # re-enable path change
else:
self.servicelist.setCurrentMarked(True)
self.entry_marked = True
self.pathChangeDisabled = True # no path change allowed in movemode
def doContext(self):
self.session.openWithCallback(self.exitContext, ChannelContextMenu, self)
def exitContext(self, close = False):
l = self["list"]
l.setFontsize()
l.setItemsPerPage()
l.setMode('MODE_TV')
if close:
self.cancel()
MODE_TV = 0
MODE_RADIO = 1
# type 1 = digital television service
# type 4 = nvod reference service (NYI)
# type 17 = MPEG-2 HD digital television service
# type 22 = advanced codec SD digital television
# type 24 = advanced codec SD NVOD reference service (NYI)
# type 25 = advanced codec HD digital television
# type 27 = advanced codec HD NVOD reference service (NYI)
# type 2 = digital radio sound service
# type 10 = advanced codec digital radio sound service
# type 31 = High Efficiency Video Coding digital television
service_types_tv = '1:7:1:0:0:0:0:0:0:0:(type == 1) || (type == 17) || (type == 22) || (type == 25) || (type == 31) || (type == 134) || (type == 195)'
service_types_radio = '1:7:2:0:0:0:0:0:0:0:(type == 2) || (type == 10)'
class ChannelSelectionBase(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self["key_red"] = Button(_("All"))
self["key_green"] = Button(_("Reception lists"))
self["key_yellow"] = Button(_("Providers"))
self["key_blue"] = Button(_("Favourites"))
self["list"] = ServiceList(self)
self.servicelist = self["list"]
self.numericalTextInput = NumericalTextInput(handleTimeout=False)
self.servicePathTV = [ ]
self.servicePathRadio = [ ]
self.servicePath = [ ]
self.history = [ ]
self.rootChanged = False
self.startRoot = None
self.selectionNumber = ""
self.clearNumberSelectionNumberTimer = eTimer()
self.clearNumberSelectionNumberTimer.callback.append(self.clearNumberSelectionNumber)
self.protectContextMenu = True
self.mode = MODE_TV
self.dopipzap = False
self.pathChangeDisabled = False
self.movemode = False
self.showSatDetails = False
self["ChannelSelectBaseActions"] = NumberActionMap(["ChannelSelectBaseActions", "NumberActions", "InputAsciiActions"],
{
"showFavourites": self.showFavourites,
"showAllServices": self.showAllServices,
"showProviders": self.showProviders,
"showSatellites": boundFunction(self.showSatellites, changeMode=True),
"nextBouquet": self.nextBouquet,
"prevBouquet": self.prevBouquet,
"nextMarker": self.nextMarker,
"prevMarker": self.prevMarker,
"gotAsciiCode": self.keyAsciiCode,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal
})
self.maintitle = _("Channel selection")
self.recallBouquetMode()
self.onShown.append(self.applyKeyMap)
def applyKeyMap(self):
if config.usage.show_channel_jump_in_servicelist.value == "alpha":
self.numericalTextInput.setUseableChars(u'abcdefghijklmnopqrstuvwxyz1234567890')
else:
self.numericalTextInput.setUseableChars(u'1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ')
def getBouquetNumOffset(self, bouquet):
if not config.usage.multibouquet.value:
return 0
str = bouquet.toString()
offset = 0
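# Derive the offset from the first service in the bouquet that carries a
# channel number: numbering inside this bouquet starts at that number.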
if 'userbouquet.' in bouquet.toCompareString():
serviceHandler = eServiceCenter.getInstance()
servicelist = serviceHandler.list(bouquet)
if servicelist is not None:
while True:
serviceIterator = servicelist.getNext()
if not serviceIterator.valid(): #check if end of list
break
number = serviceIterator.getChannelNum()
if number > 0:
offset = number - 1
break
return offset
def recallBouquetMode(self):
if self.mode == MODE_TV:
self.service_types = service_types_tv
if config.usage.multibouquet.value:
self.bouquet_rootstr = '1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "bouquets.tv" ORDER BY bouquet'
else:
self.bouquet_rootstr = '%s FROM BOUQUET "userbouquet.favourites.tv" ORDER BY bouquet'% self.service_types
else:
self.service_types = service_types_radio
if config.usage.multibouquet.value:
self.bouquet_rootstr = '1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "bouquets.radio" ORDER BY bouquet'
else:
self.bouquet_rootstr = '%s FROM BOUQUET "userbouquet.favourites.radio" ORDER BY bouquet'% self.service_types
self.bouquet_root = eServiceReference(self.bouquet_rootstr)
def setTvMode(self):
self.mode = MODE_TV
self.servicePath = self.servicePathTV
self.recallBouquetMode()
# title = self.maintitle
# pos = title.find(" (")
# if pos != -1:
# title = title[:pos]
title = _(' (TV)')
self.setTitle(title)
def setRadioMode(self):
self.mode = MODE_RADIO
self.servicePath = self.servicePathRadio
self.recallBouquetMode()
# title = self.maintitle
# pos = title.find(" (")
# if pos != -1:
# title = title[:pos]
title = _(' (Radio)')
self.setTitle(title)
def setRoot(self, root, justSet=False):
if self.startRoot is None:
self.startRoot = self.getRoot()
path = root.getPath()
isBouquet = 'FROM BOUQUET' in path and (root.flags & eServiceReference.isDirectory)
inBouquetRootList = 'FROM BOUQUET "bouquets.' in path #FIXME HACK
if not inBouquetRootList and isBouquet:
self.servicelist.setMode(ServiceList.MODE_FAVOURITES)
else:
self.servicelist.setMode(ServiceList.MODE_NORMAL)
self.servicelist.setRoot(root, justSet)
self.rootChanged = True
self.buildTitleString()
def removeModeStr(self, str):
if self.mode == MODE_TV:
pos = str.find(_(' (TV)'))
else:
pos = str.find(_(' (Radio)'))
if pos != -1:
return str[:pos]
return str
def getServiceName(self, ref):
str = self.removeModeStr(ServiceReference(ref).getServiceName())
if 'User - bouquets' in str:
return _('User - bouquets')
if not str:
pathstr = ref.getPath()
if 'FROM PROVIDERS' in pathstr:
return _('Provider')
if 'FROM SATELLITES' in pathstr:
return _('Reception lists')
if ') ORDER BY name' in pathstr:
return _('All')
return str
def buildTitleString(self):
titleStr = self.getTitle()
nameStr = ''
pos = titleStr.find(']')
if pos == -1:
pos = titleStr.find(')')
if pos != -1:
titleStr = titleStr[:pos+1]
if titleStr.find(' (TV)') != -1:
titleStr = titleStr[-5:]
elif titleStr.find(' (Radio)') != -1:
titleStr = titleStr[-8:]
Len = len(self.servicePath)
if Len > 0:
base_ref = self.servicePath[0]
if Len > 1:
end_ref = self.servicePath[Len - 1]
else:
end_ref = None
nameStr = self.getServiceName(base_ref)
# titleStr += ' - ' + nameStr
if end_ref is not None:
# if Len > 2:
# titleStr += '/../'
# else:
# titleStr += '/'
nameStr = self.getServiceName(end_ref)
titleStr += nameStr
self.setTitle(titleStr)
def moveUp(self):
self.servicelist.moveUp()
def moveDown(self):
self.servicelist.moveDown()
def moveTop(self):
self.servicelist.moveTop()
def moveEnd(self):
self.servicelist.moveEnd()
def clearPath(self):
del self.servicePath[:]
def enterPath(self, ref, justSet=False):
self.servicePath.append(ref)
self.setRoot(ref, justSet)
def enterUserbouquet(self, root, save_root=True):
self.clearPath()
self.recallBouquetMode()
if self.bouquet_root:
self.enterPath(self.bouquet_root)
self.enterPath(root)
self.startRoot = None
if save_root:
self.saveRoot()
def pathUp(self, justSet=False):
prev = self.servicePath.pop()
if self.servicePath:
current = self.servicePath[-1]
self.setRoot(current, justSet)
if not justSet:
self.setCurrentSelection(prev)
return prev
def isBasePathEqual(self, ref):
if len(self.servicePath) > 1 and self.servicePath[0] == ref:
return True
return False
def isPrevPathEqual(self, ref):
length = len(self.servicePath)
if length > 1 and self.servicePath[length - 2] == ref:
return True
return False
def preEnterPath(self, refstr):
return False
def showAllServices(self):
self["key_green"].setText(_("Reception lists"))
if not self.pathChangeDisabled:
refstr = '%s ORDER BY name'%(self.service_types)
if not self.preEnterPath(refstr):
ref = eServiceReference(refstr)
currentRoot = self.getRoot()
if currentRoot is None or currentRoot != ref:
self.clearPath()
self.enterPath(ref)
playingref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if playingref:
self.setCurrentSelectionAlternative(playingref)
def showSatellites(self, changeMode=False):
if not self.pathChangeDisabled:
refstr = '%s FROM SATELLITES ORDER BY satellitePosition'%(self.service_types)
if self.showSatDetails:
self["key_green"].setText(_("Simple"))
else:
self["key_green"].setText(_("Extended"))
if not self.preEnterPath(refstr):
ref = eServiceReference(refstr)
justSet = False
prev = None
if self.isBasePathEqual(ref):
if self.isPrevPathEqual(ref):
justSet = True
prev = self.pathUp(justSet)
else:
currentRoot = self.getRoot()
if currentRoot is None or currentRoot != ref:
justSet = True
self.clearPath()
self.enterPath(ref, True)
if changeMode and currentRoot and currentRoot == ref:
self.showSatDetails = not self.showSatDetails
justSet = True
self.clearPath()
self.enterPath(ref, True)
if self.showSatDetails:
self["key_green"].setText(_("Simple"))
else:
self["key_green"].setText(_("Extended"))
if justSet:
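# Rebuild the satellite overview: enumerate every entry below the
# "FROM SATELLITES" root and label it with its orbital position; cable and
# terrestrial entries (pseudo-positions) are collected and added after the
# list has been sorted, together with a "Current transponder" entry.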
addCableAndTerrestrialLater = []
serviceHandler = eServiceCenter.getInstance()
servicelist = serviceHandler.list(ref)
if servicelist is not None:
while True:
service = servicelist.getNext()
if not service.valid(): #check if end of list
break
unsigned_orbpos = service.getUnsignedData(4) >> 16
orbpos = service.getData(4) >> 16
if orbpos < 0:
orbpos += 3600
if "FROM PROVIDER" in service.getPath():
service_type = self.showSatDetails and _("Providers")
elif ("flags == %d" %(FLAG_SERVICE_NEW_FOUND)) in service.getPath():
service_type = self.showSatDetails and _("New")
else:
service_type = _("Services")
if service_type:
if unsigned_orbpos == 0xFFFF: #Cable
service_name = _("Cable")
addCableAndTerrestrialLater.append(("%s - %s" % (service_name, service_type), service.toString()))
elif unsigned_orbpos == 0xEEEE: #Terrestrial
service_name = _("Terrestrial")
addCableAndTerrestrialLater.append(("%s - %s" % (service_name, service_type), service.toString()))
else:
try:
service_name = str(nimmanager.getSatDescription(orbpos))
except:
if orbpos > 1800: # west
orbpos = 3600 - orbpos
h = _("W")
else:
h = _("E")
service_name = ("%d.%d" + h) % (orbpos / 10, orbpos % 10)
service.setName("%s - %s" % (service_name, service_type))
self.servicelist.addService(service)
cur_ref = self.session.nav.getCurrentlyPlayingServiceReference()
self.servicelist.l.sort()
if cur_ref:
pos = self.service_types.rfind(':')
refstr = '%s (channelID == %08x%04x%04x) && %s ORDER BY name' %(self.service_types[:pos+1],
cur_ref.getUnsignedData(4), # NAMESPACE
cur_ref.getUnsignedData(2), # TSID
cur_ref.getUnsignedData(3), # ONID
self.service_types[pos+1:])
ref = eServiceReference(refstr)
ref.setName(_("Current transponder"))
self.servicelist.addService(ref, beforeCurrent=True)
for (service_name, service_ref) in addCableAndTerrestrialLater:
ref = eServiceReference(service_ref)
ref.setName(service_name)
self.servicelist.addService(ref, beforeCurrent=True)
self.servicelist.l.FillFinished()
if prev is not None:
self.setCurrentSelection(prev)
elif cur_ref:
refstr = cur_ref.toString()
op = "".join(refstr.split(':', 10)[6:7])
if len(op) >= 4:
hop = int(op[:-4],16)
if len(op) >= 7 and not op.endswith('0000'):
op = op[:-4] + '0000'
refstr = '1:7:0:0:0:0:%s:0:0:0:(satellitePosition == %s) && %s ORDER BY name' % (op, hop, self.service_types[self.service_types.rfind(':')+1:])
self.setCurrentSelectionAlternative(eServiceReference(refstr))
def showProviders(self):
self["key_green"].setText(_("Reception lists"))
if not self.pathChangeDisabled:
refstr = '%s FROM PROVIDERS ORDER BY name'%(self.service_types)
if not self.preEnterPath(refstr):
ref = eServiceReference(refstr)
if self.isBasePathEqual(ref):
self.pathUp()
else:
currentRoot = self.getRoot()
if currentRoot is None or currentRoot != ref:
self.clearPath()
self.enterPath(ref)
service = self.session.nav.getCurrentService()
if service:
info = service.info()
if info:
provider = info.getInfoString(iServiceInformation.sProvider)
refstr = '1:7:0:0:0:0:0:0:0:0:(provider == \"%s\") && %s ORDER BY name:%s' % (provider, self.service_types[self.service_types.rfind(':')+1:],provider)
self.setCurrentSelectionAlternative(eServiceReference(refstr))
def changeBouquet(self, direction):
if not self.pathChangeDisabled:
if len(self.servicePath) > 1:
ref = eServiceReference('%s FROM SATELLITES ORDER BY satellitePosition' % self.service_types)
if self.isBasePathEqual(ref):
self.showSatellites()
else:
self.pathUp()
if direction < 0:
self.moveUp()
else:
self.moveDown()
ref = self.getCurrentSelection()
self.enterPath(ref)
prev = None
root = self.getRoot()
for path in self.history:
if len(path) > 2 and path[1] == root:
prev = path[2]
if prev is not None:
self.setCurrentSelection(prev)
def inBouquet(self):
if self.servicePath and self.servicePath[0] == self.bouquet_root:
return True
return False
def atBegin(self):
return self.servicelist.atBegin()
def atEnd(self):
return self.servicelist.atEnd()
def nextBouquet(self):
if "reverseB" in config.usage.servicelist_cursor_behavior.value:
if config.usage.channelbutton_mode.value == '0' or config.usage.channelbutton_mode.value == '3':
self.changeBouquet(-1)
else:
self.servicelist.moveDown()
else:
if config.usage.channelbutton_mode.value == '0' or config.usage.channelbutton_mode.value == '3':
self.changeBouquet(+1)
else:
self.servicelist.moveUp()
def prevBouquet(self):
if "reverseB" in config.usage.servicelist_cursor_behavior.value:
if config.usage.channelbutton_mode.value == '0' or config.usage.channelbutton_mode.value == '3':
self.changeBouquet(+1)
else:
self.servicelist.moveUp()
else:
if config.usage.channelbutton_mode.value == '0' or config.usage.channelbutton_mode.value == '3':
self.changeBouquet(-1)
else:
self.servicelist.moveDown()
def showFavourites(self):
self["key_green"].setText(_("Reception lists"))
if not self.pathChangeDisabled:
if not self.preEnterPath(self.bouquet_rootstr):
if self.isBasePathEqual(self.bouquet_root):
self.pathUp()
else:
currentRoot = self.getRoot()
if currentRoot is None or currentRoot != self.bouquet_root:
self.clearPath()
self.enterPath(self.bouquet_root)
def keyNumberGlobal(self, number):
if config.usage.show_channel_jump_in_servicelist.value == "quick":
if self.isBasePathEqual(self.bouquet_root):
if hasattr(self, "editMode") and self.editMode:
if number == 2:
self.renameEntry()
if number == 6:
self.toggleMoveMode(select=True)
if number == 8:
self.removeCurrentEntry(bouquet=False)
else:
self.numberSelectionActions(number)
else:
current_root = self.getRoot()
if current_root and 'FROM BOUQUET "bouquets.' in current_root.getPath():
if hasattr(self, "editMode") and self.editMode:
if number == 2:
self.renameEntry()
if number == 6:
self.toggleMoveMode(select=True)
if number == 8:
self.removeCurrentEntry(bouquet=True)
else:
self.numberSelectionActions(number)
else:
unichar = self.numericalTextInput.getKey(number)
charstr = unichar.encode("utf-8")
if len(charstr) == 1:
self.servicelist.moveToChar(charstr[0])
else:
unichar = self.numericalTextInput.getKey(number)
charstr = unichar.encode("utf-8")
if len(charstr) == 1:
self.servicelist.moveToChar(charstr[0])
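# Digits pressed in quick succession are accumulated into a channel number;
# clearNumberSelectionNumberTimer resets the buffer after one second.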
def numberSelectionActions(self, number):
if not(hasattr(self, "movemode") and self.movemode):
if len(self.selectionNumber)>4:
self.clearNumberSelectionNumber()
self.selectionNumber = self.selectionNumber + str(number)
ref, bouquet = Screens.InfoBar.InfoBar.instance.searchNumber(int(self.selectionNumber), bouquet=self.getRoot())
if ref:
if not ref.flags & eServiceReference.isMarker:
self.enterUserbouquet(bouquet, save_root=False)
self.servicelist.setCurrent(ref)
self.clearNumberSelectionNumberTimer.start(1000, True)
else:
self.clearNumberSelectionNumber()
def clearNumberSelectionNumber(self):
self.clearNumberSelectionNumberTimer.stop()
self.selectionNumber = ""
def keyAsciiCode(self):
unichar = unichr(getPrevAsciiCode())
charstr = unichar.encode('utf-8')
if len(charstr) == 1:
self.servicelist.moveToChar(charstr[0])
def getRoot(self):
return self.servicelist.getRoot()
def getCurrentSelection(self):
return self.servicelist.getCurrent()
def setCurrentSelection(self, service):
if service:
self.servicelist.setCurrent(service, adjust=False)
def setCurrentSelectionAlternative(self, ref):
if self.bouquet_mark_edit == EDIT_ALTERNATIVES and not (ref.flags & eServiceReference.isDirectory):
for markedService in self.servicelist.getMarked():
markedService = eServiceReference(markedService)
self.setCurrentSelection(markedService)
if markedService == self.getCurrentSelection():
return
self.setCurrentSelection(ref)
def getBouquetList(self):
bouquets = []
serviceHandler = eServiceCenter.getInstance()
if config.usage.multibouquet.value:
list = serviceHandler.list(self.bouquet_root)
if list:
while True:
s = list.getNext()
if not s.valid():
break
if s.flags & eServiceReference.isDirectory and not s.flags & eServiceReference.isInvisible:
info = serviceHandler.info(s)
if info:
bouquets.append((info.getName(s), s))
return bouquets
else:
info = serviceHandler.info(self.bouquet_root)
if info:
bouquets.append((info.getName(self.bouquet_root), self.bouquet_root))
return bouquets
return None
def keyGoUp(self):
if len(self.servicePath) > 1:
if self.isBasePathEqual(self.bouquet_root):
self.showFavourites()
else:
ref = eServiceReference('%s FROM SATELLITES ORDER BY satellitePosition'% self.service_types)
if self.isBasePathEqual(ref):
self.showSatellites()
else:
ref = eServiceReference('%s FROM PROVIDERS ORDER BY name'% self.service_types)
if self.isBasePathEqual(ref):
self.showProviders()
else:
self.showAllServices()
def nextMarker(self):
self.servicelist.moveToNextMarker()
def prevMarker(self):
self.servicelist.moveToPrevMarker()
def gotoCurrentServiceOrProvider(self, ref):
str = ref.toString()
if _("Providers") in str:
service = self.session.nav.getCurrentService()
if service:
info = service.info()
if info:
provider = info.getInfoString(iServiceInformation.sProvider)
op = int(self.session.nav.getCurrentlyPlayingServiceOrGroup().toString().split(':')[6][:-4] or "0",16)
refstr = '1:7:0:0:0:0:0:0:0:0:(provider == \"%s\") && (satellitePosition == %s) && %s ORDER BY name:%s' % (provider, op, self.service_types[self.service_types.rfind(':')+1:],provider)
self.servicelist.setCurrent(eServiceReference(refstr))
elif not self.isBasePathEqual(self.bouquet_root) or self.bouquet_mark_edit == EDIT_ALTERNATIVES:
playingref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if playingref:
self.setCurrentSelectionAlternative(playingref)
HISTORYSIZE = 20
#config for lastservice
config.tv = ConfigSubsection()
config.tv.lastservice = ConfigText()
config.tv.lastroot = ConfigText()
config.radio = ConfigSubsection()
config.radio.lastservice = ConfigText()
config.radio.lastroot = ConfigText()
config.servicelist = ConfigSubsection()
config.servicelist.lastmode = ConfigText(default='tv')
config.servicelist.startupservice = ConfigText()
config.servicelist.startupservice_standby = ConfigText()
config.servicelist.startupservice_onstandby = ConfigYesNo(default = False)
config.servicelist.startuproot = ConfigText()
config.servicelist.startupmode = ConfigText(default='tv')
class ChannelSelection(ChannelSelectionBase, ChannelSelectionEdit, ChannelSelectionEPG, SelectionEventInfo):
instance = None
def __init__(self, session):
ChannelSelectionBase.__init__(self, session)
ChannelSelectionEdit.__init__(self)
ChannelSelectionEPG.__init__(self)
SelectionEventInfo.__init__(self)
if config.usage.servicelist_mode.value == 'simple':
self.skinName = ["SlimChannelSelection","SimpleChannelSelection","ChannelSelection"]
else:
self.skinName = "ChannelSelection"
self["actions"] = ActionMap(["OkCancelActions", "TvRadioActions"],
{
"cancel": self.cancel,
"ok": self.channelSelected,
"keyRadio": self.toogleTvRadio,
"keyTV": self.toogleTvRadio,
})
self.radioTV = 0
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__evServiceStart,
iPlayableService.evEnd: self.__evServiceEnd
})
assert ChannelSelection.instance is None, "class ChannelSelection is a singleton class and just one instance of this class is allowed!"
ChannelSelection.instance = self
self.startServiceRef = None
self.history_tv = []
self.history_radio = []
self.history = self.history_tv
self.history_pos = 0
self.delhistpoint = None
if config.servicelist.startupservice.value and config.servicelist.startuproot.value:
config.servicelist.lastmode.value = config.servicelist.startupmode.value
if config.servicelist.lastmode.value == 'tv':
config.tv.lastservice.value = config.servicelist.startupservice.value
config.tv.lastroot.value = config.servicelist.startuproot.value
elif config.servicelist.lastmode.value == 'radio':
config.radio.lastservice.value = config.servicelist.startupservice.value
config.radio.lastroot.value = config.servicelist.startuproot.value
self.lastservice = config.tv.lastservice
self.lastroot = config.tv.lastroot
self.revertMode = None
config.usage.multibouquet.addNotifier(self.multibouquet_config_changed)
self.new_service_played = False
self.dopipzap = False
if config.misc.remotecontrol_text_support.value:
self.onExecBegin.append(self.asciiOff)
else:
self.onExecBegin.append(self.asciiOn)
self.mainScreenMode = None
self.mainScreenRoot = None
self.lastChannelRootTimer = eTimer()
self.lastChannelRootTimer.callback.append(self.__onCreate)
self.lastChannelRootTimer.start(100, True)
self.pipzaptimer = eTimer()
def asciiOn(self):
rcinput = eRCInput.getInstance()
rcinput.setKeyboardMode(rcinput.kmAscii)
def asciiOff(self):
rcinput = eRCInput.getInstance()
rcinput.setKeyboardMode(rcinput.kmNone)
def multibouquet_config_changed(self, val):
self.recallBouquetMode()
def __evServiceStart(self):
if self.dopipzap and hasattr(self.session, 'pip'):
self.servicelist.setPlayableIgnoreService(self.session.pip.getCurrentServiceReference() or eServiceReference())
else:
service = self.session.nav.getCurrentService()
if service:
info = service.info()
if info:
refstr = info.getInfoString(iServiceInformation.sServiceref)
self.servicelist.setPlayableIgnoreService(eServiceReference(refstr))
def __evServiceEnd(self):
self.servicelist.setPlayableIgnoreService(eServiceReference())
def setMode(self):
self.rootChanged = True
self.restoreRoot()
lastservice = eServiceReference(self.lastservice.value)
if lastservice.valid():
self.setCurrentSelection(lastservice)
def toogleTvRadio(self):
if self.radioTV == 1:
self.radioTV = 0
self.setModeTv()
else:
self.radioTV = 1
self.setModeRadio()
def setModeTv(self):
if self.revertMode is None and config.servicelist.lastmode.value == 'radio':
self.revertMode = MODE_RADIO
self.history = self.history_tv
self.lastservice = config.tv.lastservice
self.lastroot = config.tv.lastroot
config.servicelist.lastmode.value = 'tv'
self.setTvMode()
self.setMode()
def setModeRadio(self):
if self.revertMode is None and config.servicelist.lastmode.value == 'tv':
self.revertMode = MODE_TV
if config.usage.e1like_radio_mode.value:
self.history = self.history_radio
self.lastservice = config.radio.lastservice
self.lastroot = config.radio.lastroot
config.servicelist.lastmode.value = 'radio'
self.setRadioMode()
self.setMode()
def __onCreate(self):
if config.usage.e1like_radio_mode.value:
if config.servicelist.lastmode.value == 'tv':
self.setModeTv()
else:
self.setModeRadio()
else:
self.setModeTv()
lastservice = eServiceReference(self.lastservice.value)
if lastservice.valid():
self.zap()
def channelSelected(self):
ref = self.getCurrentSelection()
try:
doClose = not config.usage.servicelistpreview_mode.value or ref == self.session.nav.getCurrentlyPlayingServiceOrGroup()
except:
doClose = False
if self.startServiceRef is None and not doClose:
self.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
ref = self.getCurrentSelection()
if self.movemode and (self.isBasePathEqual(self.bouquet_root) or "userbouquet." in ref.toString()):
self.toggleMoveMarked()
elif (ref.flags & eServiceReference.flagDirectory) == eServiceReference.flagDirectory:
if Components.ParentalControl.parentalControl.isServicePlayable(ref, self.bouquetParentalControlCallback, self.session):
self.enterPath(ref)
self.gotoCurrentServiceOrProvider(ref)
elif self.bouquet_mark_edit != OFF:
if not (self.bouquet_mark_edit == EDIT_ALTERNATIVES and ref.flags & eServiceReference.isGroup):
self.doMark()
elif not (ref.flags & eServiceReference.isMarker or ref.type == -1):
root = self.getRoot()
if not root or not (root.flags & eServiceReference.isGroup):
self.zap(enable_pipzap=doClose, preview_zap=not doClose)
self.asciiOff()
if doClose:
if self.dopipzap:
self.zapBack()
self.startServiceRef = None
self.startRoot = None
self.correctChannelNumber()
self.movemode and self.toggleMoveMode()
self.editMode = False
self.protectContextMenu = True
self["key_green"].setText(_("Reception lists"))
self.close(ref)
def bouquetParentalControlCallback(self, ref):
self.enterPath(ref)
self.gotoCurrentServiceOrProvider(ref)
def togglePipzap(self):
assert self.session.pip
title = self.instance.getTitle()
pos = title.find(' (')
if pos != -1:
title = title[:pos]
if self.dopipzap:
# Mark PiP as inactive and effectively deactivate pipzap
self.hidePipzapMessage()
self.dopipzap = False
# Disable PiP if not playing a service
if self.session.pip.pipservice is None:
self.session.pipshown = False
del self.session.pip
self.__evServiceStart()
# Move to playing service
lastservice = eServiceReference(self.lastservice.value)
if lastservice.valid() and self.getCurrentSelection() != lastservice:
self.setCurrentSelection(lastservice)
title += _(' (TV)')
else:
# Mark PiP as active and effectively activate pipzap
self.showPipzapMessage()
self.dopipzap = True
self.__evServiceStart()
# Move to service playing in pip (will not work with subservices)
self.setCurrentSelection(self.session.pip.getCurrentService())
title += _(' (PiP)')
self.setTitle(title)
self.buildTitleString()
def showPipzapMessage(self):
time = config.usage.infobar_timeout.index
if time:
self.pipzaptimer.callback.append(self.hidePipzapMessage)
self.pipzaptimer.startLongTimer(time)
self.session.pip.active()
def hidePipzapMessage(self):
if self.pipzaptimer.isActive():
self.pipzaptimer.callback.remove(self.hidePipzapMessage)
self.pipzaptimer.stop()
self.session.pip.inactive()
#called from infoBar and channelSelected
def zap(self, enable_pipzap=False, preview_zap=False, checkParentalControl=True, ref=None):
self.curRoot = self.startRoot
nref = ref or self.getCurrentSelection()
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if enable_pipzap and self.dopipzap:
ref = self.session.pip.getCurrentService()
if ref is None or ref != nref:
nref = self.session.pip.resolveAlternatePipService(nref)
if nref and (not checkParentalControl or Components.ParentalControl.parentalControl.isServicePlayable(nref, boundFunction(self.zap, enable_pipzap=True, checkParentalControl=False))):
self.session.pip.playService(nref)
self.__evServiceStart()
self.showPipzapMessage()
else:
self.setStartRoot(self.curRoot)
self.setCurrentSelection(ref)
elif ref is None or ref != nref:
Screens.InfoBar.InfoBar.instance.checkTimeshiftRunning(boundFunction(self.zapCheckTimeshiftCallback, enable_pipzap, preview_zap, nref))
elif not preview_zap:
self.saveRoot()
self.saveChannel(nref)
config.servicelist.lastmode.save()
self.setCurrentSelection(nref)
if self.startServiceRef is None or nref != self.startServiceRef:
self.addToHistory(nref)
self.rootChanged = False
self.revertMode = None
def zapCheckTimeshiftCallback(self, enable_pipzap, preview_zap, nref, answer):
if answer:
self.new_service_played = True
self.session.nav.playService(nref)
if not preview_zap:
self.saveRoot()
self.saveChannel(nref)
config.servicelist.lastmode.save()
if self.startServiceRef is None or nref != self.startServiceRef:
self.addToHistory(nref)
if self.dopipzap:
self.setCurrentSelection(self.session.pip.getCurrentService())
else:
self.mainScreenMode = config.servicelist.lastmode.value
self.mainScreenRoot = self.getRoot()
self.revertMode = None
else:
Notifications.RemovePopup("Parental control")
self.setCurrentSelection(nref)
else:
self.setStartRoot(self.curRoot)
self.setCurrentSelection(self.session.nav.getCurrentlyPlayingServiceOrGroup())
if not preview_zap:
self.hide()
def newServicePlayed(self):
ret = self.new_service_played
self.new_service_played = False
return ret
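# Each history entry stores the full service path plus the played reference.
# Adding a reference drops any forward history beyond the delete point,
# removes older entries for the same reference and caps the list at
# HISTORYSIZE.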
def addToHistory(self, ref):
if self.delhistpoint is not None:
x = self.delhistpoint
while x <= len(self.history)-1:
del self.history[x]
self.delhistpoint = None
if self.servicePath is not None:
tmp = self.servicePath[:]
tmp.append(ref)
self.history.append(tmp)
hlen = len(self.history)
x = 0
while x < hlen - 1:
if self.history[x][-1] == ref:
del self.history[x]
hlen -= 1
else:
x += 1
if hlen > HISTORYSIZE:
del self.history[0]
hlen -= 1
self.history_pos = hlen - 1
def historyBack(self):
hlen = len(self.history)
currentPlayedRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if hlen > 0 and currentPlayedRef and self.history[self.history_pos][-1] != currentPlayedRef:
self.addToHistory(currentPlayedRef)
hlen = len(self.history)
if hlen > 1 and self.history_pos > 0:
self.history_pos -= 1
self.setHistoryPath()
self.delhistpoint = self.history_pos+1
def historyNext(self):
self.delhistpoint = None
hlen = len(self.history)
if hlen > 1 and self.history_pos < hlen - 1:
self.history_pos += 1
self.setHistoryPath()
def setHistoryPath(self, doZap=True):
path = self.history[self.history_pos][:]
ref = path.pop()
del self.servicePath[:]
self.servicePath += path
self.saveRoot()
root = path[-1]
cur_root = self.getRoot()
if cur_root and cur_root != root:
self.setRoot(root)
self.servicelist.setCurrent(ref)
if doZap:
self.session.nav.playService(ref)
if self.dopipzap:
self.setCurrentSelection(self.session.pip.getCurrentService())
else:
self.setCurrentSelection(ref)
self.saveChannel(ref)
def historyClear(self):
if self and self.servicelist:
for i in range(0, len(self.history)-1):
del self.history[0]
self.history_pos = len(self.history)-1
return True
return False
def historyZap(self, direction):
hlen = len(self.history)
if hlen < 1: return
mark = self.history_pos
selpos = self.history_pos + direction
if selpos < 0: selpos = 0
if selpos > hlen-1: selpos = hlen-1
serviceHandler = eServiceCenter.getInstance()
historylist = [ ]
for x in self.history:
info = serviceHandler.info(x[-1])
if info: historylist.append((info.getName(x[-1]), x[-1]))
self.session.openWithCallback(self.historyMenuClosed, HistoryZapSelector, historylist, selpos, mark, invert_items=True, redirect_buttons=True, wrap_around=True)
def historyMenuClosed(self, retval):
if not retval: return
hlen = len(self.history)
pos = 0
for x in self.history:
if x[-1] == retval: break
pos += 1
self.delhistpoint = pos+1
if pos < hlen and pos != self.history_pos:
tmp = self.history[pos]
# self.history.append(tmp)
# del self.history[pos]
self.history_pos = pos
self.setHistoryPath()
def saveRoot(self):
path = ''
for i in self.servicePath:
path += i.toString()
path += ';'
if path and path != self.lastroot.value:
if self.mode == MODE_RADIO and 'FROM BOUQUET "bouquets.tv"' in path:
self.setModeTv()
elif self.mode == MODE_TV and 'FROM BOUQUET "bouquets.radio"' in path:
self.setModeRadio()
self.lastroot.value = path
self.lastroot.save()
def restoreRoot(self):
tmp = [ x for x in self.lastroot.value.split(';') if x != '' ]
current = [ x.toString() for x in self.servicePath ]
if tmp != current or self.rootChanged:
self.clearPath()
cnt = 0
for i in tmp:
self.servicePath.append(eServiceReference(i))
cnt += 1
if cnt:
path = self.servicePath.pop()
self.enterPath(path)
else:
self.showFavourites()
self.saveRoot()
self.rootChanged = False
def preEnterPath(self, refstr):
if self.servicePath and self.servicePath[0] != eServiceReference(refstr):
pathstr = self.lastroot.value
if pathstr is not None and refstr in pathstr:
self.restoreRoot()
lastservice = eServiceReference(self.lastservice.value)
if lastservice.valid():
self.setCurrentSelection(lastservice)
return True
return False
def saveChannel(self, ref):
if ref is not None:
refstr = ref.toString()
else:
refstr = ''
if refstr != self.lastservice.value:
self.lastservice.value = refstr
self.lastservice.save()
def setCurrentServicePath(self, path, doZap=True):
if self.history:
self.history[self.history_pos] = path
else:
self.history.append(path)
self.setHistoryPath(doZap)
def getCurrentServicePath(self):
if self.history:
return self.history[self.history_pos]
return None
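# Toggle between the two most recently played services by swapping the
# current history entry with its neighbour and re-applying that path.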
def recallPrevService(self):
hlen = len(self.history)
currentPlayedRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if hlen > 0 and currentPlayedRef and self.history[self.history_pos][-1] != currentPlayedRef:
self.addToHistory(currentPlayedRef)
hlen = len(self.history)
if hlen > 1:
if self.history_pos == hlen - 1:
tmp = self.history[self.history_pos]
self.history[self.history_pos] = self.history[self.history_pos - 1]
self.history[self.history_pos - 1] = tmp
else:
tmp = self.history[self.history_pos + 1]
self.history[self.history_pos + 1] = self.history[self.history_pos]
self.history[self.history_pos] = tmp
self.setHistoryPath()
def cancel(self):
if self.revertMode is None:
self.restoreRoot()
if self.dopipzap:
# This unfortunately won't work with subservices
self.setCurrentSelection(self.session.pip.getCurrentService())
else:
lastservice = eServiceReference(self.lastservice.value)
if lastservice.valid() and self.getCurrentSelection() != lastservice:
self.setCurrentSelection(lastservice)
self.asciiOff()
if config.usage.servicelistpreview_mode.value:
self.zapBack()
self.correctChannelNumber()
self.editMode = False
self.protectContextMenu = True
self.close(None)
def zapBack(self):
currentPlayedRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if self.startServiceRef and (currentPlayedRef is None or currentPlayedRef != self.startServiceRef):
self.setStartRoot(self.startRoot)
self.new_service_played = True
self.session.nav.playService(self.startServiceRef)
self.saveChannel(self.startServiceRef)
else:
self.restoreMode()
self.startServiceRef = None
self.startRoot = None
if self.dopipzap:
# This unfortunately won't work with subservices
self.setCurrentSelection(self.session.pip.getCurrentService())
else:
lastservice = eServiceReference(self.lastservice.value)
if lastservice.valid() and self.getCurrentSelection() == lastservice:
pass # keep current selection
else:
self.setCurrentSelection(currentPlayedRef)
def setStartRoot(self, root):
if root:
if self.revertMode == MODE_TV:
self.setModeTv()
elif self.revertMode == MODE_RADIO:
self.setModeRadio()
self.revertMode = None
self.enterUserbouquet(root)
def restoreMode(self):
if self.revertMode == MODE_TV:
self.setModeTv()
elif self.revertMode == MODE_RADIO:
self.setModeRadio()
self.revertMode = None
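# After a zap (in particular a PiP zap) the selected list context may not
# match the service actually playing; temporarily switch back to the main
# screen's mode/root, re-emit evStart if the channel numbers differ, then
# restore the PiP context and title.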
def correctChannelNumber(self):
current_ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if self.dopipzap:
tmp_mode = config.servicelist.lastmode.value
tmp_root = self.getRoot()
tmp_ref = self.getCurrentSelection()
pip_ref = self.session.pip.getCurrentService()
if tmp_ref and pip_ref and tmp_ref != pip_ref:
self.revertMode = None
return
if self.mainScreenMode == "tv":
self.setModeTv()
elif self.mainScreenMode == "radio":
self.setModeRadio()
if self.mainScreenRoot:
self.setRoot(self.mainScreenRoot)
self.setCurrentSelection(current_ref)
selected_ref = self.getCurrentSelection()
if selected_ref and current_ref and selected_ref.getChannelNum() != current_ref.getChannelNum():
oldref = self.session.nav.currentlyPlayingServiceReference
if oldref and selected_ref == oldref or (oldref != current_ref and selected_ref == current_ref):
self.session.nav.currentlyPlayingServiceOrGroup = selected_ref
self.session.nav.pnav.navEvent(iPlayableService.evStart)
if self.dopipzap:
if tmp_mode == "tv":
self.setModeTv()
elif tmp_mode == "radio":
self.setModeRadio()
self.enterUserbouquet(tmp_root)
title = self.instance.getTitle()
pos = title.find(" (")
if pos != -1:
title = title[:pos]
title += _(" (PiP)")
self.setTitle(title)
self.buildTitleString()
if tmp_ref and pip_ref and tmp_ref.getChannelNum() != pip_ref.getChannelNum():
self.session.pip.currentService = tmp_ref
self.setCurrentSelection(tmp_ref)
self.revertMode = None
class PiPZapSelection(ChannelSelection):
def __init__(self, session):
ChannelSelection.__init__(self, session)
self.skinName = ["SlimChannelSelection","SimpleChannelSelection","ChannelSelection"]
self.startservice = None
self.pipzapfailed = None
if plugin_PiPServiceRelation_installed:
self.pipServiceRelation = getRelationDict()
else:
self.pipServiceRelation = {}
self.keymaptimer = eTimer()
self.keymaptimer.callback.append(self.enableKeyMap)
self.onShown.append(self.disableKeyMap)
def disableKeyMap(self):
if not hasattr(self.session, 'pip'):
if not self.pipzapfailed:
self.startservice = self.session.nav.getCurrentlyPlayingServiceReference() or self.servicelist.getCurrent()
else:
pass # keep the startservice chosen before the failed pipzap attempt
self.setCurrentSelection(self.startservice)
self.session.pip = self.session.instantiateDialog(PictureInPicture)
self.session.pip.show()
self.session.pip.playService(self.startservice)
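# Temporarily unbind the native listbox up/down actions while the PiP
# window is being set up; enableKeyMap() rebinds them via keymaptimer one
# second later.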
eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
self.keymaptimer.start(1000, True)
def enableKeyMap(self):
eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
def channelSelected(self):
ref = self.servicelist.getCurrent()
if (ref.flags & eServiceReference.flagDirectory) == eServiceReference.flagDirectory:
self.enterPath(ref)
self.gotoCurrentServiceOrProvider(ref)
elif not (ref.flags & eServiceReference.isMarker or ref.toString().startswith("-1")):
root = self.getRoot()
if not root or not (root.flags & eServiceReference.isGroup):
n_service = self.pipServiceRelation.get(str(ref), None)
if n_service is not None:
newservice = eServiceReference(n_service)
else:
newservice = ref
if not hasattr(self.session, 'pip'):
self.session.pip = self.session.instantiateDialog(PictureInPicture)
self.session.pip.show()
if self.session.pip.playService(newservice):
self.pipzapfailed = False
self.session.pipshown = True
self.session.pip.servicePath = self.getCurrentServicePath()
self.setStartRoot(self.curRoot)
self.saveRoot()
self.saveChannel(ref)
self.setCurrentSelection(ref)
if SystemInfo["LCDMiniTVPiP"] and int(config.lcd.minitvpipmode.value) >= 1:
print '[LCDMiniTV] enable PIP'
f = open("/proc/stb/lcd/mode", "w")
f.write(config.lcd.minitvpipmode.value)
f.close()
f = open("/proc/stb/vmpeg/1/dst_width", "w")
f.write("0")
f.close()
f = open("/proc/stb/vmpeg/1/dst_height", "w")
f.write("0")
f.close()
f = open("/proc/stb/vmpeg/1/dst_apply", "w")
f.write("1")
f.close()
self.close(True)
else:
self.pipzapfailed = True
self.session.pipshown = False
del self.session.pip
if SystemInfo["LCDMiniTVPiP"] and int(config.lcd.minitvpipmode.value) >= 1:
print '[LCDMiniTV] disable PIP'
with open("/proc/stb/lcd/mode", "w") as f:
    f.write(config.lcd.minitvmode.value)
self.close(None)
def cancel(self):
self.asciiOff()
if self.startservice and hasattr(self.session, 'pip') and self.session.pip.getCurrentService() and self.startservice == self.session.pip.getCurrentService():
self.session.pipshown = False
del self.session.pip
if SystemInfo["LCDMiniTVPiP"] and int(config.lcd.minitvpipmode.value) >= 1:
print '[LCDMiniTV] disable PIP'
with open("/proc/stb/lcd/mode", "w") as f:
    f.write(config.lcd.minitvmode.value)
self.correctChannelNumber()
self.close(None)
class RadioInfoBar(Screen):
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Radio Channel Selection"))
self['RdsDecoder'] = RdsDecoder(self.session.nav)
class ChannelSelectionRadio(ChannelSelectionBase, ChannelSelectionEdit, ChannelSelectionEPG, InfoBarBase, SelectionEventInfo):
ALLOW_SUSPEND = True
def __init__(self, session, infobar):
ChannelSelectionBase.__init__(self, session)
ChannelSelectionEdit.__init__(self)
ChannelSelectionEPG.__init__(self)
InfoBarBase.__init__(self)
SelectionEventInfo.__init__(self)
self.infobar = infobar
self.startServiceRef = None
self.onLayoutFinish.append(self.onCreate)
self.info = session.instantiateDialog(RadioInfoBar) # our simple infobar
self.info.setAnimationMode(0)
self["actions"] = ActionMap(["OkCancelActions", "TvRadioActions"],
{
"keyTV": self.cancel,
"keyRadio": self.cancel,
"cancel": self.cancel,
"ok": self.channelSelected,
})
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__evServiceStart,
iPlayableService.evEnd: self.__evServiceEnd
})
########## RDS Radiotext / Rass Support BEGIN
self.infobar = infobar # reference to real infobar (the one and only)
self["RdsDecoder"] = self.info["RdsDecoder"]
self["RdsActions"] = HelpableActionMap(self, "InfobarRdsActions",
{
"startRassInteractive": (self.startRassInteractive, _("View Rass interactive..."))
},-1)
self["RdsActions"].setEnabled(False)
infobar.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
self.onClose.append(self.__onClose)
self.onExecBegin.append(self.__onExecBegin)
self.onExecEnd.append(self.__onExecEnd)
def __onClose(self):
del self.info["RdsDecoder"]
self.session.deleteDialog(self.info)
self.infobar.rds_display.onRassInteractivePossibilityChanged.remove(self.RassInteractivePossibilityChanged)
lastservice = eServiceReference(config.tv.lastservice.value)
self.session.nav.playService(lastservice)
def startRassInteractive(self):
self.info.hide()
self.infobar.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
def RassInteractiveClosed(self):
self.info.show()
self.infobar.rass_interactive = None
self.infobar.RassSlidePicChanged()
def RassInteractivePossibilityChanged(self, state):
self['RdsActions'].setEnabled(state)
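########## RDS Radiotext / Rass Support END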
def __onExecBegin(self):
self.info.show()
def __onExecEnd(self):
self.info.hide()
def cancel(self):
self.info.hide()
self.close(None)
def __evServiceStart(self):
service = self.session.nav.getCurrentService()
if service:
info = service.info()
if info:
refstr = info.getInfoString(iServiceInformation.sServiceref)
self.servicelist.setPlayableIgnoreService(eServiceReference(refstr))
def __evServiceEnd(self):
self.servicelist.setPlayableIgnoreService(eServiceReference())
def saveRoot(self):
path = ''
for i in self.servicePathRadio:
path += i.toString()
path += ';'
if path and path != config.radio.lastroot.value:
config.radio.lastroot.value = path
config.radio.lastroot.save()
def restoreRoot(self):
tmp = [ x for x in config.radio.lastroot.value.split(';') if x != '' ]
current = [ x.toString() for x in self.servicePath ]
if tmp != current or self.rootChanged:
cnt = 0
for i in tmp:
self.servicePathRadio.append(eServiceReference(i))
cnt += 1
if cnt:
path = self.servicePathRadio.pop()
self.enterPath(path)
else:
self.showFavourites()
self.saveRoot()
self.rootChanged = False
def preEnterPath(self, refstr):
if self.servicePathRadio and self.servicePathRadio[0] != eServiceReference(refstr):
pathstr = config.radio.lastroot.value
if pathstr is not None and refstr in pathstr:
self.restoreRoot()
lastservice = eServiceReference(config.radio.lastservice.value)
if lastservice.valid():
self.setCurrentSelection(lastservice)
return True
return False
def onCreate(self):
self.setRadioMode()
self.restoreRoot()
lastservice = eServiceReference(config.radio.lastservice.value)
if lastservice.valid():
self.servicelist.setCurrent(lastservice)
self.session.nav.playService(lastservice)
else:
self.session.nav.stopService()
self.info.show()
def channelSelected(self, doClose=False): # doClose is unused here; plays the selected service
ref = self.getCurrentSelection()
if self.movemode:
self.toggleMoveMarked()
elif (ref.flags & eServiceReference.flagDirectory) == eServiceReference.flagDirectory:
self.enterPath(ref)
self.gotoCurrentServiceOrProvider(ref)
elif self.bouquet_mark_edit != OFF:
if not (self.bouquet_mark_edit == EDIT_ALTERNATIVES and ref.flags & eServiceReference.isGroup):
self.doMark()
elif not (ref.flags & eServiceReference.isMarker): # no marker
cur_root = self.getRoot()
if not cur_root or not (cur_root.flags & eServiceReference.isGroup):
playingref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if playingref is None or playingref != ref:
self.session.nav.playService(ref)
config.radio.lastservice.value = ref.toString()
config.radio.lastservice.save()
self.saveRoot()
def zapBack(self):
self.channelSelected()
class SimpleChannelSelection(ChannelSelectionBase):
def __init__(self, session, title, currentBouquet=False):
ChannelSelectionBase.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "TvRadioActions"],
{
"cancel": self.close,
"ok": self.channelSelected,
"keyRadio": self.setModeRadio,
"keyTV": self.setModeTv,
})
self.bouquet_mark_edit = OFF
self.title = title
self.currentBouquet = currentBouquet
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setModeTv()
if self.currentBouquet:
ref = Screens.InfoBar.InfoBar.instance.servicelist.getRoot()
if ref:
self.enterPath(ref)
self.gotoCurrentServiceOrProvider(ref)
def saveRoot(self):
pass
def keyRecord(self):
return 0
def channelSelected(self): # just return selected service
ref = self.getCurrentSelection()
if (ref.flags & eServiceReference.flagDirectory) == eServiceReference.flagDirectory:
self.enterPath(ref)
self.gotoCurrentServiceOrProvider(ref)
elif not (ref.flags & eServiceReference.isMarker):
self.close(ref)
def setModeTv(self):
self.setTvMode()
self.showFavourites()
def setModeRadio(self):
self.setRadioMode()
self.showFavourites()
class HistoryZapSelector(Screen):
def __init__(self, session, items=None, sel_item=0, mark_item=0, invert_items=False, redirect_buttons=False, wrap_around=True):
if not items: items = []
Screen.__init__(self, session)
self.redirectButton = redirect_buttons
self.invertItems = invert_items
if self.invertItems:
self.currentPos = len(items) - sel_item - 1
else:
self.currentPos = sel_item
self["actions"] = ActionMap(["OkCancelActions", "InfobarCueSheetActions"],
{
"ok": self.okbuttonClick,
"cancel": self.cancelClick,
"jumpPreviousMark": self.prev,
"jumpNextMark": self.next,
"toggleMark": self.okbuttonClick,
})
self.setTitle(_("History zap..."))
self.list = []
cnt = 0
serviceHandler = eServiceCenter.getInstance()
for x in items:
info = serviceHandler.info(x[-1])
if info:
serviceName = info.getName(x[-1])
if serviceName is None:
serviceName = ""
eventName = ""
descriptionName = ""
durationTime = ""
# if config.plugins.SetupZapSelector.event.value != "0":
event = info.getEvent(x[-1])
if event:
eventName = event.getEventName()
if eventName is None:
eventName = ""
else:
# strip age-rating markers such as "(18+)" or "16+" from the event name
for rating in ('(18+)', '18+', '(16+)', '16+', '(12+)', '12+', '(7+)', '7+', '(6+)', '6+', '(0+)', '0+'):
    eventName = eventName.replace(rating, '')
# if config.plugins.SetupZapSelector.event.value == "2":
descriptionName = event.getShortDescription()
if descriptionName is None or descriptionName == "":
descriptionName = event.getExtendedDescription()
if descriptionName is None:
descriptionName = ""
# if config.plugins.SetupZapSelector.duration.value:
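# Build "HH.MM - HH.MM (+N min)" for the list entry: minutes are counted from
# now to the event end, and the "+" prefix marks events that are still running.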
begin = event.getBeginTime()
if begin is not None:
end = begin + event.getDuration()
remaining = (end - int(time())) / 60
prefix = ""
if remaining > 0:
prefix = "+"
local_begin = localtime(begin)
local_end = localtime(end)
durationTime = _("%02d.%02d - %02d.%02d (%s%d min)") % (local_begin[3],local_begin[4],local_end[3],local_end[4],prefix, remaining)
png = ""
picon = getPiconName(str(ServiceReference(x[1])))
if picon != "":
png = loadPNG(picon)
if self.invertItems:
self.list.insert(0, (x[1], "»" if cnt == mark_item else "", x[0], eventName, descriptionName, durationTime, png))
else:
self.list.append((x[1], "»" if cnt == mark_item else "", x[0], eventName, descriptionName, durationTime, png))
cnt += 1
self["menu"] = List(self.list, enableWrapAround=wrap_around)
self.onShown.append(self.__onShown)
def __onShown(self):
self["menu"].index = self.currentPos
def prev(self):
if self.redirectButton:
self.down()
else:
self.up()
def next(self):
if self.redirectButton:
self.up()
else:
self.down()
def up(self):
self["menu"].selectPrevious()
def down(self):
self["menu"].selectNext()
def getCurrent(self):
cur = self["menu"].current
return cur and cur[0]
def okbuttonClick(self):
self.close(self.getCurrent())
def cancelClick(self):
self.close(None)
| gpl-2.0 | -5,276,986,568,105,051,000 | 35.816934 | 302 | 0.714351 | false |
justinlulejian/fah-gae | lib/requests/packages/chardet/langcyrillicmodel.py | 2762 | 17725 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# KOI8-R language model
# Character Mapping Table:
KOI8R_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
)
win1251_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
)
latin5_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
macCyrillic_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
)
IBM855_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
)
IBM866_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 97.6601%
# first 1024 sequences: 2.3389%
# rest sequences: 0.1237%
# negative sequences: 0.0009%
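# Explanatory note (not part of the original chardet data, assumption based on
# chardet's single-byte prober design): each *CharToOrderMap maps a raw byte to
# a frequency order, and RussianLangModel scores consecutive order pairs from
# 3 (very frequent) down to 0 (effectively never seen); the share of
# high-scoring pairs in a text drives the per-charset confidence.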
RussianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
)
Koi8rModel = {
'charToOrderMap': KOI8R_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "KOI8-R"
}
Win1251CyrillicModel = {
'charToOrderMap': win1251_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "windows-1251"
}
Latin5CyrillicModel = {
'charToOrderMap': latin5_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-5"
}
MacCyrillicModel = {
'charToOrderMap': macCyrillic_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "MacCyrillic"
}
Ibm866Model = {
'charToOrderMap': IBM866_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "IBM866"
}
Ibm855Model = {
'charToOrderMap': IBM855_CharToOrderMap,
'precedenceMatrix': RussianLangModel,
'mTypicalPositiveRatio': 0.976601,
'keepEnglishLetter': False,
'charsetName': "IBM855"
}
# flake8: noqa
| mit | 558,245,759,205,829,570 | 52.87538 | 70 | 0.582398 | false |
Johnzero/OE7 | openerp/report/render/rml2pdf/customfonts.py | 57 | 6317 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 P. Christeas, Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2013 OpenERP SA. (http://www.openerp.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import glob
import logging
import os
import platform
from reportlab import rl_config
from openerp.tools import config
#.apidoc title: TTF Font Table
"""This module allows the mapping of some system-available TTF fonts to
the reportlab engine.
This file could be customized per distro; most Linux/Unix distros
should have the same filenames, so only the code below may need changes.
Due to the awful configuration that ships with reportlab on many Linux
and Ubuntu distros, we have to override the search path, too.
"""
_logger = logging.getLogger(__name__)
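# Each CustomTTFonts entry maps a built-in PDF base font to a substitute TTF
# face as (base font name, TTF family name, TTF filename, style mode); the
# tuples feed rmldoc.setTTFontMapping() in SetCustomFonts() below.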
CustomTTFonts = [ ('Helvetica',"DejaVu Sans", "DejaVuSans.ttf", 'normal'),
('Helvetica',"DejaVu Sans Bold", "DejaVuSans-Bold.ttf", 'bold'),
('Helvetica',"DejaVu Sans Oblique", "DejaVuSans-Oblique.ttf", 'italic'),
('Helvetica',"DejaVu Sans BoldOblique", "DejaVuSans-BoldOblique.ttf", 'bolditalic'),
('Times',"Liberation Serif", "LiberationSerif-Regular.ttf", 'normal'),
('Times',"Liberation Serif Bold", "LiberationSerif-Bold.ttf", 'bold'),
('Times',"Liberation Serif Italic", "LiberationSerif-Italic.ttf", 'italic'),
('Times',"Liberation Serif BoldItalic", "LiberationSerif-BoldItalic.ttf", 'bolditalic'),
('Times-Roman',"Liberation Serif", "LiberationSerif-Regular.ttf", 'normal'),
('Times-Roman',"Liberation Serif Bold", "LiberationSerif-Bold.ttf", 'bold'),
('Times-Roman',"Liberation Serif Italic", "LiberationSerif-Italic.ttf", 'italic'),
('Times-Roman',"Liberation Serif BoldItalic", "LiberationSerif-BoldItalic.ttf", 'bolditalic'),
('Courier',"FreeMono", "FreeMono.ttf", 'normal'),
('Courier',"FreeMono Bold", "FreeMonoBold.ttf", 'bold'),
('Courier',"FreeMono Oblique", "FreeMonoOblique.ttf", 'italic'),
('Courier',"FreeMono BoldOblique", "FreeMonoBoldOblique.ttf", 'bolditalic'),
# Sun-ExtA can be downloaded from http://okuc.net/SunWb/
('Sun-ExtA',"Sun-ExtA", "Sun-ExtA.ttf", 'normal'),
]
TTFSearchPath_Linux = [
'/usr/share/fonts/truetype', # SuSE
'/usr/share/fonts/dejavu', '/usr/share/fonts/liberation', # Fedora, RHEL
'/usr/share/fonts/truetype/*', # Ubuntu,
'/usr/share/fonts/TTF/*', # at Mandriva/Mageia
'/usr/share/fonts/TTF', # Arch Linux
]
TTFSearchPath_Windows = [
'c:/winnt/fonts',
'c:/windows/fonts'
]
TTFSearchPath_Darwin = [
#mac os X - from
#http://developer.apple.com/technotes/tn/tn2024.html
'~/Library/Fonts',
'/Library/Fonts',
'/Network/Library/Fonts',
'/System/Library/Fonts',
]
TTFSearchPathMap = {
'Darwin': TTFSearchPath_Darwin,
'Windows': TTFSearchPath_Windows,
'Linux': TTFSearchPath_Linux,
}
# ----- The code below is less distro-specific, please avoid editing! -------
__foundFonts = None
def FindCustomFonts():
"""Fill the __foundFonts list with those filenames, whose fonts
can be found in the reportlab ttf font path.
This process needs only be done once per loading of this module,
it is cached. But, if the system admin adds some font in the
meanwhile, the server must be restarted eventually.
"""
dirpath = []
global __foundFonts
__foundFonts = {}
searchpath = []
if config.get('fonts_search_path'):
searchpath += map(str.strip, config.get('fonts_search_path').split(','))
local_platform = platform.system()
if local_platform in TTFSearchPathMap:
searchpath += TTFSearchPathMap[local_platform]
# Append the original search path of reportlab (at the end)
searchpath += rl_config.TTFSearchPath
# Perform the search for font files ourselves, as reportlab's
# TTFOpenFile is not very good at it.
for dirglob in searchpath:
dirglob = os.path.expanduser(dirglob)
for dirname in glob.iglob(dirglob):
abp = os.path.abspath(dirname)
if os.path.isdir(abp):
dirpath.append(abp)
for name, font, filename, mode in CustomTTFonts:
if filename in __foundFonts:
continue
for d in dirpath:
abs_filename = os.path.join(d, filename)
if os.path.exists(abs_filename):
_logger.debug("Found font %s at %s", filename, abs_filename)
__foundFonts[filename] = abs_filename
break
def SetCustomFonts(rmldoc):
""" Map some font names to the corresponding TTF fonts
The ttf font may not even have the same name, as in
Times -> Liberation Serif.
This function is called once per report, so it should
avoid system-wide processing (cache it, instead).
"""
global __foundFonts
if __foundFonts is None:
FindCustomFonts()
for name, font, filename, mode in CustomTTFonts:
if os.path.isabs(filename) and os.path.exists(filename):
rmldoc.setTTFontMapping(name, font, filename, mode)
elif filename in __foundFonts:
rmldoc.setTTFontMapping(name, font, __foundFonts[filename], mode)
return True
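# Usage sketch (assumption based on the rml2pdf pipeline): the report engine is
# expected to call this once per report before rendering, e.g.
#
#     from openerp.report.render.rml2pdf import customfonts
#     customfonts.SetCustomFonts(rmldoc)  # rmldoc must expose setTTFontMapping()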
#eof
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -1,474,445,017,707,548,400 | 38.48125 | 102 | 0.634162 | false |
yaqiyang/autorest | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/Lro/autorestlongrunningoperationtestservice/operations/lr_os_custom_header_operations.py | 4 | 15472 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
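# Illustrative usage sketch (assumption, not part of the generated code): each
# method below returns an AzureOperationPoller unless raw=True, so a caller
# would typically do
#
#     poller = client.lr_os_custom_header.put_async_retry_succeeded(product)
#     result = poller.result()  # blocks until the LRO reaches a terminal state
#
# where "client" and the attribute name are assumptions based on this module's
# path; the required x-ms-client-request-id header is passed via custom_headers.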
class LROsCustomHeaderOperations(object):
"""LROsCustomHeaderOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def put_async_retry_succeeded(
self, product=None, custom_headers=None, raw=False, **operation_config):
"""x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is
required message header for all requests. Long running put request,
service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in
the Azure-AsyncOperation header for operation status.
:param product: Product to put
:type product: :class:`Product
<fixtures.acceptancetestslro.models.Product>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns :class:`Product
<fixtures.acceptancetestslro.models.Product>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/lro/customheader/putasync/retry/succeeded'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
if product is not None:
body_content = self._serialize.body(product, 'Product')
else:
body_content = None
# Construct and send request
def long_running_send():
request = self._client.put(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
header_dict = {}
if response.status_code == 200:
deserialized = self._deserialize('Product', response)
header_dict = {
'Azure-AsyncOperation': 'str',
'Location': 'str',
'Retry-After': 'int',
}
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
client_raw_response.add_headers(header_dict)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def put201_creating_succeeded200(
self, product=None, custom_headers=None, raw=False, **operation_config):
"""x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is
required message header for all requests. Long running put request,
service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until
the last poll returns a ‘200’ with ProvisioningState=’Succeeded’.
:param product: Product to put
:type product: :class:`Product
<fixtures.acceptancetestslro.models.Product>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns :class:`Product
<fixtures.acceptancetestslro.models.Product>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/lro/customheader/put/201/creating/succeeded/200'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
if product is not None:
body_content = self._serialize.body(product, 'Product')
else:
body_content = None
# Construct and send request
def long_running_send():
request = self._client.put(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Product', response)
if response.status_code == 201:
deserialized = self._deserialize('Product', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def post202_retry200(
self, product=None, custom_headers=None, raw=False, **operation_config):
"""x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is
required message header for all requests. Long running post request,
service returns a 202 to the initial request, with 'Location' and
'Retry-After' headers, Polls return a 200 with a response body after
success.
:param product: Product to put
:type product: :class:`Product
<fixtures.acceptancetestslro.models.Product>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/lro/customheader/post/202/retry/200'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
if product is not None:
body_content = self._serialize.body(product, 'Product')
else:
body_content = None
# Construct and send request
def long_running_send():
request = self._client.post(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'Location': 'str',
'Retry-After': 'int',
})
return client_raw_response
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def post_async_retry_succeeded(
self, product=None, custom_headers=None, raw=False, **operation_config):
"""x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 is
required message header for all requests. Long running post request,
service returns a 202 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in
the Azure-AsyncOperation header for operation status.
:param product: Product to put
:type product: :class:`Product
<fixtures.acceptancetestslro.models.Product>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/lro/customheader/postasync/retry/succeeded'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
if product is not None:
body_content = self._serialize.body(product, 'Product')
else:
body_content = None
# Construct and send request
def long_running_send():
request = self._client.post(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'Azure-AsyncOperation': 'str',
'Location': 'str',
'Retry-After': 'int',
})
return client_raw_response
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
| mit | -392,949,431,673,902,700 | 39.135065 | 140 | 0.61649 | false |
fedorpatlin/ansible | lib/ansible/modules/windows/win_firewall_rule.py | 41 | 3499 | #!/usr/bin/env python
# (c) 2014, Timothy Vandenbrande <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_firewall_rule
version_added: "2.0"
author: Timothy Vandenbrande
short_description: Windows firewall automation
description:
- allows you to create/remove/update firewall rules
options:
enable:
description:
- is this firewall rule enabled or disabled
default: true
required: false
state:
description:
- should this rule be added or removed
default: "present"
required: false
choices: ['present', 'absent']
name:
description:
- the rules name
default: null
required: true
direction:
description:
- is this rule for inbound or outbound traffic
default: null
required: true
choices: ['in', 'out']
action:
description:
- what to do with the items this rule is for
default: null
required: true
choices: ['allow', 'block', 'bypass']
description:
description:
- description for the firewall rule
default: null
required: false
localip:
description:
- the local ip address this rule applies to
default: 'any'
required: false
remoteip:
description:
- the remote ip address/range this rule applies to
default: 'any'
required: false
localport:
description:
- the local port this rule applies to
default: 'any'
required: false
remoteport:
description:
- the remote port this rule applies to
default: 'any'
required: false
program:
description:
- the program this rule applies to
default: null
required: false
service:
description:
- the service this rule applies to
default: 'any'
required: false
protocol:
description:
- the protocol this rule applies to
default: 'any'
required: false
profile:
description:
- the profile this rule applies to, e.g. Domain,Private,Public
default: 'any'
required: false
force:
description:
- Enforces the change if a rule with different values exists
default: false
required: false
'''
EXAMPLES = r'''
- name: Firewall rule to allow smtp on TCP port 25
action: win_firewall_rule
args:
name: smtp
enable: yes
state: present
localport: 25
action: allow
direction: In
protocol: TCP
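
# Hypothetical follow-up task (the rule name is assumed from the example above):
- name: Remove the smtp firewall rule
  win_firewall_rule:
    name: smtp
    state: absent
    action: allow
    direction: in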
'''
| gpl-3.0 | 1,125,677,097,592,265,100 | 26.335938 | 74 | 0.605316 | false |
gannetson/django | django/core/serializers/xml_serializer.py | 16 | 15345 | """
XML serializer.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from xml.dom import pulldom
from xml.sax import handler
from xml.sax.expatreader import ExpatParser as _ExpatParser
from django.apps import apps
from django.conf import settings
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils.encoding import smart_text
from django.utils.xmlutils import SimplerXMLGenerator
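# Usage sketch (standard Django serializer entry points, shown for orientation):
#
#     from django.core import serializers
#     xml_data = serializers.serialize("xml", SomeModel.objects.all())
#     for deserialized in serializers.deserialize("xml", xml_data):
#         deserialized.save()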
class Serializer(base.Serializer):
"""
Serializes a QuerySet to XML.
"""
def indent(self, level):
if self.options.get('indent') is not None:
self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent') * level)
def start_serialization(self):
"""
Start serialization -- open the XML document and the root element.
"""
self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
self.xml.startDocument()
self.xml.startElement("django-objects", {"version": "1.0"})
def end_serialization(self):
"""
End serialization -- end the document.
"""
self.indent(0)
self.xml.endElement("django-objects")
self.xml.endDocument()
def start_object(self, obj):
"""
Called as each object is handled.
"""
if not hasattr(obj, "_meta"):
raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))
self.indent(1)
attrs = OrderedDict([("model", smart_text(obj._meta))])
if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
obj_pk = obj._get_pk_val()
if obj_pk is not None:
attrs['pk'] = smart_text(obj_pk)
self.xml.startElement("object", attrs)
def end_object(self, obj):
"""
Called after handling all fields for an object.
"""
self.indent(1)
self.xml.endElement("object")
def handle_field(self, obj, field):
"""
Called to handle each field on an object (except for ForeignKeys and
ManyToManyFields)
"""
self.indent(2)
self.xml.startElement("field", OrderedDict([
("name", field.name),
("type", field.get_internal_type()),
]))
# Get a "string version" of the object's data.
if getattr(obj, field.name) is not None:
self.xml.characters(field.value_to_string(obj))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_fk_field(self, obj, field):
"""
Called to handle a ForeignKey (we need to treat them slightly
differently from regular fields).
"""
self._start_relational_field(field)
related_att = getattr(obj, field.get_attname())
if related_att is not None:
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
related = getattr(obj, field.name)
# If related object has a natural key, use it
related = related.natural_key()
# Iterable natural keys are rolled out as subelements
for key_value in related:
self.xml.startElement("natural", {})
self.xml.characters(smart_text(key_value))
self.xml.endElement("natural")
else:
self.xml.characters(smart_text(related_att))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_m2m_field(self, obj, field):
"""
Called to handle a ManyToManyField. Related objects are only
serialized as references to the object's PK (i.e. the related *data*
is not dumped, just the relation).
"""
if field.remote_field.through._meta.auto_created:
self._start_relational_field(field)
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
# If the objects in the m2m have a natural key, use it
def handle_m2m(value):
natural = value.natural_key()
# Iterable natural keys are rolled out as subelements
self.xml.startElement("object", {})
for key_value in natural:
self.xml.startElement("natural", {})
self.xml.characters(smart_text(key_value))
self.xml.endElement("natural")
self.xml.endElement("object")
else:
def handle_m2m(value):
self.xml.addQuickElement("object", attrs={
'pk': smart_text(value._get_pk_val())
})
for relobj in getattr(obj, field.name).iterator():
handle_m2m(relobj)
self.xml.endElement("field")
def _start_relational_field(self, field):
"""
Helper to output the <field> element for relational fields
"""
self.indent(2)
self.xml.startElement("field", OrderedDict([
("name", field.name),
("rel", field.remote_field.__class__.__name__),
("to", smart_text(field.remote_field.model._meta)),
]))
class Deserializer(base.Deserializer):
"""
Deserialize XML.
"""
def __init__(self, stream_or_string, **options):
super(Deserializer, self).__init__(stream_or_string, **options)
self.event_stream = pulldom.parse(self.stream, self._make_parser())
self.db = options.pop('using', DEFAULT_DB_ALIAS)
self.ignore = options.pop('ignorenonexistent', False)
def _make_parser(self):
"""Create a hardened XML parser (no custom/external entities)."""
return DefusedExpatParser()
def __next__(self):
for event, node in self.event_stream:
if event == "START_ELEMENT" and node.nodeName == "object":
self.event_stream.expandNode(node)
return self._handle_object(node)
raise StopIteration
def _handle_object(self, node):
"""
Convert an <object> node to a DeserializedObject.
"""
# Look up the model using the model loading mechanism. If this fails,
# bail.
Model = self._get_model_from_node(node, "model")
# Start building a data dictionary from the object.
data = {}
if node.hasAttribute('pk'):
data[Model._meta.pk.attname] = Model._meta.pk.to_python(
node.getAttribute('pk'))
# Also start building a dict of m2m data (this is saved as
# {m2m_accessor_attribute : [list_of_related_objects]})
m2m_data = {}
field_names = {f.name for f in Model._meta.get_fields()}
# Deserialize each field.
for field_node in node.getElementsByTagName("field"):
# If the field is missing the name attribute, bail (are you
# sensing a pattern here?)
field_name = field_node.getAttribute("name")
if not field_name:
raise base.DeserializationError("<field> node is missing the 'name' attribute")
# Get the field from the Model. This will raise a
# FieldDoesNotExist if, well, the field doesn't exist, which will
# be propagated correctly unless ignorenonexistent=True is used.
if self.ignore and field_name not in field_names:
continue
field = Model._meta.get_field(field_name)
# As is usually the case, relation fields get the special treatment.
if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
data[field.attname] = self._handle_fk_field_node(field_node, field)
else:
if field_node.getElementsByTagName('None'):
value = None
else:
value = field.to_python(getInnerText(field_node).strip())
data[field.name] = value
obj = base.build_instance(Model, data, self.db)
# Return a DeserializedObject so that the m2m data has a place to live.
return base.DeserializedObject(obj, m2m_data)
def _handle_fk_field_node(self, node, field):
"""
Handle a <field> node for a ForeignKey
"""
# Check if there is a child node named 'None', returning None if so.
if node.getElementsByTagName('None'):
return None
else:
if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
keys = node.getElementsByTagName('natural')
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
obj = field.remote_field.model._default_manager.db_manager(self.db).get_by_natural_key(*field_value)
obj_pk = getattr(obj, field.remote_field.field_name)
# If this is a natural foreign key to an object that
# has a FK/O2O as the foreign key, use the FK value
if field.remote_field.model._meta.pk.remote_field:
obj_pk = obj_pk.pk
else:
# Otherwise, treat like a normal PK
field_value = getInnerText(node).strip()
obj_pk = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
return obj_pk
else:
field_value = getInnerText(node).strip()
return field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
def _handle_m2m_field_node(self, node, field):
"""
Handle a <field> node for a ManyToManyField.
"""
if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
def m2m_convert(n):
keys = n.getElementsByTagName('natural')
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
obj_pk = field.remote_field.model._default_manager.db_manager(self.db).get_by_natural_key(*field_value).pk
else:
# Otherwise, treat like a normal PK value.
obj_pk = field.remote_field.model._meta.pk.to_python(n.getAttribute('pk'))
return obj_pk
        else:
            def m2m_convert(n):
                return field.remote_field.model._meta.pk.to_python(n.getAttribute('pk'))
return [m2m_convert(c) for c in node.getElementsByTagName("object")]
def _get_model_from_node(self, node, attr):
"""
Helper to look up a model from a <object model=...> or a <field
rel=... to=...> node.
"""
model_identifier = node.getAttribute(attr)
if not model_identifier:
raise base.DeserializationError(
"<%s> node is missing the required '%s' attribute"
% (node.nodeName, attr))
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError(
"<%s> node has invalid model identifier: '%s'"
% (node.nodeName, model_identifier))
def getInnerText(node):
"""
Get all the inner text of a DOM node (recursively).
"""
# inspired by http://mail.python.org/pipermail/xml-sig/2005-March/011022.html
inner_text = []
for child in node.childNodes:
if child.nodeType == child.TEXT_NODE or child.nodeType == child.CDATA_SECTION_NODE:
inner_text.append(child.data)
elif child.nodeType == child.ELEMENT_NODE:
inner_text.extend(getInnerText(child))
else:
pass
return "".join(inner_text)
# Below code based on Christian Heimes' defusedxml
class DefusedExpatParser(_ExpatParser):
"""
An expat parser hardened against XML bomb attacks.
    Forbids DTDs, entity declarations, and external entity references.
"""
def __init__(self, *args, **kwargs):
_ExpatParser.__init__(self, *args, **kwargs)
self.setFeature(handler.feature_external_ges, False)
self.setFeature(handler.feature_external_pes, False)
def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
raise DTDForbidden(name, sysid, pubid)
def entity_decl(self, name, is_parameter_entity, value, base,
sysid, pubid, notation_name):
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
# expat 1.2
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
def external_entity_ref_handler(self, context, base, sysid, pubid):
raise ExternalReferenceForbidden(context, base, sysid, pubid)
def reset(self):
_ExpatParser.reset(self)
parser = self._parser
parser.StartDoctypeDeclHandler = self.start_doctype_decl
parser.EntityDeclHandler = self.entity_decl
parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
parser.ExternalEntityRefHandler = self.external_entity_ref_handler
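# Illustrative usage sketch (the handler and stream names below are
# hypothetical): the hardened parser is driven like any SAX parser, but
# documents carrying a DTD, entity declarations, or external references
# raise instead of being expanded.
#
#   parser = DefusedExpatParser()
#   parser.setContentHandler(my_handler)
#   try:
#       parser.parse(untrusted_stream)
#   except (DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden):
#       pass  # reject the document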
class DefusedXmlException(ValueError):
"""Base exception."""
def __repr__(self):
return str(self)
class DTDForbidden(DefusedXmlException):
"""Document type definition is forbidden."""
def __init__(self, name, sysid, pubid):
super(DTDForbidden, self).__init__()
self.name = name
self.sysid = sysid
self.pubid = pubid
def __str__(self):
tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl.format(self.name, self.sysid, self.pubid)
class EntitiesForbidden(DefusedXmlException):
"""Entity definition is forbidden."""
def __init__(self, name, value, base, sysid, pubid, notation_name):
super(EntitiesForbidden, self).__init__()
self.name = name
self.value = value
self.base = base
self.sysid = sysid
self.pubid = pubid
self.notation_name = notation_name
def __str__(self):
tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl.format(self.name, self.sysid, self.pubid)
class ExternalReferenceForbidden(DefusedXmlException):
"""Resolving an external reference is forbidden."""
def __init__(self, context, base, sysid, pubid):
super(ExternalReferenceForbidden, self).__init__()
self.context = context
self.base = base
self.sysid = sysid
self.pubid = pubid
def __str__(self):
tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
return tpl.format(self.sysid, self.pubid)
| bsd-3-clause | -4,052,712,863,014,442,500 | 38.447301 | 126 | 0.59218 | false |
jgoclawski/django | django/contrib/auth/hashers.py | 211 | 17463 | from __future__ import unicode_literals
import base64
import binascii
import hashlib
import importlib
from collections import OrderedDict
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils import lru_cache
from django.utils.crypto import (
constant_time_compare, get_random_string, pbkdf2,
)
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.module_loading import import_string
from django.utils.translation import ugettext_noop as _
UNUSABLE_PASSWORD_PREFIX = '!' # This will never be a valid encoded hash
UNUSABLE_PASSWORD_SUFFIX_LENGTH = 40 # number of random chars to add after UNUSABLE_PASSWORD_PREFIX
def is_password_usable(encoded):
if encoded is None or encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
return False
try:
identify_hasher(encoded)
except ValueError:
return False
return True
def check_password(password, encoded, setter=None, preferred='default'):
"""
Returns a boolean of whether the raw password matches the three
part encoded digest.
If setter is specified, it'll be called when you need to
regenerate the password.
"""
if password is None or not is_password_usable(encoded):
return False
preferred = get_hasher(preferred)
hasher = identify_hasher(encoded)
must_update = hasher.algorithm != preferred.algorithm
if not must_update:
must_update = preferred.must_update(encoded)
is_correct = hasher.verify(password, encoded)
if setter and is_correct and must_update:
setter(password)
return is_correct
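# Illustrative usage sketch (assumes hashers are configured via
# settings.PASSWORD_HASHERS; ``user`` is hypothetical):
#
#   encoded = make_password('s3cret')
#   check_password('s3cret', encoded)  # -> True
#   # A setter transparently re-hashes outdated passwords on login:
#   check_password('s3cret', encoded, setter=lambda raw: user.set_password(raw))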
def make_password(password, salt=None, hasher='default'):
"""
Turn a plain-text password into a hash for database storage
Same as encode() but generates a new random salt.
If password is None then a concatenation of
UNUSABLE_PASSWORD_PREFIX and a random string will be returned
which disallows logins. Additional random string reduces chances
of gaining access to staff or superuser accounts.
See ticket #20079 for more info.
"""
if password is None:
return UNUSABLE_PASSWORD_PREFIX + get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH)
hasher = get_hasher(hasher)
if not salt:
salt = hasher.salt()
return hasher.encode(password, salt)
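# Illustrative sketch (assumes the named hasher is listed in
# settings.PASSWORD_HASHERS):
#
#   make_password('s3cret')                        # default hasher, random salt
#   make_password('s3cret', hasher='pbkdf2_sha1')  # explicit algorithm
#   make_password(None)  # unusable: '!' followed by 40 random characters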
@lru_cache.lru_cache()
def get_hashers():
hashers = []
for hasher_path in settings.PASSWORD_HASHERS:
hasher_cls = import_string(hasher_path)
hasher = hasher_cls()
if not getattr(hasher, 'algorithm'):
raise ImproperlyConfigured("hasher doesn't specify an "
"algorithm name: %s" % hasher_path)
hashers.append(hasher)
return hashers
@lru_cache.lru_cache()
def get_hashers_by_algorithm():
return {hasher.algorithm: hasher for hasher in get_hashers()}
@receiver(setting_changed)
def reset_hashers(**kwargs):
if kwargs['setting'] == 'PASSWORD_HASHERS':
get_hashers.cache_clear()
get_hashers_by_algorithm.cache_clear()
def get_hasher(algorithm='default'):
"""
Returns an instance of a loaded password hasher.
If algorithm is 'default', the default hasher will be returned.
This function will also lazy import hashers specified in your
settings file if needed.
"""
if hasattr(algorithm, 'algorithm'):
return algorithm
elif algorithm == 'default':
return get_hashers()[0]
else:
hashers = get_hashers_by_algorithm()
try:
return hashers[algorithm]
except KeyError:
raise ValueError("Unknown password hashing algorithm '%s'. "
"Did you specify it in the PASSWORD_HASHERS "
"setting?" % algorithm)
def identify_hasher(encoded):
"""
Returns an instance of a loaded password hasher.
Identifies hasher algorithm by examining encoded hash, and calls
get_hasher() to return hasher. Raises ValueError if
algorithm cannot be identified, or if hasher is not loaded.
"""
# Ancient versions of Django created plain MD5 passwords and accepted
# MD5 passwords with an empty salt.
if ((len(encoded) == 32 and '$' not in encoded) or
(len(encoded) == 37 and encoded.startswith('md5$$'))):
algorithm = 'unsalted_md5'
# Ancient versions of Django accepted SHA1 passwords with an empty salt.
elif len(encoded) == 46 and encoded.startswith('sha1$$'):
algorithm = 'unsalted_sha1'
else:
algorithm = encoded.split('$', 1)[0]
return get_hasher(algorithm)
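# Illustrative sketch (salt/hash below are made up; assumes the algorithm
# is enabled in settings.PASSWORD_HASHERS): the leading segment of the
# encoded string selects the hasher.
#
#   identify_hasher('pbkdf2_sha256$24000$seasalt$c2FtcGxl').algorithm
#   # -> 'pbkdf2_sha256'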
def mask_hash(hash, show=6, char="*"):
"""
    Returns the given hash, with only the first ``show`` characters shown. The
rest are masked with ``char`` for security reasons.
"""
masked = hash[:show]
masked += char * len(hash[show:])
return masked
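# For example (illustrative):
#
#   mask_hash('0123456789abcdef')  # -> '012345**********'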
class BasePasswordHasher(object):
"""
Abstract base class for password hashers
When creating your own hasher, you need to override algorithm,
verify(), encode() and safe_summary().
PasswordHasher objects are immutable.
"""
algorithm = None
library = None
def _load_library(self):
if self.library is not None:
if isinstance(self.library, (tuple, list)):
name, mod_path = self.library
else:
mod_path = self.library
try:
module = importlib.import_module(mod_path)
except ImportError as e:
raise ValueError("Couldn't load %r algorithm library: %s" %
(self.__class__.__name__, e))
return module
raise ValueError("Hasher %r doesn't specify a library attribute" %
self.__class__.__name__)
def salt(self):
"""
Generates a cryptographically secure nonce salt in ASCII
"""
return get_random_string()
def verify(self, password, encoded):
"""
Checks if the given password is correct
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide a verify() method')
def encode(self, password, salt):
"""
Creates an encoded database value
The result is normally formatted as "algorithm$salt$hash" and
must be fewer than 128 characters.
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide an encode() method')
def safe_summary(self, encoded):
"""
Returns a summary of safe values
The result is a dictionary and will be used where the password field
must be displayed to construct a safe representation of the password.
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide a safe_summary() method')
def must_update(self, encoded):
return False
class PBKDF2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the PBKDF2 algorithm (recommended)
Configured to use PBKDF2 + HMAC + SHA256.
    The result is a 32 byte binary string. Iterations may be changed
safely but you must rename the algorithm if you change SHA256.
"""
algorithm = "pbkdf2_sha256"
iterations = 24000
digest = hashlib.sha256
def encode(self, password, salt, iterations=None):
assert password is not None
assert salt and '$' not in salt
if not iterations:
iterations = self.iterations
hash = pbkdf2(password, salt, iterations, digest=self.digest)
hash = base64.b64encode(hash).decode('ascii').strip()
return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
def verify(self, password, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt, int(iterations))
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
assert algorithm == self.algorithm
return OrderedDict([
(_('algorithm'), algorithm),
(_('iterations'), iterations),
(_('salt'), mask_hash(salt)),
(_('hash'), mask_hash(hash)),
])
def must_update(self, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
return int(iterations) != self.iterations
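    # Illustrative note: encode() above produces strings of the form
    # "pbkdf2_sha256$24000$<salt>$<base64 hash>", and must_update() flags
    # any stored hash whose iteration count differs from ``iterations`` so
    # it is re-hashed on the next successful check_password() call.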
class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher):
"""
Alternate PBKDF2 hasher which uses SHA1, the default PRF
recommended by PKCS #5. This is compatible with other
implementations of PBKDF2, such as openssl's
PKCS5_PBKDF2_HMAC_SHA1().
"""
algorithm = "pbkdf2_sha1"
digest = hashlib.sha1
class BCryptSHA256PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the bcrypt algorithm (recommended)
This is considered by many to be the most secure algorithm but you
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
"""
algorithm = "bcrypt_sha256"
digest = hashlib.sha256
library = ("bcrypt", "bcrypt")
rounds = 12
def salt(self):
bcrypt = self._load_library()
return bcrypt.gensalt(rounds=self.rounds)
def encode(self, password, salt):
bcrypt = self._load_library()
# Need to reevaluate the force_bytes call once bcrypt is supported on
# Python 3
# Hash the password prior to using bcrypt to prevent password truncation
# See: https://code.djangoproject.com/ticket/20138
if self.digest is not None:
            # binascii.hexlify() returns bytes on both Python 2 and 3,
            # unlike hexdigest(), which returns text (unicode) on Python 3.
password = binascii.hexlify(self.digest(force_bytes(password)).digest())
else:
password = force_bytes(password)
data = bcrypt.hashpw(password, salt)
return "%s$%s" % (self.algorithm, force_text(data))
def verify(self, password, encoded):
algorithm, data = encoded.split('$', 1)
assert algorithm == self.algorithm
bcrypt = self._load_library()
# Hash the password prior to using bcrypt to prevent password truncation
# See: https://code.djangoproject.com/ticket/20138
if self.digest is not None:
            # binascii.hexlify() returns bytes on both Python 2 and 3,
            # unlike hexdigest(), which returns text (unicode) on Python 3.
password = binascii.hexlify(self.digest(force_bytes(password)).digest())
else:
password = force_bytes(password)
# Ensure that our data is a bytestring
data = force_bytes(data)
# force_bytes() necessary for py-bcrypt compatibility
hashpw = force_bytes(bcrypt.hashpw(password, data))
return constant_time_compare(data, hashpw)
def safe_summary(self, encoded):
algorithm, empty, algostr, work_factor, data = encoded.split('$', 4)
assert algorithm == self.algorithm
salt, checksum = data[:22], data[22:]
return OrderedDict([
(_('algorithm'), algorithm),
(_('work factor'), work_factor),
(_('salt'), mask_hash(salt)),
(_('checksum'), mask_hash(checksum)),
])
def must_update(self, encoded):
algorithm, empty, algostr, rounds, data = encoded.split('$', 4)
return int(rounds) != self.rounds
class BCryptPasswordHasher(BCryptSHA256PasswordHasher):
"""
Secure password hashing using the bcrypt algorithm
This is considered by many to be the most secure algorithm but you
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
This hasher does not first hash the password which means it is subject to
the 72 character bcrypt password truncation, most use cases should prefer
    the BCryptSHA256PasswordHasher.
See: https://code.djangoproject.com/ticket/20138
"""
algorithm = "bcrypt"
digest = None
class SHA1PasswordHasher(BasePasswordHasher):
"""
The SHA1 password hashing algorithm (not recommended)
"""
algorithm = "sha1"
def encode(self, password, salt):
assert password is not None
assert salt and '$' not in salt
hash = hashlib.sha1(force_bytes(salt + password)).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def verify(self, password, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
return OrderedDict([
(_('algorithm'), algorithm),
(_('salt'), mask_hash(salt, show=2)),
(_('hash'), mask_hash(hash)),
])
class MD5PasswordHasher(BasePasswordHasher):
"""
The Salted MD5 password hashing algorithm (not recommended)
"""
algorithm = "md5"
def encode(self, password, salt):
assert password is not None
assert salt and '$' not in salt
hash = hashlib.md5(force_bytes(salt + password)).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def verify(self, password, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
return OrderedDict([
(_('algorithm'), algorithm),
(_('salt'), mask_hash(salt, show=2)),
(_('hash'), mask_hash(hash)),
])
class UnsaltedSHA1PasswordHasher(BasePasswordHasher):
"""
Very insecure algorithm that you should *never* use; stores SHA1 hashes
with an empty salt.
This class is implemented because Django used to accept such password
hashes. Some older Django installs still have these values lingering
around so we need to handle and upgrade them properly.
"""
algorithm = "unsalted_sha1"
def salt(self):
return ''
def encode(self, password, salt):
assert salt == ''
hash = hashlib.sha1(force_bytes(password)).hexdigest()
return 'sha1$$%s' % hash
def verify(self, password, encoded):
encoded_2 = self.encode(password, '')
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
assert encoded.startswith('sha1$$')
hash = encoded[6:]
return OrderedDict([
(_('algorithm'), self.algorithm),
(_('hash'), mask_hash(hash)),
])
class UnsaltedMD5PasswordHasher(BasePasswordHasher):
"""
Incredibly insecure algorithm that you should *never* use; stores unsalted
MD5 hashes without the algorithm prefix, also accepts MD5 hashes with an
empty salt.
This class is implemented because Django used to store passwords this way
and to accept such password hashes. Some older Django installs still have
these values lingering around so we need to handle and upgrade them
properly.
"""
algorithm = "unsalted_md5"
def salt(self):
return ''
def encode(self, password, salt):
assert salt == ''
return hashlib.md5(force_bytes(password)).hexdigest()
def verify(self, password, encoded):
if len(encoded) == 37 and encoded.startswith('md5$$'):
encoded = encoded[5:]
encoded_2 = self.encode(password, '')
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
return OrderedDict([
(_('algorithm'), self.algorithm),
(_('hash'), mask_hash(encoded, show=3)),
])
class CryptPasswordHasher(BasePasswordHasher):
"""
Password hashing using UNIX crypt (not recommended)
The crypt module is not supported on all platforms.
"""
algorithm = "crypt"
library = "crypt"
def salt(self):
return get_random_string(2)
def encode(self, password, salt):
crypt = self._load_library()
assert len(salt) == 2
data = crypt.crypt(force_str(password), salt)
# we don't need to store the salt, but Django used to do this
return "%s$%s$%s" % (self.algorithm, '', data)
def verify(self, password, encoded):
crypt = self._load_library()
algorithm, salt, data = encoded.split('$', 2)
assert algorithm == self.algorithm
return constant_time_compare(data, crypt.crypt(force_str(password), data))
def safe_summary(self, encoded):
algorithm, salt, data = encoded.split('$', 2)
assert algorithm == self.algorithm
return OrderedDict([
(_('algorithm'), algorithm),
(_('salt'), salt),
(_('hash'), mask_hash(data, show=3)),
])
| bsd-3-clause | -8,282,980,165,179,837,000 | 32.777563 | 106 | 0.640783 | false |
dandan94/OpenGLTest | finalOpenGL/HelloGLFW/lib/boost_1_59_0/libs/python/test/shared_ptr.py | 46 | 2017 | # Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
'''
>>> from shared_ptr_ext import *
Test that shared_ptr<Derived> can be converted to shared_ptr<Base>
>>> Y.store(YYY(42))
>>> x = X(17)
>>> null_x = null(x)
>>> null_x # should be None
>>> identity(null_x) # should also be None
>>> a = New(1)
>>> A.call_f(a)
1
>>> New(0)
>>> type(factory(3))
<class 'shared_ptr_ext.Y'>
>>> type(factory(42))
<class 'shared_ptr_ext.YY'>
>>> class P(Z):
... def v(self):
... return -Z.v(self);
... def __del__(self):
... print 'bye'
...
>>> p = P(12)
>>> p.value()
12
>>> p.v()
-12
>>> look(p)
12
>>> try: modify(p)
... except TypeError: pass
... else: 'print expected a TypeError'
>>> look(None)
-1
>>> store(p)
>>> del p
>>> Z.get().v()
-12
>>> Z.count()
1
>>> Z.look_store()
12
>>> Z.release()
bye
>>> Z.count()
0
>>> z = Z(13)
>>> z.value()
13
>>> z.v()
13
>>> try: modify(z)
... except TypeError: pass
... else: 'print expected a TypeError'
>>> Z.get() # should be None
>>> store(z)
>>> assert Z.get() is z # show that deleter introspection works
>>> del z
>>> Z.get().value()
13
>>> Z.count()
1
>>> Z.look_store()
13
>>> Z.release()
>>> Z.count()
0
>>> x = X(17)
>>> x.value()
17
>>> look(x)
17
>>> try: modify(x)
... except TypeError: pass
... else: 'print expected a TypeError'
>>> look(None)
-1
>>> store(x)
>>> del x
>>> X.count()
1
>>> X.look_store()
17
>>> X.release()
>>> X.count()
0
>>> y = Y(19)
>>> y.value()
19
>>> modify(y)
>>> look(y)
-1
>>> store(Y(23))
>>> Y.count()
1
>>> Y.look_store()
23
>>> Y.release()
>>> Y.count()
0
'''
def run(args=None):
import sys
import doctest
if args is not None:
sys.argv = args
return doctest.testmod(sys.modules.get(__name__))
if __name__ == '__main__':
print "running..."
import sys
status = run()[0]
if (status == 0): print "Done."
sys.exit(status)
| gpl-3.0 | -7,482,129,758,503,748,000 | 14.515385 | 71 | 0.548835 | false |
Rogdham/pelican | pelican/tests/test_urlwrappers.py | 7 | 3417 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from pelican.tests.support import unittest
from pelican.urlwrappers import Author, Category, Tag, URLWrapper
class TestURLWrapper(unittest.TestCase):
def test_ordering(self):
# URLWrappers are sorted by name
wrapper_a = URLWrapper(name='first', settings={})
wrapper_b = URLWrapper(name='last', settings={})
self.assertFalse(wrapper_a > wrapper_b)
self.assertFalse(wrapper_a >= wrapper_b)
self.assertFalse(wrapper_a == wrapper_b)
self.assertTrue(wrapper_a != wrapper_b)
self.assertTrue(wrapper_a <= wrapper_b)
self.assertTrue(wrapper_a < wrapper_b)
wrapper_b.name = 'first'
self.assertFalse(wrapper_a > wrapper_b)
self.assertTrue(wrapper_a >= wrapper_b)
self.assertTrue(wrapper_a == wrapper_b)
self.assertFalse(wrapper_a != wrapper_b)
self.assertTrue(wrapper_a <= wrapper_b)
self.assertFalse(wrapper_a < wrapper_b)
wrapper_a.name = 'last'
self.assertTrue(wrapper_a > wrapper_b)
self.assertTrue(wrapper_a >= wrapper_b)
self.assertFalse(wrapper_a == wrapper_b)
self.assertTrue(wrapper_a != wrapper_b)
self.assertFalse(wrapper_a <= wrapper_b)
self.assertFalse(wrapper_a < wrapper_b)
def test_equality(self):
tag = Tag('test', settings={})
cat = Category('test', settings={})
author = Author('test', settings={})
# same name, but different class
self.assertNotEqual(tag, cat)
self.assertNotEqual(tag, author)
# should be equal vs text representing the same name
self.assertEqual(tag, u'test')
# should not be equal vs binary
self.assertNotEqual(tag, b'test')
        # Tags with the same slug should compare equal
tag_equal = Tag('Test', settings={})
self.assertEqual(tag, tag_equal)
        # Authors with the same slug should compare equal
author_equal = Author('Test', settings={})
self.assertEqual(author, author_equal)
        cat_unicode = Category('指導書', settings={})
        self.assertEqual(cat_unicode, u'zhi-dao-shu')
def test_slugify_with_substitutions_and_dots(self):
tag = Tag('Tag Dot',
settings={
'TAG_SUBSTITUTIONS': [('Tag Dot', 'tag.dot', True)]
})
cat = Category('Category Dot',
settings={
'CATEGORY_SUBSTITUTIONS': (('Category Dot',
'cat.dot',
True),)
})
self.assertEqual(tag.slug, 'tag.dot')
self.assertEqual(cat.slug, 'cat.dot')
def test_author_slug_substitutions(self):
settings = {
'AUTHOR_SUBSTITUTIONS': [
('Alexander Todorov', 'atodorov', False),
('Krasimir Tsonev', 'krasimir', False),
]
}
author1 = Author('Mr. Senko', settings=settings)
author2 = Author('Alexander Todorov', settings=settings)
author3 = Author('Krasimir Tsonev', settings=settings)
self.assertEqual(author1.slug, 'mr-senko')
self.assertEqual(author2.slug, 'atodorov')
self.assertEqual(author3.slug, 'krasimir')
| agpl-3.0 | 5,795,915,438,564,902,000 | 37.325843 | 77 | 0.569628 | false |
gklyne/annalist | src/annalist_root/annalist/views/form_utils/fieldchoice.py | 1 | 5788 | """
This module defines a class used to represent a choice for an
enumerated-value field.
"""
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function
__author__ = "Graham Klyne ([email protected])"
__copyright__ = "Copyright 2015, G. Klyne"
__license__ = "MIT (http://opensource.org/licenses/MIT)"
import re
import logging
log = logging.getLogger(__name__)
from collections import namedtuple
from utils.py3porting import to_unicode
from django.utils.html import format_html, mark_safe, escape
_FieldChoice_tuple = namedtuple("FieldChoice", ("id", "value", "label", "link", "choice_value"))
class FieldChoice(_FieldChoice_tuple):
"""
Class representing a choice for an enumerated field.
>>> c1 = FieldChoice('id1', 'value1', 'label1', 'link1', choice_value=True)
>>> c1 == FieldChoice(id='id1', value='value1', label='label1', link='link1', choice_value=True)
True
>>> c1.id == 'id1'
True
>>> c1.value == 'value1'
True
>>> c1.label == 'label1'
True
>>> c1.link == 'link1'
True
>>> c1.choice_html() == u'label1 (value1)'
True
>>> c2 = FieldChoice('id2', 'value2', 'label2', 'link2', choice_value=False)
>>> c2 == FieldChoice(id='id2', value='value2', label='label2', link='link2', choice_value=False)
True
>>> c2.id == 'id2'
True
>>> c2.value == 'value2'
True
>>> c2.label == 'label2'
True
>>> c2.link == 'link2'
True
>>> c2.choice() == u'label2'
True
>>> c3 = FieldChoice(id='id3', value='value3', link='link3')
>>> c3 == FieldChoice(id='id3', value='value3', label='value3', link='link3', choice_value=False)
True
>>> c3.id == 'id3'
True
>>> c3.value == 'value3'
True
>>> c3.label == 'value3'
True
>>> c3.link == 'link3'
True
>>> c4 = FieldChoice('id4', link='link4')
>>> c4 == FieldChoice(id='id4', value='id4', label='id4', link='link4', choice_value=False)
True
>>> c4.id == 'id4'
True
>>> c4.value == 'id4'
True
>>> c4.label == 'id4'
True
>>> c4.link == 'link4'
True
>>> c5 = FieldChoice('')
>>> c5 == FieldChoice(id='', value='', label='', link=None, choice_value=False)
True
"""
def __new__(_cls, id=None, value=None, label=None, link=None, choice_value=False):
if value is None: value = id
if label is None: label = value
result = super(FieldChoice, _cls).__new__(_cls, id, value, label, link, choice_value)
return result
def __eq__(self, other):
"""
Returns True if self == other for sorting and equivalence purposes
"""
return self.id.__eq__(other.id)
def __ne__(self, other):
"""
Returns True if self != other for sorting and equivalence purposes
Note: required for Python2.
"""
return self.id.__ne__(other.id)
def __lt__(self, other):
"""
Returns True if self < other for sorting purposes
"""
return self.id.__lt__(other.id)
def __hash__(self):
"""
        Objects that override __eq__ must also define __hash__ to remain
        usable in sets and as dict keys; hash by id, consistent with
        __eq__ above.
"""
return hash(self.id)
def choice(self, sep=u"\xa0\xa0\xa0"):
"""
Return choice string
"""
if self.choice_value:
choice_text = self.option_label(sep=sep)
else:
choice_text = to_unicode(self.label)
return choice_text
def choice_html(self, sep=u" "):
"""
Return choice string HTML for option in drop-down list.
"""
return self.choice(sep=sep)
def add_link(self, link=None):
return FieldChoice(self.id, self.value, self.label, link)
def option_label(self, sep=u"\xa0\xa0\xa0"):
"""
Returns string used for displayed option label.
This function is used mainly for testing, to isolate details of
option presentation from the majority of test cases.
"""
if self.label:
return format_html(u"{}{}({})", self.label, mark_safe(sep), self.value)
else:
return escape(self.value)
def option_label_html(self, sep=u" "):
"""
Variation of option_label returns HTML-encoded form of label text
"""
return self.option_label(sep=sep)
def update_choice_labels(fieldchoices):
"""
Update choice labels in supplied list of FieldChoice values so that duplicate labels can
be distinguished.
Returns an updated list of options.
"""
# Detect non-unique labels
labels = {}
for o in fieldchoices:
        label = o.label
        labels[label] = labels.get(label, 0) + 1
# Generate updated choice values
new_choices = []
for o in fieldchoices:
if labels[o.label] > 1:
new_choices.append(
FieldChoice(id=o.id, value=o.value, label=o.label, link=o.link, choice_value=True)
)
else:
new_choices.append(o)
return new_choices
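# Illustrative sketch (values made up): two choices sharing a label are
# flagged with choice_value=True so rendering appends the distinguishing
# value.
#
#   opts = [FieldChoice('id1', 'v1', 'dup'), FieldChoice('id2', 'v2', 'dup')]
#   [o.choice_value for o in update_choice_labels(opts)]  # -> [True, True]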
def get_choice_labels(fieldchoices):
"""
Return a list of choice labels based on the supplied list of FieldChoice values
>>> c1 = FieldChoice('id1', 'value1', 'label1', 'link1')
>>> c2a = FieldChoice('id2a', 'value2a', 'label2', 'link2')
>>> c2b = FieldChoice('id2b', 'value2b', 'label2', 'link2')
>>> labels = get_choice_labels([c1,c2a,c2b])
>>> labels == ['label1', u'label2\\xa0\\xa0\\xa0(value2a)', u'label2\\xa0\\xa0\\xa0(value2b)']
True
"""
return [ fc.choice() for fc in update_choice_labels(fieldchoices) ]
if __name__ == "__main__":
import doctest
doctest.testmod()
# End.
| mit | -7,639,511,344,817,495,000 | 28.989637 | 101 | 0.575674 | false |
kaczla/PJN | src/Przecinki/scikit.py | 1 | 1048 | #!/usr/bin/python2
# -*- coding: utf-8 -*-
import sys
import matplotlib.pyplot as plt
import numpy as np
from sklearn.cross_validation import cross_val_predict
from sklearn import linear_model
X = []
Y = []
for line in sys.stdin:
line = line.rstrip()
X.append([len(line.split())])
Y.append(line.count(","))
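# Cross-validated predictions over the training data; computed for
# reference only (the evaluation below refits on all of X/Y and scores
# the held-out file given as sys.argv[1]).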
lr = linear_model.LinearRegression()
predicted = cross_val_predict(lr, X, Y)
FILE = open(sys.argv[1], "r")
X_TEST = []
Y_TEST = []
for line in FILE:
line = line.rstrip()
Y_TEST.append(line.count(","))
line = line.replace(",", "")
X_TEST.append([len(line.split())])
regr = linear_model.LinearRegression()
regr.fit(X, Y)
print "Coefficients: ", regr.coef_
print "Residual sum of squares: %.2f" % np.mean((regr.predict(X_TEST) - Y_TEST) ** 2)
print "Variance score: %.2f" % regr.score(X_TEST, Y_TEST)
plt.scatter(X_TEST, Y_TEST, color='black')
plt.plot(X_TEST, regr.predict(X_TEST), color='green', linewidth=2)
plt.xticks(())
plt.yticks(())
plt.show()
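# Example invocation (illustrative; file names are hypothetical):
#   python2 scikit.py test_set.txt < train_set.txt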
| gpl-2.0 | 6,433,059,116,830,044,000 | 25.2 | 85 | 0.666031 | false |
maoy/zknova | nova/tests/api/openstack/test_faults.py | 1 | 7383 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from xml.dom import minidom
import webob
import webob.dec
import webob.exc
from nova.api.openstack import common
from nova.api.openstack import wsgi
from nova.openstack.common import jsonutils
from nova import test
class TestFaults(test.TestCase):
"""Tests covering `nova.api.openstack.faults:Fault` class."""
def _prepare_xml(self, xml_string):
"""Remove characters from string which hinder XML equality testing."""
xml_string = xml_string.replace(" ", "")
xml_string = xml_string.replace("\n", "")
xml_string = xml_string.replace("\t", "")
return xml_string
def test_400_fault_json(self):
# Test fault serialized to JSON via file-extension and/or header.
requests = [
webob.Request.blank('/.json'),
webob.Request.blank('/', headers={"Accept": "application/json"}),
]
for request in requests:
fault = wsgi.Fault(webob.exc.HTTPBadRequest(explanation='scram'))
response = request.get_response(fault)
expected = {
"badRequest": {
"message": "scram",
"code": 400,
},
}
actual = jsonutils.loads(response.body)
self.assertEqual(response.content_type, "application/json")
self.assertEqual(expected, actual)
def test_413_fault_json(self):
# Test fault serialized to JSON via file-extension and/or header.
requests = [
webob.Request.blank('/.json'),
webob.Request.blank('/', headers={"Accept": "application/json"}),
]
for request in requests:
exc = webob.exc.HTTPRequestEntityTooLarge
fault = wsgi.Fault(exc(explanation='sorry',
headers={'Retry-After': 4}))
response = request.get_response(fault)
expected = {
"overLimit": {
"message": "sorry",
"code": 413,
"retryAfter": 4,
},
}
actual = jsonutils.loads(response.body)
self.assertEqual(response.content_type, "application/json")
self.assertEqual(expected, actual)
def test_raise(self):
# Ensure the ability to raise :class:`Fault` in WSGI-ified methods.
@webob.dec.wsgify
def raiser(req):
raise wsgi.Fault(webob.exc.HTTPNotFound(explanation='whut?'))
req = webob.Request.blank('/.xml')
resp = req.get_response(raiser)
self.assertEqual(resp.content_type, "application/xml")
self.assertEqual(resp.status_int, 404)
self.assertTrue('whut?' in resp.body)
def test_raise_403(self):
# Ensure the ability to raise :class:`Fault` in WSGI-ified methods.
@webob.dec.wsgify
def raiser(req):
raise wsgi.Fault(webob.exc.HTTPForbidden(explanation='whut?'))
req = webob.Request.blank('/.xml')
resp = req.get_response(raiser)
self.assertEqual(resp.content_type, "application/xml")
self.assertEqual(resp.status_int, 403)
self.assertTrue('resizeNotAllowed' not in resp.body)
self.assertTrue('forbidden' in resp.body)
def test_fault_has_status_int(self):
# Ensure the status_int is set correctly on faults.
fault = wsgi.Fault(webob.exc.HTTPBadRequest(explanation='what?'))
self.assertEqual(fault.status_int, 400)
def test_xml_serializer(self):
# Ensure that a v1.1 request responds with a v1.1 xmlns.
request = webob.Request.blank('/v1.1',
headers={"Accept": "application/xml"})
fault = wsgi.Fault(webob.exc.HTTPBadRequest(explanation='scram'))
response = request.get_response(fault)
self.assertTrue(common.XML_NS_V11 in response.body)
self.assertEqual(response.content_type, "application/xml")
self.assertEqual(response.status_int, 400)
class FaultsXMLSerializationTestV11(test.TestCase):
"""Tests covering `nova.api.openstack.faults:Fault` class."""
def _prepare_xml(self, xml_string):
xml_string = xml_string.replace(" ", "")
xml_string = xml_string.replace("\n", "")
xml_string = xml_string.replace("\t", "")
return xml_string
def test_400_fault(self):
metadata = {'attributes': {"badRequest": 'code'}}
serializer = wsgi.XMLDictSerializer(metadata=metadata,
xmlns=common.XML_NS_V11)
fixture = {
"badRequest": {
"message": "scram",
"code": 400,
},
}
output = serializer.serialize(fixture)
actual = minidom.parseString(self._prepare_xml(output))
expected = minidom.parseString(self._prepare_xml("""
<badRequest code="400" xmlns="%s">
<message>scram</message>
</badRequest>
""") % common.XML_NS_V11)
self.assertEqual(expected.toxml(), actual.toxml())
def test_413_fault(self):
metadata = {'attributes': {"overLimit": 'code'}}
serializer = wsgi.XMLDictSerializer(metadata=metadata,
xmlns=common.XML_NS_V11)
fixture = {
"overLimit": {
"message": "sorry",
"code": 413,
"retryAfter": 4,
},
}
output = serializer.serialize(fixture)
actual = minidom.parseString(self._prepare_xml(output))
expected = minidom.parseString(self._prepare_xml("""
<overLimit code="413" xmlns="%s">
<message>sorry</message>
<retryAfter>4</retryAfter>
</overLimit>
""") % common.XML_NS_V11)
self.assertEqual(expected.toxml(), actual.toxml())
def test_404_fault(self):
metadata = {'attributes': {"itemNotFound": 'code'}}
serializer = wsgi.XMLDictSerializer(metadata=metadata,
xmlns=common.XML_NS_V11)
fixture = {
"itemNotFound": {
"message": "sorry",
"code": 404,
},
}
output = serializer.serialize(fixture)
actual = minidom.parseString(self._prepare_xml(output))
expected = minidom.parseString(self._prepare_xml("""
<itemNotFound code="404" xmlns="%s">
<message>sorry</message>
</itemNotFound>
""") % common.XML_NS_V11)
self.assertEqual(expected.toxml(), actual.toxml())
| apache-2.0 | -6,985,270,577,377,998,000 | 34.495192 | 78 | 0.574563 | false |
hfp/tensorflow-xsmm | tensorflow/python/training/basic_loops.py | 13 | 2348 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Basic loop for training."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import errors
from tensorflow.python.util.tf_export import tf_export
@tf_export(v1=["train.basic_train_loop"])
def basic_train_loop(supervisor, train_step_fn, args=None,
kwargs=None, master=""):
"""Basic loop to train a model.
Calls `train_step_fn` in a loop to train a model. The function is called as:
```python
train_step_fn(session, *args, **kwargs)
```
It is passed a `tf.Session` in addition to `args` and `kwargs`. The function
typically runs one training step in the session.
Args:
supervisor: `tf.train.Supervisor` to run the training services.
train_step_fn: Callable to execute one training step. Called
      repeatedly as `train_step_fn(session, *args, **kwargs)`.
args: Optional positional arguments passed to `train_step_fn`.
kwargs: Optional keyword arguments passed to `train_step_fn`.
master: Master to use to create the training session. Defaults to
`""` which causes the session to be created in the local process.
"""
if args is None:
args = []
if kwargs is None:
kwargs = {}
should_retry = True
while should_retry:
try:
should_retry = False
with supervisor.managed_session(master) as sess:
while not supervisor.should_stop():
train_step_fn(sess, *args, **kwargs)
except errors.AbortedError:
# Always re-run on AbortedError as it indicates a restart of one of the
# distributed tensorflow servers.
should_retry = True
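# Illustrative usage sketch (assumes a graph defining ``train_op`` and
# ``import tensorflow as tf``; names are hypothetical):
#
#   sv = tf.train.Supervisor(logdir="/tmp/train_logs")
#   def train_step(sess):
#     sess.run(train_op)
#   basic_train_loop(sv, train_step)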
| apache-2.0 | -4,477,418,664,260,154,000 | 37.491803 | 80 | 0.682709 | false |
Sylrob434/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/fourtube.py | 19 | 3728 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
compat_urllib_request,
unified_strdate,
str_to_int,
parse_duration,
clean_html,
)
class FourTubeIE(InfoExtractor):
IE_NAME = '4tube'
_VALID_URL = r'https?://(?:www\.)?4tube\.com/videos/(?P<id>\d+)'
_TEST = {
'url': 'http://www.4tube.com/videos/209733/hot-babe-holly-michaels-gets-her-ass-stuffed-by-black',
'md5': '6516c8ac63b03de06bc8eac14362db4f',
'info_dict': {
'id': '209733',
'ext': 'mp4',
'title': 'Hot Babe Holly Michaels gets her ass stuffed by black',
'uploader': 'WCP Club',
'uploader_id': 'wcp-club',
'upload_date': '20131031',
'duration': 583,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage_url = 'http://www.4tube.com/videos/' + video_id
webpage = self._download_webpage(webpage_url, video_id)
self.report_extraction(video_id)
playlist_json = self._html_search_regex(r'var playerConfigPlaylist\s+=\s+([^;]+)', webpage, 'Playlist')
media_id = self._search_regex(r'idMedia:\s*(\d+)', playlist_json, 'Media Id')
sources = self._search_regex(r'sources:\s*\[([^\]]*)\]', playlist_json, 'Sources').split(',')
title = self._search_regex(r'title:\s*"([^"]*)', playlist_json, 'Title')
thumbnail_url = self._search_regex(r'image:\s*"([^"]*)', playlist_json, 'Thumbnail', fatal=False)
uploader_str = self._search_regex(r'<span>Uploaded by</span>(.*?)<span>', webpage, 'uploader', fatal=False)
mobj = re.search(r'<a href="/sites/(?P<id>[^"]+)"><strong>(?P<name>[^<]+)</strong></a>', uploader_str)
(uploader, uploader_id) = (mobj.group('name'), mobj.group('id')) if mobj else (clean_html(uploader_str), None)
upload_date = None
view_count = None
duration = None
description = self._html_search_meta('description', webpage, 'description')
if description:
upload_date = self._search_regex(r'Published Date: (\d{2} [a-zA-Z]{3} \d{4})', description, 'upload date',
fatal=False)
if upload_date:
upload_date = unified_strdate(upload_date)
view_count = self._search_regex(r'Views: ([\d,\.]+)', description, 'view count', fatal=False)
if view_count:
view_count = str_to_int(view_count)
duration = parse_duration(self._search_regex(r'Length: (\d+m\d+s)', description, 'duration', fatal=False))
token_url = "http://tkn.4tube.com/{0}/desktop/{1}".format(media_id, "+".join(sources))
headers = {
b'Content-Type': b'application/x-www-form-urlencoded',
b'Origin': b'http://www.4tube.com',
}
token_req = compat_urllib_request.Request(token_url, b'{}', headers)
tokens = self._download_json(token_req, video_id)
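        # The endpoint returns JSON keyed by source resolution, each entry
        # carrying a 'token' whose value is the direct video URL consumed
        # below, e.g. (illustrative): {"720": {"token": "http://..."}}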
formats = [{
'url': tokens[format]['token'],
'format_id': format + 'p',
'resolution': format + 'p',
'quality': int(format),
} for format in sources]
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'thumbnail': thumbnail_url,
'uploader': uploader,
'uploader_id': uploader_id,
'upload_date': upload_date,
'view_count': view_count,
'duration': duration,
'age_limit': 18,
'webpage_url': webpage_url,
        }
| gpl-3.0 | 9,063,025,987,505,879,000 | 38.252632 | 118 | 0.549624 | false |
srluge/SickRage | lib/sqlalchemy/orm/mapper.py | 75 | 108686 | # orm/mapper.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Logic to map Python classes to and from selectables.
Defines the :class:`~sqlalchemy.orm.mapper.Mapper` class, the central
configurational unit which associates a class with a database table.
This is a semi-private module; the main configurational API of the ORM is
available in the :mod:`sqlalchemy.orm` namespace.
"""
from __future__ import absolute_import
import types
import weakref
from itertools import chain
from collections import deque
from .. import sql, util, log, exc as sa_exc, event, schema, inspection
from ..sql import expression, visitors, operators, util as sql_util
from . import instrumentation, attributes, exc as orm_exc, loading
from . import properties
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
from .base import _class_to_mapper, _state_mapper, class_mapper, \
state_str, _INSTRUMENTOR
from .path_registry import PathRegistry
import sys
_mapper_registry = weakref.WeakKeyDictionary()
_already_compiling = False
_memoized_configured_property = util.group_expirable_memoized_property()
# a constant returned by _get_attr_by_column to indicate
# this mapper is not handling an attribute for a particular
# column
NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE')
# lock used to synchronize the "mapper configure" step
_CONFIGURE_MUTEX = util.threading.RLock()
@inspection._self_inspects
@log.class_logger
class Mapper(_InspectionAttr):
"""Define the correlation of class attributes to database table
columns.
The :class:`.Mapper` object is instantiated using the
:func:`~sqlalchemy.orm.mapper` function. For information
about instantiating new :class:`.Mapper` objects, see
that function's documentation.
When :func:`.mapper` is used
explicitly to link a user defined class with table
metadata, this is referred to as *classical mapping*.
Modern SQLAlchemy usage tends to favor the
:mod:`sqlalchemy.ext.declarative` extension for class
configuration, which
makes usage of :func:`.mapper` behind the scenes.
Given a particular class known to be mapped by the ORM,
the :class:`.Mapper` which maintains it can be acquired
using the :func:`.inspect` function::
from sqlalchemy import inspect
mapper = inspect(MyClass)
A class which was mapped by the :mod:`sqlalchemy.ext.declarative`
extension will also have its mapper available via the ``__mapper__``
attribute.
"""
_new_mappers = False
def __init__(self,
class_,
local_table=None,
properties=None,
primary_key=None,
non_primary=False,
inherits=None,
inherit_condition=None,
inherit_foreign_keys=None,
extension=None,
order_by=False,
always_refresh=False,
version_id_col=None,
version_id_generator=None,
polymorphic_on=None,
_polymorphic_map=None,
polymorphic_identity=None,
concrete=False,
with_polymorphic=None,
allow_partial_pks=True,
batch=True,
column_prefix=None,
include_properties=None,
exclude_properties=None,
passive_updates=True,
confirm_deleted_rows=True,
eager_defaults=False,
legacy_is_orphan=False,
_compiled_cache_size=100,
):
"""Return a new :class:`~.Mapper` object.
This function is typically used behind the scenes
via the Declarative extension. When using Declarative,
many of the usual :func:`.mapper` arguments are handled
by the Declarative extension itself, including ``class_``,
``local_table``, ``properties``, and ``inherits``.
Other options are passed to :func:`.mapper` using
the ``__mapper_args__`` class variable::
class MyClass(Base):
__tablename__ = 'my_table'
id = Column(Integer, primary_key=True)
type = Column(String(50))
alt = Column("some_alt", Integer)
__mapper_args__ = {
'polymorphic_on' : type
}
Explicit use of :func:`.mapper`
is often referred to as *classical mapping*. The above
declarative example is equivalent in classical form to::
my_table = Table("my_table", metadata,
Column('id', Integer, primary_key=True),
Column('type', String(50)),
Column("some_alt", Integer)
)
class MyClass(object):
pass
mapper(MyClass, my_table,
polymorphic_on=my_table.c.type,
properties={
'alt':my_table.c.some_alt
})
.. seealso::
:ref:`classical_mapping` - discussion of direct usage of
:func:`.mapper`
:param class\_: The class to be mapped. When using Declarative,
this argument is automatically passed as the declared class
itself.
:param local_table: The :class:`.Table` or other selectable
to which the class is mapped. May be ``None`` if
this mapper inherits from another mapper using single-table
inheritance. When using Declarative, this argument is
automatically passed by the extension, based on what
is configured via the ``__table__`` argument or via the
:class:`.Table` produced as a result of the ``__tablename__``
and :class:`.Column` arguments present.
:param always_refresh: If True, all query operations for this mapped
class will overwrite all data within object instances that already
exist within the session, erasing any in-memory changes with
whatever information was loaded from the database. Usage of this
flag is highly discouraged; as an alternative, see the method
:meth:`.Query.populate_existing`.
:param allow_partial_pks: Defaults to True. Indicates that a
composite primary key with some NULL values should be considered as
possibly existing within the database. This affects whether a
mapper will assign an incoming row to an existing identity, as well
as if :meth:`.Session.merge` will check the database first for a
particular primary key value. A "partial primary key" can occur if
one has mapped to an OUTER JOIN, for example.
:param batch: Defaults to ``True``, indicating that save operations
of multiple entities can be batched together for efficiency.
Setting to False indicates
that an instance will be fully saved before saving the next
instance. This is used in the extremely rare case that a
:class:`.MapperEvents` listener requires being called
in between individual row persistence operations.
:param column_prefix: A string which will be prepended
to the mapped attribute name when :class:`.Column`
objects are automatically assigned as attributes to the
mapped class. Does not affect explicitly specified
column-based properties.
See the section :ref:`column_prefix` for an example.
:param concrete: If True, indicates this mapper should use concrete
table inheritance with its parent mapper.
See the section :ref:`concrete_inheritance` for an example.
:param confirm_deleted_rows: defaults to True; when a DELETE occurs
          of one or more rows based on specific primary keys, a warning is
emitted when the number of rows matched does not equal the number
of rows expected. This parameter may be set to False to handle the case
where database ON DELETE CASCADE rules may be deleting some of those
rows automatically. The warning may be changed to an exception
in a future release.
.. versionadded:: 0.9.4 - added :paramref:`.mapper.confirm_deleted_rows`
as well as conditional matched row checking on delete.
:param eager_defaults: if True, the ORM will immediately fetch the
value of server-generated default values after an INSERT or UPDATE,
rather than leaving them as expired to be fetched on next access.
This can be used for event schemes where the server-generated values
are needed immediately before the flush completes. By default,
this scheme will emit an individual ``SELECT`` statement per row
inserted or updated, which note can add significant performance
overhead. However, if the
target database supports :term:`RETURNING`, the default values will be
returned inline with the INSERT or UPDATE statement, which can
greatly enhance performance for an application that needs frequent
access to just-generated server defaults.
.. versionchanged:: 0.9.0 The ``eager_defaults`` option can now
make use of :term:`RETURNING` for backends which support it.
:param exclude_properties: A list or set of string column names to
be excluded from mapping.
See :ref:`include_exclude_cols` for an example.
:param extension: A :class:`.MapperExtension` instance or
list of :class:`.MapperExtension` instances which will be applied
to all operations by this :class:`.Mapper`. **Deprecated.**
Please see :class:`.MapperEvents`.
:param include_properties: An inclusive list or set of string column
names to map.
See :ref:`include_exclude_cols` for an example.
:param inherits: A mapped class or the corresponding :class:`.Mapper`
of one indicating a superclass to which this :class:`.Mapper`
should *inherit* from. The mapped class here must be a subclass
of the other mapper's class. When using Declarative, this argument
is passed automatically as a result of the natural class
hierarchy of the declared classes.
.. seealso::
:ref:`inheritance_toplevel`
:param inherit_condition: For joined table inheritance, a SQL
expression which will
define how the two tables are joined; defaults to a natural join
between the two tables.
:param inherit_foreign_keys: When ``inherit_condition`` is used and the
columns present are missing a :class:`.ForeignKey` configuration,
this parameter can be used to specify which columns are "foreign".
In most cases can be left as ``None``.
:param legacy_is_orphan: Boolean, defaults to ``False``.
When ``True``, specifies that "legacy" orphan consideration
is to be applied to objects mapped by this mapper, which means
that a pending (that is, not persistent) object is auto-expunged
from an owning :class:`.Session` only when it is de-associated
from *all* parents that specify a ``delete-orphan`` cascade towards
this mapper. The new default behavior is that the object is auto-expunged
when it is de-associated with *any* of its parents that specify
``delete-orphan`` cascade. This behavior is more consistent with
that of a persistent object, and allows behavior to be consistent
in more scenarios independently of whether or not an orphanable
object has been flushed yet or not.
See the change note and example at :ref:`legacy_is_orphan_addition`
for more detail on this change.
.. versionadded:: 0.8 - the consideration of a pending object as
an "orphan" has been modified to more closely match the
behavior as that of persistent objects, which is that the object
is expunged from the :class:`.Session` as soon as it is
de-associated from any of its orphan-enabled parents. Previously,
the pending object would be expunged only if de-associated
from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
is added to :func:`.orm.mapper` which re-establishes the
legacy behavior.
:param non_primary: Specify that this :class:`.Mapper` is in addition
to the "primary" mapper, that is, the one used for persistence.
The :class:`.Mapper` created here may be used for ad-hoc
mapping of the class to an alternate selectable, for loading
only.
:paramref:`.Mapper.non_primary` is not an often used option, but
is useful in some specific :func:`.relationship` cases.
.. seealso::
:ref:`relationship_non_primary_mapper`
:param order_by: A single :class:`.Column` or list of :class:`.Column`
objects for which selection operations should use as the default
ordering for entities. By default mappers have no pre-defined
ordering.
:param passive_updates: Indicates UPDATE behavior of foreign key
columns when a primary key column changes on a joined-table
inheritance mapping. Defaults to ``True``.
When True, it is assumed that ON UPDATE CASCADE is configured on
the foreign key in the database, and that the database will handle
propagation of an UPDATE from a source column to dependent columns
on joined-table rows.
When False, it is assumed that the database does not enforce
referential integrity and will not be issuing its own CASCADE
operation for an update. The unit of work process will
emit an UPDATE statement for the dependent columns during a
primary key change.
.. seealso::
:ref:`passive_updates` - description of a similar feature as
used with :func:`.relationship`
:param polymorphic_on: Specifies the column, attribute, or
SQL expression used to determine the target class for an
incoming row, when inheriting classes are present.
This value is commonly a :class:`.Column` object that's
present in the mapped :class:`.Table`::
class Employee(Base):
__tablename__ = 'employee'
id = Column(Integer, primary_key=True)
discriminator = Column(String(50))
__mapper_args__ = {
"polymorphic_on":discriminator,
"polymorphic_identity":"employee"
}
It may also be specified
as a SQL expression, as in this example where we
use the :func:`.case` construct to provide a conditional
approach::
class Employee(Base):
__tablename__ = 'employee'
id = Column(Integer, primary_key=True)
discriminator = Column(String(50))
__mapper_args__ = {
"polymorphic_on":case([
(discriminator == "EN", "engineer"),
(discriminator == "MA", "manager"),
], else_="employee"),
"polymorphic_identity":"employee"
}
It may also refer to any attribute
configured with :func:`.column_property`, or to the
string name of one::
class Employee(Base):
__tablename__ = 'employee'
id = Column(Integer, primary_key=True)
discriminator = Column(String(50))
employee_type = column_property(
case([
(discriminator == "EN", "engineer"),
(discriminator == "MA", "manager"),
], else_="employee")
)
__mapper_args__ = {
"polymorphic_on":employee_type,
"polymorphic_identity":"employee"
}
.. versionchanged:: 0.7.4
``polymorphic_on`` may be specified as a SQL expression,
or refer to any attribute configured with
:func:`.column_property`, or to the string name of one.
When setting ``polymorphic_on`` to reference an
attribute or expression that's not present in the
locally mapped :class:`.Table`, yet the value
of the discriminator should be persisted to the database,
the value of the
discriminator is not automatically set on new
instances; this must be handled by the user,
either through manual means or via event listeners.
A typical approach to establishing such a listener
looks like::
from sqlalchemy import event
from sqlalchemy.orm import object_mapper
@event.listens_for(Employee, "init", propagate=True)
def set_identity(instance, *arg, **kw):
mapper = object_mapper(instance)
instance.discriminator = mapper.polymorphic_identity
Where above, we assign the value of ``polymorphic_identity``
for the mapped class to the ``discriminator`` attribute,
thus persisting the value to the ``discriminator`` column
in the database.
.. seealso::
:ref:`inheritance_toplevel`
:param polymorphic_identity: Specifies the value which
identifies this particular class as returned by the
column expression referred to by the ``polymorphic_on``
setting. As rows are received, the value corresponding
to the ``polymorphic_on`` column expression is compared
to this value, indicating which subclass should
be used for the newly reconstructed object.
:param properties: A dictionary mapping the string names of object
attributes to :class:`.MapperProperty` instances, which define the
persistence behavior of that attribute. Note that :class:`.Column`
objects present in
the mapped :class:`.Table` are automatically placed into
``ColumnProperty`` instances upon mapping, unless overridden.
When using Declarative, this argument is passed automatically,
based on all those :class:`.MapperProperty` instances declared
in the declared class body.
:param primary_key: A list of :class:`.Column` objects which define the
primary key to be used against this mapper's selectable unit.
This is normally simply the primary key of the ``local_table``, but
can be overridden here.
:param version_id_col: A :class:`.Column`
that will be used to keep a running version id of rows
in the table. This is used to detect concurrent updates or
the presence of stale data in a flush. If an UPDATE statement
emitted by the flush does not match the last known
version id, a
:class:`~sqlalchemy.orm.exc.StaleDataError` exception is
thrown.
By default, the column must be of :class:`.Integer` type,
unless ``version_id_generator`` specifies an alternative version
generator.
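A typical declarative configuration (a minimal sketch)::

    class Widget(Base):
        __tablename__ = 'widget'
        id = Column(Integer, primary_key=True)
        version_id = Column(Integer, nullable=False)
        __mapper_args__ = {
            "version_id_col": version_id
        }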
.. seealso::
:ref:`mapper_version_counter` - discussion of version counting
and rationale.
:param version_id_generator: Define how new version ids should
be generated. Defaults to ``None``, which indicates that
a simple integer counting scheme be employed. To provide a custom
versioning scheme, provide a callable function of the form::
def generate_version(version):
return next_version
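For example, a scheme using ``uuid`` values rather than integers
might look like the following sketch; any callable which receives
the previous version value and returns the new one is acceptable::

    import uuid

    class SomeClass(Base):
        __tablename__ = 'some_table'
        id = Column(Integer, primary_key=True)
        version_uuid = Column(String(32))
        __mapper_args__ = {
            'version_id_col': version_uuid,
            'version_id_generator': lambda version: uuid.uuid4().hex
        }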
Alternatively, server-side versioning functions such as triggers,
or programmatic versioning schemes outside of the version id generator
may be used, by specifying the value ``False``.
Please see :ref:`server_side_version_counter` for a discussion
of important points when using this option.
.. versionadded:: 0.9.0 ``version_id_generator`` supports server-side
version number generation.
.. seealso::
:ref:`custom_version_counter`
:ref:`server_side_version_counter`
:param with_polymorphic: A tuple in the form ``(<classes>,
<selectable>)`` indicating the default style of "polymorphic"
loading, that is, which tables are queried at once. <classes> is
any single or list of mappers and/or classes indicating the
inherited classes that should be loaded at once. The special value
``'*'`` may be used to indicate all descending classes should be
loaded immediately. The second tuple argument <selectable>
indicates a selectable that will be used to query for multiple
classes.
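For example, a base class can request that it and all of its
subclasses be loaded at once (a minimal sketch)::

    class Employee(Base):
        __tablename__ = 'employee'
        id = Column(Integer, primary_key=True)
        type = Column(String(50))
        __mapper_args__ = {
            "polymorphic_on": type,
            "polymorphic_identity": "employee",
            "with_polymorphic": "*"
        }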
.. seealso::
:ref:`with_polymorphic` - discussion of polymorphic querying techniques.
"""
self.class_ = util.assert_arg_type(class_, type, 'class_')
self.class_manager = None
self._primary_key_argument = util.to_list(primary_key)
self.non_primary = non_primary
if order_by is not False:
self.order_by = util.to_list(order_by)
else:
self.order_by = order_by
self.always_refresh = always_refresh
if isinstance(version_id_col, MapperProperty):
self.version_id_prop = version_id_col
self.version_id_col = None
else:
self.version_id_col = version_id_col
if version_id_generator is False:
self.version_id_generator = False
elif version_id_generator is None:
self.version_id_generator = lambda x: (x or 0) + 1
else:
self.version_id_generator = version_id_generator
self.concrete = concrete
self.single = False
self.inherits = inherits
self.local_table = local_table
self.inherit_condition = inherit_condition
self.inherit_foreign_keys = inherit_foreign_keys
self._init_properties = properties or {}
self._delete_orphans = []
self.batch = batch
self.eager_defaults = eager_defaults
self.column_prefix = column_prefix
self.polymorphic_on = expression._clause_element_as_expr(
polymorphic_on)
self._dependency_processors = []
self.validators = util.immutabledict()
self.passive_updates = passive_updates
self.legacy_is_orphan = legacy_is_orphan
self._clause_adapter = None
self._requires_row_aliasing = False
self._inherits_equated_pairs = None
self._memoized_values = {}
self._compiled_cache_size = _compiled_cache_size
self._reconstructor = None
self._deprecated_extensions = util.to_list(extension or [])
self.allow_partial_pks = allow_partial_pks
if self.inherits and not self.concrete:
self.confirm_deleted_rows = False
else:
self.confirm_deleted_rows = confirm_deleted_rows
self._set_with_polymorphic(with_polymorphic)
if isinstance(self.local_table, expression.SelectBase):
raise sa_exc.InvalidRequestError(
"When mapping against a select() construct, map against "
"an alias() of the construct instead."
"This because several databases don't allow a "
"SELECT from a subquery that does not have an alias."
)
if self.with_polymorphic and \
isinstance(self.with_polymorphic[1],
expression.SelectBase):
self.with_polymorphic = (self.with_polymorphic[0],
self.with_polymorphic[1].alias())
# our 'polymorphic identity', a string name that when located in a
# result set row indicates this Mapper should be used to construct
# the object instance for that row.
self.polymorphic_identity = polymorphic_identity
# a dictionary of 'polymorphic identity' names, associating those
# names with Mappers that will be used to construct object instances
# upon a select operation.
if _polymorphic_map is None:
self.polymorphic_map = {}
else:
self.polymorphic_map = _polymorphic_map
if include_properties is not None:
self.include_properties = util.to_set(include_properties)
else:
self.include_properties = None
if exclude_properties:
self.exclude_properties = util.to_set(exclude_properties)
else:
self.exclude_properties = None
self.configured = False
# prevent this mapper from being constructed
# while a configure_mappers() is occurring (and defer a
# configure_mappers() until construction succeeds)
_CONFIGURE_MUTEX.acquire()
try:
self.dispatch._events._new_mapper_instance(class_, self)
self._configure_inheritance()
self._configure_legacy_instrument_class()
self._configure_class_instrumentation()
self._configure_listeners()
self._configure_properties()
self._configure_polymorphic_setter()
self._configure_pks()
Mapper._new_mappers = True
self._log("constructed")
self._expire_memoizations()
finally:
_CONFIGURE_MUTEX.release()
# major attributes initialized at the classlevel so that
# they can be Sphinx-documented.
is_mapper = True
"""Part of the inspection API."""
@property
def mapper(self):
"""Part of the inspection API.
Returns self.
"""
return self
@property
def entity(self):
"""Part of the inspection API.
Returns self.class\_.
"""
return self.class_
local_table = None
"""The :class:`.Selectable` which this :class:`.Mapper` manages.
Typically is an instance of :class:`.Table` or :class:`.Alias`.
May also be ``None``.
The "local" table is the
selectable that the :class:`.Mapper` is directly responsible for
managing from an attribute access and flush perspective. For
non-inheriting mappers, the local table is the same as the
"mapped" table. For joined-table inheritance mappers, local_table
will be the particular sub-table of the overall "join" which
this :class:`.Mapper` represents. If this mapper is a
single-table inheriting mapper, local_table will be ``None``.
.. seealso::
:attr:`~.Mapper.mapped_table`.
"""
mapped_table = None
"""The :class:`.Selectable` to which this :class:`.Mapper` is mapped.
Typically an instance of :class:`.Table`, :class:`.Join`, or
:class:`.Alias`.
The "mapped" table is the selectable that
the mapper selects from during queries. For non-inheriting
mappers, the mapped table is the same as the "local" table.
For joined-table inheritance mappers, mapped_table references the
full :class:`.Join` representing full rows for this particular
subclass. For single-table inheritance mappers, mapped_table
references the base table.
.. seealso::
:attr:`~.Mapper.local_table`.
"""
inherits = None
"""References the :class:`.Mapper` which this :class:`.Mapper`
inherits from, if any.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
configured = None
"""Represent ``True`` if this :class:`.Mapper` has been configured.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
.. seealso::
:func:`.configure_mappers`.
"""
concrete = None
"""Represent ``True`` if this :class:`.Mapper` is a concrete
inheritance mapper.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
tables = None
"""An iterable containing the collection of :class:`.Table` objects
which this :class:`.Mapper` is aware of.
If the mapper is mapped to a :class:`.Join`, or an :class:`.Alias`
representing a :class:`.Select`, the individual :class:`.Table`
objects that comprise the full construct will be represented here.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
primary_key = None
"""An iterable containing the collection of :class:`.Column` objects
which comprise the 'primary key' of the mapped table, from the
perspective of this :class:`.Mapper`.
This list is against the selectable in :attr:`~.Mapper.mapped_table`. In
the case of inheriting mappers, some columns may be managed by a
superclass mapper. For example, in the case of a :class:`.Join`, the
primary key is determined by all of the primary key columns across all
tables referenced by the :class:`.Join`.
The list is also not necessarily the same as the primary key column
collection associated with the underlying tables; the :class:`.Mapper`
features a ``primary_key`` argument that can override what the
:class:`.Mapper` considers as primary key columns.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
class_ = None
"""The Python class which this :class:`.Mapper` maps.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
class_manager = None
"""The :class:`.ClassManager` which maintains event listeners
and class-bound descriptors for this :class:`.Mapper`.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
single = None
"""Represent ``True`` if this :class:`.Mapper` is a single table
inheritance mapper.
:attr:`~.Mapper.local_table` will be ``None`` if this flag is set.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
non_primary = None
"""Represent ``True`` if this :class:`.Mapper` is a "non-primary"
mapper, e.g. a mapper that is used only to select rows but not for
persistence management.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
polymorphic_on = None
"""The :class:`.Column` or SQL expression specified as the
``polymorphic_on`` argument
for this :class:`.Mapper`, within an inheritance scenario.
This attribute is normally a :class:`.Column` instance but
may also be an expression, such as one derived from
:func:`.cast`.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
polymorphic_map = None
"""A mapping of "polymorphic identity" identifiers mapped to
:class:`.Mapper` instances, within an inheritance scenario.
The identifiers can be of any type which is comparable to the
type of column represented by :attr:`~.Mapper.polymorphic_on`.
An inheritance chain of mappers will all reference the same
polymorphic map object. The object is used to correlate incoming
result rows to target mappers.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
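For example, given the ``Employee``/``Engineer`` hierarchy from the
constructor documentation, the shared map would resemble the
following; the contents here are illustrative only::

    {
        "employee": <Mapper at 0x...; Employee>,
        "engineer": <Mapper at 0x...; Engineer>
    }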
"""
polymorphic_identity = None
"""Represent an identifier which is matched against the
:attr:`~.Mapper.polymorphic_on` column during result row loading.
Used only with inheritance, this object can be of any type which is
comparable to the type of column represented by
:attr:`~.Mapper.polymorphic_on`.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
base_mapper = None
"""The base-most :class:`.Mapper` in an inheritance chain.
In a non-inheriting scenario, this attribute will always be this
:class:`.Mapper`. In an inheritance scenario, it references
the :class:`.Mapper` which is parent to all other :class:`.Mapper`
objects in the inheritance chain.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
columns = None
"""A collection of :class:`.Column` or other scalar expression
objects maintained by this :class:`.Mapper`.
The collection behaves the same as that of the ``c`` attribute on
any :class:`.Table` object, except that only those columns included in
this mapping are present, and are keyed based on the attribute name
defined in the mapping, not necessarily the ``key`` attribute of the
:class:`.Column` itself. Additionally, scalar expressions mapped
by :func:`.column_property` are also present here.
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
"""
validators = None
"""An immutable dictionary of attributes which have been decorated
using the :func:`~.orm.validates` decorator.
The dictionary contains string attribute names as keys
mapped to the actual validation method.
"""
c = None
"""A synonym for :attr:`~.Mapper.columns`."""
@util.memoized_property
def _path_registry(self):
return PathRegistry.per_mapper(self)
def _configure_inheritance(self):
"""Configure settings related to inherting and/or inherited mappers
being present."""
# a set of all mappers which inherit from this one.
self._inheriting_mappers = util.WeakSequence()
if self.inherits:
if isinstance(self.inherits, type):
self.inherits = class_mapper(self.inherits, configure=False)
if not issubclass(self.class_, self.inherits.class_):
raise sa_exc.ArgumentError(
"Class '%s' does not inherit from '%s'" %
(self.class_.__name__, self.inherits.class_.__name__))
if self.non_primary != self.inherits.non_primary:
np = not self.non_primary and "primary" or "non-primary"
raise sa_exc.ArgumentError(
"Inheritance of %s mapper for class '%s' is "
"only allowed from a %s mapper" %
(np, self.class_.__name__, np))
# inherit_condition is optional.
if self.local_table is None:
self.local_table = self.inherits.local_table
self.mapped_table = self.inherits.mapped_table
self.single = True
elif self.local_table is not self.inherits.local_table:
if self.concrete:
self.mapped_table = self.local_table
for mapper in self.iterate_to_root():
if mapper.polymorphic_on is not None:
mapper._requires_row_aliasing = True
else:
if self.inherit_condition is None:
# figure out inherit condition from our table to the
# immediate table of the inherited mapper, not its
# full table which could pull in other stuff we don't
# want (allows test/inheritance.InheritTest4 to pass)
self.inherit_condition = sql_util.join_condition(
self.inherits.local_table,
self.local_table)
self.mapped_table = sql.join(
self.inherits.mapped_table,
self.local_table,
self.inherit_condition)
fks = util.to_set(self.inherit_foreign_keys)
self._inherits_equated_pairs = sql_util.criterion_as_pairs(
self.mapped_table.onclause,
consider_as_foreign_keys=fks)
else:
self.mapped_table = self.local_table
if self.polymorphic_identity is not None and not self.concrete:
self._identity_class = self.inherits._identity_class
else:
self._identity_class = self.class_
if self.version_id_col is None:
self.version_id_col = self.inherits.version_id_col
self.version_id_generator = self.inherits.version_id_generator
elif self.inherits.version_id_col is not None and \
self.version_id_col is not self.inherits.version_id_col:
util.warn(
"Inheriting version_id_col '%s' does not match inherited "
"version_id_col '%s' and will not automatically populate "
"the inherited versioning column. "
"version_id_col should only be specified on "
"the base-most mapper that includes versioning." %
(self.version_id_col.description,
self.inherits.version_id_col.description)
)
if self.order_by is False and \
not self.concrete and \
self.inherits.order_by is not False:
self.order_by = self.inherits.order_by
self.polymorphic_map = self.inherits.polymorphic_map
self.batch = self.inherits.batch
self.inherits._inheriting_mappers.append(self)
self.base_mapper = self.inherits.base_mapper
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
if self.polymorphic_identity is not None:
self.polymorphic_map[self.polymorphic_identity] = self
else:
self._all_tables = set()
self.base_mapper = self
self.mapped_table = self.local_table
if self.polymorphic_identity is not None:
self.polymorphic_map[self.polymorphic_identity] = self
self._identity_class = self.class_
if self.mapped_table is None:
raise sa_exc.ArgumentError(
"Mapper '%s' does not have a mapped_table specified."
% self)
def _set_with_polymorphic(self, with_polymorphic):
if with_polymorphic == '*':
self.with_polymorphic = ('*', None)
elif isinstance(with_polymorphic, (tuple, list)):
if isinstance(with_polymorphic[0], util.string_types + (tuple, list)):
self.with_polymorphic = with_polymorphic
else:
self.with_polymorphic = (with_polymorphic, None)
elif with_polymorphic is not None:
raise sa_exc.ArgumentError("Invalid setting for with_polymorphic")
else:
self.with_polymorphic = None
if isinstance(self.local_table, expression.SelectBase):
raise sa_exc.InvalidRequestError(
"When mapping against a select() construct, map against "
"an alias() of the construct instead."
"This because several databases don't allow a "
"SELECT from a subquery that does not have an alias."
)
if self.with_polymorphic and \
isinstance(self.with_polymorphic[1],
expression.SelectBase):
self.with_polymorphic = (self.with_polymorphic[0],
self.with_polymorphic[1].alias())
if self.configured:
self._expire_memoizations()
def _set_concrete_base(self, mapper):
"""Set the given :class:`.Mapper` as the 'inherits' for this
:class:`.Mapper`, assuming this :class:`.Mapper` is concrete
and does not already have an inherits."""
assert self.concrete
assert not self.inherits
assert isinstance(mapper, Mapper)
self.inherits = mapper
self.inherits.polymorphic_map.update(self.polymorphic_map)
self.polymorphic_map = self.inherits.polymorphic_map
for mapper in self.iterate_to_root():
if mapper.polymorphic_on is not None:
mapper._requires_row_aliasing = True
self.batch = self.inherits.batch
for mp in self.self_and_descendants:
mp.base_mapper = self.inherits.base_mapper
self.inherits._inheriting_mappers.append(self)
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
for key, prop in mapper._props.items():
if key not in self._props and \
not self._should_exclude(key, key, local=False,
column=None):
self._adapt_inherited_property(key, prop, False)
def _set_polymorphic_on(self, polymorphic_on):
self.polymorphic_on = polymorphic_on
self._configure_polymorphic_setter(True)
def _configure_legacy_instrument_class(self):
if self.inherits:
self.dispatch._update(self.inherits.dispatch)
super_extensions = set(
chain(*[m._deprecated_extensions
for m in self.inherits.iterate_to_root()]))
else:
super_extensions = set()
for ext in self._deprecated_extensions:
if ext not in super_extensions:
ext._adapt_instrument_class(self, ext)
def _configure_listeners(self):
if self.inherits:
super_extensions = set(
chain(*[m._deprecated_extensions
for m in self.inherits.iterate_to_root()]))
else:
super_extensions = set()
for ext in self._deprecated_extensions:
if ext not in super_extensions:
ext._adapt_listener(self, ext)
def _configure_class_instrumentation(self):
"""If this mapper is to be a primary mapper (i.e. the
non_primary flag is not set), associate this Mapper with the
given class_ and entity name.
Subsequent calls to ``class_mapper()`` for the class_/entity
name combination will return this mapper. Also decorate the
`__init__` method on the mapped class to include optional
auto-session attachment logic.
"""
manager = attributes.manager_of_class(self.class_)
if self.non_primary:
if not manager or not manager.is_mapped:
raise sa_exc.InvalidRequestError(
"Class %s has no primary mapper configured. Configure "
"a primary mapper first before setting up a non primary "
"Mapper." % self.class_)
self.class_manager = manager
self._identity_class = manager.mapper._identity_class
_mapper_registry[self] = True
return
if manager is not None:
assert manager.class_ is self.class_
if manager.is_mapped:
raise sa_exc.ArgumentError(
"Class '%s' already has a primary mapper defined. "
"Use non_primary=True to "
"create a non primary Mapper. clear_mappers() will "
"remove *all* current mappers from all classes." %
self.class_)
#else:
# a ClassManager may already exist as
# ClassManager.instrument_attribute() creates
# new managers for each subclass if they don't yet exist.
_mapper_registry[self] = True
self.dispatch.instrument_class(self, self.class_)
if manager is None:
manager = instrumentation.register_class(self.class_)
self.class_manager = manager
manager.mapper = self
manager.deferred_scalar_loader = util.partial(
loading.load_scalar_attributes, self)
# The remaining members can be added by any mapper,
# e_name None or not.
if manager.info.get(_INSTRUMENTOR, False):
return
event.listen(manager, 'first_init', _event_on_first_init, raw=True)
event.listen(manager, 'init', _event_on_init, raw=True)
event.listen(manager, 'resurrect', _event_on_resurrect, raw=True)
for key, method in util.iterate_attributes(self.class_):
if isinstance(method, types.FunctionType):
if hasattr(method, '__sa_reconstructor__'):
self._reconstructor = method
event.listen(manager, 'load', _event_on_load, raw=True)
elif hasattr(method, '__sa_validators__'):
validation_opts = method.__sa_validation_opts__
for name in method.__sa_validators__:
self.validators = self.validators.union(
{name: (method, validation_opts)}
)
manager.info[_INSTRUMENTOR] = self
@classmethod
def _configure_all(cls):
"""Class-level path to the :func:`.configure_mappers` call.
"""
configure_mappers()
def dispose(self):
# Disable any attribute-based compilation.
self.configured = True
if hasattr(self, '_configure_failed'):
del self._configure_failed
if not self.non_primary and \
self.class_manager is not None and \
self.class_manager.is_mapped and \
self.class_manager.mapper is self:
instrumentation.unregister_class(self.class_)
def _configure_pks(self):
self.tables = sql_util.find_tables(self.mapped_table)
self._pks_by_table = {}
self._cols_by_table = {}
all_cols = util.column_set(chain(*[
col.proxy_set for col in
self._columntoproperty]))
pk_cols = util.column_set(c for c in all_cols if c.primary_key)
# identify primary key columns which are also mapped by this mapper.
tables = set(self.tables + [self.mapped_table])
self._all_tables.update(tables)
for t in tables:
if t.primary_key and pk_cols.issuperset(t.primary_key):
# ordering is important since it determines the ordering of
# mapper.primary_key (and therefore query.get())
self._pks_by_table[t] = \
util.ordered_column_set(t.primary_key).\
intersection(pk_cols)
self._cols_by_table[t] = \
util.ordered_column_set(t.c).\
intersection(all_cols)
# determine cols that aren't expressed within our tables; mark these
# as "read only" properties which are refreshed upon INSERT/UPDATE
self._readonly_props = set(
self._columntoproperty[col]
for col in self._columntoproperty
if not hasattr(col, 'table') or
col.table not in self._cols_by_table)
# if explicit PK argument sent, add those columns to the
# primary key mappings
if self._primary_key_argument:
for k in self._primary_key_argument:
if k.table not in self._pks_by_table:
self._pks_by_table[k.table] = util.OrderedSet()
self._pks_by_table[k.table].add(k)
# otherwise, see that we got a full PK for the mapped table
elif self.mapped_table not in self._pks_by_table or \
len(self._pks_by_table[self.mapped_table]) == 0:
raise sa_exc.ArgumentError(
"Mapper %s could not assemble any primary "
"key columns for mapped table '%s'" %
(self, self.mapped_table.description))
elif self.local_table not in self._pks_by_table and \
isinstance(self.local_table, schema.Table):
util.warn("Could not assemble any primary "
"keys for locally mapped table '%s' - "
"no rows will be persisted in this Table."
% self.local_table.description)
if self.inherits and \
not self.concrete and \
not self._primary_key_argument:
# if inheriting, the "primary key" for this mapper is
# that of the inheriting (unless concrete or explicit)
self.primary_key = self.inherits.primary_key
else:
# determine primary key from argument or mapped_table pks -
# reduce to the minimal set of columns
if self._primary_key_argument:
primary_key = sql_util.reduce_columns(
[self.mapped_table.corresponding_column(c) for c in
self._primary_key_argument],
ignore_nonexistent_tables=True)
else:
primary_key = sql_util.reduce_columns(
self._pks_by_table[self.mapped_table],
ignore_nonexistent_tables=True)
if len(primary_key) == 0:
raise sa_exc.ArgumentError(
"Mapper %s could not assemble any primary "
"key columns for mapped table '%s'" %
(self, self.mapped_table.description))
self.primary_key = tuple(primary_key)
self._log("Identified primary key columns: %s", primary_key)
def _configure_properties(self):
# Column and other ClauseElement objects which are mapped
self.columns = self.c = util.OrderedProperties()
# object attribute names mapped to MapperProperty objects
self._props = util.OrderedDict()
# table columns mapped to lists of MapperProperty objects
# using a list allows a single column to be defined as
# populating multiple object attributes
self._columntoproperty = _ColumnMapping(self)
# load custom properties
if self._init_properties:
for key, prop in self._init_properties.items():
self._configure_property(key, prop, False)
# pull properties from the inherited mapper if any.
if self.inherits:
for key, prop in self.inherits._props.items():
if key not in self._props and \
not self._should_exclude(key, key, local=False,
column=None):
self._adapt_inherited_property(key, prop, False)
# create properties for each column in the mapped table,
# for those columns which don't already map to a property
for column in self.mapped_table.columns:
if column in self._columntoproperty:
continue
column_key = (self.column_prefix or '') + column.key
if self._should_exclude(
column.key, column_key,
local=self.local_table.c.contains_column(column),
column=column
):
continue
# adjust the "key" used for this column to that
# of the inheriting mapper
for mapper in self.iterate_to_root():
if column in mapper._columntoproperty:
column_key = mapper._columntoproperty[column].key
self._configure_property(column_key,
column,
init=False,
setparent=True)
def _configure_polymorphic_setter(self, init=False):
"""Configure an attribute on the mapper representing the
'polymorphic_on' column, if applicable, and not
already generated by _configure_properties (which is typical).
Also create a setter function which will assign this
attribute to the value of the 'polymorphic_identity'
upon instance construction, also if applicable. This
routine will run when an instance is created.
"""
setter = False
if self.polymorphic_on is not None:
setter = True
if isinstance(self.polymorphic_on, util.string_types):
# polymorphic_on specified as a string - link
# it to a mapped ColumnProperty
try:
self.polymorphic_on = self._props[self.polymorphic_on]
except KeyError:
raise sa_exc.ArgumentError(
"Can't determine polymorphic_on "
"value '%s' - no attribute is "
"mapped to this name." % self.polymorphic_on)
if self.polymorphic_on in self._columntoproperty:
# polymorphic_on is a column that is already mapped
# to a ColumnProperty
prop = self._columntoproperty[self.polymorphic_on]
polymorphic_key = prop.key
self.polymorphic_on = prop.columns[0]
elif isinstance(self.polymorphic_on, MapperProperty):
# polymorphic_on is directly a MapperProperty,
# ensure it's a ColumnProperty
if not isinstance(self.polymorphic_on,
properties.ColumnProperty):
raise sa_exc.ArgumentError(
"Only direct column-mapped "
"property or SQL expression "
"can be passed for polymorphic_on")
prop = self.polymorphic_on
self.polymorphic_on = prop.columns[0]
polymorphic_key = prop.key
elif not expression._is_column(self.polymorphic_on):
# polymorphic_on is not a Column and not a ColumnProperty;
# not supported right now.
raise sa_exc.ArgumentError(
"Only direct column-mapped "
"property or SQL expression "
"can be passed for polymorphic_on"
)
else:
# polymorphic_on is a Column or SQL expression and
# doesn't appear to be mapped. this means it can be 1.
# only present in the with_polymorphic selectable or
# 2. a totally standalone SQL expression which we'd
# hope is compatible with this mapper's mapped_table
col = self.mapped_table.corresponding_column(
self.polymorphic_on)
if col is None:
# polymorphic_on doesn't derive from any
# column/expression that's present in the mapped
# table. we will make a "hidden" ColumnProperty
# for it. Just check that if it's directly a
# schema.Column and we have with_polymorphic, it's
# likely a user error if the schema.Column isn't
# represented somehow in either mapped_table or
# with_polymorphic. Otherwise as of 0.7.4 we
# just go with it and assume the user wants it
# that way (i.e. a CASE statement)
setter = False
instrument = False
col = self.polymorphic_on
if isinstance(col, schema.Column) and (
        self.with_polymorphic is None or
        self.with_polymorphic[1].corresponding_column(col) is None):
raise sa_exc.InvalidRequestError(
"Could not map polymorphic_on column "
"'%s' to the mapped table - polymorphic "
"loads will not function properly"
% col.description)
else:
# column/expression that polymorphic_on derives from
# is present in our mapped table
# and is probably mapped, but polymorphic_on itself
# is not. This happens when
# the polymorphic_on is only directly present in the
# with_polymorphic selectable, as when using
# polymorphic_union.
# we'll make a separate ColumnProperty for it.
instrument = True
key = getattr(col, 'key', None)
if key:
if self._should_exclude(col.key, col.key, False, col):
raise sa_exc.InvalidRequestError(
"Cannot exclude or override the "
"discriminator column %r" %
col.key)
else:
self.polymorphic_on = col = \
col.label("_sa_polymorphic_on")
key = col.key
self._configure_property(
key,
properties.ColumnProperty(col,
_instrument=instrument),
init=init, setparent=True)
polymorphic_key = key
else:
# no polymorphic_on was set.
# check inheriting mappers for one.
for mapper in self.iterate_to_root():
# determine if polymorphic_on of the parent
# should be propagated here. If the col
# is present in our mapped table, or if our mapped
# table is the same as the parent (i.e. single table
# inheritance), we can use it
if mapper.polymorphic_on is not None:
if self.mapped_table is mapper.mapped_table:
self.polymorphic_on = mapper.polymorphic_on
else:
self.polymorphic_on = \
self.mapped_table.corresponding_column(
mapper.polymorphic_on)
# we can use the parent mapper's _set_polymorphic_identity
# directly; it ensures the polymorphic_identity of the
# instance's mapper is used so is portable to subclasses.
if self.polymorphic_on is not None:
self._set_polymorphic_identity = \
mapper._set_polymorphic_identity
self._validate_polymorphic_identity = \
mapper._validate_polymorphic_identity
else:
self._set_polymorphic_identity = None
return
if setter:
def _set_polymorphic_identity(state):
dict_ = state.dict
state.get_impl(polymorphic_key).set(state, dict_,
state.manager.mapper.polymorphic_identity, None)
def _validate_polymorphic_identity(mapper, state, dict_):
if polymorphic_key in dict_ and \
dict_[polymorphic_key] not in \
mapper._acceptable_polymorphic_identities:
util.warn(
"Flushing object %s with "
"incompatible polymorphic identity %r; the "
"object may not refresh and/or load correctly" % (
state_str(state),
dict_[polymorphic_key]
)
)
self._set_polymorphic_identity = _set_polymorphic_identity
self._validate_polymorphic_identity = _validate_polymorphic_identity
else:
self._set_polymorphic_identity = None
_validate_polymorphic_identity = None
@_memoized_configured_property
def _version_id_prop(self):
if self.version_id_col is not None:
return self._columntoproperty[self.version_id_col]
else:
return None
@_memoized_configured_property
def _acceptable_polymorphic_identities(self):
identities = set()
stack = deque([self])
while stack:
item = stack.popleft()
if item.mapped_table is self.mapped_table:
identities.add(item.polymorphic_identity)
stack.extend(item._inheriting_mappers)
return identities
def _adapt_inherited_property(self, key, prop, init):
if not self.concrete:
self._configure_property(key, prop, init=False, setparent=False)
elif key not in self._props:
self._configure_property(
key,
properties.ConcreteInheritedProperty(),
init=init, setparent=True)
def _configure_property(self, key, prop, init=True, setparent=True):
self._log("_configure_property(%s, %s)", key, prop.__class__.__name__)
if not isinstance(prop, MapperProperty):
prop = self._property_from_column(key, prop)
if isinstance(prop, properties.ColumnProperty):
col = self.mapped_table.corresponding_column(prop.columns[0])
# if the column is not present in the mapped table,
# test if a column has been added after the fact to the
# parent table (or their parent, etc.) [ticket:1570]
if col is None and self.inherits:
path = [self]
for m in self.inherits.iterate_to_root():
col = m.local_table.corresponding_column(prop.columns[0])
if col is not None:
for m2 in path:
m2.mapped_table._reset_exported()
col = self.mapped_table.corresponding_column(
prop.columns[0])
break
path.append(m)
# subquery expression, column not present in the mapped
# selectable.
if col is None:
col = prop.columns[0]
# column is coming in after _readonly_props was
# initialized; check for 'readonly'
if hasattr(self, '_readonly_props') and \
(not hasattr(col, 'table') or
col.table not in self._cols_by_table):
self._readonly_props.add(prop)
else:
# if column is coming in after _cols_by_table was
# initialized, ensure the col is in the right set
if hasattr(self, '_cols_by_table') and \
col.table in self._cols_by_table and \
col not in self._cols_by_table[col.table]:
self._cols_by_table[col.table].add(col)
# if this properties.ColumnProperty represents the "polymorphic
# discriminator" column, mark it. We'll need this when rendering
# columns in SELECT statements.
if not hasattr(prop, '_is_polymorphic_discriminator'):
prop._is_polymorphic_discriminator = \
(col is self.polymorphic_on or
prop.columns[0] is self.polymorphic_on)
self.columns[key] = col
for col in prop.columns + prop._orig_columns:
for col in col.proxy_set:
self._columntoproperty[col] = prop
prop.key = key
if setparent:
prop.set_parent(self, init)
if key in self._props and \
getattr(self._props[key], '_mapped_by_synonym', False):
syn = self._props[key]._mapped_by_synonym
raise sa_exc.ArgumentError(
"Can't call map_column=True for synonym %r=%r, "
"a ColumnProperty already exists keyed to the name "
"%r for column %r" % (syn, key, key, syn)
)
if key in self._props and \
not isinstance(prop, properties.ColumnProperty) and \
not isinstance(self._props[key], properties.ColumnProperty):
util.warn("Property %s on %s being replaced with new "
"property %s; the old property will be discarded" % (
self._props[key],
self,
prop,
))
self._props[key] = prop
if not self.non_primary:
prop.instrument_class(self)
for mapper in self._inheriting_mappers:
mapper._adapt_inherited_property(key, prop, init)
if init:
prop.init()
prop.post_instrument_class(self)
if self.configured:
self._expire_memoizations()
def _property_from_column(self, key, prop):
"""generate/update a :class:`.ColumnProprerty` given a
:class:`.Column` object. """
# we were passed a Column or a list of Columns;
# generate a properties.ColumnProperty
columns = util.to_list(prop)
column = columns[0]
if not expression._is_column(column):
raise sa_exc.ArgumentError(
"%s=%r is not an instance of MapperProperty or Column"
% (key, prop))
prop = self._props.get(key, None)
if isinstance(prop, properties.ColumnProperty):
if prop.parent is self:
raise sa_exc.InvalidRequestError(
"Implicitly combining column %s with column "
"%s under attribute '%s'. Please configure one "
"or more attributes for these same-named columns "
"explicitly."
% (prop.columns[-1], column, key))
# existing properties.ColumnProperty from an inheriting
# mapper. make a copy and append our column to it
prop = prop.copy()
prop.columns.insert(0, column)
self._log("inserting column to existing list "
"in properties.ColumnProperty %s" % (key))
return prop
elif prop is None or isinstance(prop,
properties.ConcreteInheritedProperty):
mapped_column = []
for c in columns:
mc = self.mapped_table.corresponding_column(c)
if mc is None:
mc = self.local_table.corresponding_column(c)
if mc is not None:
# if the column is in the local table but not the
# mapped table, this corresponds to adding a
# column after the fact to the local table.
# [ticket:1523]
self.mapped_table._reset_exported()
mc = self.mapped_table.corresponding_column(c)
if mc is None:
raise sa_exc.ArgumentError(
"When configuring property '%s' on %s, "
"column '%s' is not represented in the mapper's "
"table. Use the `column_property()` function to "
"force this column to be mapped as a read-only "
"attribute." % (key, self, c))
mapped_column.append(mc)
return properties.ColumnProperty(*mapped_column)
else:
raise sa_exc.ArgumentError(
"WARNING: when configuring property '%s' on %s, "
"column '%s' conflicts with property '%r'. "
"To resolve this, map the column to the class under a "
"different name in the 'properties' dictionary. Or, "
"to remove all awareness of the column entirely "
"(including its availability as a foreign key), "
"use the 'include_properties' or 'exclude_properties' "
"mapper arguments to control specifically which table "
"columns get mapped." %
(key, self, column.key, prop))
def _post_configure_properties(self):
"""Call the ``init()`` method on all ``MapperProperties``
attached to this mapper.
This is a deferred configuration step which is intended
to execute once all mappers have been constructed.
"""
self._log("_post_configure_properties() started")
l = [(key, prop) for key, prop in self._props.items()]
for key, prop in l:
self._log("initialize prop %s", key)
if prop.parent is self and not prop._configure_started:
prop.init()
if prop._configure_finished:
prop.post_instrument_class(self)
self._log("_post_configure_properties() complete")
self.configured = True
def add_properties(self, dict_of_properties):
"""Add the given dictionary of properties to this mapper,
using `add_property`.
"""
for key, value in dict_of_properties.items():
self.add_property(key, value)
def add_property(self, key, prop):
"""Add an individual MapperProperty to this mapper.
If the mapper has not been configured yet, just adds the
property to the initial properties dictionary sent to the
constructor. If this Mapper has already been configured, then
the given MapperProperty is configured immediately.
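Runtime use might look like the following sketch, assuming mapped
``User`` and ``Address`` classes are already configured::

    from sqlalchemy.orm import class_mapper, relationship

    class_mapper(User).add_property(
        'addresses', relationship(Address))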
"""
self._init_properties[key] = prop
self._configure_property(key, prop, init=self.configured)
def _expire_memoizations(self):
for mapper in self.iterate_to_root():
_memoized_configured_property.expire_instance(mapper)
@property
def _log_desc(self):
return "(" + self.class_.__name__ + \
"|" + \
(self.local_table is not None and
self.local_table.description or
str(self.local_table)) +\
(self.non_primary and
"|non-primary" or "") + ")"
def _log(self, msg, *args):
self.logger.info(
"%s " + msg, *((self._log_desc,) + args)
)
def _log_debug(self, msg, *args):
self.logger.debug(
"%s " + msg, *((self._log_desc,) + args)
)
def __repr__(self):
return '<Mapper at 0x%x; %s>' % (
id(self), self.class_.__name__)
def __str__(self):
return "Mapper|%s|%s%s" % (
self.class_.__name__,
self.local_table is not None and
self.local_table.description or None,
self.non_primary and "|non-primary" or ""
)
def _is_orphan(self, state):
orphan_possible = False
for mapper in self.iterate_to_root():
for (key, cls) in mapper._delete_orphans:
orphan_possible = True
has_parent = attributes.manager_of_class(cls).has_parent(
state, key, optimistic=state.has_identity)
if self.legacy_is_orphan and has_parent:
return False
elif not self.legacy_is_orphan and not has_parent:
return True
if self.legacy_is_orphan:
return orphan_possible
else:
return False
def has_property(self, key):
return key in self._props
def get_property(self, key, _configure_mappers=True):
"""return a MapperProperty associated with the given key.
"""
if _configure_mappers and Mapper._new_mappers:
configure_mappers()
try:
return self._props[key]
except KeyError:
raise sa_exc.InvalidRequestError(
"Mapper '%s' has no property '%s'" % (self, key))
def get_property_by_column(self, column):
"""Given a :class:`.Column` object, return the
:class:`.MapperProperty` which maps this column."""
return self._columntoproperty[column]
@property
def iterate_properties(self):
"""return an iterator of all MapperProperty objects."""
if Mapper._new_mappers:
configure_mappers()
return iter(self._props.values())
def _mappers_from_spec(self, spec, selectable):
"""given a with_polymorphic() argument, return the set of mappers it
represents.
Trims the list of mappers to just those represented within the given
selectable, if present. This helps some more legacy-ish mappings.
"""
if spec == '*':
mappers = list(self.self_and_descendants)
elif spec:
mappers = set()
for m in util.to_list(spec):
m = _class_to_mapper(m)
if not m.isa(self):
raise sa_exc.InvalidRequestError(
"%r does not inherit from %r" %
(m, self))
if selectable is None:
mappers.update(m.iterate_to_root())
else:
mappers.add(m)
mappers = [m for m in self.self_and_descendants if m in mappers]
else:
mappers = []
if selectable is not None:
tables = set(sql_util.find_tables(selectable,
include_aliases=True))
mappers = [m for m in mappers if m.local_table in tables]
return mappers
def _selectable_from_mappers(self, mappers, innerjoin):
"""given a list of mappers (assumed to be within this mapper's
inheritance hierarchy), construct an outerjoin amongst those mappers'
mapped tables.
"""
from_obj = self.mapped_table
for m in mappers:
if m is self:
continue
if m.concrete:
raise sa_exc.InvalidRequestError(
"'with_polymorphic()' requires 'selectable' argument "
"when concrete-inheriting mappers are used.")
elif not m.single:
if innerjoin:
from_obj = from_obj.join(m.local_table,
m.inherit_condition)
else:
from_obj = from_obj.outerjoin(m.local_table,
m.inherit_condition)
return from_obj
@_memoized_configured_property
def _single_table_criterion(self):
if self.single and \
self.inherits and \
self.polymorphic_on is not None:
return self.polymorphic_on.in_(
m.polymorphic_identity
for m in self.self_and_descendants)
else:
return None
@_memoized_configured_property
def _with_polymorphic_mappers(self):
if Mapper._new_mappers:
configure_mappers()
if not self.with_polymorphic:
return []
return self._mappers_from_spec(*self.with_polymorphic)
@_memoized_configured_property
def _with_polymorphic_selectable(self):
if not self.with_polymorphic:
return self.mapped_table
spec, selectable = self.with_polymorphic
if selectable is not None:
return selectable
else:
return self._selectable_from_mappers(
self._mappers_from_spec(spec, selectable),
False)
with_polymorphic_mappers = _with_polymorphic_mappers
"""The list of :class:`.Mapper` objects included in the
default "polymorphic" query.
"""
@property
def selectable(self):
"""The :func:`.select` construct this :class:`.Mapper` selects from
by default.
Normally, this is equivalent to :attr:`.mapped_table`, unless
the ``with_polymorphic`` feature is in use, in which case the
full "polymorphic" selectable is returned.
"""
return self._with_polymorphic_selectable
def _with_polymorphic_args(self, spec=None, selectable=False,
innerjoin=False):
if self.with_polymorphic:
if not spec:
spec = self.with_polymorphic[0]
if selectable is False:
selectable = self.with_polymorphic[1]
elif selectable is False:
selectable = None
mappers = self._mappers_from_spec(spec, selectable)
if selectable is not None:
return mappers, selectable
else:
return mappers, self._selectable_from_mappers(mappers,
innerjoin)
@_memoized_configured_property
def _polymorphic_properties(self):
return list(self._iterate_polymorphic_properties(
self._with_polymorphic_mappers))
def _iterate_polymorphic_properties(self, mappers=None):
"""Return an iterator of MapperProperty objects which will render into
a SELECT."""
if mappers is None:
mappers = self._with_polymorphic_mappers
if not mappers:
for c in self.iterate_properties:
yield c
else:
# in the polymorphic case, filter out discriminator columns
# from other mappers, as these are sometimes dependent on that
# mapper's polymorphic selectable (which we don't want rendered)
for c in util.unique_list(
chain(*[
list(mapper.iterate_properties) for mapper in
[self] + mappers
])
):
if getattr(c, '_is_polymorphic_discriminator', False) and \
(self.polymorphic_on is None or
c.columns[0] is not self.polymorphic_on):
continue
yield c
@util.memoized_property
def attrs(self):
"""A namespace of all :class:`.MapperProperty` objects
associated with this mapper.
This is an object that provides each property based on
its key name. For instance, the mapper for a
``User`` class which has ``User.name`` attribute would
provide ``mapper.attrs.name``, which would be the
:class:`.ColumnProperty` representing the ``name``
column. The namespace object can also be iterated,
which would yield each :class:`.MapperProperty`.
:class:`.Mapper` has several pre-filtered views
of this attribute which limit the types of properties
returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
:attr:`.relationships`, and :attr:`.composites`.
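For example, a sketch assuming a mapped ``User`` class with a
``name`` attribute::

    from sqlalchemy import inspect

    mapper = inspect(User)
    name_prop = mapper.attrs.name   # the ColumnProperty for User.name
    for prop in mapper.attrs:
        print(prop)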
.. seealso::
:attr:`.Mapper.all_orm_descriptors`
"""
if Mapper._new_mappers:
configure_mappers()
return util.ImmutableProperties(self._props)
@util.memoized_property
def all_orm_descriptors(self):
"""A namespace of all :class:`._InspectionAttr` attributes associated
with the mapped class.
These attributes are in all cases Python :term:`descriptors` associated
with the mapped class or its superclasses.
This namespace includes attributes that are mapped to the class
as well as attributes declared by extension modules.
It includes any Python descriptor type that inherits from
:class:`._InspectionAttr`. This includes :class:`.QueryableAttribute`,
as well as extension types such as :class:`.hybrid_property`,
:class:`.hybrid_method` and :class:`.AssociationProxy`.
To distinguish between mapped attributes and extension attributes,
the attribute :attr:`._InspectionAttr.extension_type` will refer
to a constant that distinguishes between different extension types.
When dealing with a :class:`.QueryableAttribute`, the
:attr:`.QueryableAttribute.property` attribute refers to the
:class:`.MapperProperty` property, which is what you get when referring
to the collection of mapped properties via :attr:`.Mapper.attrs`.
.. versionadded:: 0.8.0
.. seealso::
:attr:`.Mapper.attrs`
"""
return util.ImmutableProperties(
dict(self.class_manager._all_sqla_attributes()))
@_memoized_configured_property
def synonyms(self):
"""Return a namespace of all :class:`.SynonymProperty`
properties maintained by this :class:`.Mapper`.
.. seealso::
:attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
objects.
"""
return self._filter_properties(properties.SynonymProperty)
@_memoized_configured_property
def column_attrs(self):
"""Return a namespace of all :class:`.ColumnProperty`
properties maintained by this :class:`.Mapper`.
.. seealso::
:attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
objects.
"""
return self._filter_properties(properties.ColumnProperty)
@_memoized_configured_property
def relationships(self):
"""Return a namespace of all :class:`.RelationshipProperty`
properties maintained by this :class:`.Mapper`.
.. seealso::
:attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
objects.
"""
return self._filter_properties(properties.RelationshipProperty)
@_memoized_configured_property
def composites(self):
"""Return a namespace of all :class:`.CompositeProperty`
properties maintained by this :class:`.Mapper`.
.. seealso::
:attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
objects.
"""
return self._filter_properties(properties.CompositeProperty)
def _filter_properties(self, type_):
if Mapper._new_mappers:
configure_mappers()
return util.ImmutableProperties(util.OrderedDict(
(k, v) for k, v in self._props.items()
if isinstance(v, type_)
))
@_memoized_configured_property
def _get_clause(self):
"""create a "get clause" based on the primary key. this is used
by query.get() and many-to-one lazyloads to load this item
by primary key.
"""
params = [(primary_key, sql.bindparam(None, type_=primary_key.type))
for primary_key in self.primary_key]
return sql.and_(*[k == v for (k, v) in params]), \
util.column_dict(params)
@_memoized_configured_property
def _equivalent_columns(self):
"""Create a map of all *equivalent* columns, based on
the determination of column pairs that are equated to
one another based on inherit condition. This is designed
to work with the queries that util.polymorphic_union
comes up with, which often don't include the columns from
the base table directly (including the subclass table columns
only).
The resulting structure is a dictionary of columns mapped
to lists of equivalent columns, i.e.
{
tablea.col1:
set([tableb.col1, tablec.col1]),
tablea.col2:
set([tabled.col2])
}
"""
result = util.column_dict()
def visit_binary(binary):
if binary.operator == operators.eq:
if binary.left in result:
result[binary.left].add(binary.right)
else:
result[binary.left] = util.column_set((binary.right,))
if binary.right in result:
result[binary.right].add(binary.left)
else:
result[binary.right] = util.column_set((binary.left,))
for mapper in self.base_mapper.self_and_descendants:
if mapper.inherit_condition is not None:
visitors.traverse(
mapper.inherit_condition, {},
{'binary': visit_binary})
return result
def _is_userland_descriptor(self, obj):
if isinstance(obj, (_MappedAttribute,
instrumentation.ClassManager,
expression.ColumnElement)):
return False
else:
return True
def _should_exclude(self, name, assigned_name, local, column):
"""determine whether a particular property should be implicitly
present on the class.
This occurs when properties are propagated from an inherited class, or
are applied from the columns present in the mapped table.
"""
# check for class-bound attributes and/or descriptors,
# either local or from an inherited class
if local:
if self.class_.__dict__.get(assigned_name, None) is not None \
and self._is_userland_descriptor(
self.class_.__dict__[assigned_name]):
return True
else:
if getattr(self.class_, assigned_name, None) is not None \
and self._is_userland_descriptor(
getattr(self.class_, assigned_name)):
return True
if self.include_properties is not None and \
name not in self.include_properties and \
(column is None or column not in self.include_properties):
self._log("not including property %s" % (name))
return True
if self.exclude_properties is not None and \
(
name in self.exclude_properties or \
(column is not None and column in self.exclude_properties)
):
self._log("excluding property %s" % (name))
return True
return False
def common_parent(self, other):
"""Return true if the given mapper shares a
common inherited parent as this mapper."""
return self.base_mapper is other.base_mapper
def _canload(self, state, allow_subtypes):
s = self.primary_mapper()
if self.polymorphic_on is not None or allow_subtypes:
return _state_mapper(state).isa(s)
else:
return _state_mapper(state) is s
def isa(self, other):
"""Return True if the this mapper inherits from the given mapper."""
m = self
while m and m is not other:
m = m.inherits
return bool(m)
def iterate_to_root(self):
m = self
while m:
yield m
m = m.inherits
@_memoized_configured_property
def self_and_descendants(self):
"""The collection including this mapper and all descendant mappers.
This includes not just the immediately inheriting mappers but
all their inheriting mappers as well.
"""
descendants = []
stack = deque([self])
while stack:
item = stack.popleft()
descendants.append(item)
stack.extend(item._inheriting_mappers)
return util.WeakSequence(descendants)
def polymorphic_iterator(self):
"""Iterate through the collection including this mapper and
all descendant mappers.
This includes not just the immediately inheriting mappers but
all their inheriting mappers as well.
To iterate through an entire hierarchy, use
``mapper.base_mapper.polymorphic_iterator()``.
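For example, a sketch assuming the ``Employee`` hierarchy::

    from sqlalchemy import inspect

    for m in inspect(Employee).base_mapper.polymorphic_iterator():
        print(m.class_.__name__)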
"""
return iter(self.self_and_descendants)
def primary_mapper(self):
"""Return the primary mapper corresponding to this mapper's class key
(class)."""
return self.class_manager.mapper
@property
def primary_base_mapper(self):
return self.class_manager.mapper.base_mapper
def identity_key_from_row(self, row, adapter=None):
"""Return an identity-map key for use in storing/retrieving an
item from the identity map.
:param row: A :class:`.RowProxy` instance. The columns which are mapped
by this :class:`.Mapper` should be locatable in the row, preferably
via the :class:`.Column` object directly (as is the case when a
:func:`.select` construct is executed), or via string names of the form
``<tablename>_<colname>``.
"""
pk_cols = self.primary_key
if adapter:
pk_cols = [adapter.columns[c] for c in pk_cols]
return self._identity_class, \
tuple(row[column] for column in pk_cols)
def identity_key_from_primary_key(self, primary_key):
"""Return an identity-map key for use in storing/retrieving an
item from an identity map.
:param primary_key: A list of values indicating the identifier.
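For example, for a hypothetical mapped ``User`` class with a single
integer primary key, the returned key is a tuple of the identity
class and the primary key values (output illustrative)::

    >>> inspect(User).identity_key_from_primary_key([5])
    (<class 'User'>, (5,))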
"""
return self._identity_class, tuple(primary_key)
def identity_key_from_instance(self, instance):
"""Return the identity key for the given instance, based on
its primary key attributes.
If the instance's state is expired, calling this method
will result in a database check to see if the object has been deleted.
If the row no longer exists,
:class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
This value is typically also found on the instance state under the
attribute name `key`.
"""
return self.identity_key_from_primary_key(
self.primary_key_from_instance(instance))
def _identity_key_from_state(self, state):
dict_ = state.dict
manager = state.manager
return self._identity_class, tuple([
manager[self._columntoproperty[col].key].\
impl.get(state, dict_, attributes.PASSIVE_OFF)
for col in self.primary_key
])
def primary_key_from_instance(self, instance):
"""Return the list of primary key values for the given
instance.
If the instance's state is expired, calling this method
will result in a database check to see if the object has been deleted.
If the row no longer exists,
:class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
"""
state = attributes.instance_state(instance)
return self._primary_key_from_state(state)
def _primary_key_from_state(self, state):
dict_ = state.dict
manager = state.manager
return [
manager[self._columntoproperty[col].key].\
impl.get(state, dict_, attributes.PASSIVE_OFF)
for col in self.primary_key
]
def _get_state_attr_by_column(self, state, dict_, column,
passive=attributes.PASSIVE_OFF):
prop = self._columntoproperty[column]
return state.manager[prop.key].impl.get(state, dict_, passive=passive)
def _set_state_attr_by_column(self, state, dict_, column, value):
prop = self._columntoproperty[column]
state.manager[prop.key].impl.set(state, dict_, value, None)
def _get_committed_attr_by_column(self, obj, column):
state = attributes.instance_state(obj)
dict_ = attributes.instance_dict(obj)
return self._get_committed_state_attr_by_column(state, dict_, column)
def _get_committed_state_attr_by_column(self, state, dict_,
column, passive=attributes.PASSIVE_OFF):
prop = self._columntoproperty[column]
return state.manager[prop.key].impl.\
get_committed_value(state, dict_, passive=passive)
def _optimized_get_statement(self, state, attribute_names):
"""assemble a WHERE clause which retrieves a given state by primary
key, using a minimized set of tables.
Applies to a joined-table inheritance mapper where the
requested attribute names are only present on joined tables,
not the base table. The WHERE clause attempts to include
only those tables to minimize joins.
"""
props = self._props
tables = set(chain(
*[sql_util.find_tables(c, check_columns=True)
for key in attribute_names
for c in props[key].columns]
))
if self.base_mapper.local_table in tables:
return None
class ColumnsNotAvailable(Exception):
pass
def visit_binary(binary):
leftcol = binary.left
rightcol = binary.right
if leftcol is None or rightcol is None:
return
if leftcol.table not in tables:
leftval = self._get_committed_state_attr_by_column(
state, state.dict,
leftcol,
passive=attributes.PASSIVE_NO_INITIALIZE)
if leftval is attributes.PASSIVE_NO_RESULT or leftval is None:
raise ColumnsNotAvailable()
binary.left = sql.bindparam(None, leftval,
type_=binary.right.type)
elif rightcol.table not in tables:
rightval = self._get_committed_state_attr_by_column(
state, state.dict,
rightcol,
passive=attributes.PASSIVE_NO_INITIALIZE)
if rightval is attributes.PASSIVE_NO_RESULT or \
rightval is None:
raise ColumnsNotAvailable()
binary.right = sql.bindparam(None, rightval,
type_=binary.right.type)
allconds = []
try:
start = False
for mapper in reversed(list(self.iterate_to_root())):
if mapper.local_table in tables:
start = True
elif not isinstance(mapper.local_table, expression.TableClause):
return None
if start and not mapper.single:
allconds.append(visitors.cloned_traverse(
mapper.inherit_condition,
{},
{'binary': visit_binary}
)
)
except ColumnsNotAvailable:
return None
cond = sql.and_(*allconds)
cols = []
for key in attribute_names:
cols.extend(props[key].columns)
return sql.select(cols, cond, use_labels=True)
def cascade_iterator(self, type_, state, halt_on=None):
"""Iterate each element and its mapper in an object graph,
for all relationships that meet the given cascade rule.
:param type_:
The name of the cascade rule (i.e. save-update, delete,
etc.)
:param state:
          The lead InstanceState. Child items will be processed per
          the relationships defined for this object's mapper.
        The return values are object instances; this provides a strong
        reference so that they don't fall out of scope immediately.
"""
visited_states = set()
prp, mpp = object(), object()
visitables = deque([(deque(self._props.values()), prp,
state, state.dict)])
while visitables:
iterator, item_type, parent_state, parent_dict = visitables[-1]
if not iterator:
visitables.pop()
continue
if item_type is prp:
prop = iterator.popleft()
if type_ not in prop.cascade:
continue
queue = deque(prop.cascade_iterator(type_, parent_state,
parent_dict, visited_states, halt_on))
if queue:
visitables.append((queue, mpp, None, None))
elif item_type is mpp:
instance, instance_mapper, corresponding_state, \
corresponding_dict = iterator.popleft()
yield instance, instance_mapper, \
corresponding_state, corresponding_dict
visitables.append((deque(instance_mapper._props.values()),
prp, corresponding_state,
corresponding_dict))
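    # Illustrative call pattern (a sketch; ``some_obj`` and its ``mapper``
    # are assumptions): walk everything reachable under the "delete" cascade.
    #
    #     state = attributes.instance_state(some_obj)
    #     for obj, mpr, st, dct in mapper.cascade_iterator('delete', state):
    #         ...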
@_memoized_configured_property
def _compiled_cache(self):
return util.LRUCache(self._compiled_cache_size)
@_memoized_configured_property
def _sorted_tables(self):
table_to_mapper = {}
for mapper in self.base_mapper.self_and_descendants:
for t in mapper.tables:
table_to_mapper.setdefault(t, mapper)
extra_dependencies = []
for table, mapper in table_to_mapper.items():
super_ = mapper.inherits
if super_:
extra_dependencies.extend([
(super_table, table)
for super_table in super_.tables
])
def skip(fk):
# attempt to skip dependencies that are not
# significant to the inheritance chain
# for two tables that are related by inheritance.
            # while that dependency may be important, it's technically
# not what we mean to sort on here.
parent = table_to_mapper.get(fk.parent.table)
dep = table_to_mapper.get(fk.column.table)
if parent is not None and \
dep is not None and \
dep is not parent and \
dep.inherit_condition is not None:
cols = set(sql_util._find_columns(dep.inherit_condition))
if parent.inherit_condition is not None:
cols = cols.union(sql_util._find_columns(
parent.inherit_condition))
return fk.parent not in cols and fk.column not in cols
else:
return fk.parent not in cols
return False
sorted_ = sql_util.sort_tables(table_to_mapper,
skip_fn=skip,
extra_dependencies=extra_dependencies)
ret = util.OrderedDict()
for t in sorted_:
ret[t] = table_to_mapper[t]
return ret
def _memo(self, key, callable_):
if key in self._memoized_values:
return self._memoized_values[key]
else:
self._memoized_values[key] = value = callable_()
return value
@util.memoized_property
def _table_to_equated(self):
"""memoized map of tables to collections of columns to be
synchronized upwards to the base mapper."""
result = util.defaultdict(list)
for table in self._sorted_tables:
cols = set(table.c)
for m in self.iterate_to_root():
if m._inherits_equated_pairs and \
cols.intersection(
util.reduce(set.union,
[l.proxy_set for l, r in m._inherits_equated_pairs])
):
result[table].append((m, m._inherits_equated_pairs))
return result
def configure_mappers():
"""Initialize the inter-mapper relationships of all mappers that
have been constructed thus far.
This function can be called any number of times, but in
most cases is handled internally.
"""
if not Mapper._new_mappers:
return
_CONFIGURE_MUTEX.acquire()
try:
global _already_compiling
if _already_compiling:
return
_already_compiling = True
try:
# double-check inside mutex
if not Mapper._new_mappers:
return
Mapper.dispatch(Mapper).before_configured()
# initialize properties on all mappers
# note that _mapper_registry is unordered, which
# may randomly conceal/reveal issues related to
# the order of mapper compilation
for mapper in list(_mapper_registry):
if getattr(mapper, '_configure_failed', False):
e = sa_exc.InvalidRequestError(
"One or more mappers failed to initialize - "
"can't proceed with initialization of other "
"mappers. Original exception was: %s"
% mapper._configure_failed)
e._configure_failed = mapper._configure_failed
raise e
if not mapper.configured:
try:
mapper._post_configure_properties()
mapper._expire_memoizations()
mapper.dispatch.mapper_configured(
mapper, mapper.class_)
except:
exc = sys.exc_info()[1]
if not hasattr(exc, '_configure_failed'):
mapper._configure_failed = exc
raise
Mapper._new_mappers = False
finally:
_already_compiling = False
finally:
_CONFIGURE_MUTEX.release()
Mapper.dispatch(Mapper).after_configured()
def reconstructor(fn):
"""Decorate a method as the 'reconstructor' hook.
Designates a method as the "reconstructor", an ``__init__``-like
method that will be called by the ORM after the instance has been
loaded from the database or otherwise reconstituted.
The reconstructor will be invoked with no arguments. Scalar
(non-collection) database-mapped attributes of the instance will
be available for use within the function. Eagerly-loaded
collections are generally not yet available and will usually only
contain the first element. ORM state changes made to objects at
this stage will not be recorded for the next flush() operation, so
the activity within a reconstructor should be conservative.
"""
fn.__sa_reconstructor__ = True
return fn
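# A minimal usage sketch (illustrative only; the declarative ``Base`` and the
# ``User`` mapping below are assumptions, not part of this module):
#
#     from sqlalchemy.orm import reconstructor
#
#     class User(Base):
#         __tablename__ = 'user'
#         id = Column(Integer, primary_key=True)
#
#         @reconstructor
#         def init_on_load(self):
#             # runs after a load from the database, in place of __init__
#             self._cache = {}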
def validates(*names, **kw):
"""Decorate a method as a 'validator' for one or more named properties.
Designates a method as a validator, a method which receives the
name of the attribute as well as a value to be assigned, or in the
case of a collection, the value to be added to the collection.
The function can then raise validation exceptions to halt the
process from continuing (where Python's built-in ``ValueError``
and ``AssertionError`` exceptions are reasonable choices), or can
modify or replace the value before proceeding. The function should
otherwise return the given value.
Note that a validator for a collection **cannot** issue a load of that
collection within the validation routine - this usage raises
an assertion to avoid recursion overflows. This is a reentrant
condition which is not supported.
:param \*names: list of attribute names to be validated.
:param include_removes: if True, "remove" events will be
sent as well - the validation function must accept an additional
argument "is_remove" which will be a boolean.
.. versionadded:: 0.7.7
:param include_backrefs: defaults to ``True``; if ``False``, the
validation function will not emit if the originator is an attribute
event related via a backref. This can be used for bi-directional
:func:`.validates` usage where only one validator should emit per
attribute operation.
.. versionadded:: 0.9.0
.. seealso::
:ref:`simple_validators` - usage examples for :func:`.validates`
"""
include_removes = kw.pop('include_removes', False)
include_backrefs = kw.pop('include_backrefs', True)
def wrap(fn):
fn.__sa_validators__ = names
fn.__sa_validation_opts__ = {
"include_removes": include_removes,
"include_backrefs": include_backrefs
}
return fn
return wrap
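# A minimal usage sketch (illustrative; ``Base`` and the ``User`` mapping are
# assumptions):
#
#     from sqlalchemy.orm import validates
#
#     class User(Base):
#         __tablename__ = 'user'
#         id = Column(Integer, primary_key=True)
#         email = Column(String)
#
#         @validates('email')
#         def validate_email(self, key, value):
#             assert '@' in value
#             return value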
def _event_on_load(state, ctx):
instrumenting_mapper = state.manager.info[_INSTRUMENTOR]
if instrumenting_mapper._reconstructor:
instrumenting_mapper._reconstructor(state.obj())
def _event_on_first_init(manager, cls):
"""Initial mapper compilation trigger.
    Instrumentation calls this one when InstanceState
    is first generated; it is needed for legacy mutable
    attributes to work.
"""
instrumenting_mapper = manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
if Mapper._new_mappers:
configure_mappers()
def _event_on_init(state, args, kwargs):
"""Run init_instance hooks.
This also includes mapper compilation, normally not needed
here but helps with some piecemeal configuration
scenarios (such as in the ORM tutorial).
"""
instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
if Mapper._new_mappers:
configure_mappers()
if instrumenting_mapper._set_polymorphic_identity:
instrumenting_mapper._set_polymorphic_identity(state)
def _event_on_resurrect(state):
# re-populate the primary key elements
# of the dict based on the mapping.
instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
for col, val in zip(instrumenting_mapper.primary_key, state.key[1]):
instrumenting_mapper._set_state_attr_by_column(
state, state.dict, col, val)
class _ColumnMapping(dict):
"""Error reporting helper for mapper._columntoproperty."""
def __init__(self, mapper):
self.mapper = mapper
def __missing__(self, column):
prop = self.mapper._props.get(column)
if prop:
raise orm_exc.UnmappedColumnError(
"Column '%s.%s' is not available, due to "
"conflicting property '%s':%r" % (
column.table.name, column.name, column.key, prop))
raise orm_exc.UnmappedColumnError(
"No column %s is configured on mapper %s..." %
(column, self.mapper))
| gpl-3.0 | -1,884,553,419,290,850,000 | 39.12034 | 86 | 0.577756 | false |
hojel/calibre | src/calibre/library/restore.py | 14 | 11515 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
import re, os, traceback, shutil
from threading import Thread
from operator import itemgetter
from calibre.ptempfile import TemporaryDirectory
from calibre.ebooks.metadata.opf2 import OPF
from calibre.library.database2 import LibraryDatabase2
from calibre.library.prefs import DBPrefs
from calibre.constants import filesystem_encoding
from calibre.utils.date import utcfromtimestamp
from calibre import isbytestring
NON_EBOOK_EXTENSIONS = frozenset([
'jpg', 'jpeg', 'gif', 'png', 'bmp',
'opf', 'swp', 'swo'
])
class RestoreDatabase(LibraryDatabase2):
PATH_LIMIT = 10
WINDOWS_LIBRARY_PATH_LIMIT = 180
def set_path(self, *args, **kwargs):
pass
def dirtied(self, *args, **kwargs):
pass
class Restore(Thread):
def __init__(self, library_path, progress_callback=None):
super(Restore, self).__init__()
if isbytestring(library_path):
library_path = library_path.decode(filesystem_encoding)
self.src_library_path = os.path.abspath(library_path)
self.progress_callback = progress_callback
self.db_id_regexp = re.compile(r'^.* \((\d+)\)$')
self.bad_ext_pat = re.compile(r'[^a-z0-9_]+')
if not callable(self.progress_callback):
self.progress_callback = lambda x, y: x
self.dirs = []
self.ignored_dirs = []
self.failed_dirs = []
self.books = []
self.conflicting_custom_cols = {}
self.failed_restores = []
self.mismatched_dirs = []
self.successes = 0
self.tb = None
self.authors_links = {}
@property
def errors_occurred(self):
return self.failed_dirs or self.mismatched_dirs or \
self.conflicting_custom_cols or self.failed_restores
@property
def report(self):
ans = ''
failures = list(self.failed_dirs) + [(x['dirpath'], tb) for x, tb in
self.failed_restores]
if failures:
ans += 'Failed to restore the books in the following folders:\n'
for dirpath, tb in failures:
ans += '\t' + dirpath + ' with error:\n'
ans += '\n'.join('\t\t'+x for x in tb.splitlines())
ans += '\n\n'
if self.conflicting_custom_cols:
ans += '\n\n'
ans += 'The following custom columns have conflicting definitions ' \
'and were not fully restored:\n'
for x in self.conflicting_custom_cols:
ans += '\t#'+x+'\n'
ans += '\tused:\t%s, %s, %s, %s\n'%(self.custom_columns[x][1],
self.custom_columns[x][2],
self.custom_columns[x][3],
self.custom_columns[x][5])
for coldef in self.conflicting_custom_cols[x]:
ans += '\tother:\t%s, %s, %s, %s\n'%(coldef[1], coldef[2],
coldef[3], coldef[5])
if self.mismatched_dirs:
ans += '\n\n'
ans += 'The following folders were ignored:\n'
for x in self.mismatched_dirs:
ans += '\t'+x+'\n'
return ans
def run(self):
try:
with TemporaryDirectory('_library_restore') as tdir:
self.library_path = tdir
self.scan_library()
if not self.load_preferences():
# Something went wrong with preferences restore. Start over
# with a new database and attempt to rebuild the structure
# from the metadata in the opf
dbpath = os.path.join(self.library_path, 'metadata.db')
if os.path.exists(dbpath):
os.remove(dbpath)
self.create_cc_metadata()
self.restore_books()
if self.successes == 0 and len(self.dirs) > 0:
                    raise Exception('Something bad happened')
self.replace_db()
except:
self.tb = traceback.format_exc()
def load_preferences(self):
self.progress_callback(None, 1)
self.progress_callback(_('Starting restoring preferences and column metadata'), 0)
prefs_path = os.path.join(self.src_library_path, 'metadata_db_prefs_backup.json')
if not os.path.exists(prefs_path):
self.progress_callback(_('Cannot restore preferences. Backup file not found.'), 1)
return False
try:
prefs = DBPrefs.read_serialized(self.src_library_path, recreate_prefs=False)
db = RestoreDatabase(self.library_path, default_prefs=prefs,
restore_all_prefs=True,
progress_callback=self.progress_callback)
db.commit()
db.close()
self.progress_callback(None, 1)
if 'field_metadata' in prefs:
self.progress_callback(_('Finished restoring preferences and column metadata'), 1)
return True
self.progress_callback(_('Finished restoring preferences'), 1)
return False
except:
traceback.print_exc()
self.progress_callback(None, 1)
self.progress_callback(_('Restoring preferences and column metadata failed'), 0)
return False
def scan_library(self):
for dirpath, dirnames, filenames in os.walk(self.src_library_path):
leaf = os.path.basename(dirpath)
m = self.db_id_regexp.search(leaf)
if m is None or 'metadata.opf' not in filenames:
self.ignored_dirs.append(dirpath)
continue
self.dirs.append((dirpath, filenames, m.group(1)))
self.progress_callback(None, len(self.dirs))
for i, x in enumerate(self.dirs):
dirpath, filenames, book_id = x
try:
self.process_dir(dirpath, filenames, book_id)
except:
self.failed_dirs.append((dirpath, traceback.format_exc()))
self.progress_callback(_('Processed') + ' ' + dirpath, i+1)
def is_ebook_file(self, filename):
ext = os.path.splitext(filename)[1]
if not ext:
return False
ext = ext[1:].lower()
if ext in NON_EBOOK_EXTENSIONS or \
self.bad_ext_pat.search(ext) is not None:
return False
return True
def process_dir(self, dirpath, filenames, book_id):
book_id = int(book_id)
formats = filter(self.is_ebook_file, filenames)
fmts = [os.path.splitext(x)[1][1:].upper() for x in formats]
sizes = [os.path.getsize(os.path.join(dirpath, x)) for x in formats]
names = [os.path.splitext(x)[0] for x in formats]
opf = os.path.join(dirpath, 'metadata.opf')
mi = OPF(opf, basedir=dirpath).to_book_metadata()
timestamp = os.path.getmtime(opf)
path = os.path.relpath(dirpath, self.src_library_path).replace(os.sep,
'/')
if int(mi.application_id) == book_id:
self.books.append({
'mi': mi,
'timestamp': timestamp,
'formats': list(zip(fmts, sizes, names)),
'id': book_id,
'dirpath': dirpath,
'path': path,
})
else:
self.mismatched_dirs.append(dirpath)
alm = mi.get('author_link_map', {})
for author, link in alm.iteritems():
existing_link, timestamp = self.authors_links.get(author, (None, None))
if existing_link is None or existing_link != link and timestamp < mi.timestamp:
self.authors_links[author] = (link, mi.timestamp)
def create_cc_metadata(self):
self.books.sort(key=itemgetter('timestamp'))
self.custom_columns = {}
fields = ('label', 'name', 'datatype', 'is_multiple', 'is_editable',
'display')
for b in self.books:
for key in b['mi'].custom_field_keys():
cfm = b['mi'].metadata_for_field(key)
args = []
for x in fields:
if x in cfm:
if x == 'is_multiple':
args.append(bool(cfm[x]))
else:
args.append(cfm[x])
if len(args) == len(fields):
# TODO: Do series type columns need special handling?
label = cfm['label']
if label in self.custom_columns and args != self.custom_columns[label]:
if label not in self.conflicting_custom_cols:
self.conflicting_custom_cols[label] = []
if self.custom_columns[label] not in self.conflicting_custom_cols[label]:
self.conflicting_custom_cols[label].append(self.custom_columns[label])
self.custom_columns[label] = args
db = RestoreDatabase(self.library_path)
self.progress_callback(None, len(self.custom_columns))
if len(self.custom_columns):
for i,args in enumerate(self.custom_columns.values()):
db.create_custom_column(*args)
self.progress_callback(_('creating custom column ')+args[0], i+1)
db.close()
def restore_books(self):
self.progress_callback(None, len(self.books))
self.books.sort(key=itemgetter('id'))
db = RestoreDatabase(self.library_path)
for i, book in enumerate(self.books):
try:
self.restore_book(book, db)
except:
self.failed_restores.append((book, traceback.format_exc()))
self.progress_callback(book['mi'].title, i+1)
for author in self.authors_links.iterkeys():
link, ign = self.authors_links[author]
db.conn.execute('UPDATE authors SET link=? WHERE name=?',
(link, author.replace(',', '|')))
db.conn.commit()
db.close()
def restore_book(self, book, db):
db.create_book_entry(book['mi'], add_duplicates=True,
force_id=book['id'])
if book['mi'].uuid:
db.set_uuid(book['id'], book['mi'].uuid, commit=False, notify=False)
db.conn.execute('UPDATE books SET path=?,last_modified=? WHERE id=?', (book['path'],
utcfromtimestamp(book['timestamp']), book['id']))
for fmt, size, name in book['formats']:
db.conn.execute('''
INSERT INTO data (book,format,uncompressed_size,name)
VALUES (?,?,?,?)''', (book['id'], fmt, size, name))
db.conn.commit()
self.successes += 1
def replace_db(self):
dbpath = os.path.join(self.src_library_path, 'metadata.db')
ndbpath = os.path.join(self.library_path, 'metadata.db')
save_path = self.olddb = os.path.splitext(dbpath)[0]+'_pre_restore.db'
if os.path.exists(save_path):
os.remove(save_path)
os.rename(dbpath, save_path)
shutil.copyfile(ndbpath, dbpath)
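# Illustrative use of the Restore thread (a sketch; the library path and the
# no-op progress callback are assumptions):
#
#     r = Restore('/path/to/library', progress_callback=lambda msg, n: None)
#     r.start()
#     r.join()
#     if r.tb or r.errors_occurred:
#         print r.report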
| gpl-3.0 | -4,468,883,689,135,527,400 | 39.978648 | 98 | 0.543986 | false |
perlygatekeeper/glowing-robot | Little_Alchemy_2/Scraper_python/env/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py | 56 | 26248 | from __future__ import absolute_import, division, unicode_literals
import re
from xml.sax.saxutils import escape, unescape
from pip._vendor.six.moves import urllib_parse as urlparse
from . import base
from ..constants import namespaces, prefixes
__all__ = ["Filter"]
allowed_elements = frozenset((
(namespaces['html'], 'a'),
(namespaces['html'], 'abbr'),
(namespaces['html'], 'acronym'),
(namespaces['html'], 'address'),
(namespaces['html'], 'area'),
(namespaces['html'], 'article'),
(namespaces['html'], 'aside'),
(namespaces['html'], 'audio'),
(namespaces['html'], 'b'),
(namespaces['html'], 'big'),
(namespaces['html'], 'blockquote'),
(namespaces['html'], 'br'),
(namespaces['html'], 'button'),
(namespaces['html'], 'canvas'),
(namespaces['html'], 'caption'),
(namespaces['html'], 'center'),
(namespaces['html'], 'cite'),
(namespaces['html'], 'code'),
(namespaces['html'], 'col'),
(namespaces['html'], 'colgroup'),
(namespaces['html'], 'command'),
(namespaces['html'], 'datagrid'),
(namespaces['html'], 'datalist'),
(namespaces['html'], 'dd'),
(namespaces['html'], 'del'),
(namespaces['html'], 'details'),
(namespaces['html'], 'dfn'),
(namespaces['html'], 'dialog'),
(namespaces['html'], 'dir'),
(namespaces['html'], 'div'),
(namespaces['html'], 'dl'),
(namespaces['html'], 'dt'),
(namespaces['html'], 'em'),
(namespaces['html'], 'event-source'),
(namespaces['html'], 'fieldset'),
(namespaces['html'], 'figcaption'),
(namespaces['html'], 'figure'),
(namespaces['html'], 'footer'),
(namespaces['html'], 'font'),
(namespaces['html'], 'form'),
(namespaces['html'], 'header'),
(namespaces['html'], 'h1'),
(namespaces['html'], 'h2'),
(namespaces['html'], 'h3'),
(namespaces['html'], 'h4'),
(namespaces['html'], 'h5'),
(namespaces['html'], 'h6'),
(namespaces['html'], 'hr'),
(namespaces['html'], 'i'),
(namespaces['html'], 'img'),
(namespaces['html'], 'input'),
(namespaces['html'], 'ins'),
(namespaces['html'], 'keygen'),
(namespaces['html'], 'kbd'),
(namespaces['html'], 'label'),
(namespaces['html'], 'legend'),
(namespaces['html'], 'li'),
(namespaces['html'], 'm'),
(namespaces['html'], 'map'),
(namespaces['html'], 'menu'),
(namespaces['html'], 'meter'),
(namespaces['html'], 'multicol'),
(namespaces['html'], 'nav'),
(namespaces['html'], 'nextid'),
(namespaces['html'], 'ol'),
(namespaces['html'], 'output'),
(namespaces['html'], 'optgroup'),
(namespaces['html'], 'option'),
(namespaces['html'], 'p'),
(namespaces['html'], 'pre'),
(namespaces['html'], 'progress'),
(namespaces['html'], 'q'),
(namespaces['html'], 's'),
(namespaces['html'], 'samp'),
(namespaces['html'], 'section'),
(namespaces['html'], 'select'),
(namespaces['html'], 'small'),
(namespaces['html'], 'sound'),
(namespaces['html'], 'source'),
(namespaces['html'], 'spacer'),
(namespaces['html'], 'span'),
(namespaces['html'], 'strike'),
(namespaces['html'], 'strong'),
(namespaces['html'], 'sub'),
(namespaces['html'], 'sup'),
(namespaces['html'], 'table'),
(namespaces['html'], 'tbody'),
(namespaces['html'], 'td'),
(namespaces['html'], 'textarea'),
(namespaces['html'], 'time'),
(namespaces['html'], 'tfoot'),
(namespaces['html'], 'th'),
(namespaces['html'], 'thead'),
(namespaces['html'], 'tr'),
(namespaces['html'], 'tt'),
(namespaces['html'], 'u'),
(namespaces['html'], 'ul'),
(namespaces['html'], 'var'),
(namespaces['html'], 'video'),
(namespaces['mathml'], 'maction'),
(namespaces['mathml'], 'math'),
(namespaces['mathml'], 'merror'),
(namespaces['mathml'], 'mfrac'),
(namespaces['mathml'], 'mi'),
(namespaces['mathml'], 'mmultiscripts'),
(namespaces['mathml'], 'mn'),
(namespaces['mathml'], 'mo'),
(namespaces['mathml'], 'mover'),
(namespaces['mathml'], 'mpadded'),
(namespaces['mathml'], 'mphantom'),
(namespaces['mathml'], 'mprescripts'),
(namespaces['mathml'], 'mroot'),
(namespaces['mathml'], 'mrow'),
(namespaces['mathml'], 'mspace'),
(namespaces['mathml'], 'msqrt'),
(namespaces['mathml'], 'mstyle'),
(namespaces['mathml'], 'msub'),
(namespaces['mathml'], 'msubsup'),
(namespaces['mathml'], 'msup'),
(namespaces['mathml'], 'mtable'),
(namespaces['mathml'], 'mtd'),
(namespaces['mathml'], 'mtext'),
(namespaces['mathml'], 'mtr'),
(namespaces['mathml'], 'munder'),
(namespaces['mathml'], 'munderover'),
(namespaces['mathml'], 'none'),
(namespaces['svg'], 'a'),
(namespaces['svg'], 'animate'),
(namespaces['svg'], 'animateColor'),
(namespaces['svg'], 'animateMotion'),
(namespaces['svg'], 'animateTransform'),
(namespaces['svg'], 'clipPath'),
(namespaces['svg'], 'circle'),
(namespaces['svg'], 'defs'),
(namespaces['svg'], 'desc'),
(namespaces['svg'], 'ellipse'),
(namespaces['svg'], 'font-face'),
(namespaces['svg'], 'font-face-name'),
(namespaces['svg'], 'font-face-src'),
(namespaces['svg'], 'g'),
(namespaces['svg'], 'glyph'),
(namespaces['svg'], 'hkern'),
(namespaces['svg'], 'linearGradient'),
(namespaces['svg'], 'line'),
(namespaces['svg'], 'marker'),
(namespaces['svg'], 'metadata'),
(namespaces['svg'], 'missing-glyph'),
(namespaces['svg'], 'mpath'),
(namespaces['svg'], 'path'),
(namespaces['svg'], 'polygon'),
(namespaces['svg'], 'polyline'),
(namespaces['svg'], 'radialGradient'),
(namespaces['svg'], 'rect'),
(namespaces['svg'], 'set'),
(namespaces['svg'], 'stop'),
(namespaces['svg'], 'svg'),
(namespaces['svg'], 'switch'),
(namespaces['svg'], 'text'),
(namespaces['svg'], 'title'),
(namespaces['svg'], 'tspan'),
(namespaces['svg'], 'use'),
))
allowed_attributes = frozenset((
# HTML attributes
(None, 'abbr'),
(None, 'accept'),
(None, 'accept-charset'),
(None, 'accesskey'),
(None, 'action'),
(None, 'align'),
(None, 'alt'),
(None, 'autocomplete'),
(None, 'autofocus'),
(None, 'axis'),
(None, 'background'),
(None, 'balance'),
(None, 'bgcolor'),
(None, 'bgproperties'),
(None, 'border'),
(None, 'bordercolor'),
(None, 'bordercolordark'),
(None, 'bordercolorlight'),
(None, 'bottompadding'),
(None, 'cellpadding'),
(None, 'cellspacing'),
(None, 'ch'),
(None, 'challenge'),
(None, 'char'),
(None, 'charoff'),
(None, 'choff'),
(None, 'charset'),
(None, 'checked'),
(None, 'cite'),
(None, 'class'),
(None, 'clear'),
(None, 'color'),
(None, 'cols'),
(None, 'colspan'),
(None, 'compact'),
(None, 'contenteditable'),
(None, 'controls'),
(None, 'coords'),
(None, 'data'),
(None, 'datafld'),
(None, 'datapagesize'),
(None, 'datasrc'),
(None, 'datetime'),
(None, 'default'),
(None, 'delay'),
(None, 'dir'),
(None, 'disabled'),
(None, 'draggable'),
(None, 'dynsrc'),
(None, 'enctype'),
(None, 'end'),
(None, 'face'),
(None, 'for'),
(None, 'form'),
(None, 'frame'),
(None, 'galleryimg'),
(None, 'gutter'),
(None, 'headers'),
(None, 'height'),
(None, 'hidefocus'),
(None, 'hidden'),
(None, 'high'),
(None, 'href'),
(None, 'hreflang'),
(None, 'hspace'),
(None, 'icon'),
(None, 'id'),
(None, 'inputmode'),
(None, 'ismap'),
(None, 'keytype'),
(None, 'label'),
(None, 'leftspacing'),
(None, 'lang'),
(None, 'list'),
(None, 'longdesc'),
(None, 'loop'),
(None, 'loopcount'),
(None, 'loopend'),
(None, 'loopstart'),
(None, 'low'),
(None, 'lowsrc'),
(None, 'max'),
(None, 'maxlength'),
(None, 'media'),
(None, 'method'),
(None, 'min'),
(None, 'multiple'),
(None, 'name'),
(None, 'nohref'),
(None, 'noshade'),
(None, 'nowrap'),
(None, 'open'),
(None, 'optimum'),
(None, 'pattern'),
(None, 'ping'),
(None, 'point-size'),
(None, 'poster'),
(None, 'pqg'),
(None, 'preload'),
(None, 'prompt'),
(None, 'radiogroup'),
(None, 'readonly'),
(None, 'rel'),
(None, 'repeat-max'),
(None, 'repeat-min'),
(None, 'replace'),
(None, 'required'),
(None, 'rev'),
(None, 'rightspacing'),
(None, 'rows'),
(None, 'rowspan'),
(None, 'rules'),
(None, 'scope'),
(None, 'selected'),
(None, 'shape'),
(None, 'size'),
(None, 'span'),
(None, 'src'),
(None, 'start'),
(None, 'step'),
(None, 'style'),
(None, 'summary'),
(None, 'suppress'),
(None, 'tabindex'),
(None, 'target'),
(None, 'template'),
(None, 'title'),
(None, 'toppadding'),
(None, 'type'),
(None, 'unselectable'),
(None, 'usemap'),
(None, 'urn'),
(None, 'valign'),
(None, 'value'),
(None, 'variable'),
(None, 'volume'),
(None, 'vspace'),
(None, 'vrml'),
(None, 'width'),
(None, 'wrap'),
(namespaces['xml'], 'lang'),
# MathML attributes
(None, 'actiontype'),
(None, 'align'),
(None, 'columnalign'),
(None, 'columnalign'),
(None, 'columnalign'),
(None, 'columnlines'),
(None, 'columnspacing'),
(None, 'columnspan'),
(None, 'depth'),
(None, 'display'),
(None, 'displaystyle'),
(None, 'equalcolumns'),
(None, 'equalrows'),
(None, 'fence'),
(None, 'fontstyle'),
(None, 'fontweight'),
(None, 'frame'),
(None, 'height'),
(None, 'linethickness'),
(None, 'lspace'),
(None, 'mathbackground'),
(None, 'mathcolor'),
(None, 'mathvariant'),
(None, 'mathvariant'),
(None, 'maxsize'),
(None, 'minsize'),
(None, 'other'),
(None, 'rowalign'),
(None, 'rowalign'),
(None, 'rowalign'),
(None, 'rowlines'),
(None, 'rowspacing'),
(None, 'rowspan'),
(None, 'rspace'),
(None, 'scriptlevel'),
(None, 'selection'),
(None, 'separator'),
(None, 'stretchy'),
(None, 'width'),
(None, 'width'),
(namespaces['xlink'], 'href'),
(namespaces['xlink'], 'show'),
(namespaces['xlink'], 'type'),
# SVG attributes
(None, 'accent-height'),
(None, 'accumulate'),
(None, 'additive'),
(None, 'alphabetic'),
(None, 'arabic-form'),
(None, 'ascent'),
(None, 'attributeName'),
(None, 'attributeType'),
(None, 'baseProfile'),
(None, 'bbox'),
(None, 'begin'),
(None, 'by'),
(None, 'calcMode'),
(None, 'cap-height'),
(None, 'class'),
(None, 'clip-path'),
(None, 'color'),
(None, 'color-rendering'),
(None, 'content'),
(None, 'cx'),
(None, 'cy'),
(None, 'd'),
(None, 'dx'),
(None, 'dy'),
(None, 'descent'),
(None, 'display'),
(None, 'dur'),
(None, 'end'),
(None, 'fill'),
(None, 'fill-opacity'),
(None, 'fill-rule'),
(None, 'font-family'),
(None, 'font-size'),
(None, 'font-stretch'),
(None, 'font-style'),
(None, 'font-variant'),
(None, 'font-weight'),
(None, 'from'),
(None, 'fx'),
(None, 'fy'),
(None, 'g1'),
(None, 'g2'),
(None, 'glyph-name'),
(None, 'gradientUnits'),
(None, 'hanging'),
(None, 'height'),
(None, 'horiz-adv-x'),
(None, 'horiz-origin-x'),
(None, 'id'),
(None, 'ideographic'),
(None, 'k'),
(None, 'keyPoints'),
(None, 'keySplines'),
(None, 'keyTimes'),
(None, 'lang'),
(None, 'marker-end'),
(None, 'marker-mid'),
(None, 'marker-start'),
(None, 'markerHeight'),
(None, 'markerUnits'),
(None, 'markerWidth'),
(None, 'mathematical'),
(None, 'max'),
(None, 'min'),
(None, 'name'),
(None, 'offset'),
(None, 'opacity'),
(None, 'orient'),
(None, 'origin'),
(None, 'overline-position'),
(None, 'overline-thickness'),
(None, 'panose-1'),
(None, 'path'),
(None, 'pathLength'),
(None, 'points'),
(None, 'preserveAspectRatio'),
(None, 'r'),
(None, 'refX'),
(None, 'refY'),
(None, 'repeatCount'),
(None, 'repeatDur'),
(None, 'requiredExtensions'),
(None, 'requiredFeatures'),
(None, 'restart'),
(None, 'rotate'),
(None, 'rx'),
(None, 'ry'),
(None, 'slope'),
(None, 'stemh'),
(None, 'stemv'),
(None, 'stop-color'),
(None, 'stop-opacity'),
(None, 'strikethrough-position'),
(None, 'strikethrough-thickness'),
(None, 'stroke'),
(None, 'stroke-dasharray'),
(None, 'stroke-dashoffset'),
(None, 'stroke-linecap'),
(None, 'stroke-linejoin'),
(None, 'stroke-miterlimit'),
(None, 'stroke-opacity'),
(None, 'stroke-width'),
(None, 'systemLanguage'),
(None, 'target'),
(None, 'text-anchor'),
(None, 'to'),
(None, 'transform'),
(None, 'type'),
(None, 'u1'),
(None, 'u2'),
(None, 'underline-position'),
(None, 'underline-thickness'),
(None, 'unicode'),
(None, 'unicode-range'),
(None, 'units-per-em'),
(None, 'values'),
(None, 'version'),
(None, 'viewBox'),
(None, 'visibility'),
(None, 'width'),
(None, 'widths'),
(None, 'x'),
(None, 'x-height'),
(None, 'x1'),
(None, 'x2'),
(namespaces['xlink'], 'actuate'),
(namespaces['xlink'], 'arcrole'),
(namespaces['xlink'], 'href'),
(namespaces['xlink'], 'role'),
(namespaces['xlink'], 'show'),
(namespaces['xlink'], 'title'),
(namespaces['xlink'], 'type'),
(namespaces['xml'], 'base'),
(namespaces['xml'], 'lang'),
(namespaces['xml'], 'space'),
(None, 'y'),
(None, 'y1'),
(None, 'y2'),
(None, 'zoomAndPan'),
))
attr_val_is_uri = frozenset((
(None, 'href'),
(None, 'src'),
(None, 'cite'),
(None, 'action'),
(None, 'longdesc'),
(None, 'poster'),
(None, 'background'),
(None, 'datasrc'),
(None, 'dynsrc'),
(None, 'lowsrc'),
(None, 'ping'),
(namespaces['xlink'], 'href'),
(namespaces['xml'], 'base'),
))
svg_attr_val_allows_ref = frozenset((
(None, 'clip-path'),
(None, 'color-profile'),
(None, 'cursor'),
(None, 'fill'),
(None, 'filter'),
(None, 'marker'),
(None, 'marker-start'),
(None, 'marker-mid'),
(None, 'marker-end'),
(None, 'mask'),
(None, 'stroke'),
))
svg_allow_local_href = frozenset((
(None, 'altGlyph'),
(None, 'animate'),
(None, 'animateColor'),
(None, 'animateMotion'),
(None, 'animateTransform'),
(None, 'cursor'),
(None, 'feImage'),
(None, 'filter'),
(None, 'linearGradient'),
(None, 'pattern'),
(None, 'radialGradient'),
(None, 'textpath'),
(None, 'tref'),
(None, 'set'),
(None, 'use')
))
allowed_css_properties = frozenset((
'azimuth',
'background-color',
'border-bottom-color',
'border-collapse',
'border-color',
'border-left-color',
'border-right-color',
'border-top-color',
'clear',
'color',
'cursor',
'direction',
'display',
'elevation',
'float',
'font',
'font-family',
'font-size',
'font-style',
'font-variant',
'font-weight',
'height',
'letter-spacing',
'line-height',
'overflow',
'pause',
'pause-after',
'pause-before',
'pitch',
'pitch-range',
'richness',
'speak',
'speak-header',
'speak-numeral',
'speak-punctuation',
'speech-rate',
'stress',
'text-align',
'text-decoration',
'text-indent',
'unicode-bidi',
'vertical-align',
'voice-family',
'volume',
'white-space',
'width',
))
allowed_css_keywords = frozenset((
'auto',
'aqua',
'black',
'block',
'blue',
'bold',
'both',
'bottom',
'brown',
'center',
'collapse',
'dashed',
'dotted',
'fuchsia',
'gray',
'green',
'!important',
'italic',
'left',
'lime',
'maroon',
'medium',
'none',
'navy',
'normal',
'nowrap',
'olive',
'pointer',
'purple',
'red',
'right',
'solid',
'silver',
'teal',
'top',
'transparent',
'underline',
'white',
'yellow',
))
allowed_svg_properties = frozenset((
'fill',
'fill-opacity',
'fill-rule',
'stroke',
'stroke-width',
'stroke-linecap',
'stroke-linejoin',
'stroke-opacity',
))
allowed_protocols = frozenset((
'ed2k',
'ftp',
'http',
'https',
'irc',
'mailto',
'news',
'gopher',
'nntp',
'telnet',
'webcal',
'xmpp',
'callto',
'feed',
'urn',
'aim',
'rsync',
'tag',
'ssh',
'sftp',
'rtsp',
'afs',
'data',
))
allowed_content_types = frozenset((
'image/png',
'image/jpeg',
'image/gif',
'image/webp',
'image/bmp',
'text/plain',
))
data_content_type = re.compile(r'''
^
# Match a content type <application>/<type>
(?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)
# Match any character set and encoding
(?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)
|(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)
# Assume the rest is data
,.*
$
''',
re.VERBOSE)
class Filter(base.Filter):
"""Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes"""
def __init__(self,
source,
allowed_elements=allowed_elements,
allowed_attributes=allowed_attributes,
allowed_css_properties=allowed_css_properties,
allowed_css_keywords=allowed_css_keywords,
allowed_svg_properties=allowed_svg_properties,
allowed_protocols=allowed_protocols,
allowed_content_types=allowed_content_types,
attr_val_is_uri=attr_val_is_uri,
svg_attr_val_allows_ref=svg_attr_val_allows_ref,
svg_allow_local_href=svg_allow_local_href):
"""Creates a Filter
:arg allowed_elements: set of elements to allow--everything else will
be escaped
:arg allowed_attributes: set of attributes to allow in
elements--everything else will be stripped
:arg allowed_css_properties: set of CSS properties to allow--everything
else will be stripped
:arg allowed_css_keywords: set of CSS keywords to allow--everything
else will be stripped
:arg allowed_svg_properties: set of SVG properties to allow--everything
else will be removed
:arg allowed_protocols: set of allowed protocols for URIs
:arg allowed_content_types: set of allowed content types for ``data`` URIs.
:arg attr_val_is_uri: set of attributes that have URI values--values
that have a scheme not listed in ``allowed_protocols`` are removed
:arg svg_attr_val_allows_ref: set of SVG attributes that can have
references
:arg svg_allow_local_href: set of SVG elements that can have local
hrefs--these are removed
"""
super(Filter, self).__init__(source)
self.allowed_elements = allowed_elements
self.allowed_attributes = allowed_attributes
self.allowed_css_properties = allowed_css_properties
self.allowed_css_keywords = allowed_css_keywords
self.allowed_svg_properties = allowed_svg_properties
self.allowed_protocols = allowed_protocols
self.allowed_content_types = allowed_content_types
self.attr_val_is_uri = attr_val_is_uri
self.svg_attr_val_allows_ref = svg_attr_val_allows_ref
self.svg_allow_local_href = svg_allow_local_href
def __iter__(self):
for token in base.Filter.__iter__(self):
token = self.sanitize_token(token)
if token:
yield token
# Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and
# stripping out all attributes not in ALLOWED_ATTRIBUTES. Style attributes
# are parsed, and a restricted set, specified by ALLOWED_CSS_PROPERTIES and
# ALLOWED_CSS_KEYWORDS, are allowed through. attributes in ATTR_VAL_IS_URI
# are scanned, and only URI schemes specified in ALLOWED_PROTOCOLS are
# allowed.
#
# sanitize_html('<script> do_nasty_stuff() </script>')
# => <script> do_nasty_stuff() </script>
# sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')
# => <a>Click here for $100</a>
def sanitize_token(self, token):
# accommodate filters which use token_type differently
token_type = token["type"]
if token_type in ("StartTag", "EndTag", "EmptyTag"):
name = token["name"]
namespace = token["namespace"]
if ((namespace, name) in self.allowed_elements or
(namespace is None and
(namespaces["html"], name) in self.allowed_elements)):
return self.allowed_token(token)
else:
return self.disallowed_token(token)
elif token_type == "Comment":
pass
else:
return token
def allowed_token(self, token):
if "data" in token:
attrs = token["data"]
attr_names = set(attrs.keys())
# Remove forbidden attributes
for to_remove in (attr_names - self.allowed_attributes):
del token["data"][to_remove]
attr_names.remove(to_remove)
# Remove attributes with disallowed URL values
for attr in (attr_names & self.attr_val_is_uri):
assert attr in attrs
# I don't have a clue where this regexp comes from or why it matches those
# characters, nor why we call unescape. I just know it's always been here.
# Should you be worried by this comment in a sanitizer? Yes. On the other hand, all
# this will do is remove *more* than it otherwise would.
val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '',
unescape(attrs[attr])).lower()
# remove replacement characters from unescaped characters
val_unescaped = val_unescaped.replace("\ufffd", "")
try:
uri = urlparse.urlparse(val_unescaped)
except ValueError:
uri = None
del attrs[attr]
if uri and uri.scheme:
if uri.scheme not in self.allowed_protocols:
del attrs[attr]
if uri.scheme == 'data':
m = data_content_type.match(uri.path)
if not m:
del attrs[attr]
elif m.group('content_type') not in self.allowed_content_types:
del attrs[attr]
for attr in self.svg_attr_val_allows_ref:
if attr in attrs:
attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
' ',
unescape(attrs[attr]))
if (token["name"] in self.svg_allow_local_href and
(namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*',
attrs[(namespaces['xlink'], 'href')])):
del attrs[(namespaces['xlink'], 'href')]
if (None, 'style') in attrs:
attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')])
token["data"] = attrs
return token
def disallowed_token(self, token):
token_type = token["type"]
if token_type == "EndTag":
token["data"] = "</%s>" % token["name"]
elif token["data"]:
assert token_type in ("StartTag", "EmptyTag")
attrs = []
for (ns, name), v in token["data"].items():
attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v)))
token["data"] = "<%s%s>" % (token["name"], ''.join(attrs))
else:
token["data"] = "<%s>" % token["name"]
if token.get("selfClosing"):
token["data"] = token["data"][:-1] + "/>"
token["type"] = "Characters"
del token["name"]
return token
def sanitize_css(self, style):
# disallow urls
style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
# gauntlet
if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
return ''
if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
return ''
clean = []
for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
if not value:
continue
if prop.lower() in self.allowed_css_properties:
clean.append(prop + ': ' + value + ';')
elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
'padding']:
for keyword in value.split():
if keyword not in self.allowed_css_keywords and \
not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa
break
else:
clean.append(prop + ': ' + value + ';')
elif prop.lower() in self.allowed_svg_properties:
clean.append(prop + ': ' + value + ';')
return ' '.join(clean)
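# A minimal usage sketch (written against the standalone html5lib
# distribution, so the import paths differ from this vendored copy; the
# ``dirty`` variable is an assumption):
#
#     import html5lib
#     from html5lib import serializer, treewalkers
#     from html5lib.filters import sanitizer
#
#     dom = html5lib.parseFragment(dirty)
#     walker = treewalkers.getTreeWalker('etree')
#     clean = serializer.HTMLSerializer().render(
#         sanitizer.Filter(walker(dom)))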
| artistic-2.0 | -698,549,267,309,599,400 | 28.294643 | 158 | 0.511201 | false |
zasdfgbnm/tensorflow | tensorflow/contrib/kernel_methods/python/losses_test.py | 23 | 11003 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for third_party.tensorflow.contrib.kernel_methods.python.losses."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.kernel_methods.python import losses
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class SparseMulticlassHingeLossTest(test.TestCase):
def testInvalidLogitsShape(self):
"""An error is raised when logits have invalid shape."""
with self.test_session():
logits = constant_op.constant([-1.0, 2.1], shape=(2,))
labels = constant_op.constant([0, 1])
with self.assertRaises(ValueError):
_ = losses.sparse_multiclass_hinge_loss(labels, logits)
def testInvalidLabelsShape(self):
"""An error is raised when labels have invalid shape."""
with self.test_session():
logits = constant_op.constant([-1.0, 2.1], shape=(2, 1))
labels = constant_op.constant([1, 0], shape=(1, 1, 2))
with self.assertRaises(ValueError):
_ = losses.sparse_multiclass_hinge_loss(labels, logits)
def testInvalidWeightsShape(self):
"""An error is raised when weights have invalid shape."""
with self.test_session():
logits = constant_op.constant([-1.0, 2.1], shape=(2, 1))
labels = constant_op.constant([1, 0], shape=(2,))
weights = constant_op.constant([1.5, 0.2], shape=(2, 1, 1))
with self.assertRaises(ValueError):
_ = losses.sparse_multiclass_hinge_loss(labels, logits, weights)
def testInvalidLabelsDtype(self):
"""An error is raised when labels have invalid shape."""
with self.test_session():
logits = constant_op.constant([-1.0, 2.1], shape=(2, 1))
labels = constant_op.constant([1, 0], dtype=dtypes.float32)
with self.assertRaises(ValueError):
_ = losses.sparse_multiclass_hinge_loss(labels, logits)
def testNoneWeightRaisesValueError(self):
"""An error is raised when weights are None."""
with self.test_session():
logits = constant_op.constant([-1.0, 2.1], shape=(2, 1))
labels = constant_op.constant([1, 0])
with self.assertRaises(ValueError):
_ = losses.sparse_multiclass_hinge_loss(labels, logits, weights=None)
def testInconsistentLabelsAndWeightsShapesSameRank(self):
"""Error raised when weights and labels have same ranks, different sizes."""
with self.test_session():
logits = constant_op.constant([-1.0, 2.1, 4.1], shape=(3, 1))
labels = constant_op.constant([1, 0, 2], shape=(3, 1))
weights = constant_op.constant([1.1, 2.0], shape=(2, 1))
with self.assertRaises(ValueError):
_ = losses.sparse_multiclass_hinge_loss(labels, logits, weights)
def testInconsistentLabelsAndWeightsShapesDifferentRank(self):
"""Error raised when weights and labels have different ranks and sizes."""
with self.test_session():
logits = constant_op.constant([-1.0, 2.1], shape=(2, 1))
labels = constant_op.constant([1, 0], shape=(2, 1))
weights = constant_op.constant([1.1, 2.0, 2.8], shape=(3,))
with self.assertRaises(ValueError):
_ = losses.sparse_multiclass_hinge_loss(labels, logits, weights)
def testOutOfRangeLabels(self):
"""An error is raised when labels are not in [0, num_classes)."""
with self.test_session():
logits = constant_op.constant([[1.2, -1.4, -1.0], [1.4, 1.8, 4.0],
[0.5, 1.8, -1.0]])
labels = constant_op.constant([1, 0, 4])
loss = losses.sparse_multiclass_hinge_loss(labels, logits)
with self.assertRaises(errors.InvalidArgumentError):
loss.eval()
def testZeroLossInt32Labels(self):
"""Loss is 0 if true class logits sufficiently higher than other classes."""
with self.test_session():
logits = constant_op.constant([[1.2, -1.4, -1.0], [1.4, 1.8, 4.0],
[0.5, 1.8, -1.0]])
labels = constant_op.constant([0, 2, 1], dtype=dtypes.int32)
loss = losses.sparse_multiclass_hinge_loss(labels, logits)
self.assertAlmostEqual(loss.eval(), 0.0, 3)
def testZeroLossInt64Labels(self):
"""Loss is 0 if true class logits sufficiently higher than other classes."""
with self.test_session():
logits = constant_op.constant([[2.1, -0.4, -1.0], [1.4, 2.8, 4.0],
[-0.5, 0.8, -1.0]])
labels = constant_op.constant([0, 2, 1], dtype=dtypes.int64)
loss = losses.sparse_multiclass_hinge_loss(labels, logits)
self.assertAlmostEqual(loss.eval(), 0.0, 3)
def testUnknownShape(self):
"""Result keeps same with `testZeroLossInt32Labels`"""
logits_np = np.array([[1.2, -1.4, -1.0], [1.4, 1.8, 4.0], [0.5, 1.8, -1.0]])
labels_np = np.array([0, 2, 1], dtype=np.int32)
logits_shapes = [
[3, 3], # batch_size, num_classes
[None, 3],
[3, None],
[None, None]
]
for batch_size, num_classes in logits_shapes:
with self.test_session():
logits = array_ops.placeholder(
dtypes.float32, shape=(batch_size, num_classes))
labels = array_ops.placeholder(dtypes.int32, shape=(batch_size,))
loss = losses.sparse_multiclass_hinge_loss(labels, logits)
result = loss.eval(feed_dict={logits: logits_np, labels: labels_np})
self.assertAlmostEqual(result, 0.0, 3)
def testCorrectPredictionsSomeClassesInsideMargin(self):
"""Loss is > 0 even if true class logits are higher than other classes."""
with self.test_session():
logits = constant_op.constant([[1.2, -1.4, 0.8], [1.4, 1.8, 4.0],
[1.5, 1.8, -1.0]])
labels = constant_op.constant([0, 2, 1])
loss = losses.sparse_multiclass_hinge_loss(labels, logits)
# The first and third samples incur some loss (0.6 and 0.7 respectively).
self.assertAlmostEqual(loss.eval(), 0.4333, 3)
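  # Worked arithmetic for the expected value above (a sketch of the
  # multiclass hinge rule loss_i = max(0, 1 + max_{j != y_i} logit_j -
  # logit_{y_i})):
  #   sample 1: max(0, 1 + 0.8 - 1.2) = 0.6
  #   sample 2: max(0, 1 + 1.8 - 4.0) = 0.0
  #   sample 3: max(0, 1 + 1.5 - 1.8) = 0.7
  # mean: (0.6 + 0.0 + 0.7) / 3 ~= 0.4333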
def testIncorrectPredictions(self):
"""Loss is >0 when an incorrect class has higher logits than true class."""
with self.test_session():
logits = constant_op.constant([[2.6, 0.4, 0.8], [1.4, 0.8, -1.0],
[0.5, -1.8, 2.0]])
labels = constant_op.constant([1, 0, 2])
loss = losses.sparse_multiclass_hinge_loss(labels, logits)
      # The first example incurs a high loss (3.2) since the logits of an
      # incorrect class (0) are higher than the logits of the ground truth. The
      # second example also incurs a (smaller) loss (0.4).
self.assertAlmostEqual(loss.eval(), 1.2, 3)
def testIncorrectPredictionsColumnLabels(self):
"""Same as above but labels is a rank-2 tensor."""
with self.test_session():
logits = constant_op.constant([[1.6, -0.4, 0.8], [1.5, 0.8, -1.0],
[0.2, -1.8, 4.0]])
labels = constant_op.constant([1, 0, 2], shape=(3, 1))
loss = losses.sparse_multiclass_hinge_loss(labels, logits)
      # The first example incurs a high loss (3.0) since the logits of an
      # incorrect class (0) are higher than the logits of the ground truth. The
      # second example also incurs a (smaller) loss (0.3).
self.assertAlmostEqual(loss.eval(), 1.1, 3)
def testIncorrectPredictionsZeroWeights(self):
"""Loss is 0 when all weights are missing even if predictions are wrong."""
with self.test_session():
logits = constant_op.constant([[1.6, -0.4, 0.8], [1.5, 0.8, -1.0],
[0.2, -1.8, 4.0]])
labels = constant_op.constant([1, 0, 2], shape=(3, 1))
weights = constant_op.constant([0.0, 0.0, 0.0], shape=(3, 1))
loss = losses.sparse_multiclass_hinge_loss(labels, logits, weights)
# No overall loss since all weights are 0.
self.assertAlmostEqual(loss.eval(), 0.0, 3)
def testNonZeroLossWithPythonScalarWeights(self):
"""Weighted loss is correctly computed when weights is a python scalar."""
with self.test_session():
logits = constant_op.constant([[1.6, -0.4, 0.8], [1.5, 0.8, -1.0],
[0.2, -1.8, 4.0]])
labels = constant_op.constant([1, 0, 2], shape=(3, 1))
weights = 10.0
loss = losses.sparse_multiclass_hinge_loss(labels, logits, weights)
self.assertAlmostEqual(loss.eval(), 11.0, 3)
def testNonZeroLossWithScalarTensorWeights(self):
"""Weighted loss is correctly computed when weights is a rank-0 tensor."""
with self.test_session():
logits = constant_op.constant([[1.6, -0.4, 0.8], [1.5, 0.8, -1.0],
[0.2, -1.8, 4.0]])
labels = constant_op.constant([1, 0, 2], shape=(3, 1))
weights = constant_op.constant(5.0)
loss = losses.sparse_multiclass_hinge_loss(labels, logits, weights)
self.assertAlmostEqual(loss.eval(), 5.5, 3)
def testNonZeroLossWith1DTensorWeightsColumnLabels(self):
"""Weighted loss is correctly computed when weights is a rank-0 tensor."""
with self.test_session():
logits = constant_op.constant([[1.6, -0.4, 0.8], [1.5, 0.8, -1.0],
[0.2, -1.8, 4.0]])
labels = constant_op.constant([1, 0, 2], shape=(3, 1))
weights = constant_op.constant([1.0, 0.5, 2.0], shape=(3,))
loss = losses.sparse_multiclass_hinge_loss(labels, logits, weights)
# The overall loss is 1/3 *(3.0*1.0 + 0.5*0.3+ 2.0*0.0) = 1.05
self.assertAlmostEqual(loss.eval(), 1.05, 3)
def testNonZeroLossWith2DTensorWeights1DLabelsSomeWeightsMissing(self):
"""Weighted loss is correctly computed when weights is a rank-0 tensor."""
with self.test_session():
logits = constant_op.constant([[1.6, -0.4, 0.8], [1.5, 0.8, -1.0],
[0.2, -1.8, 4.0], [1.6, 1.8, -4.0]])
labels = constant_op.constant([1, 0, 2, 1])
weights = constant_op.constant([[1.0], [0.0], [2.0], [4.0]])
loss = losses.sparse_multiclass_hinge_loss(labels, logits, weights)
# The overall loss is 1/3 *(3.0*1.0 + 0.0*0.3+ 2.0*0.0 + 4.0*0.8) = 6.2/3.
self.assertAlmostEqual(loss.eval(), 2.06666, 3)
if __name__ == '__main__':
test.main()
| apache-2.0 | 3,086,953,198,525,964,000 | 46.83913 | 80 | 0.624466 | false |
dhamaniasad/mythbox | resources/lib/twisted/twisted/test/test_logfile.py | 59 | 9810 | # Copyright (c) 2001-2007 Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.trial import unittest
# system imports
import os, time, stat
# twisted imports
from twisted.python import logfile, runtime
class LogFileTestCase(unittest.TestCase):
"""
Test the rotating log file.
"""
def setUp(self):
self.dir = self.mktemp()
os.makedirs(self.dir)
self.name = "test.log"
self.path = os.path.join(self.dir, self.name)
def tearDown(self):
"""
        Restore write permissions on created paths: if the tests modified
        the permissions, this allows the paths to be removed afterwards.
"""
os.chmod(self.dir, 0777)
if os.path.exists(self.path):
os.chmod(self.path, 0777)
def testWriting(self):
log = logfile.LogFile(self.name, self.dir)
log.write("123")
log.write("456")
log.flush()
log.write("7890")
log.close()
f = open(self.path, "r")
self.assertEquals(f.read(), "1234567890")
f.close()
def testRotation(self):
# this logfile should rotate every 10 bytes
log = logfile.LogFile(self.name, self.dir, rotateLength=10)
# test automatic rotation
log.write("123")
log.write("4567890")
log.write("1" * 11)
self.assert_(os.path.exists("%s.1" % self.path))
self.assert_(not os.path.exists("%s.2" % self.path))
log.write('')
self.assert_(os.path.exists("%s.1" % self.path))
self.assert_(os.path.exists("%s.2" % self.path))
self.assert_(not os.path.exists("%s.3" % self.path))
log.write("3")
self.assert_(not os.path.exists("%s.3" % self.path))
# test manual rotation
log.rotate()
self.assert_(os.path.exists("%s.3" % self.path))
self.assert_(not os.path.exists("%s.4" % self.path))
log.close()
self.assertEquals(log.listLogs(), [1, 2, 3])
def testAppend(self):
log = logfile.LogFile(self.name, self.dir)
log.write("0123456789")
log.close()
log = logfile.LogFile(self.name, self.dir)
self.assertEquals(log.size, 10)
self.assertEquals(log._file.tell(), log.size)
log.write("abc")
self.assertEquals(log.size, 13)
self.assertEquals(log._file.tell(), log.size)
f = log._file
f.seek(0, 0)
self.assertEquals(f.read(), "0123456789abc")
log.close()
def testLogReader(self):
log = logfile.LogFile(self.name, self.dir)
log.write("abc\n")
log.write("def\n")
log.rotate()
log.write("ghi\n")
log.flush()
# check reading logs
self.assertEquals(log.listLogs(), [1])
reader = log.getCurrentLog()
reader._file.seek(0)
self.assertEquals(reader.readLines(), ["ghi\n"])
self.assertEquals(reader.readLines(), [])
reader.close()
reader = log.getLog(1)
self.assertEquals(reader.readLines(), ["abc\n", "def\n"])
self.assertEquals(reader.readLines(), [])
reader.close()
# check getting illegal log readers
self.assertRaises(ValueError, log.getLog, 2)
self.assertRaises(TypeError, log.getLog, "1")
# check that log numbers are higher for older logs
log.rotate()
self.assertEquals(log.listLogs(), [1, 2])
reader = log.getLog(1)
reader._file.seek(0)
self.assertEquals(reader.readLines(), ["ghi\n"])
self.assertEquals(reader.readLines(), [])
reader.close()
reader = log.getLog(2)
self.assertEquals(reader.readLines(), ["abc\n", "def\n"])
self.assertEquals(reader.readLines(), [])
reader.close()
def testModePreservation(self):
"""
Check rotated files have same permissions as original.
"""
        open(self.path, "w").close()
os.chmod(self.path, 0707)
mode = os.stat(self.path)[stat.ST_MODE]
log = logfile.LogFile(self.name, self.dir)
log.write("abc")
log.rotate()
self.assertEquals(mode, os.stat(self.path)[stat.ST_MODE])
def test_noPermission(self):
"""
Check it keeps working when permission on dir changes.
"""
log = logfile.LogFile(self.name, self.dir)
log.write("abc")
# change permissions so rotation would fail
os.chmod(self.dir, 0555)
# if this succeeds, chmod doesn't restrict us, so we can't
# do the test
try:
f = open(os.path.join(self.dir,"xxx"), "w")
except (OSError, IOError):
pass
else:
f.close()
return
log.rotate() # this should not fail
log.write("def")
log.flush()
f = log._file
self.assertEquals(f.tell(), 6)
f.seek(0, 0)
self.assertEquals(f.read(), "abcdef")
log.close()
def test_maxNumberOfLog(self):
"""
        Test that it respects the limit on the number of files when
        maxRotatedFiles is not None.
"""
log = logfile.LogFile(self.name, self.dir, rotateLength=10,
maxRotatedFiles=3)
log.write("1" * 11)
log.write("2" * 11)
self.failUnless(os.path.exists("%s.1" % self.path))
log.write("3" * 11)
self.failUnless(os.path.exists("%s.2" % self.path))
log.write("4" * 11)
self.failUnless(os.path.exists("%s.3" % self.path))
self.assertEquals(file("%s.3" % self.path).read(), "1" * 11)
log.write("5" * 11)
self.assertEquals(file("%s.3" % self.path).read(), "2" * 11)
self.failUnless(not os.path.exists("%s.4" % self.path))
def test_fromFullPath(self):
"""
Test the fromFullPath method.
"""
log1 = logfile.LogFile(self.name, self.dir, 10, defaultMode=0777)
log2 = logfile.LogFile.fromFullPath(self.path, 10, defaultMode=0777)
self.assertEquals(log1.name, log2.name)
self.assertEquals(os.path.abspath(log1.path), log2.path)
self.assertEquals(log1.rotateLength, log2.rotateLength)
self.assertEquals(log1.defaultMode, log2.defaultMode)
def test_defaultPermissions(self):
"""
Test the default permission of the log file: if the file exist, it
should keep the permission.
"""
f = file(self.path, "w")
os.chmod(self.path, 0707)
currentMode = stat.S_IMODE(os.stat(self.path)[stat.ST_MODE])
f.close()
log1 = logfile.LogFile(self.name, self.dir)
self.assertEquals(stat.S_IMODE(os.stat(self.path)[stat.ST_MODE]),
currentMode)
def test_specifiedPermissions(self):
"""
Test specifying the permissions used on the log file.
"""
log1 = logfile.LogFile(self.name, self.dir, defaultMode=0066)
mode = stat.S_IMODE(os.stat(self.path)[stat.ST_MODE])
if runtime.platform.isWindows():
# The only thing we can get here is global read-only
self.assertEquals(mode, 0444)
else:
self.assertEquals(mode, 0066)
def test_reopen(self):
"""
L{logfile.LogFile.reopen} allows to rename the currently used file and
make L{logfile.LogFile} create a new file.
"""
log1 = logfile.LogFile(self.name, self.dir)
log1.write("hello1")
savePath = os.path.join(self.dir, "save.log")
os.rename(self.path, savePath)
log1.reopen()
log1.write("hello2")
log1.close()
f = open(self.path, "r")
self.assertEquals(f.read(), "hello2")
f.close()
f = open(savePath, "r")
self.assertEquals(f.read(), "hello1")
f.close()
if runtime.platform.isWindows():
test_reopen.skip = "Can't test reopen on Windows"
class RiggedDailyLogFile(logfile.DailyLogFile):
_clock = 0.0
def _openFile(self):
logfile.DailyLogFile._openFile(self)
# rig the date to match _clock, not mtime
self.lastDate = self.toDate()
def toDate(self, *args):
if args:
return time.gmtime(*args)[:3]
return time.gmtime(self._clock)[:3]
class DailyLogFileTestCase(unittest.TestCase):
"""
    Test the daily rotating log file.
"""
def setUp(self):
self.dir = self.mktemp()
os.makedirs(self.dir)
self.name = "testdaily.log"
self.path = os.path.join(self.dir, self.name)
def testWriting(self):
log = RiggedDailyLogFile(self.name, self.dir)
log.write("123")
log.write("456")
log.flush()
log.write("7890")
log.close()
f = open(self.path, "r")
self.assertEquals(f.read(), "1234567890")
f.close()
def testRotation(self):
# this logfile should rotate every 10 bytes
log = RiggedDailyLogFile(self.name, self.dir)
days = [(self.path + '.' + log.suffix(day * 86400)) for day in range(3)]
# test automatic rotation
log._clock = 0.0 # 1970/01/01 00:00.00
log.write("123")
log._clock = 43200 # 1970/01/01 12:00.00
log.write("4567890")
log._clock = 86400 # 1970/01/02 00:00.00
log.write("1" * 11)
self.assert_(os.path.exists(days[0]))
self.assert_(not os.path.exists(days[1]))
log._clock = 172800 # 1970/01/03 00:00.00
log.write('')
self.assert_(os.path.exists(days[0]))
self.assert_(os.path.exists(days[1]))
self.assert_(not os.path.exists(days[2]))
log._clock = 259199 # 1970/01/03 23:59.59
log.write("3")
self.assert_(not os.path.exists(days[2]))
| gpl-2.0 | 7,716,343,718,608,017,000 | 30.242038 | 80 | 0.57105 | false |
svn2github/django | django/core/management/commands/test.py | 111 | 2965 | import sys
import os
from optparse import make_option, OptionParser
from django.conf import settings
from django.core.management.base import BaseCommand
from django.test.utils import get_runner
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--noinput',
action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.'),
make_option('--failfast',
action='store_true', dest='failfast', default=False,
help='Tells Django to stop running the test suite after first '
'failed test.'),
make_option('--testrunner',
action='store', dest='testrunner',
help='Tells Django to use specified test runner class instead of '
'the one specified by the TEST_RUNNER setting.'),
make_option('--liveserver',
action='store', dest='liveserver', default=None,
help='Overrides the default address where the live server (used '
'with LiveServerTestCase) is expected to run from. The '
'default value is localhost:8081.'),
)
help = ('Runs the test suite for the specified applications, or the '
'entire site if no apps are specified.')
args = '[appname ...]'
requires_model_validation = False
def __init__(self):
self.test_runner = None
super(Command, self).__init__()
def run_from_argv(self, argv):
"""
Pre-parse the command line to extract the value of the --testrunner
option. This allows a test runner to define additional command line
arguments.
"""
option = '--testrunner='
for arg in argv[2:]:
if arg.startswith(option):
self.test_runner = arg[len(option):]
break
super(Command, self).run_from_argv(argv)
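    # A minimal sketch of a runner that contributes its own option
    # (illustrative only; MyTestRunner and --tag are hypothetical names):
    #
    #     class MyTestRunner(DjangoTestSuiteRunner):
    #         option_list = (
    #             make_option('--tag', action='append', dest='tags',
    #                         help='Run only tests carrying this tag.'),
    #         )
    #
    # invoked as:
    #     ./manage.py test --testrunner=path.to.MyTestRunner --tag=fast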
def create_parser(self, prog_name, subcommand):
test_runner_class = get_runner(settings, self.test_runner)
options = self.option_list + getattr(
test_runner_class, 'option_list', ())
return OptionParser(prog=prog_name,
usage=self.usage(subcommand),
version=self.get_version(),
option_list=options)
def handle(self, *test_labels, **options):
from django.conf import settings
from django.test.utils import get_runner
TestRunner = get_runner(settings, options.get('testrunner'))
options['verbosity'] = int(options.get('verbosity'))
if options.get('liveserver') is not None:
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = options['liveserver']
del options['liveserver']
test_runner = TestRunner(**options)
failures = test_runner.run_tests(test_labels)
if failures:
sys.exit(bool(failures))
| bsd-3-clause | -3,677,656,493,452,897,300 | 38.533333 | 81 | 0.601012 | false |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.2/Lib/plat-linux2/DLFCN.py | 2 | 1632 | # Generated by h2py from /usr/include/dlfcn.h
_DLFCN_H = 1
# Included from features.h
_FEATURES_H = 1
__USE_ANSI = 1
__FAVOR_BSD = 1
_ISOC99_SOURCE = 1
_POSIX_SOURCE = 1
_POSIX_C_SOURCE = 199506L
_XOPEN_SOURCE = 600
_XOPEN_SOURCE_EXTENDED = 1
_LARGEFILE64_SOURCE = 1
_BSD_SOURCE = 1
_SVID_SOURCE = 1
_BSD_SOURCE = 1
_SVID_SOURCE = 1
__USE_ISOC99 = 1
_POSIX_SOURCE = 1
_POSIX_C_SOURCE = 2
_POSIX_C_SOURCE = 199506L
__USE_POSIX = 1
__USE_POSIX2 = 1
__USE_POSIX199309 = 1
__USE_POSIX199506 = 1
__USE_XOPEN = 1
__USE_XOPEN_EXTENDED = 1
__USE_UNIX98 = 1
_LARGEFILE_SOURCE = 1
__USE_XOPEN2K = 1
__USE_ISOC99 = 1
__USE_XOPEN_EXTENDED = 1
__USE_LARGEFILE = 1
__USE_LARGEFILE64 = 1
__USE_FILE_OFFSET64 = 1
__USE_MISC = 1
__USE_BSD = 1
__USE_SVID = 1
__USE_GNU = 1
__USE_REENTRANT = 1
__STDC_IEC_559__ = 1
__STDC_IEC_559_COMPLEX__ = 1
__STDC_ISO_10646__ = 200009L
__GNU_LIBRARY__ = 6
__GLIBC__ = 2
__GLIBC_MINOR__ = 2
# Included from sys/cdefs.h
_SYS_CDEFS_H = 1
def __PMT(args): return args
def __P(args): return args
def __PMT(args): return args
def __STRING(x): return #x
__flexarr = []
__flexarr = [0]
__flexarr = []
__flexarr = [1]
def __ASMNAME(cname): return __ASMNAME2 (__USER_LABEL_PREFIX__, cname)
def __attribute__(xyz): return
def __attribute_format_arg__(x): return __attribute__ ((__format_arg__ (x)))
def __attribute_format_arg__(x): return
__USE_LARGEFILE = 1
__USE_LARGEFILE64 = 1
__USE_EXTERN_INLINES = 1
# Included from gnu/stubs.h
# Included from bits/dlfcn.h
RTLD_LAZY = 0x00001
RTLD_NOW = 0x00002
RTLD_BINDING_MASK = 0x3
RTLD_NOLOAD = 0x00004
RTLD_GLOBAL = 0x00100
RTLD_LOCAL = 0
RTLD_NODELETE = 0x01000
| mit | -901,191,265,845,006,600 | 18.662651 | 76 | 0.651348 | false |
hoevenvd/weewx_poller | bin/weedb/sqlite.py | 1 | 4562 | #
# Copyright (c) 2012 Tom Keffer <[email protected]>
#
# See the file LICENSE.txt for your full rights.
#
# $Revision: 829 $
# $Author: tkeffer $
# $Date: 2013-01-19 08:05:49 -0800 (Sat, 19 Jan 2013) $
#
"""Driver for sqlite"""
import os.path
# Import sqlite3. If it does not support the 'with' statement, then
# import pysqlite2, which might...
import sqlite3
if not hasattr(sqlite3.Connection, "__exit__"):
del sqlite3
from pysqlite2 import dbapi2 as sqlite3 #@Reimport @UnresolvedImport
import weedb
def connect(database='', root='', driver='', **argv):
"""Factory function, to keep things compatible with DBAPI. """
return Connection(database=database, root=root, **argv)
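# A minimal usage sketch (the database name and root directory below are
# hypothetical):
#
#     create(database='archive.sdb', root='/var/lib/weedb')
#     conn = connect(database='archive.sdb', root='/var/lib/weedb')
#     print conn.tables()
#     conn.close()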
def create(database='', root='', driver='', **argv):
"""Create the database specified by the db_dict. If it already exists,
an exception of type DatabaseExists will be thrown."""
file_path = os.path.join(root, database)
# Check whether the database file exists:
if os.path.exists(file_path):
raise weedb.DatabaseExists("Database %s already exists" % (file_path,))
else:
# If it doesn't exist, create the parent directories
fileDirectory = os.path.dirname(file_path)
if not os.path.exists(fileDirectory):
os.makedirs(fileDirectory)
connection = sqlite3.connect(file_path, **argv)
connection.close()
def drop(database='', root='', driver='', **argv):
file_path = os.path.join(root, database)
try:
os.remove(file_path)
except OSError:
raise weedb.NoDatabase("""Attempt to drop non-existent database %s""" % (file_path,))
class Connection(weedb.Connection):
"""A wrapper around a sqlite3 connection object."""
def __init__(self, database='', root='', **argv):
"""Initialize an instance of Connection.
Parameters:
        database: Name of the sqlite database file (required)
        root: An optional path to be prefixed to parameter 'database'. If not
        given, nothing will be prefixed.
If the operation fails, an exception of type weedb.OperationalError will be raised.
"""
self.file_path = os.path.join(root, database)
if not os.path.exists(self.file_path):
raise weedb.OperationalError("Attempt to open a non-existent database %s" % database)
try:
connection = sqlite3.connect(self.file_path, **argv)
except sqlite3.OperationalError:
# The Pysqlite driver does not include the database file path.
# Include it in case it might be useful.
raise weedb.OperationalError("Unable to open database '%s'" % (self.file_path,))
weedb.Connection.__init__(self, connection, database, 'sqlite')
def cursor(self):
"""Return a cursor object."""
return Cursor(self.connection)
def tables(self):
"""Returns a list of tables in the database."""
table_list = list()
for row in self.connection.execute("""SELECT tbl_name FROM sqlite_master WHERE type='table';"""):
# Extract the table name. Sqlite returns unicode, so always
# convert to a regular string:
table_list.append(str(row[0]))
return table_list
def columnsOf(self, table):
"""Return a list of columns in the specified table. If the table does not exist,
None is returned."""
column_list = list()
for row in self.connection.execute("""PRAGMA table_info(%s);""" % table):
# Append this column to the list of columns.
column_list.append(str(row[1]))
        # If there are no columns (which means the table did not exist), raise an exception
if not column_list:
raise weedb.OperationalError("No such table %s" % table)
return column_list
def begin(self):
self.connection.execute("BEGIN TRANSACTION")
class Cursor(sqlite3.Cursor):
"""A wrapper around the sqlite cursor object"""
# The sqlite3 cursor object is very full featured. We need only turn
# the sqlite exceptions into weedb exceptions.
def __init__(self, *args, **kwargs):
sqlite3.Cursor.__init__(self, *args, **kwargs)
def execute(self, *args, **kwargs):
try:
return sqlite3.Cursor.execute(self, *args, **kwargs)
except sqlite3.OperationalError, e:
# Convert to a weedb exception
            raise weedb.OperationalError(e)
| gpl-3.0 | 3,043,879,038,853,410,000 | 37.025 | 105 | 0.623411 | false |
mozilla/make.mozilla.org | vendor-local/lib/python/celery/tests/test_backends/test_redis.py | 14 | 3385 | from __future__ import absolute_import
from __future__ import with_statement
import sys
import socket
from nose import SkipTest
from celery.exceptions import ImproperlyConfigured
from celery import states
from celery.utils import uuid
from celery.backends import redis
from celery.backends.redis import RedisBackend
from celery.tests.utils import Case, mask_modules
_no_redis_msg = "* Redis %s. Will not execute related tests."
_no_redis_msg_emitted = False
try:
from redis.exceptions import ConnectionError
except ImportError:
class ConnectionError(socket.error): # noqa
pass
class SomeClass(object):
def __init__(self, data):
self.data = data
def get_redis_or_SkipTest():
def emit_no_redis_msg(reason):
global _no_redis_msg_emitted
if not _no_redis_msg_emitted:
sys.stderr.write("\n" + _no_redis_msg % reason + "\n")
_no_redis_msg_emitted = True
if redis.redis is None:
emit_no_redis_msg("not installed")
raise SkipTest("redis library not installed")
try:
tb = RedisBackend(redis_db="celery_unittest")
try:
# Evaluate lazy connection
tb.client.info()
except ConnectionError, exc:
emit_no_redis_msg("not running")
raise SkipTest("can't connect to redis: %s" % (exc, ))
return tb
except ImproperlyConfigured, exc:
if "need to install" in str(exc):
return emit_no_redis_msg("not installed")
return emit_no_redis_msg("not configured")
class TestRedisBackend(Case):
def test_mark_as_done(self):
tb = get_redis_or_SkipTest()
tid = uuid()
self.assertEqual(tb.get_status(tid), states.PENDING)
self.assertIsNone(tb.get_result(tid))
tb.mark_as_done(tid, 42)
self.assertEqual(tb.get_status(tid), states.SUCCESS)
self.assertEqual(tb.get_result(tid), 42)
def test_is_pickled(self):
tb = get_redis_or_SkipTest()
tid2 = uuid()
result = {"foo": "baz", "bar": SomeClass(12345)}
tb.mark_as_done(tid2, result)
        # Check that the result above is serialized and deserialized properly.
rindb = tb.get_result(tid2)
self.assertEqual(rindb.get("foo"), "baz")
self.assertEqual(rindb.get("bar").data, 12345)
def test_mark_as_failure(self):
tb = get_redis_or_SkipTest()
tid3 = uuid()
try:
raise KeyError("foo")
except KeyError, exception:
pass
tb.mark_as_failure(tid3, exception)
self.assertEqual(tb.get_status(tid3), states.FAILURE)
self.assertIsInstance(tb.get_result(tid3), KeyError)
class TestRedisBackendNoRedis(Case):
def test_redis_None_if_redis_not_installed(self):
prev = sys.modules.pop("celery.backends.redis")
try:
with mask_modules("redis"):
from celery.backends.redis import redis
self.assertIsNone(redis)
finally:
sys.modules["celery.backends.redis"] = prev
def test_constructor_raises_if_redis_not_installed(self):
from celery.backends import redis
prev = redis.RedisBackend.redis
redis.RedisBackend.redis = None
try:
with self.assertRaises(ImproperlyConfigured):
redis.RedisBackend()
finally:
redis.RedisBackend.redis = prev
| bsd-3-clause | -5,259,620,428,144,572,000 | 28.181034 | 66 | 0.625702 | false |
hryamzik/ansible | lib/ansible/modules/windows/win_package.py | 4 | 9677 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Trond Hindenes <[email protected]>, and others
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: win_package
version_added: "1.7"
short_description: Installs/uninstalls an installable package
description:
- Installs or uninstalls a package in either an MSI or EXE format.
- These packages can be sourced from the local file system, a network file
  share or a URL.
- Please read the notes section around some caveats with this module.
options:
arguments:
description:
- Any arguments the installer needs to either install or uninstall the
package.
- If the package is an MSI do not supply the C(/qn), C(/log) or
C(/norestart) arguments.
- As of Ansible 2.5, this parameter can be a list of arguments and the
module will escape the arguments as necessary, it is recommended to use a
string when dealing with MSI packages due to the unique escaping issues
with msiexec.
creates_path:
description:
    - Will check the existence of the path specified and use the result to
      determine whether the package is already installed.
- You can use this in conjunction with C(product_id) and other C(creates_*).
type: path
version_added: '2.4'
creates_service:
description:
    - Will check the existence of the service specified and use the result to
      determine whether the package is already installed.
- You can use this in conjunction with C(product_id) and other C(creates_*).
version_added: '2.4'
creates_version:
description:
- Will check the file version property of the file at C(creates_path) and
use the result to determine whether the package is already installed.
- C(creates_path) MUST be set and is a file.
- You can use this in conjunction with C(product_id) and other C(creates_*).
version_added: '2.4'
expected_return_code:
description:
- One or more return codes from the package installation that indicates
success.
- Before Ansible 2.4 this was just 0 but since 2.4 this is both C(0) and
C(3010).
- A return code of C(3010) usually means that a reboot is required, the
C(reboot_required) return value is set if the return code is C(3010).
type: list
default: [0, 3010]
password:
description:
- The password for C(user_name), must be set when C(user_name) is.
aliases: [ user_password ]
path:
description:
- Location of the package to be installed or uninstalled.
    - This package can either be on the local file system, a network share or
      a URL.
- If the path is on a network share and the current WinRM transport doesn't
support credential delegation, then C(user_name) and C(user_password)
must be set to access the file.
- There are cases where this file will be copied locally to the server so
it can access it, see the notes for more info.
- If C(state=present) then this value MUST be set.
- If C(state=absent) then this value does not need to be set if
C(product_id) is.
product_id:
description:
    - The product id of the installed package.
- This is used for checking whether the product is already installed and
getting the uninstall information if C(state=absent).
- You can find product ids for installed programs in the Windows registry
editor either at
C(HKLM:Software\Microsoft\Windows\CurrentVersion\Uninstall) or for 32 bit
programs at
C(HKLM:Software\Wow6432Node\Microsoft\Windows\CurrentVersion\Uninstall).
- This SHOULD be set when the package is not an MSI, or the path is a url
or a network share and credential delegation is not being used. The
C(creates_*) options can be used instead but is not recommended.
aliases: [ productid ]
state:
description:
- Whether to install or uninstall the package.
- The module uses C(product_id) and whether it exists at the registry path
to see whether it needs to install or uninstall the package.
default: present
aliases: [ ensure ]
username:
description:
- Username of an account with access to the package if it is located on a
file share.
- This is only needed if the WinRM transport is over an auth method that
does not support credential delegation like Basic or NTLM.
aliases: [ user_name ]
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be
used on personally controlled sites using self-signed certificates.
- Before Ansible 2.4 this defaulted to C(no).
type: bool
default: 'yes'
version_added: '2.4'
notes:
- For non Windows targets, use the M(package) module instead.
- When C(state=absent) and the product is an exe, the path may be different
from what was used to install the package originally. If path is not set then
the path used will be what is set under C(UninstallString) in the registry
for that product_id.
- Not all product ids are in a GUID form, some programs incorrectly use a
different structure but this module should support any format.
- By default all msi installs and uninstalls will be run with the options
C(/log, /qn, /norestart).
- It is recommended you download the package first from the URL using the
M(win_get_url) module as it opens up more flexibility with what must be set
when calling C(win_package).
- Packages will be temporarily downloaded or copied locally when path is a
network location and credential delegation is not set, or path is a URL
and the file is not an MSI.
- All the installation checks under C(product_id) and C(creates_*) add
together, if one fails then the program is considered to be absent.
author:
- Trond Hindenes (@trondhindenes)
- Jordan Borean (@jborean93)
'''
EXAMPLES = r'''
- name: Install the Visual C thingy
win_package:
path: http://download.microsoft.com/download/1/6/B/16B06F60-3B20-4FF2-B699-5E9B7962F9AE/VSU_4/vcredist_x64.exe
product_id: '{CF2BEA3C-26EA-32F8-AA9B-331F7E34BA97}'
arguments: /install /passive /norestart
- name: Install Visual C thingy with list of arguments instead of a string
win_package:
path: http://download.microsoft.com/download/1/6/B/16B06F60-3B20-4FF2-B699-5E9B7962F9AE/VSU_4/vcredist_x64.exe
product_id: '{CF2BEA3C-26EA-32F8-AA9B-331F7E34BA97}'
arguments:
- /install
- /passive
- /norestart
- name: Install Remote Desktop Connection Manager from msi
win_package:
path: https://download.microsoft.com/download/A/F/0/AF0071F3-B198-4A35-AA90-C68D103BDCCF/rdcman.msi
product_id: '{0240359E-6A4C-4884-9E94-B397A02D893C}'
state: present
- name: Uninstall Remote Desktop Connection Manager
win_package:
product_id: '{0240359E-6A4C-4884-9E94-B397A02D893C}'
state: absent
- name: Install Remote Desktop Connection Manager locally omitting the product_id
win_package:
path: C:\temp\rdcman.msi
state: present
- name: Uninstall Remote Desktop Connection Manager from local MSI omitting the product_id
win_package:
path: C:\temp\rdcman.msi
state: absent
# 7-Zip exe doesn't use a guid for the Product ID
- name: Install 7zip from a network share specifying the credentials
win_package:
path: \\domain\programs\7z.exe
product_id: 7-Zip
arguments: /S
state: present
user_name: DOMAIN\User
user_password: Password
- name: Install 7zip and use a file version for the installation check
win_package:
path: C:\temp\7z.exe
creates_path: C:\Program Files\7-Zip\7z.exe
creates_version: 16.04
state: present
- name: Uninstall 7zip from the exe
win_package:
path: C:\Program Files\7-Zip\Uninstall.exe
product_id: 7-Zip
arguments: /S
state: absent
- name: Uninstall 7zip without specifying the path
win_package:
product_id: 7-Zip
arguments: /S
state: absent
- name: Install application and override expected return codes
win_package:
path: https://download.microsoft.com/download/1/6/7/167F0D79-9317-48AE-AEDB-17120579F8E2/NDP451-KB2858728-x86-x64-AllOS-ENU.exe
product_id: '{7DEBE4EB-6B40-3766-BB35-5CBBC385DA37}'
arguments: '/q /norestart'
state: present
expected_return_code: [0, 666, 3010]
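
# An additional illustrative example built on the creates_service option
# documented above (the path and service name here are hypothetical):
- name: Install package and use a Windows service for the installation check
  win_package:
    path: C:\temp\installer.msi
    creates_service: MyHypotheticalService
    state: present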
'''
RETURN = r'''
exit_code:
description: See rc, this will be removed in favour of rc in Ansible 2.6.
  returned: change occurred
type: int
sample: 0
log:
description: The contents of the MSI log.
  returned: change occurred and package is an MSI
type: str
sample: Installation completed successfully
rc:
description: The return code of the package process.
  returned: change occurred
type: int
sample: 0
reboot_required:
  description: Whether a reboot is required to finalise the package. This is
    set to true if the executable return code is 3010.
returned: always
type: bool
sample: True
restart_required:
description: See reboot_required, this will be removed in favour of
reboot_required in Ansible 2.6
returned: always
type: bool
sample: True
stdout:
description: The stdout stream of the package process.
returned: failure during install or uninstall
type: str
sample: Installing program
stderr:
description: The stderr stream of the package process.
returned: failure during install or uninstall
type: str
sample: Failed to install program
'''
| gpl-3.0 | -4,069,391,142,130,802,700 | 36.94902 | 131 | 0.715718 | false |
thaddeusdiamond/Social-Hierarchical-Learning | ext/protobuf/python/google/protobuf/internal/test_util.py | 295 | 27103 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for Python proto2 tests.
This is intentionally modeled on C++ code in
//google/protobuf/test_util.*.
"""
__author__ = '[email protected] (Will Robinson)'
import os.path
from google.protobuf import unittest_import_pb2
from google.protobuf import unittest_pb2
def SetAllFields(message):
"""Sets every field in the message to a unique value.
Args:
message: A unittest_pb2.TestAllTypes instance.
"""
#
# Optional fields.
#
message.optional_int32 = 101
message.optional_int64 = 102
message.optional_uint32 = 103
message.optional_uint64 = 104
message.optional_sint32 = 105
message.optional_sint64 = 106
message.optional_fixed32 = 107
message.optional_fixed64 = 108
message.optional_sfixed32 = 109
message.optional_sfixed64 = 110
message.optional_float = 111
message.optional_double = 112
message.optional_bool = True
# TODO(robinson): Firmly spec out and test how
# protos interact with unicode. One specific example:
# what happens if we change the literal below to
# u'115'? What *should* happen? Still some discussion
# to finish with Kenton about bytes vs. strings
# and forcing everything to be utf8. :-/
message.optional_string = '115'
message.optional_bytes = '116'
message.optionalgroup.a = 117
message.optional_nested_message.bb = 118
message.optional_foreign_message.c = 119
message.optional_import_message.d = 120
message.optional_nested_enum = unittest_pb2.TestAllTypes.BAZ
message.optional_foreign_enum = unittest_pb2.FOREIGN_BAZ
message.optional_import_enum = unittest_import_pb2.IMPORT_BAZ
message.optional_string_piece = '124'
message.optional_cord = '125'
#
# Repeated fields.
#
message.repeated_int32.append(201)
message.repeated_int64.append(202)
message.repeated_uint32.append(203)
message.repeated_uint64.append(204)
message.repeated_sint32.append(205)
message.repeated_sint64.append(206)
message.repeated_fixed32.append(207)
message.repeated_fixed64.append(208)
message.repeated_sfixed32.append(209)
message.repeated_sfixed64.append(210)
message.repeated_float.append(211)
message.repeated_double.append(212)
message.repeated_bool.append(True)
message.repeated_string.append('215')
message.repeated_bytes.append('216')
message.repeatedgroup.add().a = 217
message.repeated_nested_message.add().bb = 218
message.repeated_foreign_message.add().c = 219
message.repeated_import_message.add().d = 220
message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAR)
message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAR)
message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAR)
message.repeated_string_piece.append('224')
message.repeated_cord.append('225')
# Add a second one of each field.
message.repeated_int32.append(301)
message.repeated_int64.append(302)
message.repeated_uint32.append(303)
message.repeated_uint64.append(304)
message.repeated_sint32.append(305)
message.repeated_sint64.append(306)
message.repeated_fixed32.append(307)
message.repeated_fixed64.append(308)
message.repeated_sfixed32.append(309)
message.repeated_sfixed64.append(310)
message.repeated_float.append(311)
message.repeated_double.append(312)
message.repeated_bool.append(False)
message.repeated_string.append('315')
message.repeated_bytes.append('316')
message.repeatedgroup.add().a = 317
message.repeated_nested_message.add().bb = 318
message.repeated_foreign_message.add().c = 319
message.repeated_import_message.add().d = 320
message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAZ)
message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAZ)
message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAZ)
message.repeated_string_piece.append('324')
message.repeated_cord.append('325')
#
# Fields that have defaults.
#
message.default_int32 = 401
message.default_int64 = 402
message.default_uint32 = 403
message.default_uint64 = 404
message.default_sint32 = 405
message.default_sint64 = 406
message.default_fixed32 = 407
message.default_fixed64 = 408
message.default_sfixed32 = 409
message.default_sfixed64 = 410
message.default_float = 411
message.default_double = 412
message.default_bool = False
message.default_string = '415'
message.default_bytes = '416'
message.default_nested_enum = unittest_pb2.TestAllTypes.FOO
message.default_foreign_enum = unittest_pb2.FOREIGN_FOO
message.default_import_enum = unittest_import_pb2.IMPORT_FOO
message.default_string_piece = '424'
message.default_cord = '425'
def SetAllExtensions(message):
"""Sets every extension in the message to a unique value.
Args:
message: A unittest_pb2.TestAllExtensions instance.
"""
extensions = message.Extensions
pb2 = unittest_pb2
import_pb2 = unittest_import_pb2
#
# Optional fields.
#
extensions[pb2.optional_int32_extension] = 101
extensions[pb2.optional_int64_extension] = 102
extensions[pb2.optional_uint32_extension] = 103
extensions[pb2.optional_uint64_extension] = 104
extensions[pb2.optional_sint32_extension] = 105
extensions[pb2.optional_sint64_extension] = 106
extensions[pb2.optional_fixed32_extension] = 107
extensions[pb2.optional_fixed64_extension] = 108
extensions[pb2.optional_sfixed32_extension] = 109
extensions[pb2.optional_sfixed64_extension] = 110
extensions[pb2.optional_float_extension] = 111
extensions[pb2.optional_double_extension] = 112
extensions[pb2.optional_bool_extension] = True
extensions[pb2.optional_string_extension] = '115'
extensions[pb2.optional_bytes_extension] = '116'
extensions[pb2.optionalgroup_extension].a = 117
extensions[pb2.optional_nested_message_extension].bb = 118
extensions[pb2.optional_foreign_message_extension].c = 119
extensions[pb2.optional_import_message_extension].d = 120
extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ
extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ
extensions[pb2.optional_foreign_enum_extension] = pb2.FOREIGN_BAZ
extensions[pb2.optional_import_enum_extension] = import_pb2.IMPORT_BAZ
extensions[pb2.optional_string_piece_extension] = '124'
extensions[pb2.optional_cord_extension] = '125'
#
# Repeated fields.
#
extensions[pb2.repeated_int32_extension].append(201)
extensions[pb2.repeated_int64_extension].append(202)
extensions[pb2.repeated_uint32_extension].append(203)
extensions[pb2.repeated_uint64_extension].append(204)
extensions[pb2.repeated_sint32_extension].append(205)
extensions[pb2.repeated_sint64_extension].append(206)
extensions[pb2.repeated_fixed32_extension].append(207)
extensions[pb2.repeated_fixed64_extension].append(208)
extensions[pb2.repeated_sfixed32_extension].append(209)
extensions[pb2.repeated_sfixed64_extension].append(210)
extensions[pb2.repeated_float_extension].append(211)
extensions[pb2.repeated_double_extension].append(212)
extensions[pb2.repeated_bool_extension].append(True)
extensions[pb2.repeated_string_extension].append('215')
extensions[pb2.repeated_bytes_extension].append('216')
extensions[pb2.repeatedgroup_extension].add().a = 217
extensions[pb2.repeated_nested_message_extension].add().bb = 218
extensions[pb2.repeated_foreign_message_extension].add().c = 219
extensions[pb2.repeated_import_message_extension].add().d = 220
extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAR)
extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAR)
extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAR)
extensions[pb2.repeated_string_piece_extension].append('224')
extensions[pb2.repeated_cord_extension].append('225')
# Append a second one of each field.
extensions[pb2.repeated_int32_extension].append(301)
extensions[pb2.repeated_int64_extension].append(302)
extensions[pb2.repeated_uint32_extension].append(303)
extensions[pb2.repeated_uint64_extension].append(304)
extensions[pb2.repeated_sint32_extension].append(305)
extensions[pb2.repeated_sint64_extension].append(306)
extensions[pb2.repeated_fixed32_extension].append(307)
extensions[pb2.repeated_fixed64_extension].append(308)
extensions[pb2.repeated_sfixed32_extension].append(309)
extensions[pb2.repeated_sfixed64_extension].append(310)
extensions[pb2.repeated_float_extension].append(311)
extensions[pb2.repeated_double_extension].append(312)
extensions[pb2.repeated_bool_extension].append(False)
extensions[pb2.repeated_string_extension].append('315')
extensions[pb2.repeated_bytes_extension].append('316')
extensions[pb2.repeatedgroup_extension].add().a = 317
extensions[pb2.repeated_nested_message_extension].add().bb = 318
extensions[pb2.repeated_foreign_message_extension].add().c = 319
extensions[pb2.repeated_import_message_extension].add().d = 320
extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAZ)
extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAZ)
extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAZ)
extensions[pb2.repeated_string_piece_extension].append('324')
extensions[pb2.repeated_cord_extension].append('325')
#
# Fields with defaults.
#
extensions[pb2.default_int32_extension] = 401
extensions[pb2.default_int64_extension] = 402
extensions[pb2.default_uint32_extension] = 403
extensions[pb2.default_uint64_extension] = 404
extensions[pb2.default_sint32_extension] = 405
extensions[pb2.default_sint64_extension] = 406
extensions[pb2.default_fixed32_extension] = 407
extensions[pb2.default_fixed64_extension] = 408
extensions[pb2.default_sfixed32_extension] = 409
extensions[pb2.default_sfixed64_extension] = 410
extensions[pb2.default_float_extension] = 411
extensions[pb2.default_double_extension] = 412
extensions[pb2.default_bool_extension] = False
extensions[pb2.default_string_extension] = '415'
extensions[pb2.default_bytes_extension] = '416'
extensions[pb2.default_nested_enum_extension] = pb2.TestAllTypes.FOO
extensions[pb2.default_foreign_enum_extension] = pb2.FOREIGN_FOO
extensions[pb2.default_import_enum_extension] = import_pb2.IMPORT_FOO
extensions[pb2.default_string_piece_extension] = '424'
extensions[pb2.default_cord_extension] = '425'
def SetAllFieldsAndExtensions(message):
"""Sets every field and extension in the message to a unique value.
Args:
message: A unittest_pb2.TestAllExtensions message.
"""
message.my_int = 1
message.my_string = 'foo'
message.my_float = 1.0
message.Extensions[unittest_pb2.my_extension_int] = 23
message.Extensions[unittest_pb2.my_extension_string] = 'bar'
def ExpectAllFieldsAndExtensionsInOrder(serialized):
"""Ensures that serialized is the serialization we expect for a message
filled with SetAllFieldsAndExtensions(). (Specifically, ensures that the
serialization is in canonical, tag-number order).
"""
my_extension_int = unittest_pb2.my_extension_int
my_extension_string = unittest_pb2.my_extension_string
expected_strings = []
message = unittest_pb2.TestFieldOrderings()
message.my_int = 1 # Field 1.
expected_strings.append(message.SerializeToString())
message.Clear()
message.Extensions[my_extension_int] = 23 # Field 5.
expected_strings.append(message.SerializeToString())
message.Clear()
message.my_string = 'foo' # Field 11.
expected_strings.append(message.SerializeToString())
message.Clear()
message.Extensions[my_extension_string] = 'bar' # Field 50.
expected_strings.append(message.SerializeToString())
message.Clear()
message.my_float = 1.0
expected_strings.append(message.SerializeToString())
message.Clear()
expected = ''.join(expected_strings)
if expected != serialized:
raise ValueError('Expected %r, found %r' % (expected, serialized))
def ExpectAllFieldsSet(test_case, message):
"""Check all fields for correct values have after Set*Fields() is called."""
test_case.assertTrue(message.HasField('optional_int32'))
test_case.assertTrue(message.HasField('optional_int64'))
test_case.assertTrue(message.HasField('optional_uint32'))
test_case.assertTrue(message.HasField('optional_uint64'))
test_case.assertTrue(message.HasField('optional_sint32'))
test_case.assertTrue(message.HasField('optional_sint64'))
test_case.assertTrue(message.HasField('optional_fixed32'))
test_case.assertTrue(message.HasField('optional_fixed64'))
test_case.assertTrue(message.HasField('optional_sfixed32'))
test_case.assertTrue(message.HasField('optional_sfixed64'))
test_case.assertTrue(message.HasField('optional_float'))
test_case.assertTrue(message.HasField('optional_double'))
test_case.assertTrue(message.HasField('optional_bool'))
test_case.assertTrue(message.HasField('optional_string'))
test_case.assertTrue(message.HasField('optional_bytes'))
test_case.assertTrue(message.HasField('optionalgroup'))
test_case.assertTrue(message.HasField('optional_nested_message'))
test_case.assertTrue(message.HasField('optional_foreign_message'))
test_case.assertTrue(message.HasField('optional_import_message'))
test_case.assertTrue(message.optionalgroup.HasField('a'))
test_case.assertTrue(message.optional_nested_message.HasField('bb'))
test_case.assertTrue(message.optional_foreign_message.HasField('c'))
test_case.assertTrue(message.optional_import_message.HasField('d'))
test_case.assertTrue(message.HasField('optional_nested_enum'))
test_case.assertTrue(message.HasField('optional_foreign_enum'))
test_case.assertTrue(message.HasField('optional_import_enum'))
test_case.assertTrue(message.HasField('optional_string_piece'))
test_case.assertTrue(message.HasField('optional_cord'))
test_case.assertEqual(101, message.optional_int32)
test_case.assertEqual(102, message.optional_int64)
test_case.assertEqual(103, message.optional_uint32)
test_case.assertEqual(104, message.optional_uint64)
test_case.assertEqual(105, message.optional_sint32)
test_case.assertEqual(106, message.optional_sint64)
test_case.assertEqual(107, message.optional_fixed32)
test_case.assertEqual(108, message.optional_fixed64)
test_case.assertEqual(109, message.optional_sfixed32)
test_case.assertEqual(110, message.optional_sfixed64)
test_case.assertEqual(111, message.optional_float)
test_case.assertEqual(112, message.optional_double)
test_case.assertEqual(True, message.optional_bool)
test_case.assertEqual('115', message.optional_string)
test_case.assertEqual('116', message.optional_bytes)
test_case.assertEqual(117, message.optionalgroup.a)
test_case.assertEqual(118, message.optional_nested_message.bb)
test_case.assertEqual(119, message.optional_foreign_message.c)
test_case.assertEqual(120, message.optional_import_message.d)
test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ,
message.optional_nested_enum)
test_case.assertEqual(unittest_pb2.FOREIGN_BAZ,
message.optional_foreign_enum)
test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ,
message.optional_import_enum)
# -----------------------------------------------------------------
test_case.assertEqual(2, len(message.repeated_int32))
test_case.assertEqual(2, len(message.repeated_int64))
test_case.assertEqual(2, len(message.repeated_uint32))
test_case.assertEqual(2, len(message.repeated_uint64))
test_case.assertEqual(2, len(message.repeated_sint32))
test_case.assertEqual(2, len(message.repeated_sint64))
test_case.assertEqual(2, len(message.repeated_fixed32))
test_case.assertEqual(2, len(message.repeated_fixed64))
test_case.assertEqual(2, len(message.repeated_sfixed32))
test_case.assertEqual(2, len(message.repeated_sfixed64))
test_case.assertEqual(2, len(message.repeated_float))
test_case.assertEqual(2, len(message.repeated_double))
test_case.assertEqual(2, len(message.repeated_bool))
test_case.assertEqual(2, len(message.repeated_string))
test_case.assertEqual(2, len(message.repeated_bytes))
test_case.assertEqual(2, len(message.repeatedgroup))
test_case.assertEqual(2, len(message.repeated_nested_message))
test_case.assertEqual(2, len(message.repeated_foreign_message))
test_case.assertEqual(2, len(message.repeated_import_message))
test_case.assertEqual(2, len(message.repeated_nested_enum))
test_case.assertEqual(2, len(message.repeated_foreign_enum))
test_case.assertEqual(2, len(message.repeated_import_enum))
test_case.assertEqual(2, len(message.repeated_string_piece))
test_case.assertEqual(2, len(message.repeated_cord))
test_case.assertEqual(201, message.repeated_int32[0])
test_case.assertEqual(202, message.repeated_int64[0])
test_case.assertEqual(203, message.repeated_uint32[0])
test_case.assertEqual(204, message.repeated_uint64[0])
test_case.assertEqual(205, message.repeated_sint32[0])
test_case.assertEqual(206, message.repeated_sint64[0])
test_case.assertEqual(207, message.repeated_fixed32[0])
test_case.assertEqual(208, message.repeated_fixed64[0])
test_case.assertEqual(209, message.repeated_sfixed32[0])
test_case.assertEqual(210, message.repeated_sfixed64[0])
test_case.assertEqual(211, message.repeated_float[0])
test_case.assertEqual(212, message.repeated_double[0])
test_case.assertEqual(True, message.repeated_bool[0])
test_case.assertEqual('215', message.repeated_string[0])
test_case.assertEqual('216', message.repeated_bytes[0])
test_case.assertEqual(217, message.repeatedgroup[0].a)
test_case.assertEqual(218, message.repeated_nested_message[0].bb)
test_case.assertEqual(219, message.repeated_foreign_message[0].c)
test_case.assertEqual(220, message.repeated_import_message[0].d)
test_case.assertEqual(unittest_pb2.TestAllTypes.BAR,
message.repeated_nested_enum[0])
test_case.assertEqual(unittest_pb2.FOREIGN_BAR,
message.repeated_foreign_enum[0])
test_case.assertEqual(unittest_import_pb2.IMPORT_BAR,
message.repeated_import_enum[0])
test_case.assertEqual(301, message.repeated_int32[1])
test_case.assertEqual(302, message.repeated_int64[1])
test_case.assertEqual(303, message.repeated_uint32[1])
test_case.assertEqual(304, message.repeated_uint64[1])
test_case.assertEqual(305, message.repeated_sint32[1])
test_case.assertEqual(306, message.repeated_sint64[1])
test_case.assertEqual(307, message.repeated_fixed32[1])
test_case.assertEqual(308, message.repeated_fixed64[1])
test_case.assertEqual(309, message.repeated_sfixed32[1])
test_case.assertEqual(310, message.repeated_sfixed64[1])
test_case.assertEqual(311, message.repeated_float[1])
test_case.assertEqual(312, message.repeated_double[1])
test_case.assertEqual(False, message.repeated_bool[1])
test_case.assertEqual('315', message.repeated_string[1])
test_case.assertEqual('316', message.repeated_bytes[1])
test_case.assertEqual(317, message.repeatedgroup[1].a)
test_case.assertEqual(318, message.repeated_nested_message[1].bb)
test_case.assertEqual(319, message.repeated_foreign_message[1].c)
test_case.assertEqual(320, message.repeated_import_message[1].d)
test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ,
message.repeated_nested_enum[1])
test_case.assertEqual(unittest_pb2.FOREIGN_BAZ,
message.repeated_foreign_enum[1])
test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ,
message.repeated_import_enum[1])
# -----------------------------------------------------------------
test_case.assertTrue(message.HasField('default_int32'))
test_case.assertTrue(message.HasField('default_int64'))
test_case.assertTrue(message.HasField('default_uint32'))
test_case.assertTrue(message.HasField('default_uint64'))
test_case.assertTrue(message.HasField('default_sint32'))
test_case.assertTrue(message.HasField('default_sint64'))
test_case.assertTrue(message.HasField('default_fixed32'))
test_case.assertTrue(message.HasField('default_fixed64'))
test_case.assertTrue(message.HasField('default_sfixed32'))
test_case.assertTrue(message.HasField('default_sfixed64'))
test_case.assertTrue(message.HasField('default_float'))
test_case.assertTrue(message.HasField('default_double'))
test_case.assertTrue(message.HasField('default_bool'))
test_case.assertTrue(message.HasField('default_string'))
test_case.assertTrue(message.HasField('default_bytes'))
test_case.assertTrue(message.HasField('default_nested_enum'))
test_case.assertTrue(message.HasField('default_foreign_enum'))
test_case.assertTrue(message.HasField('default_import_enum'))
test_case.assertEqual(401, message.default_int32)
test_case.assertEqual(402, message.default_int64)
test_case.assertEqual(403, message.default_uint32)
test_case.assertEqual(404, message.default_uint64)
test_case.assertEqual(405, message.default_sint32)
test_case.assertEqual(406, message.default_sint64)
test_case.assertEqual(407, message.default_fixed32)
test_case.assertEqual(408, message.default_fixed64)
test_case.assertEqual(409, message.default_sfixed32)
test_case.assertEqual(410, message.default_sfixed64)
test_case.assertEqual(411, message.default_float)
test_case.assertEqual(412, message.default_double)
test_case.assertEqual(False, message.default_bool)
test_case.assertEqual('415', message.default_string)
test_case.assertEqual('416', message.default_bytes)
test_case.assertEqual(unittest_pb2.TestAllTypes.FOO,
message.default_nested_enum)
test_case.assertEqual(unittest_pb2.FOREIGN_FOO,
message.default_foreign_enum)
test_case.assertEqual(unittest_import_pb2.IMPORT_FOO,
message.default_import_enum)
def GoldenFile(filename):
"""Finds the given golden file and returns a file object representing it."""
# Search up the directory tree looking for the C++ protobuf source code.
path = '.'
while os.path.exists(path):
if os.path.exists(os.path.join(path, 'src/google/protobuf')):
# Found it. Load the golden file from the testdata directory.
full_path = os.path.join(path, 'src/google/protobuf/testdata', filename)
return open(full_path, 'rb')
path = os.path.join(path, '..')
raise RuntimeError(
'Could not find golden files. This test must be run from within the '
'protobuf source package so that it can read test data files from the '
'C++ source tree.')
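# A minimal usage sketch (the golden file name below is illustrative):
#
#     golden_file = GoldenFile('golden_message')
#     golden_data = golden_file.read()
#     golden_file.close()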
def SetAllPackedFields(message):
"""Sets every field in the message to a unique value.
Args:
message: A unittest_pb2.TestPackedTypes instance.
"""
message.packed_int32.extend([601, 701])
message.packed_int64.extend([602, 702])
message.packed_uint32.extend([603, 703])
message.packed_uint64.extend([604, 704])
message.packed_sint32.extend([605, 705])
message.packed_sint64.extend([606, 706])
message.packed_fixed32.extend([607, 707])
message.packed_fixed64.extend([608, 708])
message.packed_sfixed32.extend([609, 709])
message.packed_sfixed64.extend([610, 710])
message.packed_float.extend([611.0, 711.0])
message.packed_double.extend([612.0, 712.0])
message.packed_bool.extend([True, False])
message.packed_enum.extend([unittest_pb2.FOREIGN_BAR,
unittest_pb2.FOREIGN_BAZ])
def SetAllPackedExtensions(message):
"""Sets every extension in the message to a unique value.
Args:
message: A unittest_pb2.TestPackedExtensions instance.
"""
extensions = message.Extensions
pb2 = unittest_pb2
extensions[pb2.packed_int32_extension].extend([601, 701])
extensions[pb2.packed_int64_extension].extend([602, 702])
extensions[pb2.packed_uint32_extension].extend([603, 703])
extensions[pb2.packed_uint64_extension].extend([604, 704])
extensions[pb2.packed_sint32_extension].extend([605, 705])
extensions[pb2.packed_sint64_extension].extend([606, 706])
extensions[pb2.packed_fixed32_extension].extend([607, 707])
extensions[pb2.packed_fixed64_extension].extend([608, 708])
extensions[pb2.packed_sfixed32_extension].extend([609, 709])
extensions[pb2.packed_sfixed64_extension].extend([610, 710])
extensions[pb2.packed_float_extension].extend([611.0, 711.0])
extensions[pb2.packed_double_extension].extend([612.0, 712.0])
extensions[pb2.packed_bool_extension].extend([True, False])
extensions[pb2.packed_enum_extension].extend([unittest_pb2.FOREIGN_BAR,
unittest_pb2.FOREIGN_BAZ])
def SetAllUnpackedFields(message):
"""Sets every field in the message to a unique value.
Args:
message: A unittest_pb2.TestUnpackedTypes instance.
"""
message.unpacked_int32.extend([601, 701])
message.unpacked_int64.extend([602, 702])
message.unpacked_uint32.extend([603, 703])
message.unpacked_uint64.extend([604, 704])
message.unpacked_sint32.extend([605, 705])
message.unpacked_sint64.extend([606, 706])
message.unpacked_fixed32.extend([607, 707])
message.unpacked_fixed64.extend([608, 708])
message.unpacked_sfixed32.extend([609, 709])
message.unpacked_sfixed64.extend([610, 710])
message.unpacked_float.extend([611.0, 711.0])
message.unpacked_double.extend([612.0, 712.0])
message.unpacked_bool.extend([True, False])
message.unpacked_enum.extend([unittest_pb2.FOREIGN_BAR,
unittest_pb2.FOREIGN_BAZ])
| mit | 2,753,309,851,533,791,700 | 41.68189 | 78 | 0.749253 | false |
chatelak/RMG-Py | rmgpy/solver/__init__.py | 11 | 1557 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green ([email protected]) and the
# RMG Team ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
from .base import ReactionSystem, TerminationTime, TerminationConversion
from .simple import SimpleReactor
| mit | -4,041,207,743,184,250,000 | 47.65625 | 80 | 0.671805 | false |
danforthcenter/plantcv | plantcv/plantcv/photosynthesis/analyze_fvfm.py | 2 | 5529 | # Fluorescence Analysis
import os
import cv2
import numpy as np
import pandas as pd
from plotnine import ggplot, geom_label, aes, geom_line
from plantcv.plantcv import print_image
from plantcv.plantcv import plot_image
from plantcv.plantcv import fatal_error
from plantcv.plantcv import params
from plantcv.plantcv import outputs
def analyze_fvfm(fdark, fmin, fmax, mask, bins=256, label="default"):
"""Analyze PSII camera images.
Inputs:
fdark = grayscale fdark image
fmin = grayscale fmin image
fmax = grayscale fmax image
mask = mask of plant (binary, single channel)
bins = number of bins (1 to 256 for 8-bit; 1 to 65,536 for 16-bit; default is 256)
label = optional label parameter, modifies the variable name of observations recorded
Returns:
analysis_images = list of images (fv image and fvfm histogram image)
:param fdark: numpy.ndarray
:param fmin: numpy.ndarray
:param fmax: numpy.ndarray
:param mask: numpy.ndarray
:param bins: int
:param label: str
:return analysis_images: numpy.ndarray
"""
# Auto-increment the device counter
params.device += 1
# Check that fdark, fmin, and fmax are grayscale (single channel)
if not all(len(np.shape(i)) == 2 for i in [fdark, fmin, fmax]):
fatal_error("The fdark, fmin, and fmax images must be grayscale images.")
# QC Fdark Image
fdark_mask = cv2.bitwise_and(fdark, fdark, mask=mask)
if np.amax(fdark_mask) > 2000:
qc_fdark = False
else:
qc_fdark = True
# Mask Fmin and Fmax Image
fmin_mask = cv2.bitwise_and(fmin, fmin, mask=mask)
fmax_mask = cv2.bitwise_and(fmax, fmax, mask=mask)
# Calculate Fvariable, where Fv = Fmax - Fmin (masked)
fv = np.subtract(fmax_mask, fmin_mask)
# When Fmin is greater than Fmax, a negative value is returned.
# Because the data type is unsigned integers, negative values roll over, resulting in nonsensical values
# Wherever Fmin is greater than Fmax, set Fv to zero
fv[np.where(fmax_mask < fmin_mask)] = 0
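    # Illustrative note on the rollover handled above (values made up): with
    # unsigned 16-bit data, 100 - 200 wraps around instead of going negative,
    # e.g.
    #     np.subtract(np.uint16(100), np.uint16(200))  # -> 65436, not -100
    # which is why those pixels are zeroed rather than left to wrap.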
analysis_images = []
# Calculate Fv/Fm (Fvariable / Fmax) where Fmax is greater than zero
# By definition above, wherever Fmax is zero, Fvariable will also be zero
# To calculate the divisions properly we need to change from unit16 to float64 data types
fvfm = fv.astype(np.float64)
analysis_images.append(fvfm)
fmax_flt = fmax_mask.astype(np.float64)
fvfm[np.where(fmax_mask > 0)] /= fmax_flt[np.where(fmax_mask > 0)]
# Calculate the median Fv/Fm value for non-zero pixels
fvfm_median = np.median(fvfm[np.where(fvfm > 0)])
# Calculate the histogram of Fv/Fm non-zero values
fvfm_hist, fvfm_bins = np.histogram(fvfm[np.where(fvfm > 0)], bins, range=(0, 1))
# fvfm_bins is a bins + 1 length list of bin endpoints, so we need to calculate bin midpoints so that
    # we have a one-to-one list of x (Fv/Fm) and y (frequency) values.
# To do this we add half the bin width to each lower bin edge x-value
midpoints = fvfm_bins[:-1] + 0.5 * np.diff(fvfm_bins)
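    # For instance (hypothetical bins=4): edges [0.0, 0.25, 0.5, 0.75, 1.0]
    # yield midpoints [0.125, 0.375, 0.625, 0.875], one per histogram count.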
# Calculate which non-zero bin has the maximum Fv/Fm value
max_bin = midpoints[np.argmax(fvfm_hist)]
# Create a dataframe
dataset = pd.DataFrame({'Plant Pixels': fvfm_hist, 'Fv/Fm': midpoints})
# Make the histogram figure using plotnine
fvfm_hist_fig = (ggplot(data=dataset, mapping=aes(x='Fv/Fm', y='Plant Pixels'))
+ geom_line(color='green', show_legend=True)
+ geom_label(label='Peak Bin Value: ' + str(max_bin),
x=.15, y=205, size=8, color='green'))
analysis_images.append(fvfm_hist_fig)
if params.debug == 'print':
print_image(fmin_mask, os.path.join(params.debug_outdir, str(params.device) + '_fmin_mask.png'))
print_image(fmax_mask, os.path.join(params.debug_outdir, str(params.device) + '_fmax_mask.png'))
print_image(fv, os.path.join(params.debug_outdir, str(params.device) + '_fv_convert.png'))
fvfm_hist_fig.save(os.path.join(params.debug_outdir, str(params.device) + '_fv_hist.png'), verbose=False)
elif params.debug == 'plot':
plot_image(fmin_mask, cmap='gray')
plot_image(fmax_mask, cmap='gray')
plot_image(fv, cmap='gray')
print(fvfm_hist_fig)
outputs.add_observation(sample=label, variable='fvfm_hist', trait='Fv/Fm frequencies',
method='plantcv.plantcv.fluor_fvfm', scale='none', datatype=list,
value=fvfm_hist.tolist(), label=np.around(midpoints, decimals=len(str(bins))).tolist())
outputs.add_observation(sample=label, variable='fvfm_hist_peak', trait='peak Fv/Fm value',
method='plantcv.plantcv.fluor_fvfm', scale='none', datatype=float,
value=float(max_bin), label='none')
outputs.add_observation(sample=label, variable='fvfm_median', trait='Fv/Fm median',
method='plantcv.plantcv.fluor_fvfm', scale='none', datatype=float,
value=float(np.around(fvfm_median, decimals=4)), label='none')
outputs.add_observation(sample=label, variable='fdark_passed_qc', trait='Fdark passed QC',
method='plantcv.plantcv.fluor_fvfm', scale='none', datatype=bool,
value=qc_fdark, label='none')
# Store images
outputs.images.append(analysis_images)
return analysis_images
| mit | 4,291,254,288,133,156,400 | 45.462185 | 115 | 0.649485 | false |
vicky2135/lucious | oscar/lib/python2.7/site-packages/urllib3/exceptions.py | 223 | 6603 | from __future__ import absolute_import
from .packages.six.moves.http_client import (
IncompleteRead as httplib_IncompleteRead
)
# Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
class HTTPWarning(Warning):
"Base warning used by this module."
pass
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, None)
class RequestError(PoolError):
"Base exception for PoolErrors that have associated URLs."
def __init__(self, pool, url, message):
self.url = url
PoolError.__init__(self, pool, message)
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, self.url, None)
class SSLError(HTTPError):
"Raised when SSL certificate fails in an HTTPS connection."
pass
class ProxyError(HTTPError):
"Raised when the connection to a proxy fails."
pass
class DecodeError(HTTPError):
"Raised when automatic decoding based on Content-Type fails."
pass
class ProtocolError(HTTPError):
"Raised when something unexpected happens mid-request/response."
pass
#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
# Leaf Exceptions
class MaxRetryError(RequestError):
"""Raised when the maximum number of retries is exceeded.
:param pool: The connection pool
:type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
:param string url: The requested Url
:param exceptions.Exception reason: The underlying error
"""
def __init__(self, pool, url, reason=None):
self.reason = reason
message = "Max retries exceeded with url: %s (Caused by %r)" % (
url, reason)
RequestError.__init__(self, pool, url, message)
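# A minimal handling sketch (illustrative only; the pool and request are
# hypothetical):
#
#     try:
#         pool.urlopen('GET', '/')
#     except MaxRetryError as e:
#         print('gave up on %s because of %r' % (e.url, e.reason))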
class HostChangedError(RequestError):
"Raised when an existing pool gets a request for a foreign host."
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
RequestError.__init__(self, pool, url, message)
self.retries = retries
class TimeoutStateError(HTTPError):
""" Raised when passing an invalid state to a timeout """
pass
class TimeoutError(HTTPError):
""" Raised when a socket timeout error occurs.
Catching this error will catch both :exc:`ReadTimeoutErrors
<ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
"""
pass
class ReadTimeoutError(TimeoutError, RequestError):
"Raised when a socket timeout occurs while receiving data from a server"
pass
# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
"Raised when a socket timeout occurs while connecting to a server"
pass
class NewConnectionError(ConnectTimeoutError, PoolError):
"Raised when we fail to establish a new connection. Usually ECONNREFUSED."
pass
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
class ClosedPoolError(PoolError):
"Raised when a request enters a pool after the pool has been closed."
pass
class LocationValueError(ValueError, HTTPError):
"Raised when there is something wrong with a given URL input."
pass
class LocationParseError(LocationValueError):
"Raised when get_host or similar fails to parse the URL input."
def __init__(self, location):
message = "Failed to parse: %s" % location
HTTPError.__init__(self, message)
self.location = location
class ResponseError(HTTPError):
"Used as a container for an error reason supplied in a MaxRetryError."
GENERIC_ERROR = 'too many error responses'
SPECIFIC_ERROR = 'too many {status_code} error responses'
class SecurityWarning(HTTPWarning):
"Warned when perfoming security reducing actions"
pass
class SubjectAltNameWarning(SecurityWarning):
"Warned when connecting to a host with a certificate missing a SAN."
pass
class InsecureRequestWarning(SecurityWarning):
"Warned when making an unverified HTTPS request."
pass
class SystemTimeWarning(SecurityWarning):
"Warned when system time is suspected to be wrong"
pass
class InsecurePlatformWarning(SecurityWarning):
"Warned when certain SSL configuration is not available on a platform."
pass
class SNIMissingWarning(HTTPWarning):
"Warned when making a HTTPS request without SNI available."
pass
class DependencyWarning(HTTPWarning):
"""
Warned when an attempt is made to import a module with missing optional
dependencies.
"""
pass
class ResponseNotChunked(ProtocolError, ValueError):
"Response needs to be chunked in order to read it as chunks."
pass
class BodyNotHttplibCompatible(HTTPError):
"""
Body should be httplib.HTTPResponse like (have an fp attribute which
returns raw chunks) for read_chunked().
"""
pass
class IncompleteRead(HTTPError, httplib_IncompleteRead):
"""
Response length doesn't match expected Content-Length
Subclass of http_client.IncompleteRead to allow int value
for `partial` to avoid creating large objects on streamed
reads.
"""
def __init__(self, partial, expected):
super(IncompleteRead, self).__init__(partial, expected)
def __repr__(self):
return ('IncompleteRead(%i bytes read, '
'%i more expected)' % (self.partial, self.expected))
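# Illustrative sketch (not part of the original module; assumes the
# httplib_IncompleteRead import near the top of the file resolved normally):
def _demo_incomplete_read_repr():
    # -> 'IncompleteRead(5 bytes read, 10 more expected)'
    return repr(IncompleteRead(5, 10))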
class InvalidHeader(HTTPError):
"The header provided was somehow invalid."
pass
class ProxySchemeUnknown(AssertionError, ValueError):
"ProxyManager does not support the supplied scheme"
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
def __init__(self, scheme):
message = "Not supported proxy scheme %s" % scheme
super(ProxySchemeUnknown, self).__init__(message)
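# Illustrative sketch (not part of the original module): per the TODO above,
# the error is currently catchable as either AssertionError or ValueError.
def _demo_proxy_scheme_unknown():
    err = ProxySchemeUnknown('socks')
    return isinstance(err, AssertionError) and isinstance(err, ValueError)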
class HeaderParsingError(HTTPError):
"Raised by assert_header_parsing, but we convert it to a log.warning statement."
def __init__(self, defects, unparsed_data):
message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
super(HeaderParsingError, self).__init__(message)
class UnrewindableBodyError(HTTPError):
"urllib3 encountered an error when trying to rewind a body"
pass
| bsd-3-clause | 4,375,550,607,050,402,000 | 25.841463 | 84 | 0.700288 | false |
tutengfei/zerorpc-python | tests/zmqbug.py | 94 | 5183 | # -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Based on https://github.com/traviscline/gevent-zeromq/blob/master/gevent_zeromq/core.py
import zmq
import gevent.event
import gevent.core
STOP_EVERYTHING = False
class ZMQSocket(zmq.Socket):
def __init__(self, context, socket_type):
super(ZMQSocket, self).__init__(context, socket_type)
on_state_changed_fd = self.getsockopt(zmq.FD)
self._readable = gevent.event.Event()
self._writable = gevent.event.Event()
try:
# gevent>=1.0
self._state_event = gevent.hub.get_hub().loop.io(
on_state_changed_fd, gevent.core.READ)
self._state_event.start(self._on_state_changed)
except AttributeError:
# gevent<1.0
self._state_event = gevent.core.read_event(on_state_changed_fd,
self._on_state_changed, persist=True)
def _on_state_changed(self, event=None, _evtype=None):
if self.closed:
self._writable.set()
self._readable.set()
return
events = self.getsockopt(zmq.EVENTS)
if events & zmq.POLLOUT:
self._writable.set()
if events & zmq.POLLIN:
self._readable.set()
def close(self):
if not self.closed and getattr(self, '_state_event', None):
try:
# gevent>=1.0
self._state_event.stop()
except AttributeError:
# gevent<1.0
self._state_event.cancel()
super(ZMQSocket, self).close()
def send(self, data, flags=0, copy=True, track=False):
if flags & zmq.NOBLOCK:
return super(ZMQSocket, self).send(data, flags, copy, track)
flags |= zmq.NOBLOCK
while True:
try:
return super(ZMQSocket, self).send(data, flags, copy, track)
except zmq.ZMQError, e:
if e.errno != zmq.EAGAIN:
raise
self._writable.clear()
self._writable.wait()
def recv(self, flags=0, copy=True, track=False):
if flags & zmq.NOBLOCK:
return super(ZMQSocket, self).recv(flags, copy, track)
flags |= zmq.NOBLOCK
while True:
try:
return super(ZMQSocket, self).recv(flags, copy, track)
except zmq.ZMQError, e:
if e.errno != zmq.EAGAIN:
raise
self._readable.clear()
while not self._readable.wait(timeout=10):
events = self.getsockopt(zmq.EVENTS)
if bool(events & zmq.POLLIN):
print "here we go, nobody told me about new messages!"
global STOP_EVERYTHING
STOP_EVERYTHING = True
raise gevent.GreenletExit()
zmq_context = zmq.Context()
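# Illustrative sketch (not part of the original repro script): the wrapper is
# constructed exactly like a plain zmq.Socket; gevent-friendly blocking comes
# from the send/recv overrides above. The endpoint matches the one used below.
def _example_req_socket():
    sock = ZMQSocket(zmq_context, zmq.REQ)
    sock.connect('ipc://zmqbug')
    sock.send('ping')
    return sock.recv()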
def server():
socket = ZMQSocket(zmq_context, zmq.REP)
socket.bind('ipc://zmqbug')
class Cnt:
responded = 0
cnt = Cnt()
def responder():
while not STOP_EVERYTHING:
msg = socket.recv()
socket.send(msg)
cnt.responded += 1
gevent.spawn(responder)
while not STOP_EVERYTHING:
print "cnt.responded=", cnt.responded
gevent.sleep(0.5)
def client():
socket = ZMQSocket(zmq_context, zmq.DEALER)
socket.connect('ipc://zmqbug')
class Cnt:
recv = 0
send = 0
cnt = Cnt()
def recvmsg():
while not STOP_EVERYTHING:
socket.recv()
socket.recv()
cnt.recv += 1
def sendmsg():
while not STOP_EVERYTHING:
socket.send('', flags=zmq.SNDMORE)
socket.send('hello')
cnt.send += 1
gevent.sleep(0)
gevent.spawn(recvmsg)
gevent.spawn(sendmsg)
while not STOP_EVERYTHING:
print "cnt.recv=", cnt.recv, "cnt.send=", cnt.send
gevent.sleep(0.5)
gevent.spawn(server)
client()
| mit | -7,863,638,105,190,453,000 | 30.797546 | 89 | 0.59753 | false |
chachan/nodeshot | nodeshot/community/notifications/migrations/0001_initial.py | 5 | 4082 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Notification',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('added', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created on')),
('updated', models.DateTimeField(default=django.utils.timezone.now, verbose_name='updated on')),
('type', models.CharField(max_length=64, verbose_name='type', choices=[(b'node_created', 'node_created'), (b'node_status_changed', 'node_status_changed'), (b'node_own_status_changed', 'node_own_status_changed'), (b'node_deleted', 'node_deleted'), (b'custom', 'custom')])),
('object_id', models.PositiveIntegerField(null=True, blank=True)),
('text', models.CharField(max_length=120, verbose_name='text', blank=True)),
('is_read', models.BooleanField(default=False, verbose_name='read?')),
],
options={
'ordering': ('-id',),
},
),
migrations.CreateModel(
name='UserEmailNotificationSettings',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('node_created', models.IntegerField(default=30, help_text='-1 (less than 0): disabled; 0: enabled for all; 1 (greater than 0): enabled for those in the specified distance range (km)', verbose_name='node_created')),
                ('node_status_changed', models.IntegerField(default=30, help_text='-1 (less than 0): disabled; 0: enabled for all; 1 (greater than 0): enabled for those in the specified distance range (km)', verbose_name='node_status_changed')),
                ('node_own_status_changed', models.BooleanField(default=True, verbose_name='node_own_status_changed')),
                ('node_deleted', models.IntegerField(default=30, help_text='-1 (less than 0): disabled; 0: enabled for all; 1 (greater than 0): enabled for those in the specified distance range (km)', verbose_name='node_deleted')),
],
options={
'db_table': 'notifications_user_email_settings',
'verbose_name': 'user email notification settings',
'verbose_name_plural': 'user email notification settings',
},
),
migrations.CreateModel(
name='UserWebNotificationSettings',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('node_created', models.IntegerField(default=30, help_text='-1 (less than 0): disabled; 0: enabled for all; 1 (greater than 0): enabled for those in the specified distance range (km)', verbose_name='node_created')),
                ('node_status_changed', models.IntegerField(default=30, help_text='-1 (less than 0): disabled; 0: enabled for all; 1 (greater than 0): enabled for those in the specified distance range (km)', verbose_name='node_status_changed')),
                ('node_own_status_changed', models.BooleanField(default=True, verbose_name='node_own_status_changed')),
                ('node_deleted', models.IntegerField(default=30, help_text='-1 (less than 0): disabled; 0: enabled for all; 1 (greater than 0): enabled for those in the specified distance range (km)', verbose_name='node_deleted')),
],
options={
'db_table': 'notifications_user_web_settings',
'verbose_name': 'user web notification settings',
'verbose_name_plural': 'user web notification settings',
},
),
]
| gpl-3.0 | -4,581,807,779,269,523,000 | 68.186441 | 288 | 0.589417 | false |
waveform80/lars | lars/datatypes/url.py | 1 | 10354 | # vim: set et sw=4 sts=4 fileencoding=utf-8:
#
# Copyright (c) 2013-2017 Dave Jones <[email protected]>
# Copyright (c) 2013 Mime Consulting Ltd. <[email protected]>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Defines the URL parsing specific parts of :mod:`lars.datatypes`.
"""
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
from collections import namedtuple
try:
from urllib import parse
except ImportError:
import urlparse as parse
from .ipaddress import hostname
str = type('') # pylint: disable=redefined-builtin,invalid-name
def path(s):
"""
Returns a :class:`Path` object for the given string.
:param str s: The string containing the path to parse
:returns: A :class:`Path` object representing the path
"""
i = s.rfind('/') + 1
dirname, basename = s[:i], s[i:]
if dirname and dirname != '/' * len(dirname):
dirname = dirname.rstrip('/')
i = basename.rfind('.')
if i > 0:
ext = basename[i:]
else:
ext = ''
return Path(dirname, basename, ext)
def url(s):
"""
Returns a :class:`Url` object for the given string.
:param str s: The string containing the URL to parse
:returns: A :class:`Url` tuple representing the URL
"""
return Url(*parse.urlparse(s))
def request(s):
"""
Returns a :class:`Request` object for the given string.
:param str s: The string containing the request line to parse
:returns: A :class:`Request` tuple representing the request line
"""
try:
method, s = s.split(' ', 1)
except ValueError:
raise ValueError('Request line is missing a space separated method')
try:
s, protocol = s.rsplit(' ', 1)
except ValueError:
raise ValueError('Request line is missing a space separated protocol')
s = s.strip()
if not s:
raise ValueError('Request line URL cannot be blank')
return Request(method, url(s) if s != '*' else None, protocol)
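# Illustrative sketch (not part of the original module): parsing a typical
# request line with the function above.
def _demo_request():
    req = request('GET /search?q=lars HTTP/1.1')
    assert req.method == 'GET'
    assert req.protocol == 'HTTP/1.1'
    assert str(req.url.path) == '/search'
    return req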
class Path(namedtuple('Path', 'dirname basename ext')):
"""
Represents a path.
This type is returned by the :func:`path` function and represents a path in
POSIX format (forward slash separators and no drive portion). It is used to
represent the path portion of URLs and provides attributes for extracting
    parts of the path therein.
The original path can be obtained as a string by asking for the string
conversion of this class, like so::
p = datatypes.path('/foo/bar/baz.ext')
assert p.dirname == '/foo/bar'
assert p.basename == 'baz.ext'
assert str(p) == '/foo/bar/baz.ext'
.. attribute:: dirname
A string containing all of the path except the basename at the end
.. attribute:: basename
A string containing the basename (filename and extension) at the end
of the path
.. attribute:: ext
A string containing the filename's extension (including the leading dot)
"""
__slots__ = ()
@property
def dirs(self):
"""
Returns a sequence of the directories making up :attr:`dirname`
"""
return [d for d in self.dirname.split('/') if d]
@property
def basename_no_ext(self):
"""
Returns a string containing basename with the extension removed
(including the final dot separator).
"""
if self.ext:
return self.basename[:-len(self.ext)]
else:
return self.basename
@property
def isabs(self):
"""
Returns True if the path is absolute (dirname begins with one or more
forward slashes).
"""
return self.dirname.startswith('/')
def join(self, *paths):
"""
Joins this path with the specified parts, returning a new :class:`Path`
object.
:param \\*paths: The parts to append to this path
:returns: A new :class:`Path` object representing the extended path
"""
# pylint: disable=invalid-name
result = str(self)
for p in paths:
if not isinstance(p, str):
p = str(p)
# Strip doubled slashes? Or leave this to normpath?
if p.startswith('/'):
result = p
elif not result or result.endswith('/'):
result += p
else:
result += '/' + p
return path(result)
def __str__(self):
result = self.dirname
if not result or result.endswith('/'):
return result + self.basename
else:
return result + '/' + self.basename
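# Illustrative sketch (not part of the original module): joining relative
# parts extends a Path, while an absolute part replaces it.
def _demo_path_join():
    p = path('/foo/bar')
    assert str(p.join('baz.txt')) == '/foo/bar/baz.txt'
    assert str(p.join('/abs')) == '/abs'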
# This is rather hackish; in Python 2.x, urlparse.ResultMixin provides
# functionality for extracting username, password, hostname and port from a
# parsed URL. In Python 3 this changed to ResultBase, then to a whole bunch of
# undocumented classes (split between strings and bytes) with ResultBase as an
# alias
try:
_ResultMixin = parse.ResultBase # pylint: disable=invalid-name
except AttributeError:
_ResultMixin = parse.ResultMixin # pylint: disable=invalid-name
class Url(namedtuple('Url', ('scheme', 'netloc', 'path_str', 'params',
'query_str', 'fragment')), _ResultMixin):
"""
Represents a URL.
This type is returned by the :func:`url` function and represents the parts
of the URL. You can obtain the original URL as a string by requesting the
string conversion of this class, for example::
>>> u = datatypes.url('http://foo/bar/baz')
>>> print u.scheme
http
>>> print u.hostname
foo
>>> print str(u)
http://foo/bar/baz
.. attribute:: scheme
The scheme of the URL, before the first ``:``
.. attribute:: netloc
The "network location" of the URL, comprising the hostname and port
(separated by a colon), and historically the username and password
(prefixed to the hostname and separated with an ampersand)
.. attribute:: path_str
The path of the URL from the first slash after the network location
.. attribute:: path
The path of the URL, parsed into a tuple which splits out the directory,
filename, and extension::
>>> u = datatypes.url('foo/bar/baz.html')
>>> u.path
Path(dirname='foo/bar', basename='baz.html', ext='.html')
>>> u.path.isabs
False
.. attribute:: params
The parameters of the URL
.. attribute:: query_str
The query string of the URL from the first question-mark in the path
.. attribute:: query
The query string, parsed into a mapping of keys to lists of values. For
example::
>>> u = datatypes.url('foo/bar?a=1&a=2&b=3&c=')
>>> print u.query
{'a': ['1', '2'], 'c': [''], 'b': ['3']}
>>> print 'a' in u.query
True
.. attribute:: fragment
The fragment of the URL from the last hash-mark to the end of the URL
Additionally, the following attributes can be used to separate out the
various parts of the :attr:`netloc` attribute:
.. attribute:: username
The username (historical, rare to see this used on the modern web)
.. attribute:: password
The password (historical, almost unheard of on the modern web as it's
extremely insecure to include credentials in the URL)
.. attribute:: hostname
The hostname from the network location. This attribute returns a
:class:`Hostname` object which can be used to resolve the hostname into
an IP address if required.
.. attribute:: port
The optional network port
"""
__slots__ = ()
def geturl(self):
"""
        Return the URL as a string.
"""
return parse.urlunparse(self)
def __str__(self):
return self.geturl()
@property
def hostname(self):
return hostname(super(Url, self).hostname)
@property
def query(self):
# pylint: disable=missing-docstring
return parse.parse_qs(self.query_str, keep_blank_values=True)
@property
def path(self):
# pylint: disable=missing-docstring
return path(self.path_str)
class Request(namedtuple('Request', 'method url protocol')):
"""
Represents an HTTP request line.
This type is returned by the :func:`request` function and represents the
three parts of an HTTP request line: the method, the URL (optional, can be
None in the case of methods like OPTIONS), and the protocol. The following
attributes exist:
.. attribute:: method
The method of the request (typically GET, POST, or PUT but can
technically be any valid HTTP token)
.. attribute:: url
The requested URL. May be an absolute URL, an absolute path, an
authority token, or None in the case that the request line contained "*"
for the URL.
.. attribute:: protocol
The HTTP protocol version requested. A string of the format 'HTTP/x.y'
where x.y is the version number. At the time of writing only HTTP/1.0
and HTTP/1.1 are defined.
"""
def __str__(self):
return '%s %s %s' % (self.method, self.url, self.protocol)
| mit | -247,906,314,771,236,100 | 29.542773 | 79 | 0.633668 | false |
adobdin/timmy | timmy/analyze_modules/rabbitmq.py | 1 | 7526 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from timmy.analyze_health import GREEN, UNKNOWN, YELLOW, RED
from timmy.env import project_name
import logging
import re
import yaml
logger = logging.getLogger(project_name)
def register(function_mapping):
function_mapping['rabbitmqctl-list-queues'] = parse_list_queues
function_mapping['rabbitmqctl-status'] = parse_status
def parse_list_queues(stdout, script, node, stderr=None, exitcode=None):
warning = 100
error = 1000
health = GREEN
details = []
if exitcode:
health = UNKNOWN
return health, details
data = [l.rstrip() for l in stdout.splitlines()]
for line in data[1:]:
elements = line.rstrip().split()
if len(elements) < 2:
logger.warning('no value in list_queues: "%s"' % line.rstrip())
else:
            count = int(elements[1])
            # counts at or above `error` are critical; between `warning`
            # and `error` they only degrade health to YELLOW
            if count >= error:
                health = max(health, RED)
                details.append(line)
            elif count >= warning:
                health = max(health, YELLOW)
                details.append(line)
return health, details
def prepare_status(stdout):
bad_yaml = ''.join(stdout.splitlines()[1:])
# quoting string elements
bad_yaml = re.sub(r'([,{])([a-z_A-Z]+)([,}])', r'\1"\2"\3', bad_yaml)
    # changing first element into a key - replacing , with :
bad_yaml = re.sub(r'({[^,]+),', r'\1:', bad_yaml)
bad_yaml_list = list(bad_yaml)
good_yaml, _ = fix_dicts(bad_yaml_list, 0)
status_list = yaml.load(''.join(good_yaml))
status_dict = squash_dicts(status_list)
return status_dict
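# Illustrative sketch (hypothetical miniature of `rabbitmqctl status` output,
# not part of the original module): the Erlang-style proplist is coerced into
# a plain dict by prepare_status above, using the helpers below.
def _demo_prepare_status():
    raw = ('Status of node rabbit@host ...\n'
           '[{pid,42},{disk_free,1000},{disk_free_limit,50}]')
    # -> {'pid': 42, 'disk_free': 1000, 'disk_free_limit': 50}
    return prepare_status(raw)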
def fix_dicts(json_str_list, pos):
    '''recursively puts all comma-separated values into square
brackets to make data look like normal 'key: value' dicts
'''
quoted_string = False
value = True
value_pos = 0
commas = False
is_list = False
in_list = 0
while pos < len(json_str_list):
if not quoted_string:
if json_str_list[pos] == '{':
json_str_list, pos = fix_dicts(json_str_list, pos+1)
elif json_str_list[pos] == '"':
quoted_string = True
elif json_str_list[pos] == ':':
value = True
value_pos = pos + 1
elif json_str_list[pos] == '[':
if value and not commas:
is_list = True
in_list += 1
elif json_str_list[pos] == ']':
in_list -= 1
elif json_str_list[pos] == ',':
commas = True
if not in_list:
is_list = False
elif json_str_list[pos] == '}':
if not is_list and commas:
json_str_list = (json_str_list[:value_pos] + ['['] +
json_str_list[value_pos:pos] + [']'] +
json_str_list[pos:])
pos += 2
return json_str_list, pos
elif json_str_list[pos] == '"':
quoted_string = False
pos += 1
return json_str_list, pos
def squash_dicts(input_data):
    # recursively converts [{a:1},{b:2},{c:3}...] into {a:1, b:2, c:3}
if type(input_data) is list:
for i in range(len(input_data)):
input_data[i] = squash_dicts(input_data[i])
if all([type(i) is dict for i in input_data]):
kv_list = [(k, v) for i in input_data for k, v in i.items()]
input_data = dict(kv_list)
elif type(input_data) is dict:
for k, v in input_data.items():
input_data[k] = squash_dicts(v)
return input_data
def parse_status(stdout, script, node, stderr=None, exitcode=None):
health = GREEN
details = []
status = prepare_status(stdout)
if not status:
health = UNKNOWN
details = ['Status unavailable']
if exitcode:
if exitcode == 69:
health = RED
details = ['RabbitMQ is not running']
return health, details
# disk free check
try:
dfree = int(status['disk_free'])
dlimit = int(status['disk_free_limit'])
dfree_ok = 10**9 # 1GB
if dfree > dlimit and dfree < dfree_ok:
health = max(health, YELLOW)
details.append('disk_free: %s, disk_free_limit: %s'
% (dfree, dlimit))
elif dfree <= dlimit:
health = max(health, RED)
details.append('disk_free: %s, disk_free_limit: %s'
% (dfree, dlimit))
except ValueError:
details.append('cannot convert disk_free* to int')
health = max(health, UNKNOWN)
except KeyError:
details.append('disk_free* not present')
health = max(health, UNKNOWN)
# process limit check
try:
pused = float(status['processes']['used'])
plimit = float(status['processes']['limit'])
ok_ratio = 0.9
if pused < plimit and pused/plimit > ok_ratio:
health = max(health, YELLOW)
details.append('processes used: %s, processes limit: %s'
% (pused, plimit))
elif pused >= plimit:
health = max(health, RED)
details.append('processes used: %s, processes limit: %s'
% (pused, plimit))
except ValueError:
details.append('cannot convert processes* to numbers')
health = max(health, UNKNOWN)
except KeyError:
details.append('processes* not present')
health = max(health, UNKNOWN)
# fd check
try:
sused = float(status['file_descriptors']['sockets_used'])
slimit = float(status['file_descriptors']['sockets_limit'])
ok_ratio = 0.9
if sused < slimit and sused/slimit > ok_ratio:
health = max(health, YELLOW)
details.append('sockets used: %s, sockets limit: %s'
% (sused, slimit))
elif sused >= slimit:
health = max(health, RED)
details.append('sockets used: %s, sockets limit: %s'
% (sused, slimit))
fdused = float(status['file_descriptors']['total_used'])
fdlimit = float(status['file_descriptors']['total_limit'])
ok_ratio = 0.9
if fdused < fdlimit and fdused/fdlimit > ok_ratio:
health = max(health, YELLOW)
details.append('fd total used: %s, fd total limit: %s'
% (fdused, fdlimit))
elif fdused >= fdlimit:
health = max(health, RED)
details.append('fd total used: %s, fd total limit: %s'
% (fdused, fdlimit))
except ValueError:
details.append('cannot convert file_descriptors* to numbers')
health = max(health, UNKNOWN)
except KeyError:
details.append('file_descriptors* not present')
health = max(health, UNKNOWN)
return health, details
| apache-2.0 | -8,396,333,004,954,782,000 | 35.357488 | 78 | 0.552485 | false |
smcantab/pele | playground/amber/driver_sanitycheck.py | 7 | 1208 | from pele.amber import amberSystem as amb
# create a new amber system and load database to be pruned
sys = amb.AMBERSystem('coords.prmtop', 'coords.inpcrd')
dbcurr = sys.create_database(db="aladipep.db")
print 'Collecting minima to delete .. '
listTODel = []
for minimum in dbcurr.minima():
testOutCome1 = sys.check_cistrans(minimum.coords)
testOutCome2 = sys.check_CAchirality(minimum.coords)
if testOutCome1 and testOutCome2:
print 'PASS', minimum._id, minimum.energy
else:
listTODel.append(minimum)
print 'FAIL', minimum._id, minimum.energy
print '------------'
print 'Number of minima to be deleted = ', len(listTODel)
# now delete
for minn in listTODel:
dbcurr.removeMinimum(minn)
#print 'Checking transition states .. '
#ct = 0
#print len(dbcurr.transition_states())
#for ts in dbcurr.transition_states() :
# if sys.check_cistrans(ts.coords ):
# print 'PASS', ts._id, ts.energy
# ct = ct + 1
# # dbcurr.removeTS(ts) # not implemented yet
# else:
# print 'FAIL', ts._id, ts.energy
#
# print '------------'
#
#print 'Number of TS deleted = ', ct
| gpl-3.0 | -3,238,013,312,960,548,400 | 29.2 | 59 | 0.619205 | false |
sorenk/ansible | lib/ansible/utils/module_docs_fragments/ipa.py | 27 | 2627 | # Copyright (c) 2017-18, Ansible Project
# Copyright (c) 2017-18, Abhijeet Kasurde ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
# Parameters for FreeIPA/IPA modules
DOCUMENTATION = '''
options:
ipa_port:
description:
- Port of FreeIPA / IPA server.
- If the value is not specified in the task, the value of environment variable C(IPA_PORT) will be used instead.
- If both the environment variable C(IPA_PORT) and the value are not specified in the task, then default value is set.
- 'Environment variable fallback mechanism is added in version 2.5.'
default: 443
ipa_host:
description:
- IP or hostname of IPA server.
- If the value is not specified in the task, the value of environment variable C(IPA_HOST) will be used instead.
- If both the environment variable C(IPA_HOST) and the value are not specified in the task, then default value is set.
- 'Environment variable fallback mechanism is added in version 2.5.'
default: ipa.example.com
ipa_user:
description:
- Administrative account used on IPA server.
- If the value is not specified in the task, the value of environment variable C(IPA_USER) will be used instead.
- If both the environment variable C(IPA_USER) and the value are not specified in the task, then default value is set.
- 'Environment variable fallback mechanism is added in version 2.5.'
default: admin
ipa_pass:
description:
- Password of administrative user.
- If the value is not specified in the task, the value of environment variable C(IPA_PASS) will be used instead.
- If both the environment variable C(IPA_PASS) and the value are not specified in the task, then default value is set.
- 'Environment variable fallback mechanism is added in version 2.5.'
required: true
ipa_prot:
description:
- Protocol used by IPA server.
- If the value is not specified in the task, the value of environment variable C(IPA_PROT) will be used instead.
- If both the environment variable C(IPA_PROT) and the value are not specified in the task, then default value is set.
- 'Environment variable fallback mechanism is added in version 2.5.'
default: https
choices: ["http", "https"]
validate_certs:
description:
- This only applies if C(ipa_prot) is I(https).
- If set to C(no), the SSL certificates will not be validated.
- This should only set to C(no) used on personally controlled sites using self-signed certificates.
default: true
'''
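# Illustrative sketch (assumption: the standard Ansible documentation-fragment
# convention): a module reuses the shared options above by referencing this
# fragment in its own DOCUMENTATION block. The module name is hypothetical.
EXAMPLE_MODULE_DOCUMENTATION = '''
module: ipa_example
short_description: hypothetical module showing fragment reuse
extends_documentation_fragment: ipa
'''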
| gpl-3.0 | -7,878,302,886,946,918,000 | 48.566038 | 122 | 0.71831 | false |
barykaed/Pelican-Test | activate/Lib/rlcompleter.py | 100 | 5763 | """Word completion for GNU readline.
The completer completes keywords, built-ins and globals in a selectable
namespace (which defaults to __main__); when completing NAME.NAME..., it
evaluates (!) the expression up to the last dot and completes its attributes.
It's very cool to do "import sys" type "sys.", hit the completion key (twice),
and see the list of names defined by the sys module!
Tip: to use the tab key as the completion key, call
readline.parse_and_bind("tab: complete")
Notes:
- Exceptions raised by the completer function are *ignored* (and generally cause
the completion to fail). This is a feature -- since readline sets the tty
device in raw (or cbreak) mode, printing a traceback wouldn't work well
without some complicated hoopla to save, reset and restore the tty state.
- The evaluation of the NAME.NAME... form may cause arbitrary application
defined code to be executed if an object with a __getattr__ hook is found.
Since it is the responsibility of the application (or the user) to enable this
feature, I consider this an acceptable risk. More complicated expressions
(e.g. function calls or indexing operations) are *not* evaluated.
- When the original stdin is not a tty device, GNU readline is never
used, and this module (and the readline module) are silently inactive.
"""
import atexit
import builtins
import __main__
__all__ = ["Completer"]
class Completer:
def __init__(self, namespace = None):
"""Create a new completer for the command line.
Completer([namespace]) -> completer instance.
If unspecified, the default namespace where completions are performed
is __main__ (technically, __main__.__dict__). Namespaces should be
given as dictionaries.
Completer instances should be used as the completion mechanism of
readline via the set_completer() call:
readline.set_completer(Completer(my_namespace).complete)
"""
if namespace and not isinstance(namespace, dict):
raise TypeError('namespace must be a dictionary')
# Don't bind to namespace quite yet, but flag whether the user wants a
# specific namespace or to use __main__.__dict__. This will allow us
# to bind to __main__.__dict__ at completion time, not now.
if namespace is None:
self.use_main_ns = 1
else:
self.use_main_ns = 0
self.namespace = namespace
def complete(self, text, state):
"""Return the next possible completion for 'text'.
This is called successively with state == 0, 1, 2, ... until it
returns None. The completion should begin with 'text'.
"""
if self.use_main_ns:
self.namespace = __main__.__dict__
if state == 0:
if "." in text:
self.matches = self.attr_matches(text)
else:
self.matches = self.global_matches(text)
try:
return self.matches[state]
except IndexError:
return None
def _callable_postfix(self, val, word):
if callable(val):
word = word + "("
return word
def global_matches(self, text):
"""Compute matches when text is a simple name.
Return a list of all keywords, built-in functions and names currently
defined in self.namespace that match.
"""
import keyword
matches = []
n = len(text)
for word in keyword.kwlist:
if word[:n] == text:
matches.append(word)
for nspace in [builtins.__dict__, self.namespace]:
for word, val in nspace.items():
if word[:n] == text and word != "__builtins__":
matches.append(self._callable_postfix(val, word))
return matches
def attr_matches(self, text):
"""Compute matches when text contains a dot.
Assuming the text is of the form NAME.NAME....[NAME], and is
evaluable in self.namespace, it will be evaluated and its attributes
(as revealed by dir()) are used as possible completions. (For class
instances, class members are also considered.)
WARNING: this can still invoke arbitrary C code, if an object
with a __getattr__ hook is evaluated.
"""
import re
m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text)
if not m:
return []
expr, attr = m.group(1, 3)
try:
thisobject = eval(expr, self.namespace)
except Exception:
return []
# get the content of the object, except __builtins__
words = dir(thisobject)
if "__builtins__" in words:
words.remove("__builtins__")
if hasattr(thisobject, '__class__'):
words.append('__class__')
words.extend(get_class_members(thisobject.__class__))
matches = []
n = len(attr)
for word in words:
if word[:n] == attr and hasattr(thisobject, word):
val = getattr(thisobject, word)
word = self._callable_postfix(val, "%s.%s" % (expr, word))
matches.append(word)
return matches
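# Illustrative sketch (not part of the stdlib module): exercising the
# completer directly, without readline; the namespace entry is made up.
def _demo_completer():
    completer = Completer({'spam_value': 42})
    matches = []
    state = 0
    while True:
        match = completer.complete('spam', state)
        if match is None:
            break
        matches.append(match)
        state += 1
    return matches  # -> ['spam_value']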
def get_class_members(klass):
ret = dir(klass)
if hasattr(klass,'__bases__'):
for base in klass.__bases__:
ret = ret + get_class_members(base)
return ret
try:
import readline
except ImportError:
pass
else:
readline.set_completer(Completer().complete)
# Release references early at shutdown (the readline module's
# contents are quasi-immortal, and the completer function holds a
# reference to globals).
atexit.register(lambda: readline.set_completer(None))
| mit | -4,020,060,071,220,440,000 | 33.927273 | 80 | 0.616693 | false |
feigames/Odoo | addons/survey/__init__.py | 385 | 1037 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import survey
import controllers
import wizard
| agpl-3.0 | 2,154,562,227,659,596,300 | 42.208333 | 78 | 0.614272 | false |
mfortner/MyPythonKoans | python2/koans/about_true_and_false.py | 1 | 1502 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutTrueAndFalse(Koan):
def truth_value(self, condition):
if condition:
return 'true stuff'
else:
return 'false stuff'
def test_true_is_treated_as_true(self):
self.assertEqual("true stuff", self.truth_value(True))
def test_false_is_treated_as_false(self):
self.assertEqual("false stuff", self.truth_value(False))
def test_none_is_treated_as_false(self):
self.assertEqual("false stuff", self.truth_value(None))
def test_zero_is_treated_as_false(self):
self.assertEqual("false stuff", self.truth_value(0))
def test_empty_collections_are_treated_as_false(self):
self.assertEqual("false stuff", self.truth_value([]))
self.assertEqual("false stuff", self.truth_value(()))
self.assertEqual("false stuff", self.truth_value({}))
self.assertEqual("false stuff", self.truth_value(set()))
def test_blank_strings_are_treated_as_false(self):
self.assertEqual("false stuff", self.truth_value(""))
def test_everything_else_is_treated_as_true(self):
self.assertEqual("true stuff", self.truth_value(1))
self.assertEqual("true stuff", self.truth_value(1,))
self.assertEqual(
"true stuff",
self.truth_value("Python is named after Monty Python"))
self.assertEqual("true stuff", self.truth_value(' '))
| mit | -6,362,949,896,998,020,000 | 35.634146 | 67 | 0.626498 | false |
llazzaro/analyzerstrategies | tests/test_sma_strategy.py | 1 | 1444 | import random
import unittest
from pyStock.models import (
Exchange,
Stock,
Account,
Owner,
)
from pyStock.models.money import Currency
from analyzer.backtest.constant import (
SELL,
BUY_TO_COVER,
)
from analyzerstrategies.sma_strategy import SMAStrategy
class TestSMAStrategy(unittest.TestCase):
def setUp(self):
pesos = Currency(name='Pesos', code='ARG')
merval = Exchange(name='Merval', code='MERV', currency=pesos)
owner = Owner(name='test user')
self.account = Account(owner=owner)
self.security = Stock(symbol='YPF', exchange=merval)
self.tick = {'pattern': None, 'data': {'volume30d': '12165.08453826', 'timestamp': '1446070419', 'high': '305', 'ask': 302.7022, 'last': '302.632', 'bid': 301.0001, 'low': '294.51', 'volume': '437.07501250'}, 'type': 'message', 'security': self.security, 'channel': b'BTC'}
def test_quotes_feeder(self):
strategy = SMAStrategy(account=self.account, config=None, securities=[self.security], store=None)
# not enough data to return action.
self.assertTrue(strategy.update(self.tick) is None)
tick = self.tick
for i in range(0, 340):
tick['data']['last'] = random.uniform(300, 350)
action = strategy.update(tick)
if action is not None:
self.assertNotEquals(action, SELL)
self.assertNotEquals(action, BUY_TO_COVER)
| bsd-3-clause | 5,868,838,307,480,386,000 | 37 | 281 | 0.637812 | false |
takeshineshiro/keystone | keystone/common/extension.py | 31 | 1652 | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
ADMIN_EXTENSIONS = {}
PUBLIC_EXTENSIONS = {}
def register_admin_extension(url_prefix, extension_data):
"""Register extension with collection of admin extensions.
Extensions register the information here that will show
up in the /extensions page as a way to indicate that the extension is
active.
url_prefix: unique key for the extension that will appear in the
    URLs generated by the extension.
extension_data is a dictionary. The expected fields are:
'name': short, human readable name of the extension
'namespace': xml namespace
'alias': identifier for the extension
'updated': date the extension was last updated
'description': text description of the extension
'links': hyperlinks to documents describing the extension
"""
ADMIN_EXTENSIONS[url_prefix] = extension_data
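# Illustrative sketch (hypothetical extension data, following the field list
# documented above; the alias, namespace, dates and links are made up):
EXAMPLE_EXTENSION_DATA = {
    'name': 'Example Extension',
    'namespace': 'http://docs.openstack.org/identity/api/ext/OS-EX/v1.0',
    'alias': 'OS-EX',
    'updated': '2013-07-07T12:00:00-00:00',
    'description': 'An example extension to the Identity API.',
    'links': [{'rel': 'describedby',
               'type': 'text/html',
               'href': 'https://example.com/identity-api'}],
}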
def register_public_extension(url_prefix, extension_data):
"""Same as register_admin_extension but for public extensions."""
PUBLIC_EXTENSIONS[url_prefix] = extension_data
| apache-2.0 | 3,783,975,581,597,571,600 | 35.711111 | 75 | 0.724576 | false |
bankonmeOS/cjdns | node_build/dependencies/libuv/build/gyp/pylib/gyp/generator/android.py | 15 | 43658 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This generates makefiles suitable for inclusion into the Android build system
# via an Android.mk file. It is based on make.py, the standard makefile
# generator.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level GypAndroid.mk. This means that all
# variables in .mk-files clobber one another, and furthermore that any
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
import gyp
import gyp.common
import gyp.generator.make as make # Reuse global functions from make backend.
import os
import re
import subprocess
generator_default_variables = {
'OS': 'android',
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_SUFFIX': '.so',
'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
'LIB_DIR': '$(obj).$(TOOLSET)',
'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
'RULE_INPUT_PATH': '$(RULE_SOURCES)',
'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)',
'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
}
# Make supports multiple toolsets
generator_supports_multiple_toolsets = True
# Generator-specific gyp specs.
generator_additional_non_configuration_keys = [
# Boolean to declare that this target does not want its name mangled.
'android_unmangled_name',
]
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
SHARED_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify.
gyp_all_modules:
"""
header = """\
# This file is generated by gyp; do not edit.
"""
android_standard_include_paths = set([
# JNI_H_INCLUDE in build/core/binary.mk
'dalvik/libnativehelper/include/nativehelper',
# from SRC_HEADERS in build/core/config.mk
'system/core/include',
'hardware/libhardware/include',
'hardware/libhardware_legacy/include',
'hardware/ril/include',
'dalvik/libnativehelper/include',
'frameworks/native/include',
'frameworks/native/opengl/include',
'frameworks/base/include',
'frameworks/base/opengl/include',
'frameworks/base/native/include',
'external/skia/include',
# TARGET_C_INCLUDES in build/core/combo/TARGET_linux-arm.mk
'bionic/libc/arch-arm/include',
'bionic/libc/include',
'bionic/libstdc++/include',
'bionic/libc/kernel/common',
'bionic/libc/kernel/arch-arm',
'bionic/libm/include',
'bionic/libm/include/arm',
'bionic/libthread_db/include',
])
# Map gyp target types to Android module classes.
MODULE_CLASSES = {
'static_library': 'STATIC_LIBRARIES',
'shared_library': 'SHARED_LIBRARIES',
'executable': 'EXECUTABLES',
}
def IsCPPExtension(ext):
return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx'
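# Illustrative sketch (not part of the original generator): the extension
# table lives in the reused make backend, e.g. '.cc' -> 'cxx', '.c' -> 'cc'.
def _DemoIsCPPExtension():
  return IsCPPExtension('.cc') and not IsCPPExtension('.c')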
def Sourceify(path):
"""Convert a path to its source directory form. The Android backend does not
support options.generator_output, so this function is a noop."""
return path
# Map from qualified target to path to output.
# For Android, the target of these maps is a tuple ('static', 'modulename'),
# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
# since we link by module.
target_outputs = {}
# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
target_link_deps = {}
class AndroidMkWriter(object):
"""AndroidMkWriter packages up the writing of one target-specific Android.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
"""
def __init__(self, android_top_dir):
self.android_top_dir = android_top_dir
def Write(self, qualified_target, relative_target, base_path, output_filename,
spec, configs, part_of_all):
"""The main entry point: writes a .mk file for a single target.
Arguments:
qualified_target: target we're generating
relative_target: qualified target name relative to the root
base_path: path relative to source root we're building in, used to resolve
target-relative paths
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
"""
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
self.qualified_target = qualified_target
self.relative_target = relative_target
self.path = base_path
self.target = spec['target_name']
self.type = spec['type']
self.toolset = spec['toolset']
deps, link_deps = self.ComputeDeps(spec)
# Some of the generation below can add extra output, sources, or
# link dependencies. All of the out params of the functions that
# follow use names like extra_foo.
extra_outputs = []
extra_sources = []
self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
self.android_module = self.ComputeAndroidModule(spec)
(self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
self.output = self.output_binary = self.ComputeOutput(spec)
# Standard header.
self.WriteLn('include $(CLEAR_VARS)\n')
# Module class and name.
self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
self.WriteLn('LOCAL_MODULE := ' + self.android_module)
# Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE.
# The library module classes fail if the stem is set. ComputeOutputParts
# makes sure that stem == modulename in these cases.
if self.android_stem != self.android_module:
self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
self.WriteLn('LOCAL_MODULE_TAGS := optional')
if self.toolset == 'host':
self.WriteLn('LOCAL_IS_HOST_MODULE := true')
# Grab output directories; needed for Actions and Rules.
self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir)')
self.WriteLn('gyp_shared_intermediate_dir := '
'$(call intermediates-dir-for,GYP,shared)')
self.WriteLn()
# List files this target depends on so that actions/rules/copies/sources
# can depend on the list.
# TODO: doesn't pull in things through transitive link deps; needed?
target_dependencies = [x[1] for x in deps if x[0] == 'path']
self.WriteLn('# Make sure our deps are built first.')
self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
local_pathify=True)
# Actions must come first, since they can generate more OBJs for use below.
if 'actions' in spec:
self.WriteActions(spec['actions'], extra_sources, extra_outputs)
# Rules must be early like actions.
if 'rules' in spec:
self.WriteRules(spec['rules'], extra_sources, extra_outputs)
if 'copies' in spec:
self.WriteCopies(spec['copies'], extra_outputs)
# GYP generated outputs.
self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)
# Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
# on both our dependency targets and our generated files.
self.WriteLn('# Make sure our deps and generated files are built first.')
self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
'$(GYP_GENERATED_OUTPUTS)')
self.WriteLn()
# Sources.
if spec.get('sources', []) or extra_sources:
self.WriteSources(spec, configs, extra_sources)
self.WriteTarget(spec, configs, deps, link_deps, part_of_all)
# Update global list of target outputs, used in dependency tracking.
target_outputs[qualified_target] = ('path', self.output_binary)
# Update global list of link dependencies.
if self.type == 'static_library':
target_link_deps[qualified_target] = ('static', self.android_module)
elif self.type == 'shared_library':
target_link_deps[qualified_target] = ('shared', self.android_module)
self.fp.close()
return self.android_module
def WriteActions(self, actions, extra_sources, extra_outputs):
"""Write Makefile code for any 'actions' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
actions (used to make other pieces dependent on these
actions)
"""
for action in actions:
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
action['action_name']))
self.WriteLn('### Rules for action "%s":' % action['action_name'])
inputs = action['inputs']
outputs = action['outputs']
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set()
for out in outputs:
if not out.startswith('$'):
print ('WARNING: Action for target "%s" writes output to local path '
'"%s".' % (self.target, out))
dir = os.path.split(out)[0]
if dir:
dirs.add(dir)
if int(action.get('process_outputs_as_sources', False)):
extra_sources += outputs
# Prepare the actual command.
command = gyp.common.EncodePOSIXShellList(action['action'])
if 'message' in action:
quiet_cmd = 'Gyp action: %s ($@)' % action['message']
else:
quiet_cmd = 'Gyp action: %s ($@)' % name
if len(dirs) > 0:
command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
command = cd_action + command
# The makefile rules are all relative to the top dir, but the gyp actions
# are defined relative to their containing dir. This replaces the gyp_*
# variables for the action rule with an absolute version so that the
# output goes in the right place.
# Only write the gyp_* rules for the "primary" output (:1);
# it's superfluous for the "extra outputs", and this avoids accidentally
# writing duplicate dummy rules for those outputs.
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
'$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# Android's envsetup.sh adds a number of directories to the path including
# the built host binary directory. This causes actions/rules invoked by
# gyp to sometimes use these instead of system versions, e.g. bison.
# The built host binaries may not be suitable, and can cause errors.
# So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
# set by envsetup.
self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
% main_output)
for input in inputs:
assert ' ' not in input, (
"Spaces in action input filenames not supported (%s)" % input)
for output in outputs:
assert ' ' not in output, (
"Spaces in action output filenames not supported (%s)" % output)
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
(main_output, ' '.join(map(self.LocalPathify, inputs))))
self.WriteLn('\t@echo "%s"' % quiet_cmd)
self.WriteLn('\t$(hide)%s\n' % command)
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))
extra_outputs += outputs
self.WriteLn()
self.WriteLn()
def WriteRules(self, rules, extra_sources, extra_outputs):
"""Write Makefile code for any 'rules' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
rules (used to make other pieces dependent on these rules)
"""
if len(rules) == 0:
return
rule_trigger = '%s_rule_trigger' % self.android_module
did_write_rule = False
for rule in rules:
if len(rule.get('rule_sources', [])) == 0:
continue
did_write_rule = True
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
rule['rule_name']))
self.WriteLn('\n### Generated for rule "%s":' % name)
self.WriteLn('# "%s":' % rule)
inputs = rule.get('inputs')
for rule_source in rule.get('rule_sources', []):
(rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
(rule_source_root, rule_source_ext) = \
os.path.splitext(rule_source_basename)
outputs = [self.ExpandInputRoot(out, rule_source_root,
rule_source_dirname)
for out in rule['outputs']]
dirs = set()
for out in outputs:
if not out.startswith('$'):
print ('WARNING: Rule for target %s writes output to local path %s'
% (self.target, out))
dir = os.path.dirname(out)
if dir:
dirs.add(dir)
extra_outputs += outputs
if int(rule.get('process_outputs_as_sources', False)):
extra_sources.extend(outputs)
components = []
for component in rule['action']:
component = self.ExpandInputRoot(component, rule_source_root,
rule_source_dirname)
if '$(RULE_SOURCES)' in component:
component = component.replace('$(RULE_SOURCES)',
rule_source)
components.append(component)
command = gyp.common.EncodePOSIXShellList(components)
cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
command = cd_action + command
if dirs:
command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
# We set up a rule to build the first output, and then set up
# a rule for each additional output to depend on the first.
outputs = map(self.LocalPathify, outputs)
main_output = outputs[0]
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
'$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# See explanation in WriteActions.
self.WriteLn('%s: export PATH := '
'$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)
main_output_deps = self.LocalPathify(rule_source)
if inputs:
main_output_deps += ' '
main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
(main_output, main_output_deps))
self.WriteLn('\t%s\n' % command)
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (output, main_output))
self.WriteLn('.PHONY: %s' % (rule_trigger))
self.WriteLn('%s: %s' % (rule_trigger, main_output))
self.WriteLn('')
if did_write_rule:
extra_sources.append(rule_trigger) # Force all rules to run.
self.WriteLn('### Finished generating for all rules')
self.WriteLn('')
def WriteCopies(self, copies, extra_outputs):
"""Write Makefile code for any 'copies' from the gyp input.
extra_outputs: a list that will be filled in with any outputs of this action
(used to make other pieces dependent on this action)
"""
self.WriteLn('### Generated for copy rule.')
variable = make.StringToMakefileVariable(self.relative_target + '_copies')
outputs = []
for copy in copies:
for path in copy['files']:
# The Android build system does not allow generation of files into the
# source tree. The destination should start with a variable, which will
# typically be $(gyp_intermediate_dir) or
# $(gyp_shared_intermediate_dir). Note that we can't use an assertion
# because some of the gyp tests depend on this.
if not copy['destination'].startswith('$'):
print ('WARNING: Copy rule for target %s writes output to '
'local path %s' % (self.target, copy['destination']))
# LocalPathify() calls normpath, stripping trailing slashes.
path = Sourceify(self.LocalPathify(path))
filename = os.path.split(path)[1]
output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
filename)))
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
(output, path))
self.WriteLn('\t@echo Copying: $@')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
self.WriteLn()
outputs.append(output)
self.WriteLn('%s = %s' % (variable,
' '.join(map(make.QuoteSpaces, outputs))))
extra_outputs.append('$(%s)' % variable)
self.WriteLn()
def WriteSourceFlags(self, spec, configs):
"""Write out the flags and include paths used to compile source files for
the current target.
Args:
spec, configs: input from gyp.
"""
for configname, config in sorted(configs.iteritems()):
extracted_includes = []
self.WriteLn('\n# Flags passed to both C and C++ files.')
cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
config.get('cflags', []) + config.get('cflags_c', []))
extracted_includes.extend(includes_from_cflags)
self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)
self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
prefix='-D', quoter=make.EscapeCppDefine)
self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
includes = list(config.get('include_dirs', []))
includes.extend(extracted_includes)
includes = map(Sourceify, map(self.LocalPathify, includes))
includes = self.NormalizeIncludePaths(includes)
self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)
self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)
self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
'$(MY_DEFS_$(GYP_CONFIGURATION))')
# Undefine ANDROID for host modules
# TODO: the source code should not use macro ANDROID to tell if it's host
# or target module.
if self.toolset == 'host':
self.WriteLn('# Undefine ANDROID for host modules')
self.WriteLn('LOCAL_CFLAGS += -UANDROID')
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
# Android uses separate flags for assembly file invocations, but gyp expects
# the same CFLAGS to be applied:
self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
def WriteSources(self, spec, configs, extra_sources):
"""Write Makefile code for any 'sources' from the gyp input.
These are source files necessary to build the current target.
We need to handle shared_intermediate directory source files as
a special case by copying them to the intermediate directory and
    treating them as generated sources. Otherwise the Android build
rules won't pick them up.
Args:
spec, configs: input from gyp.
extra_sources: Sources generated from Actions or Rules.
"""
sources = filter(make.Compilable, spec.get('sources', []))
generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
extra_sources = filter(make.Compilable, extra_sources)
# Determine and output the C++ extension used by these sources.
# We simply find the first C++ file and use that extension.
all_sources = sources + extra_sources
local_cpp_extension = '.cpp'
for source in all_sources:
(root, ext) = os.path.splitext(source)
if IsCPPExtension(ext):
local_cpp_extension = ext
break
if local_cpp_extension != '.cpp':
self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)
# We need to move any non-generated sources that are coming from the
# shared intermediate directory out of LOCAL_SRC_FILES and put them
# into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
# that don't match our local_cpp_extension, since Android will only
# generate Makefile rules for a single LOCAL_CPP_EXTENSION.
local_files = []
for source in sources:
(root, ext) = os.path.splitext(source)
if '$(gyp_shared_intermediate_dir)' in source:
extra_sources.append(source)
elif '$(gyp_intermediate_dir)' in source:
extra_sources.append(source)
elif IsCPPExtension(ext) and ext != local_cpp_extension:
extra_sources.append(source)
else:
local_files.append(os.path.normpath(os.path.join(self.path, source)))
# For any generated source, if it is coming from the shared intermediate
# directory then we add a Make rule to copy them to the local intermediate
# directory first. This is because the Android LOCAL_GENERATED_SOURCES
# must be in the local module intermediate directory for the compile rules
# to work properly. If the file has the wrong C++ extension, then we add
# a rule to copy that to intermediates and use the new version.
final_generated_sources = []
    # If a source file gets copied, we still need to add the original source
    # directory as a header search path, since GCC searches for headers in the
    # directory that contains the source file by default.
origin_src_dirs = []
for source in extra_sources:
local_file = source
      if '$(gyp_intermediate_dir)/' not in local_file:
basename = os.path.basename(local_file)
local_file = '$(gyp_intermediate_dir)/' + basename
(root, ext) = os.path.splitext(local_file)
if IsCPPExtension(ext) and ext != local_cpp_extension:
local_file = root + local_cpp_extension
if local_file != source:
self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
self.WriteLn('\tmkdir -p $(@D); cp $< $@')
origin_src_dirs.append(os.path.dirname(source))
final_generated_sources.append(local_file)
# We add back in all of the non-compilable stuff to make sure that the
# make rules have dependencies on them.
final_generated_sources.extend(generated_not_sources)
self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')
origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')
self.WriteList(local_files, 'LOCAL_SRC_FILES')
# Write out the flags used to compile the source; this must be done last
# so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
self.WriteSourceFlags(spec, configs)
def ComputeAndroidModule(self, spec):
"""Return the Android module name used for a gyp spec.
We use the complete qualified target name to avoid collisions between
duplicate targets in different directories. We also add a suffix to
distinguish gyp-generated module names.
"""
if int(spec.get('android_unmangled_name', 0)):
assert self.type != 'shared_library' or self.target.startswith('lib')
return self.target
if self.type == 'shared_library':
# For reasons of convention, the Android build system requires that all
# shared library modules are named 'libfoo' when generating -l flags.
prefix = 'lib_'
else:
prefix = ''
if spec['toolset'] == 'host':
suffix = '_host_gyp'
else:
suffix = '_gyp'
if self.path:
name = '%s%s_%s%s' % (prefix, self.path, self.target, suffix)
else:
name = '%s%s%s' % (prefix, self.target, suffix)
return make.StringToMakefileVariable(name)
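  # A hypothetical example: a 'shared_library' target 'foo' from path
  # 'base/bar' built for the target toolset becomes 'lib_base_bar_foo_gyp'
  # (StringToMakefileVariable rewrites the path separator to '_'); the same
  # target built for the host toolset would end in '_host_gyp' instead.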
def ComputeOutputParts(self, spec):
"""Return the 'output basename' of a gyp spec, split into filename + ext.
Android libraries must be named the same thing as their module name,
otherwise the linker can't find them, so product_name and so on must be
ignored if we are building a library, and the "lib" prepending is
not done for Android.
"""
assert self.type != 'loadable_module' # TODO: not supported?
target = spec['target_name']
target_prefix = ''
target_ext = ''
if self.type == 'static_library':
target = self.ComputeAndroidModule(spec)
target_ext = '.a'
elif self.type == 'shared_library':
target = self.ComputeAndroidModule(spec)
target_ext = '.so'
elif self.type == 'none':
target_ext = '.stamp'
elif self.type != 'executable':
print ("ERROR: What output file should be generated?",
"type", self.type, "target", target)
if self.type != 'static_library' and self.type != 'shared_library':
target_prefix = spec.get('product_prefix', target_prefix)
target = spec.get('product_name', target)
product_ext = spec.get('product_extension')
if product_ext:
target_ext = '.' + product_ext
target_stem = target_prefix + target
return (target_stem, target_ext)
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
return ''.join(self.ComputeOutputParts(spec))
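  # Rough examples, using the module names computed above: a static library
  # 'foo' in path 'base' yields ('base_foo_gyp', '.a'), a shared library
  # yields ('lib_base_foo_gyp', '.so'), and a 'none' target 'foo' yields
  # ('foo', '.stamp'), subject to product_prefix/product_name/product_extension.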
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
if self.type == 'executable' and self.toolset == 'host':
# We install host executables into shared_intermediate_dir so they can be
# run by gyp rules that refer to PRODUCT_DIR.
path = '$(gyp_shared_intermediate_dir)'
elif self.type == 'shared_library':
if self.toolset == 'host':
path = '$(HOST_OUT_INTERMEDIATE_LIBRARIES)'
else:
path = '$(TARGET_OUT_INTERMEDIATE_LIBRARIES)'
else:
# Other targets just get built into their intermediate dir.
if self.toolset == 'host':
path = '$(call intermediates-dir-for,%s,%s,true)' % (self.android_class,
self.android_module)
else:
path = '$(call intermediates-dir-for,%s,%s)' % (self.android_class,
self.android_module)
assert spec.get('product_dir') is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
def NormalizeIncludePaths(self, include_paths):
""" Normalize include_paths.
Convert absolute paths to relative to the Android top directory;
filter out include paths that are already brought in by the Android build
system.
Args:
include_paths: A list of unprocessed include paths.
Returns:
A list of normalized include paths.
"""
normalized = []
for path in include_paths:
if path[0] == '/':
path = gyp.common.RelativePath(path, self.android_top_dir)
# Filter out the Android standard search path.
if path not in android_standard_include_paths:
normalized.append(path)
return normalized
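  # Sketch with made-up paths: if android_top_dir is '/src/android', then
  # ['/src/android/external/foo', 'local/include'] normalizes to
  # ['external/foo', 'local/include'], while any entry appearing in
  # android_standard_include_paths is dropped entirely.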
def ExtractIncludesFromCFlags(self, cflags):
"""Extract includes "-I..." out from cflags
Args:
cflags: A list of compiler flags, which may be mixed with "-I.."
Returns:
      A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
"""
clean_cflags = []
include_paths = []
for flag in cflags:
if flag.startswith('-I'):
include_paths.append(flag[2:])
else:
clean_cflags.append(flag)
return (clean_cflags, include_paths)
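  # Doctest-style sketch with hypothetical flags:
  #   self.ExtractIncludesFromCFlags(['-O2', '-Iexternal/foo', '-Wall'])
  #   -> (['-O2', '-Wall'], ['external/foo'])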
def ComputeAndroidLibraryModuleNames(self, libraries):
"""Compute the Android module names from libraries, ie spec.get('libraries')
Args:
libraries: the value of spec.get('libraries')
Returns:
A tuple (static_lib_modules, dynamic_lib_modules)
"""
static_lib_modules = []
dynamic_lib_modules = []
for libs in libraries:
# Libs can have multiple words.
for lib in libs.split():
# Filter the system libraries, which are added by default by the Android
# build system.
if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or
lib.endswith('libgcc.a')):
continue
match = re.search(r'([^/]+)\.a$', lib)
if match:
static_lib_modules.append(match.group(1))
continue
match = re.search(r'([^/]+)\.so$', lib)
if match:
dynamic_lib_modules.append(match.group(1))
continue
# "-lstlport" -> libstlport
if lib.startswith('-l'):
if lib.endswith('_static'):
static_lib_modules.append('lib' + lib[2:])
else:
dynamic_lib_modules.append('lib' + lib[2:])
return (static_lib_modules, dynamic_lib_modules)
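  # Hypothetical example: ['-lcrypto -lssl_static', 'obj/libfoo.a', '-lm']
  # yields (['libssl_static', 'libfoo'], ['libcrypto']); '-lm' is dropped
  # because the Android build system already links it by default.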
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
deps = []
link_deps = []
if 'dependencies' in spec:
deps.extend([target_outputs[dep] for dep in spec['dependencies']
if target_outputs[dep]])
for dep in spec['dependencies']:
if dep in target_link_deps:
link_deps.append(target_link_deps[dep])
deps.extend(link_deps)
return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
def WriteTargetFlags(self, spec, configs, link_deps):
"""Write Makefile code to specify the link flags and library dependencies.
spec, configs: input from gyp.
link_deps: link dependency list; see ComputeDeps()
"""
for configname, config in sorted(configs.iteritems()):
ldflags = list(config.get('ldflags', []))
self.WriteLn('')
self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
self.WriteLn('\nLOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION))')
# Libraries (i.e. -lfoo)
libraries = gyp.common.uniquer(spec.get('libraries', []))
static_libs, dynamic_libs = self.ComputeAndroidLibraryModuleNames(
libraries)
# Link dependencies (i.e. libfoo.a, libfoo.so)
static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
self.WriteLn('')
self.WriteList(static_libs + static_link_deps,
'LOCAL_STATIC_LIBRARIES')
self.WriteLn('# Enable grouping to fix circular references')
self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
self.WriteLn('')
self.WriteList(dynamic_libs + shared_link_deps,
'LOCAL_SHARED_LIBRARIES')
def WriteTarget(self, spec, configs, deps, link_deps, part_of_all):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
part_of_all: flag indicating this target is part of 'all'
"""
self.WriteLn('### Rules for final target.')
if self.type != 'none':
self.WriteTargetFlags(spec, configs, link_deps)
# Add to the set of targets which represent the gyp 'all' target. We use the
# name 'gyp_all_modules' as the Android build system doesn't allow the use
# of the Make target 'all' and because 'all_modules' is the equivalent of
# the Make target 'all' on Android.
if part_of_all:
self.WriteLn('# Add target alias to "gyp_all_modules" target.')
self.WriteLn('.PHONY: gyp_all_modules')
self.WriteLn('gyp_all_modules: %s' % self.android_module)
self.WriteLn('')
# Add an alias from the gyp target name to the Android module name. This
# simplifies manual builds of the target, and is required by the test
# framework.
if self.target != self.android_module:
self.WriteLn('# Alias gyp target name.')
self.WriteLn('.PHONY: %s' % self.target)
self.WriteLn('%s: %s' % (self.target, self.android_module))
self.WriteLn('')
# Add the command to trigger build of the target type depending
# on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
# NOTE: This has to come last!
modifier = ''
if self.toolset == 'host':
modifier = 'HOST_'
if self.type == 'static_library':
self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
elif self.type == 'shared_library':
self.WriteLn('LOCAL_PRELINK_MODULE := false')
self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
elif self.type == 'executable':
if self.toolset == 'host':
self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
else:
# Don't install target executables for now, as it results in them being
# included in ROM. This can be revisited if there's a reason to install
# them later.
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
else:
self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
self.WriteLn()
self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
self.WriteLn()
self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) touch $@')
def WriteList(self, value_list, variable=None, prefix='',
quoter=make.QuoteIfNecessary, local_pathify=False):
"""Write a variable definition that is a list of values.
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
    foo := blaha blahb
but in a pretty-printed style.
"""
values = ''
if value_list:
value_list = [quoter(prefix + l) for l in value_list]
if local_pathify:
value_list = [self.LocalPathify(l) for l in value_list]
values = ' \\\n\t' + ' \\\n\t'.join(value_list)
self.fp.write('%s :=%s\n\n' % (variable, values))
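  # Concretely, the docstring example emits:
  #   foo := \
  #           blaha \
  #           blahb
  # (each continuation line is tab-indented), followed by a blank line.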
def WriteLn(self, text=''):
self.fp.write(text + '\n')
def LocalPathify(self, path):
"""Convert a subdirectory-relative path into a normalized path which starts
with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
Absolute paths, or paths that contain variables, are just normalized."""
if '$(' in path or os.path.isabs(path):
# path is not a file in the project tree in this case, but calling
# normpath is still important for trimming trailing slashes.
return os.path.normpath(path)
local_path = os.path.join('$(LOCAL_PATH)', self.path, path)
local_path = os.path.normpath(local_path)
# Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
# - i.e. that the resulting path is still inside the project tree. The
# path may legitimately have ended up containing just $(LOCAL_PATH), though,
# so we don't look for a slash.
assert local_path.startswith('$(LOCAL_PATH)'), (
        'Path %s attempts to escape from gyp path %s !' % (path, self.path))
return local_path
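  # Hypothetical examples, assuming self.path == 'foo/bar':
  #   'baz/qux.c'     -> '$(LOCAL_PATH)/foo/bar/baz/qux.c'
  #   '../shared/a.c' -> '$(LOCAL_PATH)/foo/shared/a.c'
  #   '/abs/path/'    -> '/abs/path'     (absolute: normalized only)
  #   '$(obj)/x.o'    -> '$(obj)/x.o'    (contains a variable)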
def ExpandInputRoot(self, template, expansion, dirname):
if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
return template
path = template % {
'INPUT_ROOT': expansion,
'INPUT_DIRNAME': dirname,
}
return path
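  # Hypothetical examples:
  #   self.ExpandInputRoot('%(INPUT_ROOT)s.pb.cc', 'msg', 'protos')
  #     -> 'msg.pb.cc'
  #   self.ExpandInputRoot('%(INPUT_DIRNAME)s/gen.h', 'msg', 'protos')
  #     -> 'protos/gen.h'
  #   self.ExpandInputRoot('fixed.h', 'msg', 'protos') -> 'fixed.h' (unchanged)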
def PerformBuild(data, configurations, params):
# The android backend only supports the default configuration.
options = params['options']
makefile = os.path.abspath(os.path.join(options.toplevel_dir,
'GypAndroid.mk'))
env = dict(os.environ)
env['ONE_SHOT_MAKEFILE'] = makefile
arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
print 'Building: %s' % arguments
subprocess.check_call(arguments, env=env)
def GenerateOutput(target_list, target_dicts, data, params):
options = params['options']
generator_flags = params.get('generator_flags', {})
builddir_name = generator_flags.get('output_dir', 'out')
limit_to_target_all = generator_flags.get('limit_to_target_all', False)
android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
def CalculateMakefilePath(build_file, base_name):
"""Determine where to write a Makefile for a given gyp file."""
# Paths in gyp files are relative to the .gyp file, but we want
# paths relative to the source root for the master makefile. Grab
# the path of the .gyp file as the base to relativize against.
# E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
base_path = gyp.common.RelativePath(os.path.dirname(build_file),
options.depth)
# We write the file in the base_path directory.
output_file = os.path.join(options.depth, base_path, base_name)
assert not options.generator_output, (
'The Android backend does not support options.generator_output.')
base_path = gyp.common.RelativePath(os.path.dirname(build_file),
options.toplevel_dir)
return base_path, output_file
# TODO: search for the first non-'Default' target. This can go
# away when we add verification that all targets have the
# necessary configurations.
default_configuration = None
toolsets = set([target_dicts[target]['toolset'] for target in target_list])
for target in target_list:
spec = target_dicts[target]
if spec['default_configuration'] != 'Default':
default_configuration = spec['default_configuration']
break
if not default_configuration:
default_configuration = 'Default'
srcdir = '.'
makefile_name = 'GypAndroid' + options.suffix + '.mk'
makefile_path = os.path.join(options.toplevel_dir, makefile_name)
assert not options.generator_output, (
'The Android backend does not support options.generator_output.')
gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, 'w')
root_makefile.write(header)
# We set LOCAL_PATH just once, here, to the top of the project tree. This
# allows all the other paths we use to be relative to the Android.mk file,
# as the Android build system expects.
root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')
# Find the list of targets that derive from the gyp file(s) being built.
needed_targets = set()
for build_file in params['build_files']:
for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
needed_targets.add(target)
build_files = set()
include_list = set()
android_modules = {}
for qualified_target in target_list:
build_file, target, toolset = gyp.common.ParseQualifiedTarget(
qualified_target)
relative_build_file = gyp.common.RelativePath(build_file,
options.toplevel_dir)
build_files.add(relative_build_file)
included_files = data[build_file]['included_files']
for included_file in included_files:
# The included_files entries are relative to the dir of the build file
# that included them, so we have to undo that and then make them relative
# to the root dir.
relative_include_file = gyp.common.RelativePath(
gyp.common.UnrelativePath(included_file, build_file),
options.toplevel_dir)
abs_include_file = os.path.abspath(relative_include_file)
# If the include file is from the ~/.gyp dir, we should use absolute path
# so that relocating the src dir doesn't break the path.
if (params['home_dot_gyp'] and
abs_include_file.startswith(params['home_dot_gyp'])):
build_files.add(abs_include_file)
else:
build_files.add(relative_include_file)
base_path, output_file = CalculateMakefilePath(build_file,
target + '.' + toolset + options.suffix + '.mk')
spec = target_dicts[qualified_target]
configs = spec['configurations']
part_of_all = (qualified_target in needed_targets and
not int(spec.get('suppress_wildcard', False)))
if limit_to_target_all and not part_of_all:
continue
relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
toolset)
writer = AndroidMkWriter(android_top_dir)
android_module = writer.Write(qualified_target, relative_target, base_path,
output_file, spec, configs,
part_of_all=part_of_all)
if android_module in android_modules:
print ('ERROR: Android module names must be unique. The following '
'targets both generate Android module name %s.\n %s\n %s' %
(android_module, android_modules[android_module],
qualified_target))
return
android_modules[android_module] = qualified_target
# Our root_makefile lives at the source root. Compute the relative path
# from there to the output_file for including.
mkfile_rel_path = gyp.common.RelativePath(output_file,
os.path.dirname(makefile_path))
include_list.add(mkfile_rel_path)
root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
# Write out the sorted list of includes.
root_makefile.write('\n')
for include_file in sorted(include_list):
root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
root_makefile.write('\n')
root_makefile.write(SHARED_FOOTER)
root_makefile.close()
| gpl-3.0 | -1,663,012,194,707,850,800 | 39.725746 | 80 | 0.642013 | false |
ichuang/sympy | sympy/polys/domains/gmpyrationalfield.py | 3 | 3001 | """Implementaton of :class:`GMPYRationalField` class. """
from sympy.polys.domains.rationalfield import RationalField
from sympy.polys.domains.groundtypes import (
GMPYRationalType, SymPyRationalType,
gmpy_numer, gmpy_denom, gmpy_factorial,
)
from sympy.polys.polyerrors import CoercionFailed
class GMPYRationalField(RationalField):
"""Rational field based on GMPY mpq class. """
dtype = GMPYRationalType
zero = dtype(0)
one = dtype(1)
alias = 'QQ_gmpy'
def __init__(self):
pass
def to_sympy(self, a):
"""Convert `a` to a SymPy object. """
return SymPyRationalType(int(gmpy_numer(a)),
int(gmpy_denom(a)))
def from_sympy(self, a):
"""Convert SymPy's Integer to `dtype`. """
if a.is_Rational:
return GMPYRationalType(a.p, a.q)
elif a.is_Float:
from sympy.polys.domains import RR
return GMPYRationalType(*RR.as_integer_ratio(a))
else:
raise CoercionFailed("expected `Rational` object, got %s" % a)
def from_ZZ_python(K1, a, K0):
"""Convert a Python `int` object to `dtype`. """
return GMPYRationalType(a)
def from_QQ_python(K1, a, K0):
"""Convert a Python `Fraction` object to `dtype`. """
return GMPYRationalType(a.numerator, a.denominator)
def from_ZZ_sympy(K1, a, K0):
"""Convert a SymPy `Integer` object to `dtype`. """
return GMPYRationalType(a.p)
def from_QQ_sympy(K1, a, K0):
"""Convert a SymPy `Rational` object to `dtype`. """
return GMPYRationalType(a.p, a.q)
def from_ZZ_gmpy(K1, a, K0):
"""Convert a GMPY `mpz` object to `dtype`. """
return GMPYRationalType(a)
def from_QQ_gmpy(K1, a, K0):
"""Convert a GMPY `mpq` object to `dtype`. """
return a
def from_RR_sympy(K1, a, K0):
"""Convert a SymPy `Float` object to `dtype`. """
return GMPYRationalType(*K0.as_integer_ratio(a))
def from_RR_mpmath(K1, a, K0):
"""Convert a mpmath `mpf` object to `dtype`. """
return GMPYRationalType(*K0.as_integer_ratio(a))
def exquo(self, a, b):
"""Exact quotient of `a` and `b`, implies `__div__`. """
return GMPYRationalType(a.qdiv(b))
def quo(self, a, b):
"""Quotient of `a` and `b`, implies `__div__`. """
return GMPYRationalType(a.qdiv(b))
def rem(self, a, b):
"""Remainder of `a` and `b`, implies nothing. """
return self.zero
def div(self, a, b):
"""Division of `a` and `b`, implies `__div__`. """
return GMPYRationalType(a.qdiv(b)), self.zero
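    # Informal sketch, not part of the original module: mpq values form a
    # field, so division is always exact. With QQ = GMPYRationalField(),
    # QQ.div(QQ(3), QQ(2)) returns (mpq(3,2), mpq(0,1)), and QQ.rem of any
    # pair is QQ.zero.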
def numer(self, a):
"""Returns numerator of `a`. """
return gmpy_numer(a)
def denom(self, a):
"""Returns denominator of `a`. """
return gmpy_denom(a)
def factorial(self, a):
"""Returns factorial of `a`. """
return GMPYRationalType(gmpy_factorial(int(a)))
| bsd-3-clause | 6,129,132,649,464,025,000 | 30.260417 | 74 | 0.579474 | false |
cipriancraciun/extremely-simple-cluster-platform | components/py-tools/sources/escp/tools/enforcements.py | 1 | 23625 |
import re
import types
import escp.tools.callables as callables
import escp.tools.outcomes as outcomes
_e_debug = False
class Enforcer (object) :
def __init__ (self) :
return
def __nonzero__ (self) :
return _e_debug
def enforce (self, _condition) :
if _condition is not True :
raise EnforcementError (self.enforce)
return True
def enforce_is (self, _value_a, _value_b) :
if _value_a is not _value_b :
raise EnforcementError (self.enforce_is, value_a = _value_a, value_b = _value_b)
return True
def enforce_is_any (self, _value, _values) :
for _value_ in _values :
if _value is _value_ :
return True
raise EnforcementError (self.enforce_is_any, value = _value, values = _values)
def enforce_is_not (self, _value_a, _value_b) :
if _value_a is _value_b :
raise EnforcementError (self.enforce_is_not, value_a = _value_a, value_b = _value_b)
return True
def enforce_is_not_all (self, _value, _values) :
for _value_ in _values :
if _value is _value_ :
raise EnforcementError (self.enforce_is_not_all, value = _value, values = _values)
return True
def enforce_is_not_any (self, _value, _values) :
for _value_ in _values :
if _value is not _value_ :
return True
raise EnforcementError (self.enforce_is_not_any, value = _value, values = _values)
def enforce_is_none (self, _value) :
if _value is not None :
raise EnforcementError (self.enforce_is_none, value = _value)
return True
def enforce_is_none_all (self, _values) :
for _value in _values :
if _value is not None :
raise EnforcementError (self.enforce_is_none_all, value = _value)
return True
def enforce_is_none_any (self, _values) :
for _value in _values :
if _value is None :
return True
raise EnforcementError (self.enforce_is_none_any, values = _values)
def enforce_is_not_none (self, _value) :
if _value is None :
raise EnforcementError (self.enforce_is_not_none, value = _value)
return True
def enforce_is_not_none_all (self, _values) :
for _value in _values :
if _value is None :
raise EnforcementError (self.enforce_is_not_none_all, value = _value)
return True
def enforce_is_not_none_any (self, _values) :
for _value in _values :
if _value is not None :
return True
raise EnforcementError (self.enforce_is_not_none_any, values = _values)
def enforce_is_true (self, _value) :
if _value is not True :
raise EnforcementError (self.enforce_is_true, value = _value)
return True
def enforce_is_true_all (self, _values) :
for _value in _values :
if _value is not True :
raise EnforcementError (self.enforce_is_true_all, values = _values)
return True
def enforce_is_true_any (self, _values) :
for _value in _values :
if _value is True :
				return True
raise EnforcementError (self.enforce_is_true_any, values = _values)
def enforce_is_true_or_none (self, _value) :
if _value is not None and _value is not True :
raise EnforcementError (self.enforce_is_true_or_none, value = _value)
return True
def enforce_is_false (self, _value) :
if _value is not False :
raise EnforcementError (self.enforce_is_false, value = _value)
return True
def enforce_is_false_all (self, _values) :
for _value in _values :
if _value is not False :
raise EnforcementError (self.enforce_is_false_all, values = _values)
return True
def enforce_is_false_any (self, _values) :
for _value in _values :
if _value is False :
return True
raise EnforcementError (self.enforce_is_false_any, values = _values)
def enforce_is_false_or_none (self, _value) :
if _value is not None and _value is not False :
raise EnforcementError (self.enforce_is_false_or_none, value = _value)
return True
def enforce_instance (self, _value, _type) :
if not isinstance (_value, _type) :
raise EnforcementError (self.enforce_instance, value = _value, value_type = type (_value), expected_type = _type)
return True
def enforce_instance_or_none (self, _value, _type) :
if _value is not None and not isinstance (_value, _type) :
raise EnforcementError (self.enforce_instance_or_none, value = _value, value_type = type (_value), expected_type = _type)
return True
def enforce_int (self, _value) :
if not isinstance (_value, int) :
raise EnforcementError (self.enforce_int, value = _value, value_type = type (_value))
return True
def enforce_positive_int (self, _value) :
if not isinstance (_value, int) or _value < 0 :
raise EnforcementError (self.enforce_positive_int, value = _value, value_type = type (_value))
return True
def enforce_int_or_none (self, _value) :
if _value is not None and not isinstance (_value, int) :
raise EnforcementError (self.enforce_int_or_none, value = _value, value_type = type (_value))
return True
def enforce_float (self, _value) :
if not isinstance (_value, float) :
raise EnforcementError (self.enforce_float, value = _value, value_type = type (_value))
return True
def enforce_float_or_none (self, _value) :
if _value is not None and not isinstance (_value, float) :
raise EnforcementError (self.enforce_float_or_none, value = _value, value_type = type (_value))
return True
def enforce_positive_float (self, _value) :
if not isinstance (_value, float) or _value < 0.0 :
raise EnforcementError (self.enforce_positive_float, value = _value, value_type = type (_value))
return True
def enforce_positive_float_or_none (self, _value) :
if _value is not None and (not isinstance (_value, float) or _value < 0.0) :
raise EnforcementError (self.enforce_positive_float_or_none, value = _value, value_type = type (_value))
return True
def enforce_bool (self, _value) :
if not isinstance (_value, bool) :
raise EnforcementError (self.enforce_bool, value = _value, value_type = type (_value))
return True
def enforce_bool_all (self, _values) :
for _value in _values :
if not isinstance (_value, bool) :
raise EnforcementError (self.enforce_bool_all, values = _values, value_type = type (_value))
return True
def enforce_bool_or_none (self, _value) :
if _value is not None and not isinstance (_value, bool) :
raise EnforcementError (self.enforce_bool_or_none, value = _value, value_type = type (_value))
return True
def enforce_string (self, _value) :
if not isinstance (_value, basestring) :
raise EnforcementError (self.enforce_string, value = _value, value_type = type (_value))
return True
def enforce_string_all (self, _values) :
for _value in _values :
if not isinstance (_value, basestring) :
raise EnforcementError (self.enforce_string_all, values = _values, value_type = type (_value))
return True
def enforce_string_or_none (self, _value) :
if _value is not None and not isinstance (_value, basestring) :
raise EnforcementError (self.enforce_string_or_none, value = _value, value_type = type (_value))
return True
def enforce_string_or_none_all (self, _values) :
for _value in _values :
if _value is not None and not isinstance (_value, basestring) :
raise EnforcementError (self.enforce_string_or_none_all, values = _values, value_type = type (_value))
return True
def enforce_str (self, _value) :
if not isinstance (_value, str) :
raise EnforcementError (self.enforce_str, value = _value, value_type = type (_value))
return True
def enforce_str_or_none (self, _value) :
if _value is not None and not isinstance (_value, str) :
raise EnforcementError (self.enforce_str_or_none, value = _value, value_type = type (_value))
return True
def enforce_unicode (self, _value) :
if not isinstance (_value, unicode) :
raise EnforcementError (self.enforce_unicode, value = _value, value_type = type (_value))
return True
def enforce_unicode_or_none (self, _value) :
if _value is not None and not isinstance (_value, unicode) :
raise EnforcementError (self.enforce_unicode_or_none, value = _value, value_type = type (_value))
return True
def enforce_tuple (self, _value, _length = None) :
if not isinstance (_value, tuple) or (_length is not None and len (_value) != _length) :
raise EnforcementError (self.enforce_tuple, value = _value, value_type = type (_value), length = _length)
return True
def enforce_tuple_or_none (self, _value, _length = None) :
if _value is not None and (not isinstance (_value, tuple) or (_length is not None and len (_value) != _length)) :
raise EnforcementError (self.enforce_tuple_or_none, value = _value, value_type = type (_value), length = _length)
return True
def enforce_list (self, _value, _length = None) :
if not isinstance (_value, list) or (_length is not None and len (_value) != _length) :
raise EnforcementError (self.enforce_list, value = _value, value_type = type (_value), length = _length)
return True
def enforce_not_empty_list (self, _value) :
if not isinstance (_value, list) or len (_value) == 0 :
raise EnforcementError (self.enforce_not_empty_list, value = _value, value_type = type (_value))
return True
def enforce_list_or_none (self, _value, _length = None) :
if _value is not None and (not isinstance (_value, list) or (_length is not None and len (_value) != _length)) :
raise EnforcementError (self.enforce_list_or_none, value = _value, value_type = type (_value), length = _length)
return True
def enforce_not_empty_list_or_none (self, _value) :
if _value is not None and (not isinstance (_value, list) or len (_value) == 0) :
raise EnforcementError (self.enforce_not_empty_list_or_none, value = _value, value_type = type (_value))
return True
def enforce_set (self, _value) :
if not isinstance (_value, set) :
raise EnforcementError (self.enforce_set, value = _value, value_type = type (_value))
return True
def enforce_set_or_none (self, _value) :
if _value is not None and not isinstance (_value, set) :
raise EnforcementError (self.enforce_set_or_none, value = _value, value_type = type (_value))
return True
def enforce_dict (self, _value) :
if not isinstance (_value, dict) :
raise EnforcementError (self.enforce_dict, value = _value, value_type = type (_value))
return True
def enforce_dict_or_none (self, _value) :
if _value is not None and not isinstance (_value, dict) :
raise EnforcementError (self.enforce_dict_or_none, value = _value, value_type = type (_value))
return True
def enforce_tuple_or_list (self, _value) :
if not isinstance (_value, tuple) and not isinstance (_value, list) :
raise EnforcementError (self.enforce_tuple_or_list, value = _value, value_type = type (_value))
return True
def enforce_tuple_or_list_or_none (self, _value) :
if _value is not None and not isinstance (_value, tuple) and not isinstance (_value, list) :
raise EnforcementError (self.enforce_tuple_or_list_or_none, value = _value, value_type = type (_value))
return True
def enforce_type (self, _value) :
if type (_value) is not type :
raise EnforcementError (self.enforce_type, value = _value, value_type = type (_value))
return True
def enforce_type_or_none (self, _value) :
if _value is not None and type (_value) is not type :
raise EnforcementError (self.enforce_type_or_none, value = _value, value_type = type (_value))
return True
def enforce_class (self, _value) :
if type (_value) is not types.ClassType :
raise EnforcementError (self.enforce_class, value = _value, value_type = type (_value))
return True
def enforce_class_or_none (self, _value) :
if _value is not None and type (_value) is not types.ClassType :
raise EnforcementError (self.enforce_class_or_none, value = _value, value_type = type (_value))
return True
def enforce_type_or_class (self, _value) :
if type (_value) is not type and type (_value) is not types.ClassType :
raise EnforcementError (self.enforce_type_or_class, value = _value, value_type = type (_value))
return True
def enforce_type_or_class_or_none (self, _value) :
if _value is not None and type (_value) is not type and type (_value) is not types.ClassType :
raise EnforcementError (self.enforce_type_or_class_or_none, value = _value, value_type = type (_value))
return True
def enforce_in (self, _collection, _value) :
if _value not in _collection :
raise EnforcementError (self.enforce_in, collection = _collection, value = _value)
return True
def enforce_in_all (self, _collection, _values) :
for _value in _values :
if _value not in _collection :
raise EnforcementError (self.enforce_in_all, collection = _collection, values = _values)
return True
def enforce_in_any (self, _collection, _values) :
for _value in _values :
if _value in _collection :
return True
raise EnforcementError (self.enforce_in_any, collection = _collection, values = _values)
def enforce_not_in (self, _collection, _value) :
if _value in _collection :
raise EnforcementError (self.enforce_not_in, collection = _collection, value = _value)
return True
def enforce_not_in_all (self, _collection, _values) :
for _value in _values :
if _value in _collection :
raise EnforcementError (self.enforce_not_in_all, collection = _collection, values = _values)
return True
def enforce_not_in_any (self, _collection, _values) :
for _value in _values :
if _value not in _collection :
return True
raise EnforcementError (self.enforce_not_in_any, collection = _collection, values = _values)
def enforce_empty (self, _value) :
if len (_value) != 0 :
raise EnforcementError (self.enforce_empty, value = _value, value_type = type (_value))
return True
def enforce_not_empty (self, _value) :
if len (_value) == 0 :
raise EnforcementError (self.enforce_not_empty, value = _value, value_type = type (_value))
return True
def enforce_eq (self, _value_a, _value_b) :
if not (_value_a == _value_b) :
raise EnforcementError (self.enforce_eq, value_a = _value_a, value_b = _value_b)
return True
def enforce_eq_or_none (self, _value_a, _value_b) :
if _value_a is not None and _value_b is not None and not (_value_a == _value_b) :
raise EnforcementError (self.enforce_eq_or_none, value_a = _value_a, value_b = _value_b)
return True
def enforce_ne (self, _value_a, _value_b) :
if not (_value_a != _value_b) :
raise EnforcementError (self.enforce_ne, value_a = _value_a, value_b = _value_b)
return True
def enforce_ne_or_none (self, _value_a, _value_b) :
if _value_a is not None and _value_b is not None and not (_value_a != _value_b) :
raise EnforcementError (self.enforce_ne_or_none, value_a = _value_a, value_b = _value_b)
return True
def enforce_lt (self, _value_a, _value_b) :
if not (_value_a < _value_b) :
raise EnforcementError (self.enforce_lt, value_a = _value_a, value_b = _value_b)
return True
def enforce_le (self, _value_a, _value_b) :
if not (_value_a <= _value_b) :
raise EnforcementError (self.enforce_le, value_a = _value_a, value_b = _value_b)
return True
def enforce_gt (self, _value_a, _value_b) :
if not (_value_a > _value_b) :
raise EnforcementError (self.enforce_gt, value_a = _value_a, value_b = _value_b)
return True
def enforce_ge (self, _value_a, _value_b) :
if not (_value_a >= _value_b) :
raise EnforcementError (self.enforce_ge, value_a = _value_a, value_b = _value_b)
return True
def enforce_zero (self, _value) :
if _value != 0 :
raise EnforcementError (self.enforce_zero, value = _value)
return True
def enforce_callable (self, _callable, _argument_count = None) :
if not callable (_callable) or (_argument_count is not None and not callables.match_callable_spec_argument_count (_callable, _argument_count)) :
raise EnforcementError (self.enforce_callable, callable = _callable, argument_count = _argument_count)
return True
def enforce_callable_all (self, _callables, _argument_count = None) :
for _callable in _callables :
if not callable (_callable) or (_argument_count is not None and not callables.match_callable_spec_argument_count (_callable, _argument_count)) :
raise EnforcementError (self.enforce_callable_all, callables = _callables, argument_count = _argument_count)
return True
def enforce_callable_any (self, _callables, _argument_count = None) :
for _callable in _callables :
if callable (_callable) and (_argument_count is None or callables.match_callable_spec_argument_count (_callable, _argument_count)) :
return True
raise EnforcementError (self.enforce_callable_any, callables = _callables, argument_count = _argument_count)
def enforce_callable_or_none (self, _callable, _argument_count = None) :
if _callable is not None and (not callable (_callable) or (_argument_count is not None and not callables.match_callable_spec_argument_count (_callable, _argument_count))) :
raise EnforcementError (self.enforce_callable_or_none, callable = _callable, argument_count = _argument_count)
return True
def enforce_callable_or_none_all (self, _callables, _argument_count = None) :
for _callable in _callables :
if _callable is not None and (not callable (_callable) or (_argument_count is not None and not callables.match_callable_spec_argument_count (_callable, _argument_count))) :
raise EnforcementError (self.enforce_callable_or_none_all, callables = _callables, argument_count = _argument_count)
return True
def enforce_callable_or_none_any (self, _callables, _argument_count = None) :
for _callable in _callables :
if _callable is None or (callable (_callable) and (_argument_count is None or callables.match_callable_spec_argument_count (_callable, _argument_count))) :
return True
raise EnforcementError (self.enforce_callable_or_none_any, callables = _callables, argument_count = _argument_count)
def enforce_callable_spec (self, _callable, _argument_count = None) :
if (not isinstance (_callable, tuple) and not callable (_callable)) or (_argument_count is not None and not callables.match_callable_spec_argument_count (_callable, _argument_count)) :
raise EnforcementError (self.enforce_callable_spec, callable = _callable, argument_count = _argument_count)
return True
def enforce_callable_spec_all (self, _callables, _argument_count = None) :
for _callable in _callables :
if (not isinstance (_callable, tuple) and not callable (_callable)) or (_argument_count is not None and not callables.match_callable_spec_argument_count (_callable, _argument_count)) :
raise EnforcementError (self.enforce_callable_spec_all, callables = _callables, argument_count = _argument_count)
return True
def enforce_callable_spec_any (self, _callables, _argument_count = None) :
for _callable in _callables :
if (isinstance (_callable, tuple) or callable (_callable)) and (_argument_count is None or callables.match_callable_spec_argument_count (_callable, _argument_count)) :
return True
raise EnforcementError (self.enforce_callable_spec_any, callables = _callables, argument_count = _argument_count)
def enforce_callable_spec_or_none (self, _callable, _argument_count = None) :
if _callable is not None and ((not isinstance (_callable, tuple) and not callable (_callable)) or (_argument_count is not None and not callables.match_callable_spec_argument_count (_callable, _argument_count))) :
raise EnforcementError (self.enforce_callable_spec_or_none, callable = _callable, argument_count = _argument_count)
return True
def enforce_callable_spec_or_none_all (self, _callables, _argument_count = None) :
for _callable in _callables :
if _callable is not None and ((not isinstance (_callable, tuple) and not callable (_callable)) or (_argument_count is not None and not callables.match_callable_spec_argument_count (_callable, _argument_count))) :
raise EnforcementError (self.enforce_callable_spec_or_none_all, callables = _callables, argument_count = _argument_count)
return True
def enforce_callable_spec_or_none_any (self, _callables, _argument_count = None) :
for _callable in _callables :
if _callable is not None or ((isinstance (_callable, tuple) or not callable (_callable)) and (_argument_count is None or callables.match_callable_spec_argument_count (_callable, _argument_count))) :
return True
raise EnforcementError (self.enforce_callable_spec_or_none_any, callables = _callables, argument_count = _argument_count)
def enforce_re_match (self, _value, _re) :
if not isinstance (_value, basestring) or _re.match (_value) is None :
raise EnforcementError (self.enforce_re_match, value = _value, re = _re)
return True
def enforce_re_match_all (self, _values, _re) :
for _value in _values :
if not isinstance (_value, basestring) or _re.match (_value) is None :
raise EnforcementError (self.enforce_re_match_all, values = _values, re = _re)
return True
def enforce_re_match_or_none (self, _value, _re) :
if _value is not None and _re.match (_value) is None :
raise EnforcementError (self.enforce_re_match_or_none, value = _value, re = _re)
return True
def enforce_hasattr (self, _object, _attr) :
if not hasattr (_object, _attr) :
raise EnforcementError (self.enforce_hasattr, object = _object, attr = _attr)
return True
def enforce_hasattr_all (self, _object, _attrs) :
for _attr in _attrs :
if not hasattr (_object, _attr) :
raise EnforcementError (self.enforce_hasattr_all, object = _object, attrs = _attrs)
return True
def enforce_not_hasattr (self, _object, _attr) :
if hasattr (_object, _attr) :
raise EnforcementError (self.enforce_not_hasattr, object = _object, attr = _attr)
return True
def enforce_not_hasattr_all (self, _object, _attrs) :
for _attr in _attrs :
if hasattr (_object, _attr) :
raise EnforcementError (self.enforce_not_hasattr_all, object = _object, attrs = _attrs)
return True
def enforce_all (self, _enforcements) :
for _enforcement in _enforcements :
try :
_enforcement[0] (* _enforcement[1:])
except Exception :
raise
return True
def enforce_any (self, _enforcements) :
for _enforcement in _enforcements :
try :
_enforcement[0] (* _enforcement[1:])
return True
except Exception :
pass
raise EnforcementError (self.enforce_any, enforcements = _enforcements)
def enforce_one (self, _enforcements) :
_succeeded = 0
for _enforcement in _enforcements :
try :
_enforcement[0] (* _enforcement[1:])
_succeeded += 1
except Exception :
pass
if _succeeded == 1 :
return True
raise EnforcementError (self.enforce_one, enforcements = _enforcements)
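	# Illustrative (hypothetical) usage of the combinators above:
	#   _enforcer.enforce_all ([
	#           (_enforcer.enforce_string, _name),
	#           (_enforcer.enforce_positive_int, _count)])
	# enforce_all succeeds only if every listed enforcement succeeds,
	# enforce_any if at least one succeeds, and enforce_one if exactly one does.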
def enforcement_raise (self) :
raise EnforcementError (self.enforcement_raise)
class EnforcementError (outcomes.Failure) :
def __init__ (self, _enforcement, ** _fields) :
outcomes.Failure.__init__ (self, 'enforcement broken', None, _fields)
def enforcer_ef (_owner, parent = None) :
if _enforcer is not None :
return _enforcer
return Enforcer ()
_enforcer = None
_enforcer = enforcer_ef (None)
| gpl-3.0 | -7,422,290,239,341,858,000 | 29.842037 | 215 | 0.673989 | false |
linearregression/mpld3 | mpld3/__init__.py | 20 | 1109 | """
Interactive D3 rendering of matplotlib images
=============================================
Functions: General Use
----------------------
:func:`fig_to_html`
convert a figure to an html string
:func:`fig_to_dict`
convert a figure to a dictionary representation
:func:`show`
launch a web server to view an d3/html figure representation
:func:`save_html`
save a figure to an html file
:func:`save_json`
save a JSON representation of a figure to file
Functions: IPython Notebook
---------------------------
:func:`display`
display a figure in an IPython notebook
:func:`enable_notebook`
enable automatic D3 display of figures in the IPython notebook.
:func:`disable_notebook`
    disable automatic D3 display of figures in the IPython notebook.
"""
__all__ = ["__version__",
"fig_to_html", "fig_to_dict", "fig_to_d3", "display_d3",
"display", "show_d3", "show", "save_html", "save_json",
"enable_notebook", "disable_notebook", "plugins", "urls"]
from .__about__ import __version__
from . import plugins
from . import urls
from ._display import *
| bsd-3-clause | -8,354,839,745,295,577,000 | 24.790698 | 68 | 0.620379 | false |
edgedb/edgedb | edb/edgeql/compiler/inference/multiplicity.py | 1 | 21233 | #
# This source file is part of the EdgeDB open source project.
#
# Copyright 2020-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""EdgeQL multiplicity inference.
A top-down multiplicity inferer that traverses the full AST populating
multiplicity fields and performing multiplicity checks.
"""
from __future__ import annotations
from typing import *
import functools
from edb import errors
from edb.edgeql import qltypes
from edb.schema import pointers as s_pointers
from edb.ir import ast as irast
from edb.ir import typeutils as irtyputils
from . import cardinality
from . import context as inference_context
ZERO = qltypes.Multiplicity.ZERO
ONE = qltypes.Multiplicity.ONE
MANY = qltypes.Multiplicity.MANY
def _max_multiplicity(
args: Iterable[qltypes.Multiplicity]
) -> qltypes.Multiplicity:
    # Coincidentally, the lexical order of the multiplicity names is the
    # opposite of the order of the multiplicity values.
arg_list = list(args)
if not arg_list:
return ZERO
else:
return min(arg_list)
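# A quick illustration of the trick above, given that Multiplicity is a
# string-valued enum: lexically 'MANY' < 'ONE' < 'ZERO', so
# min([ONE, MANY, ZERO]) evaluates to MANY -- the maximum multiplicity.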
def _common_multiplicity(
args: Iterable[irast.Base],
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
return _max_multiplicity(
infer_multiplicity(a, scope_tree=scope_tree, ctx=ctx) for a in args)
@functools.singledispatch
def _infer_multiplicity(
ir: irast.Expr,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# return MANY
raise ValueError(f'infer_multiplicity: cannot handle {ir!r}')
@_infer_multiplicity.register
def __infer_none(
ir: None,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# Here for debugging purposes.
raise ValueError('invalid infer_multiplicity(None, schema) call')
@_infer_multiplicity.register
def __infer_statement(
ir: irast.Statement,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
return infer_multiplicity(
ir.expr, scope_tree=scope_tree, ctx=ctx)
@_infer_multiplicity.register
def __infer_empty_set(
ir: irast.EmptySet,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
return ZERO
@_infer_multiplicity.register
def __infer_type_introspection(
ir: irast.TypeIntrospection,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# TODO: The result is always ONE, but we still want to actually
# introspect the expression. Unfortunately, currently the
# expression is not available at this stage.
#
# E.g. consider:
# WITH X := Foo {bar := {Bar, Bar}}
# SELECT INTROSPECT TYPEOF X.bar;
return ONE
def _infer_shape(
ir: irast.Set,
*,
is_mutation: bool=False,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> None:
for shape_set, _ in ir.shape:
new_scope = cardinality._get_set_scope(shape_set, scope_tree, ctx=ctx)
if shape_set.expr and shape_set.rptr:
expr_mult = infer_multiplicity(
shape_set.expr, scope_tree=new_scope, ctx=ctx)
ptrref = shape_set.rptr.ptrref
if expr_mult is MANY and irtyputils.is_object(ptrref.out_target):
raise errors.QueryError(
f'possibly not a strict set returned by an '
f'expression for a computable '
f'{ptrref.shortname.name}.',
hint=(
f'Use DISTINCT for the entire computable expression '
f'to resolve this.'
),
context=shape_set.context
)
_infer_shape(
shape_set, is_mutation=is_mutation, scope_tree=scope_tree, ctx=ctx)
def _infer_set(
ir: irast.Set,
*,
is_mutation: bool=False,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
result = _infer_set_inner(
ir, is_mutation=is_mutation, scope_tree=scope_tree, ctx=ctx)
ctx.inferred_multiplicity[ir, scope_tree] = result
# The shape doesn't affect multiplicity, but requires validation.
_infer_shape(ir, is_mutation=is_mutation, scope_tree=scope_tree, ctx=ctx)
return result
def _infer_set_inner(
ir: irast.Set,
*,
is_mutation: bool=False,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
rptr = ir.rptr
new_scope = cardinality._get_set_scope(ir, scope_tree, ctx=ctx)
if rptr is not None:
# Validate the source
infer_multiplicity(rptr.source, scope_tree=new_scope, ctx=ctx)
if ir.expr:
expr_mult = infer_multiplicity(ir.expr, scope_tree=new_scope, ctx=ctx)
if rptr is not None:
rptrref = rptr.ptrref
if isinstance(rptr.ptrref, irast.TupleIndirectionPointerRef):
# All bets are off for tuple elements.
return MANY
elif not irtyputils.is_object(ir.typeref):
# This is not an expression and is some kind of scalar, so
# multiplicity cannot be guaranteed to be ONE (most scalar
# expressions don't have an implicit requirement to be sets)
# unless we also have an exclusive constraint.
if rptr is not None:
schema = ctx.env.schema
# We should only have some kind of path terminating in a
# property here.
assert isinstance(rptrref, irast.PointerRef)
ptr = schema.get_by_id(rptrref.id, type=s_pointers.Pointer)
if ptr.is_exclusive(schema):
# Got an exclusive constraint
return ONE
return MANY
else:
# This is some kind of a link at the end of a path.
# Therefore the target is a proper set.
return ONE
elif ir.expr is not None:
return expr_mult
else:
# Evidently this is not a pointer, expression, or a scalar.
# This is an object type and therefore a proper set.
return ONE
@_infer_multiplicity.register
def __infer_func_call(
ir: irast.FunctionCall,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# If the function returns a set (for any reason), all bets are off
# and the maximum multiplicity cannot be inferred.
card = cardinality.infer_cardinality(
ir, scope_tree=scope_tree, ctx=ctx)
# We still want to validate the multiplicity of the arguments, though.
for arg in ir.args:
infer_multiplicity(arg.expr, scope_tree=scope_tree, ctx=ctx)
if card is not None and card.is_single():
return ONE
elif str(ir.func_shortname) == 'std::enumerate':
# Technically the output of enumerate is always of
# multiplicity ONE because it's a set of tuples with first
# elements being guaranteed to be distinct.
return ONE
else:
return MANY
@_infer_multiplicity.register
def __infer_oper_call(
ir: irast.OperatorCall,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
mult = []
cards = []
for arg in ir.args:
cards.append(
cardinality.infer_cardinality(
arg.expr, scope_tree=scope_tree, ctx=ctx))
mult.append(
infer_multiplicity(
arg.expr, scope_tree=scope_tree, ctx=ctx))
op_name = str(ir.func_shortname)
if op_name == 'std::UNION':
# UNION will produce multiplicity MANY unless most or all of
# the elements multiplicity is ZERO (from an empty set).
result = ZERO
for m in mult:
if m is ONE and result is ZERO:
result = m
elif m is ONE and result is not ZERO:
return MANY
elif m is MANY:
return MANY
return result
elif op_name == 'std::DISTINCT':
if mult[0] is ZERO:
return ZERO
else:
return ONE
elif op_name == 'std::IF':
# If the cardinality of the condition is more than ONE, then
# the multiplicity cannot be inferred.
if cards[1].is_single():
# Now it's just a matter of the multiplicity of the
# possible results.
return _max_multiplicity((mult[0], mult[2]))
else:
return MANY
else:
# The rest of the operators (other than UNION, DISTINCT, or
# IF..ELSE). We can ignore the SET OF args because the results
# are actually proportional to the element-wise args in our
# operators.
result = _max_multiplicity(mult)
if result is MANY:
return result
# Even when arguments are of multiplicity ONE, we cannot
# exclude the possibility of the result being of multiplicity
# MANY. We need to check that at most one argument has
# cardinality more than ONE.
if len([card for card in cards if card.is_multi()]) > 1:
return MANY
else:
return result
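# An informal example of the UNION branch above: `A UNION B` with both
# operands of multiplicity ONE still infers MANY, since the operands may
# overlap; ONE survives only when every other operand is statically empty.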
@_infer_multiplicity.register
def __infer_const(
ir: irast.BaseConstant,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
return ONE
@_infer_multiplicity.register
def __infer_param(
ir: irast.Parameter,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
return ONE
@_infer_multiplicity.register
def __infer_const_set(
ir: irast.ConstantSet,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
if len(ir.elements) == len({el.value for el in ir.elements}):
return ONE
else:
return MANY
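# For instance, the constant set {1, 2, 3} infers ONE (all element values
# are distinct), while {1, 1, 2} infers MANY.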
@_infer_multiplicity.register
def __infer_typecheckop(
ir: irast.TypeCheckOp,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# Unless this is a singleton, the multiplicity cannot be assumed to be ONE.
card = cardinality.infer_cardinality(
ir, scope_tree=scope_tree, ctx=ctx)
if card is not None and card.is_single():
return ONE
else:
return MANY
@_infer_multiplicity.register
def __infer_typecast(
ir: irast.TypeCast,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
return infer_multiplicity(
ir.expr, scope_tree=scope_tree, ctx=ctx,
)
def _infer_stmt_multiplicity(
ir: irast.FilteredStmt,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
result = infer_multiplicity(
ir.subject if isinstance(ir, irast.MutatingStmt) else ir.result,
scope_tree=scope_tree,
ctx=ctx,
)
# WITH block bindings need to be validated, they don't have to
# have multiplicity ONE, but their sub-expressions must be valid.
#
# Inferring how the FILTER clause affects multiplicity is in
# general impossible, but we still want to ensure that the FILTER
# expression has valid multiplicity.
for part in ir.bindings + [ir.where]:
if part:
infer_multiplicity(part, scope_tree=scope_tree, ctx=ctx)
return result
def _infer_for_multiplicity(
ir: irast.SelectStmt,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
assert ir.iterator_stmt is not None
itexpr = ir.iterator_stmt.expr
if isinstance(ir.result.expr, irast.SelectStmt):
union = ir.result.expr
if (isinstance(union.where, irast.Set) and
isinstance(union.where.expr, irast.OperatorCall) and
str(union.where.expr.func_shortname) == 'std::='):
op = union.where.expr
left, right = (a.expr for a in op.args)
# The iterator set may be wrapped in an `enumerate`, this
# requires different handling.
has_enumerate = (
isinstance(itexpr, irast.SelectStmt) and
isinstance(itfn := itexpr.result.expr, irast.FunctionCall) and
str(itfn.func_shortname) == 'std::enumerate'
)
# First make sure that the cardinality of the FILTER
            # expression is no more than 1. Then make sure both
# operands are paths.
if union.where_card.is_single():
it = None
if left.rptr is not None:
it = right
elif right.rptr is not None:
it = left
if it is not None:
if has_enumerate:
assert isinstance(itfn, irast.FunctionCall)
enumerate_mult = infer_multiplicity(
itfn.args[0].expr, scope_tree=scope_tree, ctx=ctx,
)
if (
enumerate_mult is ONE
and it.rptr is not None
and isinstance(
it.rptr,
irast.TupleIndirectionPointer
)
# Tuple comes from the iterator set
and it.rptr.source.expr is itexpr
# the indirection is accessing element 1
and str(it.rptr.ptrref.name) == '__tuple__::1'
):
return ONE
elif (it.is_binding and it.expr is itexpr):
return ONE
elif isinstance(ir.result.expr, irast.InsertStmt):
# A union of inserts always has multiplicity ONE
return ONE
return MANY
@_infer_multiplicity.register
def __infer_select_stmt(
ir: irast.SelectStmt,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
result = _infer_stmt_multiplicity(ir, scope_tree=scope_tree, ctx=ctx)
itmult = None
if ir.iterator_stmt:
        # If this is a FOR, then there's a common pattern that can be
        # detected, and its multiplicity is ONE. Otherwise it
        # cannot be reliably inferred.
#
# The pattern is: FOR x IN {<set of multiplicity ONE>} UNION
        # (SELECT ... FILTER .prop = x). As long as the FILTER has just
# a single .prop = x expression, this is going to be a bunch
# of disjoint unions and the final multiplicity will be ONE.
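        # e.g. (names illustrative):
        #     FOR x IN {1, 2, 3} UNION (SELECT Obj FILTER .num = x)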
itmult = infer_multiplicity(
ir.iterator_stmt, scope_tree=scope_tree, ctx=ctx,
)
# OFFSET, LIMIT and ORDER BY have already been validated to be
# singletons, but their sub-expressions (if any) still need to be
# validated.
for part in [ir.limit, ir.offset] + [sort.expr for sort in ir.orderby]:
if part:
new_scope = cardinality._get_set_scope(part, scope_tree, ctx=ctx)
infer_multiplicity(part, scope_tree=new_scope, ctx=ctx)
if itmult is not None:
if itmult is ONE:
return _infer_for_multiplicity(
ir, scope_tree=scope_tree, ctx=ctx)
return MANY
else:
return result
@_infer_multiplicity.register
def __infer_insert_stmt(
ir: irast.InsertStmt,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# INSERT will always return a proper set, but we still want to
# process the sub-expressions.
infer_multiplicity(
ir.subject, is_mutation=True, scope_tree=scope_tree, ctx=ctx
)
new_scope = cardinality._get_set_scope(ir.result, scope_tree, ctx=ctx)
infer_multiplicity(
ir.result, is_mutation=True, scope_tree=new_scope, ctx=ctx
)
if ir.on_conflict:
for part in [ir.on_conflict.select_ir, ir.on_conflict.else_ir]:
if part:
infer_multiplicity(part, scope_tree=scope_tree, ctx=ctx)
return ONE
@_infer_multiplicity.register
def __infer_update_stmt(
ir: irast.UpdateStmt,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# Presumably UPDATE will always return a proper set, even if it's
# fed something with higher multiplicity, but we still want to
# process the expression being updated.
infer_multiplicity(
ir.result, is_mutation=True, scope_tree=scope_tree, ctx=ctx,
)
result = _infer_stmt_multiplicity(ir, scope_tree=scope_tree, ctx=ctx)
if result is ZERO:
return ZERO
else:
return ONE
@_infer_multiplicity.register
def __infer_delete_stmt(
ir: irast.DeleteStmt,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# Presumably DELETE will always return a proper set, even if it's
# fed something with higher multiplicity, but we still want to
# process the expression being deleted.
infer_multiplicity(
ir.result, is_mutation=True, scope_tree=scope_tree, ctx=ctx,
)
result = _infer_stmt_multiplicity(ir, scope_tree=scope_tree, ctx=ctx)
if result is ZERO:
return ZERO
else:
return ONE
@_infer_multiplicity.register
def __infer_group_stmt(
ir: irast.GroupStmt,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
raise NotImplementedError
@_infer_multiplicity.register
def __infer_slice(
ir: irast.SliceIndirection,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# Slice indirection multiplicity is guaranteed to be ONE as long
# as the cardinality of this expression is at most one, otherwise
    # the results of slice indirection can contain values with
# multiplicity > 1.
card = cardinality.infer_cardinality(
ir, scope_tree=scope_tree, ctx=ctx)
if card is not None and card.is_single():
return ONE
else:
return MANY
@_infer_multiplicity.register
def __infer_index(
ir: irast.IndexIndirection,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
# Index indirection multiplicity is guaranteed to be ONE as long
# as the cardinality of this expression is at most one, otherwise
# the results of index indirection can contain values with
# multiplicity > 1.
card = cardinality.infer_cardinality(
ir, scope_tree=scope_tree, ctx=ctx)
if card is not None and card.is_single():
return ONE
else:
return MANY
@_infer_multiplicity.register
def __infer_array(
ir: irast.Array,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
return _common_multiplicity(ir.elements, scope_tree=scope_tree, ctx=ctx)
@_infer_multiplicity.register
def __infer_tuple(
ir: irast.Tuple,
*,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
return _common_multiplicity(
[el.val for el in ir.elements], scope_tree=scope_tree, ctx=ctx
)
def infer_multiplicity(
ir: irast.Base,
*,
is_mutation: bool=False,
scope_tree: irast.ScopeTreeNode,
ctx: inference_context.InfCtx,
) -> qltypes.Multiplicity:
result = ctx.inferred_multiplicity.get((ir, scope_tree))
if result is not None:
return result
# We can use cardinality as a helper in determining multiplicity,
# since singletons have multiplicity one.
card = cardinality.infer_cardinality(
ir, is_mutation=is_mutation, scope_tree=scope_tree, ctx=ctx)
if isinstance(ir, irast.Set):
result = _infer_set(
ir, is_mutation=is_mutation, scope_tree=scope_tree, ctx=ctx,
)
else:
result = _infer_multiplicity(ir, scope_tree=scope_tree, ctx=ctx)
if card is not None and card.is_single():
# We've validated multiplicity, so now we can just override it
# safely.
result = ONE
if result not in {ZERO, ONE, MANY}:
raise errors.QueryError(
'could not determine the multiplicity of '
'set produced by expression',
context=ir.context)
ctx.inferred_multiplicity[ir, scope_tree] = result
return result
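# A minimal caller-side sketch (names assumed for illustration):
#
#     mult = infer_multiplicity(stmt_ir, scope_tree=scope, ctx=inf_ctx)
#     if mult is not ONE:
#         raise errors.QueryError('expected a proper set')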
| apache-2.0 | -2,277,188,732,583,119,400 | 29.683526 | 79 | 0.62822 | false |
luzheqi1987/nova-annotation | nova/vnc/__init__.py | 38 | 1810 | #!/usr/bin/env python
# Copyright (c) 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for VNC Proxying."""
from oslo.config import cfg
vnc_opts = [
cfg.StrOpt('novncproxy_base_url',
default='http://127.0.0.1:6080/vnc_auto.html',
help='Location of VNC console proxy, in the form '
'"http://127.0.0.1:6080/vnc_auto.html"'),
cfg.StrOpt('xvpvncproxy_base_url',
default='http://127.0.0.1:6081/console',
help='Location of nova xvp VNC console proxy, in the form '
'"http://127.0.0.1:6081/console"'),
cfg.StrOpt('vncserver_listen',
default='127.0.0.1',
help='IP address on which instance vncservers should listen'),
cfg.StrOpt('vncserver_proxyclient_address',
default='127.0.0.1',
help='The address to which proxy clients '
'(like nova-xvpvncproxy) should connect'),
cfg.BoolOpt('vnc_enabled',
default=True,
help='Enable VNC related features'),
cfg.StrOpt('vnc_keymap',
default='en-us',
help='Keymap for VNC'),
]
CONF = cfg.CONF
CONF.register_opts(vnc_opts)
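# Once registered, each option is available as an attribute of CONF,
# e.g. CONF.vnc_enabled or CONF.novncproxy_base_url.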
| apache-2.0 | -7,956,805,474,462,706,000 | 37.510638 | 77 | 0.614365 | false |
meh/servo | tests/wpt/web-platform-tests/dom/nodes/Document-createElement-namespace-tests/generate.py | 226 | 2091 | #!/usr/bin/python
import os
import sys
THIS_NAME = "generate.py"
# Note: these lists must be kept in sync with the lists in
# Document-createElement-namespace.html, and this script must be run whenever
# the lists are updated. (We could keep the lists in a shared JSON file, but
# seems like too much effort.)
FILES = (
("empty", ""),
("minimal_html", "<!doctype html><title></title>"),
("xhtml", '<html xmlns="http://www.w3.org/1999/xhtml"></html>'),
("svg", '<svg xmlns="http://www.w3.org/2000/svg"></svg>'),
("mathml", '<mathml xmlns="http://www.w3.org/1998/Math/MathML"></mathml>'),
("bare_xhtml", "<html></html>"),
("bare_svg", "<svg></svg>"),
("bare_mathml", "<math></math>"),
("xhtml_ns_removed", """\
<html xmlns="http://www.w3.org/1999/xhtml">
<head><script>
var newRoot = document.createElementNS(null, "html");
document.removeChild(document.documentElement);
document.appendChild(newRoot);
</script></head>
</html>
"""),
("xhtml_ns_changed", """\
<html xmlns="http://www.w3.org/1999/xhtml">
<head><script>
var newRoot = document.createElementNS("http://www.w3.org/2000/svg", "abc");
document.removeChild(document.documentElement);
document.appendChild(newRoot);
</script></head>
</html>
"""),
)
EXTENSIONS = (
"html",
"xhtml",
"xml",
"svg",
# Was not able to get server MIME type working properly :(
#"mml",
)
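# Each (name, contents) pair in FILES is written out once per extension in
# EXTENSIONS (empty.html, empty.xhtml, ...), and every generated file is
# recorded as a support file in MANIFEST.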
def __main__():
if len(sys.argv) > 1:
print "No arguments expected, aborting"
return
if not os.access(THIS_NAME, os.F_OK):
print "Must be run from the directory of " + THIS_NAME + ", aborting"
return
for name in os.listdir("."):
if name == THIS_NAME:
continue
os.remove(name)
manifest = open("MANIFEST", "w")
for name, contents in FILES:
for extension in EXTENSIONS:
f = open(name + "." + extension, "w")
f.write(contents)
f.close()
manifest.write("support " + name + "." + extension + "\n")
manifest.close()
__main__()
| mpl-2.0 | 3,605,091,391,020,475,400 | 26.155844 | 80 | 0.589192 | false |
sbellem/django | django/contrib/contenttypes/models.py | 273 | 7798 | from __future__ import unicode_literals
import warnings
from django.apps import apps
from django.db import models
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
class ContentTypeManager(models.Manager):
use_in_migrations = True
# Cache to avoid re-looking up ContentType objects all over the place.
# This cache is shared by all the get_for_* methods.
_cache = {}
def get_by_natural_key(self, app_label, model):
try:
ct = self.__class__._cache[self.db][(app_label, model)]
except KeyError:
ct = self.get(app_label=app_label, model=model)
self._add_to_cache(self.db, ct)
return ct
def _get_opts(self, model, for_concrete_model):
if for_concrete_model:
model = model._meta.concrete_model
elif model._deferred:
model = model._meta.proxy_for_model
return model._meta
def _get_from_cache(self, opts):
key = (opts.app_label, opts.model_name)
return self.__class__._cache[self.db][key]
def create(self, **kwargs):
if 'name' in kwargs:
del kwargs['name']
warnings.warn(
"ContentType.name field doesn't exist any longer. Please remove it from your code.",
RemovedInDjango110Warning, stacklevel=2)
return super(ContentTypeManager, self).create(**kwargs)
def get_for_model(self, model, for_concrete_model=True):
"""
Returns the ContentType object for a given model, creating the
ContentType if necessary. Lookups are cached so that subsequent lookups
for the same model don't hit the database.
"""
opts = self._get_opts(model, for_concrete_model)
try:
return self._get_from_cache(opts)
except KeyError:
pass
# The ContentType entry was not found in the cache, therefore we
# proceed to load or create it.
try:
try:
# We start with get() and not get_or_create() in order to use
# the db_for_read (see #20401).
ct = self.get(app_label=opts.app_label, model=opts.model_name)
except self.model.DoesNotExist:
# Not found in the database; we proceed to create it. This time we
# use get_or_create to take care of any race conditions.
ct, created = self.get_or_create(
app_label=opts.app_label,
model=opts.model_name,
)
except (OperationalError, ProgrammingError, IntegrityError):
# It's possible to migrate a single app before contenttypes,
# as it's not a required initial dependency (it's contrib!)
# Have a nice error for this.
raise RuntimeError(
"Error creating new content types. Please make sure contenttypes "
"is migrated before trying to migrate apps individually."
)
self._add_to_cache(self.db, ct)
return ct
def get_for_models(self, *models, **kwargs):
"""
Given *models, returns a dictionary mapping {model: content_type}.
"""
for_concrete_models = kwargs.pop('for_concrete_models', True)
# Final results
results = {}
# models that aren't already in the cache
needed_app_labels = set()
needed_models = set()
needed_opts = set()
for model in models:
opts = self._get_opts(model, for_concrete_models)
try:
ct = self._get_from_cache(opts)
except KeyError:
needed_app_labels.add(opts.app_label)
needed_models.add(opts.model_name)
needed_opts.add(opts)
else:
results[model] = ct
if needed_opts:
cts = self.filter(
app_label__in=needed_app_labels,
model__in=needed_models
)
for ct in cts:
model = ct.model_class()
if model._meta in needed_opts:
results[model] = ct
needed_opts.remove(model._meta)
self._add_to_cache(self.db, ct)
for opts in needed_opts:
# These weren't in the cache, or the DB, create them.
ct = self.create(
app_label=opts.app_label,
model=opts.model_name,
)
self._add_to_cache(self.db, ct)
results[ct.model_class()] = ct
return results
def get_for_id(self, id):
"""
Lookup a ContentType by ID. Uses the same shared cache as get_for_model
(though ContentTypes are obviously not created on-the-fly by get_by_id).
"""
try:
ct = self.__class__._cache[self.db][id]
except KeyError:
# This could raise a DoesNotExist; that's correct behavior and will
# make sure that only correct ctypes get stored in the cache dict.
ct = self.get(pk=id)
self._add_to_cache(self.db, ct)
return ct
def clear_cache(self):
"""
Clear out the content-type cache. This needs to happen during database
flushes to prevent caching of "stale" content type IDs (see
django.contrib.contenttypes.management.update_contenttypes for where
this gets called).
"""
self.__class__._cache.clear()
def _add_to_cache(self, using, ct):
"""Insert a ContentType into the cache."""
# Note it's possible for ContentType objects to be stale; model_class() will return None.
# Hence, there is no reliance on model._meta.app_label here, just using the model fields instead.
key = (ct.app_label, ct.model)
self.__class__._cache.setdefault(using, {})[key] = ct
self.__class__._cache.setdefault(using, {})[ct.id] = ct
@python_2_unicode_compatible
class ContentType(models.Model):
app_label = models.CharField(max_length=100)
model = models.CharField(_('python model class name'), max_length=100)
objects = ContentTypeManager()
class Meta:
verbose_name = _('content type')
verbose_name_plural = _('content types')
db_table = 'django_content_type'
unique_together = (('app_label', 'model'),)
def __str__(self):
return self.name
@property
def name(self):
model = self.model_class()
if not model:
return self.model
return force_text(model._meta.verbose_name)
def model_class(self):
"Returns the Python model class for this type of content."
try:
return apps.get_model(self.app_label, self.model)
except LookupError:
return None
def get_object_for_this_type(self, **kwargs):
"""
Returns an object of this type for the keyword arguments given.
Basically, this is a proxy around this object_type's get_object() model
method. The ObjectNotExist exception, if thrown, will not be caught,
so code that calls this method should catch it.
"""
return self.model_class()._base_manager.using(self._state.db).get(**kwargs)
def get_all_objects_for_this_type(self, **kwargs):
"""
Returns all objects of this type for the keyword arguments given.
"""
return self.model_class()._base_manager.using(self._state.db).filter(**kwargs)
def natural_key(self):
return (self.app_label, self.model)
| bsd-3-clause | -2,790,826,641,743,166,000 | 37.413793 | 105 | 0.590536 | false |
popazerty/dvbapp-gui2 | lib/python/Components/config.py | 6 | 54005 | from enigma import getPrevAsciiCode
from Tools.NumericalTextInput import NumericalTextInput
from Tools.Directories import resolveFilename, SCOPE_CONFIG, fileExists
from Components.Harddisk import harddiskmanager
from copy import copy as copy_copy
from os import path as os_path
from time import localtime, strftime
# ConfigElement, the base class of all ConfigElements.
# it stores:
# value the current value, usefully encoded.
# usually a property which retrieves _value,
# and maybe does some reformatting
# _value the value as it's going to be saved in the configfile,
# though still in non-string form.
# this is the object which is actually worked on.
# default the initial value. If _value is equal to default,
# it will not be stored in the config file
# saved_value is a text representation of _value, stored in the config file
#
# and has (at least) the following methods:
# save() stores _value into saved_value,
# (or stores 'None' if it should not be stored)
# load() loads _value from saved_value, or loads
# the default if saved_value is 'None' (default)
# or invalid.
#
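# A minimal custom element, as an illustrative sketch (not part of this
# module): only a default and, for non-string values, overrides of
# tostring()/fromstring() are needed:
#
#   class ConfigHex(ConfigElement):
#       def __init__(self, default = 0):
#           ConfigElement.__init__(self)
#           self.value = self.last_value = self.default = default
#       def tostring(self, value):
#           return "%x" % value
#       def fromstring(self, value):
#           return int(value, 16)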
class ConfigElement(object):
def __init__(self):
self.extra_args = {}
self.saved_value = None
self.save_forced = False
self.last_value = None
self.save_disabled = False
self.__notifiers = None
self.__notifiers_final = None
self.enabled = True
self.callNotifiersOnSaveAndCancel = False
def getNotifiers(self):
if self.__notifiers is None:
self.__notifiers = [ ]
return self.__notifiers
def setNotifiers(self, val):
self.__notifiers = val
notifiers = property(getNotifiers, setNotifiers)
def getNotifiersFinal(self):
if self.__notifiers_final is None:
self.__notifiers_final = [ ]
return self.__notifiers_final
def setNotifiersFinal(self, val):
self.__notifiers_final = val
notifiers_final = property(getNotifiersFinal, setNotifiersFinal)
# you need to override this to do input validation
def setValue(self, value):
self._value = value
self.changed()
def getValue(self):
return self._value
value = property(getValue, setValue)
# you need to override this if self.value is not a string
def fromstring(self, value):
return value
	# you can override this for fancy default handling
def load(self):
sv = self.saved_value
if sv is None:
self.value = self.default
else:
self.value = self.fromstring(sv)
def tostring(self, value):
return str(value)
	# override tostring() above if str(self.value) doesn't work
def save(self):
if self.save_disabled or (self.value == self.default and not self.save_forced):
self.saved_value = None
else:
self.saved_value = self.tostring(self.value)
if self.callNotifiersOnSaveAndCancel:
self.changed()
def cancel(self):
self.load()
if self.callNotifiersOnSaveAndCancel:
self.changed()
def isChanged(self):
sv = self.saved_value
if sv is None and self.value == self.default:
return False
return self.tostring(self.value) != sv
def changed(self):
if self.__notifiers:
for x in self.notifiers:
try:
if self.extra_args[x]:
x(self, self.extra_args[x])
else:
x(self)
except:
x(self)
def changedFinal(self):
if self.__notifiers_final:
for x in self.notifiers_final:
try:
if self.extra_args[x]:
x(self, self.extra_args[x])
else:
x(self)
except:
x(self)
def addNotifier(self, notifier, initial_call = True, immediate_feedback = True, extra_args=None):
if not extra_args: extra_args = []
assert callable(notifier), "notifiers must be callable"
try:
self.extra_args[notifier] = extra_args
except: pass
if immediate_feedback:
self.notifiers.append(notifier)
else:
self.notifiers_final.append(notifier)
# CHECKME:
# do we want to call the notifier
# - at all when adding it? (yes, though optional)
# - when the default is active? (yes)
# - when no value *yet* has been set,
# because no config has ever been read (currently yes)
# (though that's not so easy to detect.
# the entry could just be new.)
if initial_call:
if extra_args:
notifier(self,extra_args)
else:
notifier(self)
def removeNotifier(self, notifier, initial_call = True, immediate_feedback = True):
assert callable(notifier), "notifiers must be callable"
if immediate_feedback:
self.notifiers.remove(notifier)
else:
self.notifiers_final.remove(notifier)
def disableSave(self):
self.save_disabled = True
def __call__(self, selected):
return self.getMulti(selected)
def onSelect(self, session):
pass
def onDeselect(self, session):
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
KEY_LEFT = 0
KEY_RIGHT = 1
KEY_OK = 2
KEY_DELETE = 3
KEY_BACKSPACE = 4
KEY_HOME = 5
KEY_END = 6
KEY_TOGGLEOW = 7
KEY_ASCII = 8
KEY_TIMEOUT = 9
KEY_NUMBERS = range(12, 12+10)
KEY_0 = 12
KEY_9 = 12+9
def getKeyNumber(key):
assert key in KEY_NUMBERS
return key - KEY_0
class choicesList(object): # XXX: we might want a better name for this
LIST_TYPE_LIST = 1
LIST_TYPE_DICT = 2
def __init__(self, choices, type = None):
self.choices = choices
if type is None:
if isinstance(choices, list):
self.type = choicesList.LIST_TYPE_LIST
elif isinstance(choices, dict):
self.type = choicesList.LIST_TYPE_DICT
else:
assert False, "choices must be dict or list!"
else:
self.type = type
def __list__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[0] for x in self.choices]
else:
ret = self.choices.keys()
return ret or [""]
def __iter__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[0] for x in self.choices]
else:
ret = self.choices
return iter(ret or [""])
def __len__(self):
return len(self.choices) or 1
def __getitem__(self, index):
if self.type == choicesList.LIST_TYPE_LIST:
ret = self.choices[index]
if isinstance(ret, tuple):
ret = ret[0]
return ret
return self.choices.keys()[index]
def index(self, value):
try:
return self.__list__().index(value)
except (ValueError, IndexError):
# occurs e.g. when default is not in list
return 0
def __setitem__(self, index, value):
if self.type == choicesList.LIST_TYPE_LIST:
orig = self.choices[index]
if isinstance(orig, tuple):
self.choices[index] = (value, orig[1])
else:
self.choices[index] = value
else:
key = self.choices.keys()[index]
orig = self.choices[key]
del self.choices[key]
self.choices[value] = orig
def default(self):
choices = self.choices
if not choices:
return ""
if self.type is choicesList.LIST_TYPE_LIST:
default = choices[0]
if isinstance(default, tuple):
default = default[0]
else:
default = choices.keys()[0]
return default
class descriptionList(choicesList): # XXX: we might want a better name for this
def __list__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[1] for x in self.choices]
else:
ret = self.choices.values()
return ret or [""]
def __iter__(self):
return iter(self.__list__())
def __getitem__(self, index):
if self.type == choicesList.LIST_TYPE_LIST:
for x in self.choices:
if isinstance(x, tuple):
if x[0] == index:
return str(x[1])
elif x == index:
return str(x)
return str(index) # Fallback!
else:
return str(self.choices.get(index, ""))
def __setitem__(self, index, value):
if self.type == choicesList.LIST_TYPE_LIST:
i = self.index(index)
orig = self.choices[i]
if isinstance(orig, tuple):
self.choices[i] = (orig[0], value)
else:
self.choices[i] = value
else:
self.choices[index] = value
#
# ConfigSelection is a "one of.."-type.
# it has the "choices", usually a list, which contains
# (id, desc)-tuples (or just the ids, in which case the id
# will be used as the description)
#
# all ids MUST be plain strings.
#
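# example (illustrative):
#   config.osd.language = ConfigSelection(
#       choices = [("en_EN", _("English")), ("de_DE", _("German"))],
#       default = "en_EN")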
class ConfigSelection(ConfigElement):
def __init__(self, choices, default = None):
ConfigElement.__init__(self)
self.choices = choicesList(choices)
if default is None:
default = self.choices.default()
self._descr = None
self.default = self._value = self.last_value = default
def setChoices(self, choices, default = None):
self.choices = choicesList(choices)
if default is None:
default = self.choices.default()
self.default = default
if self.value not in self.choices:
self.value = default
def setValue(self, value):
if value in self.choices:
self._value = value
else:
self._value = self.default
self._descr = None
self.changed()
def tostring(self, val):
return val
def getValue(self):
return self._value
def setCurrentText(self, text):
i = self.choices.index(self.value)
self.choices[i] = text
self._descr = self.description[text] = text
self._value = text
value = property(getValue, setValue)
def getIndex(self):
return self.choices.index(self.value)
index = property(getIndex)
# GUI
def handleKey(self, key):
nchoices = len(self.choices)
if nchoices > 1:
i = self.choices.index(self.value)
if key == KEY_LEFT:
self.value = self.choices[(i + nchoices - 1) % nchoices]
elif key == KEY_RIGHT:
self.value = self.choices[(i + 1) % nchoices]
elif key == KEY_HOME:
self.value = self.choices[0]
elif key == KEY_END:
self.value = self.choices[nchoices - 1]
def selectNext(self):
nchoices = len(self.choices)
i = self.choices.index(self.value)
self.value = self.choices[(i + 1) % nchoices]
def getText(self):
if self._descr is not None:
return self._descr
descr = self._descr = self.description[self.value]
if descr:
return _(descr)
return descr
def getMulti(self, selected):
if self._descr is not None:
descr = self._descr
else:
descr = self._descr = self.description[self.value]
if descr:
return "text", _(descr)
return "text", descr
# HTML
def getHTML(self, id):
res = ""
for v in self.choices:
descr = self.description[v]
if self.value == v:
checked = 'checked="checked" '
else:
checked = ''
res += '<input type="radio" name="' + id + '" ' + checked + 'value="' + v + '">' + descr + "</input></br>\n"
return res
def unsafeAssign(self, value):
# setValue does check if value is in choices. This is safe enough.
self.value = value
description = property(lambda self: descriptionList(self.choices.choices, self.choices.type))
# a binary decision.
#
# several customized versions exist for different
# descriptions.
#
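# example (illustrative): config.misc.firstrun = ConfigYesNo(default = True)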
boolean_descriptions = {False: _("false"), True: _("true")}
class ConfigBoolean(ConfigElement):
def __init__(self, default = False, descriptions = boolean_descriptions):
ConfigElement.__init__(self)
self.descriptions = descriptions
self.value = self.last_value = self.default = default
def handleKey(self, key):
if key in (KEY_LEFT, KEY_RIGHT):
self.value = not self.value
elif key == KEY_HOME:
self.value = False
elif key == KEY_END:
self.value = True
def getText(self):
descr = self.descriptions[self.value]
if descr:
return _(descr)
return descr
def getMulti(self, selected):
descr = self.descriptions[self.value]
if descr:
return "text", _(descr)
return "text", descr
def tostring(self, value):
if not value:
return "false"
else:
return "true"
def fromstring(self, val):
if val == "true":
return True
else:
return False
def getHTML(self, id):
if self.value:
checked = ' checked="checked"'
else:
checked = ''
return '<input type="checkbox" name="' + id + '" value="1" ' + checked + " />"
	# this is FLAWED and must be fixed.
def unsafeAssign(self, value):
if value == "1":
self.value = True
else:
self.value = False
def onDeselect(self, session):
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
yes_no_descriptions = {False: _("no"), True: _("yes")}
class ConfigYesNo(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = yes_no_descriptions)
on_off_descriptions = {False: _("off"), True: _("on")}
class ConfigOnOff(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = on_off_descriptions)
enable_disable_descriptions = {False: _("disable"), True: _("enable")}
class ConfigEnableDisable(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = enable_disable_descriptions)
class ConfigDateTime(ConfigElement):
def __init__(self, default, formatstring, increment = 86400):
ConfigElement.__init__(self)
self.increment = increment
self.formatstring = formatstring
self.value = self.last_value = self.default = int(default)
def handleKey(self, key):
if key == KEY_LEFT:
self.value -= self.increment
elif key == KEY_RIGHT:
self.value += self.increment
elif key == KEY_HOME or key == KEY_END:
self.value = self.default
def getText(self):
return strftime(self.formatstring, localtime(self.value))
def getMulti(self, selected):
return "text", strftime(self.formatstring, localtime(self.value))
def fromstring(self, val):
return int(val)
# *THE* mighty config element class
#
# allows you to store/edit a sequence of values.
# can be used for IP-addresses, dates, plain integers, ...
# several helpers exist to ease this up a bit.
#
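# example (illustrative): a four-block 0..255 sequence, as used for IPs:
#   ConfigSequence(seperator = ".", limits = [(0, 255)] * 4,
#       default = [192, 168, 0, 1])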
class ConfigSequence(ConfigElement):
def __init__(self, seperator, limits, default, censor_char = ""):
ConfigElement.__init__(self)
assert isinstance(limits, list) and len(limits[0]) == 2, "limits must be [(min, max),...]-tuple-list"
assert censor_char == "" or len(censor_char) == 1, "censor char must be a single char (or \"\")"
#assert isinstance(default, list), "default must be a list"
#assert isinstance(default[0], int), "list must contain numbers"
#assert len(default) == len(limits), "length must match"
self.marked_pos = 0
self.seperator = seperator
self.limits = limits
self.censor_char = censor_char
self.last_value = self.default = default
self.value = copy_copy(default)
self.endNotifier = None
def validate(self):
max_pos = 0
num = 0
for i in self._value:
max_pos += len(str(self.limits[num][1]))
if self._value[num] < self.limits[num][0]:
self._value[num] = self.limits[num][0]
if self._value[num] > self.limits[num][1]:
self._value[num] = self.limits[num][1]
num += 1
if self.marked_pos >= max_pos:
if self.endNotifier:
for x in self.endNotifier:
x(self)
self.marked_pos = max_pos - 1
if self.marked_pos < 0:
self.marked_pos = 0
def validatePos(self):
if self.marked_pos < 0:
self.marked_pos = 0
total_len = sum([len(str(x[1])) for x in self.limits])
if self.marked_pos >= total_len:
self.marked_pos = total_len - 1
def addEndNotifier(self, notifier):
if self.endNotifier is None:
self.endNotifier = []
self.endNotifier.append(notifier)
def handleKey(self, key):
if key == KEY_LEFT:
self.marked_pos -= 1
self.validatePos()
elif key == KEY_RIGHT:
self.marked_pos += 1
self.validatePos()
elif key == KEY_HOME:
self.marked_pos = 0
self.validatePos()
elif key == KEY_END:
max_pos = 0
num = 0
for i in self._value:
max_pos += len(str(self.limits[num][1]))
num += 1
self.marked_pos = max_pos - 1
self.validatePos()
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
block_len = [len(str(x[1])) for x in self.limits]
total_len = sum(block_len)
pos = 0
blocknumber = 0
block_len_total = [0, ]
for x in block_len:
pos += block_len[blocknumber]
block_len_total.append(pos)
if pos - 1 >= self.marked_pos:
pass
else:
blocknumber += 1
# length of numberblock
number_len = len(str(self.limits[blocknumber][1]))
# position in the block
posinblock = self.marked_pos - block_len_total[blocknumber]
oldvalue = self._value[blocknumber]
olddec = oldvalue % 10 ** (number_len - posinblock) - (oldvalue % 10 ** (number_len - posinblock - 1))
newvalue = oldvalue - olddec + (10 ** (number_len - posinblock - 1) * number)
self._value[blocknumber] = newvalue
self.marked_pos += 1
self.validate()
self.changed()
def genText(self):
value = ""
mPos = self.marked_pos
num = 0
for i in self._value:
			if value: #fixme no leading separator possible
value += self.seperator
if mPos >= len(value) - 1:
mPos += 1
if self.censor_char == "":
value += ("%0" + str(len(str(self.limits[num][1]))) + "d") % i
else:
value += (self.censor_char * len(str(self.limits[num][1])))
num += 1
return value, mPos
def getText(self):
(value, mPos) = self.genText()
return value
def getMulti(self, selected):
(value, mPos) = self.genText()
# only mark cursor when we are selected
# (this code is heavily ink optimized!)
if self.enabled:
return "mtext"[1-selected:], value, [mPos]
else:
return "text", value
def tostring(self, val):
return self.seperator.join([self.saveSingle(x) for x in val])
def saveSingle(self, v):
return str(v)
def fromstring(self, value):
return [int(x) for x in value.split(self.seperator)]
def onDeselect(self, session):
if self.last_value != self._value:
self.changedFinal()
self.last_value = copy_copy(self._value)
ip_limits = [(0,255),(0,255),(0,255),(0,255)]
class ConfigIP(ConfigSequence):
def __init__(self, default, auto_jump = False):
ConfigSequence.__init__(self, seperator = ".", limits = ip_limits, default = default)
self.block_len = [len(str(x[1])) for x in self.limits]
self.marked_block = 0
self.overwrite = True
self.auto_jump = auto_jump
def handleKey(self, key):
if key == KEY_LEFT:
if self.marked_block > 0:
self.marked_block -= 1
self.overwrite = True
elif key == KEY_RIGHT:
if self.marked_block < len(self.limits)-1:
self.marked_block += 1
self.overwrite = True
elif key == KEY_HOME:
self.marked_block = 0
self.overwrite = True
elif key == KEY_END:
self.marked_block = len(self.limits)-1
self.overwrite = True
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
oldvalue = self._value[self.marked_block]
if self.overwrite:
self._value[self.marked_block] = number
self.overwrite = False
else:
oldvalue *= 10
newvalue = oldvalue + number
if self.auto_jump and newvalue > self.limits[self.marked_block][1] and self.marked_block < len(self.limits)-1:
self.handleKey(KEY_RIGHT)
self.handleKey(key)
return
else:
self._value[self.marked_block] = newvalue
if len(str(self._value[self.marked_block])) >= self.block_len[self.marked_block]:
self.handleKey(KEY_RIGHT)
self.validate()
self.changed()
def genText(self):
value = ""
block_strlen = []
for i in self._value:
block_strlen.append(len(str(i)))
if value:
value += self.seperator
value += str(i)
leftPos = sum(block_strlen[:self.marked_block])+self.marked_block
rightPos = sum(block_strlen[:(self.marked_block+1)])+self.marked_block
mBlock = range(leftPos, rightPos)
return value, mBlock
def getMulti(self, selected):
(value, mBlock) = self.genText()
if self.enabled:
return "mtext"[1-selected:], value, mBlock
else:
return "text", value
def getHTML(self, id):
# we definitely don't want leading zeros
return '.'.join(["%d" % d for d in self.value])
mac_limits = [(1,255),(1,255),(1,255),(1,255),(1,255),(1,255)]
class ConfigMAC(ConfigSequence):
def __init__(self, default):
ConfigSequence.__init__(self, seperator = ":", limits = mac_limits, default = default)
class ConfigMacText(ConfigElement, NumericalTextInput):
def __init__(self, default = "", visible_width = False):
ConfigElement.__init__(self)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False)
self.marked_pos = 0
self.allmarked = (default != "")
self.fixed_size = 17
self.visible_width = visible_width
self.offset = 0
		self.overwrite = True
self.help_window = None
self.value = self.last_value = self.default = default
self.useableChars = '0123456789ABCDEF'
def validateMarker(self):
textlen = len(self.text)
if self.marked_pos > textlen-1:
self.marked_pos = textlen-1
elif self.marked_pos < 0:
self.marked_pos = 0
def insertChar(self, ch, pos, owr):
if self.text[pos] == ':':
pos += 1
if owr or self.overwrite:
self.text = self.text[0:pos] + ch + self.text[pos + 1:]
elif self.fixed_size:
self.text = self.text[0:pos] + ch + self.text[pos:-1]
else:
self.text = self.text[0:pos] + ch + self.text[pos:]
def handleKey(self, key):
if key == KEY_LEFT:
self.timeout()
if self.allmarked:
self.marked_pos = len(self.text)
self.allmarked = False
else:
if self.text[self.marked_pos-1] == ':':
self.marked_pos -= 2
else:
self.marked_pos -= 1
elif key == KEY_RIGHT:
self.timeout()
if self.allmarked:
self.marked_pos = 0
self.allmarked = False
else:
if self.marked_pos < (len(self.text)-1):
if self.text[self.marked_pos+1] == ':':
self.marked_pos += 2
else:
self.marked_pos += 1
elif key in KEY_NUMBERS:
owr = self.lastKey == getKeyNumber(key)
newChar = self.getKey(getKeyNumber(key))
self.insertChar(newChar, self.marked_pos, owr)
elif key == KEY_TIMEOUT:
self.timeout()
if self.help_window:
self.help_window.update(self)
if self.text[self.marked_pos] == ':':
self.marked_pos += 1
return
if self.help_window:
self.help_window.update(self)
self.validateMarker()
self.changed()
def nextFunc(self):
self.marked_pos += 1
self.validateMarker()
self.changed()
def getValue(self):
try:
return self.text.encode("utf-8")
except UnicodeDecodeError:
print "Broken UTF8!"
return self.text
def setValue(self, val):
try:
self.text = val.decode("utf-8")
except UnicodeDecodeError:
self.text = val.decode("utf-8", "ignore")
print "Broken UTF8!"
value = property(getValue, setValue)
_value = property(getValue, setValue)
def getText(self):
return self.text.encode("utf-8")
def getMulti(self, selected):
if self.visible_width:
if self.allmarked:
mark = range(0, min(self.visible_width, len(self.text)))
else:
mark = [self.marked_pos-self.offset]
return "mtext"[1-selected:], self.text[self.offset:self.offset+self.visible_width].encode("utf-8")+" ", mark
else:
if self.allmarked:
mark = range(0, len(self.text))
else:
mark = [self.marked_pos]
return "mtext"[1-selected:], self.text.encode("utf-8")+" ", mark
def onSelect(self, session):
self.allmarked = (self.value != "")
if session is not None:
from Screens.NumericalTextInputHelpDialog import NumericalTextInputHelpDialog
self.help_window = session.instantiateDialog(NumericalTextInputHelpDialog, self)
self.help_window.show()
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if self.help_window:
session.deleteDialog(self.help_window)
self.help_window = None
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
def getHTML(self, id):
return '<input type="text" name="' + id + '" value="' + self.value + '" /><br>\n'
def unsafeAssign(self, value):
self.value = str(value)
class ConfigPosition(ConfigSequence):
def __init__(self, default, args):
ConfigSequence.__init__(self, seperator = ",", limits = [(0,args[0]),(0,args[1]),(0,args[2]),(0,args[3])], default = default)
clock_limits = [(0,23),(0,59)]
class ConfigClock(ConfigSequence):
def __init__(self, default):
t = localtime(default)
ConfigSequence.__init__(self, seperator = ":", limits = clock_limits, default = [t.tm_hour, t.tm_min])
def increment(self):
# Check if Minutes maxed out
if self._value[1] == 59:
# Increment Hour, reset Minutes
if self._value[0] < 23:
self._value[0] += 1
else:
self._value[0] = 0
self._value[1] = 0
else:
# Increment Minutes
self._value[1] += 1
# Trigger change
self.changed()
def decrement(self):
# Check if Minutes is minimum
if self._value[1] == 0:
# Decrement Hour, set Minutes to 59
if self._value[0] > 0:
self._value[0] -= 1
else:
self._value[0] = 23
self._value[1] = 59
else:
# Decrement Minutes
self._value[1] -= 1
# Trigger change
self.changed()
integer_limits = (0, 9999999999)
class ConfigInteger(ConfigSequence):
def __init__(self, default, limits = integer_limits):
ConfigSequence.__init__(self, seperator = ":", limits = [limits], default = default)
# you need to override this to do input validation
def setValue(self, value):
self._value = [value]
self.changed()
def getValue(self):
return self._value[0]
value = property(getValue, setValue)
def fromstring(self, value):
return int(value)
def tostring(self, value):
return str(value)
class ConfigPIN(ConfigInteger):
def __init__(self, default, len = 4, censor = ""):
assert isinstance(default, int), "ConfigPIN default must be an integer"
if default == -1:
default = "aaaa"
ConfigSequence.__init__(self, seperator = ":", limits = [(0, (10**len)-1)], censor_char = censor, default = default)
self.len = len
def getLength(self):
return self.len
class ConfigFloat(ConfigSequence):
def __init__(self, default, limits):
ConfigSequence.__init__(self, seperator = ".", limits = limits, default = default)
def getFloat(self):
return float(self.value[1] / float(self.limits[1][1] + 1) + self.value[0])
float = property(getFloat)
# an editable text...
class ConfigText(ConfigElement, NumericalTextInput):
def __init__(self, default = "", fixed_size = True, visible_width = False):
ConfigElement.__init__(self)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False)
self.marked_pos = 0
self.allmarked = (default != "")
self.fixed_size = fixed_size
self.visible_width = visible_width
self.offset = 0
self.overwrite = fixed_size
self.help_window = None
self.value = self.last_value = self.default = default
def validateMarker(self):
textlen = len(self.text)
if self.fixed_size:
if self.marked_pos > textlen-1:
self.marked_pos = textlen-1
else:
if self.marked_pos > textlen:
self.marked_pos = textlen
if self.marked_pos < 0:
self.marked_pos = 0
if self.visible_width:
if self.marked_pos < self.offset:
self.offset = self.marked_pos
if self.marked_pos >= self.offset + self.visible_width:
if self.marked_pos == textlen:
self.offset = self.marked_pos - self.visible_width
else:
self.offset = self.marked_pos - self.visible_width + 1
if self.offset > 0 and self.offset + self.visible_width > textlen:
				self.offset = max(0, textlen - self.visible_width)
def insertChar(self, ch, pos, owr):
if owr or self.overwrite:
self.text = self.text[0:pos] + ch + self.text[pos + 1:]
elif self.fixed_size:
self.text = self.text[0:pos] + ch + self.text[pos:-1]
else:
self.text = self.text[0:pos] + ch + self.text[pos:]
def deleteChar(self, pos):
if not self.fixed_size:
self.text = self.text[0:pos] + self.text[pos + 1:]
elif self.overwrite:
self.text = self.text[0:pos] + " " + self.text[pos + 1:]
else:
self.text = self.text[0:pos] + self.text[pos + 1:] + " "
def deleteAllChars(self):
if self.fixed_size:
self.text = " " * len(self.text)
else:
self.text = ""
self.marked_pos = 0
def handleKey(self, key):
		# this will not change anything on the value itself
		# so we can handle it here in the gui element
if key == KEY_DELETE:
self.timeout()
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
else:
self.deleteChar(self.marked_pos)
if self.fixed_size and self.overwrite:
self.marked_pos += 1
elif key == KEY_BACKSPACE:
self.timeout()
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
elif self.marked_pos > 0:
self.deleteChar(self.marked_pos-1)
if not self.fixed_size and self.offset > 0:
self.offset -= 1
self.marked_pos -= 1
elif key == KEY_LEFT:
self.timeout()
if self.allmarked:
self.marked_pos = len(self.text)
self.allmarked = False
else:
self.marked_pos -= 1
elif key == KEY_RIGHT:
self.timeout()
if self.allmarked:
self.marked_pos = 0
self.allmarked = False
else:
self.marked_pos += 1
elif key == KEY_HOME:
self.timeout()
self.allmarked = False
self.marked_pos = 0
elif key == KEY_END:
self.timeout()
self.allmarked = False
self.marked_pos = len(self.text)
elif key == KEY_TOGGLEOW:
self.timeout()
self.overwrite = not self.overwrite
elif key == KEY_ASCII:
self.timeout()
newChar = unichr(getPrevAsciiCode())
if not self.useableChars or newChar in self.useableChars:
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, False)
self.marked_pos += 1
elif key in KEY_NUMBERS:
owr = self.lastKey == getKeyNumber(key)
newChar = self.getKey(getKeyNumber(key))
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, owr)
elif key == KEY_TIMEOUT:
self.timeout()
if self.help_window:
self.help_window.update(self)
return
if self.help_window:
self.help_window.update(self)
self.validateMarker()
self.changed()
def nextFunc(self):
self.marked_pos += 1
self.validateMarker()
self.changed()
def getValue(self):
try:
return self.text.encode("utf-8")
except UnicodeDecodeError:
print "Broken UTF8!"
return self.text
def setValue(self, val):
try:
self.text = val.decode("utf-8")
except UnicodeDecodeError:
self.text = val.decode("utf-8", "ignore")
print "Broken UTF8!"
value = property(getValue, setValue)
_value = property(getValue, setValue)
def getText(self):
return self.text.encode("utf-8")
def getMulti(self, selected):
if self.visible_width:
if self.allmarked:
mark = range(0, min(self.visible_width, len(self.text)))
else:
mark = [self.marked_pos-self.offset]
return "mtext"[1-selected:], self.text[self.offset:self.offset+self.visible_width].encode("utf-8")+" ", mark
else:
if self.allmarked:
mark = range(0, len(self.text))
else:
mark = [self.marked_pos]
return "mtext"[1-selected:], self.text.encode("utf-8")+" ", mark
def onSelect(self, session):
self.allmarked = (self.value != "")
if session is not None:
from Screens.NumericalTextInputHelpDialog import NumericalTextInputHelpDialog
self.help_window = session.instantiateDialog(NumericalTextInputHelpDialog, self)
self.help_window.show()
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if self.help_window:
session.deleteDialog(self.help_window)
self.help_window = None
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
def getHTML(self, id):
return '<input type="text" name="' + id + '" value="' + self.value + '" /><br>\n'
def unsafeAssign(self, value):
self.value = str(value)
class ConfigPassword(ConfigText):
def __init__(self, default = "", fixed_size = False, visible_width = False, censor = "*"):
ConfigText.__init__(self, default = default, fixed_size = fixed_size, visible_width = visible_width)
self.censor_char = censor
self.hidden = True
def getMulti(self, selected):
mtext, text, mark = ConfigText.getMulti(self, selected)
if self.hidden:
text = len(text) * self.censor_char
return mtext, text, mark
def onSelect(self, session):
ConfigText.onSelect(self, session)
self.hidden = False
def onDeselect(self, session):
ConfigText.onDeselect(self, session)
self.hidden = True
# lets the user select between [min, min+stepwidth, min+(stepwidth*2)..., maxval] with maxval <= max depending
# on the stepwidth
# min, max, stepwidth, default are int values
# wraparound: pressing RIGHT key at max value brings you to min value and vice versa if set to True
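# example (illustrative): ConfigSelectionNumber(min = 5, max = 100,
# stepwidth = 5, default = 50) offers the choices 5, 10, ..., 100.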
class ConfigSelectionNumber(ConfigSelection):
def __init__(self, min, max, stepwidth, default = None, wraparound = False):
self.wraparound = wraparound
if default is None:
default = min
default = str(default)
choices = []
step = min
while step <= max:
choices.append(str(step))
step += stepwidth
ConfigSelection.__init__(self, choices, default)
def getValue(self):
return int(ConfigSelection.getValue(self))
def setValue(self, val):
ConfigSelection.setValue(self, str(val))
value = property(getValue, setValue)
def getIndex(self):
return self.choices.index(self.value)
index = property(getIndex)
def handleKey(self, key):
if not self.wraparound:
if key == KEY_RIGHT:
if len(self.choices) == (self.choices.index(str(self.value)) + 1):
return
if key == KEY_LEFT:
if self.choices.index(str(self.value)) == 0:
return
nchoices = len(self.choices)
if nchoices > 1:
i = self.choices.index(str(self.value))
if key == KEY_LEFT:
self.value = self.choices[(i + nchoices - 1) % nchoices]
elif key == KEY_RIGHT:
self.value = self.choices[(i + 1) % nchoices]
elif key == KEY_HOME:
self.value = self.choices[0]
elif key == KEY_END:
self.value = self.choices[nchoices - 1]
class ConfigNumber(ConfigText):
def __init__(self, default = 0):
ConfigText.__init__(self, str(default), fixed_size = False)
def getValue(self):
return int(self.text)
def setValue(self, val):
self.text = str(val)
value = property(getValue, setValue)
_value = property(getValue, setValue)
def isChanged(self):
sv = self.saved_value
strv = self.tostring(self.value)
if sv is None and strv == self.default:
return False
return strv != sv
def conform(self):
pos = len(self.text) - self.marked_pos
self.text = self.text.lstrip("0")
if self.text == "":
self.text = "0"
if pos > len(self.text):
self.marked_pos = 0
else:
self.marked_pos = len(self.text) - pos
def handleKey(self, key):
if key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
ascii = getPrevAsciiCode()
if not (48 <= ascii <= 57):
return
else:
ascii = getKeyNumber(key) + 48
newChar = unichr(ascii)
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, False)
self.marked_pos += 1
else:
ConfigText.handleKey(self, key)
self.conform()
def onSelect(self, session):
self.allmarked = (self.value != "")
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
class ConfigSearchText(ConfigText):
def __init__(self, default = "", fixed_size = False, visible_width = False):
ConfigText.__init__(self, default = default, fixed_size = fixed_size, visible_width = visible_width)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False, search = True)
class ConfigDirectory(ConfigText):
def __init__(self, default="", visible_width=60):
ConfigText.__init__(self, default, fixed_size = True, visible_width = visible_width)
def handleKey(self, key):
pass
def getValue(self):
if self.text == "":
return None
else:
return ConfigText.getValue(self)
def setValue(self, val):
if val is None:
val = ""
ConfigText.setValue(self, val)
def getMulti(self, selected):
if self.text == "":
return "mtext"[1-selected:], _("List of storage devices"), range(0)
else:
return ConfigText.getMulti(self, selected)
def onSelect(self, session):
self.allmarked = (self.value != "")
# a slider.
class ConfigSlider(ConfigElement):
def __init__(self, default = 0, increment = 1, limits = (0, 100)):
ConfigElement.__init__(self)
self.value = self.last_value = self.default = default
self.min = limits[0]
self.max = limits[1]
self.increment = increment
def checkValues(self):
if self.value < self.min:
self.value = self.min
if self.value > self.max:
self.value = self.max
def handleKey(self, key):
if key == KEY_LEFT:
self.value -= self.increment
elif key == KEY_RIGHT:
self.value += self.increment
elif key == KEY_HOME:
self.value = self.min
elif key == KEY_END:
self.value = self.max
else:
return
self.checkValues()
def getText(self):
return "%d / %d" % (self.value, self.max)
def getMulti(self, selected):
self.checkValues()
return "slider", self.value, self.max
def fromstring(self, value):
return int(value)
# a satlist. in fact, it's a ConfigSelection.
class ConfigSatlist(ConfigSelection):
def __init__(self, list, default = None):
if default is not None:
default = str(default)
ConfigSelection.__init__(self, choices = [(str(orbpos), desc) for (orbpos, desc, flags) in list], default = default)
def getOrbitalPosition(self):
if self.value == "":
return None
return int(self.value)
orbital_position = property(getOrbitalPosition)
class ConfigSet(ConfigElement):
def __init__(self, choices, default=None):
if not default: default = []
ConfigElement.__init__(self)
if isinstance(choices, list):
choices.sort()
self.choices = choicesList(choices, choicesList.LIST_TYPE_LIST)
else:
assert False, "ConfigSet choices must be a list!"
if default is None:
default = []
self.pos = -1
default.sort()
self.last_value = self.default = default
self.value = default[:]
def toggleChoice(self, choice):
value = self.value
if choice in value:
value.remove(choice)
else:
value.append(choice)
value.sort()
self.changed()
def handleKey(self, key):
if key in KEY_NUMBERS + [KEY_DELETE, KEY_BACKSPACE]:
if self.pos != -1:
self.toggleChoice(self.choices[self.pos])
elif key == KEY_LEFT:
if self.pos < 0:
self.pos = len(self.choices)-1
else:
self.pos -= 1
elif key == KEY_RIGHT:
if self.pos >= len(self.choices)-1:
self.pos = -1
else:
self.pos += 1
elif key in (KEY_HOME, KEY_END):
self.pos = -1
def genString(self, lst):
res = ""
for x in lst:
res += self.description[x]+" "
return res
def getText(self):
return self.genString(self.value)
def getMulti(self, selected):
if not selected or self.pos == -1:
return "text", self.genString(self.value)
else:
tmp = self.value[:]
ch = self.choices[self.pos]
mem = ch in self.value
if not mem:
tmp.append(ch)
tmp.sort()
ind = tmp.index(ch)
val1 = self.genString(tmp[:ind])
val2 = " "+self.genString(tmp[ind+1:])
if mem:
chstr = " "+self.description[ch]+" "
else:
chstr = "("+self.description[ch]+")"
len_val1 = len(val1)
return "mtext", val1+chstr+val2, range(len_val1, len_val1 + len(chstr))
def onDeselect(self, session):
self.pos = -1
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value[:]
def tostring(self, value):
return str(value)
def fromstring(self, val):
return eval(val)
description = property(lambda self: descriptionList(self.choices.choices, choicesList.LIST_TYPE_LIST))
class ConfigLocations(ConfigElement):
def __init__(self, default=None, visible_width=False):
if not default: default = []
ConfigElement.__init__(self)
self.visible_width = visible_width
self.pos = -1
self.default = default
self.locations = []
self.mountpoints = []
self.value = default[:]
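		# each entry of self.locations is roughly
		# [path, mountpoint, currently available, last reported by getValue()]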
def setValue(self, value):
locations = self.locations
loc = [x[0] for x in locations if x[3]]
add = [x for x in value if not x in loc]
diff = add + [x for x in loc if not x in value]
locations = [x for x in locations if not x[0] in diff] + [[x, self.getMountpoint(x), True, True] for x in add]
#locations.sort(key = lambda x: x[0])
self.locations = locations
self.changed()
def getValue(self):
self.checkChangedMountpoints()
locations = self.locations
for x in locations:
x[3] = x[2]
return [x[0] for x in locations if x[3]]
value = property(getValue, setValue)
def tostring(self, value):
return str(value)
def fromstring(self, val):
return eval(val)
def load(self):
sv = self.saved_value
if sv is None:
tmp = self.default
else:
tmp = self.fromstring(sv)
locations = [[x, None, False, False] for x in tmp]
self.refreshMountpoints()
for x in locations:
if fileExists(x[0]):
x[1] = self.getMountpoint(x[0])
x[2] = True
self.locations = locations
def save(self):
locations = self.locations
if self.save_disabled or not locations:
self.saved_value = None
else:
self.saved_value = self.tostring([x[0] for x in locations])
def isChanged(self):
sv = self.saved_value
locations = self.locations
		if sv is None and not locations:
return False
return self.tostring([x[0] for x in locations]) != sv
def addedMount(self, mp):
for x in self.locations:
if x[1] == mp:
x[2] = True
elif x[1] is None and fileExists(x[0]):
x[1] = self.getMountpoint(x[0])
x[2] = True
def removedMount(self, mp):
for x in self.locations:
if x[1] == mp:
x[2] = False
def refreshMountpoints(self):
self.mountpoints = [p.mountpoint for p in harddiskmanager.getMountedPartitions() if p.mountpoint != "/"]
self.mountpoints.sort(key = lambda x: -len(x))
def checkChangedMountpoints(self):
oldmounts = self.mountpoints
self.refreshMountpoints()
newmounts = self.mountpoints
if oldmounts == newmounts:
return
for x in oldmounts:
if not x in newmounts:
self.removedMount(x)
for x in newmounts:
if not x in oldmounts:
self.addedMount(x)
def getMountpoint(self, file):
file = os_path.realpath(file)+"/"
for m in self.mountpoints:
if file.startswith(m):
return m
return None
def handleKey(self, key):
if key == KEY_LEFT:
self.pos -= 1
if self.pos < -1:
self.pos = len(self.value)-1
elif key == KEY_RIGHT:
self.pos += 1
if self.pos >= len(self.value):
self.pos = -1
elif key in (KEY_HOME, KEY_END):
self.pos = -1
def getText(self):
return " ".join(self.value)
def getMulti(self, selected):
if not selected:
valstr = " ".join(self.value)
if self.visible_width and len(valstr) > self.visible_width:
return "text", valstr[0:self.visible_width]
else:
return "text", valstr
else:
i = 0
valstr = ""
ind1 = 0
ind2 = 0
for val in self.value:
if i == self.pos:
ind1 = len(valstr)
valstr += str(val)+" "
if i == self.pos:
ind2 = len(valstr)
i += 1
if self.visible_width and len(valstr) > self.visible_width:
if ind1+1 < self.visible_width/2:
off = 0
else:
off = min(ind1+1-self.visible_width/2, len(valstr)-self.visible_width)
return "mtext", valstr[off:off+self.visible_width], range(ind1-off,ind2-off)
else:
return "mtext", valstr, range(ind1,ind2)
def onDeselect(self, session):
self.pos = -1
# nothing.
class ConfigNothing(ConfigSelection):
def __init__(self):
ConfigSelection.__init__(self, choices = [("","")])
# until here, 'saved_value' always had to be a *string*.
# now, in ConfigSubsection, and only there, saved_value
# is a dict, essentially forming a tree.
#
# config.foo.bar=True
# config.foobar=False
#
# turns into:
# config.saved_value == {"foo": {"bar": "True"}, "foobar": "False"}
#
class ConfigSubsectionContent(object):
pass
# we store a backup of the loaded configuration
# data in self.stored_values, to be able to deploy
# them when a new config element will be added,
# so non-default values are instantly available
# A list, for example:
# config.dipswitches = ConfigSubList()
# config.dipswitches.append(ConfigYesNo())
# config.dipswitches.append(ConfigYesNo())
# config.dipswitches.append(ConfigYesNo())
class ConfigSubList(list, object):
def __init__(self):
list.__init__(self)
self.stored_values = {}
def save(self):
for x in self:
x.save()
def load(self):
for x in self:
x.load()
def getSavedValue(self):
res = { }
for i, val in enumerate(self):
sv = val.saved_value
if sv is not None:
res[str(i)] = sv
return res
def setSavedValue(self, values):
self.stored_values = dict(values)
for (key, val) in self.stored_values.items():
if int(key) < len(self):
self[int(key)].saved_value = val
saved_value = property(getSavedValue, setSavedValue)
def append(self, item):
i = str(len(self))
list.append(self, item)
if i in self.stored_values:
item.saved_value = self.stored_values[i]
item.load()
def dict(self):
return dict([(str(index), value) for index, value in enumerate(self)])
# same as ConfigSubList, just as a dictionary.
# care must be taken that the 'key' has a proper
# str() method, because it will be used in the config
# file.
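# For example (illustrative only), integer keys are fine because
# str(11) == "11" round-trips through the settings file:
#   config.sat = ConfigSubDict()
#   config.sat[11] = ConfigYesNo()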
class ConfigSubDict(dict, object):
def __init__(self):
dict.__init__(self)
self.stored_values = {}
def save(self):
for x in self.values():
x.save()
def load(self):
for x in self.values():
x.load()
def getSavedValue(self):
res = {}
for (key, val) in self.items():
sv = val.saved_value
if sv is not None:
res[str(key)] = sv
return res
def setSavedValue(self, values):
self.stored_values = dict(values)
for (key, val) in self.items():
if str(key) in self.stored_values:
val.saved_value = self.stored_values[str(key)]
saved_value = property(getSavedValue, setSavedValue)
def __setitem__(self, key, item):
dict.__setitem__(self, key, item)
if str(key) in self.stored_values:
item.saved_value = self.stored_values[str(key)]
item.load()
def dict(self):
return self
# Like the classes above, just with a more "native"
# syntax.
#
# some evil stuff must be done to allow instant
# loading of added elements. this is why this class
# is so complex.
#
# we need the 'content' because we overwrite
# __setattr__.
# If you don't understand this, try adding
# __setattr__ to a usual existing class and you will.
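# A minimal sketch (illustrative names) of what the __setattr__
# interception below makes possible:
#   config.network = ConfigSubsection()
#   config.network.dhcp = ConfigYesNo()   # lands in content.items["dhcp"],
#                                         # picking up any stored_values entry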
class ConfigSubsection(object):
def __init__(self):
self.__dict__["content"] = ConfigSubsectionContent()
self.content.items = { }
self.content.stored_values = { }
def __setattr__(self, name, value):
if name == "saved_value":
return self.setSavedValue(value)
assert isinstance(value, (ConfigSubsection, ConfigElement, ConfigSubList, ConfigSubDict)), "ConfigSubsections can only store ConfigSubsections, ConfigSubLists, ConfigSubDicts or ConfigElements"
content = self.content
content.items[name] = value
x = content.stored_values.get(name, None)
if x is not None:
#print "ok, now we have a new item,", name, "and have the following value for it:", x
value.saved_value = x
value.load()
def __getattr__(self, name):
return self.content.items[name]
def getSavedValue(self):
res = self.content.stored_values
for (key, val) in self.content.items.items():
sv = val.saved_value
if sv is not None:
res[key] = sv
elif key in res:
del res[key]
return res
def setSavedValue(self, values):
values = dict(values)
self.content.stored_values = values
for (key, val) in self.content.items.items():
value = values.get(key, None)
if value is not None:
val.saved_value = value
saved_value = property(getSavedValue, setSavedValue)
def save(self):
for x in self.content.items.values():
x.save()
def load(self):
for x in self.content.items.values():
x.load()
def dict(self):
return self.content.items
# the root config object, which also can "pickle" (=serialize)
# down the whole config tree.
#
# we try to keep non-existing config entries, to apply them whenever
# a new config entry is added to a subsection
# also, non-existing config entries will be saved, so they won't be
# lost when a config entry disappears.
class Config(ConfigSubsection):
def __init__(self):
ConfigSubsection.__init__(self)
def pickle_this(self, prefix, topickle, result):
for (key, val) in topickle.items():
name = '.'.join((prefix, key))
if isinstance(val, dict):
self.pickle_this(name, val, result)
elif isinstance(val, tuple):
result += [name, '=', str(val[0]), '\n']
else:
result += [name, '=', str(val), '\n']
def pickle(self):
result = []
self.pickle_this("config", self.saved_value, result)
return ''.join(result)
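	# pickle() flattens the saved_value tree into one "config.x.y=value"
	# line per leaf, e.g. (reusing the example tree from the comment above):
	#   config.foo.bar=True
	#   config.foobar=False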
def unpickle(self, lines, base_file=True):
tree = { }
configbase = tree.setdefault("config", {})
for l in lines:
if not l or l[0] == '#':
continue
result = l.split('=', 1)
if len(result) != 2:
continue
(name, val) = result
val = val.strip()
names = name.split('.')
base = configbase
for n in names[1:-1]:
base = base.setdefault(n, {})
base[names[-1]] = val
if not base_file: # not the initial config file..
#update config.x.y.value when exist
try:
configEntry = eval(name)
if configEntry is not None:
configEntry.value = val
except (SyntaxError, KeyError):
pass
# we inherit from ConfigSubsection, so ...
#object.__setattr__(self, "saved_value", tree["config"])
if "config" in tree:
self.setSavedValue(tree["config"])
def saveToFile(self, filename):
text = self.pickle()
try:
import os
f = open(filename + ".writing", "w")
f.write(text)
f.flush()
os.fsync(f.fileno())
f.close()
os.rename(filename + ".writing", filename)
except IOError:
print "Config: Couldn't write %s" % filename
def loadFromFile(self, filename, base_file=True):
f = open(filename, "r")
self.unpickle(f.readlines(), base_file)
f.close()
config = Config()
config.misc = ConfigSubsection()
class ConfigFile:
def __init__(self):
pass
CONFIG_FILE = resolveFilename(SCOPE_CONFIG, "settings")
def load(self):
try:
config.loadFromFile(self.CONFIG_FILE, True)
except IOError, e:
print "unable to load config (%s), assuming defaults..." % str(e)
def save(self):
# config.save()
config.saveToFile(self.CONFIG_FILE)
def __resolveValue(self, pickles, cmap):
key = pickles[0]
if cmap.has_key(key):
if len(pickles) > 1:
return self.__resolveValue(pickles[1:], cmap[key].dict())
else:
return str(cmap[key].value)
return None
def getResolvedKey(self, key):
names = key.split('.')
if len(names) > 1:
if names[0] == "config":
ret=self.__resolveValue(names[1:], config.content.items)
if ret and len(ret):
return ret
print "getResolvedKey", key, "failed !! (Typo??)"
return ""
def NoSave(element):
element.disableSave()
return element
configfile = ConfigFile()
configfile.load()
def getConfigListEntry(*args):
assert len(args) > 1, "getConfigListEntry needs a minimum of two arguments (descr, configElement)"
return args
def updateConfigElement(element, newelement):
newelement.value = element.value
return newelement
#def _(x):
# return x
#
#config.bla = ConfigSubsection()
#config.bla.test = ConfigYesNo()
#config.nim = ConfigSubList()
#config.nim.append(ConfigSubsection())
#config.nim[0].bla = ConfigYesNo()
#config.nim.append(ConfigSubsection())
#config.nim[1].bla = ConfigYesNo()
#config.nim[1].blub = ConfigYesNo()
#config.arg = ConfigSubDict()
#config.arg["Hello"] = ConfigYesNo()
#
#config.arg["Hello"].handleKey(KEY_RIGHT)
#config.arg["Hello"].handleKey(KEY_RIGHT)
#
##config.saved_value
#
##configfile.save()
#config.save()
#print config.pickle()
cec_limits = [(0,15),(0,15),(0,15),(0,15)]
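# Each of the four blocks is one nibble of an HDMI-CEC physical address,
# e.g. "1.0.0.0" for a device on HDMI input 1 (illustrative).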
class ConfigCECAddress(ConfigSequence):
def __init__(self, default, auto_jump = False):
ConfigSequence.__init__(self, seperator = ".", limits = cec_limits, default = default)
self.block_len = [len(str(x[1])) for x in self.limits]
self.marked_block = 0
self.overwrite = True
self.auto_jump = auto_jump
def handleKey(self, key):
if key == KEY_LEFT:
if self.marked_block > 0:
self.marked_block -= 1
self.overwrite = True
elif key == KEY_RIGHT:
if self.marked_block < len(self.limits)-1:
self.marked_block += 1
self.overwrite = True
elif key == KEY_HOME:
self.marked_block = 0
self.overwrite = True
elif key == KEY_END:
self.marked_block = len(self.limits)-1
self.overwrite = True
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
oldvalue = self._value[self.marked_block]
if self.overwrite:
self._value[self.marked_block] = number
self.overwrite = False
else:
oldvalue *= 10
newvalue = oldvalue + number
if self.auto_jump and newvalue > self.limits[self.marked_block][1] and self.marked_block < len(self.limits)-1:
self.handleKey(KEY_RIGHT)
self.handleKey(key)
return
else:
self._value[self.marked_block] = newvalue
if len(str(self._value[self.marked_block])) >= self.block_len[self.marked_block]:
self.handleKey(KEY_RIGHT)
self.validate()
self.changed()
def genText(self):
value = ""
block_strlen = []
for i in self._value:
block_strlen.append(len(str(i)))
if value:
value += self.seperator
value += str(i)
leftPos = sum(block_strlen[:self.marked_block])+self.marked_block
rightPos = sum(block_strlen[:(self.marked_block+1)])+self.marked_block
mBlock = range(leftPos, rightPos)
return value, mBlock
def getMulti(self, selected):
(value, mBlock) = self.genText()
if self.enabled:
return "mtext"[1-selected:], value, mBlock
else:
return "text", value
def getHTML(self, id):
# we definitely don't want leading zeros
return '.'.join(["%d" % d for d in self.value])
| gpl-2.0 | -4,205,983,932,562,745,300 | 25.682312 | 195 | 0.663809 | false |
jianjunz/online-judge-solutions | leetcode/1223-graph-connectivity-with-threshold.py | 2 | 1095 | from typing import List

class Solution:
def areConnected(self, n: int, threshold: int, queries: List[List[int]]) -> List[bool]:
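        # Sieve-style grouping (approach inferred from the code below):
        # two cities are connected iff they share a divisor > threshold,
        # so the multiples of each base > threshold are merged into one
        # component by unioning explicit member lists.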
cities=[0]*(n+1)
group={}
nextGroupId=1
def union(source, to):
if source==to:
return
for c in group[source]:
cities[c]=to
group[to].extend(group[source])
del group[source]
for base in range(threshold+1, n):
currentGroupId=nextGroupId
nextGroupId+=1
group[currentGroupId]=[]
for member in range(base, n+1, base):
if cities[member]==0:
cities[member]=currentGroupId
group[currentGroupId].append(member)
else:
union(cities[member], currentGroupId)
answer=[False]*len(queries)
for i in range(len(queries)):
u,v=queries[i]
if cities[u]==cities[v] and cities[u]!=0:
answer[i]=True
return answer
| mit | 5,391,455,218,779,802,000 | 32.28125 | 91 | 0.46758 | false |
nagnath006/Soccer-Analytics | Soccer-Analytics/Lib/encodings/utf_16_be.py | 860 | 1037 | """ Python 'utf-16-be' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
encode = codecs.utf_16_be_encode
def decode(input, errors='strict'):
return codecs.utf_16_be_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.utf_16_be_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
_buffer_decode = codecs.utf_16_be_decode
class StreamWriter(codecs.StreamWriter):
encode = codecs.utf_16_be_encode
class StreamReader(codecs.StreamReader):
decode = codecs.utf_16_be_decode
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='utf-16-be',
encode=encode,
decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
| mpl-2.0 | 6,223,694,101,009,345,000 | 23.690476 | 61 | 0.712633 | false |
fucxy/ESPython | SDK/ESP8266_NONOS_SDK/tools/make_cert.py | 6 | 1411 | import os
class Cert(object):
def __init__(self, name, buff):
self.name = name
self.len = len(buff)
self.buff = buff
pass
def __str__(self):
out_str = ['\0']*32
for i in range(len(self.name)):
out_str[i] = self.name[i]
out_str = "".join(out_str)
out_str += str(chr(self.len & 0xFF))
out_str += str(chr((self.len & 0xFF00) >> 8))
out_str += self.buff
return out_str
pass
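# Record layout emitted by Cert.__str__ (derived from the code above):
#   bytes 0..31  name, NUL padded
#   bytes 32..33 payload length, little endian
#   bytes 34..   raw key/certificate data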
def main():
cert_list = []
file_list = os.listdir(os.getcwd())
cert_file_list = []
for _file in file_list:
pos = _file.find(".key_1024")
if pos != -1:
cert_file_list.append(_file[:pos])
pos = _file.find(".cer")
if pos!= -1:
cert_file_list.append(_file[:pos])
for cert_file in cert_file_list:
if cert_file == 'private_key':
with open(cert_file+".key_1024", 'rb') as f:
buff = f.read()
cert_list.append(Cert(cert_file, buff))
if cert_file == 'certificate':
with open(cert_file+".cer", 'rb') as f:
buff = f.read()
cert_list.append(Cert(cert_file, buff))
with open('esp_cert_private_key.bin', 'wb+') as f:
for _cert in cert_list:
f.write("%s" % _cert)
pass
if __name__ == '__main__':
main()
| mit | -1,542,461,129,570,458,000 | 25.622642 | 56 | 0.477675 | false |
mskrzypkows/servo | tests/wpt/web-platform-tests/webdriver/user_input/sendkeys_test.py | 141 | 3188 | import os
import sys
import random
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
repo_root = os.path.abspath(os.path.join(__file__, "../../.."))
sys.path.insert(1, os.path.join(repo_root, "tools", "webdriver"))
from webdriver import exceptions
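# WebDriver key codes used below (reference, not exhaustive):
#   \uE003 backspace, \uE004 tab, \uE006 return,
#   \uE008 shift, \uE012 arrow-left, \uE014 arrow-right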
class SendKeysTest(base_test.WebDriverBaseTest):
def setUp(self):
self.driver.get(self.webserver.where_is("user_input/res/text-form.html"))
def test_send_simple_string(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("lorem ipsum")
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"lorem ipsum")
def test_send_return(self):
element = self.driver.find_element_by_id("Text1")
returnkey = unichr(int("E006", 16))
element.send_keys([returnkey])
self.assertEquals(u"" + self.driver.get_current_url(), u"" + self.webserver.where_is("user_input/res/text-form-landing.html?e=mc2"))
def test_send_backspace(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("world ")
element.send_keys("wide ")
element.send_keys("web ")
element.send_keys("consortium")
backspace= unichr(int("E003", 16))
for i in range(0, 11):
element.send_keys([backspace])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"world wide web")
def test_send_tab(self):
element1 = self.driver.find_element_by_id("Text1")
element2 = self.driver.find_element_by_id("Text2")
element1.send_keys("typing here")
tab= unichr(int("E004", 16))
element1.send_keys([tab])
output = self.driver.find_element_by_id("output")
tab_pressed = output.get_attribute("checked")
self.assertEquals(tab_pressed, u"true")
def test_send_shift(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("low ")
shift= unichr(int("E008", 16))
element.send_keys([shift , "u", "p", shift])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"low UP")
def test_send_arrow_keys(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("internet")
backspace= unichr(int("E003", 16))
left= unichr(int("E012", 16))
right= unichr(int("E014", 16))
for i in range(0, 4):
element.send_keys([left])
element.send_keys([backspace])
element.send_keys([right])
element.send_keys("a")
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"intranet")
def test_select_text_with_shift(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("WebDriver")
backspace= unichr(int("E003", 16))
shift= unichr(int("E008", 16))
left= unichr(int("E012", 16))
element.send_keys([shift, left, left, left, left, left, left, backspace])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"Web")
if __name__ == "__main__":
unittest.main()
| mpl-2.0 | 5,445,272,607,829,949,000 | 32.208333 | 140 | 0.617315 | false |
ktosiek/spacewalk | proxy/proxy/rhnConstants.py | 3 | 1463 | #!/usr/bin/python
#
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
##
# rhnDefines.py - Constants used throughout the Spacewalk Proxy.
#-----------------------------------------------------------------------------
#
"""Constants used by the Spacewalk Proxy"""
# HTTP Headers
HEADER_ACTUAL_URI = 'X-RHN-ActualURI'
HEADER_EFFECTIVE_URI = 'X-RHN-EffectiveURI'
HEADER_CHECKSUM = 'X-RHN-Checksum'
HEADER_LOCATION = 'Location'
HEADER_CONTENT_LENGTH = 'Content-Length'
HEADER_RHN_REDIRECT = 'X-RHN-Redirect'
HEADER_RHN_ORIG_LOC = 'X-RHN-OriginalLocation'
# HTTP Schemes
SCHEME_HTTP = 'http'
SCHEME_HTTPS = 'https'
# These help us match URIs when kickstarting through a Proxy.
URI_PREFIX_KS = '/ty/'
URI_PREFIX_KS_CHECKSUM = '/ty-cksm/'
# Component Constants
COMPONENT_BROKER = 'proxy.broker'
COMPONENT_REDIRECT = 'proxy.redirect'
| gpl-2.0 | -4,732,060,325,747,034,000 | 30.804348 | 78 | 0.672591 | false |
makermade/arm_android-21_arm-linux-androideabi-4.8 | lib/python2.7/nntplib.py | 157 | 21135 | """An NNTP client class based on RFC 977: Network News Transfer Protocol.
Example:
>>> from nntplib import NNTP
>>> s = NNTP('news')
>>> resp, count, first, last, name = s.group('comp.lang.python')
>>> print 'Group', name, 'has', count, 'articles, range', first, 'to', last
Group comp.lang.python has 51 articles, range 5770 to 5821
>>> resp, subs = s.xhdr('subject', first + '-' + last)
>>> resp = s.quit()
>>>
Here 'resp' is the server response line.
Error responses are turned into exceptions.
To post an article from a file:
>>> f = open(filename, 'r') # file containing article, including header
>>> resp = s.post(f)
>>>
For descriptions of all methods, read the comments in the code below.
Note that all arguments and return values representing article numbers
are strings, not numbers, since they are rarely used for calculations.
"""
# RFC 977 by Brian Kantor and Phil Lapsley.
# xover, xgtitle, xpath, date methods by Kevan Heydon
# Imports
import re
import socket
__all__ = ["NNTP","NNTPReplyError","NNTPTemporaryError",
"NNTPPermanentError","NNTPProtocolError","NNTPDataError",
"error_reply","error_temp","error_perm","error_proto",
"error_data",]
# Exceptions raised when an error or invalid response is received
class NNTPError(Exception):
"""Base class for all nntplib exceptions"""
def __init__(self, *args):
Exception.__init__(self, *args)
try:
self.response = args[0]
except IndexError:
self.response = 'No response given'
class NNTPReplyError(NNTPError):
"""Unexpected [123]xx reply"""
pass
class NNTPTemporaryError(NNTPError):
"""4xx errors"""
pass
class NNTPPermanentError(NNTPError):
"""5xx errors"""
pass
class NNTPProtocolError(NNTPError):
"""Response does not begin with [1-5]"""
pass
class NNTPDataError(NNTPError):
"""Error in response data"""
pass
# for backwards compatibility
error_reply = NNTPReplyError
error_temp = NNTPTemporaryError
error_perm = NNTPPermanentError
error_proto = NNTPProtocolError
error_data = NNTPDataError
# Standard port used by NNTP servers
NNTP_PORT = 119
# Response numbers that are followed by additional text (e.g. article)
LONGRESP = ['100', '215', '220', '221', '222', '224', '230', '231', '282']
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
CRLF = '\r\n'
# The class itself
class NNTP:
def __init__(self, host, port=NNTP_PORT, user=None, password=None,
readermode=None, usenetrc=True):
"""Initialize an instance. Arguments:
- host: hostname to connect to
- port: port to connect to (default the standard NNTP port)
- user: username to authenticate with
- password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting.
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
"""
self.host = host
self.port = port
self.sock = socket.create_connection((host, port))
self.file = self.sock.makefile('rb')
self.debugging = 0
self.welcome = self.getresp()
# 'mode reader' is sometimes necessary to enable 'reader' mode.
# However, the order in which 'mode reader' and 'authinfo' need to
# arrive differs between some NNTP servers. Try to send
# 'mode reader', and if it fails with an authorization failed
# error, try again after sending authinfo.
readermode_afterauth = 0
if readermode:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass
except NNTPTemporaryError, e:
if user and e.response[:3] == '480':
# Need authorization before 'mode reader'
readermode_afterauth = 1
else:
raise
# If no login/password was specified, try to get them from ~/.netrc
        # Presume that if .netrc has an entry, NNRP authentication is required.
try:
if usenetrc and not user:
import netrc
credentials = netrc.netrc()
auth = credentials.authenticators(host)
if auth:
user = auth[0]
password = auth[2]
except IOError:
pass
# Perform NNRP authentication if needed.
if user:
resp = self.shortcmd('authinfo user '+user)
if resp[:3] == '381':
if not password:
raise NNTPReplyError(resp)
else:
resp = self.shortcmd(
'authinfo pass '+password)
if resp[:3] != '281':
raise NNTPPermanentError(resp)
if readermode_afterauth:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass
# Get the welcome message from the server
# (this is read and squirreled away by __init__()).
# If the response code is 200, posting is allowed;
# if it 201, posting is not allowed
def getwelcome(self):
"""Get the welcome message from the server
(this is read and squirreled away by __init__()).
If the response code is 200, posting is allowed;
if it 201, posting is not allowed."""
if self.debugging: print '*welcome*', repr(self.welcome)
return self.welcome
def set_debuglevel(self, level):
"""Set the debugging level. Argument 'level' means:
0: no debugging output (default)
1: print commands and responses but not body text etc.
2: also print raw lines read and sent before stripping CR/LF"""
self.debugging = level
debug = set_debuglevel
def putline(self, line):
"""Internal: send one line to the server, appending CRLF."""
line = line + CRLF
if self.debugging > 1: print '*put*', repr(line)
self.sock.sendall(line)
def putcmd(self, line):
"""Internal: send one command to the server (through putline())."""
if self.debugging: print '*cmd*', repr(line)
self.putline(line)
def getline(self):
"""Internal: return one line from the server, stripping CRLF.
Raise EOFError if the connection is closed."""
line = self.file.readline()
if self.debugging > 1:
print '*get*', repr(line)
if not line: raise EOFError
if line[-2:] == CRLF: line = line[:-2]
elif line[-1:] in CRLF: line = line[:-1]
return line
def getresp(self):
"""Internal: get a response from the server.
Raise various errors if the response indicates an error."""
resp = self.getline()
if self.debugging: print '*resp*', repr(resp)
c = resp[:1]
if c == '4':
raise NNTPTemporaryError(resp)
if c == '5':
raise NNTPPermanentError(resp)
if c not in '123':
raise NNTPProtocolError(resp)
return resp
def getlongresp(self, file=None):
"""Internal: get a response plus following text from the server.
Raise various errors if the response indicates an error."""
openedFile = None
try:
# If a string was passed then open a file with that name
if isinstance(file, str):
openedFile = file = open(file, "w")
resp = self.getresp()
if resp[:3] not in LONGRESP:
raise NNTPReplyError(resp)
list = []
while 1:
line = self.getline()
if line == '.':
break
if line[:2] == '..':
line = line[1:]
if file:
file.write(line + "\n")
else:
list.append(line)
finally:
# If this method created the file, then it must close it
if openedFile:
openedFile.close()
return resp, list
def shortcmd(self, line):
"""Internal: send a command and get the response."""
self.putcmd(line)
return self.getresp()
def longcmd(self, line, file=None):
"""Internal: send a command and get the response plus following text."""
self.putcmd(line)
return self.getlongresp(file)
def newgroups(self, date, time, file=None):
"""Process a NEWGROUPS command. Arguments:
- date: string 'yymmdd' indicating the date
- time: string 'hhmmss' indicating the time
Return:
- resp: server response if successful
- list: list of newsgroup names"""
return self.longcmd('NEWGROUPS ' + date + ' ' + time, file)
def newnews(self, group, date, time, file=None):
"""Process a NEWNEWS command. Arguments:
- group: group name or '*'
- date: string 'yymmdd' indicating the date
- time: string 'hhmmss' indicating the time
Return:
- resp: server response if successful
- list: list of message ids"""
cmd = 'NEWNEWS ' + group + ' ' + date + ' ' + time
return self.longcmd(cmd, file)
def list(self, file=None):
"""Process a LIST command. Return:
- resp: server response if successful
- list: list of (group, last, first, flag) (strings)"""
resp, list = self.longcmd('LIST', file)
for i in range(len(list)):
# Parse lines into "group last first flag"
list[i] = tuple(list[i].split())
return resp, list
def description(self, group):
"""Get a description for a single group. If more than one
group matches ('group' is a pattern), return the first. If no
group matches, return an empty string.
This elides the response code from the server, since it can
only be '215' or '285' (for xgtitle) anyway. If the response
code is needed, use the 'descriptions' method.
NOTE: This neither checks for a wildcard in 'group' nor does
it check whether the group actually exists."""
resp, lines = self.descriptions(group)
if len(lines) == 0:
return ""
else:
return lines[0][1]
def descriptions(self, group_pattern):
"""Get descriptions for a range of groups."""
line_pat = re.compile("^(?P<group>[^ \t]+)[ \t]+(.*)$")
# Try the more std (acc. to RFC2980) LIST NEWSGROUPS first
resp, raw_lines = self.longcmd('LIST NEWSGROUPS ' + group_pattern)
if resp[:3] != "215":
# Now the deprecated XGTITLE. This either raises an error
# or succeeds with the same output structure as LIST
# NEWSGROUPS.
resp, raw_lines = self.longcmd('XGTITLE ' + group_pattern)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def group(self, name):
"""Process a GROUP command. Argument:
- group: the group name
Returns:
- resp: server response if successful
- count: number of articles (string)
- first: first article number (string)
- last: last article number (string)
- name: the group name"""
resp = self.shortcmd('GROUP ' + name)
if resp[:3] != '211':
raise NNTPReplyError(resp)
words = resp.split()
count = first = last = 0
n = len(words)
if n > 1:
count = words[1]
if n > 2:
first = words[2]
if n > 3:
last = words[3]
if n > 4:
name = words[4].lower()
return resp, count, first, last, name
def help(self, file=None):
"""Process a HELP command. Returns:
- resp: server response if successful
- list: list of strings"""
return self.longcmd('HELP',file)
def statparse(self, resp):
"""Internal: parse the response of a STAT, NEXT or LAST command."""
if resp[:2] != '22':
raise NNTPReplyError(resp)
words = resp.split()
nr = 0
id = ''
n = len(words)
if n > 1:
nr = words[1]
if n > 2:
id = words[2]
return resp, nr, id
def statcmd(self, line):
"""Internal: process a STAT, NEXT or LAST command."""
resp = self.shortcmd(line)
return self.statparse(resp)
def stat(self, id):
"""Process a STAT command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: the article number
- id: the message id"""
return self.statcmd('STAT ' + id)
def next(self):
"""Process a NEXT command. No arguments. Return as for STAT."""
return self.statcmd('NEXT')
def last(self):
"""Process a LAST command. No arguments. Return as for STAT."""
return self.statcmd('LAST')
def artcmd(self, line, file=None):
"""Internal: process a HEAD, BODY or ARTICLE command."""
resp, list = self.longcmd(line, file)
resp, nr, id = self.statparse(resp)
return resp, nr, id, list
def head(self, id):
"""Process a HEAD command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article's header"""
return self.artcmd('HEAD ' + id)
def body(self, id, file=None):
"""Process a BODY command. Argument:
- id: article number or message id
- file: Filename string or file object to store the article in
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article's body or an empty list
if file was used"""
return self.artcmd('BODY ' + id, file)
def article(self, id):
"""Process an ARTICLE command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article"""
return self.artcmd('ARTICLE ' + id)
def slave(self):
"""Process a SLAVE command. Returns:
- resp: server response if successful"""
return self.shortcmd('SLAVE')
def xhdr(self, hdr, str, file=None):
"""Process an XHDR command (optional server extension). Arguments:
- hdr: the header type (e.g. 'subject')
- str: an article nr, a message id, or a range nr1-nr2
Returns:
- resp: server response if successful
- list: list of (nr, value) strings"""
pat = re.compile('^([0-9]+) ?(.*)\n?')
resp, lines = self.longcmd('XHDR ' + hdr + ' ' + str, file)
for i in range(len(lines)):
line = lines[i]
m = pat.match(line)
if m:
lines[i] = m.group(1, 2)
return resp, lines
def xover(self, start, end, file=None):
"""Process an XOVER command (optional server extension) Arguments:
- start: start of range
- end: end of range
Returns:
- resp: server response if successful
- list: list of (art-nr, subject, poster, date,
id, references, size, lines)"""
resp, lines = self.longcmd('XOVER ' + start + '-' + end, file)
xover_lines = []
for line in lines:
elem = line.split("\t")
try:
xover_lines.append((elem[0],
elem[1],
elem[2],
elem[3],
elem[4],
elem[5].split(),
elem[6],
elem[7]))
except IndexError:
raise NNTPDataError(line)
return resp,xover_lines
def xgtitle(self, group, file=None):
"""Process an XGTITLE command (optional server extension) Arguments:
- group: group name wildcard (i.e. news.*)
Returns:
- resp: server response if successful
- list: list of (name,title) strings"""
line_pat = re.compile("^([^ \t]+)[ \t]+(.*)$")
resp, raw_lines = self.longcmd('XGTITLE ' + group, file)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def xpath(self,id):
"""Process an XPATH command (optional server extension) Arguments:
- id: Message id of article
Returns:
resp: server response if successful
path: directory path to article"""
resp = self.shortcmd("XPATH " + id)
if resp[:3] != '223':
raise NNTPReplyError(resp)
try:
[resp_num, path] = resp.split()
except ValueError:
raise NNTPReplyError(resp)
else:
return resp, path
def date (self):
"""Process the DATE command. Arguments:
None
Returns:
resp: server response if successful
date: Date suitable for newnews/newgroups commands etc.
time: Time suitable for newnews/newgroups commands etc."""
resp = self.shortcmd("DATE")
if resp[:3] != '111':
raise NNTPReplyError(resp)
elem = resp.split()
if len(elem) != 2:
raise NNTPDataError(resp)
date = elem[1][2:8]
time = elem[1][-6:]
if len(date) != 6 or len(time) != 6:
raise NNTPDataError(resp)
return resp, date, time
def post(self, f):
"""Process a POST command. Arguments:
- f: file containing the article
Returns:
- resp: server response if successful"""
resp = self.shortcmd('POST')
# Raises error_??? if posting is not allowed
if resp[0] != '3':
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if not line:
break
if line[-1] == '\n':
line = line[:-1]
if line[:1] == '.':
line = '.' + line
self.putline(line)
self.putline('.')
return self.getresp()
def ihave(self, id, f):
"""Process an IHAVE command. Arguments:
- id: message-id of the article
- f: file containing the article
Returns:
- resp: server response if successful
Note that if the server refuses the article an exception is raised."""
resp = self.shortcmd('IHAVE ' + id)
# Raises error_??? if the server already has it
if resp[0] != '3':
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if not line:
break
if line[-1] == '\n':
line = line[:-1]
if line[:1] == '.':
line = '.' + line
self.putline(line)
self.putline('.')
return self.getresp()
def quit(self):
"""Process a QUIT command and close the socket. Returns:
- resp: server response if successful"""
resp = self.shortcmd('QUIT')
self.file.close()
self.sock.close()
del self.file, self.sock
return resp
# Test retrieval when run as a script.
# Assumption: if there's a local news server, it's called 'news'.
# Assumption: if user queries a remote news server, it's named
# in the environment variable NNTPSERVER (used by slrn and kin)
# and we want readermode off.
if __name__ == '__main__':
import os
    newshost = os.environ.get("NNTPSERVER", "news")
if newshost.find('.') == -1:
mode = 'readermode'
else:
mode = None
s = NNTP(newshost, readermode=mode)
resp, count, first, last, name = s.group('comp.lang.python')
print resp
print 'Group', name, 'has', count, 'articles, range', first, 'to', last
resp, subs = s.xhdr('subject', first + '-' + last)
print resp
for item in subs:
print "%7s %s" % item
resp = s.quit()
print resp
| gpl-2.0 | 2,184,284,403,858,647,300 | 32.708134 | 80 | 0.553395 | false |
datalogics/scons | bin/memoicmp.py | 2 | 2183 | #!/usr/bin/env python
#
# A script to compare the --debug=memoizer output found in
# two different files.
import sys,string
def memoize_output(fname):
mout = {}
lines=filter(lambda words:
len(words) == 5 and
words[1] == 'hits' and words[3] == 'misses',
map(string.split, open(fname,'r').readlines()))
for line in lines:
mout[line[-1]] = ( int(line[0]), int(line[2]) )
return mout
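# memoize_output() expects the "--debug=memoizer" summary lines, five
# whitespace-separated fields each, e.g. (illustrative):
#   1402 hits 982 misses Node.FS.File.stat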
def memoize_cmp(filea, fileb):
ma = memoize_output(filea)
mb = memoize_output(fileb)
print 'All output: %s / %s [delta]'%(filea, fileb)
print '----------HITS---------- ---------MISSES---------'
cfmt='%7d/%-7d [%d]'
ma_o = []
mb_o = []
mab = []
for k in ma.keys():
if k in mb.keys():
if k not in mab:
mab.append(k)
else:
ma_o.append(k)
for k in mb.keys():
if k in ma.keys():
if k not in mab:
mab.append(k)
else:
mb_o.append(k)
mab.sort()
ma_o.sort()
mb_o.sort()
for k in mab:
hits = cfmt%(ma[k][0], mb[k][0], mb[k][0]-ma[k][0])
miss = cfmt%(ma[k][1], mb[k][1], mb[k][1]-ma[k][1])
print '%-24s %-24s %s'%(hits, miss, k)
for k in ma_o:
hits = '%7d/ --'%(ma[k][0])
miss = '%7d/ --'%(ma[k][1])
print '%-24s %-24s %s'%(hits, miss, k)
for k in mb_o:
hits = ' -- /%-7d'%(mb[k][0])
miss = ' -- /%-7d'%(mb[k][1])
print '%-24s %-24s %s'%(hits, miss, k)
print '-'*(24+24+1+20)
if __name__ == "__main__":
if len(sys.argv) != 3:
print """Usage: %s file1 file2
Compares --debug=memoize output from file1 against file2."""%sys.argv[0]
sys.exit(1)
memoize_cmp(sys.argv[1], sys.argv[2])
sys.exit(0)
| mit | 344,834,245,684,390,850 | 28.90411 | 73 | 0.391663 | false |
louietsai/python-for-android | python3-alpha/extra_modules/gdata/apps/emailsettings/client.py | 48 | 23375 | #!/usr/bin/python2.4
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""EmailSettingsClient simplifies Email Settings API calls.
EmailSettingsClient extends gdata.client.GDClient to ease interaction with
the Google Apps Email Settings API. These interactions include the ability
to create labels, filters, aliases, and update web-clip, forwarding, POP,
IMAP, vacation-responder, signature, language, and general settings, and
retrieve labels, send-as, forwarding, pop, imap, vacation and signature
settings.
"""
__author__ = 'Claudio Cherubino <[email protected]>'
import urllib.request, urllib.parse, urllib.error
import gdata.apps.emailsettings.data
import gdata.client
# Email Settings URI template
# The strings in this template are eventually replaced with the API version,
# Google Apps domain name, username, and settingID, respectively.
EMAIL_SETTINGS_URI_TEMPLATE = '/a/feeds/emailsettings/%s/%s/%s/%s'
# The settingID value for the label requests
SETTING_ID_LABEL = 'label'
# The settingID value for the filter requests
SETTING_ID_FILTER = 'filter'
# The settingID value for the send-as requests
SETTING_ID_SENDAS = 'sendas'
# The settingID value for the webclip requests
SETTING_ID_WEBCLIP = 'webclip'
# The settingID value for the forwarding requests
SETTING_ID_FORWARDING = 'forwarding'
# The settingID value for the POP requests
SETTING_ID_POP = 'pop'
# The settingID value for the IMAP requests
SETTING_ID_IMAP = 'imap'
# The settingID value for the vacation responder requests
SETTING_ID_VACATION_RESPONDER = 'vacation'
# The settingID value for the signature requests
SETTING_ID_SIGNATURE = 'signature'
# The settingID value for the language requests
SETTING_ID_LANGUAGE = 'language'
# The settingID value for the general requests
SETTING_ID_GENERAL = 'general'
# The settingID value for the delegation requests
SETTING_ID_DELEGATION = 'delegation'
# The KEEP action for the email settings
ACTION_KEEP = 'KEEP'
# The ARCHIVE action for the email settings
ACTION_ARCHIVE = 'ARCHIVE'
# The DELETE action for the email settings
ACTION_DELETE = 'DELETE'
# The ALL_MAIL setting for POP enable_for property
POP_ENABLE_FOR_ALL_MAIL = 'ALL_MAIL'
# The MAIL_FROM_NOW_ON setting for POP enable_for property
POP_ENABLE_FOR_MAIL_FROM_NOW_ON = 'MAIL_FROM_NOW_ON'
class EmailSettingsClient(gdata.client.GDClient):
"""Client extension for the Google Email Settings API service.
Attributes:
host: string The hostname for the Email Settings API service.
api_version: string The version of the Email Settings API.
"""
host = 'apps-apis.google.com'
api_version = '2.0'
auth_service = 'apps'
auth_scopes = gdata.gauth.AUTH_SCOPES['apps']
ssl = True
def __init__(self, domain, auth_token=None, **kwargs):
"""Constructs a new client for the Email Settings API.
Args:
domain: string The Google Apps domain with Email Settings.
auth_token: (optional) gdata.gauth.ClientLoginToken, AuthSubToken, or
OAuthToken which authorizes this client to edit the email settings.
kwargs: The other parameters to pass to the gdata.client.GDClient
constructor.
"""
gdata.client.GDClient.__init__(self, auth_token=auth_token, **kwargs)
self.domain = domain
def make_email_settings_uri(self, username, setting_id):
"""Creates the URI for the Email Settings API call.
Using this client's Google Apps domain, create the URI to setup
email settings for the given user in that domain. If params are provided,
append them as GET params.
Args:
username: string The name of the user affected by this setting.
setting_id: string The key of the setting to be configured.
Returns:
A string giving the URI for Email Settings API calls for this client's
Google Apps domain.
"""
if '@' in username:
username, domain = username.split('@', 1)
else:
domain = self.domain
uri = EMAIL_SETTINGS_URI_TEMPLATE % (self.api_version, domain,
username, setting_id)
return uri
MakeEmailSettingsUri = make_email_settings_uri
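  # For example (illustrative values), username "liz" on domain
  # "example.com" with setting_id "label" yields:
  #   /a/feeds/emailsettings/2.0/example.com/liz/label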
def create_label(self, username, name, **kwargs):
"""Creates a label with the given properties.
Args:
username: string The name of the user.
name: string The name of the label.
kwargs: The other parameters to pass to gdata.client.GDClient.post().
Returns:
gdata.apps.emailsettings.data.EmailSettingsLabel of the new resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_LABEL)
new_label = gdata.apps.emailsettings.data.EmailSettingsLabel(
uri=uri, name=name)
return self.post(new_label, uri, **kwargs)
CreateLabel = create_label
def retrieve_labels(self, username, **kwargs):
"""Retrieves email labels for the specified username
Args:
username: string The name of the user to get the labels for
Returns:
A gdata.data.GDFeed of the user's email labels
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_LABEL)
return self.GetFeed(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsLabelFeed,
**kwargs)
RetrieveLabels = retrieve_labels
def delete_label(self, username, label, **kwargs):
"""Delete a label from the specified account.
Args:
username: string Name of the user
label: string Name of the label to be deleted
Returns:
An atom.http_core.HttpResponse() with the result of the request
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_LABEL)
uri = '/'.join([uri, urllib.parse.quote_plus(label)])
return self.delete(uri, **kwargs)
DeleteLabel = delete_label
def create_filter(self, username, from_address=None,
to_address=None, subject=None, has_the_word=None,
does_not_have_the_word=None, has_attachments=None,
label=None, mark_as_read=None, archive=None, **kwargs):
"""Creates a filter with the given properties.
Args:
username: string The name of the user.
from_address: string The source email address for the filter.
to_address: string (optional) The destination email address for
the filter.
subject: string (optional) The value the email must have in its
subject to be filtered.
has_the_word: string (optional) The value the email must have
in its subject or body to be filtered.
does_not_have_the_word: string (optional) The value the email
cannot have in its subject or body to be filtered.
has_attachments: string (optional) A boolean string representing
whether the email must have an attachment to be filtered.
label: string (optional) The name of the label to apply to
messages matching the filter criteria.
mark_as_read: Boolean (optional) Whether or not to mark
messages matching the filter criteria as read.
archive: Boolean (optional) Whether or not to move messages
matching to Archived state.
kwargs: The other parameters to pass to gdata.client.GDClient.post().
Returns:
gdata.apps.emailsettings.data.EmailSettingsFilter of the new resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_FILTER)
new_filter = gdata.apps.emailsettings.data.EmailSettingsFilter(
uri=uri, from_address=from_address,
to_address=to_address, subject=subject,
has_the_word=has_the_word,
does_not_have_the_word=does_not_have_the_word,
has_attachments=has_attachments, label=label,
mark_as_read=mark_as_read, archive=archive)
return self.post(new_filter, uri, **kwargs)
CreateFilter = create_filter
def create_send_as(self, username, name, address, reply_to=None,
make_default=None, **kwargs):
"""Creates a send-as alias with the given properties.
Args:
username: string The name of the user.
name: string The name that will appear in the "From" field.
address: string The email address that appears as the
origination address for emails sent by this user.
reply_to: string (optional) The address to be used as the reply-to
address in email sent using the alias.
make_default: Boolean (optional) Whether or not this alias should
become the default alias for this user.
kwargs: The other parameters to pass to gdata.client.GDClient.post().
Returns:
gdata.apps.emailsettings.data.EmailSettingsSendAsAlias of the
new resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_SENDAS)
new_alias = gdata.apps.emailsettings.data.EmailSettingsSendAsAlias(
uri=uri, name=name, address=address,
reply_to=reply_to, make_default=make_default)
return self.post(new_alias, uri, **kwargs)
CreateSendAs = create_send_as
def retrieve_send_as(self, username, **kwargs):
"""Retrieves send-as aliases for the specified username
Args:
username: string The name of the user to get the send-as for
Returns:
A gdata.data.GDFeed of the user's send-as alias settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_SENDAS)
return self.GetFeed(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsSendAsAliasFeed,
**kwargs)
RetrieveSendAs = retrieve_send_as
def update_webclip(self, username, enable, **kwargs):
"""Enable/Disable Google Mail web clip.
Args:
username: string The name of the user.
enable: Boolean Whether to enable showing Web clips.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsWebClip of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_WEBCLIP)
new_webclip = gdata.apps.emailsettings.data.EmailSettingsWebClip(
uri=uri, enable=enable)
return self.update(new_webclip, **kwargs)
UpdateWebclip = update_webclip
def update_forwarding(self, username, enable, forward_to=None,
action=None, **kwargs):
"""Update Google Mail Forwarding settings.
Args:
username: string The name of the user.
enable: Boolean Whether to enable incoming email forwarding.
forward_to: (optional) string The address email will be forwarded to.
action: string (optional) The action to perform after forwarding
an email (ACTION_KEEP, ACTION_ARCHIVE, ACTION_DELETE).
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsForwarding of the
updated resource
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_FORWARDING)
new_forwarding = gdata.apps.emailsettings.data.EmailSettingsForwarding(
uri=uri, enable=enable, forward_to=forward_to, action=action)
return self.update(new_forwarding, **kwargs)
UpdateForwarding = update_forwarding
def retrieve_forwarding(self, username, **kwargs):
"""Retrieves forwarding settings for the specified username
Args:
username: string The name of the user to get the forwarding settings for
Returns:
A gdata.data.GDEntry of the user's email forwarding settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_FORWARDING)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsForwarding,
**kwargs)
RetrieveForwarding = retrieve_forwarding
def update_pop(self, username, enable, enable_for=None, action=None,
**kwargs):
"""Update Google Mail POP settings.
Args:
username: string The name of the user.
enable: Boolean Whether to enable incoming POP3 access.
enable_for: string (optional) Whether to enable POP3 for all mail
(POP_ENABLE_FOR_ALL_MAIL), or mail from now on
(POP_ENABLE_FOR_MAIL_FROM_NOW_ON).
action: string (optional) What Google Mail should do with its copy
of the email after it is retrieved using POP (ACTION_KEEP,
ACTION_ARCHIVE, ACTION_DELETE).
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsPop of the updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_POP)
new_pop = gdata.apps.emailsettings.data.EmailSettingsPop(
uri=uri, enable=enable,
enable_for=enable_for, action=action)
return self.update(new_pop, **kwargs)
UpdatePop = update_pop
def retrieve_pop(self, username, **kwargs):
"""Retrieves POP settings for the specified username
Args:
username: string The name of the user to get the POP settings for
Returns:
A gdata.data.GDEntry of the user's POP settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_POP)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsPop,
**kwargs)
RetrievePop = retrieve_pop
def update_imap(self, username, enable, **kwargs):
"""Update Google Mail IMAP settings.
Args:
username: string The name of the user.
      enable: Boolean Whether to enable IMAP access.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsImap of the updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_IMAP)
new_imap = gdata.apps.emailsettings.data.EmailSettingsImap(
uri=uri, enable=enable)
return self.update(new_imap, **kwargs)
UpdateImap = update_imap
def retrieve_imap(self, username, **kwargs):
"""Retrieves imap settings for the specified username
Args:
username: string The name of the user to get the imap settings for
Returns:
A gdata.data.GDEntry of the user's IMAP settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_IMAP)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsImap,
**kwargs)
RetrieveImap = retrieve_imap
def update_vacation(self, username, enable, subject=None, message=None,
start_date=None, end_date=None, contacts_only=None,
domain_only=None, **kwargs):
"""Update Google Mail vacation-responder settings.
Args:
username: string The name of the user.
enable: Boolean Whether to enable the vacation responder.
subject: string (optional) The subject line of the vacation responder
autoresponse.
message: string (optional) The message body of the vacation responder
autoresponse.
      start_date: string (optional) The start date of the vacation responder
          autoresponse.
      end_date: string (optional) The end date of the vacation responder
          autoresponse.
contacts_only: Boolean (optional) Whether to only send autoresponses
to known contacts.
domain_only: Boolean (optional) Whether to only send autoresponses
to users in the primary domain.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsVacationResponder of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_VACATION_RESPONDER)
new_vacation = gdata.apps.emailsettings.data.EmailSettingsVacationResponder(
uri=uri, enable=enable, subject=subject,
message=message, start_date=start_date, end_date=end_date,
contacts_only=contacts_only, domain_only=domain_only)
return self.update(new_vacation, **kwargs)
UpdateVacation = update_vacation
def retrieve_vacation(self, username, **kwargs):
"""Retrieves vacation settings for the specified username
Args:
username: string The name of the user to get the vacation settings for
Returns:
A gdata.data.GDEntry of the user's vacation auto-responder settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_VACATION_RESPONDER)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=
gdata.apps.emailsettings.data.EmailSettingsVacationResponder,
**kwargs)
RetrieveVacation = retrieve_vacation
def update_signature(self, username, signature, **kwargs):
"""Update Google Mail signature.
Args:
username: string The name of the user.
signature: string The signature to be appended to outgoing messages.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsSignature of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_SIGNATURE)
new_signature = gdata.apps.emailsettings.data.EmailSettingsSignature(
uri=uri, signature=signature)
return self.update(new_signature, **kwargs)
UpdateSignature = update_signature
def retrieve_signature(self, username, **kwargs):
"""Retrieves signature settings for the specified username
Args:
username: string The name of the user to get the signature settings for
Returns:
A gdata.data.GDEntry of the user's signature settings
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_SIGNATURE)
return self.GetEntry(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsSignature,
**kwargs)
RetrieveSignature = retrieve_signature
def update_language(self, username, language, **kwargs):
"""Update Google Mail language settings.
Args:
username: string The name of the user.
language: string The language tag for Google Mail's display language.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsLanguage of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_LANGUAGE)
new_language = gdata.apps.emailsettings.data.EmailSettingsLanguage(
uri=uri, language=language)
return self.update(new_language, **kwargs)
UpdateLanguage = update_language
def update_general_settings(self, username, page_size=None, shortcuts=None,
arrows=None, snippets=None, use_unicode=None,
**kwargs):
"""Update Google Mail general settings.
Args:
username: string The name of the user.
page_size: int (optional) The number of conversations to be shown per
page.
shortcuts: Boolean (optional) Whether to enable keyboard shortcuts.
arrows: Boolean (optional) Whether to display arrow-shaped personal
indicators next to email sent specifically to the user.
snippets: Boolean (optional) Whether to display snippets of the messages
in the inbox and when searching.
use_unicode: Boolean (optional) Whether to use UTF-8 (unicode) encoding
for all outgoing messages.
kwargs: The other parameters to pass to the update method.
Returns:
gdata.apps.emailsettings.data.EmailSettingsGeneral of the
updated resource.
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_GENERAL)
new_general = gdata.apps.emailsettings.data.EmailSettingsGeneral(
uri=uri, page_size=page_size, shortcuts=shortcuts,
arrows=arrows, snippets=snippets, use_unicode=use_unicode)
return self.update(new_general, **kwargs)
UpdateGeneralSettings = update_general_settings
def add_email_delegate(self, username, address, **kwargs):
"""Add an email delegate to the mail account
Args:
username: string The name of the user
address: string The email address of the delegated account
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_DELEGATION)
new_delegation = gdata.apps.emailsettings.data.EmailSettingsDelegation(
uri=uri, address=address)
return self.post(new_delegation, uri, **kwargs)
AddEmailDelegate = add_email_delegate
def retrieve_email_delegates(self, username, **kwargs):
"""Retrieve a feed of the email delegates for the specified username
Args:
username: string The name of the user to get the email delegates for
Returns:
A gdata.data.GDFeed of the user's email delegates
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_DELEGATION)
return self.GetFeed(
uri,
auth_token=None,
query=None,
desired_class=gdata.apps.emailsettings.data.EmailSettingsDelegationFeed,
**kwargs)
RetrieveEmailDelegates = retrieve_email_delegates
def delete_email_delegate(self, username, address, **kwargs):
"""Delete an email delegate from the specified account
Args:
username: string The name of the user
address: string The email address of the delegated account
"""
uri = self.MakeEmailSettingsUri(username=username,
setting_id=SETTING_ID_DELEGATION)
uri = uri + '/' + address
return self.delete(uri, **kwargs)
DeleteEmailDelegate = delete_email_delegate
| apache-2.0 | 8,020,760,382,267,732,000 | 36.4 | 81 | 0.673925 | false |
diegoguimaraes/django | django/core/mail/backends/filebased.py | 35 | 2765 | """Email backend that writes messages to a file."""
import datetime
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend
from django.utils import six
class EmailBackend(ConsoleEmailBackend):
def __init__(self, *args, **kwargs):
self._fname = None
if 'file_path' in kwargs:
self.file_path = kwargs.pop('file_path')
else:
self.file_path = getattr(settings, 'EMAIL_FILE_PATH', None)
# Make sure self.file_path is a string.
if not isinstance(self.file_path, six.string_types):
raise ImproperlyConfigured('Path for saving emails is invalid: %r' % self.file_path)
self.file_path = os.path.abspath(self.file_path)
# Make sure that self.file_path is an directory if it exists.
if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
raise ImproperlyConfigured(
'Path for saving email messages exists, but is not a directory: %s' % self.file_path
)
        # Try to create it if it does not exist.
elif not os.path.exists(self.file_path):
try:
os.makedirs(self.file_path)
except OSError as err:
raise ImproperlyConfigured(
'Could not create directory for saving email messages: %s (%s)' % (self.file_path, err)
)
# Make sure that self.file_path is writable.
if not os.access(self.file_path, os.W_OK):
raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
# Finally, call super().
# Since we're using the console-based backend as a base,
# force the stream to be None, so we don't default to stdout
kwargs['stream'] = None
super(EmailBackend, self).__init__(*args, **kwargs)
def write_message(self, message):
self.stream.write(message.message().as_bytes() + b'\n')
self.stream.write(b'-' * 79)
self.stream.write(b'\n')
def _get_filename(self):
"""Return a unique file name."""
if self._fname is None:
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
fname = "%s-%s.log" % (timestamp, abs(id(self)))
self._fname = os.path.join(self.file_path, fname)
return self._fname
def open(self):
if self.stream is None:
self.stream = open(self._get_filename(), 'ab')
return True
return False
def close(self):
try:
if self.stream is not None:
self.stream.close()
finally:
self.stream = None
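# Illustrative configuration sketch; the path below is a hypothetical example:
#
#   # settings.py
#   EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
#   EMAIL_FILE_PATH = '/tmp/app-messages'  # any writable directory
#
#   # application code
#   from django.core.mail import send_mail
#   send_mail('Subject', 'Body text.', 'from@example.com', ['to@example.com'])
#
# Each opened connection appends its messages to a single timestamped .log
# file under EMAIL_FILE_PATH, with a 79-dash separator line after every message.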
| bsd-3-clause | -4,122,964,187,453,269,500 | 39.072464 | 107 | 0.599638 | false |
repotvsupertuga/tvsupertuga.repository | script.module.universalscrapers/lib/universalscrapers/common.py | 2 | 16257 | import HTMLParser
import json
import random
import re
import urllib2
import urlparse
import requests,os,time
import xbmc,xbmcaddon
USERDATA_PATH = xbmc.translatePath('special://home/userdata/addon_data')
ADDON_DATA = os.path.join(USERDATA_PATH,'script.module.universalscrapers')
full_file = ADDON_DATA + '/Log.txt'
def clean_title(title):
    if title is None: return
title = str(title)
    title = re.sub('&#(\d+);', '', title)  # strip complete numeric HTML entities
title = re.sub('(&#[0-9]+)([^;^0-9]+)', '\\1;\\2', title)
title = title.replace('"', '\"').replace('&', '&')
title = re.sub('\n|([[].+?[]])|([(].+?[)])|\s(vs|v[.])\s|(:|;|-|"|,|\'|\_|\.|\?)|\s', '', title)
return title.lower()
def clean_search(title):
    if title is None: return
title = title.lower()
title = re.sub('&#(\d+);', '', title)
title = re.sub('(&#[0-9]+)([^;^0-9]+)', '\\1;\\2', title)
title = title.replace('"', '\"').replace('&', '&')
title = re.sub('\\\|/|\(|\)|\[|\]|\{|\}|-|:|;|\*|\?|"|\'|<|>|\_|\.|\?', ' ', title).lower()
title = ' '.join(title.split())
return title
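# Illustrative behaviour of the two cleaners on a sample title:
#
#   clean_search("Spider-Man: Homecoming (2017)")  # -> 'spider man homecoming 2017'
#   clean_title("Spider-Man: Homecoming")          # -> 'spidermanhomecoming'
#
# clean_search() keeps word boundaries for building search queries, while
# clean_title() strips everything for loose title matching.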
def send_log(name,Time,count,title,year,season = '', episode = ''):
if not os.path.exists(full_file):
full_write = open(full_file,"w")
    else:
full_write = open(full_file,'a')
if count ==0:
count = 'Check Scraper/NoLinks'
if episode != '':
title = title + '('+year+') : S'+season+' E'+episode
else:
title = title + '('+year+')'
Print = '<######################################################\n# universalscraper: %s' %(str(name))+'\n# Tested with: '+str(title)+'\n# Links returned: %s' %(str(count))+'\n# Time to Complete: %s' %(str(round(Time,2)))+'\n#######################################################>'
    full_write.write(Print+'\n')
    full_write.close()
'''
print '<######################################################'
print '# Tested with: %s' %(str(title))
print '# universalscraper: %s' %(str(name))
print '# Links returned: %s' %(str(count))
print '# Time to Complete: %s' %(str(round(Time,2)))
print '#######################################################>'
return
'''
def Del_LOG():
ADDON_DATA = os.path.join(USERDATA_PATH,'script.module.universalscrapers')
full_file = ADDON_DATA + '/Log.txt'
if os.path.exists(full_file):
os.remove(full_file)
def error_log(name,Txt):
if not os.path.exists(full_file):
full_write = open(full_file,"w")
    else:
full_write = open(full_file,'a')
Print = ':>>>> Scraper: %s' %(str(name))+'\n:>>>> LogNotice: %s' %(str(Txt))
    full_write.write(Print+'\n')
    full_write.close()
'''
print ':>>>> Scraper: %s' %(str(name))
print ':>>>> LogNotice: %s' %(str(Txt))
return
'''
def random_agent():
BR_VERS = [
['%s.0' % i for i in xrange(18, 43)],
['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111',
'39.0.2171.71', '39.0.2171.95', '39.0.2171.99', '40.0.2214.93', '40.0.2214.111',
'40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124',
'44.0.2403.155', '44.0.2403.157', '45.0.2454.101', '45.0.2454.85', '46.0.2490.71',
'46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80'],
['11.0']]
WIN_VERS = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1',
'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
FEATURES = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko']
index = random.randrange(len(RAND_UAS))
return RAND_UAS[index].format(win_ver=random.choice(WIN_VERS), feature=random.choice(FEATURES),
br_ver=random.choice(BR_VERS[index]))
def replaceHTMLCodes(txt):
txt = re.sub("(&#[0-9]+)([^;^0-9]+)", "\\1;\\2", txt)
txt = HTMLParser.HTMLParser().unescape(txt)
txt = txt.replace(""", "\"")
txt = txt.replace("&", "&")
return txt
def vk(url):
try:
try:
oid, id = urlparse.parse_qs(urlparse.urlparse(url).query)['oid'][0], \
urlparse.parse_qs(urlparse.urlparse(url).query)['id'][0]
except:
oid, id = re.compile('\/video(.*)_(.*)').findall(url)[0]
try:
hash = urlparse.parse_qs(urlparse.urlparse(url).query)['hash'][0]
except:
hash = vk_hash(oid, id)
u = 'http://api.vk.com/method/video.getEmbed?oid=%s&video_id=%s&embed_hash=%s' % (oid, id, hash)
headers = {'User-Agent': random_agent()}
request = urllib2.Request(u, headers=headers)
result = urllib2.urlopen(request).read()
result = re.sub(r'[^\x00-\x7F]+', ' ', result)
try:
result = json.loads(result)['response']
except:
result = vk_private(oid, id)
url = []
try:
url += [{'quality': '720', 'url': result['url720']}]
except:
pass
try:
url += [{'quality': '540', 'url': result['url540']}]
except:
pass
try:
url += [{'quality': '480', 'url': result['url480']}]
except:
pass
if not url == []: return url
try:
url += [{'quality': '360', 'url': result['url360']}]
except:
pass
if not url == []: return url
try:
url += [{'quality': '240', 'url': result['url240']}]
except:
pass
if not url == []: return url
except:
return
def vk_hash(oid, id):
try:
url = 'http://vk.com/al_video.php?act=show_inline&al=1&video=%s_%s' % (oid, id)
headers = {'User-Agent': random_agent()}
request = urllib2.Request(url, headers=headers)
result = urllib2.urlopen(request).read()
result = result.replace('\'', '"').replace(' ', '')
hash = re.compile('"hash2":"(.+?)"').findall(result)
hash += re.compile('"hash":"(.+?)"').findall(result)
hash = hash[0]
return hash
except:
return
def vk_private(oid, id):
try:
url = 'http://vk.com/al_video.php?act=show_inline&al=1&video=%s_%s' % (oid, id)
headers = {'User-Agent': random_agent()}
request = urllib2.Request(url, headers=headers)
result = urllib2.urlopen(request).read()
result = re.compile('var vars *= *({.+?});').findall(result)[0]
result = re.sub(r'[^\x00-\x7F]+', ' ', result)
result = json.loads(result)
return result
except:
return
def odnoklassniki(url):
try:
url = re.compile('//.+?/.+?/([\w]+)').findall(url)[0]
url = 'http://ok.ru/dk?cmd=videoPlayerMetadata&mid=%s' % url
headers = {'User-Agent': random_agent()}
request = urllib2.Request(url, headers=headers)
result = urllib2.urlopen(request).read()
result = re.sub(r'[^\x00-\x7F]+', ' ', result)
result = json.loads(result)['videos']
        # Initialise both lists so a failed branch below cannot leave them
        # undefined when they are combined at the end.
        hd = []
        sd = []
        try:
hd = [{'quality': '1080', 'url': i['url']} for i in result if i['name'] == 'full']
except:
pass
try:
hd += [{'quality': 'HD', 'url': i['url']} for i in result if i['name'] == 'hd']
except:
pass
try:
sd = [{'quality': 'SD', 'url': i['url']} for i in result if i['name'] == 'sd']
except:
pass
try:
sd += [{'quality': 'SD', 'url': i['url']} for i in result if i['name'] == 'low']
except:
pass
try:
sd += [{'quality': 'SD', 'url': i['url']} for i in result if i['name'] == 'lowest']
except:
pass
try:
sd += [{'quality': 'SD', 'url': i['url']} for i in result if i['name'] == 'mobile']
except:
pass
url = hd + sd[:1]
if not url == []: return url
except:
return
def googletag(url):
quality = re.compile('itag=(\d*)').findall(url)
quality += re.compile('=m(\d*)$').findall(url)
try:
quality = quality[0]
except:
return []
if quality in ['37', '137', '299', '96', '248', '303', '46']:
return [{'quality': '1080', 'url': url}]
elif quality in ['22', '84', '136', '298', '120', '95', '247', '302', '45', '102']:
return [{'quality': '720', 'url': url}]
elif quality in ['35', '44', '135', '244', '94']:
return [{'quality': '480', 'url': url}]
elif quality in ['18', '34', '43', '82', '100', '101', '134', '243', '93']:
return [{'quality': '480', 'url': url}]
elif quality in ['5', '6', '36', '83', '133', '242', '92', '132']:
return [{'quality': '480', 'url': url}]
else:
return []
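# Illustrative example; the URL is a hypothetical googlevideo link:
#
#   googletag('http://r4.googlevideo.com/videoplayback?itag=22&signature=x')
#   # -> [{'quality': '720', 'url': 'http://r4.googlevideo.com/videoplayback?itag=22&signature=x'}]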
def filter_host(host):
if host not in ['example.com', 'allvid.ch', 'anime-portal.org', 'anyfiles.pl',
'www.apnasave.club', 'castamp.com', 'clicknupload.com', 'clicknupload.me',
'clicknupload.link', 'cloud.mail.ru', 'cloudy.ec', 'cloudy.eu', 'cloudy.sx',
'cloudy.ch', 'cloudy.com', 'daclips.in', 'daclips.com', 'dailymotion.com',
'ecostream.tv', 'exashare.com', 'uame8aij4f.com', 'yahmaib3ai.com',
'facebook.com', 'filepup.net', 'fileweed.net', 'flashx.tv', 'googlevideo.com',
'googleusercontent.com', 'get.google.com', 'plus.google.com', 'googledrive.com',
'drive.google.com', 'docs.google.com', 'gorillavid.in', 'gorillavid.com',
'grifthost.com', 'hugefiles.net', 'indavideo.hu', 'kingfiles.net', 'mail.ru',
'my.mail.ru', 'm.my.mail.ru', 'videoapi.my.mail.ru', 'api.video.mail.ru',
'mersalaayitten.com', 'mersalaayitten.co', 'mersalaayitten.us', 'movdivx.com',
'divxme.com', 'movpod.net', 'movpod.in', 'movshare.net', 'wholecloud.net',
'vidgg.to', 'mp4stream.com', 'myvi.ru', 'nosvideo.com', 'noslocker.com',
'novamov.com', 'auroravid.to', 'ok.ru', 'odnoklassniki.ru', 'openload.io',
'openload.co', 'oload.tv', 'playwire.com', 'promptfile.com', 'rapidvideo.com',
'raptu.com', 'rutube.ru', 'videos.sapo.pt', 'speedvideo.net', 'streamcloud.eu',
'streamin.to', 'stream.moe', 'streamplay.to', 'teramixer.com', 'thevid.net',
'thevideo.me', 'toltsd-fel.tk', 'toltsd-fel.xyz', 'trollvid.net', 'trollvid.io',
'mp4edge.com', 'tudou.com', 'tune.pk', 'upload.af', 'uploadx.org', 'uploadz.co',
'uptobox.com', 'uptostream.com', 'veoh.com', 'videa.hu', 'videoget.me',
'videohut.to', 'videoraj.ec', 'videoraj.eu', 'videoraj.sx', 'videoraj.ch',
'videoraj.com', 'videoraj.to', 'videoraj.co', 'bitvid.sx', 'videoweed.es',
'videoweed.com', 'videowood.tv', 'byzoo.org', 'playpanda.net', 'videozoo.me',
'videowing.me', 'easyvideo.me', 'play44.net', 'playbb.me', 'video44.net',
'vidlox.tv', 'vidmad.net', 'tamildrive.com', 'vid.me', 'vidup.me', 'vimeo.com',
'vivo.sx', 'vk.com', 'vshare.eu', 'watchers.to', 'watchonline.to',
'everplay.watchpass.net', 'weshare.me', 'xvidstage.com', 'yourupload.com',
'yucache.net', 'youtube.com', 'youtu.be', 'youtube-nocookie.com',
'youwatch.org', 'chouhaa.info', 'aliez.me', 'ani-stream.com', 'bestream.tv',
'blazefile.co', 'divxstage.eu', 'divxstage.net', 'divxstage.to', 'cloudtime.to',
'downace.com', 'entervideo.net', 'estream.to', 'fastplay.sx', 'fastplay.cc',
'goodvideohost.com', 'jetload.tv', 'letwatch.us', 'letwatch.to', 'vidshare.us',
'megamp4.net', 'mp4engine.com', 'mp4upload.com', 'myvidstream.net',
'nowvideo.eu', 'nowvideo.ch', 'nowvideo.sx', 'nowvideo.co', 'nowvideo.li',
'nowvideo.fo', 'nowvideo.at', 'nowvideo.ec', 'playedto.me', 'www.playhd.video',
'www.playhd.fo', 'putload.tv', 'shitmovie.com', 'rapidvideo.ws',
'speedplay.xyz', 'speedplay.us', 'speedplay1.site', 'speedplay.pw',
'speedplay1.pw', 'speedplay3.pw', 'speedplayy.site', 'speedvid.net',
'spruto.tv', 'stagevu.com', 'streame.net', 'thevideos.tv', 'tusfiles.net',
'userscloud.com', 'usersfiles.com', 'vidabc.com', 'vidcrazy.net',
'uploadcrazy.net', 'thevideobee.to', 'videocloud.co', 'vidfile.net',
'vidhos.com', 'vidto.me', 'vidtodo.com', 'vidup.org', 'vidzi.tv', 'vodlock.co',
'vshare.io', 'watchvideo.us', 'watchvideo2.us', 'watchvideo3.us',
'watchvideo4.us', 'watchvideo5.us', 'watchvideo6.us', 'watchvideo7.us',
'watchvideo8.us', 'watchvideo9.us', 'watchvideo10.us', 'watchvideo11.us',
'watchvideo12.us', 'zstream.to']:
return False
return True
def check_playable(url):
"""
checks if passed url is a live link
:param str url: stream url
:return: playable stream url or None
:rtype: str or None
"""
import urllib
import requests
try:
headers = url.rsplit('|', 1)[1]
except:
headers = ''
headers = urllib.quote_plus(headers).replace('%3D', '=') if ' ' in headers else headers
headers = dict(urlparse.parse_qsl(headers))
result = None
try:
        # The '.m3u8' and plain-http branches issued an identical HEAD
        # request, so they are folded into a single check.
        if url.startswith('http'):
            result = requests.head(url.split('|')[0], headers=headers, timeout=5)
            if result is None:
                return None
except:
pass
return result
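# Illustrative usage; the stream URL and header value are hypothetical:
#
#   resp = check_playable('http://cdn.example.com/live.m3u8|User-Agent=Mozilla/5.0')
#   if resp is not None and resp.status_code < 400:
#       pass  # the link answered the HEAD request and is worth playing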
def get_rd_domains():
import xbmc
import xbmcaddon
import os
try:
from sqlite3 import dbapi2 as database
except:
from pysqlite2 import dbapi2 as database
import datetime
cache_location = os.path.join(
xbmc.translatePath(xbmcaddon.Addon("script.module.universalscrapers").getAddonInfo('profile')).decode('utf-8'),
'url_cache.db')
try:
dbcon = database.connect(cache_location)
dbcur = dbcon.cursor()
try:
dbcur.execute("SELECT * FROM version")
match = dbcur.fetchone()
except:
dbcur.execute("CREATE TABLE version (""version TEXT)")
dbcur.execute("INSERT INTO version Values ('0.5.4')")
dbcon.commit()
dbcur.execute(
"CREATE TABLE IF NOT EXISTS rd_domains (""domains TEXT, ""added TEXT"");")
except Exception as e:
pass
try:
sources = []
dbcur.execute(
"SELECT * FROM rd_domains")
match = dbcur.fetchone()
t1 = int(re.sub('[^0-9]', '', str(match[1])))
t2 = int(datetime.datetime.now().strftime("%Y%m%d%H%M"))
        # Approximate staleness check on the concatenated YYYYMMDDHHMM
        # timestamps; when stale, fall through and refresh the cached list.
        update = abs(t2 - t1) > 60 * 24
if update is False:
sources = json.loads(match[0])
return sources
except Exception as e:
pass
url = 'https://api.real-debrid.com/rest/1.0/hosts/domains'
domains = requests.get(url).json()
try:
dbcur.execute("DELETE FROM rd_domains WHERE added = %s" %(match[1]))
except:
pass
dbcur.execute("INSERT INTO rd_domains Values (?, ?)", (
json.dumps(domains),
datetime.datetime.now().strftime("%Y-%m-%d %H:%M")))
dbcon.commit()
return domains
| gpl-2.0 | -7,718,691,009,701,874,000 | 40.261421 | 315 | 0.512395 | false |
Benrflanders/Pytris | pyglet/gl/lib_wgl.py | 41 | 5761 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: lib_glx.py 597 2007-02-03 16:13:07Z Alex.Holkner $'
import ctypes
from ctypes import *
import pyglet
from pyglet.gl.lib import missing_function, decorate_function
from pyglet.compat import asbytes
__all__ = ['link_GL', 'link_GLU', 'link_WGL']
_debug_trace = pyglet.options['debug_trace']
gl_lib = ctypes.windll.opengl32
glu_lib = ctypes.windll.glu32
wgl_lib = gl_lib
if _debug_trace:
from pyglet.lib import _TraceLibrary
gl_lib = _TraceLibrary(gl_lib)
glu_lib = _TraceLibrary(glu_lib)
wgl_lib = _TraceLibrary(wgl_lib)
try:
wglGetProcAddress = wgl_lib.wglGetProcAddress
wglGetProcAddress.restype = CFUNCTYPE(POINTER(c_int))
wglGetProcAddress.argtypes = [c_char_p]
_have_get_proc_address = True
except AttributeError:
_have_get_proc_address = False
class WGLFunctionProxy(object):
__slots__ = ['name', 'requires', 'suggestions', 'ftype', 'func']
def __init__(self, name, ftype, requires, suggestions):
assert _have_get_proc_address
self.name = name
self.ftype = ftype
self.requires = requires
self.suggestions = suggestions
self.func = None
def __call__(self, *args, **kwargs):
if self.func:
return self.func(*args, **kwargs)
from pyglet.gl import current_context
if not current_context:
raise Exception(
'Call to function "%s" before GL context created' % self.name)
address = wglGetProcAddress(asbytes(self.name))
        if cast(address, POINTER(c_int)):  # a NULL pointer casts to a falsy value
self.func = cast(address, self.ftype)
decorate_function(self.func, self.name)
else:
self.func = missing_function(
self.name, self.requires, self.suggestions)
result = self.func(*args, **kwargs)
return result
def link_GL(name, restype, argtypes, requires=None, suggestions=None):
try:
func = getattr(gl_lib, name)
func.restype = restype
func.argtypes = argtypes
decorate_function(func, name)
return func
except AttributeError:
# Not in opengl32.dll. Try and get a pointer from WGL.
try:
fargs = (restype,) + tuple(argtypes)
ftype = ctypes.WINFUNCTYPE(*fargs)
if _have_get_proc_address:
from pyglet.gl import gl_info
if gl_info.have_context():
address = wglGetProcAddress(name)
if address:
func = cast(address, ftype)
decorate_function(func, name)
return func
else:
# Insert proxy until we have a context
return WGLFunctionProxy(name, ftype, requires, suggestions)
except:
pass
return missing_function(name, requires, suggestions)
def link_GLU(name, restype, argtypes, requires=None, suggestions=None):
try:
func = getattr(glu_lib, name)
func.restype = restype
func.argtypes = argtypes
decorate_function(func, name)
return func
except AttributeError:
# Not in glu32.dll. Try and get a pointer from WGL.
try:
fargs = (restype,) + tuple(argtypes)
ftype = ctypes.WINFUNCTYPE(*fargs)
if _have_get_proc_address:
from pyglet.gl import gl_info
if gl_info.have_context():
address = wglGetProcAddress(name)
if address:
func = cast(address, ftype)
decorate_function(func, name)
return func
else:
# Insert proxy until we have a context
return WGLFunctionProxy(name, ftype, requires, suggestions)
except:
pass
return missing_function(name, requires, suggestions)
link_WGL = link_GL
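# Illustrative use of link_GL, mirroring what the generated pyglet.gl bindings
# do; glClear's real prototype is void glClear(GLbitfield mask):
#
#   from ctypes import c_uint
#   glClear = link_GL('glClear', None, [c_uint], requires='OpenGL 1.0')
#   glClear(0x00004000)  # GL_COLOR_BUFFER_BIT; requires a current GL context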
| mit | -7,252,160,383,359,486,000 | 36.409091 | 79 | 0.618122 | false |
HarmJ0y/sparta | app/settings.py | 1 | 25730 | #!/usr/bin/env python
'''
SPARTA - Network Infrastructure Penetration Testing Tool (http://sparta.secforce.com)
Copyright (c) 2014 SECFORCE (Antonio Quina and Leonidas Stavliotis)
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys, os
from PyQt4 import QtCore, QtGui
from app.auxiliary import * # for timestamp
# this class reads and writes application settings
class AppSettings():
def __init__(self):
# check if settings file exists and creates it if it doesn't
if not os.path.exists('./sparta.conf'):
print '[+] Creating settings file..'
self.createDefaultSettings()
else:
print '[+] Loading settings file..'
self.actions = QtCore.QSettings('./sparta.conf', QtCore.QSettings.NativeFormat)
# This function creates the default settings file. Note that, in general, everything is case sensitive.
# Each action should be in the following format:
#
# (key, [label, command, service])
# key - must be unique within the group and is used to retrieve each action. is used to create the tab titles and also to recognise nmap commands so we can parse the output (case sensitive)
# label - is what appears in the context menu in the gui
# command - command that will be run. These placeholders will be replaced on-the-fly: [IP] [PORT] [OUTPUT]
# service - service(s) to which the tool applies (comma-separated). Leave empty if valid for all services.
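    # For example, the entry
    #   ("nikto", ["Run nikto", "nikto -o [OUTPUT].txt -p [PORT] -h [IP]",
    #              "http,https,ssl,soap,http-proxy,http-alt"])
    # appears as "Run nikto" in the context menu of any web-service port, and
    # for a hypothetical host 10.0.0.5 with port 80 it is launched as:
    #   nikto -o <output-file>.txt -p 80 -h 10.0.0.5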
def createDefaultSettings(self):
self.actions = QtCore.QSettings('./sparta.conf', QtCore.QSettings.NativeFormat)
self.actions.beginGroup('GeneralSettings')
self.actions.setValue('default-terminal','gnome-terminal')
self.actions.setValue('tool-output-black-background','False')
self.actions.setValue('screenshooter-timeout','15000')
self.actions.setValue('web-services','http,https,ssl,soap,http-proxy,http-alt,https-alt')
self.actions.setValue('enable-scheduler','True')
self.actions.setValue('max-fast-processes', '10')
self.actions.setValue('max-slow-processes', '10')
self.actions.endGroup()
self.actions.beginGroup('BruteSettings')
self.actions.setValue('store-cleartext-passwords-on-exit','True')
self.actions.setValue('username-wordlist-path','/usr/share/wordlists/')
self.actions.setValue('password-wordlist-path','/usr/share/wordlists/')
self.actions.setValue('default-username','root')
self.actions.setValue('default-password','password')
self.actions.setValue('services', "asterisk,afp,cisco,cisco-enable,cvs,firebird,ftp,ftps,http-head,http-get,https-head,https-get,http-get-form,http-post-form,https-get-form,https-post-form,http-proxy,http-proxy-urlenum,icq,imap,imaps,irc,ldap2,ldap2s,ldap3,ldap3s,ldap3-crammd5,ldap3-crammd5s,ldap3-digestmd5,ldap3-digestmd5s,mssql,mysql,ncp,nntp,oracle-listener,oracle-sid,pcanywhere,pcnfs,pop3,pop3s,postgres,rdp,rexec,rlogin,rsh,s7-300,sip,smb,smtp,smtps,smtp-enum,snmp,socks5,ssh,sshkey,svn,teamspeak,telnet,telnets,vmauthd,vnc,xmpp")
self.actions.setValue('no-username-services', "cisco,cisco-enable,oracle-listener,s7-300,snmp,vnc")
self.actions.setValue('no-password-services', "oracle-sid,rsh,smtp-enum")
self.actions.endGroup()
self.actions.beginGroup('StagedNmapSettings')
self.actions.setValue('stage1-ports','T:80,443')
self.actions.setValue('stage2-ports','T:25,135,137,139,445,1433,3306,5432,U:137,161,162,1434')
self.actions.setValue('stage3-ports','T:23,21,22,110,111,2049,3389,8080,U:500,5060')
self.actions.setValue('stage4-ports','T:0-20,24,26-79,81-109,112-134,136,138,140-442,444,446-1432,1434-2048,2050-3305,3307-3388,3390-5431,5433-8079,8081-29999')
self.actions.setValue('stage5-ports','T:30000-65535')
self.actions.endGroup()
self.actions.beginGroup('ToolSettings')
self.actions.setValue('nmap-path','/usr/bin/nmap')
self.actions.setValue('hydra-path','/usr/bin/hydra')
self.actions.setValue('cutycapt-path','/usr/bin/cutycapt')
self.actions.setValue('texteditor-path','/usr/bin/leafpad')
self.actions.endGroup()
self.actions.beginGroup('HostActions')
self.actions.setValue("nmap-fast-tcp", ["Run nmap (fast TCP)", "nmap -Pn -F -T4 -vvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("nmap-full-tcp", ["Run nmap (full TCP)", "nmap -Pn -sV -sC -O -p- -T4 -vvvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("nmap-fast-udp", ["Run nmap (fast UDP)", "nmap -n -Pn -sU -F --min-rate=1000 -vvvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("nmap-udp-1000", ["Run nmap (top 1000 quick UDP)", "nmap -n -Pn -sU --min-rate=1000 -vvvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("nmap-full-udp", ["Run nmap (full UDP)", "nmap -n -Pn -sU -p- -T4 -vvvvv [IP] -oA [OUTPUT]"])
self.actions.setValue("unicornscan-full-udp", ["Run unicornscan (full UDP)", "unicornscan -mU -Ir 1000 [IP]:a -v"])
self.actions.endGroup()
self.actions.beginGroup('PortActions')
self.actions.setValue("banner", ["Grab banner", "bash -c \"echo \"\" | nc -v -n -w1 [IP] [PORT]\"", ""])
self.actions.setValue("screenshooter", ["Take a webservice screenshot", "", ""])
self.actions.setValue("nmap", ["Run nmap (scripts) on port", "nmap -Pn -sV -sC -vvvvv -p[PORT] [IP] -oA [OUTPUT]", ""])
self.actions.setValue("nikto", ["Run nikto", "nikto -o [OUTPUT].txt -p [PORT] -h [IP]", "http,https,ssl,soap,http-proxy,http-alt"])
self.actions.setValue("dirbuster", ["Launch dirbuster", "java -Xmx256M -jar /usr/share/dirbuster/DirBuster-1.0-RC1.jar -u http://[IP]:[PORT]/", "http,https,ssl,soap,http-proxy,http-alt"])
self.actions.setValue("webslayer", ["Launch webslayer", "webslayer", "http,https,ssl,soap,http-proxy,http-alt"])
### SMB
self.actions.setValue("samrdump", ["Run samrdump", "python /usr/share/doc/python-impacket-doc/examples/samrdump.py [IP] [PORT]/SMB", "netbios-ssn,microsoft-ds"])
self.actions.setValue("nbtscan", ["Run nbtscan", "nbtscan -v -h [IP]", "netbios-ns"])
self.actions.setValue("smbenum", ["Run smbenum", "bash ./scripts/smbenum.sh [IP]", "netbios-ssn,microsoft-ds"])
self.actions.setValue("enum4linux", ["Run enum4linux", "enum4linux [IP]", "netbios-ssn,microsoft-ds"])
self.actions.setValue("polenum", ["Extract password policy (polenum)", "polenum [IP]", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-users", ["Enumerate users (nmap)", "nmap -p[PORT] --script=smb-enum-users [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-users-rpc", ["Enumerate users (rpcclient)", "bash -c \"echo 'enumdomusers' | rpcclient [IP] -U%\"", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-admins", ["Enumerate domain admins (net)", "net rpc group members \"Domain Admins\" -I [IP] -U% ", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-groups", ["Enumerate groups (nmap)", "nmap -p[PORT] --script=smb-enum-groups [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-shares", ["Enumerate shares (nmap)", "nmap -p[PORT] --script=smb-enum-shares [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-sessions", ["Enumerate logged in users (nmap)", "nmap -p[PORT] --script=smb-enum-sessions [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-enum-policies", ["Extract password policy (nmap)", "nmap -p[PORT] --script=smb-enum-domains [IP] -vvvvv", "netbios-ssn,microsoft-ds"])
self.actions.setValue("smb-null-sessions", ["Check for null sessions (rpcclient)", "bash -c \"echo 'srvinfo' | rpcclient [IP] -U%\"", "netbios-ssn,microsoft-ds"])
###
self.actions.setValue("ldapsearch", ["Run ldapsearch", "ldapsearch -h [IP] -p [PORT] -x -s base", "ldap"])
self.actions.setValue("snmpcheck", ["Run snmpcheck", "snmpcheck -t [IP]", "snmp,snmptrap"])
self.actions.setValue("rpcinfo", ["Run rpcinfo", "rpcinfo -p [IP]", "rpcbind"])
self.actions.setValue("showmount", ["Show nfs shares", "showmount -e [IP]", "nfs"])
self.actions.setValue("x11screen", ["Run x11screenshot", "bash ./scripts/x11screenshot.sh [IP]", "X11"])
self.actions.setValue("sslscan", ["Run sslscan", "sslscan --no-failed [IP]:[PORT]", "http,https,ssl,soap,http-proxy,http-alt"])
self.actions.setValue("rwho", ["Run rwho", "rwho -a [IP]", "who"])
self.actions.setValue("finger", ["Enumerate users (finger)", "./scripts/fingertool.sh [IP]", "finger"])
self.actions.setValue("smtp-enum-vrfy", ["Enumerate SMTP users (VRFY)", "smtp-user-enum -M VRFY -U /usr/share/metasploit-framework/data/wordlists/unix_users.txt -t [IP] -p [PORT]", "smtp"])
self.actions.setValue("smtp-enum-expn", ["Enumerate SMTP users (EXPN)", "smtp-user-enum -M EXPN -U /usr/share/metasploit-framework/data/wordlists/unix_users.txt -t [IP] -p [PORT]", "smtp"])
self.actions.setValue("smtp-enum-rcpt", ["Enumerate SMTP users (RCPT)", "smtp-user-enum -M RCPT -U /usr/share/metasploit-framework/data/wordlists/unix_users.txt -t [IP] -p [PORT]", "smtp"])
self.actions.setValue("ftp-default", ["Check for default ftp credentials", "hydra -s [PORT] -C ./wordlists/ftp-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] ftp", "ftp"])
self.actions.setValue("mssql-default", ["Check for default mssql credentials", "hydra -s [PORT] -C ./wordlists/mssql-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] mssql", "ms-sql-s"])
self.actions.setValue("mysql-default", ["Check for default mysql credentials", "hydra -s [PORT] -C ./wordlists/mysql-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] mysql", "mysql"])
self.actions.setValue("oracle-default", ["Check for default oracle credentials", "hydra -s [PORT] -C ./wordlists/oracle-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] oracle-listener", "oracle-tns"])
self.actions.setValue("postgres-default", ["Check for default postgres credentials", "hydra -s [PORT] -C ./wordlists/postgres-default-userpass.txt -u -o \"[OUTPUT].txt\" -f [IP] postgres", "postgresql"])
#self.actions.setValue("snmp-default", ["Check for default community strings", "onesixtyone -c /usr/share/doc/onesixtyone/dict.txt [IP]", "snmp,snmptrap"])
#self.actions.setValue("snmp-default", ["Check for default community strings", "python ./scripts/snmpbrute.py.old -t [IP] -p [PORT] -f ./wordlists/snmp-default.txt", "snmp,snmptrap"])
self.actions.setValue("snmp-default", ["Check for default community strings", "python ./scripts/snmpbrute.py -t [IP] -p [PORT] -f ./wordlists/snmp-default.txt -b --no-colours", "snmp,snmptrap"])
self.actions.setValue("snmp-brute", ["Bruteforce community strings (medusa)", "bash -c \"medusa -h [IP] -u root -P ./wordlists/snmp-default.txt -M snmp | grep SUCCESS\"", "snmp,snmptrap"])
self.actions.setValue("oracle-version", ["Get version", "msfcli auxiliary/scanner/oracle/tnslsnr_version rhosts=[IP] E", "oracle-tns"])
self.actions.setValue("oracle-sid", ["Oracle SID enumeration", "msfcli auxiliary/scanner/oracle/sid_enum rhosts=[IP] E", "oracle-tns"])
###
self.actions.endGroup()
self.actions.beginGroup('PortTerminalActions')
self.actions.setValue("netcat", ["Open with netcat", "nc -v [IP] [PORT]", ""])
self.actions.setValue("telnet", ["Open with telnet", "telnet [IP] [PORT]", ""])
self.actions.setValue("ftp", ["Open with ftp client", "ftp [IP] [PORT]", "ftp"])
self.actions.setValue("mysql", ["Open with mysql client (as root)", "mysql -u root -h [IP] --port=[PORT] -p", "mysql"])
self.actions.setValue("mssql", ["Open with mssql client (as sa)", "python /usr/share/doc/python-impacket-doc/examples/mssqlclient.py -p [PORT] sa@[IP]", "mys-sql-s,codasrv-se"])
self.actions.setValue("ssh", ["Open with ssh client (as root)", "ssh root@[IP] -p [PORT]", "ssh"])
self.actions.setValue("psql", ["Open with postgres client (as postgres)", "psql -h [IP] -p [PORT] -U postgres", "postgres"])
self.actions.setValue("rdesktop", ["Open with rdesktop", "rdesktop [IP]:[PORT]", "ms-wbt-server"])
self.actions.setValue("rpcclient", ["Open with rpcclient (NULL session)", "rpcclient [IP] -p [PORT] -U%", "netbios-ssn,microsoft-ds"])
self.actions.setValue("vncviewer", ["Open with vncviewer", "vncviewer [IP]:[PORT]", "vnc"])
self.actions.setValue("xephyr", ["Open with Xephyr", "Xephyr -query [IP] :1", "xdmcp"])
self.actions.setValue("rlogin", ["Open with rlogin", "rlogin -i root -p [PORT] [IP]", "login"])
self.actions.setValue("rsh", ["Open with rsh", "rsh -l root [IP]", "shell"])
self.actions.endGroup()
self.actions.beginGroup('SchedulerSettings')
self.actions.setValue("nikto",["http,https,ssl,soap,http-proxy,http-alt","tcp"])
self.actions.setValue("screenshooter",["http,https,ssl,http-proxy,http-alt","tcp"])
self.actions.setValue("smbenum",["microsoft-ds","tcp"])
# self.actions.setValue("enum4linux","netbios-ssn,microsoft-ds")
# self.actions.setValue("smb-null-sessions","netbios-ssn,microsoft-ds")
# self.actions.setValue("nbtscan","netbios-ns")
self.actions.setValue("snmpcheck",["snmp","udp"])
self.actions.setValue("x11screen",["X11","tcp"])
self.actions.setValue("snmp-default",["snmp","udp"])
self.actions.setValue("smtp-enum-vrfy",["smtp","tcp"])
self.actions.setValue("mysql-default",["mysql","tcp"])
self.actions.setValue("mssql-default",["ms-sql-s","tcp"])
self.actions.setValue("ftp-default",["ftp","tcp"])
self.actions.setValue("postgres-default",["postgresql","tcp"])
self.actions.setValue("oracle-default",["oracle-tns","tcp"])
self.actions.endGroup()
self.actions.sync()
# NOTE: the weird order of elements in the functions below is due to historical reasons. Change this some day.
def getGeneralSettings(self):
settings = dict()
self.actions.beginGroup('GeneralSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
def getBruteSettings(self):
settings = dict()
self.actions.beginGroup('BruteSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
def getStagedNmapSettings(self):
settings = dict()
self.actions.beginGroup('StagedNmapSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
def getToolSettings(self):
settings = dict()
self.actions.beginGroup('ToolSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
# this function fetches all the host actions from the settings file
def getHostActions(self):
hostactions = []
sortArray = []
self.actions.beginGroup('HostActions')
keys = self.actions.childKeys()
for k in keys:
hostactions.append([self.actions.value(k).toList()[0].toString(), str(k), self.actions.value(k).toList()[1].toString()])
sortArray.append(self.actions.value(k).toList()[0].toString())
self.actions.endGroup()
sortArrayWithArray(sortArray, hostactions) # sort by label so that it appears nicely in the context menu
return hostactions
# this function fetches all the port actions from the settings file
def getPortActions(self):
portactions = []
sortArray = []
self.actions.beginGroup('PortActions')
keys = self.actions.childKeys()
for k in keys:
portactions.append([self.actions.value(k).toList()[0].toString(), str(k), self.actions.value(k).toList()[1].toString(), self.actions.value(k).toList()[2].toString()])
sortArray.append(self.actions.value(k).toList()[0].toString())
self.actions.endGroup()
sortArrayWithArray(sortArray, portactions) # sort by label so that it appears nicely in the context menu
return portactions
# this function fetches all the port actions that will be run as terminal commands from the settings file
def getPortTerminalActions(self):
portactions = []
sortArray = []
self.actions.beginGroup('PortTerminalActions')
keys = self.actions.childKeys()
for k in keys:
portactions.append([self.actions.value(k).toList()[0].toString(), str(k), self.actions.value(k).toList()[1].toString(), self.actions.value(k).toList()[2].toString()])
sortArray.append(self.actions.value(k).toList()[0].toString())
self.actions.endGroup()
sortArrayWithArray(sortArray, portactions) # sort by label so that it appears nicely in the context menu
return portactions
def getSchedulerSettings(self):
settings = []
self.actions.beginGroup('SchedulerSettings')
keys = self.actions.childKeys()
for k in keys:
settings.append([str(k),self.actions.value(k).toList()[0].toString(),self.actions.value(k).toList()[1].toString()])
self.actions.endGroup()
return settings
def getSchedulerSettings_old(self):
settings = dict()
self.actions.beginGroup('SchedulerSettings')
keys = self.actions.childKeys()
for k in keys:
settings.update({str(k):str(self.actions.value(k).toString())})
self.actions.endGroup()
return settings
def backupAndSave(self, newSettings):
# Backup and save
print '[+] Backing up old settings and saving new settings..'
os.rename('./sparta.conf', './'+getTimestamp()+'-sparta.conf')
self.actions = QtCore.QSettings('./sparta.conf', QtCore.QSettings.NativeFormat)
self.actions.beginGroup('GeneralSettings')
self.actions.setValue('default-terminal',newSettings.general_default_terminal)
self.actions.setValue('tool-output-black-background',newSettings.general_tool_output_black_background)
self.actions.setValue('screenshooter-timeout',newSettings.general_screenshooter_timeout)
self.actions.setValue('web-services',newSettings.general_web_services)
self.actions.setValue('enable-scheduler',newSettings.general_enable_scheduler)
self.actions.setValue('max-fast-processes', newSettings.general_max_fast_processes)
self.actions.setValue('max-slow-processes', newSettings.general_max_slow_processes)
self.actions.endGroup()
self.actions.beginGroup('BruteSettings')
self.actions.setValue('store-cleartext-passwords-on-exit',newSettings.brute_store_cleartext_passwords_on_exit)
self.actions.setValue('username-wordlist-path',newSettings.brute_username_wordlist_path)
self.actions.setValue('password-wordlist-path',newSettings.brute_password_wordlist_path)
self.actions.setValue('default-username',newSettings.brute_default_username)
self.actions.setValue('default-password',newSettings.brute_default_password)
self.actions.setValue('services', newSettings.brute_services)
self.actions.setValue('no-username-services', newSettings.brute_no_username_services)
self.actions.setValue('no-password-services', newSettings.brute_no_password_services)
self.actions.endGroup()
self.actions.beginGroup('StagedNmapSettings')
self.actions.setValue('stage1-ports',newSettings.tools_nmap_stage1_ports)
self.actions.setValue('stage2-ports',newSettings.tools_nmap_stage2_ports)
self.actions.setValue('stage3-ports',newSettings.tools_nmap_stage3_ports)
self.actions.setValue('stage4-ports',newSettings.tools_nmap_stage4_ports)
self.actions.setValue('stage5-ports',newSettings.tools_nmap_stage5_ports)
self.actions.endGroup()
self.actions.beginGroup('HostActions')
for a in newSettings.hostActions:
self.actions.setValue(a[1], [a[0], a[2]])
self.actions.endGroup()
self.actions.beginGroup('PortActions')
for a in newSettings.portActions:
self.actions.setValue(a[1], [a[0], a[2], a[3]])
self.actions.endGroup()
self.actions.beginGroup('PortTerminalActions')
for a in newSettings.portTerminalActions:
self.actions.setValue(a[1], [a[0], a[2], a[3]])
self.actions.endGroup()
self.actions.beginGroup('SchedulerSettings')
for tool in newSettings.automatedAttacks:
self.actions.setValue(tool, newSettings.automatedAttacks[tool])
self.actions.endGroup()
self.actions.sync()
# This class first sets all the default settings and then overwrites them with the settings found in the configuration file
class Settings():
def __init__(self, appSettings=None):
# general
self.general_default_terminal = "gnome-terminal"
self.general_tool_output_black_background = "False"
self.general_screenshooter_timeout = "15000"
self.general_web_services = "http,https,ssl,soap,http-proxy,http-alt,https-alt"
self.general_enable_scheduler = "True"
self.general_max_fast_processes = "10"
self.general_max_slow_processes = "10"
# brute
self.brute_store_cleartext_passwords_on_exit = "True"
self.brute_username_wordlist_path = "/usr/share/wordlists/"
self.brute_password_wordlist_path = "/usr/share/wordlists/"
self.brute_default_username = "root"
self.brute_default_password = "password"
self.brute_services = "asterisk,afp,cisco,cisco-enable,cvs,firebird,ftp,ftps,http-head,http-get,https-head,https-get,http-get-form,http-post-form,https-get-form,https-post-form,http-proxy,http-proxy-urlenum,icq,imap,imaps,irc,ldap2,ldap2s,ldap3,ldap3s,ldap3-crammd5,ldap3-crammd5s,ldap3-digestmd5,ldap3-digestmd5s,mssql,mysql,ncp,nntp,oracle-listener,oracle-sid,pcanywhere,pcnfs,pop3,pop3s,postgres,rdp,rexec,rlogin,rsh,s7-300,sip,smb,smtp,smtps,smtp-enum,snmp,socks5,ssh,sshkey,svn,teamspeak,telnet,telnets,vmauthd,vnc,xmpp"
self.brute_no_username_services = "cisco,cisco-enable,oracle-listener,s7-300,snmp,vnc"
self.brute_no_password_services = "oracle-sid,rsh,smtp-enum"
# tools
self.tools_nmap_stage1_ports = "T:80,443"
self.tools_nmap_stage2_ports = "T:25,135,137,139,445,1433,3306,5432,U:137,161,162,1434"
self.tools_nmap_stage3_ports = "T:23,21,22,110,111,2049,3389,8080,U:500,5060"
self.tools_nmap_stage4_ports = "T:0-20,24,26-79,81-109,112-134,136,138,140-442,444,446-1432,1434-2048,2050-3305,3307-3388,3390-5431,5433-8079,8081-29999"
self.tools_nmap_stage5_ports = "T:30000-65535"
self.tools_path_nmap = "/usr/bin/nmap"
self.tools_path_hydra = "/usr/bin/hydra"
self.tools_path_cutycapt = "/usr/bin/cutycapt"
self.tools_path_texteditor = "/usr/bin/leafpad"
self.hostActions = []
self.portActions = []
self.portTerminalActions = []
self.stagedNmapSettings = []
self.automatedAttacks = []
# now that all defaults are set, overwrite with whatever was in the .conf file (stored in appSettings)
if appSettings:
try:
self.generalSettings = appSettings.getGeneralSettings()
self.bruteSettings = appSettings.getBruteSettings()
self.stagedNmapSettings = appSettings.getStagedNmapSettings()
self.toolSettings = appSettings.getToolSettings()
self.hostActions = appSettings.getHostActions()
self.portActions = appSettings.getPortActions()
self.portTerminalActions = appSettings.getPortTerminalActions()
self.automatedAttacks = appSettings.getSchedulerSettings()
# general
self.general_default_terminal = self.generalSettings['default-terminal']
self.general_tool_output_black_background = self.generalSettings['tool-output-black-background']
self.general_screenshooter_timeout = self.generalSettings['screenshooter-timeout']
self.general_web_services = self.generalSettings['web-services']
self.general_enable_scheduler = self.generalSettings['enable-scheduler']
self.general_max_fast_processes = self.generalSettings['max-fast-processes']
self.general_max_slow_processes = self.generalSettings['max-slow-processes']
# brute
self.brute_store_cleartext_passwords_on_exit = self.bruteSettings['store-cleartext-passwords-on-exit']
self.brute_username_wordlist_path = self.bruteSettings['username-wordlist-path']
self.brute_password_wordlist_path = self.bruteSettings['password-wordlist-path']
self.brute_default_username = self.bruteSettings['default-username']
self.brute_default_password = self.bruteSettings['default-password']
self.brute_services = self.bruteSettings['services']
self.brute_no_username_services = self.bruteSettings['no-username-services']
self.brute_no_password_services = self.bruteSettings['no-password-services']
# tools
self.tools_nmap_stage1_ports = self.stagedNmapSettings['stage1-ports']
self.tools_nmap_stage2_ports = self.stagedNmapSettings['stage2-ports']
self.tools_nmap_stage3_ports = self.stagedNmapSettings['stage3-ports']
self.tools_nmap_stage4_ports = self.stagedNmapSettings['stage4-ports']
self.tools_nmap_stage5_ports = self.stagedNmapSettings['stage5-ports']
self.tools_path_nmap = self.toolSettings['nmap-path']
self.tools_path_hydra = self.toolSettings['hydra-path']
self.tools_path_cutycapt = self.toolSettings['cutycapt-path']
self.tools_path_texteditor = self.toolSettings['texteditor-path']
except KeyError:
                print '\t[-] Something went wrong while loading the configuration file. Falling back to defaults for the affected settings.'
print '\t[-] Go to the settings menu to fix the issues!'
# TODO: send signal to automatically open settings dialog here
def __eq__(self, other): # returns false if settings objects are different
if type(other) is type(self):
return self.__dict__ == other.__dict__
return False
if __name__ == "__main__":
settings = AppSettings()
s = Settings(settings)
s2 = Settings(settings)
print s == s2
s2.general_default_terminal = 'whatever'
print s == s2
| gpl-3.0 | 5,327,895,277,197,737,000 | 59.399061 | 540 | 0.723086 | false |
svn2github/chromium-depot-tools | third_party/gsutil/gslib/bucket_listing_ref.py | 51 | 6349 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import time
class BucketListingRef(object):
"""
Container that holds a reference to one result from a bucket listing, allowing
polymorphic iteration over wildcard-iterated URIs, Keys, or Prefixes. At a
minimum, every reference contains a StorageUri. If the reference came from a
bucket listing (as opposed to a manually instantiated ref that might populate
only the StorageUri), it will additionally contain either a Key or a Prefix,
depending on whether it was a reference to an object or was just a prefix of a
path (i.e., bucket subdirectory). The latter happens when the bucket was
listed using delimiter='/'.
Note that Keys are shallow-populated, based on the contents extracted from
parsing a bucket listing. This includes name, length, and other fields
(basically, the info listed by gsutil ls -l), but does not include information
like ACL and location (which require separate server requests, which is why
there's a separate gsutil ls -L option to get this more detailed info).
"""
def __init__(self, uri, key=None, prefix=None, headers=None):
"""Instantiate BucketListingRef from uri and (if available) key or prefix.
Args:
uri: StorageUri for the object (required).
key: Key for the object, or None if not available.
prefix: Prefix for the subdir, or None if not available.
headers: Dictionary containing optional HTTP headers to pass to boto
(which happens when GetKey() is called on an BucketListingRef which
has no constructor-populated Key), or None if not available.
At most one of key and prefix can be populated.
"""
assert key is None or prefix is None
self.uri = uri
self.key = key
self.prefix = prefix
self.headers = headers or {}
def GetUri(self):
"""Get URI form of listed URI.
Returns:
StorageUri.
"""
return self.uri
def GetUriString(self):
"""Get string URI form of listed URI.
Returns:
String.
"""
return self.uri.uri
def NamesBucket(self):
"""Determines if this BucketListingRef names a bucket.
Returns:
bool indicator.
"""
return self.key is None and self.prefix is None and self.uri.names_bucket()
def IsLatest(self):
"""Determines if this BucketListingRef names the latest version of an
object.
Returns:
bool indicator.
"""
return hasattr(self.uri, 'is_latest') and self.uri.is_latest
def GetRStrippedUriString(self):
"""Get string URI form of listed URI, stripped of any right trailing
delims, and without version string.
Returns:
String.
"""
return self.uri.versionless_uri.rstrip('/')
def HasKey(self):
"""Return bool indicator of whether this BucketListingRef has a Key."""
return bool(self.key)
def HasPrefix(self):
"""Return bool indicator of whether this BucketListingRef has a Prefix."""
return bool(self.prefix)
def GetKey(self):
"""Get Key form of listed URI.
Returns:
Subclass of boto.s3.key.Key.
Raises:
BucketListingRefException: for bucket-only uri.
"""
# For gsutil ls -l gs://bucket self.key will be populated from (boto)
# parsing the bucket listing. But as noted and handled below there are
# cases where self.key isn't populated.
if not self.key:
if not self.uri.names_object():
raise BucketListingRefException(
'Attempt to call GetKey() on Key-less BucketListingRef (uri=%s) ' %
self.uri)
# This case happens when we do gsutil ls -l on a object name-ful
# StorageUri with no object-name wildcard. Since the ls command
# implementation only reads bucket info we need to read the object
# for this case.
self.key = self.uri.get_key(validate=False, headers=self.headers)
# When we retrieve the object this way its last_modified timestamp
# is formatted in RFC 1123 format, which is different from when we
# retrieve from the bucket listing (which uses ISO 8601 format), so
# convert so we consistently return ISO 8601 format.
tuple_time = (time.strptime(self.key.last_modified,
'%a, %d %b %Y %H:%M:%S %Z'))
self.key.last_modified = time.strftime('%Y-%m-%dT%H:%M:%S', tuple_time)
return self.key
def GetPrefix(self):
"""Get Prefix form of listed URI.
Returns:
boto.s3.prefix.Prefix.
Raises:
BucketListingRefException: if this object has no Prefix.
"""
if not self.prefix:
raise BucketListingRefException(
'Attempt to call GetPrefix() on Prefix-less BucketListingRef '
'(uri=%s)' % self.uri)
return self.prefix
def __repr__(self):
"""Returns string representation of BucketListingRef."""
return 'BucketListingRef(%s, HasKey=%s, HasPrefix=%s)' % (
self.uri, self.HasKey(), self.HasPrefix())
class BucketListingRefException(StandardError):
"""Exception thrown for invalid BucketListingRef requests."""
def __init__(self, reason):
StandardError.__init__(self)
self.reason = reason
def __repr__(self):
return 'BucketListingRefException: %s' % self.reason
def __str__(self):
return 'BucketListingRefException: %s' % self.reason
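# Illustrative usage sketch; the bucket and object names are hypothetical and
# configured boto credentials are assumed:
#
#   import boto
#   uri = boto.storage_uri('gs://my-bucket/data/report.txt')
#   ref = BucketListingRef(uri)
#   key = ref.GetKey()          # no Key was supplied, so one is fetched lazily
#   print ref.GetUriString()    # gs://my-bucket/data/report.txt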
| bsd-3-clause | -964,310,703,412,498,300 | 35.28 | 80 | 0.693338 | false |
alheinecke/tensorflow-xsmm | tensorflow/contrib/session_bundle/exporter.py | 49 | 12341 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Export a TensorFlow model.
See: go/tf-exporter
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
import six
from google.protobuf.any_pb2 import Any
from tensorflow.contrib.session_bundle import constants
from tensorflow.contrib.session_bundle import gc
from tensorflow.contrib.session_bundle import manifest_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training import training_util
from tensorflow.python.util import compat
from tensorflow.python.util.deprecation import deprecated
@deprecated("2017-06-30", "Please use SavedModel instead.")
def gfile_copy_callback(files_to_copy, export_dir_path):
"""Callback to copy files using `gfile.Copy` to an export directory.
This method is used as the default `assets_callback` in `Exporter.init` to
copy assets from the `assets_collection`. It can also be invoked directly to
copy additional supplementary files into the export directory (in which case
it is not a callback).
Args:
files_to_copy: A dictionary that maps original file paths to desired
basename in the export directory.
export_dir_path: Directory to copy the files to.
"""
logging.info("Write assets into: %s using gfile_copy.", export_dir_path)
gfile.MakeDirs(export_dir_path)
for source_filepath, basename in files_to_copy.items():
new_path = os.path.join(
compat.as_bytes(export_dir_path), compat.as_bytes(basename))
logging.info("Copying asset %s to path %s.", source_filepath, new_path)
if gfile.Exists(new_path):
# Guard against being restarted while copying assets, and the file
# existing and being in an unknown state.
# TODO(b/28676216): Do some file checks before deleting.
logging.info("Removing file %s.", new_path)
gfile.Remove(new_path)
gfile.Copy(source_filepath, new_path)
@deprecated("2017-06-30", "Please use SavedModel instead.")
def regression_signature(input_tensor, output_tensor):
"""Creates a regression signature.
Args:
input_tensor: Tensor specifying the input to a graph.
output_tensor: Tensor specifying the output of a graph.
Returns:
A Signature message.
"""
signature = manifest_pb2.Signature()
signature.regression_signature.input.tensor_name = input_tensor.name
signature.regression_signature.output.tensor_name = output_tensor.name
return signature
@deprecated("2017-06-30", "Please use SavedModel instead.")
def classification_signature(input_tensor,
classes_tensor=None,
scores_tensor=None):
"""Creates a classification signature.
Args:
input_tensor: Tensor specifying the input to a graph.
classes_tensor: Tensor specifying the output classes of a graph.
scores_tensor: Tensor specifying the scores of the output classes.
Returns:
A Signature message.
"""
signature = manifest_pb2.Signature()
signature.classification_signature.input.tensor_name = input_tensor.name
if classes_tensor is not None:
signature.classification_signature.classes.tensor_name = classes_tensor.name
if scores_tensor is not None:
signature.classification_signature.scores.tensor_name = scores_tensor.name
return signature
@deprecated("2017-06-30", "Please use SavedModel instead.")
def generic_signature(name_tensor_map):
"""Creates a generic signature of name to Tensor name.
Args:
name_tensor_map: Map from logical name to Tensor.
Returns:
A Signature message.
"""
signature = manifest_pb2.Signature()
for name, tensor in six.iteritems(name_tensor_map):
signature.generic_signature.map[name].tensor_name = tensor.name
return signature
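# Illustrative end-to-end sketch of how the signature helpers above feed into
# Exporter below; `x`, `y`, `sess` and `global_step_tensor` stand in for parts
# of a real trained graph:
#
#   saver = tf_saver.Saver()
#   exporter = Exporter(saver)
#   exporter.init(
#       default_graph_signature=regression_signature(x, y),
#       named_graph_signatures={'inputs': generic_signature({'x': x}),
#                               'outputs': generic_signature({'y': y})})
#   exporter.export('/tmp/exported-model', global_step_tensor, sess)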
class Exporter(object):
"""Exporter helps package a TensorFlow model for serving.
Args:
saver: Saver object.
"""
def __init__(self, saver):
# Makes a copy of the saver-def and disables garbage-collection, since the
# exporter enforces garbage-collection independently. Specifically, since
# the exporter performs atomic copies of the saver output, it is required
# that garbage-collection via the underlying saver be disabled.
saver_def = saver.as_saver_def()
saver_def.ClearField("max_to_keep")
self._saver = tf_saver.Saver(saver_def=saver_def)
self._has_init = False
self._assets_to_copy = {}
@deprecated("2017-06-30", "Please use SavedModel instead.")
def init(self,
graph_def=None,
init_op=None,
clear_devices=False,
default_graph_signature=None,
named_graph_signatures=None,
assets_collection=None,
assets_callback=gfile_copy_callback):
"""Initialization.
Args:
graph_def: A GraphDef message of the graph to be used in inference.
GraphDef of default graph is used when None.
init_op: Op to be used in initialization.
clear_devices: If device info of the graph should be cleared upon export.
default_graph_signature: Default signature of the graph.
named_graph_signatures: Map of named input/output signatures of the graph.
assets_collection: A collection of constant asset filepath tensors. If set
the assets will be exported into the asset directory.
assets_callback: callback with two argument called during export with the
list of files to copy and the asset path.
Raises:
RuntimeError: if init is called more than once.
TypeError: if init_op is not an Operation or None.
ValueError: if asset file path tensors are not non-empty constant string
scalar tensors.
"""
    # Avoid a dangerous mutable default value ([]).
if named_graph_signatures is None:
named_graph_signatures = {}
assets = []
if assets_collection:
for asset_tensor in assets_collection:
asset_filepath = self._file_path_value(asset_tensor)
if not asset_filepath:
raise ValueError("invalid asset filepath tensor %s" % asset_tensor)
basename = os.path.basename(asset_filepath)
assets.append((basename, asset_tensor))
self._assets_to_copy[asset_filepath] = basename
if self._has_init:
raise RuntimeError("init should be called only once")
self._has_init = True
if graph_def or clear_devices:
copy = graph_pb2.GraphDef()
if graph_def:
copy.CopyFrom(graph_def)
else:
copy.CopyFrom(ops.get_default_graph().as_graph_def())
if clear_devices:
for node in copy.node:
node.device = ""
graph_any_buf = Any()
graph_any_buf.Pack(copy)
ops.add_to_collection(constants.GRAPH_KEY, graph_any_buf)
if init_op:
if not isinstance(init_op, ops.Operation):
raise TypeError("init_op needs to be an Operation: %s" % init_op)
ops.add_to_collection(constants.INIT_OP_KEY, init_op)
signatures_proto = manifest_pb2.Signatures()
if default_graph_signature:
signatures_proto.default_signature.CopyFrom(default_graph_signature)
for signature_name, signature in six.iteritems(named_graph_signatures):
signatures_proto.named_signatures[signature_name].CopyFrom(signature)
signatures_any_buf = Any()
signatures_any_buf.Pack(signatures_proto)
ops.add_to_collection(constants.SIGNATURES_KEY, signatures_any_buf)
for filename, tensor in assets:
asset = manifest_pb2.AssetFile()
asset.filename = filename
asset.tensor_binding.tensor_name = tensor.name
asset_any_buf = Any()
asset_any_buf.Pack(asset)
ops.add_to_collection(constants.ASSETS_KEY, asset_any_buf)
self._assets_callback = assets_callback
@deprecated("2017-06-30", "Please use SavedModel instead.")
def export(self,
export_dir_base,
global_step_tensor,
sess=None,
exports_to_keep=None):
"""Exports the model.
Args:
export_dir_base: A string path to the base export dir.
      global_step_tensor: A Tensor or tensor name providing the
global step counter to append to the export directory path and set
in the manifest version.
sess: A Session to use to save the parameters.
exports_to_keep: a gc.Path filter function used to determine the set of
exports to keep. If set to None, all versions will be kept.
Returns:
The string path to the exported directory.
Raises:
RuntimeError: if init is not called.
RuntimeError: if the export would overwrite an existing directory.
"""
if not self._has_init:
raise RuntimeError("init must be called first")
    # Export dir must not end with '/' or it will break exports_to_keep.
    # Strip it.
if export_dir_base.endswith("/"):
export_dir_base = export_dir_base[:-1]
global_step = training_util.global_step(sess, global_step_tensor)
export_dir = os.path.join(
compat.as_bytes(export_dir_base),
compat.as_bytes(constants.VERSION_FORMAT_SPECIFIER % global_step))
# Prevent overwriting on existing exports which could lead to bad/corrupt
# storage and loading of models. This is an important check that must be
# done before any output files or directories are created.
if gfile.Exists(export_dir):
raise RuntimeError("Overwriting exports can cause corruption and are "
"not allowed. Duplicate export dir: %s" % export_dir)
# Output to a temporary directory which is atomically renamed to the final
# directory when complete.
tmp_export_dir = compat.as_text(export_dir) + "-tmp"
gfile.MakeDirs(tmp_export_dir)
self._saver.save(sess,
os.path.join(
compat.as_text(tmp_export_dir),
compat.as_text(constants.EXPORT_BASE_NAME)),
meta_graph_suffix=constants.EXPORT_SUFFIX_NAME)
# Run the asset callback.
if self._assets_callback and self._assets_to_copy:
assets_dir = os.path.join(
compat.as_bytes(tmp_export_dir),
compat.as_bytes(constants.ASSETS_DIRECTORY))
gfile.MakeDirs(assets_dir)
self._assets_callback(self._assets_to_copy, assets_dir)
# TODO(b/27794910): Delete *checkpoint* file before rename.
gfile.Rename(tmp_export_dir, export_dir)
if exports_to_keep:
      # Create a simple parser that pulls the export_version from the
      # directory name; escape the base path in case it contains regex
      # metacharacters.
      def parser(path):
        match = re.match("^" + re.escape(export_dir_base) + "/(\\d{8})$",
                         path.path)
if not match:
return None
return path._replace(export_version=int(match.group(1)))
paths_to_delete = gc.negation(exports_to_keep)
for p in paths_to_delete(gc.get_paths(export_dir_base, parser=parser)):
gfile.DeleteRecursively(p.path)
return export_dir
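  # Editor's note: each export() call first writes to
  # <export_dir_base>/<version>-tmp and then atomically renames it to
  # <export_dir_base>/<version> (an 8-digit, zero-padded global step, judging
  # by the parser regex above); old versions are pruned via exports_to_keep.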
def _file_path_value(self, path_tensor):
"""Returns the filepath value stored in constant `path_tensor`."""
    if not isinstance(path_tensor, ops.Tensor):
      raise TypeError("path_tensor must be a Tensor")
    if path_tensor.op.type != "Const":
      raise TypeError("Only constant tensors are supported")
    if path_tensor.dtype != dtypes.string:
      raise TypeError("File paths should be strings")
str_value = path_tensor.op.get_attr("value").string_val
if len(str_value) != 1:
raise TypeError("Only scalar tensors are supported")
return str_value[0]
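# Editor's illustrative sketch (not part of the original module): the intended
# init/export call sequence. `sess`, `saver`, `global_step_tensor`, and the
# tensor names "input:0"/"output:0" are placeholders supplied by a real
# training program.
def _example_export_workflow(sess, saver, global_step_tensor):
  exporter = Exporter(saver)
  signature = regression_signature(
      input_tensor=sess.graph.get_tensor_by_name("input:0"),
      output_tensor=sess.graph.get_tensor_by_name("output:0"))
  exporter.init(default_graph_signature=signature, clear_devices=True)
  # Returns e.g. "/tmp/exports/00000042" when the global step is 42.
  return exporter.export("/tmp/exports", global_step_tensor, sess=sess)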
| apache-2.0 | -8,954,033,795,216,479,000 | 37.565625 | 80 | 0.690058 | false |
F0rth/seafile-obsd-wip | web/main.py | 1 | 35100 |
#!/usr/bin/env python2
# encoding: utf-8
import gettext
import locale
import os
import simplejson as json
import sys
import platform
import urllib
import web
from web.contrib.template import render_mako
import settings
from seaserv import CCNET_CONF_PATH
from seaserv import ccnet_rpc, seafile_rpc, applet_rpc
from seaserv import get_peers_by_role
from seaserv import get_repos, get_repo, get_commits, \
get_branches, open_dir, get_diff, \
get_default_seafile_worktree, \
get_current_prefs
from pysearpc import SearpcError
urls = (
'/', 'repos',
'/opendir/', 'open_directory',
'/home/', 'repos',
'/repos/', 'repos',
'/repo/', 'repo',
'/repo/history/', 'repo_history',
'/repo/setting/', 'repo_setting',
'/repo/sync-status/', 'repo_sync_status',
'/repo/transfer/', 'repo_transfer',
'/repos/download-tasks/', 'CloneTasks',
'/repos/clone-tasks/', 'clone_tasks',
'/repo/download/', 'repo_download',
'/repo/sync/', 'repo_sync',
'/repos/operation/', 'repo_operation',
'/procs/', 'procs',
'/settings/', 'settings_page',
'/i18n/', 'i18n',
'/seafile_access_check/', 'seafile_access_check',
'/open-local-file/', 'open_local_file',
'/seafile_rpc_version/', 'seafile_rpc_version',
)
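# web.py dispatches requests using the (url-pattern, handler-class-name) pairs
# above: a GET to '/repo/history/', for example, is routed to
# repo_history.GET() defined below.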
# See http://www.py2exe.org/index.cgi/WhereAmI
if 'win32' in sys.platform and hasattr(sys, 'frozen'):
__file__ = sys.executable
curdir = os.path.abspath(os.path.dirname(__file__))
localedir = os.path.join(curdir, 'i18n')
if "darwin" == sys.platform and hasattr(sys, 'frozen'):
sys.path.append(curdir)
NET_STATE_CONNECTED = 1
lang_code = locale.getdefaultlocale()[0]
if lang_code == 'zh_CN':
DEFAULT_LANG = 'zh_CN'
else:
DEFAULT_LANG = 'en_US'
lang_in_use = None
gettext.install('messages', localedir, unicode=True)
gettext.translation('messages', localedir,
languages=[DEFAULT_LANG]).install(True)
render = render_mako(directories=['templates'],
output_encoding='utf-8', input_encoding='utf-8',
default_filters=['decode.utf8'])
app = web.application(urls, globals())
SEAFILE_VERSION = '1.7'
default_options = { "confdir": CCNET_CONF_PATH,
'web_ctx': web.ctx,
'seafile_version': SEAFILE_VERSION,
'lang': DEFAULT_LANG,
'settings': settings,
}
def get_relay_of_repo(repo):
if not repo:
return None
relay = None
try:
if repo.props.relay_id:
relay = ccnet_rpc.get_peer(repo.props.relay_id)
except:
return None
return relay
def get_dir_nav_links(repo, commit_id, path):
"""Get every folder on the path from repo root to [path]. Return value is
in this format:
[(root, href-to-root), (level-1-folder, href-to-level-1), ... (path, href-to-path)]
"""
names = []
links = []
if path != u'/':
names = path[1:].split(u'/')
for idx,name in enumerate(names):
current_path = u'/' + u'/'.join(names[:idx+1])
quoted_path = urllib.quote(current_path.encode('utf-8'))
href = "/repos/operation/?repo=%s&commit_id=%s&path=%s&op=dir" \
% (repo.props.id, commit_id, quoted_path)
links.append(href)
# insert root link in the front
names.insert(0, repo.props.name)
href = "/repos/operation/?repo=%s&commit_id=%s&op=dir" % (repo.props.id, commit_id)
links.insert(0, href)
return zip(names, links)
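# Editor's illustrative sketch (not part of the original module): only
# repo.props.id and repo.props.name are read here, so stand-ins suffice.
def _example_dir_nav_links():
    class _Props(object):
        id = 'repo-id'
        name = u'docs'
    class _Repo(object):
        props = _Props()
    return get_dir_nav_links(_Repo(), 'commit-id', u'/photos/2013')
    # -> [(u'docs',   '/repos/operation/?repo=repo-id&commit_id=commit-id&op=dir'),
    #     (u'photos', '...&path=/photos&op=dir'),
    #     (u'2013',   '...&path=/photos/2013&op=dir')]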
class open_directory:
def GET(self):
path = web.webapi.input(path='').path
if path:
open_dir(path)
referer = web.ctx.env.get('HTTP_REFERER', '/home/')
raise web.seeother(referer)
def prepare_repo_info(repo):
"""Get various types of information belong to the repo."""
### get branch information
repo.branches = get_branches(repo.props.id)
repo.current_branch = None
repo.master_branch = None
repo.local_branch = None
for branch in repo.branches:
if branch.props.name == "master":
repo.master_branch = branch
elif branch.props.name == "local":
repo.local_branch = branch
if branch.props.name == repo.props.head_branch:
repo.current_branch = branch
### transfer task information and sync info
repo.sync_info = seafile_rpc.get_repo_sync_info(repo.props.id)
class repos:
def show_repos(self):
# relay info
relays = get_peers_by_role ("MyRelay")
        # drop relays whose names have not been resolved yet
relays = [relay for relay in relays if relay.name]
# get repos info
repos = get_repos()
for repo in repos:
            # is_broken is not used right now; we should clean it up later
repo.is_broken = False
try:
prepare_repo_info(repo)
            except SearpcError as e:
repo.is_broken = True
repo.error_msg = e.msg
for relay in relays:
relay.repos = []
for repo in repos:
if relay.props.id == repo.props.relay_id:
relay.repos.append(repo)
repo.relay = relay
repos.sort(key=lambda x: x.props.last_modify, reverse=True)
return render.repos(repos=repos,
relays=relays,
**default_options)
def GET(self):
# Set language preference on the first load of home page
global lang_in_use
if not lang_in_use:
lang_in_use = seafile_rpc.get_config('lang_in_use')
if not lang_in_use:
seafile_rpc.set_config('lang_in_use', DEFAULT_LANG)
lang_in_use = DEFAULT_LANG
gettext.translation('messages', localedir,
languages=[lang_in_use]).install(True)
default_options['lang'] = lang_in_use
return self.show_repos()
class repo:
"""Show a specific repo."""
def show_repo(self, repo_id):
repo = seafile_rpc.get_repo(repo_id)
if not repo:
return render.repo_missing(repo_id=repo_id, **default_options)
try:
prepare_repo_info(repo)
recent_commits = get_commits(repo_id, 0, 3)
repo.is_broken = False
        except SearpcError as e:
repo.is_broken = True
recent_commits = []
repo.error_msg = e.msg
relay = get_relay_of_repo(repo)
relay_addr = seafile_rpc.get_repo_relay_address(repo_id)
relay_port = seafile_rpc.get_repo_relay_port(repo_id)
return render.repo(repo=repo,
recent_commits=recent_commits,
relay=relay,
relay_addr=relay_addr,
relay_port=relay_port,
**default_options)
def GET(self):
inputs = web.webapi.input(repo='')
return self.show_repo(inputs.repo)
class repo_history:
def show_repo_history(self, repo_id):
repo = seafile_rpc.get_repo(repo_id)
prepare_repo_info(repo)
inputs = web.webapi.input(page="1", per_page="25")
current_page = int(inputs.page)
per_page = int(inputs.per_page)
commits_all = get_commits(repo_id, per_page * (current_page - 1), per_page + 1)
commits = commits_all[:per_page]
if len(commits_all) == per_page + 1:
page_next = True
else:
page_next = False
return render.repo_history(repo=repo,
commits=commits,
current_page=current_page,
per_page=per_page,
page_next=page_next,
**default_options)
def GET(self):
inputs = web.webapi.input(repo='')
return self.show_repo_history(inputs.repo)
class repo_transfer:
def GET(self):
inputs = web.webapi.input(repo='')
task = {}
t = seafile_rpc.find_transfer_task(inputs.repo)
if t:
task['ttype'] = t.props.ttype
task['state'] = t.props.state
task['rt_state'] = t.props.rt_state
task['block_done'] = t.props.block_done
task['block_total'] = t.props.block_total
task['rate'] = t.props.rate
task['error_str'] = t.props.error_str
return json.dumps(task)
class repo_sync_status:
def GET(self):
inputs = web.webapi.input(repo='')
sync_status = {}
repo = get_repo(inputs.repo)
if not repo or not repo.props.worktree or not repo.props.head_branch:
return json.dumps(sync_status)
relay = get_relay_of_repo(repo)
if relay:
if not relay.props.is_ready:
if relay.net_state != NET_STATE_CONNECTED:
sync_status['state'] = 'relay not connected'
else:
sync_status['state'] = 'relay authenticating'
return json.dumps(sync_status)
t = seafile_rpc.get_repo_sync_task(inputs.repo)
if t:
if t.props.state == 'error' and t.props.error == 'relay not connected':
# Hide the 'relay not connected' error from daemon when relay
# is actually connected, but the check sync pulse has not come yet
sync_status['state'] = 'waiting for sync'
return json.dumps(sync_status)
elif t.props.state == 'canceled' or t.props.state == 'cancel pending':
sync_status['state'] = 'waiting for sync'
else:
sync_status['state'] = t.props.state
sync_status['is_sync_lan'] = t.props.is_sync_lan
sync_status['error'] = t.props.error
else:
            # No sync task yet: seafile may have just been started
sync_status['state'] = 'waiting for sync'
auto_sync_enabled = seafile_rpc.is_auto_sync_enabled()
if not auto_sync_enabled or not repo.props.auto_sync:
sync_status['state'] = 'auto sync is turned off'
return json.dumps(sync_status)
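# Editor's note: the JSON polled by the frontend takes one of a few shapes,
# e.g. {} when the repo is missing or not checked out, {"state": "relay not
# connected"} while the relay is unreachable, or {"state": ...,
# "is_sync_lan": ..., "error": ...} once a sync task exists.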
class repo_operation:
def perform_operation_get(self, op, repo_id):
repo = get_repo(repo_id)
if not repo:
raise web.seeother('/repos/')
if op == 'sync':
try:
seafile_rpc.sync(repo.props.id, None)
except:
pass
elif op == 'open' and repo.props.worktree:
try:
open_dir(repo.props.worktree.encode('utf-8'))
except:
pass
referer = web.ctx.env.get('HTTP_REFERER', '/home/')
raise web.seeother(referer)
elif op == 'open_file':
quote_file_path = web.webapi.input(quote_file_path='').file_path
file_path = quote_file_path.encode('utf-8')
dir_path = file_path
if os.path.exists(file_path) and os.path.isfile(file_path):
dir_path = os.path.dirname(file_path)
try:
open_dir(dir_path)
except:
pass
return render.checkout_msg(repo=repo, file_path=file_path, **default_options)
elif op == 'diff':
inputs = web.webapi.input(old='', new='')
new_commit = seafile_rpc.get_commit(inputs.new)
if inputs.old != '':
old_commit = seafile_rpc.get_commit(inputs.old)
else:
old_commit = None
(new, removed, renamed, modified, newdir, deldir) = get_diff(repo_id, inputs.old, inputs.new)
return render.repo_diff(repo=repo,
new=new, removed=removed,
renamed=renamed, modified=modified,
newdir=newdir, deldir=deldir,
new_commit=new_commit, old_commit=old_commit,
**default_options)
elif op == 'lsch':
inputs = web.webapi.input(old='', new='')
(new, removed, renamed, modified, newdir, deldir) = get_diff(repo_id, inputs.old, inputs.new)
ch = {}
ch['new'] = new
ch['removed'] = removed
ch['renamed'] = renamed
ch['modified'] = modified
ch['newdir'] = newdir
ch['deldir'] = deldir
return json.dumps(ch)
elif op == 'dir':
inputs = web.webapi.input(commit_id='', path='/')
dirs = seafile_rpc.list_dir_by_path(inputs.commit_id, inputs.path.encode('utf-8'))
navs = get_dir_nav_links(repo, inputs.commit_id, inputs.path)
try:
commit = seafile_rpc.get_commit(inputs.commit_id)
except SearpcError:
raise web.seeother('/repo/?repo=%s' % repo_id)
return render.repo_dir(repo=repo, dirs=dirs, commit_id=inputs.commit_id,
commit=commit,
navs=navs,
path=inputs.path,
**default_options)
elif op == 'remove':
try:
seafile_rpc.remove_repo(repo_id)
except:
pass
raise web.seeother('/repos/')
elif op == 'set-auto-sync':
auto_sync = {}
try:
seafile_rpc.set_repo_property(repo_id, "auto-sync", "true")
except:
pass
auto_sync['start'] = True
return json.dumps(auto_sync)
elif op == 'set-manual-sync':
auto_sync = {}
try:
seafile_rpc.set_repo_property(repo_id, "auto-sync", "false")
except:
pass
auto_sync['start'] = False
return json.dumps(auto_sync)
referer = web.ctx.env.get('HTTP_REFERER', '/home/')
raise web.seeother(referer)
def perform_operation_post(self, op, repo_id):
repo = get_repo(repo_id)
if not repo:
raise web.seeother('/repos/')
if op == 'modify-relay':
relay_id = web.webapi.input(relay_id="").relay_id
if relay_id != repo.props.relay_id:
seafile_rpc.set_repo_property(repo.props.id,
"relay-id", relay_id)
elif op == 'set-passwd':
passwd = web.webapi.input(passwd="").passwd
if passwd:
seafile_rpc.set_repo_passwd(repo.props.id, passwd)
elif op == 'edit-relay':
inputs = web.webapi.input(relay_addr='', relay_port='')
if inputs.relay_addr and inputs.relay_port:
seafile_rpc.update_repo_relay_info(repo_id,
inputs.relay_addr,
inputs.relay_port)
referer = web.ctx.env.get('HTTP_REFERER', '/home/')
raise web.seeother(referer)
def GET(self):
inputs = web.webapi.input(op='', repo='')
if inputs.op and inputs.repo:
return self.perform_operation_get(inputs.op, inputs.repo)
raise web.seeother('/repos/')
def POST(self):
inputs = web.webapi.input(op='', repo='')
if inputs.op and inputs.repo:
return self.perform_operation_post(inputs.op, inputs.repo)
raise web.seeother('/repos/')
class CloneTasks:
def GET(self):
inputs = web.webapi.input(op='', repo_id='')
if inputs.op and inputs.repo_id:
if inputs.op == "remove":
seafile_rpc.remove_clone_task(inputs.repo_id)
elif inputs.op == "cancel":
seafile_rpc.cancel_clone_task(inputs.repo_id)
raise web.seeother('/repos/download-tasks/')
return render.clone_tasks(**default_options)
class clone_tasks:
def GET(self):
ts = []
tasks = seafile_rpc.get_clone_tasks()
for task in tasks:
t = {}
t['repo_id'] = task.props.repo_id
t['repo_name'] = task.props.repo_name
t['state'] = task.props.state
t['error_str'] = task.props.error_str
t['worktree'] = task.props.worktree
tx_task = False
checkout_task = False
if task.props.state == "fetch":
tx_task = seafile_rpc.find_transfer_task(task.props.repo_id)
t['tx_block_done'] = tx_task.props.block_done
t['tx_block_total'] = tx_task.props.block_total
elif task.props.state == "checkout":
checkout_task = seafile_rpc.get_checkout_task(task.props.repo_id)
t['checkout_finished_files'] = checkout_task.props.finished_files
t['checkout_total_files'] = checkout_task.props.total_files
elif task.props.state == "error" and task.props.error_str == "fetch":
tx_task = seafile_rpc.find_transfer_task(task.props.repo_id)
t['tx_error_str'] = tx_task.props.error_str
elif task.props.state == "error" and task.props.error_str == "password":
t['relay_id'] = task.props.peer_id
ts.append(t)
Tasks = {}
Tasks['tasks'] = ts
return json.dumps(Tasks)
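# Editor's illustrative sample (not part of the original module): shape of the
# payload polled by the download-tasks page for one task in the "fetch" state;
# all field values below are made up.
_EXAMPLE_CLONE_TASKS_JSON = {
    'tasks': [{
        'repo_id': 'repo-id', 'repo_name': 'docs', 'state': 'fetch',
        'error_str': '', 'worktree': '/home/user/Seafile/docs',
        'tx_block_done': 10, 'tx_block_total': 100,
    }]
}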
class repo_download:
def GET(self):
inputs = web.webapi.input(relay_id='', token='',
relay_addr='', relay_port = '',
repo_id='', repo_name='',
encrypted='', magic='', email='')
relay_id = inputs.relay_id
token = inputs.token
relay_addr = inputs.relay_addr
relay_port = inputs.relay_port
repo_id = inputs.repo_id
repo_name = inputs.repo_name
email = inputs.email
if seafile_rpc.get_repo(inputs.repo_id):
return render.repo_download(repo_already_exists=True,
**default_options)
tasks = seafile_rpc.get_clone_tasks()
for task in tasks:
if task.props.repo_id == inputs.repo_id:
if task.props.state != 'done' and task.props.state != 'error' \
and task.props.state != 'canceled':
raise web.seeother('/repos/download-tasks/')
        wt_parent = get_default_seafile_worktree()
sync_url = "/repo/sync/?relay_id=%s&relay_addr=%s&relay_port=%s&" \
"email=%s&token=%s&repo_id=%s&repo_name=%s" % \
(relay_id, relay_addr, relay_port, urllib.quote(email), token, repo_id,
urllib.quote(repo_name.encode('utf-8')))
if inputs.encrypted:
sync_url += "&encrypted=1&magic=%s" % inputs.magic
return render.repo_download(error_msg=None,
repo_already_exists=False,
repo_id=inputs.repo_id,
relay_id=inputs.relay_id,
token=token,
relay_addr=relay_addr,
relay_port=relay_port,
repo_name=repo_name,
wt_parent=wt_parent,
encrypted=inputs.encrypted,
magic=inputs.magic,
email=email,
sync_url=sync_url,
**default_options)
def POST(self):
inputs = web.webapi.input(relay_id='', token='',
relay_addr='', relay_port = '',
repo_id='', repo_name='',
encrypted='', password='', magic='',
wt_parent='', email='')
sync_url = "/repo/sync/?relay_id=%s&relay_addr=%s&relay_port=%s&" \
"email=%s&token=%s&repo_id=%s&repo_name=%s" % \
(inputs.relay_id, inputs.relay_addr, inputs.relay_port,
urllib.quote(inputs.email), inputs.token, inputs.repo_id,
urllib.quote(inputs.repo_name.encode('utf-8')))
if inputs.encrypted:
sync_url += "&encrypted=1&magic=%s" % inputs.magic
error_msg = None
if not inputs.wt_parent:
error_msg = _("You must choose a local directory")
        elif inputs.encrypted and not inputs.password:
            error_msg = _("Password cannot be empty")
        elif len(inputs.repo_id) != 36:
            error_msg = _("Invalid Repo ID")
if error_msg:
return render.repo_download (error_msg=error_msg,
repo_already_exists=False,
repo_id=inputs.repo_id,
relay_id=inputs.relay_id,
relay_addr=inputs.relay_addr,
relay_port=inputs.relay_port,
token=inputs.token,
repo_name=inputs.repo_name,
encrypted=inputs.encrypted,
magic=inputs.magic,
wt_parent=inputs.wt_parent,
email=inputs.email,
sync_url=sync_url,
**default_options)
if not inputs.password:
inputs.password = None
if not inputs.magic:
inputs.magic = None
try:
seafile_rpc.download (inputs.repo_id, inputs.relay_id,
inputs.repo_name.encode('utf-8'),
inputs.wt_parent.encode('utf-8'),
inputs.token,
inputs.password,
inputs.magic,
inputs.relay_addr,
inputs.relay_port,
inputs.email)
except SearpcError as e:
if e.msg == 'Invalid local directory':
error_msg = _('Invalid local directory')
elif e.msg == 'Already in sync':
error_msg = _('The local directory you chose is in sync with another repo. Please choose another one.')
elif e.msg == 'Worktree conflicts system path':
error_msg = _('The local directory you chose cannot be under or includes a system directory of seafile.')
elif e.msg == 'Worktree conflicts existing repo':
error_msg = _('The local directory you chose cannot be under or includes another library.')
elif e.msg == 'Incorrect password':
error_msg = _('Incorrect password.')
else:
error_msg = _('Internal error.') + str(e)
if error_msg:
return render.repo_download (error_msg=error_msg,
repo_already_exists=False,
repo_id=inputs.repo_id,
relay_id=inputs.relay_id,
relay_addr=inputs.relay_addr,
relay_port=inputs.relay_port,
token=inputs.token,
repo_name=inputs.repo_name,
encrypted=inputs.encrypted,
password=inputs.password,
magic=inputs.magic,
wt_parent=inputs.wt_parent,
email=inputs.email,
sync_url=sync_url,
**default_options)
raise web.seeother('/repos/download-tasks/')
class repo_sync:
def GET(self):
inputs = web.webapi.input(relay_id='', token='',
relay_addr='', relay_port = '',
repo_id='', repo_name='',
encrypted='', magic='', email='')
relay_id = inputs.relay_id
token = inputs.token
relay_addr = inputs.relay_addr
relay_port = inputs.relay_port
repo_id = inputs.repo_id
repo_name = inputs.repo_name
email = inputs.email
if seafile_rpc.get_repo(inputs.repo_id):
return render.repo_sync(repo_already_exists=True, **default_options)
tasks = seafile_rpc.get_clone_tasks()
for task in tasks:
if task.props.repo_id == inputs.repo_id:
if task.props.state != 'done' and task.props.state != 'error' \
and task.props.state != 'canceled':
raise web.seeother('/repos/download-tasks/')
return render.repo_sync(error_msg=None,
repo_already_exists=False,
repo_id=inputs.repo_id,
relay_id=inputs.relay_id,
token=token,
relay_addr=relay_addr,
relay_port=relay_port,
repo_name=repo_name,
worktree='',
encrypted=inputs.encrypted,
magic=inputs.magic,
email=email,
**default_options)
def POST(self):
inputs = web.webapi.input(relay_id='', token='',
relay_addr='', relay_port = '',
repo_id='', repo_name='',
encrypted='', password='', magic='',
worktree='', email='')
repo_id = inputs.repo_id.strip()
error_msg = None
if not inputs.worktree:
error_msg = _("You must choose a local directory")
        elif inputs.encrypted and not inputs.password:
            error_msg = _("Password cannot be empty")
        elif len(repo_id) != 36:
            error_msg = _("Invalid Repo ID")
if error_msg:
return render.repo_sync (error_msg=error_msg,
repo_already_exists=False,
repo_id=repo_id,
relay_id=inputs.relay_id,
relay_addr=inputs.relay_addr,
relay_port=inputs.relay_port,
token=inputs.token,
repo_name=inputs.repo_name,
encrypted=inputs.encrypted,
magic=inputs.magic,
worktree=inputs.worktree,
email=inputs.email,
**default_options)
if not inputs.password:
inputs.password = None
if not inputs.magic:
inputs.magic = None
try:
seafile_rpc.clone (repo_id, inputs.relay_id,
inputs.repo_name.encode('utf-8'),
inputs.worktree.encode('utf-8'),
inputs.token,
inputs.password,
inputs.magic,
inputs.relay_addr, inputs.relay_port, inputs.email)
except SearpcError as e:
if e.msg == 'Invalid local directory':
error_msg = _('Invalid local directory')
elif e.msg == 'Already in sync':
error_msg = _('The local directory you chose is in sync with another repo. Please choose another one.')
elif e.msg == 'Worktree conflicts system path':
error_msg = _('The local directory you chose cannot be under or includes a system directory of seafile.')
elif e.msg == 'Worktree conflicts existing repo':
error_msg = _('The local directory you chose cannot be under or includes another library.')
elif e.msg == 'Incorrect password':
error_msg = _('Incorrect password.')
else:
error_msg = _('Internal error.') + str(e)
if error_msg:
return render.repo_sync (error_msg=error_msg,
repo_already_exists=False,
repo_id=repo_id,
relay_id=inputs.relay_id,
relay_addr=inputs.relay_addr,
relay_port=inputs.relay_port,
token=inputs.token,
repo_name=inputs.repo_name,
encrypted=inputs.encrypted,
magic=inputs.magic,
worktree=inputs.worktree,
email=inputs.email,
**default_options)
raise web.seeother('/repos/download-tasks/')
class settings_page:
def GET(self):
current_prefs = get_current_prefs()
return render.settings(prefs=current_prefs, **default_options)
def POST(self):
current_prefs = get_current_prefs()
inputs = web.webapi.input(auto_start='off', notify_sync='off',
encrypt_channel='off',
upload_limit='', download_limit='')
applet_rpc.set_auto_start(inputs.auto_start)
if inputs.notify_sync != current_prefs['notify_sync']:
seafile_rpc.set_config('notify_sync', inputs.notify_sync)
if inputs.encrypt_channel != current_prefs['encrypt_channel']:
ccnet_rpc.set_config('encrypt_channel', inputs.encrypt_channel)
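        # Rate limits are entered in KB/s in the UI while the seafile RPC
        # takes bytes/s, hence the * 1024 conversions below; empty or invalid
        # input falls back to 0.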
if not inputs.upload_limit:
upload_limit = 0
else:
try:
upload_limit = int(inputs.upload_limit) * 1024
except:
upload_limit = 0
if not inputs.download_limit:
download_limit = 0
else:
try:
download_limit = int(inputs.download_limit) * 1024
except:
download_limit = 0
if upload_limit != current_prefs['upload_limit']:
seafile_rpc.set_upload_rate_limit(upload_limit)
if download_limit != current_prefs['download_limit']:
seafile_rpc.set_download_rate_limit(download_limit)
raise web.seeother('/settings/')
class procs:
def GET(self):
aprocs = ccnet_rpc.get_procs_alive(0, -1)
dprocs = ccnet_rpc.get_procs_dead(0, -1)
acnt = ccnet_rpc.count_procs_alive()
dcnt = ccnet_rpc.count_procs_dead()
return render.procs(aprocs=aprocs, dprocs=dprocs,
acnt=acnt, dcnt=dcnt, **default_options)
class i18n:
def GET(self):
global lang_in_use
if lang_in_use == 'zh_CN':
lang_in_use = 'en_US'
else:
lang_in_use = 'zh_CN'
gettext.translation('messages', localedir,
languages=[lang_in_use]).install(True)
seafile_rpc.set_config('lang_in_use', lang_in_use)
default_options['lang'] = lang_in_use
inputs = web.webapi.input(prev='/home/')
raise web.seeother(inputs.prev)
# for seahub repo download
class seafile_access_check:
"""For seahub to check whether local seafile is started when downloading a
repo. For a bug in the released server 0.9.5, here we need always return
2.
"""
def GET(self):
return 'xx(2)'
class seafile_rpc_version:
"""For the server to query current seafile client rpc version"""
def GET(self):
version = 1
return 'xx(%s)' % json.dumps(version)
class open_local_file:
"""
handle jsonp ajax cross domain 'open-local-file' request from seahub
"""
def GET(self):
inputs = web.webapi.input(repo_id='', path='', callback='', commit_id='')
repo_id, path, callback = inputs.repo_id, inputs.path.lstrip('/'), inputs.callback
d = {}
if not (repo_id and path and callback):
d['error'] = 'invalid request'
return '%s(%s)' % (inputs.callback, json.dumps(d))
try:
repo = get_repo(repo_id)
        except Exception as e:
d['error'] = str(e)
return '%s(%s)' % (inputs.callback, json.dumps(d))
else:
if not repo:
d['exists'] = False
return '%s(%s)' % (inputs.callback, json.dumps(d))
if inputs.commit_id:
if repo.head_cmmt_id != inputs.commit_id:
d['outdated'] = True
d['auto-sync'] = repo.auto_sync
return '%s(%s)' % (inputs.callback, json.dumps(d))
# ok, repo exists
file_path = os.path.join(repo.worktree, path)
uname = platform.platform()
err_msg = ''
if 'Windows' in uname:
try:
os.startfile(file_path)
            except WindowsError as e:
if e.winerror == 1155:
# windows error 1155: no default application for this file type
d['no_assoc'] = True
try:
# try to open the folder instead
os.startfile(os.path.dirname(file_path))
except:
pass
else:
err_msg = str(e)
elif 'Linux' in uname:
file_path = file_path.encode('utf-8')
try:
os.system('xdg-open "%s"' % file_path)
            except Exception as e:
err_msg = str(e)
elif 'Darwin' in uname:
            # On OS X, use `open` to launch the file with its default application.
file_path = file_path.encode('utf-8')
try:
os.system('open "%s"' % file_path)
            except Exception as e:
err_msg = str(e)
if err_msg:
d['error'] = err_msg
return '%s(%s)' % (inputs.callback, json.dumps(d))
if __name__ == "__main__":
app.run()
| gpl-3.0 | -8,629,200,800,224,864,000 | 36.420043 | 121 | 0.493276 | false |