repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (22 classes) | size (string, 4-7 chars) | content (string, 626-1.05M chars) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 5.21-99.9) | line_max (int64, 12-999) | alpha_frac (float64, 0.25-0.96) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
alexander-ae/sistema-de-cotizacion | quoman/quotes/models.py | 1 | 4975 |
from django.core.urlresolvers import reverse
from django.db import models
from . import constants
from products.models import Product
from users.models import User
from quoman.models import Config
class Quote(models.Model):
fecha_de_creacion = models.DateTimeField('Fecha de creación', auto_now_add=True)
propietario_id = models.ForeignKey(User, verbose_name='Propietario', blank=True, null=True,
on_delete=models.PROTECT)
aplica_detraccion = models.BooleanField('Aplica Detracción', default=False)
estado = models.CharField('Estado', max_length=24, choices=constants.COTIZACION_ESTADO,
default=constants.COTIZACION_PENDIENTE, blank=True)
codigo = models.CharField('Código', max_length=12, unique=True)
ruc = models.CharField('RUC', max_length=12, blank=True)
empresa_razon_social = models.CharField('Razón Social', max_length=180)
empresa_direccion = models.CharField('Dirección', max_length=180)
representante = models.CharField('Representante', max_length=96, blank=True)
empresa_telefono = models.CharField('Teléfono de la empresa', max_length=32, blank=True)
tiempo_de_entrega = models.CharField('Tiempo de Entrega', max_length=96, blank=True)
valida_hasta = models.CharField('Válida hasta', max_length=120, blank=True, default=None, null=True)
forma_de_pago = models.CharField('Forma de pago', max_length=180, blank=True)
costo_de_envio = models.DecimalField('Costo de envío', max_digits=6, decimal_places=2, default=0)
total = models.DecimalField('Total', max_digits=9, decimal_places=2, default=0)
igv = models.DecimalField('IGV', max_digits=3, decimal_places=1)
class Meta:
verbose_name = 'Cotización'
verbose_name_plural = 'Cotizaciones'
ordering = ['fecha_de_creacion']
def __str__(self):
return self.codigo
def save(self, *args, **kwargs):
self.total = self.calcula_total()
if not self.igv:
config, created = Config.objects.get_or_create(pk=1)
self.igv = config.igv
super(Quote, self).save(*args, **kwargs)
def get_absolute_url(self):
return reverse('quotes:edit', kwargs={'codigo': self.codigo})
def get_detail_url(self):
return reverse('quotes:detail', kwargs={'codigo': self.codigo})
def get_pdf_url(self):
return reverse('quotes:pdf', kwargs={'codigo': self.codigo})
def get_send_url(self):
return reverse('quotes:send_quote', kwargs={'codigo': self.codigo})
def calcula_subtotal_productos(self):
subtotal = 0
for producto in self.productos_a_cotizar.all():
subtotal = subtotal + producto.cantidad * producto.precio
return subtotal
def calcula_igv(self):
subtotal = self.calcula_subtotal_productos()
total_antes_igv = subtotal + self.costo_de_envio
return total_antes_igv * self.igv / 100
def calcula_total(self):
        ''' Includes the cost of the products, the shipping cost and IGV '''
subtotal = self.calcula_subtotal_productos()
total_antes_igv = subtotal + self.costo_de_envio
total = total_antes_igv + self.calcula_igv()
return total
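    # Worked example with illustrative figures (not from the project): a
    # products subtotal of 100.00 plus costo_de_envio of 10.00 gives a base
    # of 110.00; with igv = 18.0, calcula_total() returns 110.00 * 1.18 = 129.80.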
class QuoteReceiver(models.Model):
quote = models.ForeignKey(Quote, verbose_name='', on_delete=models.CASCADE)
nombres = models.CharField('Nombres', max_length=64, blank=True)
email = models.EmailField('Email')
class Meta:
verbose_name = 'Destinatario de la cotización'
verbose_name_plural = 'Destinatarios de la cotización'
def __str__(self):
return self.email
def full_email(self):
if self.nombres:
return '{}<{}>'.format(self.nombres, self.email)
return self.email
class QuoteProduct(models.Model):
quote = models.ForeignKey(Quote, verbose_name='Cotización', related_name='productos_a_cotizar')
producto = models.ForeignKey(Product, verbose_name='Producto', blank=True, null=True)
sku = models.CharField('SKU', max_length=32, help_text='Identificador único')
nombre = models.CharField('Nombre', max_length=64)
detalle = models.TextField('Detalle', blank=True)
precio = models.DecimalField('Precio', max_digits=12, decimal_places=2,
help_text='Precio en soles con dos decimales como máximo.')
cantidad = models.IntegerField('Cantidad', default=1)
subtotal = models.DecimalField('Subtotal', max_digits=9, decimal_places=2, default=0)
class Meta:
verbose_name = 'Producto a cotizar'
verbose_name_plural = 'Productos a cotizar'
unique_together = ('quote', 'sku')
def __str__(self):
return self.sku
def save(self, *args, **kwargs):
self.subtotal = self.calcula_subtotal()
super(QuoteProduct, self).save(*args, **kwargs)
def calcula_subtotal(self):
return self.precio * self.cantidad
| gpl-3.0 | -2,523,302,372,689,671,000 | 37.449612 | 104 | 0.663306 | false |
tcmichals/neo-fwloader | fwloader/fwloader.py | 1 | 6100 |
#!/usr/bin/python
import mmap
import struct
import sys
import ctypes
'''
TODO: PAGESIZE is not used, but it should be. When mmap is used
the offset must fall on a page boundary; the SRC and TCM base
addresses used below happen to be page-aligned already.
'''
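# A small helper sketched for the TODO above; mmap.PAGESIZE is the real
# constant, the helper name itself is our own invention.
def _is_page_aligned(address):
    """Return True if address falls on an mmap page boundary."""
    return address % mmap.PAGESIZE == 0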
VERSION = '0.0.1'
USAGE = '''fwloader: load M4 firmware and start.
Usage:
python fwloader.py [options] FILE [ FILE ... ]
Options:
-h, --help this help message.
-v, --version version info.
'''
"""
This is from imx-regs.h from u-boot
struct src {
u32 scr;
u32 sbmr1;
u32 srsr;
u32 reserved1[2];
u32 sisr;
u32 simr;
u32 sbmr2;
u32 gpr1;
u32 gpr2;
u32 gpr3;
u32 gpr4;
u32 gpr5;
u32 gpr6;
u32 gpr7;
u32 gpr8;
u32 gpr9;
u32 gpr10;
};
"""
class src(ctypes.Structure):
_fields_=[
("scr", ctypes.c_uint),
("sbmr1", ctypes.c_uint),
("srsr", ctypes.c_uint),
("reserved1" , ctypes.c_uint *2),
("sisr",ctypes.c_uint),
("simr",ctypes.c_uint),
("sbmr2",ctypes.c_uint),
("gpr1",ctypes.c_uint),
("gpr2",ctypes.c_uint),
("gpr3",ctypes.c_uint),
("gpr4",ctypes.c_uint),
("gpr5",ctypes.c_uint),
("gpr6",ctypes.c_uint),
("gpr7",ctypes.c_uint),
("gpr8",ctypes.c_uint),
("gpr9", ctypes.c_uint),
("gpr10", ctypes.c_uint),
]
SRC_BASE_ADDR = ((0x02000000 + 0x80000) + 0x58000)
'''
From the A9
0080_0000 008F_FFFF is SRAM TCMU 32 * 1024 (32K)
007F_8000 007F_FFFF is SRAM TCML 32 * 1024 (32K)
TCML is aliased to 0x0 -- 0x7FFF, so it is used for the
reset vector
'''
M4_BOOTROM_BASE_ADDR = 0x007F8000
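# A Cortex-M vector table starts with two 32-bit words: the initial stack
# pointer, then the reset handler address -- which is why set_stack_pc()
# below writes the stack at offset 0 and the pc at offset 4.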
def arch_auxiliary_core_check_up(core_id):
print("Size of src: ", ctypes.sizeof(src))
try:
with open("/dev/mem", "r+b") as fd:
mem =mmap.mmap(fd.fileno(),length =ctypes.sizeof(src),
flags=mmap.MAP_SHARED,
prot=mmap.PROT_WRITE | mmap.PROT_READ,
offset=SRC_BASE_ADDR)
mapped_src = src.from_buffer(mem)
if mapped_src.scr & 0x00000010:
return 0
return 1
    except Exception as ex:
        print("Something happened: ", str(ex))
        raise RuntimeError('could not open or map memory')
def set_stack_pc(pc,stack):
print("Size of src: ", ctypes.sizeof(src))
try:
with open("/dev/mem", "r+b") as fd:
mem =mmap.mmap(fd.fileno(),length =ctypes.sizeof(src),
flags=mmap.MAP_SHARED, prot=mmap.PROT_WRITE | mmap.PROT_READ,offset=M4_BOOTROM_BASE_ADDR)
print("set_stack_pc: write")
            # Cortex-M vector table: word 0 = initial SP, word 1 = reset PC
            mem[0:4] = struct.pack("<L", stack)
            mem[4:8] = struct.pack("<L", pc)
print("set_stack_pc: close")
mem.close()
return 0
    except Exception as ex:
        print("Something happened: ", str(ex))
        raise RuntimeError('could not open or map memory')
def reset_start_M4(start=False):
print("Size of src: ", ctypes.sizeof(src))
try:
with open("/dev/mem", "r+b") as fd:
mem =mmap.mmap(fd.fileno(),length =ctypes.sizeof(src),
flags=mmap.MAP_SHARED,
prot=mmap.PROT_WRITE | mmap.PROT_READ,
offset=SRC_BASE_ADDR)
            if start:
                # src.from_buffer() shares the mmap's memory, so these bit
                # operations write straight through to the SCR register
                mapped_src = src.from_buffer(mem)
                # assumed i.MX6SX SRC_SCR bits: 0x00400000 enables the M4,
                # 0x00000010 is its non-self-clearing reset
                mapped_src.scr |= 0x00400000
                mapped_src.scr &= ~0x00000010
            else:
                # hold the M4 core in reset
                mapped_src = src.from_buffer(mem)
                mapped_src.scr |= 0x00000010
            # release the ctypes view before closing; mmap.close() raises
            # BufferError while exported buffers still exist
            del mapped_src
            print("close mem")
            mem.close()
    except Exception as ex:
        print("Something happened: ", str(ex))
        raise RuntimeError('could not open or map memory')
return 0
'''
if pc and stack are both 0, don't load the PC or stack;
the code is then expected to sit at the reset vector, e.g. loaded as Intel hex
'''
def arch_auxiliary_core_up(core_id, pc=0, stack=0):
    # assumed bring-up sequence built from the helpers above: install the
    # vector table when given, then release the core from reset
    if pc or stack:
        set_stack_pc(pc, stack)
    return reset_start_M4(start=True)
def loadM4MemoryWithCode(address, data, length):
    if address % mmap.ALLOCATIONGRANULARITY:
        raise RuntimeError("Address is not aligned: " +
                           str(address) +
                           " boundary " +
                           str(mmap.ALLOCATIONGRANULARITY))
    try:
        with open("/dev/mem", "r+b") as fd:
            mem = mmap.mmap(fd.fileno(), length=length,
                            flags=mmap.MAP_SHARED,
                            prot=mmap.PROT_WRITE | mmap.PROT_READ,
                            offset=address)
            print("mem write")
            mem[0:length] = data
            print("Close mem")
            mem.close()
            return 0
    except Exception as ex:
        print("Something happened: ", str(ex))
        raise RuntimeError('could not open or map memory')
def main():
print("Arguments passed ", len(sys.argv))
try:
if arch_auxiliary_core_check_up(0):
print("M4 is running .. shutdown M4")
else:
print("Core is not running")
except RuntimeError as ex:
print("Error:", str(ex))
if __name__ == '__main__':
sys.exit(main())
| gpl-3.0 | -1,771,490,875,793,467,400 | 23.497992 | 116 | 0.516721 | false |
anilpai/leetcode | LinkedList/RotateLLKnodes.py | 1 | 1675 |
# Definition for singly-linked list.
import random
class ListNode(object):
"""
Singly Linked List Node with a value and next pointer
"""
def __init__(self, x=None, next=None):
"""
:param x: int, float
"""
self.val = x
self.next = next
class Solution(object):
"""
Solution to the problem of reversing singly linked list
"""
def RotateListKnodes(self, head, k):
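        """
        Rotate the list left by k nodes: 1->2->3->4->5 with k=2
        becomes 3->4->5->1->2 (assumes 0 <= k < list length).
        """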
position = 0
if k == position:
return head
prev, curr = None, head
while position < k:
prev, curr = curr, curr.next
position += 1
prev.next, prev = None, curr
while curr.next is not None:
curr = curr.next
curr.next = head
head = prev
return head
def createLinkedList(self, ll):
if len(ll) == 0:
return None
l = [ListNode(item) for item in ll]
head = l[0]
if len(l) == 1:
return head
prev = head
for item in l[1:]:
curr = item
prev.next, prev = curr, curr
return head
def printLinkedList(self, head):
while head:
print(str(head.val))
head = head.next
return
if __name__ == '__main__':
ll = random.sample(range(300), 6)
s = Solution()
head = s.createLinkedList(ll)
print("Before rotating: ")
s.printLinkedList(head)
k = 3
head = s.RotateListKnodes(head, k)
print("After rotating: ")
s.printLinkedList(head)
k = 2
head = s.RotateListKnodes(head, k)
print("After rotating: ")
s.printLinkedList(head) | mit | 5,360,164,379,546,595,000 | 18.717647 | 59 | 0.525373 | false |
abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/registry/tests/test_prf_walker.py | 1 | 21471 |
# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for lp.registry.scripts.productreleasefinder.walker."""
import logging
import StringIO
import urlparse
from lazr.restful.utils import safe_hasattr
from lp.registry.scripts.productreleasefinder.walker import WalkerBase
from lp.testing import (
reset_logging,
TestCase,
)
class WalkerBase_Logging(TestCase):
def testCreatesDefaultLogger(self):
"""WalkerBase creates a default logger."""
from logging import Logger
w = WalkerBase("/")
self.failUnless(isinstance(w.log, Logger))
def testCreatesChildLogger(self):
"""WalkerBase creates a child logger if given a parent."""
from logging import getLogger
parent = getLogger("foo")
w = WalkerBase("/", log_parent=parent)
self.assertEquals(w.log.parent, parent)
class WalkerBase_Base(TestCase):
def testSetsBase(self):
"""WalkerBase sets the base property."""
w = WalkerBase("ftp://localhost/")
self.assertEquals(w.base, "ftp://localhost/")
def testSetsScheme(self):
"""WalkerBase sets the scheme property."""
w = WalkerBase("ftp://localhost/")
self.assertEquals(w.scheme, "ftp")
def testSetsHost(self):
"""WalkerBase sets the host property."""
w = WalkerBase("ftp://localhost/")
self.assertEquals(w.host, "localhost")
def testNoScheme(self):
"""WalkerBase works when given a URL with no scheme."""
w = WalkerBase("/")
self.assertEquals(w.host, "")
def testWrongScheme(self):
"""WalkerBase raises WalkerError when given an unhandled scheme."""
from lp.registry.scripts.productreleasefinder.walker import (
WalkerBase, WalkerError)
self.assertRaises(WalkerError, WalkerBase, "foo://localhost/")
def testUnescapesHost(self):
"""WalkerBase unescapes the host portion."""
w = WalkerBase("ftp://local%40host/")
self.assertEquals(w.host, "local@host")
def testNoUsername(self):
"""WalkerBase stores None when there is no username."""
w = WalkerBase("ftp://localhost/")
self.assertEquals(w.user, None)
def testUsername(self):
"""WalkerBase splits out the username from the host portion."""
w = WalkerBase("ftp://scott@localhost/")
self.assertEquals(w.user, "scott")
self.assertEquals(w.host, "localhost")
def testUnescapesUsername(self):
"""WalkerBase unescapes the username portion."""
w = WalkerBase("ftp://scott%3awibble@localhost/")
self.assertEquals(w.user, "scott:wibble")
self.assertEquals(w.host, "localhost")
def testNoPassword(self):
"""WalkerBase stores None when there is no password."""
w = WalkerBase("ftp://scott@localhost/")
self.assertEquals(w.passwd, None)
def testPassword(self):
"""WalkerBase splits out the password from the username."""
w = WalkerBase("ftp://scott:wibble@localhost/")
self.assertEquals(w.user, "scott")
self.assertEquals(w.passwd, "wibble")
self.assertEquals(w.host, "localhost")
def testUnescapesPassword(self):
"""WalkerBase unescapes the password portion."""
w = WalkerBase("ftp://scott:wibble%20wobble@localhost/")
self.assertEquals(w.user, "scott")
self.assertEquals(w.passwd, "wibble wobble")
self.assertEquals(w.host, "localhost")
def testPathOnly(self):
"""WalkerBase stores the path if that's all there is."""
w = WalkerBase("/path/to/something/")
self.assertEquals(w.path, "/path/to/something/")
def testPathInUrl(self):
"""WalkerBase stores the path portion of a complete URL."""
w = WalkerBase("ftp://localhost/path/to/something/")
self.assertEquals(w.path, "/path/to/something/")
def testAddsSlashToPath(self):
"""WalkerBase adds a trailing slash to path if ommitted."""
w = WalkerBase("ftp://localhost/path/to/something")
self.assertEquals(w.path, "/path/to/something/")
def testUnescapesPath(self):
"""WalkerBase leaves the path escaped."""
w = WalkerBase("ftp://localhost/some%20thing/")
self.assertEquals(w.path, "/some%20thing/")
def testStoresQuery(self):
"""WalkerBase stores the query portion of a supporting URL."""
w = WalkerBase("http://localhost/?foo")
self.assertEquals(w.query, "foo")
def testStoresFragment(self):
"""WalkerBase stores the fragment portion of a supporting URL."""
WalkerBase.FRAGMENTS = True
try:
w = WalkerBase("http://localhost/#foo")
self.assertEquals(w.fragment, "foo")
finally:
WalkerBase.FRAGMENTS = False
class WalkerBase_walk(TestCase):
"""Test the walk() method."""
def tearDown(self):
reset_logging()
super(WalkerBase_walk, self).tearDown()
def test_walk_UnicodeEncodeError(self):
"""Verify that a UnicodeEncodeError is logged."""
class TestWalker(WalkerBase):
def list(self, sub_dir):
# Force the walker to handle an exception.
raise UnicodeEncodeError(
'utf-8', u'source text', 0, 1, 'reason')
def open(self):
pass
def close(self):
pass
log_output = StringIO.StringIO()
logger = logging.getLogger()
self.addCleanup(logger.setLevel, logger.level)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(log_output))
walker = TestWalker('http://example.org/foo', logger)
list(walker)
self.assertEqual(
"Unicode error parsing http://example.org/foo page '/foo/'\n",
log_output.getvalue())
def test_walk_open_fail(self):
# The walker handles an exception raised during open().
class TestWalker(WalkerBase):
def list(self, sub_dir):
pass
def open(self):
raise IOError("Test failure.")
def close(self):
pass
log_output = StringIO.StringIO()
logger = logging.getLogger()
self.addCleanup(logger.setLevel, logger.level)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(log_output))
walker = TestWalker('ftp://example.org/foo', logger)
list(walker)
self.assertEqual(
"Could not connect to ftp://example.org/foo\n"
"Failure: Test failure.\n",
log_output.getvalue())
class FTPWalker_Base(TestCase):
def testFtpScheme(self):
"""FTPWalker works when initialized with an ftp-scheme URL."""
from lp.registry.scripts.productreleasefinder.walker import (
FTPWalker)
w = FTPWalker("ftp://localhost/")
self.assertEquals(w.host, "localhost")
def testNoScheme(self):
"""FTPWalker works when given a URL with no scheme."""
from lp.registry.scripts.productreleasefinder.walker import (
FTPWalker)
w = FTPWalker("/")
self.assertEquals(w.host, "")
def testWrongScheme(self):
"""FTPWalker raises WalkerError when given an unhandled scheme."""
from lp.registry.scripts.productreleasefinder.walker import (
FTPWalker, WalkerError)
self.assertRaises(WalkerError, FTPWalker, "http://localhost/")
def testNoUsername(self):
"""FTPWalker stores 'anonymous' when there is no username."""
from lp.registry.scripts.productreleasefinder.walker import (
FTPWalker)
w = FTPWalker("ftp://localhost/")
self.assertEquals(w.user, "anonymous")
def testNoPassword(self):
"""FTPWalker stores empty string when there is no password."""
from lp.registry.scripts.productreleasefinder.walker import (
FTPWalker)
w = FTPWalker("ftp://scott@localhost/")
self.assertEquals(w.passwd, "")
class HTTPWalker_Base(TestCase):
def testHttpScheme(self):
"""HTTPWalker works when initialized with an http-scheme URL."""
from lp.registry.scripts.productreleasefinder.walker import (
HTTPWalker)
w = HTTPWalker("http://localhost/")
self.assertEquals(w.host, "localhost")
def testHttpsScheme(self):
"""HTTPWalker works when initialized with an https-scheme URL."""
from lp.registry.scripts.productreleasefinder.walker import (
HTTPWalker)
w = HTTPWalker("https://localhost/")
self.assertEquals(w.host, "localhost")
def testNoScheme(self):
"""HTTPWalker works when given a URL with no scheme."""
from lp.registry.scripts.productreleasefinder.walker import (
HTTPWalker)
w = HTTPWalker("/")
self.assertEquals(w.host, "")
def testWrongScheme(self):
"""HTTPWalker raises WalkerError when given an unhandled scheme."""
from lp.registry.scripts.productreleasefinder.walker import (
HTTPWalker, WalkerError)
self.assertRaises(WalkerError, HTTPWalker, "foo://localhost/")
class HTTPWalker_url_schemes_and_handlers(TestCase):
"""Verify there is a handler for each URL scheme."""
def setUp(self):
TestCase.setUp(self)
from lp.registry.scripts.productreleasefinder.walker import (
HTTPWalker)
self.walker = HTTPWalker("http://localhost/")
def verify_url_scheme_and_handler(self, scheme, handler):
self.assert_(scheme in self.walker.URL_SCHEMES)
self.assert_(handler in self.walker.handlers)
        # urllib2 uses a naming convention to select the handler for
        # a URL scheme.  This test is a sanity check to ensure that the
        # HTTPWalker's configuration of the OpenerDirector will work.
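        # e.g. scheme 'http' -> urllib2.HTTPHandler.http_open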
method_name = '%s_open' % scheme
self.assert_(safe_hasattr(handler, method_name))
def test_http_request(self):
import urllib2
self.verify_url_scheme_and_handler('http', urllib2.HTTPHandler)
def test_https_request(self):
import urllib2
self.verify_url_scheme_and_handler('https', urllib2.HTTPSHandler)
def test_ftp_request(self):
import urllib2
self.verify_url_scheme_and_handler('ftp', urllib2.FTPHandler)
class HTTPWalker_ListDir(TestCase):
def tearDown(self):
reset_logging()
super(HTTPWalker_ListDir, self).tearDown()
def setUpWalker(self, listing_url, listing_content):
from lp.registry.scripts.productreleasefinder.walker import (
HTTPWalker)
test = self
class TestHTTPWalker(HTTPWalker):
def request(self, method, path):
test.assertEqual(method, 'GET')
test.assertEqual(urlparse.urljoin(self.base, path),
listing_url)
return StringIO.StringIO(listing_content)
def isDirectory(self, path):
return path.endswith('/')
logging.basicConfig(level=logging.CRITICAL)
return TestHTTPWalker(listing_url, logging.getLogger())
def testApacheListing(self):
# Test that list() handles a standard Apache dir listing.
content = '''
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<html>
<head>
<title>Index of /pub/GNOME/sources/gnome-gpg/0.5</title>
</head>
<body>
<h1>Index of /pub/GNOME/sources/gnome-gpg/0.5</h1>
<pre><img src="/icons/blank.gif" alt="Icon " width="24" height="24"> <a href="?C=N;O=D">Name</a> <a href="?C=M;O=A">Last modified</a> <a href="?C=S;O=A">Size</a> <a href="?C=D;O=A">Description</a><hr><a href="/pub/GNOME/sources/gnome-gpg/"><img src="http://www.gnome.org/img/24x24/parent.png" alt="[DIR]" width="24" height="24"></a> <a href="/pub/GNOME/sources/gnome-gpg/">Parent Directory</a> -
<a href="LATEST-IS-0.5.0"><img src="http://www.gnome.org/img/24x24/default.png" alt="[ ]" width="24" height="24"></a> <a href="LATEST-IS-0.5.0">LATEST-IS-0.5.0</a> 02-Sep-2006 08:58 81K
<a href="gnome-gpg-0.5.0.md5sum"><img src="http://www.gnome.org/img/24x24/default.png" alt="[ ]" width="24" height="24"></a> <a href="gnome-gpg-0.5.0.md5sum">gnome-gpg-0.5.0.md5sum</a> 02-Sep-2006 08:58 115
<a href="gnome-gpg-0.5.0.tar.bz2"><img src="http://www.gnome.org/img/24x24/archive.png" alt="[ ]" width="24" height="24"></a> <a href="gnome-gpg-0.5.0.tar.bz2">gnome-gpg-0.5.0.tar.bz2</a> 02-Sep-2006 08:58 68K
<a href="gnome-gpg-0.5.0.tar.gz"><img src="http://www.gnome.org/img/24x24/archive.png" alt="[ ]" width="24" height="24"></a> <a href="gnome-gpg-0.5.0.tar.gz">gnome-gpg-0.5.0.tar.gz</a> 02-Sep-2006 08:58 81K
<hr></pre>
<address>Apache/2.2.3 (Unix) Server at <a href="mailto:[email protected]">ftp.acc.umu.se</a> Port 80</address>
</body></html>
'''
walker = self.setUpWalker(
'http://ftp.gnome.org/pub/GNOME/sources/gnome-gpg/0.5/', content)
dirnames, filenames = walker.list('/pub/GNOME/sources/gnome-gpg/0.5/')
self.assertEqual(dirnames, [])
self.assertEqual(filenames, ['LATEST-IS-0.5.0',
'gnome-gpg-0.5.0.md5sum',
'gnome-gpg-0.5.0.tar.bz2',
'gnome-gpg-0.5.0.tar.gz'])
def testSquidFtpListing(self):
# Test that a Squid FTP listing can be parsed.
content = '''
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- HTML listing generated by Squid 2.5.STABLE12 -->
<!-- Wed, 06 Sep 2006 11:04:02 GMT -->
<HTML><HEAD><TITLE>
FTP Directory: ftp://ftp.gnome.org/pub/GNOME/sources/gnome-gpg/0.5/
</TITLE>
<STYLE type="text/css"><!--BODY{background-color:#ffffff;font-family:verdana,sans-serif}--></STYLE>
</HEAD><BODY>
<H2>
FTP Directory: <A HREF="/">ftp://ftp.gnome.org</A>/<A HREF="/pub/">pub</A>/<A HREF="/pub/GNOME/">GNOME</A>/<A HREF="/pub/GNOME/sources/">sources</A>/<A HREF="/pub/GNOME/sources/gnome-gpg/">gnome-gpg</A>/<A HREF="/pub/GNOME/sources/gnome-gpg/0.5/">0.5</A>/</H2>
<PRE>
<A HREF="../"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-dirup.gif" ALT="[DIRUP]"></A> <A HREF="../">Parent Directory</A>
<A HREF="LATEST-IS-0.5.0"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-link.gif" ALT="[LINK]"></A> <A HREF="LATEST-IS-0.5.0">LATEST-IS-0.5.0</A>. . . . . . . . . Sep 02 07:07 <A HREF="LATEST-IS-0.5.0;type=a"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-text.gif" ALT="[VIEW]"></A> <A HREF="LATEST-IS-0.5.0;type=i"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-box.gif" ALT="[DOWNLOAD]"></A> -> <A HREF="gnome-gpg-0.5.0.tar.gz">gnome-gpg-0.5.0.tar.gz</A>
<A HREF="gnome-gpg-0.5.0.md5sum"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-unknown.gif" ALT="[FILE]"></A> <A HREF="gnome-gpg-0.5.0.md5sum">gnome-gpg-0.5.0.md5sum</A> . . . . . Sep 02 06:58 115 <A HREF="gnome-gpg-0.5.0.md5sum;type=a"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-text.gif" ALT="[VIEW]"></A> <A HREF="gnome-gpg-0.5.0.md5sum;type=i"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-box.gif" ALT="[DOWNLOAD]"></A>
<A HREF="gnome-gpg-0.5.0.tar.bz2"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-compressed.gif" ALT="[FILE]"></A> <A HREF="gnome-gpg-0.5.0.tar.bz2">gnome-gpg-0.5.0.tar.bz2</A>. . . . . Sep 02 06:58 68K <A HREF="gnome-gpg-0.5.0.tar.bz2;type=i"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-box.gif" ALT="[DOWNLOAD]"></A>
<A HREF="gnome-gpg-0.5.0.tar.gz"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-tar.gif" ALT="[FILE]"></A> <A HREF="gnome-gpg-0.5.0.tar.gz">gnome-gpg-0.5.0.tar.gz</A> . . . . . Sep 02 06:58 81K <A HREF="gnome-gpg-0.5.0.tar.gz;type=i"><IMG border="0" SRC="http://squid:3128/squid-internal-static/icons/anthony-box.gif" ALT="[DOWNLOAD]"></A>
</PRE>
<HR noshade size="1px">
<ADDRESS>
Generated Wed, 06 Sep 2006 11:04:02 GMT by squid (squid/2.5.STABLE12)
</ADDRESS></BODY></HTML>
'''
walker = self.setUpWalker(
'ftp://ftp.gnome.org/pub/GNOME/sources/gnome-gpg/0.5/', content)
dirnames, filenames = walker.list('/pub/GNOME/sources/gnome-gpg/0.5/')
self.assertEqual(dirnames, [])
self.assertEqual(filenames, ['LATEST-IS-0.5.0',
'gnome-gpg-0.5.0.md5sum',
'gnome-gpg-0.5.0.tar.bz2',
'gnome-gpg-0.5.0.tar.gz'])
def testNonAsciiListing(self):
# Test that list() handles non-ASCII output.
content = '''
<html>
<head>
<title>Listing</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
</head>
<body>
<p>A non-breaking space: \xc2\xa0</p>
<p><a href="/elsewhere">Somewhere else on the site</a></p>
<!-- intentionally unclosed anchor below -->
<p><a href="/foo/file99">Absolute path</p>
<pre>
<a href="../">Parent directory</a>
<a href="subdir1/">subdir 1</a>
<a href="subdir2/">subdir 2</a>
<a href="subdir3/">subdir 3</a>
<a href="file3">file 3</a>
<a href="file2">file 2</a>
<a href="file1">file 1</a>
</pre>
</html>
'''
walker = self.setUpWalker('http://example.com/foo/', content)
dirnames, filenames = walker.list('/foo/')
self.assertEqual(dirnames, ['subdir1/', 'subdir2/', 'subdir3/'])
self.assertEqual(filenames, ['file1', 'file2', 'file3', 'file99'])
def testDotPaths(self):
# Test that paths containing dots are handled correctly.
#
# We expect the returned directory and file names to only
# include those links http://example.com/foo/ even in the
# presence of "." and ".." path segments.
content = '''
<html>
<head>
<title>Listing</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
</head>
<body>
<pre>
<a href="../">Up a level</a>
<a href="/foo/../">The same again</a>
<a href="file1/../file2">file2</a>
<a href=".">This directory</a>
<a href="dir/.">A subdirectory</a>
</pre>
</html>
'''
walker = self.setUpWalker('http://example.com/foo/', content)
dirnames, filenames = walker.list('/foo/')
self.assertEqual(dirnames, ['dir/'])
self.assertEqual(filenames, ['file2'])
def testNamedAnchors(self):
# Test that the directory listing parser code handles named anchors.
# These are <a> tags without an href attribute.
content = '''
<html>
<head>
<title>Listing</title>
</head>
<body>
<a name="top"></a>
<pre>
<a href="file1">file1</a>
<a href="dir1/">dir1/</a>
<a href="#top">Go to top</a>
</pre>
</html>
'''
walker = self.setUpWalker('http://example.com/foo/', content)
dirnames, filenames = walker.list('/foo/')
self.assertEqual(dirnames, ['dir1/'])
self.assertEqual(filenames, ['file1'])
def testGarbageListing(self):
# Make sure that garbage doesn't trip up the dir lister.
content = '\x01\x02\x03\x00\xff\xf2\xablkjsdflkjsfkljfds'
walker = self.setUpWalker('http://example.com/foo/', content)
dirnames, filenames = walker.list('/foo/')
self.assertEqual(dirnames, [])
self.assertEqual(filenames, [])
class HTTPWalker_IsDirectory(TestCase):
def tearDown(self):
reset_logging()
super(HTTPWalker_IsDirectory, self).tearDown()
def testFtpIsDirectory(self):
# Test that no requests are made by isDirectory() when walking
# FTP sites.
from lp.registry.scripts.productreleasefinder.walker import (
HTTPWalker)
test = self
class TestHTTPWalker(HTTPWalker):
def request(self, method, path):
test.fail('%s was requested with method %s' % (path, method))
logging.basicConfig(level=logging.CRITICAL)
walker = TestHTTPWalker('ftp://ftp.gnome.org/', logging.getLogger())
self.assertEqual(walker.isDirectory('/foo/'), True)
self.assertEqual(walker.isDirectory('/foo'), False)
class Walker_CombineUrl(TestCase):
def testConstructsUrl(self):
"""combine_url constructs the URL correctly."""
from lp.registry.scripts.productreleasefinder.walker import (
combine_url)
self.assertEquals(combine_url("file:///base", "/subdir/", "file"),
"file:///subdir/file")
self.assertEquals(combine_url("file:///base", "/subdir", "file"),
"file:///subdir/file")
| agpl-3.0 | 5,881,352,907,865,783,000 | 41.182711 | 550 | 0.608076 | false |
iamshang1/Projects | Advanced_ML/Deep_Reinforcement/cartpole_dqn.py | 1 | 6091 |
import numpy as np
import tensorflow as tf
import gym
from collections import deque
import random
class agent(object):
def __init__(self,action_size=2,gamma=0.95,memory=10000):
'''
simple deep Q learning network for cartpole
'''
#params
self.explore_start = 1.0 # exploration probability at start
self.explore_stop = 0.01 # minimum exploration probability
self.decay_rate = 0.001 # exponential decay rate for exploration prob
self.gamma = gamma
self.action_size = action_size
self.total_steps = 0
#init experience replay memory
self.buffer = deque(maxlen=memory)
self.burn_in = 2000 # number of observations before training
#build DQN layers
self.state = tf.placeholder(tf.float32,shape=(None,4))
self.action = tf.placeholder(tf.float32,shape=(None,2))
self.target = tf.placeholder(tf.float32,shape=(None))
self.hidden1 = tf.layers.dense(self.state,36,activation=tf.nn.elu,
kernel_initializer=tf.contrib.layers.xavier_initializer())
self.hidden2 = tf.layers.dense(self.hidden1,36,activation=tf.nn.elu,
kernel_initializer=tf.contrib.layers.xavier_initializer())
self.output = tf.layers.dense(self.hidden2,2,
kernel_initializer=tf.contrib.layers.xavier_initializer())
self.Q_pred = tf.reduce_sum(tf.multiply(self.output,self.action),1)
#optimizer
self.loss = tf.reduce_mean(tf.losses.huber_loss(self.target,self.Q_pred))
self.optimizer = tf.train.AdamOptimizer(0.00005,0.9,0.99).minimize(self.loss)
#init op
self.init_op = tf.global_variables_initializer()
self.saver = tf.train.Saver()
self.sess = tf.Session()
self.sess.run(self.init_op)
def predict(self,stacked_state):
'''
predict next action given an input state
'''
#calculate explore probability
r = np.random.rand()
explore_prob = self.explore_stop + (self.explore_start - self.explore_stop) \
* np.exp(-self.decay_rate * self.total_steps)
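        # e.g. with decay_rate=0.001 this is ~1.0 at step 0 and about
        # 0.01 + 0.99 * exp(-1) ~= 0.37 after 1000 training steps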
#explore
if explore_prob > r:
action = np.random.randint(0,self.action_size)
#exploit
else:
d = {self.state:np.expand_dims(stacked_state,0)}
Qs = self.sess.run(self.output,feed_dict=d)
action = np.argmax(Qs)
return action,explore_prob
def train(self,batch_size=256):
'''
train model using samples from replay memory
'''
#fill up memory replay before training
if len(self.buffer) < self.burn_in:
return 0
self.total_steps +=1
#get batch
batch = self.sample_memory(batch_size)
states = np.array([each[0] for each in batch])
actions = np.array([each[1] for each in batch])
actions_onehot = [np.zeros(self.action_size) for i in range(len(batch))]
rewards = np.array([each[2] for each in batch])
next_states = np.array([each[3] for each in batch])
dones = np.array([each[4] for each in batch])
#calculate target Q values
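        # Bellman target: Q_target = r + gamma * max_a' Q(s', a'),
        # collapsing to just r on terminal transitions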
d = {self.state:next_states}
Qs_next_state = self.sess.run(self.output,feed_dict=d)
target_Qs_batch = []
for i in range(0, len(batch)):
actions_onehot[i][actions[i]] = 1
if dones[i]:
target_Qs_batch.append(rewards[i])
else:
target = rewards[i] + self.gamma * np.max(Qs_next_state[i])
target_Qs_batch.append(target)
actions_onehot = np.array(actions_onehot)
#train
targets = np.array([each for each in target_Qs_batch])
d = {self.state:states,self.target:targets,self.action:actions_onehot}
loss,_ = self.sess.run([self.loss,self.optimizer],feed_dict=d)
return loss
def add_memory(self,experience):
'''
add experience to replay memory
'''
self.buffer.append(experience)
def sample_memory(self,batch_size):
'''
create training batch from replay memory
'''
buffer_size = len(self.buffer)
idx = np.random.choice(np.arange(buffer_size),size=batch_size,replace=False)
return [self.buffer[i] for i in idx]
def save(self,filename):
self.saver.save(self.sess,filename)
def load(self,filename):
self.saver.restore(self.sess,filename)
#initialize environment
env = gym.make('CartPole-v1')
action_size = env.action_space.n
DQagent = agent(action_size=action_size)
viz = False
#run episodes
for ep in range(1000):
state = env.reset()
done = False
t = 0
episode_rewards = []
mean_loss = []
explore_prob = 1.0
#terminate episode when done
while not done and t < 200:
t += 1
#get next action
action,explore_prob = DQagent.predict(state)
new_state, reward, done, info = env.step(action)
episode_rewards.append(reward)
#visualize
if viz:
env.render('human')
#normalize rewards
reward /= 5
if done:
reward = -1
#update memory and train
DQagent.add_memory((state,action,reward,new_state,done))
state = new_state
mean_loss.append(DQagent.train())
#show episode results
loss = np.mean(mean_loss)
total_reward = np.sum(episode_rewards)
print('Episode: {}'.format(ep+1),
'Total reward: {}'.format(total_reward),
'Explore P: {:.4f}'.format(explore_prob),
'Training Loss {:.4f}'.format(loss))
#enable vizualization once model gets a perfect score
if total_reward >= 200:
viz = True
env.close() | mit | -566,953,904,573,196,900 | 32.472527 | 89 | 0.572812 | false |
santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/SQLAlchemy-0.4.3-py2.5.egg/sqlalchemy/sql/visitors.py | 1 | 3851 |
class ClauseVisitor(object):
"""Traverses and visits ``ClauseElement`` structures.
Calls visit_XXX() methods dynamically generated for each particular
``ClauseElement`` subclass encountered. Traversal of a
hierarchy of ``ClauseElements`` is achieved via the
``traverse()`` method, which is passed the lead
``ClauseElement``.
By default, ``ClauseVisitor`` traverses all elements
fully. Options can be specified at the class level via the
``__traverse_options__`` dictionary which will be passed
to the ``get_children()`` method of each ``ClauseElement``;
these options can indicate modifications to the set of
elements returned, such as to not return column collections
(column_collections=False) or to return Schema-level items
(schema_visitor=True).
``ClauseVisitor`` also supports a simultaneous copy-and-traverse
operation, which will produce a copy of a given ``ClauseElement``
structure while at the same time allowing ``ClauseVisitor`` subclasses
to modify the new structure in-place.
"""
__traverse_options__ = {}
def traverse_single(self, obj, **kwargs):
meth = getattr(self, "visit_%s" % obj.__visit_name__, None)
if meth:
return meth(obj, **kwargs)
def traverse_chained(self, obj, **kwargs):
v = self
while v is not None:
meth = getattr(self, "visit_%s" % obj.__visit_name__, None)
if meth:
meth(obj, **kwargs)
v = getattr(v, '_next', None)
def iterate(self, obj):
stack = [obj]
traversal = []
while len(stack) > 0:
t = stack.pop()
yield t
traversal.insert(0, t)
for c in t.get_children(**self.__traverse_options__):
stack.append(c)
def traverse(self, obj, clone=False):
if clone:
cloned = {}
def do_clone(obj):
# the full traversal will only make a clone of a particular element
# once.
if obj not in cloned:
cloned[obj] = obj._clone()
return cloned[obj]
obj = do_clone(obj)
stack = [obj]
traversal = []
while len(stack) > 0:
t = stack.pop()
traversal.insert(0, t)
if clone:
t._copy_internals(clone=do_clone)
for c in t.get_children(**self.__traverse_options__):
stack.append(c)
for target in traversal:
v = self
while v is not None:
meth = getattr(v, "visit_%s" % target.__visit_name__, None)
if meth:
meth(target)
v = getattr(v, '_next', None)
return obj
def chain(self, visitor):
"""'chain' an additional ClauseVisitor onto this ClauseVisitor.
the chained visitor will receive all visit events after this one."""
tail = self
while getattr(tail, '_next', None) is not None:
tail = tail._next
tail._next = visitor
return self
class NoColumnVisitor(ClauseVisitor):
"""ClauseVisitor with 'column_collections' set to False; will not
traverse the front-facing Column collections on Table, Alias, Select,
and CompoundSelect objects.
"""
__traverse_options__ = {'column_collections':False}
def traverse(clause, **kwargs):
clone = kwargs.pop('clone', False)
class Vis(ClauseVisitor):
__traverse_options__ = kwargs.pop('traverse_options', {})
def __getattr__(self, key):
if key in kwargs:
return kwargs[key]
else:
return None
return Vis().traverse(clause, clone=clone)
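# Usage sketch (assumes only the ClauseVisitor API defined above; the table
# layout and visitor class are invented for illustration).
if __name__ == '__main__':
    from sqlalchemy import MetaData, Table, Column, Integer, String, select

    class ColumnNames(ClauseVisitor):
        """Collect the name of every column element encountered."""
        def __init__(self):
            self.names = []

        def visit_column(self, column):
            self.names.append(column.name)

    users = Table('users', MetaData(),
                  Column('id', Integer), Column('name', String(40)))
    v = ColumnNames()
    v.traverse(select([users.c.id, users.c.name]))
    print(v.names)  # order/duplicates depend on the traversal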
| bsd-3-clause | -3,608,080,513,703,661,000 | 34.330275 | 83 | 0.564269 | false |
micgro42/SaneDecayRescaling | sanedecayrescaling/tests/test_extract_decays.py | 1 | 9772 |
import os
import sanedecayrescaling
from pytest import fixture
import pytest
@fixture # Registering this function as a fixture.
def fixture_source(request):
print "start setup"
source_decay_file=open('source_decay_file.dec.tmp','w')
source_decay_file.write("some lines at start\n")
source_decay_file.write("Decay B0\n")
source_decay_file.write("some lines 1\n")
source_decay_file.write("some lines 2\n")
source_decay_file.write("some lines 3\n")
source_decay_file.write("some lines 4\n")
source_decay_file.write("some lines 5\n")
source_decay_file.write("some lines 6\n")
source_decay_file.write("Enddecay\n")
source_decay_file.write("some lines at end\n")
source_decay_file.write("Decay D*+\n")
source_decay_file.write("0.6770 D0 pi+ VSS;\n")
source_decay_file.write("0.3070 D+ pi0 VSS;\n")
source_decay_file.write("0.0160 D+ gamma VSP_PWAVE;\n")
source_decay_file.write("Enddecay\n")
source_decay_file.write("last line of the file")
source_decay_file.close()
reference_file=open('reference_file.tmp','w')
reference_file.writelines(["=============\n",
"| D*(2007)0 |\n",
"=============\n",
"I(JP) = 1/2(1-) I, J, P need confirmation.\n",
" Mass m = 2006.98+-0.15 MeV\n",
" mass(D*0) - mass(D0) = 142.12+-0.07 MeV\n",
"Full width Gamma < 2.1 MeV, CL = 90%\n",
" \n",
" Dbar*(2007)0 modes are charge conjugates of modes below.\n",
"D*(2007)0 DECAY MODES Fraction (G(i)/G) p (MeV/c)\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
"D0 pi0 (61.9+-2.9)% 43\n",
"D0 gamma (38.1+-2.9)% 137\n",
" \n",
"=============================================================================\n",
"==============\n",
"| D*(2010)+- |\n",
"==============\n",
"I(JP) = 1/2(1-) I, J, P need confirmation.\n",
" Mass m = 2010.28+-0.13 MeV\n",
" mass(D*(2010)+) - mass(D+) = 140.66+-0.10 MeV(S = 1.1)\n",
" mass(D*(2010)+) - mass(D0) = 145.421+-0.010 MeV(S = 1.1)\n",
" Full width Gamma = 96+-22 keV\n",
" \n",
" D*(2010)- modes are charge conjugates of the modes below.\n",
"D*(2010)+- DECAY MODES Fraction (G(i)/G) p (MeV/c)\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
" \n",
"D0 pi+ (67.7+-0.5)% 39\n",
"D+ pi0 (30.7+-0.5)% 38\n",
"D+ gamma ( 1.6+-0.4)% 136\n",
" \n",
"=============================================================================\n",
"================\n",
"| D(0)*(2400)0 |\n",
"================\n",
"I(JP) = 1/2(0+)\n",
" Mass m = 2318+-29 MeV(S = 1.7)\n",
" Full width Gamma = 267+-40 MeV\n",
" \n",
"D(0)*(2400)0 DECAY MODES Fraction (G(i)/G) p (MeV/c)\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
" \n",
"D+ pi- seen 385\n",
" \n",
"============================================================================="])
reference_file.close()
@request.addfinalizer
def tearDown():
print "TEAR DOWN!"
os.remove("source_decay_file.dec.tmp")
if os.path.isfile("workfile.tmp"):
os.remove("workfile.tmp")
os.remove('reference_file.tmp')
def test_successfully_extracting_decay(fixture_source):
assert sanedecayrescaling.extract_decays.extract_decays_from_decay('source_decay_file.dec.tmp', "B0") == 0
workfile=open("workfile.tmp",'r')
for i, line in enumerate(workfile):
if i == 0:
assert line == 'Decay B0\n'
elif i == 7:
assert line == 'Enddecay\n'
elif i > 7:
break
def test_decay_not_found(fixture_source):
with pytest.raises(SystemExit) as cm:
sanedecayrescaling.extract_decays.extract_decays_from_decay('source_decay_file.dec.tmp', "B+")
ex = cm.value
assert ex.code == os.EX_DATAERR # SystemExit should be os.EX_DATAERR!
    assert not os.path.isfile("workfile.tmp") # workfile.tmp must not have been created
def test_decayfile_not_found(fixture_source):
with pytest.raises(SystemExit) as cm:
sanedecayrescaling.extract_decays.extract_decays_from_decay('Xsource_decay_file.dec.tmp', "B0")
ex = cm.value
assert ex.code == os.EX_IOERR # SystemExit should be os.EX_IOERR!
    assert not os.path.isfile("workfile.tmp") # workfile.tmp must not have been created
def test_particle_name_incomplete(fixture_source):
with pytest.raises(SystemExit) as cm:
sanedecayrescaling.extract_decays.extract_decays_from_decay('source_decay_file.dec.tmp', "B")
ex = cm.value
assert ex.code == os.EX_DATAERR # SystemExit should be os.EX_DATAERR!
    assert not os.path.isfile("workfile.tmp") # workfile.tmp must not have been created
def test_extract_decays_from_reference(fixture_source):
sanedecayrescaling.extract_decays.extract_decays_from_reference('reference_file.tmp','D*(2010)+-')
reference_file = open('workreffile.tmp','r')
reference_file_lines = reference_file.readlines()
assert reference_file_lines[0] == 'Decay D*+-\n'
assert reference_file_lines[1] == '0.677 0.005 0.005 D0 pi+\n'
assert reference_file_lines[2] == '0.307 0.005 0.005 D+ pi0\n'
assert reference_file_lines[3] == '0.016 0.004 0.004 D+ gamma\n'
assert reference_file_lines[4] == 'Enddecay\n'
os.remove("workreffile.tmp")
# see D+
def test_extract_decays_from_reference_missing_blank_line():
reference_file = open('reference_file.tmp','w');
reference_file.writelines([
" Scale factor/ p\n",
"D+ DECAY MODES Fraction (G(i)/G) CL(MeV\\\n",
" /c)\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
" \n",
" Inclusive modes\n",
" \n",
"eta'(958) e+ nu(e) ( 2.2 +-0.5 )E-4 689\n",
"phi e+ nu(e) < 9 E-5 CL=90% 657\n",
" Fractions of some of the following modes with resonances have\n",
" already appeared above as submodes of particular\n",
" charged-particle modes.\n",
" \n",
"Kbar*(892)0 e+ nu(e) ( 5.52+-0.15 )% 722\n",
" \n",
"=============================================================================\n",
"======\n",
"| D0 |\n",
"======\n"])
reference_file.close()
sanedecayrescaling.extract_decays.extract_decays_from_reference('reference_file.tmp','D+')
work_ref_file = open('workreffile.tmp','r')
work_ref_file_lines = work_ref_file.readlines()
assert work_ref_file_lines[0] == 'Decay D+\n'
assert work_ref_file_lines[1] == "0.00022 5e-05 5e-05 eta' e+ nu_e\n"
assert work_ref_file_lines[2] == "9e-05 0.0 0.0 phi e+ nu_e\n"
assert work_ref_file_lines[3] == "0.0552 0.0015 0.0015 anti-K*0 e+ nu_e\n"
assert work_ref_file_lines[4] == "Enddecay\n"
# see D0
def test_extract_decays_from_reference_two_lines_missing_blank_line():
reference_file = open('reference_file.tmp','w');
reference_file.writelines([
" Scale factor/ p\n",
"D+ DECAY MODES Fraction (G(i)/G) CL(MeV\\\n",
" /c)\n",
"- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n",
" \n",
" Inclusive modes\n",
" \n",
"eta'(958) e+ nu(e) ( 2.2 +-0.5 )E-4 689\n",
"K- 3pi+ 2pi- ( 2.2 +- 0.6 \ 713\n",
" )E-4\n",
" Fractions of many of the following modes with resonances have\n",
" \n",
"Kbar*(892)0 e+ nu(e) ( 5.52+-0.15 )% 722\n",
" \n",
"=============================================================================\n",
"======\n",
"| D0 |\n",
"======\n"])
reference_file.close()
sanedecayrescaling.extract_decays.extract_decays_from_reference('reference_file.tmp','D+')
work_ref_file = open('workreffile.tmp','r')
work_ref_file_lines = work_ref_file.readlines()
assert work_ref_file_lines[0] == 'Decay D+\n'
assert work_ref_file_lines[1] == "0.00022 5e-05 5e-05 eta' e+ nu_e\n"
assert work_ref_file_lines[2] == "0.00022 6e-05 6e-05 K- pi+ pi+ pi+ pi- pi-\n"
assert work_ref_file_lines[3] == "0.0552 0.0015 0.0015 anti-K*0 e+ nu_e\n"
assert work_ref_file_lines[4] == "Enddecay\n"
def test_decay_found(fixture_source):
ref_file = open('reference_file.tmp','r')
file_position_begin_decay, linenumber_begin_decay, decay_length = sanedecayrescaling.extract_decays.find_particle_in_reference('D*(2007)0', ref_file)
assert file_position_begin_decay == 352
assert linenumber_begin_decay == 10
assert decay_length == 1
#the functionality for this test is not yet implemented
# def test_decay_special_name_found(fixture_source):
# ref_file = open('reference_file.tmp','r')
# start, end = sanedecayrescaling.extract_decays.find_particle_in_reference('D*(2010)+', ref_file)
# assert start == 10
# assert end == 15
if __name__ == '__main__':
pytest.main()
| gpl-2.0 | -7,649,275,424,793,038,000 | 44.240741 | 153 | 0.498056 | false |
Pandaaaa906/ChemErpSystem | ERP/migrations/0015_auto_20170324_1009.py | 1 | 3125 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-24 02:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
#('auth', '0009_erpuser'),
('ERP', '0014_auto_20170323_2354'),
]
operations = [
migrations.CreateModel(
name='ModelFormTemplate',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('order_id', models.PositiveIntegerField(default=999, verbose_name='\u4f18\u5148\u987a\u5e8f')),
('template', models.TextField(verbose_name='\u6a21\u7248')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='auth.Group', verbose_name='\u6743\u9650\u5206\u7ec4')),
('model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType', verbose_name='\u6a21\u578b')),
],
options={
'verbose_name': '\u6a21\u578b\u8868\u5355\u6a21\u7248',
},
),
migrations.CreateModel(
name='ModelGridToolbar',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
options={
'verbose_name': '\u6a21\u578b\u8868\u5355\u5de5\u5177\u680f',
},
),
migrations.AlterModelOptions(
name='modelgridtoolbarbutton',
options={'verbose_name': '\u6a21\u578b\u8868\u683c\u5de5\u5177\u680f\u6309\u94ae'},
),
migrations.AlterField(
model_name='modelgridfield',
name='group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='auth.Group', verbose_name='\u6743\u9650\u5206\u7ec4'),
),
migrations.AlterField(
model_name='sidebaritem',
name='icon',
field=models.CharField(blank=True, choices=[('icon-page', '\u9875\u9762'), ('icon-save', '\u4fdd\u5b58'), ('icon-folder', '\u6587\u4ef6\u5939'), ('icon-folder', '\u6dfb\u52a0')], default='icon-page', max_length=200, verbose_name='\u56fe\u6807'),
),
migrations.AddField(
model_name='modelgridtoolbar',
name='buttons',
field=models.ManyToManyField(to='ERP.ModelGridToolbarButton', verbose_name='\u6309\u94ae\u7ec4\u5408'),
),
migrations.AddField(
model_name='modelgridtoolbar',
name='group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='auth.Group', verbose_name='\u6743\u9650\u5206\u7ec4'),
),
migrations.AddField(
model_name='modelgridtoolbar',
name='model',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType', verbose_name='\u6a21\u578b'),
),
]
| apache-2.0 | 1,161,541,605,528,770,600 | 44.289855 | 257 | 0.59488 | false |
weso/CWR-DataApi | tests/parser/dictionary/decoder/record/test_work_origin.py | 1 | 1959 |
# -*- coding: utf-8 -*-
import unittest
from cwr.parser.decoder.dictionary import WorkOriginDictionaryDecoder
from cwr.other import VISAN
"""
Dictionary to Message decoding tests.
The following cases are tested:
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__status__ = 'Development'
class TestWorkOriginDictionaryDecoder(unittest.TestCase):
def setUp(self):
self._decoder = WorkOriginDictionaryDecoder()
def test_encoded(self):
data = {}
data['record_type'] = 'ORN'
data['transaction_sequence_n'] = 3
data['record_sequence_n'] = 15
data['intended_purpose'] = 'PURPOSE'
data['production_title'] = 'TITLE'
data['cd_identifier'] = 'ID134'
data['cut_number'] = 5
data['library'] = 'LIB467'
data['bltvr'] = 'BLTVR'
data['visan'] = 1234567123456789121231
data['production_n'] = 'PROD145'
data['episode_title'] = 'EPISODE'
data['episode_n'] = 'EP145'
data['year_production'] = 1994
data['audio_visual_key'] = 'KEY'
record = self._decoder.decode(data)
self.assertEqual('ORN', record.record_type)
self.assertEqual(3, record.transaction_sequence_n)
self.assertEqual(15, record.record_sequence_n)
self.assertEqual('PURPOSE', record.intended_purpose)
self.assertEqual('TITLE', record.production_title)
self.assertEqual('ID134', record.cd_identifier)
self.assertEqual(5, record.cut_number)
self.assertEqual('LIB467', record.library)
self.assertEqual('BLTVR', record.bltvr)
self.assertEqual(1234567123456789121231, record.visan)
self.assertEqual('PROD145', record.production_n)
self.assertEqual('EPISODE', record.episode_title)
self.assertEqual('EP145', record.episode_n)
self.assertEqual(1994, record.year_production)
self.assertEqual('KEY', record.audio_visual_key)
| mit | -3,769,773,485,179,675,000 | 32.758621 | 69 | 0.643514 | false |
ramineni/my_congress | congress_tempest_tests/tests/scenario/congress_datasources/test_ceilometer.py | 1 | 3098 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config
from tempest.lib.common.utils import test_utils
from tempest.lib import exceptions
from tempest import test
from congress_tempest_tests.tests.scenario import manager_congress
CONF = config.CONF
class TestCeilometerDriver(manager_congress.ScenarioPolicyBase):
@classmethod
def skip_checks(cls):
super(TestCeilometerDriver, cls).skip_checks()
if not getattr(CONF.service_available, 'ceilometer', False):
msg = ("%s skipped as ceilometer is not available" %
cls.__class__.__name__)
raise cls.skipException(msg)
def setUp(cls):
super(TestCeilometerDriver, cls).setUp()
cls.telemetry_client = cls.admin_manager.telemetry_client
cls.datasource_id = manager_congress.get_datasource_id(
cls.admin_manager.congress_client, 'ceilometer')
@test.attr(type='smoke')
def test_ceilometer_meters_table(self):
meter_schema = (
self.admin_manager.congress_client.show_datasource_table_schema(
self.datasource_id, 'meters')['columns'])
meter_id_col = next(i for i, c in enumerate(meter_schema)
if c['name'] == 'meter_id')
def _check_data_table_ceilometer_meters():
            # Fetch data from ceilometer each time, because this test may
            # start before ceilometer has collected all the meters.
meters = self.telemetry_client.list_meters()
meter_map = {}
for meter in meters:
meter_map[meter['meter_id']] = meter
results = (
self.admin_manager.congress_client.list_datasource_rows(
self.datasource_id, 'meters'))
for row in results['results']:
try:
meter_row = meter_map[row['data'][meter_id_col]]
except KeyError:
return False
for index in range(len(meter_schema)):
if (str(row['data'][index]) !=
str(meter_row[meter_schema[index]['name']])):
return False
return True
if not test_utils.call_until_true(
func=_check_data_table_ceilometer_meters,
duration=100, sleep_for=5):
raise exceptions.TimeoutException("Data did not converge in time "
"or failure in server")
| apache-2.0 | -1,850,053,867,921,899,300 | 39.233766 | 79 | 0.607489 | false |
txtbits/daw-python | ficheros/CSV/prueba csv (lm)/alumnos.py | 1 | 1510 |
# -*- coding: utf-8 -*-
'''
Created on 02/12/2011
@author: chra
'''
import csv
from operator import itemgetter
# ----- Function: mean of a student's grades ----------
def media(alumno):
    # returns the average grade from a dictionary with a student's data
nota1 = int(alumno['Nota1'])
nota2 = int(alumno.get('Nota2'))
nota3 = int(alumno.get('Nota3'))
return (nota1+nota2+nota3) / 3.
# ----------------------------------------------------------
fin = open('alumnos.csv')
lector = csv.DictReader(fin, delimiter=",") # delimiter defaults to the comma // returns dictionaries
# lector = csv.reader(fin, delimiter=",") <-- returns lists
alumnos = []
for linea in lector:
alumnos.append((linea['Alumno'], media(linea)))
# -------- Sort by student name -----------
alumnos.sort()
print 'Sorted by student name'
for al in alumnos:
print "%-10s %6.2f" % al #10 espacios entre cadena (nombre - nota media) y permite 6 digitos, 2 de ellos decimales.
# --------------------------------------------------
# --------- Sort by grade -----------------------
print '\nSorted by grade'
alumnos.sort(key=itemgetter(1),reverse=True)
for al in alumnos:
print "%-10s %6.2f" % al
#---------------------------------------------------
# Create the file 'lista_ordenada_notas.csv' and write the list sorted by grade
fw = open('lista_ordenada_notas.csv', 'w')
csvwriter = csv.writer(fw)
for al in alumnos:
csvwriter.writerow(al)
fw.close() | mit | -935,177,956,870,283,900 | 28.607843 | 120 | 0.579192 | false |
uva-its/awstools | maws.py | 1 | 1100 |
#!/usr/bin/python3
import argparse
import sys
from mawslib.manager import Manager
import importlib
configfile="cloudconfig.yaml"
parser = argparse.ArgumentParser(
#add_help=False,
description='AWS Manager',
usage='''maws [<options>] <command> <subcommand> [<args>]
For help:
maws help
maws <command> help
maws <command> <subcommand> help
''')
parser.add_argument('command', help='Command to run',
choices = ['help', 'ec2', 'sdb', 'route53', 'r53', 'rds',
'cloudformation', 'cfn' ])
parser.add_argument('--config',
help='alternate config file to use (default: cloudconfig.yaml)',
action="store")
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args, subargs = parser.parse_known_args()
if hasattr(args, "config"): configfile = args.config
mgr = Manager(configfile)
mgr.showname()
if args.command == "cfn": args.command = "cloudformation"
if args.command == "r53": args.command = "route53"
cli_mod = importlib.import_module("cli.%s_cli" % args.command)
cli_mod.processCommand(mgr, subargs)
| mit | -6,308,816,211,156,035,000 | 27.947368 | 68 | 0.697273 | false |
lorien/grab | tests/grab_error.py | 2 | 1460 |
from unittest import TestCase
import mock
from six import StringIO
from grab import GrabNetworkError
from grab.util.warning import warn
from tests.util import NON_ROUTABLE_IP, build_grab, only_grab_transport
class GrabErrorTestCase(TestCase):
def test_warn(self):
out = StringIO()
with mock.patch('sys.stderr', out):
warn('abc')
self.assertTrue('GrabDeprecationWarning: abc' in out.getvalue())
@only_grab_transport('pycurl')
def test_original_exceptions_grab(self):
import pycurl
grab = build_grab()
try:
grab.go('http://%s' % NON_ROUTABLE_IP)
except GrabNetworkError as ex: # pylint: disable=broad-except
self.assertTrue(isinstance(ex.original_exc, pycurl.error))
@only_grab_transport('urllib3')
def test_original_exceptions_urllib2(self):
from urllib3.exceptions import ConnectTimeoutError
grab = build_grab()
try:
grab.go('http://%s' % NON_ROUTABLE_IP)
except GrabNetworkError as ex: # pylint: disable=broad-except
self.assertTrue(isinstance(ex.original_exc, ConnectTimeoutError))
def test_attribute_exception(self):
grab = build_grab()
self.assertTrue(grab.exception is None)
try:
grab.go('http://%s' % NON_ROUTABLE_IP)
except GrabNetworkError:
pass
self.assertTrue(isinstance(grab.exception, GrabNetworkError))
| mit | 2,088,099,408,612,192,500 | 32.181818 | 77 | 0.65274 | false |
jimrybarski/fylm | tests/model/timestamp.py | 1 | 2372 |
import unittest
from fylm.model.timestamp import Timestamps
class MockExperiment(object):
def __init__(self):
self.data_dir = "/tmp/"
self.fields_of_view = [1, 2]
self.time_periods = [1, 2]
self.base_path = None
self.field_of_view_count = 2
class TimestampsTests(unittest.TestCase):
def setUp(self):
self.t = Timestamps()
def test_parse_line(self):
index, timestamp = self.t._parse_line("238 4.5356246")
self.assertEqual(index, 238)
self.assertAlmostEqual(timestamp, 4.5356246)
def test_parse_line_invalid(self):
with self.assertRaises(AttributeError):
index, timestamp = self.t._parse_line("238")
def test_parse_line_empty(self):
with self.assertRaises(AttributeError):
index, timestamp = self.t._parse_line("")
def test_load(self):
data = ["1 2.222", "2 4.444", "3 8.888"]
self.t.load(data)
expected = {1: 2.222, 2: 4.444, 3: 8.888}
self.assertDictEqual(self.t._timestamps, expected)
def test_load_one_bad(self):
data = ["1 2.222", "2 4.444", "Horrendous Surprise!", "3 8.888"]
self.t.load(data)
expected = {1: 2.222, 2: 4.444, 3: 8.888}
self.assertDictEqual(self.t._timestamps, expected)
def test_load_one_invalid_type(self):
data = ["1 2.222", "2 Fourpointfourseconds", "3 8.888"]
self.t.load(data)
expected = {1: 2.222, 3: 8.888}
self.assertDictEqual(self.t._timestamps, expected)
def test_lines(self):
self.t._timestamps = {3: 8.888, 1: 2.222, 2: 4.444}
lines = sorted(list(self.t.lines))
self.assertListEqual(["1 2.222", "2 4.444", "3 8.888"], lines)
def test_add(self):
self.t.add("2.222")
self.t.add("4.444")
self.t.add("8.888")
expected = {1: 2.222, 2: 4.444, 3: 8.888}
self.assertDictEqual(self.t._timestamps, expected)
self.t.add("12.222")
expected = {1: 2.222, 2: 4.444, 3: 8.888, 4: 12.222}
self.assertDictEqual(self.t._timestamps, expected)
def test_last(self):
self.t._timestamps = {3: 8.888, 1: 2.222, 2: 4.444}
self.assertEqual(self.t.last, 8.888)
def test_last_none(self):
self.t._timestamps = {}
with self.assertRaises(ValueError):
last = self.t.last | mit | -4,187,316,977,040,479,700 | 32.422535 | 72 | 0.581788 | false |
SUSE/kiwi | kiwi/builder/container.py | 1 | 6089 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import logging
import os
# project
from kiwi.container import ContainerImage
from kiwi.container.setup import ContainerSetup
from kiwi.system.setup import SystemSetup
from kiwi.system.result import Result
from kiwi.utils.checksum import Checksum
from kiwi.defaults import Defaults
from kiwi.exceptions import KiwiContainerBuilderError
from kiwi.runtime_config import RuntimeConfig
log = logging.getLogger('kiwi')
class ContainerBuilder:
"""
**Container image builder**
:param object xml_state: Instance of :class:`XMLState`
:param str target_dir: target directory path name
:param str root_dir: root directory path name
:param dict custom_args: Custom processing arguments defined as hash keys:
* xz_options: string of XZ compression parameters
"""
def __init__(self, xml_state, target_dir, root_dir, custom_args=None):
self.custom_args = custom_args or {}
self.root_dir = root_dir
self.target_dir = target_dir
self.container_config = xml_state.get_container_config()
self.requested_container_type = xml_state.get_build_type_name()
self.base_image = None
self.base_image_md5 = None
self.container_config['xz_options'] = \
self.custom_args.get('xz_options')
self.container_config['metadata_path'] = \
xml_state.build_type.get_metadata_path()
if xml_state.get_derived_from_image_uri():
# The base image is expected to be unpacked by the kiwi
# prepare step and stored inside of the root_dir/image directory.
# In addition a md5 file of the image is expected too
self.base_image = Defaults.get_imported_root_image(
self.root_dir
)
self.base_image_md5 = ''.join([self.base_image, '.md5'])
if not os.path.exists(self.base_image):
raise KiwiContainerBuilderError(
'Unpacked Base image {0} not found'.format(
self.base_image
)
)
if not os.path.exists(self.base_image_md5):
raise KiwiContainerBuilderError(
                    'Base image MD5 sum {0} not found'.format(
self.base_image_md5
)
)
self.system_setup = SystemSetup(
xml_state=xml_state, root_dir=self.root_dir
)
self.filename = ''.join(
[
target_dir, '/',
xml_state.xml_data.get_name(),
'.' + Defaults.get_platform_name(),
'-' + xml_state.get_image_version(),
'.', self.requested_container_type,
'.tar' if self.requested_container_type != 'appx' else ''
]
)
self.result = Result(xml_state)
self.runtime_config = RuntimeConfig()
def create(self):
"""
Builds a container image which is usually a data archive
including container specific metadata.
Image types which triggers this builder are:
* image="docker"
* image="oci"
* image="appx"
:return: result
:rtype: instance of :class:`Result`
"""
if not self.base_image:
log.info(
'Setting up %s container', self.requested_container_type
)
container_setup = ContainerSetup.new(
self.requested_container_type, self.root_dir,
self.container_config
)
container_setup.setup()
else:
checksum = Checksum(self.base_image)
if not checksum.matches(checksum.md5(), self.base_image_md5):
raise KiwiContainerBuilderError(
'base image file {0} checksum validation failed'.format(
self.base_image
)
)
log.info(
'--> Creating container image'
)
container_image = ContainerImage.new(
self.requested_container_type, self.root_dir, self.container_config
)
self.filename = container_image.create(
self.filename, self.base_image
)
Result.verify_image_size(
self.runtime_config.get_max_size_constraint(),
self.filename
)
self.result.add(
key='container',
filename=self.filename,
use_for_bundle=True,
compress=False,
shasum=True
)
self.result.add(
key='image_packages',
filename=self.system_setup.export_package_list(
self.target_dir
),
use_for_bundle=True,
compress=False,
shasum=False
)
self.result.add(
key='image_changes',
filename=self.system_setup.export_package_changes(
self.target_dir
),
use_for_bundle=True,
compress=True,
shasum=False
)
self.result.add(
key='image_verified',
filename=self.system_setup.export_package_verification(
self.target_dir
),
use_for_bundle=True,
compress=False,
shasum=False
)
return self.result
| gpl-3.0 | -1,654,388,339,811,904,800 | 33.40113 | 79 | 0.576121 | false |
CodethinkLabs/software-dependency-visualizer | import/callgraph-parser.py | 1 | 5092 | #!/usr/bin/env python3
# This is meant to scan a directory for files called 'calls.*' which
# are in a proprietary text format showing the calls between objects.
# One call file represents one 'package' which may contain several
# sections representing object files (.o files in the build). Those
# sections contain symbol information and the calls between them.
#
# The script also requires an index file called "alldefs_sorted_uniq"
# which maps symbol names to packages. Both this file and the calls
# file are generated from proprietary data, so we can't describe here
# how to create them.
from __future__ import print_function
import os
import re
import sys
import yaml
# demangles SunPro mangled names - partially. Arguments aren't parsed yet.
def demangle(mangled):
if mangled.startswith("__1c"):
# Looks like a SunPro compiler name
		ptr = 4
		names = []
while True:
if ptr > len(mangled)-1: break
lengthCharacter = mangled[ptr]
if ord(lengthCharacter) > ord("A") and ord(lengthCharacter) <= ord("Z"):
				symlen = ord(lengthCharacter) - ord('A') + 1
names.append(mangled[ptr+1:ptr+symlen])
ptr += symlen
else:
break
return "::".join(names)
else:
return mangled
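
# Worked example (illustrative input): demangle("__1cDabcDdef") walks the
# length-prefixed components ('D' encodes a length of 4, i.e. the prefix
# character plus three name characters) and returns "abc::def"; strings not
# starting with "__1c" are returned unchanged.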
# Process a single package (call.* file) and create a package object.
def processPackage(filename, directory):
global index
packageName = filename
if packageName.startswith("calls."): packageName = packageName[6:]
package = { '@id': "id:"+packageName,
'@type': "sw:Package",
'name': packageName }
print("Processing module "+packageName, file=sys.stderr)
f = open(filename, "rt")
objectName = "unknown object"
objectSymbols = None
objectYaml = None
package['contains'] = []
while True:
l = f.readline()
if l == "": break
		m = re.match(r'^(\w+)\.o$', l) # A new object (dot escaped so only ".o" matches)
if(m):
objectName = m.group(1)
# New object
objectYaml = { '@id': "id:"+packageName+":"+objectName,
'@type': "sw:Object",
'name': objectName,
'contains': []
}
objectSymbols = {}
package['contains'].append(objectYaml)
		m = re.match(r'^(\w+) ([uU]),(\w+)$', l) # A symbol call
if(m):
called = m.group(1)
symbolType = m.group(2)
caller = demangle(m.group(3))
# Look up an existing symbol object for the caller, or create one.
if caller in objectSymbols:
symbolYaml = objectSymbols[caller]
else:
symbolYaml = { '@id': "id:"+packageName+":"+objectName+":"+caller,
'@type': "sw:Symbol",
'name': caller,
'calls': [] }
objectYaml['contains'].append(symbolYaml)
objectSymbols[caller] = symbolYaml
# A lowercase 'u' means locally undefined. This symbol is defined inside this
# object, so we have no problem specifying the call destination.
if symbolType == "u":
callDest = packageName+":"+objectName+":"+demangle(called)
else:
# Otherwise, it's generally undefined, so this would normally be up to the
# linker to find the called object. We'll need to look it up in our index.
if called not in index:
callDest = "NULL:"+demangle(called)
else:
packageObjectName = index[called]
callDest = packageObjectName+":"+demangle(called)
symbolYaml['calls'].append("id:"+callDest)
# Empty 'contains' fields cause problems, so delete them
if package['contains'] == []: del package['contains']
return package
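
# Illustrative shape of one emitted symbol node (all names made up):
# {'@id': 'id:libfoo:bar:baz', '@type': 'sw:Symbol', 'name': 'baz',
#  'calls': ['id:libqux:quux:corge']}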
def scanFile(directory, filename):
package = processPackage(filename, os.path.join(directory, filename))
of = open(filename+".yaml", "wt")
of.write(yaml.dump({ "@context": ["http://localhost:8000/context.jsonld"],
"@graph": package }))
of.close()
def scanDirectory(directory):
print("Scanning %s"%directory, file=sys.stderr)
files = os.listdir(directory)
for f in files:
if f.startswith("calls.") and not f.endswith(".yaml"):
scanFile(directory, f)
def main():
global index
# Load the symbol directory
indexfile = open("alldefs_sorted_uniq")
index = {}
while True:
l = indexfile.readline()
if l == "": break
(symbol, objectName, libraryName) = l.split(":")
index[symbol]= "%s:%s"%(libraryName.strip(),objectName.strip())
if len(sys.argv) > 1:
if os.path.isdir(sys.argv[1]):
scanDirectory(sys.argv[1])
else:
scanFile(".",sys.argv[1])
else:
scanDirectory(".")
if __name__=="__main__":
main()
| apache-2.0 | 1,753,138,279,019,987,000 | 34.859155 | 90 | 0.562844 | false |
afronski/grammar-generator | grammar-generator/Tests/PopulatorsTest.py | 1 | 3427 | import unittest
from Elements.G.Populators.OnlyEntriesPopulator import OnlyEntriesPopulator
from Elements.G.Populators.ParameterListPopulator import ParameterListPopulator
class TestClass(unittest.TestCase):
def setUpEntriesPopulator(self, name, templates = { "Name" : "" }, settings = {}):
self.entriesPopulator = OnlyEntriesPopulator(name, templates, settings)
def setUpParameterListPopulator(self,
name,
templates = { "Name" : "" },
settings = { "ModulePrefix" : "" }):
self.listPopulator = ParameterListPopulator(name, templates, settings)
def test_for_entries_only_populator(self):
self.setUpEntriesPopulator("Name")
assert self.entriesPopulator.getType() == "CreateEntriesOnly", "Invalid type for entries populator."
def test_for_entries_population(self):
self.setUpEntriesPopulator(
"Name_Name_Template-Test",
{
"Template-Test" : "${Temp}_${Test}:\n${Test}\n;"
},
{
"ModuleLongName" : "",
"ModuleShortName" : "",
"ModulePrefix" : "",
"Template" :
{
"A" :
{
"Name" : "",
"Temp" : "users",
"Test" : "A"
}
}
})
assert self.entriesPopulator.populate() == "users_A:\nA\n;", "Invalid template resolving"
def test_for_many_entries_population(self):
self.setUpEntriesPopulator(
"Name_Name_Template-Test",
{
"Template-Test" : "${Temp}_${Test}:\n${Test}\n;"
},
{
"ModuleLongName" : "",
"ModuleShortName" : "",
"ModulePrefix" : "",
"Template" :
{
"A" :
{
"Name" : "",
"Temp" : "users",
"Test" : "A"
},
"B" :
{
"Name" : "",
"Temp" : "users",
"Test" : "B"
}
}
})
assert self.entriesPopulator.populate() == "users_A:\nA\n;,\nusers_B:\nB\n;"
def test_for_missing_template_for_entries_population(self):
try:
self.setUpEntriesPopulator(
"Name_Name_Template-Test",
{
"OtherTemplate" : ""
},
{
"Module" : {}
})
self.fail("No exception for missing template in dictionary.")
except AttributeError:
pass
def test_for_simple_creation_list_populator(self):
self.setUpParameterListPopulator("Name-Test")
assert self.listPopulator.getType() == "CreateParametersListWithName", "Invalid type name."
def test_for_param_list_population(self):
self.setUpParameterListPopulator(
"Name_Name_TemplateParameters-Test",
{
"TemplateParameters" : "${Temp}_${Test}:\n${Test}"
},
{
"ModuleLongName" : "",
"ModuleShortName" : "",
"ModulePrefix" : "users",
"TemplateParameters" :
{
"A" :
{
"Name" : "",
"Temp" : "users",
"Test" : "A"
}
}
})
assert self.listPopulator.populate() == "users_test:\nusers_A:\nA\n;", "Invalid template resolving"
def test_for_many_param_list_population(self):
self.setUpParameterListPopulator(
"Name_Name_TemplateParameters-Test",
{
"TemplateParameters" : "${Temp}_${Test}:\n${Test}"
},
{
"ModuleLongName" : "",
"ModuleShortName" : "",
"ModulePrefix" : "users",
"TemplateParameters" :
{
"A" :
{
"Name" : "",
"Temp" : "users",
"Test" : "A"
},
"B" :
{
"Name" : "",
"Temp" : "users",
"Test" : "B"
}
}
})
assert self.listPopulator.populate() == "users_test:\nusers_A:\nA|\nusers_B:\nB\n;" | mit | -1,056,892,937,051,613,400 | 23.485714 | 102 | 0.575139 | false |
SAGES-UCSC/Photometry | simple_test.py | 1 | 1683 | '''
AUTHOR:
Alexa Villaume, UCSC
PURPOSE:
A program demonstrating how to use the geom_utils and phot_utils packages.
INPUT PARAMETERS:
A photometry source catalog
The equation of a line for the color cut
OUTPUT:
A catalog of of GC candidates
NOTES:
'''
import numpy as np
import phot_utils as pu
import geom_utils as gu
import Sources as S
# For color selection
# Have the slope, y-intercept, and endpoints from M87 data
# This is for <u-z> vs <g-z> space.
b = -0.086
m = 0.50
x0 = 1.5
x1 = 3.0
var = 0.3 # This controls how strict the color cut is
catalog = open("n4459_cfht_ugiz_auto.cat", "r")
catalog.next() # This is to skip the header
# This reads in the catalog and initalizes each line as a
# CFHT source
sources = map(lambda line: S.CFHTSource(line), catalog)
## Make the color cut
candidates = filter(lambda s: pu.makeColorCut(s.mag1, s.mag4, s.mag2, s.mag4, x0, x1, m, b, var), sources)
# Finds the value of a_world that seems to "contain" the point-like sources
shape = map(lambda s: s.a_world, candidates)
'''
Zach and I use the x-value that seems to contain the gaussian
distribution as the cut off. Not sure how well detSizeCut is
going to work for different data sets so at the moment it's good to
take a look for youself.
'''
pu.LookAtShapes(shape, 1000)
peak = pu.detSizeCut(shape, 1000)
print "Estimated peak: ", peak
# Make shape cut based on value found in previous step
candidates = filter(lambda s: s.a_world <= peak, candidates)
# Just to demonstrate how to use this function...
corrected = pu.correctMag(candidates, 5)
#output = open("GC_Candidates.txt", "w")
#for source in candidates:
# output.write(source.line)
| mit | 2,018,581,001,355,435,800 | 24.119403 | 106 | 0.717172 | false |
uwdb/Cosette | run_calcite_examples.py | 1 | 1292 | """
run calcite examples in batch
"""
import solver
import os.path
import json
import gen_cos_files
def run_calcite_examples(write=False):
""" run calcite examples """
calcite_path = "./examples/calcite/"
# get already generated rules, since some of them may be edited
generated_rules = {}
for filename in os.listdir(calcite_path):
if filename.endswith(".cos"):
case_name = filename[:-4]
with open(calcite_path+filename, 'r') as source_file:
cos = source_file.read()
generated_rules[case_name] = cos
# run all the rule from the json file
with open(calcite_path+'calcite_tests.json') as input_file:
calcite_rules = json.load(input_file)
for rule in calcite_rules:
rname = rule["name"]
if rname in generated_rules:
cos = generated_rules[rname]
else:
cos = gen_cos_files.gen_cos_source(rule["q1"], rule["q2"])
if write: # generate .cos file
with open("{}{}.cos".format(calcite_path, rname), 'w') as ofile:
ofile.write(cos)
result = json.loads(solver.solve(cos))
print "{},{}".format(rname, result["result"])
if __name__ == '__main__':
run_calcite_examples(write=True)
| bsd-2-clause | -2,538,266,779,622,439,400 | 30.512195 | 80 | 0.589783 | false |
its0x08/IPTV_attack_tool | gatherURLs.py | 1 | 1374 | try:
from requests import get
from urlparse import urlparse
from duckduckgo import search
from sys import argv
except ImportError as e:
print str(e)
def extractUrls(dorks):
temp = []
for dork in open(dorks, "r").readlines():
for link in search(dork.strip(), max_results=400):
if link not in temp:
temp.append(link)
return temp
def checkUrls(urls):
temp = []
for url in urls:
url = urlparse(url.strip())[1]
if url not in temp:
temp.append(url)
print "[i] Found %s in total." % (len(temp))
return temp
def aliveOrNot(urls):
temp = []
print "[*] Hunting URLs for Admin panel"
for url in urls:
try:
if "Xtream Codes</a>" in get("http://%s/" % (url), timeout=10).text:
print "\t{%s} Panel found on URL -->> http://%s/" % (len(temp+1),url)
temp.append(url)
except Exception as e:
# print "\tNo Panel found -->> http://%s/" %(url)
pass
print "[i] %s of them are alive!" % (len(temp))
f = open("urls.txt", "a+")
for url in temp:
f.write("http://%s/\n" %(url))
f.close()
if __name__ == '__main__':
try:
dorks = argv[1]
aliveOrNot(checkUrls(extractUrls(dorks)))
except Exception as e:
print "Error\n%s" % (str(e))
| gpl-3.0 | 223,682,472,958,928,580 | 27.040816 | 86 | 0.534207 | false |
OneDrive/onedrive-sdk-python | src/onedrivesdk/model/open_with_app.py | 1 | 2826 | # -*- coding: utf-8 -*-
'''
# Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
#
# This file was generated and any changes will be overwritten.
'''
from __future__ import unicode_literals
from ..model.identity import Identity
from ..one_drive_object_base import OneDriveObjectBase
class OpenWithApp(OneDriveObjectBase):
def __init__(self, prop_dict=None):
self._prop_dict = prop_dict if prop_dict is not None else {}
@property
def app(self):
"""
Gets and sets the app
Returns:
:class:`Identity<onedrivesdk.model.identity.Identity>`:
The app
"""
if "app" in self._prop_dict:
if isinstance(self._prop_dict["app"], OneDriveObjectBase):
return self._prop_dict["app"]
else :
self._prop_dict["app"] = Identity(self._prop_dict["app"])
return self._prop_dict["app"]
return None
@app.setter
def app(self, val):
self._prop_dict["app"] = val
@property
def view_url(self):
"""Gets and sets the viewUrl
Returns:
str:
The viewUrl
"""
if "viewUrl" in self._prop_dict:
return self._prop_dict["viewUrl"]
else:
return None
@view_url.setter
def view_url(self, val):
self._prop_dict["viewUrl"] = val
@property
def edit_url(self):
"""Gets and sets the editUrl
Returns:
str:
The editUrl
"""
if "editUrl" in self._prop_dict:
return self._prop_dict["editUrl"]
else:
return None
@edit_url.setter
def edit_url(self, val):
self._prop_dict["editUrl"] = val
@property
def view_post_parameters(self):
"""Gets and sets the viewPostParameters
Returns:
str:
The viewPostParameters
"""
if "viewPostParameters" in self._prop_dict:
return self._prop_dict["viewPostParameters"]
else:
return None
@view_post_parameters.setter
def view_post_parameters(self, val):
self._prop_dict["viewPostParameters"] = val
@property
def edit_post_parameters(self):
"""Gets and sets the editPostParameters
Returns:
str:
The editPostParameters
"""
if "editPostParameters" in self._prop_dict:
return self._prop_dict["editPostParameters"]
else:
return None
@edit_post_parameters.setter
def edit_post_parameters(self, val):
self._prop_dict["editPostParameters"] = val
| mit | -7,186,371,526,504,117,000 | 25.660377 | 151 | 0.553079 | false |
jeffcole/gluefun | gluefun/tasks.py | 1 | 3511 | """
Module to hold Celery tasks for the gluefun app.
"""
# This allows division to output floats by default.
from __future__ import division
import logging
from celery import task
from models import ScoredFriend, TaskCompletion
logger = logging.getLogger('gluefun.custom')
LIKED_ACTIONS = ('Checkin', 'Favorited', 'Liked', 'Saved')
DISLIKED_ACTIONS = ('Disliked', 'Unwanted')
ACTIONS = LIKED_ACTIONS + DISLIKED_ACTIONS
@task()
def compute_friend_scores(client):
"""
This task performs the work of making requests for data to GetGlue via the
pre-authorized GlueClient (client) parameter, scoring each of the user's
friends according to a comparison of their likes and dislikes, and saving
the scored data in ScoredFriend objects.
"""
friends = client.get_friends()
friends.remove(u'getglue')
# We're only interested in movies and TV for the time being.
objects = client.get_objects('movies')
objects.extend(client.get_objects('tv_shows'))
completion = TaskCompletion.objects.create(
task_id=compute_friend_scores.request.id)
total_friends = len(friends)
if total_friends == 0:
completion.percent_complete = 100
completion.save()
for friend_count, friend in enumerate(friends):
score = 0
both_liked, both_disliked, object_titles = [], [], []
for obj in objects:
try:
my_action = obj['action']
if my_action not in ACTIONS:
continue
object_key = obj['objectKey']
object_title = obj['title']
"""
Maintain a list of object titles, and only query each friend
for each object once. Objects can appear more than once due to
the different action types (Liked, Checkin, etc.).
"""
if object_title not in object_titles:
friend_action = client.get_user_object_action(friend,
object_key)
                    if friend_action:
                        # Both liked the object: agreement.
                        if liked(my_action) and liked(friend_action):
                            score += 1
                            both_liked.append(obj['title'])
                        # Both disliked the object: agreement as well.
                        elif disliked(my_action) and disliked(friend_action):
                            score += 1
                            both_disliked.append(obj['title'])
                        # User judgement is opposite of the friend's.
                        else:
                            score -= 1
object_titles.append(object_title)
except KeyError:
pass
"""
We maintain a single object per user/friend combination. An alternative
would be to create new objects for each task run.
"""
scored_friend, created = ScoredFriend.objects.get_or_create(
user_name=client.user_id,
friend_name=friend)
scored_friend.score = score
scored_friend.both_liked = both_liked
scored_friend.both_disliked = both_disliked
scored_friend.save()
# Update task completion.
percent_complete = int(((friend_count + 1) / total_friends) * 100)
completion.percent_complete = percent_complete
completion.save()
def liked(action):
return action in LIKED_ACTIONS
def disliked(action):
return action in DISLIKED_ACTIONS
| isc | 9,080,895,113,997,794,000 | 36.351064 | 80 | 0.580746 | false |
bernardotorres/bus-catcher | setup.py | 1 | 1694 | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='bus-catcher',
version='0.1',
packages=['buslinesscrapper'],
include_package_data=True,
license='GPL v2.0',
description='Displays Florianopolis bus lines timetables as stem-and-leaf plot',
long_description=README,
url='https://github.com/bernardotorres/bus-catcher',
author='Bernardo Torres',
author_email='[email protected]',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=[
'Scrapy==0.24.4',
'Twisted==14.0.2',
'cffi==0.8.6',
'cryptography==0.7',
'cssselect==0.9.1',
'enum34==1.0.4',
'lxml==3.4.1',
'pyOpenSSL==0.14',
'pyasn1==0.1.7',
'pycparser==2.10',
'queuelib==1.2.2',
'six==1.8.0',
'w3lib==1.10.0',
'wsgiref==0.1.2',
'zope.interface==4.1.1',
],
)
| gpl-2.0 | -5,021,029,553,312,018,000 | 30.962264 | 84 | 0.585596 | false |
chiara-paci/baskerville | baskervilleweb/bibliography/management/commands/load_magazine.py | 1 | 1497 | #! /usr/bin/python
# -*- coding: utf-8 -*-
import re,time,datetime,sys
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ObjectDoesNotExist
from django.conf import settings
import django.core.files
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from bibliography.models import Pubblication,Volume,IssueType,Issue
class Command(BaseCommand):
args = '<file_elenco>'
    help = 'Load magazine issues'
def handle(self, *args, **options):
comic_type=IssueType.objects.get(label="magazine")
elenco=args[0]
lista=[]
fd=open(elenco,"r")
for l in fd.readlines():
            # the catalog is opened in text mode, so each line is already a str
            l = l.strip()
if not l: continue
t=[x.strip() for x in l.split("|")]
if len(t)!=5:
print(t)
continue
issn=t[0].strip()
vol=t[1].strip()
issn_num=t[2].strip()
num=t[3].strip()
day=t[4].strip()
pub_obj=Pubblication.objects.get(issn=issn)
vol_obj=Volume.objects.get(pubblication=pub_obj,label=vol)
issue,created=Issue.objects.get_or_create(volume=vol_obj,issue_type=comic_type,issn_num=issn_num,
number=num,date=day)
if created:
print("Created: ",issue)
continue
fd.close()
| gpl-3.0 | 4,148,187,215,045,427,700 | 28.352941 | 109 | 0.573814 | false |
MyRookie/SentimentAnalyse | src/Analyse/AFX.py | 1 | 2182 | from nltk.corpus import wordnet
import json
import os
punctuation = [',','.','?','!',';']
SentimentShifter = {}
Conjunction = {}
#Check whether the string is a word
def isWord(s):
if s[0] >= 'a' and s[0] <= 'z':
return True;
return False;
# extract a field ('Word', 'PoS' or 'Tag') from a tagged-word triple
def GetWord(word,Cmd):
return {
'Word':word[0],
'PoS':word[1],
'Tag':word[2],
}.get(Cmd,None)
# split a WordNet synset name like "dog.n.01" into [word, pos, number]
def WordNetInfo(synset):
return unicode(synset).split('.')
# convert a Penn Treebank part-of-speech tag to WordNet format
def PoSConvent(PoS):
if PoS.startswith('J'):
return wordnet.ADJ
elif PoS.startswith('V'):
return wordnet.VERB
elif PoS.startswith('N'):
return wordnet.NOUN
elif PoS.startswith('R'):
return wordnet.ADV
else:
return PoS
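
# e.g. PoSConvent('JJ') -> wordnet.ADJ and PoSConvent('VBD') -> wordnet.VERB;
# tags that match none of the prefixes are passed through unchanged.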
#Check if the PoS of word in document match the selected gloss
def GetWordNetPoS(word,notation,PoS):
for i in wordnet.synsets(word):
if WordNetInfo(i.name())[0] == word and WordNetInfo(i.name())[2] == notation and PoS == i.pos():
return True
return False
#read json file
def ConventJson():
	if not os.path.exists('../Profile/SentimentShifter.json'):
print "File Not Found"
return None
#open the SentimentShifter.json to get the negation and increment
with open('../Profile/SentimentShifter.json','r') as f:
data = json.load(f)
negation = data.get('negation', None)
if negation is not None:
for element in data['negation']:
for key in element:
SentimentShifter[key] = "Neg"
increment = data.get('increment', None)
if increment is not None:
for element in data['increment']:
for key in element:
SentimentShifter[key] = "Inc"
downtoner = data.get('downtoner',None)
if downtoner is not None:
for element in data['downtoner']:
for key in element:
SentimentShifter[key] = "Dow"
	if not os.path.exists('../Profile/Conjunction.json'):
print "File Not Found"
return None
#open the Conjunction.json to get the conjunctions
with open('../Profile/Conjunction.json','r') as f:
data = json.load(f)
conjunction = data.get('conjunction',None)
if conjunction is not None:
for element in data['conjunction']:
for key in element:
Conjunction[key] = element[key]
| mit | -5,854,149,236,082,704,000 | 22.717391 | 98 | 0.681027 | false |
McStasMcXtrace/McCode | tools/Python/mcresplot/tas.py | 1 | 41056 | #!/usr/bin/env python3
#
# calculates TAS angles from rlu
# @author Tobias Weber <[email protected]>
# @date 1-aug-18
# @license GNU GPLv3
# @descr This tool comes from Takin 2: https://dx.doi.org/10.5281/zenodo.4117437
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
try:
import numpy as np
import numpy.linalg as la
except ImportError:
print("Numpy could not be imported!")
exit(-1)
use_scipy = False
# -----------------------------------------------------------------------------
# rotate a vector around an axis using Rodrigues' formula
# see https://en.wikipedia.org/wiki/Rodrigues%27_rotation_formula
def rotate(_axis, vec, phi):
axis = _axis / la.norm(_axis)
s = np.sin(phi)
c = np.cos(phi)
return c*vec + (1.-c)*np.dot(vec, axis)*axis + s*np.cross(axis, vec)
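
# quick sanity check (illustrative values): a quarter turn about z maps x
# onto y, i.e. rotate(np.array([0.,0.,1.]), np.array([1.,0.,0.]), np.pi/2.)
# gives approximately [0., 1., 0.].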
# get metric from crystal B matrix
# basis vectors are in the columns of B, i.e. the second index
# see T. Arens et al., "Mathematik", 2015, ISBN: 978-3-642-44919-2, p. 815
def get_metric(B):
#return np.einsum("ij,ik -> jk", B, B)
return np.dot(np.transpose(B), B)
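
# e.g. for an orthorhombic cell B = diag(2pi/a, 2pi/b, 2pi/c), so the metric
# is diag((2pi/a)**2, (2pi/b)**2, (2pi/c)**2) and dot() below yields the
# usual 1/A lengths of rlu vectors.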
# cross product in fractional coordinates: c^l = eps_ijk g^li a^j b^k
# see T. Arens et al., "Mathematik", 2015, ISBN: 978-3-642-44919-2, p. 815
def cross(a, b, B):
# levi-civita in fractional coordinates
def levi(i,j,k, B):
M = np.array([B[:,i], B[:,j], B[:,k]])
return la.det(M)
metric_inv = la.inv(get_metric(B))
eps = [[[ levi(i,j,k, B) for k in range(0,3) ] for j in range(0,3) ] for i in range(0,3) ]
return np.einsum("ijk,j,k,li -> l", eps, a, b, metric_inv)
# dot product in fractional coordinates
# see T. Arens et al., "Mathematik", 2015, ISBN: 978-3-642-44919-2, p. 808
def dot(a, b, metric):
return np.dot(a, np.dot(metric, b))
# angle between peaks in fractional coordinates
# see T. Arens et al., "Mathematik", 2015, ISBN: 978-3-642-44919-2, p. 808
def angle(a, b, metric):
len_a = np.sqrt(dot(a, a, metric))
len_b = np.sqrt(dot(b, b, metric))
c = dot(a, b, metric) / (len_a * len_b)
# check for rounding errors
if c > 1.:
#print("arccos precision overflow: " + str(c) + ".")
c = 1.
if c < -1.:
#print("arccos precision underflow: " + str(c) + ".")
c = -1.
return np.arccos(c)
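
# e.g. with a cubic metric, angle([1,0,0], [1,1,0], metric) evaluates to
# pi/4 (45 deg), independent of the lattice constant.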
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
if use_scipy:
try:
import scipy as sp
import scipy.constants as co
except ImportError:
print("Scipy could not be imported!")
exit(-1)
hbar_in_meVs = co.Planck/co.elementary_charge*1000./2./np.pi
E_to_k2 = 2.*co.neutron_mass/hbar_in_meVs**2. / co.elementary_charge*1000. * 1e-20
else:
E_to_k2 = 0.482596406464 # calculated with scipy, using the formula above
k2_to_E = 1./E_to_k2
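# numerically k2_to_E ~ 2.0722 meV*A^2, i.e. E[meV] ~ 2.0722 * (k[1/A])**2,
# the familiar form of the neutron relation E = hbar^2 k^2 / (2 m_n)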
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# mono (or ana) k -> A1 & A2 angles (or A5 & A6)
def get_a1a2(k, d):
s = np.pi/(d*k)
a1 = np.arcsin(s)
return [a1, 2.*a1]
# a1 angle (or a5) -> mono (or ana) k
def get_monok(theta, d):
s = np.sin(theta)
k = np.pi/(d*s)
return k
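
# typical numbers (illustrative): for PG(002) with d = 3.355 A and
# k = 2.662 1/A, get_a1a2 gives a1 ~ 20.6 deg and a2 ~ 41.2 deg;
# get_monok(a1, d) inverts the relation back to k.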
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# scattering angle a4
def get_a4(ki, kf, Q):
c = (ki**2. + kf**2. - Q**2.) / (2.*ki*kf)
return np.arccos(c)
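
# worked elastic example: for ki = kf = Q the cosine is 1/2, so a4 = 60 deg.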
# get |Q| from ki, kf and a4
def get_Q(ki, kf, a4):
c = np.cos(a4)
return np.sqrt(ki**2. + kf**2. - c*(2.*ki*kf))
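
# get_Q inverts get_a4 via the law of cosines:
# get_Q(ki, kf, get_a4(ki, kf, Q)) recovers Q for any closable triangle.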
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# angle enclosed by ki and Q
def get_psi(ki, kf, Q, sense=1.):
c = (ki**2. + Q**2. - kf**2.) / (2.*ki*Q)
return sense*np.arccos(c)
# crystallographic A matrix converting fractional to lab coordinates
# see https://de.wikipedia.org/wiki/Fraktionelle_Koordinaten
def get_A(lattice, angles):
cs = np.cos(angles)
s2 = np.sin(angles[2])
a = lattice[0] * np.array([1, 0, 0])
b = lattice[1] * np.array([cs[2], s2, 0])
c = lattice[2] * np.array([cs[1], \
(cs[0]-cs[1]*cs[2]) / s2, \
(np.sqrt(1. - np.dot(cs,cs) + 2.*cs[0]*cs[1]*cs[2])) / s2])
# testing equality with own derivation
#print((np.sqrt(1. - np.dot(cs,cs) + 2.*cs[0]*cs[1]*cs[2])) / s2)
#print(np.sqrt(1. - cs[1]*cs[1] - ((cs[0] - cs[2]*cs[1])/s2)**2.))
# the real-space basis vectors form the columns of the A matrix
return np.transpose(np.array([a, b, c]))
# crystallographic B matrix converting rlu to 1/A
# the reciprocal-space basis vectors form the columns of the B matrix
def get_B(lattice, angles):
A = get_A(lattice, angles)
B = 2.*np.pi * np.transpose(la.inv(A))
return B
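
# e.g. for a cubic cell (a = b = c, all angles 90 deg) this reduces to
# B = (2pi/a) * identity, so (h,k,l) in rlu maps to (2pi/a)*(h,k,l) in 1/A.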
# UB orientation matrix
# see https://dx.doi.org/10.1107/S0021889805004875
def get_UB(B, orient1_rlu, orient2_rlu, orientup_rlu):
orient1_invA = np.dot(B, orient1_rlu)
orient2_invA = np.dot(B, orient2_rlu)
orientup_invA = np.dot(B, orientup_rlu)
orient1_invA = orient1_invA / la.norm(orient1_invA)
orient2_invA = orient2_invA / la.norm(orient2_invA)
orientup_invA = orientup_invA / la.norm(orientup_invA)
U_invA = np.array([orient1_invA, orient2_invA, orientup_invA])
UB = np.dot(U_invA, B)
return UB
# a3 & a4 angles
def get_a3a4(ki, kf, Q_rlu, orient_rlu, orient_up_rlu, B, sense_sample=1., a3_offs=np.pi):
metric = get_metric(B)
# angle xi between Q and orientation reflex
xi = angle(Q_rlu, orient_rlu, metric)
# sign of xi
if dot(cross(orient_rlu, Q_rlu, B), orient_up_rlu, metric) < 0.:
xi = -xi
# length of Q
Qlen = np.sqrt(dot(Q_rlu, Q_rlu, metric))
# distance to plane
up_len = np.sqrt(dot(orient_up_rlu, orient_up_rlu, metric))
dist_Q_plane = dot(Q_rlu, orient_up_rlu, metric) / up_len
# angle psi enclosed by ki and Q
psi = get_psi(ki, kf, Qlen, sense_sample)
a3 = - psi - xi + a3_offs
a4 = get_a4(ki, kf, Qlen)
#print("xi = " + str(xi/np.pi*180.) + ", psi = " + str(psi/np.pi*180.) + ", offs = " + str(a3_offs/np.pi*180.))
return [a3, a4, dist_Q_plane]
def get_hkl(ki, kf, a3, Qlen, orient_rlu, orient_up_rlu, B, sense_sample=1., a3_offs=np.pi):
B_inv = la.inv(B)
# angle enclosed by ki and Q
psi = get_psi(ki, kf, Qlen, sense_sample)
# angle between Q and orientation reflex
xi = - a3 + a3_offs - psi
Q_lab = rotate(np.dot(B, orient_up_rlu), np.dot(B, orient_rlu*Qlen), xi)
Q_lab *= Qlen / la.norm(Q_lab)
Q_rlu = np.dot(B_inv, Q_lab)
return Q_rlu
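
# get_hkl is the inverse of get_a3a4: feeding back the a3 and |Q| obtained
# from get_a3a4(ki, kf, Q_rlu, ...) reproduces Q_rlu (up to rounding), as
# long as Q lies in the scattering plane.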
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# get ki from kf and energy transfer
def get_ki(kf, E):
return np.sqrt(kf**2. + E_to_k2*E)
# get kf from ki and energy transfer
def get_kf(ki, E):
return np.sqrt(ki**2. - E_to_k2*E)
# get energy transfer from ki and kf
def get_E(ki, kf):
return (ki**2. - kf**2.) / E_to_k2
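
# e.g. a neutron with k = 2.662 1/A carries k**2 * k2_to_E ~ 14.68 meV;
# get_E(ki, kf) is simply the difference of the two such energies.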
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# get the difference in tas angles for two positions
def get_angle_deltas(ki1, kf1, Q_rlu1, di1, df1, \
ki2, kf2, Q_rlu2, di2, df2, \
orient_rlu, orient_up_rlu, B, sense_sample=1., a3_offs=np.pi):
# position 1
[a1_1, a2_1] = get_a1a2(ki1, di1)
[a5_1, a6_1] = get_a1a2(kf1, df1)
[a3_1, a4_1, dist_Q_plane_1] = get_a3a4(ki1, kf1, Q_rlu1, orient_rlu, orient_up_rlu, B, sense_sample, a3_offs)
# position 2
[a1_2, a2_2] = get_a1a2(ki2, di2)
[a5_2, a6_2] = get_a1a2(kf2, df2)
[a3_2, a4_2, dist_Q_plane_2] = get_a3a4(ki2, kf2, Q_rlu2, orient_rlu, orient_up_rlu, B, sense_sample, a3_offs)
return [a1_2-a1_1, a2_2-a2_1, a3_2-a3_1, a4_2-a4_1, a5_2-a5_1, a6_2-a6_1, dist_Q_plane_1, dist_Q_plane_2]
# get the instrument driving time
def driving_time(deltas, rads_per_times):
times = np.abs(deltas) / rads_per_times
return np.max(times)
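
# e.g. angle deltas [0.1, 0.2] rad at speeds [0.05, 0.2] rad/s give per-axis
# times [2.0, 1.0] s; the axes are assumed to move in parallel, so the
# driving time is max(...) = 2.0 s.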
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# GUI
# -----------------------------------------------------------------------------
class TasGUI:
B = np.array([[1,0,0], [0,1,0], [0,0,1]])
orient_rlu = np.array([1,0,0])
orient2_rlu = np.array([0,1,0])
orient_up_rlu = np.array([0,0,1])
g_eps = 1e-4
a3_offs = np.pi
# -----------------------------------------------------------------------------
# helpers
def getfloat(self, str):
try:
return float(str)
except ValueError:
return 0.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# crystal tab
def xtalChanged(self):
lattice = np.array([self.getfloat(self.editA.text()), self.getfloat(self.editB.text()), self.getfloat(self.editC.text())])
angles = np.array([self.getfloat(self.editAlpha.text()), self.getfloat(self.editBeta.text()), self.getfloat(self.editGamma.text())])
self.orient_rlu = np.array([self.getfloat(self.editAx.text()), self.getfloat(self.editAy.text()), self.getfloat(self.editAz.text())])
self.orient2_rlu = np.array([self.getfloat(self.editBx.text()), self.getfloat(self.editBy.text()), self.getfloat(self.editBz.text())])
try:
self.B = get_B(lattice, angles/180.*np.pi)
invB = la.inv(self.B)
metric = get_metric(self.B)
ang = angle(self.orient_rlu, self.orient2_rlu, metric)
self.orient_up_rlu = cross(self.orient_rlu, self.orient2_rlu, self.B)
self.orient_up_rlu_norm = self.orient_up_rlu / la.norm(self.orient_up_rlu)
UB = get_UB(self.B, self.orient_rlu, self.orient2_rlu, self.orient_up_rlu)
invUB = la.inv(UB)
self.editBMat.setPlainText("Scattering plane normal: %s rlu.\n" % str(self.orient_up_rlu_norm) \
+"Angle between orientation vectors 1 and 2: %.4g deg.\n" % (ang/np.pi*180.) \
+"\nB =\n%10.4f %10.4f %10.4f\n%10.4f %10.4f %10.4f\n%10.4f %10.4f %10.4f\n" \
% (self.B[0,0],self.B[0,1],self.B[0,2], self.B[1,0],self.B[1,1],self.B[1,2], self.B[2,0],self.B[2,1],self.B[2,2]) \
+"\nB^(-1) =\n%10.4f %10.4f %10.4f\n%10.4f %10.4f %10.4f\n%10.4f %10.4f %10.4f\n" \
% (invB[0,0],invB[0,1],invB[0,2], invB[1,0],invB[1,1],invB[1,2], invB[2,0],invB[2,1],invB[2,2]) \
+"\nUB =\n%10.4f %10.4f %10.4f\n%10.4f %10.4f %10.4f\n%10.4f %10.4f %10.4f\n" \
% (UB[0,0],UB[0,1],UB[0,2], UB[1,0],UB[1,1],UB[1,2], UB[2,0],UB[2,1],UB[2,2]) \
+"\n(UB)^(-1) =\n%10.4f %10.4f %10.4f\n%10.4f %10.4f %10.4f\n%10.4f %10.4f %10.4f\n" \
% (invUB[0,0],invUB[0,1],invUB[0,2], invUB[1,0],invUB[1,1],invUB[1,2], invUB[2,0],invUB[2,1],invUB[2,2]) \
)
except (ArithmeticError, la.LinAlgError) as err:
self.editBMat.setPlainText("invalid")
self.QChanged()
self.QChanged_angles()
def planeChanged(self):
self.xtalChanged()
#self.QChanged()
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# tas tab
def TASChanged(self):
a1 = self.getfloat(self.editA1.text()) / 180. * np.pi
a2 = a1 * 2.
a3 = self.getfloat(self.editA3.text()) / 180. * np.pi
a4 = self.getfloat(self.editA4.text()) / 180. * np.pi
a5 = self.getfloat(self.editA5.text()) / 180. * np.pi
a6 = a5 * 2.
dmono = self.getfloat(self.editDm.text())
dana = self.getfloat(self.editDa.text())
sense_sample = 1.
if self.checkA4Sense.isChecked() == False:
sense_sample = -1.
self.editA2.setText("%.6g" % (a2 / np.pi * 180.))
self.editA6.setText("%.6g" % (a6 / np.pi * 180.))
try:
ki = get_monok(a1, dmono)
kf = get_monok(a5, dana)
E = get_E(ki, kf)
Qlen = get_Q(ki, kf, a4)
Qvec = get_hkl(ki, kf, a3, Qlen, self.orient_rlu, self.orient_up_rlu, self.B, sense_sample, self.a3_offs)
self.edith.setText("%.6g" % Qvec[0])
self.editk.setText("%.6g" % Qvec[1])
self.editl.setText("%.6g" % Qvec[2])
self.editQAbs.setText("%.6g" % Qlen)
self.editKi.setText("%.6g" % ki)
self.editKf.setText("%.6g" % kf)
self.editE.setText("%.6g" % E)
except (ArithmeticError, la.LinAlgError) as err:
self.edith.setText("invalid")
self.editk.setText("invalid")
self.editl.setText("invalid")
self.editKi.setText("invalid")
self.editKf.setText("invalid")
self.editE.setText("invalid")
def A2Changed(self):
a2 = self.getfloat(self.editA2.text()) / 180. * np.pi
self.editA1.setText("%.6g" % (0.5*a2 / np.pi * 180.))
self.TASChanged()
def A6Changed(self):
a6 = self.getfloat(self.editA6.text()) / 180. * np.pi
self.editA5.setText("%.6g" % (0.5*a6 / np.pi * 180.))
self.TASChanged()
def DChanged(self):
self.QChanged()
def QChanged(self):
Q_rlu = np.array([self.getfloat(self.edith.text()), self.getfloat(self.editk.text()), self.getfloat(self.editl.text())])
ki = self.getfloat(self.editKi.text())
kf = self.getfloat(self.editKf.text())
try:
[a1, a2] = get_a1a2(ki, self.getfloat(self.editDm.text()))
self.editA1.setText("%.6g" % (a1 / np.pi * 180.))
self.editA2.setText("%.6g" % (a2 / np.pi * 180.))
except (ArithmeticError, la.LinAlgError) as err:
self.editA1.setText("invalid")
self.editA2.setText("invalid")
try:
[a5, a6] = get_a1a2(kf, self.getfloat(self.editDa.text()))
self.editA5.setText("%.6g" % (a5 / np.pi * 180.))
self.editA6.setText("%.6g" % (a6 / np.pi * 180.))
except (ArithmeticError, la.LinAlgError) as err:
self.editA5.setText("invalid")
self.editA6.setText("invalid")
try:
sense_sample = 1.
if self.checkA4Sense.isChecked() == False:
sense_sample = -1.
[a3, a4, dist_Q_plane] = get_a3a4(ki, kf, Q_rlu, self.orient_rlu, self.orient_up_rlu, self.B, sense_sample, self.a3_offs)
Qlen = get_Q(ki, kf, a4)
Q_in_plane = np.abs(dist_Q_plane) < self.g_eps
self.editA3.setText("%.6g" % (a3 / np.pi * 180.))
self.editA4.setText("%.6g" % (a4 / np.pi * 180.))
self.editQAbs.setText("%.6g" % Qlen)
if Q_in_plane:
self.tasstatus.setText("")
else:
metric = get_metric(self.B)
#ang1 = angle(Q_rlu, self.orient_rlu, metric)
#ang2 = angle(Q_rlu, self.orient2_rlu, metric)
ang_plane = np.pi*0.5 - angle(Q_rlu, self.orient_up_rlu, metric)
self.tasstatus.setText(u"WARNING: Q is out of the plane by %.4g \u212b\u207b\u00b9, i.e. %.4g deg!" \
% (dist_Q_plane, ang_plane/np.pi*180.))
if np.isnan(a4) or np.isnan(Qlen):
self.tasstatus.setText(u"WARNING: Scattering triangle cannot be closed.")
self.editA3.setText("invalid")
self.editA4.setText("invalid")
self.editQAbs.setText("invalid")
except (ArithmeticError, la.LinAlgError) as err:
self.editA3.setText("invalid")
self.editA4.setText("invalid")
def KiKfChanged(self):
ki = self.getfloat(self.editKi.text())
kf = self.getfloat(self.editKf.text())
try:
E = get_E(ki, kf)
self.editE.setText("%.6g" % E)
self.QChanged()
except (ArithmeticError, la.LinAlgError) as err:
self.editE.setText("invalid")
def EChanged(self):
E = self.getfloat(self.editE.text())
kf = self.getfloat(self.editKf.text())
try:
ki = get_ki(kf, E)
self.editKi.setText("%.6g" % ki)
self.QChanged()
except (ArithmeticError, la.LinAlgError) as err:
self.editKi.setText("invalid")
def QChanged_angles(self):
Q_rlu1 = np.array([self.getfloat(self.edith1.text()), self.getfloat(self.editk1.text()), self.getfloat(self.editl1.text())])
Q_rlu2 = np.array([self.getfloat(self.edith2.text()), self.getfloat(self.editk2.text()), self.getfloat(self.editl2.text())])
ki1 = self.getfloat(self.editKi1.text())
ki2 = self.getfloat(self.editKi2.text())
kf1 = self.getfloat(self.editKf1.text())
kf2 = self.getfloat(self.editKf2.text())
di = self.getfloat(self.editDm.text())
df = self.getfloat(self.editDa.text())
speed_a1 = self.getfloat(self.editSpeedA1.text()) / 180.*np.pi
speed_a2 = self.getfloat(self.editSpeedA2.text()) / 180.*np.pi
speed_a3 = self.getfloat(self.editSpeedA3.text()) / 180.*np.pi
speed_a4 = self.getfloat(self.editSpeedA4.text()) / 180.*np.pi
speed_a5 = self.getfloat(self.editSpeedA5.text()) / 180.*np.pi
speed_a6 = self.getfloat(self.editSpeedA6.text()) / 180.*np.pi
try:
sense_sample = 1.
if self.checkA4Sense.isChecked() == False:
sense_sample = -1.
[da1, da2, da3, da4, da5, da6, dist1, dist2] = get_angle_deltas(\
ki1, kf1, Q_rlu1, di, df, \
ki2, kf2, Q_rlu2, di, df, \
self.orient_rlu, self.orient_up_rlu, self.B, sense_sample, self.a3_offs)
self.editdA1.setText("%.6g" % (da1 / np.pi * 180.))
self.editdA2.setText("%.6g" % (da2 / np.pi * 180.))
self.editdA3.setText("%.6g" % (da3 / np.pi * 180.))
self.editdA4.setText("%.6g" % (da4 / np.pi * 180.))
self.editdA5.setText("%.6g" % (da5 / np.pi * 180.))
self.editdA6.setText("%.6g" % (da6 / np.pi * 180.))
Q1_in_plane = np.abs(dist1) < self.g_eps
Q2_in_plane = np.abs(dist2) < self.g_eps
status = ""
if not Q1_in_plane:
status += "Position 1 is out-of-plane! "
if not Q2_in_plane:
status += "Position 2 is out-of-plane! "
if status != "":
status = "WARNING: " + status
if status == "":
driving = driving_time([da1, da2, da3, da4, da5, da6], \
[speed_a1, speed_a2, speed_a3, speed_a4, speed_a5, speed_a6])
status = "Instrument driving time: %.2f s" % (driving)
self.anglesstatus.setText(status)
except (ArithmeticError, la.LinAlgError) as err:
self.editdA1.setText("invalid")
self.editdA2.setText("invalid")
self.editdA3.setText("invalid")
self.editdA4.setText("invalid")
self.editdA5.setText("invalid")
self.editdA6.setText("invalid")
def KiKfChanged_angles(self):
ki1 = self.getfloat(self.editKi1.text())
ki2 = self.getfloat(self.editKi2.text())
kf1 = self.getfloat(self.editKf1.text())
kf2 = self.getfloat(self.editKf2.text())
try:
E1 = get_E(ki1, kf1)
self.editE1.setText("%.6g" % E1)
except (ArithmeticError, la.LinAlgError) as err:
self.editE1.setText("invalid")
try:
E2 = get_E(ki2, kf2)
self.editE2.setText("%.6g" % E2)
except (ArithmeticError, la.LinAlgError) as err:
self.editE2.setText("invalid")
self.QChanged_angles()
def EChanged_angles(self):
E1 = self.getfloat(self.editE1.text())
kf1 = self.getfloat(self.editKf1.text())
E2 = self.getfloat(self.editE2.text())
kf2 = self.getfloat(self.editKf2.text())
try:
ki1 = get_ki(kf1, E1)
self.editKi1.setText("%.6g" % ki1)
except (ArithmeticError, la.LinAlgError) as err:
self.editKi1.setText("invalid")
try:
ki2 = get_ki(kf2, E2)
self.editKi2.setText("%.6g" % ki2)
except (ArithmeticError, la.LinAlgError) as err:
self.editKi2.setText("invalid")
self.QChanged_angles()
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# info/settings tab
def comboA3ConvChanged(self):
idx = self.comboA3.currentIndex()
self.a3_offs = self.a3_offsets[idx]
self.QChanged()
# -----------------------------------------------------------------------------
#
# show qt GUI
#
def __init__(self):
# -----------------------------------------------------------------------------
# dependencies
# try to import qt5...
try:
import PyQt5 as qt
import PyQt5.QtCore as qtc
import PyQt5.QtGui as qtg
import PyQt5.QtWidgets as qtw
qt_ver = 5
except ImportError:
# ...and if not possible try to import qt4 instead
try:
import PyQt4 as qt
import PyQt4.QtCore as qtc
import PyQt4.QtGui as qtg
qtw = qtg
qt_ver = 4
except ImportError:
print("Error: No suitable version of Qt was found!")
exit(-1)
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# main application
np.set_printoptions(suppress=True, precision=4)
app = qtw.QApplication(sys.argv)
app.setApplicationName("qtas")
#app.setStyle("Fusion")
sett = qtc.QSettings("tobis_stuff", "in20tool")
if sett.contains("mainwnd/theme"):
app.setStyle(sett.value("mainwnd/theme"))
tabs = qtw.QTabWidget()
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# crystal tab
xtalpanel = qtw.QWidget()
xtallayout = qtw.QGridLayout(xtalpanel)
self.editA = qtw.QLineEdit(xtalpanel)
self.editB = qtw.QLineEdit(xtalpanel)
self.editC = qtw.QLineEdit(xtalpanel)
self.editAlpha = qtw.QLineEdit(xtalpanel)
self.editBeta = qtw.QLineEdit(xtalpanel)
self.editGamma = qtw.QLineEdit(xtalpanel)
separatorXtal = qtw.QFrame(xtalpanel)
separatorXtal.setFrameStyle(qtw.QFrame.HLine)
self.editAx = qtw.QLineEdit(xtalpanel)
self.editAy = qtw.QLineEdit(xtalpanel)
self.editAz = qtw.QLineEdit(xtalpanel)
self.editBx = qtw.QLineEdit(xtalpanel)
self.editBy = qtw.QLineEdit(xtalpanel)
self.editBz = qtw.QLineEdit(xtalpanel)
self.editBMat = qtw.QPlainTextEdit(xtalpanel)
self.editBMat.setReadOnly(True)
self.editA.textEdited.connect(self.xtalChanged)
self.editB.textEdited.connect(self.xtalChanged)
self.editC.textEdited.connect(self.xtalChanged)
self.editAlpha.textEdited.connect(self.xtalChanged)
self.editBeta.textEdited.connect(self.xtalChanged)
self.editGamma.textEdited.connect(self.xtalChanged)
self.editAx.textEdited.connect(self.planeChanged)
self.editAy.textEdited.connect(self.planeChanged)
self.editAz.textEdited.connect(self.planeChanged)
self.editBx.textEdited.connect(self.planeChanged)
self.editBy.textEdited.connect(self.planeChanged)
self.editBz.textEdited.connect(self.planeChanged)
self.editA.setText("%.6g" % sett.value("qtas/a", 5., type=float))
self.editB.setText("%.6g" % sett.value("qtas/b", 5., type=float))
self.editC.setText("%.6g" % sett.value("qtas/c", 5., type=float))
self.editAlpha.setText("%.6g" % sett.value("qtas/alpha", 90., type=float))
self.editBeta.setText("%.6g" % sett.value("qtas/beta", 90., type=float))
self.editGamma.setText("%.6g" % sett.value("qtas/gamma", 90., type=float))
self.editAx.setText("%.6g" % sett.value("qtas/ax", 1., type=float))
self.editAy.setText("%.6g" % sett.value("qtas/ay", 0., type=float))
self.editAz.setText("%.6g" % sett.value("qtas/az", 0., type=float))
self.editBx.setText("%.6g" % sett.value("qtas/bx", 0., type=float))
self.editBy.setText("%.6g" % sett.value("qtas/by", 1., type=float))
self.editBz.setText("%.6g" % sett.value("qtas/bz", 0., type=float))
xtallayout.addWidget(qtw.QLabel(u"a (\u212b):", xtalpanel), 0,0, 1,1)
xtallayout.addWidget(self.editA, 0,1, 1,3)
xtallayout.addWidget(qtw.QLabel(u"b (\u212b):", xtalpanel), 1,0, 1,1)
xtallayout.addWidget(self.editB, 1,1, 1,3)
xtallayout.addWidget(qtw.QLabel(u"c (\u212b):", xtalpanel), 2,0, 1,1)
xtallayout.addWidget(self.editC, 2,1, 1,3)
xtallayout.addWidget(qtw.QLabel(u"\u03b1 (deg):", xtalpanel), 3,0, 1,1)
xtallayout.addWidget(self.editAlpha, 3,1, 1,3)
xtallayout.addWidget(qtw.QLabel(u"\u03b2 (deg):", xtalpanel), 4,0, 1,1)
xtallayout.addWidget(self.editBeta, 4,1, 1,3)
xtallayout.addWidget(qtw.QLabel(u"\u03b3 (deg):", xtalpanel), 5,0, 1,1)
xtallayout.addWidget(self.editGamma, 5,1, 1,3)
xtallayout.addWidget(separatorXtal, 6,0, 1,4)
xtallayout.addWidget(qtw.QLabel("Orient. 1 (rlu):", xtalpanel), 7,0, 1,1)
xtallayout.addWidget(self.editAx, 7,1, 1,1)
xtallayout.addWidget(self.editAy, 7,2, 1,1)
xtallayout.addWidget(self.editAz, 7,3, 1,1)
xtallayout.addWidget(qtw.QLabel("Orient. 2 (rlu):", xtalpanel), 8,0, 1,1)
xtallayout.addWidget(self.editBx, 8,1, 1,1)
xtallayout.addWidget(self.editBy, 8,2, 1,1)
xtallayout.addWidget(self.editBz, 8,3, 1,1)
xtallayout.addWidget(self.editBMat, 9,0, 2,4)
tabs.addTab(xtalpanel, "Crystal")
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# tas tab
taspanel = qtw.QWidget()
taslayout = qtw.QGridLayout(taspanel)
self.editA1 = qtw.QLineEdit(taspanel)
self.editA2 = qtw.QLineEdit(taspanel)
self.editA3 = qtw.QLineEdit(taspanel)
self.editA4 = qtw.QLineEdit(taspanel)
self.editA5 = qtw.QLineEdit(taspanel)
self.editA6 = qtw.QLineEdit(taspanel)
self.checkA4Sense = qtw.QCheckBox(taspanel)
self.editDm = qtw.QLineEdit(taspanel)
self.editDa = qtw.QLineEdit(taspanel)
self.edith = qtw.QLineEdit(taspanel)
self.editk = qtw.QLineEdit(taspanel)
self.editl = qtw.QLineEdit(taspanel)
self.editE = qtw.QLineEdit(taspanel)
self.editKi = qtw.QLineEdit(taspanel)
self.editKf = qtw.QLineEdit(taspanel)
self.editQAbs = qtw.QLineEdit(taspanel)
self.editQAbs.setReadOnly(True)
self.tasstatus = qtw.QLabel(taspanel)
separatorTas = qtw.QFrame(taspanel)
separatorTas.setFrameStyle(qtw.QFrame.HLine)
separatorTas2 = qtw.QFrame(taspanel)
separatorTas2.setFrameStyle(qtw.QFrame.HLine)
separatorTas3 = qtw.QFrame(taspanel)
separatorTas3.setFrameStyle(qtw.QFrame.HLine)
self.editA1.textEdited.connect(self.TASChanged)
self.editA3.textEdited.connect(self.TASChanged)
self.editA4.textEdited.connect(self.TASChanged)
self.editA5.textEdited.connect(self.TASChanged)
self.editA2.textEdited.connect(self.A2Changed)
self.editA6.textEdited.connect(self.A6Changed)
self.editDm.textEdited.connect(self.DChanged)
self.editDa.textEdited.connect(self.DChanged)
self.edith.textEdited.connect(self.QChanged)
self.editk.textEdited.connect(self.QChanged)
self.editl.textEdited.connect(self.QChanged)
self.editKi.textEdited.connect(self.KiKfChanged)
self.editKf.textEdited.connect(self.KiKfChanged)
self.editE.textEdited.connect(self.EChanged)
self.editDm.setText("%.6g" % sett.value("qtas/dm", 3.355, type=float))
self.editDa.setText("%.6g" % sett.value("qtas/da", 3.355, type=float))
self.edith.setText("%.6g" % sett.value("qtas/h", 1., type=float))
self.editk.setText("%.6g" % sett.value("qtas/k", 0., type=float))
self.editl.setText("%.6g" % sett.value("qtas/l", 0., type=float))
#self.editE.setText("%.6g" % sett.value("qtas/E", 0., type=float))
self.editKi.setText("%.6g" % sett.value("qtas/ki", 2.662, type=float))
self.editKf.setText("%.6g" % sett.value("qtas/kf", 2.662, type=float))
self.checkA4Sense.setText("a4 sense is counter-clockwise")
self.checkA4Sense.setChecked(sett.value("qtas/a4_sense", 1, type=bool))
self.checkA4Sense.stateChanged.connect(self.QChanged)
taslayout.addWidget(qtw.QLabel("h (rlu):", taspanel), 0,0, 1,1)
taslayout.addWidget(self.edith, 0,1, 1,2)
taslayout.addWidget(qtw.QLabel("k (rlu):", taspanel), 1,0, 1,1)
taslayout.addWidget(self.editk, 1,1, 1,2)
taslayout.addWidget(qtw.QLabel("l (rlu):", taspanel), 2,0, 1,1)
taslayout.addWidget(self.editl, 2,1, 1,2)
taslayout.addWidget(qtw.QLabel("E (meV):", taspanel), 3,0, 1,1)
taslayout.addWidget(self.editE, 3,1, 1,2)
taslayout.addWidget(qtw.QLabel(u"ki, kf (\u212b\u207b\u00b9):", taspanel), 4,0, 1,1)
taslayout.addWidget(self.editKi, 4,1, 1,1)
taslayout.addWidget(self.editKf, 4,2, 1,1)
taslayout.addWidget(qtw.QLabel(u"|Q| (\u212b\u207b\u00b9):", taspanel), 5,0, 1,1)
taslayout.addWidget(self.editQAbs, 5,1, 1,2)
taslayout.addWidget(separatorTas, 6,0,1,3)
taslayout.addWidget(qtw.QLabel("a1, a2 (deg):", taspanel), 7,0, 1,1)
taslayout.addWidget(self.editA1, 7,1, 1,1)
taslayout.addWidget(self.editA2, 7,2, 1,1)
taslayout.addWidget(qtw.QLabel("a3, a4 (deg):", taspanel), 8,0, 1,1)
taslayout.addWidget(self.editA3, 8,1, 1,1)
taslayout.addWidget(self.editA4, 8,2, 1,1)
taslayout.addWidget(qtw.QLabel("a5, a6 (deg):", taspanel), 9,0, 1,1)
taslayout.addWidget(self.editA5, 9,1, 1,1)
taslayout.addWidget(self.editA6, 9,2, 1,1)
taslayout.addWidget(separatorTas2, 10,0, 1,3)
taslayout.addWidget(qtw.QLabel("Sense:", taspanel), 11,0, 1,1)
taslayout.addWidget(self.checkA4Sense, 11,1, 1,2)
taslayout.addWidget(separatorTas3, 12,0, 1,3)
taslayout.addWidget(qtw.QLabel(u"Mono., Ana. d (\u212b):", taspanel), 13,0, 1,1)
taslayout.addWidget(self.editDm, 13,1, 1,1)
taslayout.addWidget(self.editDa, 13,2, 1,1)
taslayout.addItem(qtw.QSpacerItem(16,16, qtw.QSizePolicy.Minimum, qtw.QSizePolicy.Expanding), 14,0, 1,3)
taslayout.addWidget(self.tasstatus, 15,0, 1,3)
tabs.addTab(taspanel, "TAS")
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# angular distance tab
anglespanel = qtw.QWidget()
angleslayout = qtw.QGridLayout(anglespanel)
self.editdA1 = qtw.QLineEdit(anglespanel)
self.editdA2 = qtw.QLineEdit(anglespanel)
self.editdA3 = qtw.QLineEdit(anglespanel)
self.editdA4 = qtw.QLineEdit(anglespanel)
self.editdA5 = qtw.QLineEdit(anglespanel)
self.editdA6 = qtw.QLineEdit(anglespanel)
self.editdA1.setReadOnly(True)
self.editdA2.setReadOnly(True)
self.editdA3.setReadOnly(True)
self.editdA4.setReadOnly(True)
self.editdA5.setReadOnly(True)
self.editdA6.setReadOnly(True)
self.editSpeedA1 = qtw.QLineEdit(anglespanel)
self.editSpeedA2 = qtw.QLineEdit(anglespanel)
self.editSpeedA3 = qtw.QLineEdit(anglespanel)
self.editSpeedA4 = qtw.QLineEdit(anglespanel)
self.editSpeedA5 = qtw.QLineEdit(anglespanel)
self.editSpeedA6 = qtw.QLineEdit(anglespanel)
self.edith1 = qtw.QLineEdit(anglespanel)
self.editk1 = qtw.QLineEdit(anglespanel)
self.editl1 = qtw.QLineEdit(anglespanel)
self.editE1 = qtw.QLineEdit(anglespanel)
self.editKi1 = qtw.QLineEdit(anglespanel)
self.editKf1 = qtw.QLineEdit(anglespanel)
self.edith2 = qtw.QLineEdit(anglespanel)
self.editk2 = qtw.QLineEdit(anglespanel)
self.editl2 = qtw.QLineEdit(anglespanel)
self.editE2 = qtw.QLineEdit(anglespanel)
self.editKi2 = qtw.QLineEdit(anglespanel)
self.editKf2 = qtw.QLineEdit(anglespanel)
self.anglesstatus = qtw.QLabel(anglespanel)
separatorAngles = qtw.QFrame(anglespanel)
separatorAngles.setFrameStyle(qtw.QFrame.HLine)
separatorAngles2 = qtw.QFrame(anglespanel)
separatorAngles2.setFrameStyle(qtw.QFrame.HLine)
self.edith1.textEdited.connect(self.QChanged_angles)
self.editk1.textEdited.connect(self.QChanged_angles)
self.editl1.textEdited.connect(self.QChanged_angles)
self.editKi1.textEdited.connect(self.KiKfChanged_angles)
self.editKf1.textEdited.connect(self.KiKfChanged_angles)
self.editE1.textEdited.connect(self.EChanged_angles)
self.edith2.textEdited.connect(self.QChanged_angles)
self.editk2.textEdited.connect(self.QChanged_angles)
self.editl2.textEdited.connect(self.QChanged_angles)
self.editKi2.textEdited.connect(self.KiKfChanged_angles)
self.editKf2.textEdited.connect(self.KiKfChanged_angles)
self.editE2.textEdited.connect(self.EChanged_angles)
self.editSpeedA1.textEdited.connect(self.QChanged_angles)
self.editSpeedA2.textEdited.connect(self.QChanged_angles)
self.editSpeedA3.textEdited.connect(self.QChanged_angles)
self.editSpeedA4.textEdited.connect(self.QChanged_angles)
self.editSpeedA5.textEdited.connect(self.QChanged_angles)
self.editSpeedA6.textEdited.connect(self.QChanged_angles)
self.edith1.setText("%.6g" % sett.value("qtas/h1", 1., type=float))
self.editk1.setText("%.6g" % sett.value("qtas/k1", 0., type=float))
self.editl1.setText("%.6g" % sett.value("qtas/l1", 0., type=float))
#self.editE1.setText("%.6g" % sett.value("qtas/E1", 0., type=float))
self.editKi1.setText("%.6g" % sett.value("qtas/ki1", 2.662, type=float))
self.editKf1.setText("%.6g" % sett.value("qtas/kf1", 2.662, type=float))
self.edith2.setText("%.6g" % sett.value("qtas/h2", 1., type=float))
self.editk2.setText("%.6g" % sett.value("qtas/k2", 0., type=float))
self.editl2.setText("%.6g" % sett.value("qtas/l2", 0., type=float))
#self.editE2.setText("%.6g" % sett.value("qtas/E2", 0., type=float))
self.editKi2.setText("%.6g" % sett.value("qtas/ki2", 2.662, type=float))
self.editKf2.setText("%.6g" % sett.value("qtas/kf2", 2.662, type=float))
self.editSpeedA1.setText("%.2f" % sett.value("qtas/v_a1", 0.15, type=float))
self.editSpeedA2.setText("%.2f" % sett.value("qtas/v_a2", 0.15, type=float))
self.editSpeedA3.setText("%.2f" % sett.value("qtas/v_a3", 1.25, type=float))
self.editSpeedA4.setText("%.2f" % sett.value("qtas/v_a4", 1.88, type=float))
self.editSpeedA5.setText("%.2f" % sett.value("qtas/v_a5", 1., type=float))
self.editSpeedA6.setText("%.2f" % sett.value("qtas/v_a6", 1., type=float))
angleslayout.addWidget(qtw.QLabel("Position 1:", anglespanel), 0,1, 1,1)
angleslayout.addWidget(qtw.QLabel("Position 2:", anglespanel), 0,2, 1,1)
angleslayout.addWidget(qtw.QLabel("h (rlu):", anglespanel), 1,0, 1,1)
angleslayout.addWidget(self.edith1, 1,1, 1,1)
angleslayout.addWidget(self.edith2, 1,2, 1,1)
angleslayout.addWidget(qtw.QLabel("k (rlu):", anglespanel), 2,0, 1,1)
angleslayout.addWidget(self.editk1, 2,1, 1,1)
angleslayout.addWidget(self.editk2, 2,2, 1,1)
angleslayout.addWidget(qtw.QLabel("l (rlu):", anglespanel), 3,0, 1,1)
angleslayout.addWidget(self.editl1, 3,1, 1,1)
angleslayout.addWidget(self.editl2, 3,2, 1,1)
angleslayout.addWidget(qtw.QLabel("E (meV):", anglespanel), 4,0, 1,1)
angleslayout.addWidget(self.editE1, 4,1, 1,1)
angleslayout.addWidget(self.editE2, 4,2, 1,1)
angleslayout.addWidget(qtw.QLabel(u"ki (\u212b\u207b\u00b9):", anglespanel), 5,0, 1,1)
angleslayout.addWidget(self.editKi1, 5,1, 1,1)
angleslayout.addWidget(self.editKi2, 5,2, 1,1)
angleslayout.addWidget(qtw.QLabel(u"kf (\u212b\u207b\u00b9):", anglespanel), 6,0, 1,1)
angleslayout.addWidget(self.editKf1, 6,1, 1,1)
angleslayout.addWidget(self.editKf2, 6,2, 1,1)
angleslayout.addWidget(separatorAngles, 7,0,1,3)
angleslayout.addWidget(qtw.QLabel("Motor Speeds:", anglespanel), 8,1, 1,3)
angleslayout.addWidget(qtw.QLabel("v_a1, v_a2 (deg/s):", anglespanel), 9,0, 1,1)
angleslayout.addWidget(self.editSpeedA1, 9,1, 1,1)
angleslayout.addWidget(self.editSpeedA2, 9,2, 1,1)
angleslayout.addWidget(qtw.QLabel("v_a3, v_a4 (deg/s):", anglespanel), 10,0, 1,1)
angleslayout.addWidget(self.editSpeedA3, 10,1, 1,1)
angleslayout.addWidget(self.editSpeedA4, 10,2, 1,1)
angleslayout.addWidget(qtw.QLabel("v_a5, v_a6 (deg/s):", anglespanel), 11,0, 1,1)
angleslayout.addWidget(self.editSpeedA5, 11,1, 1,1)
angleslayout.addWidget(self.editSpeedA6, 11,2, 1,1)
angleslayout.addWidget(separatorAngles2, 12,0,1,3)
angleslayout.addWidget(qtw.QLabel("Angular Distances:", anglespanel), 13,1, 1,3)
angleslayout.addWidget(qtw.QLabel("\u0394a1, \u0394a2 (deg):", anglespanel), 14,0, 1,1)
angleslayout.addWidget(self.editdA1, 14,1, 1,1)
angleslayout.addWidget(self.editdA2, 14,2, 1,1)
angleslayout.addWidget(qtw.QLabel("\u0394a3, \u0394a4 (deg):", anglespanel), 15,0, 1,1)
angleslayout.addWidget(self.editdA3, 15,1, 1,1)
angleslayout.addWidget(self.editdA4, 15,2, 1,1)
angleslayout.addWidget(qtw.QLabel("\u0394a5, \u0394a6 (deg):", anglespanel), 16,0, 1,1)
angleslayout.addWidget(self.editdA5, 16,1, 1,1)
angleslayout.addWidget(self.editdA6, 16,2, 1,1)
angleslayout.addItem(qtw.QSpacerItem(16,16, qtw.QSizePolicy.Minimum, qtw.QSizePolicy.Expanding), 17,0, 1,3)
angleslayout.addWidget(self.anglesstatus, 18,0, 1,3)
tabs.addTab(anglespanel, "Distances")
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# info/settings tab
infopanel = qtw.QWidget()
infolayout = qtw.QGridLayout(infopanel)
self.comboA3 = qtw.QComboBox(infopanel)
self.comboA3.addItems(["Takin", "NOMAD", "SICS", "NICOS"])
self.a3_offsets = [np.pi/2., np.pi, 0., 0.]
self.comboA3.setCurrentIndex(sett.value("qtas/a3_conv", 1, type=int))
self.comboA3.currentIndexChanged.connect(self.comboA3ConvChanged)
separatorInfo = qtw.QFrame(infopanel)
separatorInfo.setFrameStyle(qtw.QFrame.HLine)
infolayout.addWidget(qtw.QLabel("TAS Calculator (doi: <a href=\"https://doi.org/10.5281/zenodo.4117437\">10.5281/zenodo.4117437</a>).", infopanel), 0,0, 1,2)
infolayout.addWidget(qtw.QLabel("Written by Tobias Weber <[email protected]>.", infopanel), 1,0, 1,2)
infolayout.addWidget(qtw.QLabel("Date: October 24, 2018.", infopanel), 2,0, 1,2)
infolayout.addWidget(separatorInfo, 3,0, 1,2)
infolayout.addWidget(qtw.QLabel("Interpreter Version: " + sys.version + ".", infopanel), 4,0, 1,2)
infolayout.addWidget(qtw.QLabel("Numpy Version: " + np.__version__ + ".", infopanel), 5,0, 1,2)
infolayout.addWidget(qtw.QLabel("Qt Version: " + qtc.QT_VERSION_STR + ".", infopanel), 6,0, 1,2)
infolayout.addItem(qtw.QSpacerItem(16,16, qtw.QSizePolicy.Minimum, qtw.QSizePolicy.Expanding), 7,0, 1,2)
infolayout.addWidget(qtw.QLabel("A3 Convention:", infopanel), 8,0, 1,1)
infolayout.addWidget(self.comboA3, 8,1, 1,1)
tabs.addTab(infopanel, "Infos")
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# main dialog window
dlg = qtw.QDialog()
dlg.setWindowTitle("TAS Calculator")
mainlayout = qtw.QGridLayout(dlg)
mainlayout.addWidget(tabs)
if sett.contains("qtas/geo"):
geo = sett.value("qtas/geo")
if qt_ver == 4:
try:
geo = geo.toByteArray()
                except AttributeError:
pass
dlg.restoreGeometry(geo)
self.xtalChanged()
self.KiKfChanged()
self.comboA3ConvChanged()
#self.QChanged()
self.KiKfChanged_angles()
dlg.show()
app.exec_()
# save settings
sett.setValue("qtas/geo", dlg.saveGeometry())
sett.setValue("qtas/a", self.getfloat(self.editA.text()))
sett.setValue("qtas/b", self.getfloat(self.editB.text()))
sett.setValue("qtas/c", self.getfloat(self.editC.text()))
sett.setValue("qtas/alpha", self.getfloat(self.editAlpha.text()))
sett.setValue("qtas/beta", self.getfloat(self.editBeta.text()))
sett.setValue("qtas/gamma", self.getfloat(self.editGamma.text()))
sett.setValue("qtas/ax", self.getfloat(self.editAx.text()))
sett.setValue("qtas/ay", self.getfloat(self.editAy.text()))
sett.setValue("qtas/az", self.getfloat(self.editAz.text()))
sett.setValue("qtas/bx", self.getfloat(self.editBx.text()))
sett.setValue("qtas/by", self.getfloat(self.editBy.text()))
sett.setValue("qtas/bz", self.getfloat(self.editBz.text()))
sett.setValue("qtas/dm", self.getfloat(self.editDm.text()))
sett.setValue("qtas/da", self.getfloat(self.editDa.text()))
sett.setValue("qtas/h", self.getfloat(self.edith.text()))
sett.setValue("qtas/k", self.getfloat(self.editk.text()))
sett.setValue("qtas/l", self.getfloat(self.editl.text()))
#sett.setValue("qtas/E", self.getfloat(self.editE.text()))
sett.setValue("qtas/ki", self.getfloat(self.editKi.text()))
sett.setValue("qtas/kf", self.getfloat(self.editKf.text()))
sett.setValue("qtas/a3_conv", self.comboA3.currentIndex())
sett.setValue("qtas/a4_sense", self.checkA4Sense.isChecked())
sett.setValue("qtas/h1", self.getfloat(self.edith1.text()))
sett.setValue("qtas/k1", self.getfloat(self.editk1.text()))
sett.setValue("qtas/l1", self.getfloat(self.editl1.text()))
sett.setValue("qtas/ki1", self.getfloat(self.editKi1.text()))
sett.setValue("qtas/kf1", self.getfloat(self.editKf1.text()))
#sett.setValue("qtas/E1", self.getfloat(self.editE1.text()))
sett.setValue("qtas/h2", self.getfloat(self.edith2.text()))
sett.setValue("qtas/k2", self.getfloat(self.editk2.text()))
sett.setValue("qtas/l2", self.getfloat(self.editl2.text()))
sett.setValue("qtas/ki2", self.getfloat(self.editKi2.text()))
sett.setValue("qtas/kf2", self.getfloat(self.editKf2.text()))
#sett.setValue("qtas/E2", self.getfloat(self.editE2.text()))
sett.setValue("qtas/v_a1", self.getfloat(self.editSpeedA1.text()))
sett.setValue("qtas/v_a2", self.getfloat(self.editSpeedA2.text()))
sett.setValue("qtas/v_a3", self.getfloat(self.editSpeedA3.text()))
sett.setValue("qtas/v_a4", self.getfloat(self.editSpeedA4.text()))
sett.setValue("qtas/v_a5", self.getfloat(self.editSpeedA5.text()))
sett.setValue("qtas/v_a6", self.getfloat(self.editSpeedA6.text()))
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
#
# main
#
if __name__ == "__main__":
gui = TasGUI()
# -----------------------------------------------------------------------------
| gpl-2.0 | 4,280,889,633,136,214,500 | 36.459854 | 159 | 0.628897 | false |
pycodi/sockjs-flask | tests/test_session.py | 1 | 1369 | from sockjs_flask.session import STATE_NEW
from sockjs_flask import protocol as pt
from gevent.monkey import patch_all
import sockjs_flask
import logging
import gevent
patch_all(thread=False)
logging.basicConfig(
format='%(asctime)s, %(levelname)-3s [%(filename)s:%(lineno)d][%(module)s:%(funcName)s] - %(message)s',
datefmt='%H:%M:%S', level=logging.DEBUG)
def test_session():
""" Testing sockjs_flask.Session """
size_ = 4
s = sockjs_flask.Session( 'test', handler=lambda x, y: print(x, y), debug=True)
assert s.state == STATE_NEW
s._acquire()
for i in range(size_):
s.send('test_{}'.format(i))
s._release()
assert s._queue.qsize() == size_ + 1
def test_acquire_session():
""" Testing sockjs_flask.Session """
s = sockjs_flask.Session( 'test', handler=lambda x, y: print(x, y), debug=True)
assert s.acquired == False
s._acquire()
assert s.acquired == True
s._release()
assert s.acquired == False
def test_heartbeat_session():
""" Testing heartbeat from session """
s = sockjs_flask.Session( 'test', handler=lambda x, y: print(x, y), debug=True)
s._heartbeat()
assert s._queue.get_nowait() == (pt.FRAME_HEARTBEAT, pt.FRAME_HEARTBEAT)
def test_waiter_session():
s = sockjs_flask.Session( 'test', handler=lambda x, y: print(x, y), debug=True)
s._wait() | mit | -4,492,869,233,963,999,000 | 28.782609 | 111 | 0.641344 | false |
ymorired/google-app-engine-ranklist-ndb | example/index.py | 1 | 3587 | #!/usr/bin/python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import ranker.ranker as ranker
APP_KEY = 'default'
MIN_SCORE = 0
MAX_SCORE = 9999
def get_ranker():
return ranker.Ranker.get_or_create(APP_KEY, [MIN_SCORE, MAX_SCORE + 1], 100)
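# Editor's note (hedged sketch, using only the ranker calls exercised below):
# r = get_ranker()
# r.set_score('alice', [1234])           # scores are stored as lists of one int
# r.find_rank([1234])                    # 0-based rank of that score
# score, rank_at_tie = r.find_score(0)   # best score and where its tie starts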
def show_error_page(self, error):
template_values = {"error": error}
path = os.path.join(os.path.dirname(__file__), 'error.html')
self.response.out.write(template.render(path, template_values))
class MainPage(webapp.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'index.html')
self.response.out.write(template.render(path, {}))
class SetScoreHandler(webapp.RequestHandler):
def post(self):
score = self.request.get("score")
name = self.request.get("name")
try:
assert len(name) > 0
assert name[0] not in "0123456789"
score = int(score)
assert MIN_SCORE <= score <= MAX_SCORE
except Exception as e:
show_error_page(self, "Your name must not be empty, and must not start with "
"a digit. In addition, your score must be an integer "
"between 0 and 9999, inclusive. %s" % e.message)
return
r = get_ranker()
r.set_score(name, [score])
self.redirect("/")
class QueryRankPage(webapp.RequestHandler):
def get(self):
r = get_ranker()
rank = int(self.request.get("rank"))
if rank >= r.total_ranked_player_num():
show_error_page(self, "There aren't %d ranked people!" % (rank + 1))
return
(score, rank_at_tie) = r.find_score(rank)
template_values = {"score": score[0], "rank": rank}
if rank_at_tie < rank:
template_values["tied"] = True
template_values["rank_at_tie"] = rank_at_tie
path = os.path.join(os.path.dirname(__file__), 'rank.html')
self.response.out.write(template.render(path, template_values))
class QueryScorePage(webapp.RequestHandler):
def get(self):
r = get_ranker()
try:
score = int(self.request.get("score"))
assert MIN_SCORE <= score <= MAX_SCORE
except Exception as e:
show_error_page(self, "Scores must be integers between 0 and 9999 inclusive. %s" % e.message)
return
rank = r.find_rank([score])
template_values = {"score": score, "rank": rank}
path = os.path.join(os.path.dirname(__file__), 'score.html')
self.response.out.write(template.render(path, template_values))
application = webapp.WSGIApplication([
('/', MainPage),
('/setscore', SetScoreHandler),
('/getrank', QueryRankPage),
('/getscore', QueryScorePage)
], debug=True)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
| apache-2.0 | 8,920,500,059,512,498,000 | 31.315315 | 105 | 0.623362 | false |
bbraithwaite/ECMAStrict | test/types/to_string_test.py | 1 | 1117 | import unittest
from jstypes import types
class ToStringTests(unittest.TestCase):
def test_undefined_tostring(self):
self.assertEquals(types.Undefined().toString().value(), 'undefined')
def test_null_tostring(self):
self.assertEquals(types.Null().toString().value(), 'null')
def test_nan_tostring(self):
self.assertEquals(types.NaN().toString().value(), 'NaN')
def test_boolean_true_tostring(self):
self.assertEquals(types.Boolean('true').toString().value(), 'true')
def test_boolean_false_tostring(self):
self.assertEquals(types.Boolean('false').toString().value(), 'false')
def test_string_tostring(self):
self.assertEquals(types.String('hello world').toString().value(), 'hello world')
def test_number_tostring(self):
self.assertEquals(types.Number(1.0).toString().value(), '1.0')
self.assertEquals(types.Number(-2.0).toString().value(), '-2.0')
self.assertEquals(types.Number('Infinity').toString().value(), 'Infinity')
self.assertEquals(types.Number('-Infinity').toString().value(), '-Infinity')
| mit | -1,963,726,129,554,807,800 | 38.892857 | 88 | 0.669651 | false |
PyQuake/earthquakemodels | code/cocobbob/coco/code-postprocessing/cocopp/bestalg.py | 1 | 33461 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
""" Best algorithm dataset module
This module implements :py:class:`BestAlgSet` class which is used as
data structure for the data set of the virtual best algorithm.
Therefore this module will be imported by other modules which need
to access best algorithm data set.
The best algorithm data set can be accessed by the
:py:data:`bestAlgorithmEntries` variable. This variable needs to be
initialized by executing the function :py:func:`load_reference_algorithm(...)`.
This module can also be used to generate the best algorithm data set
with its generate method.
"""
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import pickle
import gzip
import warnings
import numpy as np
import tarfile
import pkg_resources
from . import readalign, pproc
from .toolsdivers import print_done
from .ppfig import Usage
from . import toolsstats, testbedsettings, genericsettings
from .pproc import DataSet
bestAlgorithmEntries = {}
algs2009 = ("ALPS", "AMALGAM", "BAYEDA", "BFGS", "Cauchy-EDA", "BIPOP-CMA-ES",
"CMA-ESPLUSSEL", "DASA", "DE-PSO", "DIRECT", "EDA-PSO",
"FULLNEWUOA", "G3PCX", "GA", "GLOBAL", "iAMALGAM",
"IPOP-SEP-CMA-ES", "LSfminbnd", "LSstep", "MA-LS-CHAIN", "MCS",
"NELDER", "NELDERDOERR", "NEWUOA", "ONEFIFTH", "POEMS", "PSO",
"PSO_Bounds", "RANDOMSEARCH", "Rosenbrock", "SNOBFIT", "VNS")
# Warning: NEWUOA is there twice: NEWUOA noiseless is a 2009 entry, NEWUOA
# noisy is a 2010 entry
algs2010 = ("1komma2", "1komma2mir", "1komma2mirser", "1komma2ser", "1komma4",
"1komma4mir", "1komma4mirser", "1komma4ser", "1plus1",
"1plus2mirser", "ABC", "AVGNEWUOA", "CMAEGS", "DE-F-AUC",
"DEuniform", "IPOP-ACTCMA-ES", "BIPOP-CMA-ES", "MOS", "NBC-CMA",
"NEWUOA", "PM-AdapSS-DE", "RCGA", "SPSA", "oPOEMS", "pPOEMS")
algs2012 = ("ACOR", "BIPOPaCMA", "BIPOPsaACM", "aCMA", "CMAES", "aCMAa",
"aCMAm", "aCMAma", "aCMAmah", "aCMAmh", "DBRCGA", "DE", "DEAE",
"DEb", "DEctpb", "IPOPsaACM", "JADE", "JADEb", "JADEctpb",
"NBIPOPaCMA", "NIPOPaCMA", "DE-AUTO", "DE-BFGS", "DE-ROLL",
"DE-SIMPLEX", "MVDE", "PSO-BFGS", "xNES", "xNESas", "SNES")
# TODO: this should be reimplemented:
# o a best algorithm and an algorithm portfolio are almost the same,
# they should derive from a CombinedAlgorithmDataSet?
# CLASS DEFINITIONS
class BestAlgSet(DataSet):
"""Unit element of best algorithm data set.
    Here, "unit element" means the data for one function and one dimension.
    This class is declared as a subclass of :py:class:`DataSet` but does
    not reuse its initialization.
Class attributes:
- funcId -- function Id (integer)
- dim -- dimension (integer)
- comment -- comment for the setting (string)
- algId -- algorithm name (string)
- evals -- collected data aligned by function values (array)
- maxevals -- maximum number of function evaluations (array)
evals and funvals are arrays of data collected from N data sets.
Both have the same format: zero-th column is the value on which the
data of a row is aligned, the N subsequent columns are either the
numbers of function evaluations for evals or function values for
funvals.
Known bug: algorithms where the aRT is NaN or Inf are not taken into
account!?
"""
def __init__(self, dict_alg, algId='Virtual Best Algorithm'):
"""Instantiate one best algorithm data set with name algId.
:keyword dict_alg: dictionary of datasets, keys are algorithm
names, values are 1-element
:py:class:`DataSetList`.
:keyword algId: name of the to-be-constructed algorithm as string
"""
# values of dict dictAlg are DataSetList which should have only one
# element which will be assigned as values in the following lines.
d = set()
f = set()
pr = 0
for i in dict_alg.values():
d |= set(j.dim for j in i)
f |= set(j.funcId for j in i)
if len(i) > 0 and hasattr(i[0], 'precision'):
pr = max(pr, max(j.precision for j in i))
if len(f) > 1 or len(d) > 1:
            raise Usage('Expect the data of algorithms for only one function and '
                        'one dimension.')
f = f.pop()
d = d.pop()
dictMaxEvals = {}
dictFinalFunVals = {}
tmpdictAlg = {}
best_algorithms = []
self.success_ratio = []
for alg, i in dict_alg.iteritems():
if len(i) == 0:
warnings.warn('Algorithm %s was not tested on f%d %d-D.'
% (alg, f, d))
continue
elif len(i) > 1:
warnings.warn('Algorithm %s has a problem on f%d %d-D.'
% (alg, f, d))
continue
tmpdictAlg[alg] = i[0] # Assign ONLY the first element as value
dictMaxEvals[alg] = i[0].maxevals
dictFinalFunVals[alg] = i[0].finalfunvals
best_algorithms = i[0].algs
self.success_ratio = i[0].success_ratio
dict_alg = tmpdictAlg
sortedAlgs = dict_alg.keys()
# algorithms will be sorted along sortedAlgs which is now a fixed list
# Align aRT
erts = list(np.transpose(np.vstack([dict_alg[i].target, dict_alg[i].ert]))
for i in sortedAlgs)
res = readalign.alignArrayData(readalign.HArrayMultiReader(erts, False))
resalgs = []
reserts = []
instance_numbers = []
# For each function value
for i in res:
# Find best algorithm
curerts = i[1:]
            assert (np.isnan(curerts) == False).any()
currentbestert = np.inf
currentbestalg = ''
for j, tmpert in enumerate(curerts):
if np.isnan(tmpert):
continue # TODO: don't disregard these entries
if tmpert == currentbestert:
# TODO: what do we do in case of ties?
# look at function values corresponding to the aRT?
# Look at the function evaluations? the success ratio?
pass
elif tmpert < currentbestert:
currentbestert = tmpert
currentbestalg = sortedAlgs[j]
reserts.append(currentbestert)
resalgs.append(currentbestalg)
sorted_instance_numbers = list(set(dict_alg[currentbestalg].instancenumbers))
sorted_instance_numbers.sort()
instance_numbers.append(sorted_instance_numbers)
dictiter = {}
dictcurLine = {}
resDataSet = []
# write down the #fevals to reach the function value.
for funval, alg in zip(res[:, 0], resalgs):
it = dictiter.setdefault(alg, iter(dict_alg[alg].evals))
curLine = dictcurLine.setdefault(alg, np.array([np.inf, 0]))
while curLine[0] > funval:
try:
curLine = it.next()
except StopIteration:
break
dictcurLine[alg] = curLine.copy()
tmp = curLine.copy()
tmp[0] = funval
resDataSet.append(tmp)
setalgs = set(resalgs)
dictFunValsNoFail = {}
for alg in setalgs:
for curline in dict_alg[alg].funvals:
if (curline[1:] == dict_alg[alg].finalfunvals).any():
# only works because the funvals are monotonous
break
dictFunValsNoFail[alg] = curline.copy()
self.evals = resDataSet
# evals is not a np array but a list of arrays because they may not
# all be of the same size.
self.maxevals = dict((i, dictMaxEvals[i]) for i in setalgs)
self.finalfunvals = dict((i, dictFinalFunVals[i]) for i in setalgs)
self.funvalsnofail = dictFunValsNoFail
self.dim = d
self.funcId = f
if pr > 0:
self.precision = pr
self.algs = best_algorithms if best_algorithms else resalgs
self.instances = instance_numbers
self.best_algorithm_data = resalgs
self.algId = algId
if len(sortedAlgs) > 1:
self.comment = 'Combination of ' + ', '.join(sortedAlgs)
else:
self.comment = dict_alg[sortedAlgs[0]].comment.lstrip('%% ')
self.ert = np.array(reserts)
self.target = res[:, 0]
self.testbed = dict_alg[sortedAlgs[0]].testbed_name # TODO: not nice
self.suite = getattr(dict_alg[sortedAlgs[0]], 'suite', None)
self.used_algorithms = sortedAlgs
bestfinalfunvals = np.array([np.inf])
for alg in sortedAlgs:
if np.median(dict_alg[alg].finalfunvals) < np.median(bestfinalfunvals):
bestfinalfunvals = dict_alg[alg].finalfunvals
algbestfinalfunvals = alg
self.bestfinalfunvals = bestfinalfunvals
self.algbestfinalfunvals = algbestfinalfunvals
def __eq__(self, other):
return (self.__class__ is other.__class__ and
self.funcId == other.funcId and
self.dim == other.dim and
# self.precision == other.precision and
self.algId == other.algId and
self.comment == other.comment)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return ('{alg: %s, F%d, dim: %d}'
% (self.algId, self.funcId, self.dim))
def pickle(self, outputdir=None):
"""Save instance to a pickle file.
Saves the instance to a pickle file. If not specified
by argument outputdir, the location of the pickle is given by
the location of the first index file associated.
"""
# the associated pickle file does not exist
if not getattr(self, 'pickleFile', False):
if outputdir is None:
outputdir = os.path.split(self.indexFiles[0])[0] + '-pickle'
if not os.path.isdir(outputdir):
try:
os.mkdir(outputdir)
except OSError:
                    print('Could not create output directory %s for pickle files'
                          % outputdir)
raise
self.pickleFile = os.path.join(outputdir,
'bestalg_f%03d_%02d.pickle'
% (self.funcId, self.dim))
if getattr(self, 'modsFromPickleVersion', True):
try:
                f = open(self.pickleFile, 'w')  # TODO: what if file already exists?
pickle.dump(self, f)
f.close()
if genericsettings.verbose:
print('Saved pickle in %s.' % self.pickleFile)
except IOError, (errno, strerror):
print("I/O error(%s): %s" % (errno, strerror))
except pickle.PicklingError:
print("Could not pickle %s" % self)
# else: #What?
# if genericsettings.verbose:
# print('Skipped update of pickle file %s: no new data.'
# % self.pickleFile)
def createDictInstance(self):
"""Returns a dictionary of the instances
The key is the instance id, the value is a list of index.
"""
dictinstance = {}
for i in range(len(self.instancenumbers)):
dictinstance.setdefault(self.instancenumbers[i], []).append(i)
return dictinstance
def detERT(self, targets):
"""Determine the average running time to reach target values.
:keyword list targets: target function values of interest
:returns: list of average running times corresponding to the
targets.
"""
res = []
for f in targets:
idx = (self.target <= f)
try:
res.append(self.ert[idx][0])
except IndexError:
res.append(np.inf)
return res
# TODO: return the algorithm here as well.
def detEvals(self, targets):
"""Determine the number of evaluations to reach target values.
:keyword seq targets: target precisions
:returns: list of arrays each corresponding to one value in
targets and the list of the corresponding algorithms
"""
res = []
res2 = []
res3 = []
for f in targets:
tmp = np.array([np.nan] * len(self.bestfinalfunvals))
tmp2 = None
tmp3 = None
for i, line in enumerate(self.evals):
if len(self.success_ratio) > i:
tmp3 = [0, self.success_ratio[i][1]]
if line[0] <= f:
tmp = line[1:]
tmp2 = self.best_algorithm_data[i]
if len(self.success_ratio) > i:
tmp3 = self.success_ratio[i]
break
res.append(tmp)
res2.append(tmp2)
if tmp3 is not None:
res3.append(tmp3)
return res, res2, res3
def get_success_ratio(self, target):
det_evaluations = self.detEvals([target, ])
success_ratios = det_evaluations[2]
if len(success_ratios) > 0:
successful_runs = success_ratios[0][0]
all_runs = success_ratios[0][1]
else:
            successful_runs = np.sum(np.isnan(det_evaluations[0][0]) == False)  # count the number of successful runs
all_runs = len(det_evaluations[0][0])
return successful_runs, all_runs
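# Editor's sketch (hypothetical usage; `best` stands for a BestAlgSet built by
# generate() below from a dict of single-entry DataSetLists):
# best.detERT([1e-1, 1e-3])                    # aRT at each target precision
# evals, algs, ratios = best.detEvals([1e-3])  # evals per target + winning algorithm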
# FUNCTION DEFINITIONS
def load_reference_algorithm(best_algo_filename, force=False, relative_load=True):
"""Assigns :py:data:`bestAlgorithmEntries`.
This function is needed to set the global variable
:py:data:`bestAlgorithmEntries`. It reads in the data, specified by
the string best_algo_filename which can
either be a pickled file (deprecated), generated by
deprecated_customgenerate or any standard data set (i.e. a zipped or
unzipped folder with .info, .dat, and .tdat files such as the ones
generated by custom_generate). This function will also set
the testbedsettings.current_testbed.reference_algorithm_displayname
according to the read data if not already present.
:py:data:`bestAlgorithmEntries` is a dictionary accessed by providing
a tuple :py:data:`(dimension, function)`. This returns an instance
of :py:class:`BestAlgSet`.
The data is that of specific algorithms (depending on the Testbed used).
"""
global bestAlgorithmEntries
# global statement necessary to change the variable bestalg.bestAlgorithmEntries
if not force and bestAlgorithmEntries:
return bestAlgorithmEntries
# If the file or folder name is not specified then we skip the load.
if not best_algo_filename:
# print the following line only once to not mess the output:
        if bestAlgorithmEntries is not None:
warnings.warn("no best algorithm data specified")
bestAlgorithmEntries = None
return bestAlgorithmEntries
print("Loading best algorithm data from %s ..." % best_algo_filename)
sys.stdout.flush()
if relative_load:
best_alg_file_path = os.path.split(__file__)[0]
pickleFilename = os.path.join(best_alg_file_path, best_algo_filename)
else:
best_alg_file_path = ''
pickleFilename = best_algo_filename
if pickleFilename.endswith('pickle.gz'):
warnings.warn(" !Loading reference algorithms from pickle files deprecated!")
# TODO: for backwards compatibility: check whether algorithm is
# actually in pickle format (and not just based on the file ending)
fid = gzip.open(pickleFilename, 'r')
try:
bestAlgorithmEntries = pickle.load(fid)
# in the deprecated case of loading pickle files, we use the
# filename to set reference_algorithm_displayname in testbedsettings
# if not present:
if testbedsettings.current_testbed:
if hasattr(testbedsettings.current_testbed, 'reference_algorithm_displayname'):
testbedsettings.current_testbed.reference_algorithm_displayname = pickleFilename
except:
warnings.warn("failed to open file %s" % pickleFilename)
            # raise  # uncomment to diagnose
bestAlgorithmEntries = None
fid.close()
else:
algList = [os.path.join(best_alg_file_path, best_algo_filename)]
dsList, sortedAlgs, dictAlg = pproc.processInputArgs(algList)
bestAlgorithmEntries = generate(dictAlg, dsList[0].algId)
# set reference_algorithm_displayname in testbedsetting if not present:
if testbedsettings.current_testbed:
if testbedsettings.current_testbed.reference_algorithm_displayname is None:
testbedsettings.current_testbed.reference_algorithm_displayname = dsList[0].algId
print_done()
return bestAlgorithmEntries
def usage():
print(__doc__) # same as: sys.modules[__name__].__doc__, was: main.__doc__
def generate(dict_alg, algId):
"""Generates dictionary of best algorithm data set.
"""
# dsList, sortedAlgs, dictAlg = processInputArgs(args)
res = {}
for f, i in pproc.dictAlgByFun(dict_alg).iteritems():
for d, j in pproc.dictAlgByDim(i).iteritems():
tmp = BestAlgSet(j, algId)
res[(d, f)] = tmp
return res
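# Editor's sketch (hypothetical lookup): the result of generate() is keyed by
# (dimension, function), e.g. generate(dictAlg, 'myBest')[(5, 1)] would be the
# BestAlgSet for f1 in 5-D.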
def deprecated_customgenerate(args=algs2009):
"""Generates best algorithm data set.
This functionality is deprecated because it writes
unsupported pickle files.
It will create a folder bestAlg in the current working directory
with a pickle file corresponding to the bestalg dataSet of the
algorithms listed in variable args.
This method is called from the python command line from a directory
containing all necessary data folders::
>>> from cocopp import bestalg
>>> import os
>>> path = os.path.abspath(os.path.dirname('__file__'))
>>> os.chdir(os.path.join(path, 'data'))
>>> infoFile = 'ALPS/bbobexp_f2.info'
>>> if not os.path.exists(infoFile):
... import urllib
... import tarfile
... dataurl = 'http://coco.gforge.inria.fr/data-archive/2009/ALPS_hornby_noiseless.tgz'
... filename, headers = urllib.urlretrieve(dataurl)
... archivefile = tarfile.open(filename)
... archivefile.extractall()
>>> os.chdir(os.path.join(path, 'data'))
>>> bestalg.custom_generate(('ALPS', ''), 'refAlgFromALPS') # doctest: +ELLIPSIS
Searching in...
>>> os.chdir(path)
"""
outputdir = 'bestCustomAlg'
genericsettings.verbose = True
dsList, sortedAlgs, dictAlg = pproc.processInputArgs(args)
if not os.path.exists(outputdir):
os.mkdir(outputdir)
if genericsettings.verbose:
print('Folder %s was created.' % outputdir)
res = generate(dictAlg, outputdir)
picklefilename = os.path.join(outputdir, 'bestalg.pickle')
fid = gzip.open(picklefilename + ".gz", 'w')
pickle.dump(res, fid)
fid.close()
print('done with writing pickle...')
def custom_generate(args=algs2009, algId='bestCustomAlg', suite=None):
"""Generates best algorithm data set from a given set of algorithms.
It will create a folder named as algId in the current working directory
corresponding to the bestalg dataSet of the algorithms listed in
variable args. This folder is furthermore added to a `.tar.gz` file
of the same name.
This method is called from the python command line from a directory
    containing all necessary data folders.
"""
output_dir = algId
genericsettings.verbose = True
testbedsettings.reset_reference_values()
dsList, sortedAlgs, dictAlg = pproc.processInputArgs(args)
if not os.path.exists(output_dir):
os.mkdir(output_dir)
if genericsettings.verbose:
print('Folder %s was created.' % output_dir)
result = generate(dictAlg, algId)
create_data_files(output_dir, result, suite)
tar = tarfile.open(output_dir + ".tar.gz", "w:gz")
tar.add(output_dir)
tar.close()
print('Best algorithm files were written to %s.tar.gz' % output_dir)
print('...using instances ', end='')
print(dsList[0].instancenumbers)
def create_data_files(output_dir, result, suite):
info_filename = 'bbob-bestalg'
filename_template = info_filename + '_f%02d_d%02d.%s'
info_lines = []
all_instances_used = []
algorithms_used = []
for key, value in sorted(result.iteritems()):
# TODO: throw an error
# if not len(value.target) == len(value.ert):
dict_evaluation = {}
for index in range(len(value.target)):
evaluation_value = value.ert[index]
target = value.target[index]
dict_evaluation[np.round(evaluation_value)] = target
lines = list()
lines.append("% Artificial instance")
lines.append("% algorithm type = best")
target_list = value.target.tolist()
instances_used = []
for key_target, value_target in sorted(dict_evaluation.iteritems()):
successful_runs, all_runs = result[(key[0], key[1])].get_success_ratio(value_target)
target_index = target_list.index(value_target)
alg_for_target = os.path.basename(value.algs[target_index])
instances_used.append(value.instances[target_index])
lines.append("%d %10.15e %10.15e %s %d %d" %
(key_target, value_target, value_target, alg_for_target, successful_runs, all_runs))
last_evaluation = key_target
last_value = value_target
instance_data = "%d:%d|%10.15e" % (0, last_evaluation, last_value)
all_instances_used.extend(instances_used)
instances_list = get_used_instance_list(instances_used)
test_suite = getattr(value, 'suite', None)
if test_suite is None:
test_suite = suite
algorithm_id = value.algId
if result[result.keys()[0]].testbed == testbedsettings.default_testbed_bi:
info_lines.append("function = %d, dim = %d, %s, %s"
% (key[1], key[0], filename_template % (key[1], key[0], 'dat'), instance_data))
else:
header = "funcId = %d, DIM = %d, Precision = %10.15e, algId = '%s'" \
% (key[1], key[0], value.precision, algorithm_id)
if test_suite is not None:
header += ", suite = '%s'" % test_suite
info_lines.append(header)
info_lines.append("%% %s; instance_numbers: %s" % (value.comment, instances_list))
info_lines.append("%s, %s" % (filename_template % (key[1], key[0], 'dat'), instance_data))
filename = os.path.join(output_dir, filename_template % (key[1], key[0], 'dat'))
write_to_file(filename, lines)
filename = os.path.join(output_dir, filename_template % (key[1], key[0], 'tdat'))
write_to_file(filename, lines)
for algorithm in value.used_algorithms:
if algorithm not in algorithms_used:
algorithms_used.append(algorithm)
if result[result.keys()[0]].testbed == testbedsettings.default_testbed_bi:
header = "algorithm = '%s', indicator = 'hyp'" % algorithm_id
if test_suite is not None:
header += ", suite = '%s'" % test_suite
reference_values = testbedsettings.get_first_reference_values()
if reference_values is not None:
header += ", reference_values_hash = '%s'" % reference_values
info_lines.insert(0, header)
instances_list = get_used_instance_list(all_instances_used)
if len(algorithms_used) > 1:
comment = 'Combination of ' + ', '.join(algorithms_used)
else:
comment = value.comment
comment += '; coco_version: ' + pkg_resources.require('cocopp')[0].version
info_lines.insert(1, "%% %s; instance_numbers: %s" % (comment, instances_list))
filename = os.path.join(output_dir, '%s.info' % info_filename)
write_to_file(filename, info_lines)
def write_to_file(filename, lines):
fid = open(filename, 'w')
for line in lines:
fid.write("%s\n" % line)
fid.close()
def getAllContributingAlgorithmsToBest(algnamelist, target_lb=1e-8,
target_ub=1e2):
"""Computes first the artificial best algorithm from given algorithm list
algnamelist, constructed by extracting for each target/function pair
the algorithm with best aRT among the given ones. Returns then the list
of algorithms that are contributing to the definition of the best
algorithm, separated by dimension, and sorted by importance (i.e. with
respect to the number of target/function pairs where each algorithm is
best). Only target/function pairs are taken into account where the target
is in between target_lb and target_ub.
This method should be called from the python command line from a directory
containing all necessary data folders::
>>> from cocopp import bestalg
>>> import os
>>> import urllib
>>> path = os.path.abspath(os.path.dirname(os.path.dirname('__file__')))
>>> os.chdir(path)
>>> infoFile = 'data/BIPOP-CMA-ES.tgz'
>>> if not os.path.exists(infoFile):
... os.chdir(os.path.join(path, 'data'))
... dataurl = 'http://coco.gforge.inria.fr/data-archive/2009/BIPOP-CMA-ES_hansen_noiseless.tgz'
... filename, headers = urllib.urlretrieve(dataurl, 'BIPOP-CMA-ES.tgz')
>>> os.chdir(path)
>>> infoFile = 'data/MCS.tgz'
>>> if not os.path.exists(infoFile):
... os.chdir(os.path.join(path, 'data'))
... dataurl = 'http://coco.gforge.inria.fr/data-archive/2009/MCS_huyer_noiseless.tgz'
... filename, headers = urllib.urlretrieve(dataurl, 'MCS.tgz')
>>> os.chdir(path)
>>> bestalg.getAllContributingAlgorithmsToBest(('data/BIPOP-CMA-ES.tgz', 'data/MCS.tgz')) # doctest:+ELLIPSIS
Generating best algorithm data...
"""
print("Generating best algorithm data from given algorithm list...")
custom_generate(algnamelist, algId='bestCustomAlg')
# dsList, sortedAlgs, dictAlg = pproc.processInputArgs(('bestCustomAlg', ''))
# bestalgentries = generate(dictAlg, dsList[0].algId)
path_to_bestalg_file = os.getcwd()
path_to_bestalg_file = os.path.join(path_to_bestalg_file, 'bestCustomAlg.tar.gz')
bestalgentries = load_reference_algorithm(path_to_bestalg_file, relative_load=False, force=True)
print('loading of best algorithm data done.')
countsperalgorithm = {}
for (d, f) in bestalgentries:
print('dimension: %d, function: %d' % (d, f))
print(f)
setofalgs = set(bestalgentries[d, f].algs)
# pre-processing data to only look at targets >= target_lb:
correctedbestalgentries = []
for i in range(0, len(bestalgentries[d, f].target)):
if ((bestalgentries[d, f].target[i] >= target_lb) and
(bestalgentries[d, f].target[i] <= target_ub)):
correctedbestalgentries.append(bestalgentries[d, f].algs[i])
print(len(correctedbestalgentries))
# now count how often algorithm a is best for the extracted targets
for a in setofalgs:
# use setdefault to initialize with zero if a entry not existant:
countsperalgorithm.setdefault((d, a), 0)
countsperalgorithm[(d, a)] += correctedbestalgentries.count(a)
selectedalgsperdimension = {}
for (d, a) in sorted(countsperalgorithm):
if not selectedalgsperdimension.has_key(d):
selectedalgsperdimension[d] = []
selectedalgsperdimension[d].append((countsperalgorithm[(d, a)], a))
for d in sorted(selectedalgsperdimension):
print('%dD:' % d)
for (count, alg) in sorted(selectedalgsperdimension[d], reverse=True):
print(count, alg)
print('\n')
print(" done.")
def extractBestAlgorithms(args=algs2009, f_factor=2,
target_lb=1e-8, target_ub=1e22):
"""Returns (and prints) per dimension a list of algorithms within
    algorithm list args that contains an algorithm if, for any
    dimension/target/function pair, this algorithm:
- is the best algorithm wrt aRT
- its own aRT lies within a factor f_factor of the best aRT
- there is no algorithm within a factor of f_factor of the best aRT
and the current algorithm is the second best.
"""
# TODO: use pproc.TargetValues class as input target values
# default target values:
targets = pproc.TargetValues(
10 ** np.arange(np.log10(max((1e-8, target_lb))),
np.log10(target_ub) + 1e-9, 0.2))
# there should be a simpler way to express this to become the
# interface of this function
print('Loading algorithm data from given algorithm list...\n')
genericsettings.verbose = True
dsList, sortedAlgs, dictAlg = pproc.processInputArgs(args)
print('This may take a while (depending on the number of algorithms)')
selectedAlgsPerProblem = {}
for f, i in pproc.dictAlgByFun(dictAlg).iteritems():
for d, j in pproc.dictAlgByDim(i).iteritems():
best = BestAlgSet(j)
selectedAlgsPerProblemDF = []
for i in range(0, len(best.target)):
t = best.target[i]
# if ((t <= target_ub) and (t >= target_lb)):
if toolsstats.in_approximately(t,
targets((f, d), discretize=True)):
# add best for this target:
selectedAlgsPerProblemDF.append(best.algs[i])
# add second best or all algorithms that have an aRT
# within a factor of f_factor of the best:
secondbest_ERT = np.infty
secondbest_str = ''
secondbest_included = False
for astring in j:
currdictalg = dictAlg[astring].dictByDim()
if currdictalg.has_key(d):
curralgdata = currdictalg[d][f - 1]
currERT = curralgdata.detERT([t])[0]
if (astring != best.algs[i]):
if (currERT < secondbest_ERT):
secondbest_ERT = currERT
secondbest_str = astring
if (currERT <= best.detERT([t])[0] * f_factor):
selectedAlgsPerProblemDF.append(astring)
secondbest_included = True
if not (secondbest_included) and (secondbest_str != ''):
selectedAlgsPerProblemDF.append(secondbest_str)
if len(selectedAlgsPerProblemDF) > 0:
selectedAlgsPerProblem[(d, f)] = selectedAlgsPerProblemDF
print('pre-processing of function %d done.' % f)
print('loading of best algorithm(s) data done.')
countsperalgorithm = {}
for (d, f) in selectedAlgsPerProblem:
print('dimension: %d, function: %d' % (d, f))
setofalgs = set(selectedAlgsPerProblem[d, f])
# now count how often algorithm a is best for the extracted targets
for a in setofalgs:
# use setdefault to initialize with zero if a entry not existant:
countsperalgorithm.setdefault((d, a), 0)
countsperalgorithm[(d, a)] += selectedAlgsPerProblem[d, f].count(a)
selectedalgsperdimension = {}
for (d, a) in sorted(countsperalgorithm):
if not selectedalgsperdimension.has_key(d):
selectedalgsperdimension[d] = []
selectedalgsperdimension[d].append((countsperalgorithm[(d, a)], a))
for d in sorted(selectedalgsperdimension):
print('%dD:' % d)
for (count, alg) in sorted(selectedalgsperdimension[d], reverse=True):
print(count, alg)
print('\n')
print(" done.")
return selectedalgsperdimension
def get_used_instance_list(instance_number_list):
different_instances = []
for instance_numbers in instance_number_list:
if instance_numbers not in different_instances:
different_instances.append(instance_numbers)
if len(different_instances) == 0:
return None
elif len(different_instances) == 1:
return ','.join(str(i) for i in different_instances[0])
else:
instance_summary = []
for instance_list in different_instances:
instance_summary.append(','.join(str(i) for i in instance_list))
return '[' + '],['.join(str(i) for i in instance_summary) + ']'
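def _demo_get_used_instance_list():
    # Editor's sketch (not part of the original module, left uncalled):
    # identical instance lists collapse to one comma-separated string,
    # differing ones are bracketed per group.
    assert get_used_instance_list([[1, 2], [1, 2]]) == '1,2'
    assert get_used_instance_list([[1, 2], [3]]) == '[1,2],[3]'
    assert get_used_instance_list([]) is None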
| bsd-3-clause | 1,950,457,813,896,722,700 | 38.739905 | 117 | 0.596754 | false |
ivanlyon/exercises | kattis/k_bachetsgame.py | 1 | 1511 | '''
Determine the perfect-play winner of Bachet's game between players Stan and Ollie. This
is a normal nim game where the winner is the last person to make a move.
Status: Accepted
'''
import sys
###############################################################################
def nim_normal(total_stones, quants):
'''Normal nim game where stones removed must be a listed value.'''
unknown = 0
first = 1 # ID of 1st player to draw
second = 2 # ID of 2nd drawing player
limit = total_stones - max(quants)
winner = [unknown for _ in range(total_stones + 1)]
for stones in range(limit):
if winner[stones] == unknown:
winner[stones] = second
for amount in quants:
winner[stones + amount] = first
for stones in range(limit, total_stones + 1):
if winner[stones] == unknown:
winner[stones] = second
for amount in quants:
if stones + amount <= total_stones:
winner[stones + amount] = first
if winner[total_stones] != unknown:
break
if winner[total_stones] == second:
return "Ollie wins"
return "Stan wins"
###############################################################################
if __name__ == '__main__':
for testCase in sys.stdin:
inputs = [int(i) for i in testCase.split()]
print(nim_normal(inputs[0], inputs[2:]))
###############################################################################
| mit | -3,028,064,864,049,791,000 | 31.847826 | 79 | 0.497022 | false |
pepincho/Python-Course-FMI | 02_challenge/solution.py | 1 | 2343 | COLORS = ['red', 'green', 'blue']
def get_number_rows_cols(image):
return len(image), len(image[0])
def create_new_image_matrix(rows, cols):
new_image = [
[() for x in range(cols)] for y in range(rows)
]
return new_image
def invert_pixel(pixel):
return [255 - x for x in pixel]
def light_pixel(pixel, number):
return [int(x + (255 - x) * number) for x in pixel]
def dark_pixel(pixel, number):
return [int(x - (x - 0) * number) for x in pixel]
def grayscale(func):
def helper(image, *args):
rows, cols = len(image), len(image[0])
for i in range(rows):
for j in range(cols):
gray = int(sum([x for x in image[i][j]]) / len(COLORS))
image[i][j] = tuple([gray for x in range(len(COLORS))])
if len(args) != 0:
return func(image, args[0])
return func(image)
return helper
@grayscale
def rotate_left(image):
rows, cols = get_number_rows_cols(image)
rotated_image = create_new_image_matrix(cols, rows)
for i in range(cols):
for j in range(rows):
rotated_image[i][j] = image[j][- (i - cols + 1)]
return rotated_image
@grayscale
def rotate_right(image):
rows, cols = get_number_rows_cols(image)
rotated_image = create_new_image_matrix(cols, rows)
for i in range(cols):
for j in range(rows):
rotated_image[i][j] = image[- (j - rows + 1)][i]
return rotated_image
@grayscale
def invert(image):
rows, cols = get_number_rows_cols(image)
inverted_image = create_new_image_matrix(rows, cols)
for i in range(rows):
for j in range(cols):
inverted_image[i][j] = tuple(invert_pixel(image[i][j]))
return inverted_image
@grayscale
def lighten(image, number):
rows, cols = get_number_rows_cols(image)
lighten_image = create_new_image_matrix(rows, cols)
for i in range(rows):
for j in range(cols):
lighten_image[i][j] = tuple(light_pixel(image[i][j], number))
return lighten_image
@grayscale
def darken(image, number):
rows, cols = get_number_rows_cols(image)
darken_image = create_new_image_matrix(rows, cols)
for i in range(rows):
for j in range(cols):
darken_image[i][j] = tuple(dark_pixel(image[i][j], number))
return darken_image
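def _demo_image_ops():
    # Editor's sketch (hypothetical 1x2 RGB image, not part of the original
    # solution). Note that the grayscale decorator converts the input to
    # grayscale in place before applying the wrapped operation.
    image = [[(0, 0, 0), (255, 255, 255)]]
    assert invert(image)[0][0] == (255, 255, 255)
    assert lighten([[(0, 0, 0)]], 0.5)[0][0] == (127, 127, 127)
    assert darken([[(200, 200, 200)]], 0.5)[0][0] == (100, 100, 100)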
| mit | 1,009,137,948,147,740,700 | 25.931034 | 73 | 0.601793 | false |
etalab/ckan-toolbox | ckantoolbox/texthelpers.py | 1 | 3182 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# CKAN-Toolbox -- Various modules that handle CKAN API and data
# By: Emmanuel Raviart <[email protected]>
#
# Copyright (C) 2013 Etalab
# http://github.com/etalab/ckan-toolbox
#
# This file is part of CKAN-Toolbox.
#
# CKAN-Toolbox is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# CKAN-Toolbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle strings"""
import re
from biryani1 import strings
tag_char_re = re.compile(ur'[- \w]', re.UNICODE)
def namify(text, encoding = 'utf-8'):
"""Convert a string to a CKAN name."""
if text is None:
return None
if isinstance(text, str):
text = text.decode(encoding)
assert isinstance(text, unicode), str((text,))
simplified = u''.join(namify_char(unicode_char) for unicode_char in text)
# CKAN accepts names with duplicate "-" or "_" and/or ending with "-" or "_".
#while u'--' in simplified:
# simplified = simplified.replace(u'--', u'-')
#while u'__' in simplified:
# simplified = simplified.replace(u'__', u'_')
#simplified = simplified.strip(u'-_')
return simplified
def namify_char(unicode_char):
"""Convert an unicode character to a subset of lowercase ASCII characters or an empty string.
The result can be composed of several characters (for example, 'œ' becomes 'oe').
"""
chars = strings.unicode_char_to_ascii(unicode_char)
if chars:
chars = chars.lower()
split_chars = []
for char in chars:
if char not in '-_0123456789abcdefghijklmnopqrstuvwxyz':
char = '-'
split_chars.append(char)
chars = ''.join(split_chars)
return chars
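def _demo_namify():
    # Editor's sketch (assumes biryani1 transliterates plain ASCII letters to
    # themselves): every character outside [-_0-9a-z] becomes a dash.
    assert namify(u'Hello World!') == u'hello-world-'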
def tag_namify(text, encoding = 'utf-8'):
"""Convert a string to a CKAN tag name."""
if text is None:
return None
if isinstance(text, str):
text = text.decode(encoding)
assert isinstance(text, unicode), str((text,))
simplified = u''.join(tag_namify_char(unicode_char) for unicode_char in text)
# CKAN accepts tag names with duplicate "-" or "_" and/or ending with "-" or "_".
#while u'--' in simplified:
# simplified = simplified.replace(u'--', u'-')
#while u'__' in simplified:
# simplified = simplified.replace(u'__', u'_')
#simplified = simplified.strip(u'-_')
return simplified
def tag_namify_char(unicode_char):
"""Convert an unicode character to a subset of lowercase characters or an empty string."""
unicode_char = unicode_char.lower()
if tag_char_re.match(unicode_char) is None:
unicode_char = u'-'
return unicode_char
| agpl-3.0 | 4,626,307,164,070,041,000 | 32.840426 | 97 | 0.658598 | false |
jeffx/python_roshambo_runner | python_roshambo_runner/python_roshambo_runner.py | 1 | 3623 | # -*- coding: utf-8 -*-
import sys
from prettytable import PrettyTable
from bots import *
ROUNDS = 1000
class RoShamBo():
def __init__(self):
self.bots = []
self.result_table = PrettyTable(["Bot Name", "Hand Total", "Losses",
"Ties", "Wins", "Score"])
self._load_bots()
self._setup_table()
def _load_bots(self):
bots = [key for key in sys.modules.keys() if key.startswith("bots.")]
for b in bots:
function_name = sys.modules[b].__entryfunction__
bot = {}
bot['name'] = sys.modules[b].__botname__
bot['function'] = getattr(sys.modules[b], function_name)
bot['totalhands'] = 0
bot['wins'] = []
bot['losses'] = []
bot['ties'] = []
self.bots.append(bot)
def _setup_table(self):
self.result_table.align['Bot Name'] = 'l'
self.result_table.padding_width = 1
def generate_results(self):
results = sorted(self.bots, key=lambda k: k['score'], reverse=True)
for bot in results:
self.result_table.add_row([bot['name'], bot['totalhands'],
len(bot['losses']), len(bot['ties']),
len(bot['wins']), bot['score']])
def run_match(self, player1, player2):
player1_score = 0
player2_score = 0
player1_match = []
player2_match = []
for round_count in range(ROUNDS):
player1_match.append(player1['function'](player2_match))
player2_match.append(player2['function'](player1_match))
hand_result = self.compare_hand(player1_match[round_count],
player2_match[round_count])
if hand_result == 0:
# leaving in here in case there is any work to do in the event
# of a hand tie
pass
elif hand_result == 1:
player1_score = player1_score + 1
elif hand_result == 2:
player2_score = player2_score + 1
else:
print("Shit fucked up")
player1['totalhands'] = player1['totalhands'] + player1_score
player2['totalhands'] = player2['totalhands'] + player2_score
if player1_score == player2_score:
return 0
elif player1_score > player2_score:
return 1
elif player1_score < player2_score:
return 2
else:
            return False  # this should not happen but just being safe
def run_tourney(self):
bot_index = 0
for bot in self.bots:
bot_index = bot_index + 1
if bot_index > len(self.bots):
return
for opp in self.bots[bot_index:]:
winner = self.run_match(bot, opp)
if winner == 0:
bot['ties'].append(opp)
opp['ties'].append(bot)
elif winner == 1:
bot['wins'].append(opp)
opp['losses'].append(bot)
elif winner == 2:
bot['losses'].append(opp)
opp['wins'].append(bot)
else:
return False # should never get here
for bot in self.bots:
bot['score'] = (len(bot['wins']) * 2) + len(bot['ties'])
def compare_hand(self, p1, p2):
if p1 == p2:
return 0
elif (p1 - p2 + 3) % 3 == 1:
return 1
else:
return 2
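# Editor's sketch (hypothetical; assumes the bots use the conventional
# 0=rock, 1=paper, 2=scissors encoding): (p1 - p2 + 3) % 3 == 1 holds exactly
# when p1 threw the hand that beats p2.
def _demo_compare_hand():
    game = RoShamBo()  # note: instantiating loads every module in bots/
    assert game.compare_hand(1, 0) == 1  # paper beats rock
    assert game.compare_hand(0, 1) == 2  # rock loses to paper
    assert game.compare_hand(2, 2) == 0  # tie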
| gpl-3.0 | 166,068,250,727,550,600 | 34.519608 | 78 | 0.484405 | false |
dasbruns/PrismaIO | MarkovModel.py | 1 | 1783 | #!/usr/bin/env python
#from PrismaState import PrismaState as P
class MarkovModel(object):
def __init__(self):
self.model = {}
def __str__(self):
s = ''
for key in self.model:
for value in self.model[key]:
s += str(value) + ', '
return s[:-2]
def __getitem__(self,key):
return self.model[key]
def keys(self):
return self.model.keys()
def __iter__(self):
return self.model.items().__iter__()
def add(self,mtrans):
if mtrans.curState not in self.model:
self.model[mtrans.curState] = []
self.model[mtrans.curState].append(mtrans.nextState)
def modelEnhancer(self, depth=0):
import copy
goOn = False
# if depth == 0:
# print(len(self.model))
# for k,v in self.model.items():
# print(k, '--> ', v)
# print()
cpy = copy.deepcopy(self.model)
for key in cpy.keys():
            for value in cpy[key]:
if 'END' in value.getCurState():
# print('removing value ', value, 'from key', key)
self.model[key].remove(value)
if self.model[key] == []:
del self.model[key]
# print('removing key ',key, 'because it is empty')
cpyDash = copy.deepcopy(self.model)
for otherKey in cpyDash.keys():
if key in self.model[otherKey]:
goOn = True
# print('removing from key', otherKey, 'value', key)
self.model[otherKey].remove(key)
if goOn:
self.modelEnhancer(depth+1)
# if depth == 0:
# print(len(self.model))
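# Editor's sketch (hypothetical, left uncalled): add() expects transition
# objects exposing .curState and .nextState, so a minimal stand-in is enough
# to exercise it.
class _FakeTransition(object):
    def __init__(self, cur, nxt):
        self.curState = cur
        self.nextState = nxt
def _demo_markov_model():
    m = MarkovModel()
    m.add(_FakeTransition('A', 'B'))
    m.add(_FakeTransition('A', 'C'))
    assert sorted(m['A']) == ['B', 'C']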
| gpl-2.0 | 6,881,292,675,420,472,000 | 29.741379 | 76 | 0.492989 | false |
nicksenger/mydegreen | infoParsing/universityParser.py | 1 | 1561 | def findBetween(string, before, after):
cursor = 0
finished = False
matchArray = []
while finished == False:
try:
start = string.index(before, cursor) + len(before)
end = string.index(after, start)
matchArray.append(string[start:end])
cursor = end
except ValueError:
finished = True
return matchArray
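def _demo_find_between():
    # Editor's sketch (not part of the original script, left uncalled):
    # collects every substring delimited by `before`/`after`, left to right.
    assert findBetween('<a>x</a><a>y</a>', '<a>', '</a>') == ['x', 'y']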
def findRubbish(string, before, after):
try:
start = string.index(before)
end = string.index(after, start) + len(after)
matchArray = string[start:end]
tupled = string.partition(matchArray)
return tupled[2]
except ValueError:
return ''
def removeRubbish(item):
return findRubbish(item, '//', '/\">')
def removeTruncants(item):
return len(item) > 6
def removeLink(item):
try:
return item.partition(' - Link')[0]
except:
return item
rawUniversities = open("rawUniversities.txt", "r+")
rawString = rawUniversities.read()
rawUniversities.close()
firstMatch = findBetween(rawString, '<a href=\"', '</a>')
# print firstMatch
cleaned = map(removeRubbish, firstMatch)
# print cleaned
refined = filter(removeTruncants, cleaned)
# print refined
polished = map(removeLink, refined)
# print polished
stringified = ""
for item in polished:
stringified = stringified + '\"' + item + '\"' + ', '
degreeList = open("degreeList.txt", "wb")
degreeList.write(stringified)
degreeList.close()
| mit | -2,632,530,555,844,304,000 | 23.177419 | 62 | 0.609225 | false |
arogozhnikov/einops | tests/test_examples.py | 1 | 11194 | import numpy
from einops import rearrange, parse_shape, reduce
from tests import collect_test_backends
from tests.test_ops import imp_op_backends
def test_rearrange_examples():
def test1(x):
# transpose
y = rearrange(x, 'b c h w -> b h w c')
assert y.shape == (10, 30, 40, 20)
return y
def test2(x):
# view / reshape
y = rearrange(x, 'b c h w -> b (c h w)')
assert y.shape == (10, 20 * 30 * 40)
return y
def test3(x):
# depth-to-space
y = rearrange(x, 'b (c h1 w1) h w -> b c (h h1) (w w1)', h1=2, w1=2)
assert y.shape == (10, 5, 30 * 2, 40 * 2)
return y
def test4(x):
# space-to-depth
y = rearrange(x, 'b c (h h1) (w w1) -> b (h1 w1 c) h w', h1=2, w1=2)
assert y.shape == (10, 20 * 4, 30 // 2, 40 // 2)
return y
def test5(x):
# simple transposition
y = rearrange(x, 'b1 sound b2 letter -> b1 b2 sound letter')
assert y.shape == (10, 30, 20, 40)
return y
def test6(x):
# parsing parameters
t = rearrange(x, 'b c h w -> (b h w) c')
t = t[:, ::2] # replacement for dot-product, just changes size of second axis
assert t.shape == (10 * 30 * 40, 10)
y = rearrange(t, '(b h w) c2 -> b c2 h w', **parse_shape(x, 'b _ h w'))
assert y.shape == (10, 10, 30, 40)
return y
def test7(x):
# split of embedding into groups
y1, y2 = rearrange(x, 'b (c g) h w -> g b c h w', g=2)
assert y1.shape == (10, 10, 30, 40)
assert y2.shape == (10, 10, 30, 40)
return y1 + y2 # only one tensor is expected in output
def test8(x):
# max-pooling
y = reduce(x, 'b c (h h1) (w w1) -> b c h w', reduction='max', h1=2, w1=2)
assert y.shape == (10, 20, 30 // 2, 40 // 2)
return y
def test9(x):
# squeeze - unsqueeze
y = reduce(x, 'b c h w -> b c () ()', reduction='max')
assert y.shape == (10, 20, 1, 1)
y = rearrange(y, 'b c () () -> c b')
assert y.shape == (20, 10)
return y
def test10(x):
# stack
tensors = list(x + 0) # 0 is needed https://github.com/tensorflow/tensorflow/issues/23185
tensors = rearrange(tensors, 'b c h w -> b h w c')
assert tensors.shape == (10, 30, 40, 20)
return tensors
def test11(x):
# concatenate
tensors = list(x + 0) # 0 is needed https://github.com/tensorflow/tensorflow/issues/23185
tensors = rearrange(tensors, 'b c h w -> h (b w) c')
assert tensors.shape == (30, 10 * 40, 20)
return tensors
def shufflenet(x, convolve, c1, c2):
# shufflenet reordering example
x = convolve(x)
x = rearrange(x, 'b (c1 c2) h w-> b (c2 c1) h w', c1=c1, c2=c2)
x = convolve(x)
return x
def convolve_strided_1d(x, stride, usual_convolution):
x = rearrange(x, 'b c t1 t2 -> b c (t1 t2)') # reduce dimensionality
x = rearrange(x, 'b c (t stride) -> (stride b) c t', stride=stride)
x = usual_convolution(x)
x = rearrange(x, '(stride b) c t -> b c (t stride)', stride=stride)
return x
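    # note: folding the stride into the batch axis (above) lets a plain
    # stride-1 convolution emulate a strided one; the final rearrange
    # interleaves the per-stride results back into a single time axis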
def convolve_strided_2d(x, h_stride, w_stride, usual_convolution):
x = rearrange(x, 'b c (h hs) (w ws) -> (hs ws b) c h w', hs=h_stride, ws=w_stride)
x = usual_convolution(x)
x = rearrange(x, '(hs ws b) c h w -> b c (h hs) (w ws)', hs=h_stride, ws=w_stride)
return x
def unet_like_1d(x, usual_convolution):
# u-net like steps for increasing / reducing dimensionality
x = rearrange(x, 'b c t1 t2 -> b c (t1 t2)') # reduce dimensionality
y = rearrange(x, 'b c (t dt) -> b (dt c) t', dt=2)
y = usual_convolution(y)
x = x + rearrange(y, 'b (dt c) t -> b c (t dt)', dt=2)
return x
# mock for convolution (works for all backends)
convolve_mock = lambda x: x
tests = [test1, test2, test3, test4, test5, test6, test7, test8, test9, test10, test11,
lambda x: shufflenet(x, convolve=convolve_mock, c1=4, c2=5),
lambda x: convolve_strided_1d(x, stride=2, usual_convolution=convolve_mock),
lambda x: convolve_strided_2d(x, h_stride=2, w_stride=2, usual_convolution=convolve_mock),
lambda x: unet_like_1d(x, usual_convolution=convolve_mock),
]
for backend in imp_op_backends:
print('testing source_examples for ', backend.framework_name)
for test in tests:
x = numpy.arange(10 * 20 * 30 * 40).reshape([10, 20, 30, 40])
result1 = test(x)
result2 = backend.to_numpy(test(backend.from_numpy(x)))
assert numpy.array_equal(result1, result2)
# now with strides
x = numpy.arange(10 * 2 * 20 * 3 * 30 * 1 * 40).reshape([10 * 2, 20 * 3, 30 * 1, 40 * 1])
# known torch bug - torch doesn't support negative steps
last_step = -1 if backend.framework_name != 'torch' else 1
indexing_expression = numpy.index_exp[::2, ::3, ::1, ::last_step]
result1 = test(x[indexing_expression])
result2 = backend.to_numpy(test(backend.from_numpy(x)[indexing_expression]))
assert numpy.array_equal(result1, result2)
def tensor_train_example_numpy():
# kept here just for a collection, only tested for numpy
# https://arxiv.org/pdf/1509.06569.pdf, (5)
x = numpy.ones([3, 4, 5, 6])
rank = 4
    if tuple(int(p) for p in numpy.__version__.split('.')[:2]) < (1, 15):
# numpy.einsum fails here, skip test
return
# creating appropriate Gs
Gs = [numpy.ones([d, d, rank, rank]) for d in x.shape]
Gs[0] = Gs[0][:, :, :1, :]
Gs[-1] = Gs[-1][:, :, :, :1]
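    # the outer bonds of the first and last cores are sliced to rank 1, so the
    # tensor-train contraction starts and ends with a scalar bond dimension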
# einsum way
y = x.reshape((1,) + x.shape)
for G in Gs:
# taking partial results left-to-right
# y = numpy.einsum('i j alpha beta, alpha i ... -> beta ... j', G, y)
y = numpy.einsum('i j a b, a i ... -> b ... j', G, y)
y1 = y.reshape(-1)
# alternative way
y = x.reshape(-1)
for G in Gs:
i, j, alpha, beta = G.shape
y = rearrange(y, '(i rest alpha) -> rest (alpha i)', alpha=alpha, i=i)
y = y @ rearrange(G, 'i j alpha beta -> (alpha i) (j beta)')
y = rearrange(y, 'rest (beta j) -> (beta rest j)', beta=beta, j=j)
y2 = y
assert numpy.allclose(y1, y2)
# yet another way
y = x
for G in Gs:
i, j, alpha, beta = G.shape
y = rearrange(y, 'i ... (j alpha) -> ... j (alpha i)', alpha=alpha, i=i)
y = y @ rearrange(G, 'i j alpha beta -> (alpha i) (j beta)')
y3 = y.reshape(-1)
assert numpy.allclose(y1, y3)
def test_pytorch_yolo_fragment():
if not any(b.framework_name == 'torch' for b in collect_test_backends(symbolic=False, layers=False)):
return
import torch
def old_way(input, num_classes, num_anchors, anchors, stride_h, stride_w):
# https://github.com/BobLiu20/YOLOv3_PyTorch/blob/c6b483743598b5f64d520d81e7e5f47ba936d4c9/nets/yolo_loss.py#L28-L44
bs = input.size(0)
in_h = input.size(2)
in_w = input.size(3)
scaled_anchors = [(a_w / stride_w, a_h / stride_h) for a_w, a_h in anchors]
prediction = input.view(bs, num_anchors,
5 + num_classes, in_h, in_w).permute(0, 1, 3, 4, 2).contiguous()
# Get outputs
x = torch.sigmoid(prediction[..., 0]) # Center x
y = torch.sigmoid(prediction[..., 1]) # Center y
w = prediction[..., 2] # Width
h = prediction[..., 3] # Height
conf = torch.sigmoid(prediction[..., 4]) # Conf
pred_cls = torch.sigmoid(prediction[..., 5:]) # Cls pred.
# https://github.com/BobLiu20/YOLOv3_PyTorch/blob/c6b483743598b5f64d520d81e7e5f47ba936d4c9/nets/yolo_loss.py#L70-L92
FloatTensor = torch.cuda.FloatTensor if x.is_cuda else torch.FloatTensor
LongTensor = torch.cuda.LongTensor if x.is_cuda else torch.LongTensor
# Calculate offsets for each grid
grid_x = torch.linspace(0, in_w - 1, in_w).repeat(in_w, 1).repeat(
bs * num_anchors, 1, 1).view(x.shape).type(FloatTensor)
grid_y = torch.linspace(0, in_h - 1, in_h).repeat(in_h, 1).t().repeat(
bs * num_anchors, 1, 1).view(y.shape).type(FloatTensor)
# Calculate anchor w, h
anchor_w = FloatTensor(scaled_anchors).index_select(1, LongTensor([0]))
anchor_h = FloatTensor(scaled_anchors).index_select(1, LongTensor([1]))
anchor_w = anchor_w.repeat(bs, 1).repeat(1, 1, in_h * in_w).view(w.shape)
anchor_h = anchor_h.repeat(bs, 1).repeat(1, 1, in_h * in_w).view(h.shape)
# Add offset and scale with anchors
pred_boxes = FloatTensor(prediction[..., :4].shape)
pred_boxes[..., 0] = x.data + grid_x
pred_boxes[..., 1] = y.data + grid_y
pred_boxes[..., 2] = torch.exp(w.data) * anchor_w
pred_boxes[..., 3] = torch.exp(h.data) * anchor_h
# Results
_scale = torch.Tensor([stride_w, stride_h] * 2).type(FloatTensor)
output = torch.cat((pred_boxes.view(bs, -1, 4) * _scale,
conf.view(bs, -1, 1), pred_cls.view(bs, -1, num_classes)), -1)
return output
def new_way(input, num_classes, num_anchors, anchors, stride_h, stride_w):
raw_predictions = rearrange(input, ' b (anchor prediction) h w -> prediction b anchor h w', anchor=num_anchors)
anchors = torch.FloatTensor(anchors).to(input.device)
anchor_sizes = rearrange(anchors, 'anchor dim -> dim () anchor () ()')
_, _, _, in_h, in_w = raw_predictions.shape
grid_h = rearrange(torch.arange(in_h).float(), 'h -> () () h ()').to(input.device)
grid_w = rearrange(torch.arange(in_w).float(), 'w -> () () () w').to(input.device)
predicted_bboxes = torch.zeros_like(raw_predictions)
predicted_bboxes[0] = (raw_predictions[0].sigmoid() + grid_h) * stride_h # center y
predicted_bboxes[1] = (raw_predictions[1].sigmoid() + grid_w) * stride_w # center x
predicted_bboxes[2:4] = (raw_predictions[2:4].exp()) * anchor_sizes # bbox width and height
predicted_bboxes[4] = raw_predictions[4].sigmoid() # confidence
predicted_bboxes[5:] = raw_predictions[5:].sigmoid() # class predictions
# only to match results of original code, not needed
return rearrange(predicted_bboxes, 'prediction b anchor h w -> b anchor h w prediction')
stride_h = 4
stride_w = 4
batch_size = 5
num_classes = 12
anchors = [[50, 100], [100, 50], [75, 75]]
num_anchors = len(anchors)
input = torch.randn([batch_size, num_anchors * (5 + num_classes), 1, 1])
result1 = old_way(input=input, num_anchors=num_anchors, num_classes=num_classes,
stride_h=stride_h, stride_w=stride_w, anchors=anchors)
result2 = new_way(input=input, num_anchors=num_anchors, num_classes=num_classes,
stride_h=stride_h, stride_w=stride_w, anchors=anchors)
result1 = result1.reshape(result2.shape)
assert torch.allclose(result1, result2) | mit | 2,737,831,977,579,695,600 | 42.057692 | 124 | 0.563516 | false |
DXCanas/content-curation | contentcuration/contentcuration/tests/test_files.py | 1 | 3630 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import json
import pytest
from builtins import str
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse_lazy
from le_utils.constants import content_kinds
from le_utils.constants import format_presets
from mock import patch
from past.builtins import basestring
from .base import BaseAPITestCase
from .base import StudioTestCase
from .testdata import base64encoding
from .testdata import generated_base64encoding
from .testdata import srt_subtitle
from contentcuration.api import write_raw_content_to_storage
from contentcuration.models import ContentNode
from contentcuration.models import delete_empty_file_reference
from contentcuration.models import File
from contentcuration.models import generate_object_storage_name
from contentcuration.utils.files import create_thumbnail_from_base64
from contentcuration.utils.files import get_thumbnail_encoding
from contentcuration.utils.nodes import map_files_to_node
from contentcuration.utils.publish import create_associated_thumbnail
pytestmark = pytest.mark.django_db
class FileThumbnailTestCase(BaseAPITestCase):
def setUp(self):
super(FileThumbnailTestCase, self).setUp()
self.thumbnail_fobj = create_thumbnail_from_base64(base64encoding())
filepath = generate_object_storage_name(self.thumbnail_fobj.checksum, str(self.thumbnail_fobj))
with default_storage.open(filepath, 'rb') as fobj:
self.thumbnail_contents = fobj.read()
def test_get_thumbnail_encoding(self):
encoding = get_thumbnail_encoding(str(self.thumbnail_fobj))
self.assertEqual(encoding, generated_base64encoding())
@patch('contentcuration.api.default_storage.save')
@patch('contentcuration.api.default_storage.exists', return_value=True)
def test_existing_thumbnail_is_not_created(self, storage_exists_mock, storage_save_mock):
create_thumbnail_from_base64(base64encoding())
storage_exists_mock.assert_called()
storage_save_mock.assert_not_called()
def test_internal_thumbnail(self):
# Create exercise node (generated images are more predictable)
node = ContentNode(title="Test Node", kind_id=content_kinds.VIDEO)
node.save()
file_data = [{
"preset": None,
"filename": str(self.thumbnail_fobj),
"language": "en",
"size": self.thumbnail_fobj.file_size,
}]
map_files_to_node(self.user, node, file_data)
self.assertTrue(isinstance(node.thumbnail_encoding, basestring))
thumbnail_data = json.loads(node.thumbnail_encoding)
self.assertEqual(thumbnail_data['base64'], generated_base64encoding())
def test_exportchannel_thumbnail(self):
node = ContentNode(title="Test Node", kind_id=content_kinds.VIDEO)
node.save()
newfile = create_associated_thumbnail(node, self.thumbnail_fobj)
self.assertTrue(isinstance(newfile, File))
thumbnail_data = json.loads(node.thumbnail_encoding)
self.assertEqual(thumbnail_data['base64'], generated_base64encoding())
class NodeFileDeletionTestCase(StudioTestCase):
def test_delete_empty_file_reference(self):
checksum, _, storage_path = write_raw_content_to_storage(b'some fake PDF data', ext='.pdf')
assert default_storage.exists(storage_path), 'file should be saved'
delete_empty_file_reference(checksum, 'pdf')
assert not default_storage.exists(storage_path), 'file should be deleted'
| mit | -8,231,573,291,239,268,000 | 41.209302 | 103 | 0.736639 | false |
yingxuanxuan/fabric_script | shadowsocks.py | 1 | 2157 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from fabric.api import reboot, sudo, settings
logging.basicConfig(level=logging.INFO)
def ssserver(port, password, method):
try:
sudo('hash yum')
sudo('hash python')
sudo('yum -y update 1>/dev/null')
sudo('yum -y install python-setuptools 1>/dev/null')
sudo('yum -y install m2crypto 1>/dev/null')
sudo('easy_install pip 1>/dev/null')
sudo('pip install shadowsocks 1>/dev/null')
sudo('hash ssserver')
sudo("sed -i '/ssserver/d' /etc/rc.d/rc.local")
cmd = '/usr/bin/python /usr/bin/ssserver -p %s -k %s -m %s --user nobody -d start' % \
(port, password, method)
sudo("sed -i '$a %s' /etc/rc.d/rc.local" % cmd)
sudo('chmod +x /etc/rc.d/rc.local')
sudo('firewall-cmd --zone=public --add-port=%s/tcp --permanent' % port)
with settings(warn_only=True):
reboot()
sudo('ps -ef | grep ssserver')
return True
except BaseException as e:
logging.error(e)
return False
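# invocation sketch for fabric 1.x (host and values are placeholders):
#   fab -H root@server ssserver:8388,secret,aes-256-cfb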
def sslocal(server_addr, server_port, server_password, method, local_port):
try:
sudo('hash yum')
sudo('hash python')
sudo('yum -y update 1>/dev/null')
sudo('yum -y install python-setuptools 1>/dev/null')
sudo('yum -y install m2crypto 1>/dev/null')
sudo('easy_install pip 1>/dev/null')
sudo('pip install shadowsocks 1>/dev/null')
sudo('hash sslocal')
sudo("sed -i '/sslocal /d' /etc/rc.d/rc.local")
cmd = '/usr/bin/python /usr/bin/sslocal -s %s -p %s -k %s -m %s -b 0.0.0.0 -l %s --user nobody -d start' % \
(server_addr, server_port, server_password, method, local_port)
sudo("sed -i '$a %s' /etc/rc.d/rc.local" % cmd)
sudo('chmod +x /etc/rc.d/rc.local')
sudo('firewall-cmd --zone=public --add-port=%s/tcp --permanent' % local_port)
with settings(warn_only=True):
reboot()
sudo('ps -ef | grep sslocal')
return True
except BaseException as e:
logging.error(e)
return False
| apache-2.0 | -8,513,914,804,228,070,000 | 34.360656 | 116 | 0.572091 | false |
yavuzovski/playground | python/Udacity/cs212/fill_in.py | 1 | 1106 | # -------------
# User Instructions
#
# Complete the fill_in(formula) function by adding your code to
# the two places marked with ?????.
from __future__ import division
import string, re, itertools
def solve(formula):
"""Given a formula like 'ODD + ODD == EVEN', fill in digits to solve it.
Input formula is a string; output is a digit-filled-in string or None."""
for f in fill_in(formula):
if valid(f):
return f
def fill_in(formula):
"Generate all possible fillings-in of letters in formula with digits."
letters = "".join(set(re.findall("[A-Z]", formula)))
print letters
for digits in itertools.permutations('1234567890', len(letters)):
table = string.maketrans(letters, ''.join(digits))
yield formula.translate(table)
def valid(f):
"""Formula f is valid if and only if it has no
numbers with leading zero, and evals true."""
try:
return not re.search(r"\b0[0-9]", f) and eval(f) is True
except ArithmeticError:
return False
print solve("ODD + ODD == EVEN")
# print solve("A**2 + BC**2 == BD**2")
| gpl-3.0 | -4,835,289,567,200,269,000 | 28.105263 | 77 | 0.637432 | false |
SUNET/python-vccs_client | setup.py | 1 | 1112 | #!/usr/bin/env python
#
from setuptools import setup, find_packages
import sys, os
#from distutils import versionpredicate
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
version = '0.4.7'
install_requires = [
'py-bcrypt >= 0.4',
'simplejson >= 3.3.0',
'pymongo >= 3.6',
'six',
]
testing_extras = [
'pytest>=5.2.0',
'pytest-cov>=2.7.1',
]
setup(name='vccs_client',
version=version,
description="Very Complicated Credential System - authentication client",
long_description=README,
classifiers=[
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
],
keywords='security password hashing bcrypt PBKDF2',
author='Fredrik Thulin',
author_email='[email protected]',
license='BSD',
packages=['vccs_client',],
package_dir = {'': 'src'},
#include_package_data=True,
#package_data = { },
zip_safe=False,
install_requires=install_requires,
extras_require={
'testing': testing_extras,
},
)
| bsd-3-clause | 6,115,842,742,278,403,000 | 24.272727 | 81 | 0.614209 | false |
gurunars/dict-validator | dict_validator/fields/regexp/email_field.py | 1 | 2362 | import re
from dict_validator.fields import String
class Email(String):
"""
Make sure that the input is a valid email.
:param domain: string representing a desired domain name. e.g. "gmail.com"
if not present matches any domain name
>>> from dict_validator import validate, deserialize
>>> class Schema:
... field = Email()
>>> list(validate(Schema, {"field": "[email protected]"}))
[]
>>> list(validate(Schema, {"field": "[email protected]"}))
[]
>>> list(validate(Schema, {"field": "[email protected]"}))
[]
>>> list(validate(Schema, {"field": "[email protected]"}))
[]
>>> list(validate(Schema, {"field": "test%%[email protected]"}))
[]
>>> list(validate(Schema, {"field": "[email protected]"}))
[]
Missing domain:
>>> list(validate(Schema, {"field": "test@"}))
[(['field'], 'Did not match Regexp(email)')]
Missing beginning:
>>> list(validate(Schema, {"field": "@example-ff.com"}))
[(['field'], 'Did not match Regexp(email)')]
Wrong beginning:
>>> list(validate(Schema, {"field": "[email protected]"}))
[(['field'], 'Did not match Regexp(email)')]
Wrong domain:
>>> list(validate(Schema, {"field": "test123@examp++e.com"}))
[(['field'], 'Did not match Regexp(email)')]
No @ char:
>>> list(validate(Schema, {"field": "fdfdfdgdg"}))
[(['field'], 'Did not match Regexp(email)')]
Specify a domain:
>>> class Schema:
... field = Email(domain="example.com")
>>> list(validate(Schema, {"field": "[email protected]"}))
[]
Wrong domain:
>>> list(validate(Schema, {"field": "[email protected]"}))
[(['field'], 'Did not match Regexp(email)')]
>>> deserialize(Schema, {"field": "[email protected]"}).field
'[email protected]'
"""
def __init__(self, domain=None, **kwargs):
if domain:
domain = re.escape(domain)
else:
domain = r"(?:[a-zA-Z0-9-]+\.)+[a-z]{2,}"
super(Email, self).__init__(
r"^[a-zA-Z0-9._%+-]+@{}$".format(domain),
"email", **kwargs)
def deserialize(self, value):
# Make sure that the domain name is always in lower case
parts = value.split("@", 1)
return "@".join([parts[0], parts[1].lower()])
| mit | -4,664,667,914,755,185,000 | 24.956044 | 78 | 0.558002 | false |
CI-WATER/django-tethys_datasets | tethys_datasets/models.py | 1 | 1798 | from django.db import models
from tethys_dataset_services.valid_engines import VALID_ENGINES, VALID_SPATIAL_ENGINES
# Create your models here.
class DatasetService(models.Model):
"""
ORM for Dataset Service settings.
"""
# Define default values for engine choices
CKAN = VALID_ENGINES['ckan']
HYDROSHARE = VALID_ENGINES['hydroshare']
# Define default choices for engine selection
ENGINE_CHOICES = (
(CKAN, 'CKAN'),
(HYDROSHARE, 'HydroShare')
)
name = models.CharField(max_length=30, unique=True)
engine = models.CharField(max_length=200, choices=ENGINE_CHOICES, default=CKAN)
endpoint = models.CharField(max_length=1024)
apikey = models.CharField(max_length=100, blank=True)
username = models.CharField(max_length=100, blank=True)
password = models.CharField(max_length=100, blank=True)
class Meta:
verbose_name = 'Dataset Service'
verbose_name_plural = 'Dataset Services'
def __unicode__(self):
return self.name
class SpatialDatasetService(models.Model):
"""
ORM for Spatial Dataset Service settings.
"""
GEOSERVER = VALID_SPATIAL_ENGINES['geoserver']
ENGINE_CHOICES = (
(GEOSERVER, 'GeoServer'),
)
name = models.CharField(max_length=30, unique=True)
engine = models.CharField(max_length=200, choices=ENGINE_CHOICES, default=GEOSERVER)
endpoint = models.CharField(max_length=1024)
apikey = models.CharField(max_length=100, blank=True)
username = models.CharField(max_length=100, blank=True)
password = models.CharField(max_length=100, blank=True)
class Meta:
verbose_name = 'Spatial Dataset Service'
verbose_name_plural = 'Spatial Dataset Services'
def __unicode__(self):
return self.name
| bsd-2-clause | 7,734,351,802,227,745,000 | 30.54386 | 88 | 0.683537 | false |
tsybulkin/jumper | nn.py | 1 | 7153 | import random,csv
# Third-party libraries
import numpy as np
class Network(object):
def __init__(self, sizes):
"""The list ``sizes`` contains the number of neurons in the
respective layers of the network. For example, if the list
was [2, 3, 1] then it would be a three-layer network, with the
first layer containing 2 neurons, the second layer 3 neurons,
and the third layer 1 neuron. The biases and weights for the
network are initialized randomly, using a Gaussian
distribution with mean 0, and variance 1. Note that the first
layer is assumed to be an input layer, and by convention we
won't set any biases for those neurons, since biases are only
ever used in computing the outputs from later layers."""
self.num_layers = len(sizes)
self.sizes = sizes
self.biases = [np.random.randn(y, 1) for y in sizes[1:]]
self.weights = [np.random.randn(y, x)
for x, y in zip(sizes[:-1], sizes[1:])]
def feedforward(self, a):
"""Return the output of the network if ``a`` is input."""
for b, w in zip(self.biases, self.weights):
a = sigmoid(np.dot(w, a)+b)
#a = tanh(np.dot(w, a)+b)
return a
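    # minimal usage sketch (illustrative; inputs are column vectors):
    #   net = Network([2, 3, 1])
    #   out = net.feedforward(np.zeros((2, 1)))   # shape (1, 1)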
def SGD(self, training_data, epochs, mini_batch_size, eta,
test_data=None):
"""Train the neural network using mini-batch stochastic
gradient descent. The ``training_data`` is a list of tuples
``(x, y)`` representing the training inputs and the desired
outputs. The other non-optional parameters are
self-explanatory. If ``test_data`` is provided then the
network will be evaluated against the test data after each
epoch, and partial progress printed out. This is useful for
tracking progress, but slows things down substantially."""
if test_data: n_test = len(test_data)
n = len(training_data)
for j in xrange(epochs):
random.shuffle(training_data)
mini_batches = [
training_data[k:k+mini_batch_size]
for k in xrange(0, n, mini_batch_size)]
for mini_batch in mini_batches:
self.update_mini_batch(mini_batch, eta)
if test_data:
print "Epoch {0}: {1} / {2}".format(
j, self.evaluate(test_data), n_test)
else:
print "Epoch {0} complete".format(j)
def update_mini_batch(self, mini_batch, eta):
"""Update the network's weights and biases by applying
gradient descent using backpropagation to a single mini batch.
The ``mini_batch`` is a list of tuples ``(x, y)``, and ``eta``
is the learning rate."""
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
for x,y in mini_batch:
delta_nabla_b, delta_nabla_w = self.backprop(x, y)
nabla_b = [nb+dnb for nb, dnb in zip(nabla_b, delta_nabla_b)]
nabla_w = [nw+dnw for nw, dnw in zip(nabla_w, delta_nabla_w)]
self.weights = [w-(eta/len(mini_batch))*nw
for w, nw in zip(self.weights, nabla_w)]
self.biases = [b-(eta/len(mini_batch))*nb
for b, nb in zip(self.biases, nabla_b)]
def backprop(self, x, y):
"""Return a tuple ``(nabla_b, nabla_w)`` representing the
gradient for the cost function C_x. ``nabla_b`` and
``nabla_w`` are layer-by-layer lists of numpy arrays, similar
to ``self.biases`` and ``self.weights``."""
nabla_b = [np.zeros(b.shape) for b in self.biases]
nabla_w = [np.zeros(w.shape) for w in self.weights]
# feedforward
activation = x
#print 'x:',x
activations = [x] # list to store all the activations, layer by layer
zs = [] # list to store all the z vectors, layer by layer
for b, w in zip(self.biases, self.weights):
#print 'b:',b
#print 'w:',w
z = np.dot(w, activation)+b
zs.append(z)
activation = sigmoid(z)
#activation = tanh(z)
activations.append(activation)
#print 'activations:',activations
# backward pass
delta = self.cost_derivative(activations[-1], y) * \
sigmoid_prime(zs[-1])
#tanh_prime(zs[-1])
nabla_b[-1] = delta
nabla_w[-1] = np.dot(delta, activations[-2].transpose())
# Note that the variable l in the loop below is used a little
# differently to the notation in Chapter 2 of the book. Here,
# l = 1 means the last layer of neurons, l = 2 is the
# second-last layer, and so on. It's a renumbering of the
# scheme in the book, used here to take advantage of the fact
# that Python can use negative indices in lists.
for l in xrange(2, self.num_layers):
z = zs[-l]
sp = sigmoid_prime(z)
#sp = tanh_prime(z)
delta = np.dot(self.weights[-l+1].transpose(), delta) * sp
nabla_b[-l] = delta
nabla_w[-l] = np.dot(delta, activations[-l-1].transpose())
return (nabla_b, nabla_w)
def evaluate(self, test_data):
"""Return the number of test inputs for which the neural
network outputs the correct result. Note that the neural
network's output is assumed to be the index of whichever
neuron in the final layer has the highest activation."""
#test_results = [(np.argmax(self.feedforward(x)), y)
test_results = sum( abs(self.feedforward(x)- y)
for (x, y) in test_data )
#return sum(int(abs(dy)<0.1) for dy in test_results)
return test_results/len(test_data)
def cost_derivative(self, output_activations, y):
"""Return the vector of partial derivatives \partial C_x /
\partial a for the output activations."""
return (output_activations-y)
#### Miscellaneous functions
def sigmoid(z):
"""The sigmoid function."""
return 1.0/(1.0+np.exp(-z))
def sigmoid_prime(z):
"""Derivative of the sigmoid function."""
return sigmoid(z)*(1-sigmoid(z))
def tanh(z):
e_minus, e_plus = np.exp(-z), np.exp(z)
return (e_plus - e_minus)/(e_plus + e_minus)
def tanh_prime(z):
return (2/(np.exp(z)+np.exp(-z)))**2
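# sanity note: tanh'(z) = 1 - tanh(z)**2 = sech(z)**2, which is exactly
# the closed form (2 / (exp(z) + exp(-z)))**2 used above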
def load_data():
with open('dataset.csv') as csvfile:
reader = csv.reader(csvfile, delimiter=';')
data = []
for row in reader:
data.append((random.random(), (np.array([(float(row[0])-1.51)/2,
(float(row[1])-1.51)/2,
float(row[2])/20]).reshape(-1,1),
np.array([[(float(row[3])+1.0)/2]])
) ))
csvfile.close()
data = [ t for (_,t) in sorted(data)]
N = len(data)/6
return data[:4*N], data[4*N:5:N], data[5*N:]
| bsd-2-clause | 710,113,169,008,055,400 | 42.351515 | 85 | 0.562701 | false |
DamCB/tyssue | tyssue/core/multisheet.py | 2 | 2554 | import numpy as np
import pandas as pd
from scipy.interpolate import Rbf
from .sheet import Sheet
class MultiSheet:
def __init__(self, name, layer_datasets, specs):
self.coords = ["x", "y", "z"]
self.layers = [
Sheet("{}_{}".format(name, i), dsets, specs, coords=self.coords)
for i, dsets in enumerate(layer_datasets)
]
for i, layer in enumerate(self):
for dset in layer.datasets.values():
dset["layer"] = i
def __iter__(self):
for layer in self.layers:
yield layer
def __getitem__(self, n):
return self.layers[n]
def __len__(self):
return len(self.layers)
@property
def Nes(self):
        return [layer.Ne for layer in self]
@property
def Nvs(self):
        return [layer.Nv for layer in self]
@property
def Nfs(self):
        return [layer.Nf for layer in self]
@property
def v_idxs(self):
return np.array([sheet.Nv for sheet in self]).cumsum()
@property
def f_idxs(self):
return np.array([sheet.Nf for sheet in self]).cumsum()
@property
def e_idxs(self):
return np.array([sheet.Ne for sheet in self]).cumsum()
def concat_datasets(self):
datasets = {}
v_dfs = [self[0].vert_df]
e_dfs = [self[0].edge_df]
f_dfs = [self[0].face_df]
v_shift = 0
f_shift = 0
e_shift = 0
for lower, upper in zip(self[:-1], self[1:]):
v_shift += lower.Nv
v_dfs.append(upper.vert_df.set_index(upper.vert_df.index + v_shift))
f_shift += lower.Nf
f_dfs.append(upper.face_df.set_index(upper.face_df.index + f_shift))
e_shift += lower.Ne
shifted_edge_df = upper.edge_df.set_index(upper.edge_df.index + e_shift)
shifted_edge_df[["srce", "trgt"]] += v_shift
shifted_edge_df["face"] += f_shift
e_dfs.append(shifted_edge_df)
for key, dfs in zip(["edge", "face", "vert"], [e_dfs, f_dfs, v_dfs]):
datasets[key] = pd.concat(dfs)
return datasets
def update_interpolants(self):
self.interpolants = [
Rbf(
sheet.vert_df["x"],
sheet.vert_df["y"],
sheet.vert_df["z"],
**sheet.specs["settings"]["interpolate"]
)
for sheet in self
]
# for interp in self.interpolants:
# interp.nodes = interp.nodes.clip(-1e2, 1e2)
| gpl-3.0 | 3,584,156,780,322,627,600 | 27.065934 | 84 | 0.53054 | false |
RomainGoussault/Deepov | DeepovTuning/optRoutines.py | 1 | 4435 | # -*- coding: utf-8 -*-
"""
Created on Sun Nov 8 21:16:15 2015
@author: navid
"""
from scipy.optimize import differential_evolution
from scipy.optimize import basinhopping
from tuner import *
from deepovFunction import *
from interface import *
import settings
elo = -10000
values = []
# TODO : create a function to save result of each step in the algorithms (callable func in scipy.optimize)
def opt_differential_evolution(parametersList):
# Maximul of iterations of the algorithm
max_iter=10
# Set the bounds of each parameter
bounds=list()
for i in range(0,len(parametersList)):
bounds.append((parametersList[i][1],parametersList[i][2]))
# TODO : change the criterium of convergence
scipy_res=differential_evolution(deepov_func,bounds,args=parametersList,maxiter=max_iter,disp=True,polish=False)
return scipy_res
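# illustrative call (sketch), assuming each parametersList row has the form
# [name, lower_bound, upper_bound, step, current_value]:
#   result = opt_differential_evolution([['search_depth', 1, 8, 1, 4]])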
def opt_basinhopping(parametersList):
# Number of iterations of the algorithm
n_iter=10
# Set the bounds of each parameter
bounds=list()
for i in range(0,len(parametersList)):
bounds.append((parametersList[i][1],parametersList[i][2]))
# Set the dictionary of additional options
args={'bounds':bounds,'args':parametersList}
# Find the initial guess
x0=[parametersList[i][4] for i in range(0,len(parametersList))]
print('Initial guess')
print(x0)
scipy_res=basinhopping(deepov_func,x0,niter=n_iter,T=50,stepsize=10, minimizer_kwargs=args,niter_success=3,disp=True)
return scipy_res
def opt_gridSearch(parametersList):
""" Does a simple grid search over the parameter space and returns the elo win and the best values in a dictionnary"""
global elo
global values
values = list() # Stores the results
n = len(parametersList) # dimension of the problem = number of parameters
# Iterates over the the parameters to initialize values list
# The parameter temporary value does not need an initial guess in grid search and is set as the lower bound
for i in range(0,n):
parametersList[i][4] = parametersList[i][1]
values.append(parametersList[i][1])
# Initialize the lists in the dataset (a list of n lists)
settings.dataset.append([])
# Add one dimension to store the corresponding elo
settings.dataset.append([])
# Goes over the paramter space and launch cutechess at each point
recursive_iterate(parametersList,0,n)
return values,elo
def recursive_iterate(parametersList, i, max_dim):
    """ Recursive iteration along dimension i """
    global elo
    global values
# At the leaf of the tree
if (i == max_dim-1):
while ( parametersList[i][4] <= parametersList[i][2] ):
# step 1 : evaluate the score at current leaf
parameters = setParam(parametersList)
command = generateCommand(parameters)
score = evaluate(command)
print("score {}".format(score))
print("For values: {}".format([parametersList[j][4] for j in range(0,max_dim)] ))
# Store results in the dataset
            for j in range(0, max_dim):
                settings.dataset[j].append(parametersList[j][4])
# Appends the score in the nth list
settings.dataset[max_dim].append(score)
if score > elo:
values = [parametersList[j][4] for j in range(0,max_dim)]
elo = score
print("NEW ELO: {}".format(elo))
print("For values: {}".format(values))
print("")
# step 2 : at end of evaluation, increment current leave
parametersList[i][4] += parametersList[i][3]
# step 3 : at end of loop, reinitialize the leaf at lower bound
parametersList[i][4] = parametersList[i][1]
else:
while ( parametersList[i][4] <= parametersList[i][2] ):
# step 1b : if not at leaf, recursive call at dim i+1
            recursive_iterate(parametersList, i+1, max_dim)
# step 2b : at end of evaluation, increment current dim
parametersList[i][4] += parametersList[i][3]
# step 3b : at end of loop, reinitialize the dim at lower bound and i++
parametersList[i][4] = parametersList[i][1]
| gpl-2.0 | -2,958,768,602,807,209,500 | 35.966667 | 122 | 0.629087 | false |
niwinz/bytehold | bytehold/env.py | 1 | 2087 | #!/usr/bin/env python3
import os, socket
from .exceptions import FileDoesNotExists
from .exceptions import InvalidConfiguration
from .util import resolve_absolute_path
class Environment(object):
instance = None
config = {}
default_compress_command = resolve_absolute_path('xz', '-z6')
default_rsync_command = resolve_absolute_path('rsync', '-avr')
default_scp_command = resolve_absolute_path('scp')
default_tar_command = resolve_absolute_path('tar')
def __new__(cls, *args, **kwargs):
        if cls.instance is None:
            # object.__new__ takes no extra arguments in Python 3
            cls.instance = super(Environment, cls).__new__(cls)
return cls.instance
def __init__(self, **kwargs):
if kwargs:
self.config.update(kwargs)
def name(self):
if "name" not in self.config:
self.config['name'] = socket.gethostname()
return self.config['name']
def remote_host(self):
if "remote_host" not in self.config:
raise InvalidConfiguration("remote_host variable does not exist in global scope")
return self.config['remote_host']
def remote_path(self):
if "remote_path" not in self.config:
raise InvalidConfiguration()
return os.path.join(
self.config['remote_path'],
self.name(),
)
def command_compress(self):
if "compress_command" not in self.config:
return self.default_compress_command()
return self.config["compress_command"]
def command_scp(self):
if "scp_command" not in self.config:
return self.default_scp_command()
return self.config['scp_command']
def command_tar(self):
if "tar_command" not in self.config:
return self.default_tar_command()
return self.config['tar_command']
def command_rsync(self):
if "rsync_command" not in self.config:
return self.default_rsync_command()
return self.config['rsync_command']
def extend(self, **kwargs):
self.config.update(kwargs)
| bsd-3-clause | -6,739,658,359,552,375,000 | 28.814286 | 93 | 0.616195 | false |
ninja-ide/ninja-ide | ninja_profiling/typing_performance.py | 1 | 3231 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of NINJA-IDE (http://ninja-ide.org).
#
# NINJA-IDE is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# NINJA-IDE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NINJA-IDE; If not, see <http://www.gnu.org/licenses/>.
import sys
from unittest import mock
import time
from PyQt5.QtWidgets import QApplication
from PyQt5.QtCore import QTimer
from PyQt5.QtCore import Qt
from PyQt5.QtTest import QTest
sys.path.append("..")
from ninja_ide.tools import json_manager
from ninja_ide import resources
from ninja_ide.core.file_handling import nfile
from ninja_ide.gui.editor import neditable
from ninja_ide.gui.editor.editor import NEditor
from ninja_ide.gui.syntax_registry import syntax_registry # noqa
from ninja_ide.gui.ide import IDE
json_manager.load_syntax()
themes = json_manager.load_editor_schemes()
resources.COLOR_SCHEME = themes["Ninja Dark"]
qapp = QApplication(sys.argv)
IDE.register_service("ide", mock.Mock())
ninja_editor = NEditor(neditable=neditable.NEditable(nfile.NFile()))
ninja_editor.side_widgets.remove("CodeFoldingWidget")
ninja_editor.side_widgets.remove("MarkerWidget")
ninja_editor.side_widgets.remove("TextChangeWidget")
ninja_editor.side_widgets.update_viewport()
ninja_editor.side_widgets.resize()
ninja_editor.register_syntax_for()
ninja_editor.showMaximized()
click_times = {}
with open(sys.argv[1]) as fp:
text = fp.read()
def click(key):
    clock_before = time.perf_counter()
if isinstance(key, str):
QTest.keyClicks(ninja_editor, key)
else:
QTest.keyClick(ninja_editor, key)
while qapp.hasPendingEvents():
qapp.processEvents()
    clock_after = time.perf_counter()
    ms = int((clock_after - clock_before) * 1000)
click_times[ms] = click_times.get(ms, 0) + 1
def test():
    clock_before = time.perf_counter()
for line in text.splitlines():
indent_width = len(line) - len(line.lstrip())
while ninja_editor.textCursor().positionInBlock() > indent_width:
click(Qt.Key_Backspace)
for i in range(
indent_width - ninja_editor.textCursor().positionInBlock()):
click(Qt.Key_Space)
line = line[indent_width:]
for char in line:
click(char)
click(Qt.Key_Enter)
    clock_after = time.perf_counter()
typing_time = clock_after - clock_before
print("Typed {} chars in {} sec. {} ms per character".format(
len(text), typing_time, typing_time * 1000 / len(text)))
print("Time per click: Count of clicks")
click_time_keys = sorted(click_times.keys())
for click_time_key in click_time_keys:
print(" %5dms: %4d" % (
click_time_key, click_times[click_time_key]))
qapp.quit()
QTimer.singleShot(0, test)
qapp.exec_()
| gpl-3.0 | 3,850,145,051,517,410,000 | 28.642202 | 76 | 0.694831 | false |
dianshen/github | newstart/bbs/migrations/0009_auto_20160830_2256.py | 1 | 1123 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-30 14:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bbs', '0008_auto_20160830_1708'),
]
operations = [
migrations.AddField(
model_name='forum',
name='priority',
field=models.IntegerField(blank=True, default=0, null=True, verbose_name='优先级'),
),
migrations.AddField(
model_name='forum',
name='set_top_ok',
field=models.NullBooleanField(default=False, verbose_name='是否置顶'),
),
migrations.AlterField(
model_name='arctiles',
name='head_img',
field=models.ImageField(blank=True, null=True, upload_to='static/img'),
),
migrations.AlterField(
model_name='userprofiles',
name='head_icon',
field=models.ImageField(blank=True, height_field=74, null=True, upload_to='static/img', verbose_name='头像', width_field=74),
),
]
| gpl-3.0 | -9,174,891,676,077,669,000 | 30.571429 | 135 | 0.579186 | false |
kitstar/DNNConvert | common/IR/IR_graph.py | 1 | 1733 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import common.IR.graph_pb2 as graph_pb2
from google.protobuf import json_format
from common.DataStructure.graph import Graph, GraphNode
from common.utils import load_protobuf_from_file
class IRGraphNode(GraphNode):
@property
def IR_layer(self):
return self.layer
@property
def name(self):
return self.layer.name
@property
def type(self):
return self.layer.op
class IRGraph(Graph):
@staticmethod
def shapeToStr(tensor_shape):
ret = ""
first = True
for e in tensor_shape.dim:
if e.size != -1:
if first == False:
ret += ", "
ret += str(e.size)
first = False
return ret
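    # shapeToStr example: dims (32, 224, 224) -> "32, 224, 224";
    # dims reported as -1 (unknown size) are skipped entirely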
    def __init__(self, filename):
model = graph_pb2.GraphDef()
load_protobuf_from_file(model, filename)
super(IRGraph, self).__init__(model)
    def build(self):
self.input_layers = list()
for i, layer in enumerate(self.model.node):
self.layer_map[layer.name] = IRGraphNode(layer)
self.layer_name_map[layer.name] = layer.name
for pred in layer.input:
self._make_connection(pred, layer.name)
super(IRGraph, self).build()
    def saveToJson(self, filename=None):
        json_str = json_format.MessageToJson(self.model, preserving_proto_field_name=True)
        if filename is not None:
            with open(filename, "w") as of:
                of.write(json_str)
            print ("IR saved as {}".format(filename))
        return json_str
| apache-2.0 | -899,402,253,043,025,800 | 23.757143 | 97 | 0.582804 | false |
PaddlePaddle/Paddle | tools/externalError/spider.py | 1 | 16768 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ssl
import re
import urllib.request
import json
import collections
import sys, getopt
import external_error_pb2
def parsing(externalErrorDesc):
#*********************************************************************************************#
#*********************************** CUDA Error Message **************************************#
print("start crawling errorMessage for nvidia CUDA API--->")
url = 'https://docs.nvidia.com/cuda/cuda-runtime-api/group__CUDART__TYPES.html#group__CUDART__TYPES_1g3f51e3575c2178246db0a94a430e0038'
allMessageDesc = externalErrorDesc.errors.add()
allMessageDesc.type = external_error_pb2.CUDA
ssl._create_default_https_context = ssl._create_unverified_context
html = urllib.request.urlopen(url).read().decode('utf-8')
res_div = r'<div class="section">.*?<p>CUDA error types </p>.*?</div>.*?<div class="enum-members">(.*?)</div>'
m_div = re.findall(res_div, html, re.S | re.M)[0]
res_dt = r'<dt>(.*?)</dt>.*?<dd>(.*?)</dd>'
m_dt = re.findall(res_dt, m_div, re.S | re.M)
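    # each m_dt entry pairs the enum-definition markup with its description markup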
for error in m_dt:
res_type = r'<span class="enum-member-name-def">(.*?) = <span class="ph ph apiData">(.*?)</span></span>'
m_type = re.findall(res_type, error[0], re.S | re.M)[0]
m_message = error[1]
m_message = m_message.replace('\n', '')
res_a = r'(<a class=.*?</a>)'
res_shape = r'<a class=.*?>(.*?)</a>'
list_a = re.findall(res_a, m_message, re.S | re.M)
list_shape = re.findall(res_shape, m_message, re.S | re.M)
assert len(list_a) == len(list_shape)
for idx in range(len(list_a)):
m_message = m_message.replace(list_a[idx], list_shape[idx])
m_message = m_message.replace(
'<h6 class=\"deprecated_header\">Deprecated</h6>', '')
res_span = r'(<span class=.*?</span>)'
res_span_detail = r'<span class=.*?>(.*?)</span>'
list_span = re.findall(res_span, m_message, re.S | re.M)
list_span_detail = re.findall(res_span_detail, m_message, re.S | re.M)
assert len(list_span) == len(list_span_detail)
for idx in range(len(list_span)):
m_message = m_message.replace(list_span[idx], list_span_detail[idx])
res_p = r'(<p>.*?</p>)'
res_p_detail = r'<p>(.*?)</p>'
list_p = re.findall(res_p, m_message, re.S | re.M)
list_p_detail = re.findall(res_p_detail, m_message, re.S | re.M)
assert len(list_p) == len(list_p_detail)
for idx in range(len(list_p)):
m_message = m_message.replace(list_p[idx], list_p_detail[idx])
m_message = m_message.replace(' ', '')
_Messages = allMessageDesc.messages.add()
try:
_Messages.code = int(m_type[1])
except ValueError:
if re.match('0x', m_type[1]):
_Messages.code = int(m_type[1], 16)
else:
raise ValueError
_Messages.message = "'%s'. %s" % (m_type[0], m_message)
print("End crawling errorMessage for nvidia CUDA API!\n")
#***********************************************************************************************#
#*********************************** CURAND Error Message **************************************#
print("start crawling errorMessage for nvidia CURAND API--->")
url = 'https://docs.nvidia.com/cuda/curand/group__HOST.html#group__HOST_1gb94a31d5c165858c96b6c18b70644437'
allMessageDesc = externalErrorDesc.errors.add()
allMessageDesc.type = external_error_pb2.CURAND
html = urllib.request.urlopen(url).read().decode('utf-8')
res_div = r'<div class="section">.*?<p>CURAND function call status types </p>.*?</div>.*?<div class="enum-members">(.*?)</div>'
m_div = re.findall(res_div, html, re.S | re.M)[0]
res_dt = r'<dt>(.*?)</dt>.*?<dd>(.*?)</dd>'
m_dt = re.findall(res_dt, m_div, re.S | re.M)
for error in m_dt:
res_type = r'<span class="enum-member-name-def">(.*?) = <span class="ph ph apiData">(.*?)</span></span>'
m_type = re.findall(res_type, error[0], re.S | re.M)[0]
m_message = error[1]
_Messages = allMessageDesc.messages.add()
try:
_Messages.code = int(m_type[1])
except ValueError:
if re.match('0x', m_type[1]):
_Messages.code = int(m_type[1], 16)
else:
raise ValueError
_Messages.message = "'%s'. %s" % (m_type[0], m_message)
print("End crawling errorMessage for nvidia CURAND API!\n")
#**************************************************************************************************#
#*********************************** CUDNN Error Message ******************************************#
cudnnStatus_t = {
"CUDNN_STATUS_SUCCESS": 0,
"CUDNN_STATUS_NOT_INITIALIZED": 1,
"CUDNN_STATUS_ALLOC_FAILED": 2,
"CUDNN_STATUS_BAD_PARAM": 3,
"CUDNN_STATUS_INTERNAL_ERROR": 4,
"CUDNN_STATUS_INVALID_VALUE": 5,
"CUDNN_STATUS_ARCH_MISMATCH": 6,
"CUDNN_STATUS_MAPPING_ERROR": 7,
"CUDNN_STATUS_EXECUTION_FAILED": 8,
"CUDNN_STATUS_NOT_SUPPORTED": 9,
"CUDNN_STATUS_LICENSE_ERROR": 10,
"CUDNN_STATUS_RUNTIME_PREREQUISITE_MISSING": 11,
"CUDNN_STATUS_RUNTIME_IN_PROGRESS": 12,
"CUDNN_STATUS_RUNTIME_FP_OVERFLOW": 13,
}
print("start crawling errorMessage for nvidia CUDNN API--->")
url = 'https://docs.nvidia.com/deeplearning/cudnn/api/index.html#cudnnStatus_t'
allMessageDesc = externalErrorDesc.errors.add()
allMessageDesc.type = external_error_pb2.CUDNN
html = urllib.request.urlopen(url).read().decode('utf-8')
    with open('1.txt', 'w') as f:  # debug dump of the fetched page
        f.write(html)
res_div = r'<div class="section" id="cudnnStatus_t__section_lmp_dgr_2jb"><a name="cudnnStatus_t__section_lmp_dgr_2jb" shape="rect">(.*?)</div>'
m_div = re.findall(res_div, html, re.S | re.M)[0]
res_dt = r'<dt class="dt dlterm"><samp class="ph codeph">(.*?)</samp></dt>.*?<dd class="dd">(.*?)</dd>'
m_dt = re.findall(res_dt, m_div, re.S | re.M)
for error in m_dt:
m_message = error[1]
res_class = r'<p class="p">.*?</p>'
res_class_detail = r'<p class="p">(.*?)</p>'
list_class = re.findall(res_class, m_message, re.S | re.M)
list_class_detail = re.findall(res_class_detail, m_message, re.S | re.M)
assert len(list_class) == len(list_class_detail)
for idx in range(len(list_class)):
m_message = m_message.replace(list_class[idx],
list_class_detail[idx])
res_a = r'(<a class="xref".*?</a>)'
res_shape = r'<a class="xref".*?>(.*?)</a>'
list_a = re.findall(res_a, m_message, re.S | re.M)
list_shape = re.findall(res_shape, m_message, re.S | re.M)
assert len(list_a) == len(list_shape)
for idx in range(len(list_a)):
m_message = m_message.replace(list_a[idx], list_shape[idx])
res_span = r'(<span class="ph">.*?</span>)'
res_span_detail = r'<span class="ph">(.*?)</span>'
list_span = re.findall(res_span, m_message, re.S | re.M)
list_span_detail = re.findall(res_span_detail, m_message, re.S | re.M)
assert len(list_span) == len(list_span_detail)
for idx in range(len(list_span)):
m_message = m_message.replace(list_span[idx], list_span_detail[idx])
res_samp = r'(<samp class="ph codeph">.*?</samp>)'
res_samp_detail = r'<samp class="ph codeph">(.*?)</samp>'
list_samp = re.findall(res_samp, m_message, re.S | re.M)
list_samp_detail = re.findall(res_samp_detail, m_message, re.S | re.M)
assert len(list_samp) == len(list_samp_detail)
for idx in range(len(list_samp)):
m_message = m_message.replace(list_samp[idx], list_samp_detail[idx])
m_message = re.sub(r'\n +', ' ', m_message)
_Messages = allMessageDesc.messages.add()
_Messages.code = int(cudnnStatus_t[error[0]])
_Messages.message = "'%s'. %s" % (error[0], m_message)
print("End crawling errorMessage for nvidia CUDNN API!\n")
#*************************************************************************************************#
#*********************************** CUBLAS Error Message ****************************************#
cublasStatus_t = {
"CUBLAS_STATUS_SUCCESS": 0,
"CUBLAS_STATUS_NOT_INITIALIZED": 1,
"CUBLAS_STATUS_ALLOC_FAILED": 3,
"CUBLAS_STATUS_INVALID_VALUE": 7,
"CUBLAS_STATUS_ARCH_MISMATCH": 8,
"CUBLAS_STATUS_MAPPING_ERROR": 11,
"CUBLAS_STATUS_EXECUTION_FAILED": 13,
"CUBLAS_STATUS_INTERNAL_ERROR": 14,
"CUBLAS_STATUS_NOT_SUPPORTED": 15,
"CUBLAS_STATUS_LICENSE_ERROR": 16
}
print("start crawling errorMessage for nvidia CUBLAS API--->")
url = 'https://docs.nvidia.com/cuda/cublas/index.html#cublasstatus_t'
allMessageDesc = externalErrorDesc.errors.add()
allMessageDesc.type = external_error_pb2.CUBLAS
html = urllib.request.urlopen(url).read().decode('utf-8')
res_div = r'<p class="p">The type is used for function status returns. All cuBLAS library.*?<div class="tablenoborder">(.*?)</div>'
m_div = re.findall(res_div, html, re.S | re.M)[0]
res_dt = r'<p class="p"><samp class="ph codeph">(.*?)</samp></p>.*?colspan="1">(.*?)</td>'
m_dt = re.findall(res_dt, m_div, re.S | re.M)
for error in m_dt:
m_message = error[1]
m_message = re.sub(r'\n +', ' ', m_message)
res_p = r'<p class="p">.*?</p>'
res_p_detail = r'<p class="p">(.*?)</p>'
list_p = re.findall(res_p, m_message, re.S | re.M)
list_p_detail = re.findall(res_p_detail, m_message, re.S | re.M)
assert len(list_p) == len(list_p_detail)
for idx in range(len(list_p)):
m_message = m_message.replace(list_p[idx], list_p_detail[idx])
res_samp = r'<samp class="ph codeph">.*?</samp>'
res_samp_detail = r'<samp class="ph codeph">(.*?)</samp>'
list_samp = re.findall(res_samp, m_message, re.S | re.M)
list_samp_detail = re.findall(res_samp_detail, m_message, re.S | re.M)
assert len(list_samp) == len(list_samp_detail)
for idx in range(len(list_samp)):
m_message = m_message.replace(list_samp[idx], list_samp_detail[idx])
_Messages = allMessageDesc.messages.add()
_Messages.code = int(cublasStatus_t[error[0]])
_Messages.message = "'%s'. %s" % (error[0], m_message)
print("End crawling errorMessage for nvidia CUBLAS API!\n")
#*************************************************************************************************#
#*********************************** CUSOLVER Error Message **************************************#
cusolverStatus_t = {
"CUSOLVER_STATUS_SUCCESS": 0,
"CUSOLVER_STATUS_NOT_INITIALIZED": 1,
"CUSOLVER_STATUS_ALLOC_FAILED": 2,
"CUSOLVER_STATUS_INVALID_VALUE": 3,
"CUSOLVER_STATUS_ARCH_MISMATCH": 4,
"CUSOLVER_STATUS_MAPPING_ERROR": 5,
"CUSOLVER_STATUS_EXECUTION_FAILED": 6,
"CUSOLVER_STATUS_INTERNAL_ERROR": 7,
"CUSOLVER_STATUS_MATRIX_TYPE_NOT_SUPPORTED": 8,
"CUSOLVER_STATUS_NOT_SUPPORTED": 9,
"CUSOLVER_STATUS_ZERO_PIVOT": 10,
"CUSOLVER_STATUS_INVALID_LICENSE": 11,
"CUSOLVER_STATUS_IRS_PARAMS_NOT_INITIALIZED": 12,
"CUSOLVER_STATUS_IRS_PARAMS_INVALID": 13,
"CUSOLVER_STATUS_IRS_INTERNAL_ERROR": 14,
"CUSOLVER_STATUS_IRS_NOT_SUPPORTED": 15,
"CUSOLVER_STATUS_IRS_OUT_OF_RANGE": 16,
"CUSOLVER_STATUS_IRS_NRHS_NOT_SUPPORTED_FOR_REFINE_GMRES": 17,
"CUSOLVER_STATUS_IRS_INFOS_NOT_INITIALIZED": 18
}
print("start crawling errorMessage for nvidia CUSOLVER API--->")
url = 'https://docs.nvidia.com/cuda/cusolver/index.html#cuSolverSPstatus'
allMessageDesc = externalErrorDesc.errors.add()
allMessageDesc.type = external_error_pb2.CUSOLVER
html = urllib.request.urlopen(url).read().decode('utf-8')
res_div = r'This is a status type returned by the library functions and.*?<div class="tablenoborder">(.*?)</div>'
m_div = re.findall(res_div, html, re.S | re.M)[0]
res_dt = r'<samp class="ph codeph">(.*?)</samp></td>.*?colspan="1">(.*?)</td>'
m_dt = re.findall(res_dt, m_div, re.S | re.M)
for error in m_dt:
m_message = error[1]
m_message = re.sub(r'\n +', '', m_message)
m_message = re.sub(r'<p class="p"></p>', '', m_message)
res_p = r'<p class="p">.*?</p>'
res_p_detail = r'<p class="p">(.*?)</p>'
list_p = re.findall(res_p, m_message, re.S | re.M)
list_p_detail = re.findall(res_p_detail, m_message, re.S | re.M)
assert len(list_p) == len(list_p_detail)
for idx in range(len(list_p)):
m_message = m_message.replace(list_p[idx], list_p_detail[idx])
res_samp = r'<samp class="ph codeph">.*?</samp>'
res_samp_detail = r'<samp class="ph codeph">(.*?)</samp>'
list_samp = re.findall(res_samp, m_message, re.S | re.M)
list_samp_detail = re.findall(res_samp_detail, m_message, re.S | re.M)
assert len(list_samp) == len(list_samp_detail)
for idx in range(len(list_samp)):
m_message = m_message.replace(list_samp[idx], list_samp_detail[idx])
res_strong = r'<strong class="ph b">.*?</strong>'
res_strong_detail = r'<strong class="ph b">(.*?)</strong>'
list_strong = re.findall(res_strong, m_message, re.S | re.M)
list_strong_detail = re.findall(res_strong_detail, m_message, re.S |
re.M)
assert len(list_strong) == len(list_strong_detail)
for idx in range(len(list_strong)):
m_message = m_message.replace(list_strong[idx],
list_strong_detail[idx])
_Messages = allMessageDesc.messages.add()
_Messages.code = int(cusolverStatus_t[error[0]])
_Messages.message = "'%s'. %s" % (error[0], m_message)
print("End crawling errorMessage for nvidia CUSOLVER API!\n")
#**********************************************************************************************#
#*************************************** NCCL error *******************************************#
print("start crawling errorMessage for nvidia NCCL API--->")
url = 'https://docs.nvidia.com/deeplearning/nccl/user-guide/docs/api/types.html#ncclresult-t'
allMessageDesc = externalErrorDesc.errors.add()
allMessageDesc.type = external_error_pb2.NCCL
html = urllib.request.urlopen(url).read().decode('utf-8')
res_div = r'<code class="descname">ncclResult_t</code>(.*?)</div>'
m_div = re.findall(res_div, html, re.S | re.M)[0]
res_dt = r'<code class="descname">(.*?)</code>.*?<span class="pre">(.*?)</span></code>\)(.*?)</p>\n</dd></dl>'
m_dt = re.findall(res_dt, m_div, re.S | re.M)
for error in m_dt:
m_message = re.sub(r'\n', '', error[2])
_Messages = allMessageDesc.messages.add()
_Messages.code = int(error[1])
_Messages.message = "'%s'. %s" % (error[0], m_message)
print("End crawling errorMessage for nvidia NCCL API!\n")
def main(argv):
try:
opts, _ = getopt.getopt(argv, "h", ["help"])
except getopt.GetoptError:
print('python spider.py')
sys.exit(2)
for opt, _ in opts:
if opt in ("-h", "--help"):
print('python spider.py')
sys.exit(2)
externalErrorDesc = external_error_pb2.ExternalErrorDesc()
parsing(externalErrorDesc)
serializedString = externalErrorDesc.SerializeToString()
with open("externalErrorMsg.pb", "wb") as f:
# save for externalErrorMsg.pb from Python-protobuf interface
# load from C++-protobuf interface and get error message
f.write(serializedString)
print(
"Generating data file [externalErrorMsg.pb] for external third_party API error has been done!"
)
if __name__ == "__main__":
main(sys.argv[1:])
| apache-2.0 | 3,218,303,806,324,997,600 | 45.192837 | 147 | 0.555821 | false |
malon/presupuesto | budget_app/models/funding_category.py | 1 | 1051 | from django.db import models
class FundingCategoriesManager(models.Manager):
pass
class FundingCategory(models.Model):
budget = models.ForeignKey('Budget')
expense = models.BooleanField()
source = models.CharField(max_length=1, null=True)
fund_class = models.CharField(max_length=2, null=True)
fund = models.CharField(max_length=5, null=True)
description = models.CharField(max_length=200)
updated_at = models.DateTimeField(auto_now=True)
created_at = models.DateTimeField(auto_now_add=True)
objects = FundingCategoriesManager()
class Meta:
app_label = "budget_app"
db_table = "funding_categories"
# Return the 'budget domain' id, used to uniquely identify a category
# in a budget
def uid(self):
# We are using only the 'fund' category
# if self.fund_class == None:
# return self.source
# elif self.fund == None:
# return self.fund_class
return self.fund
def __unicode__(self):
return self.description
| gpl-2.0 | 1,729,028,940,044,533,800 | 29.028571 | 73 | 0.659372 | false |
Felipe-Caldeira/Tic-Tac-Toe | Tic-Tac-Toe.py | 1 | 8348 | #Python Tic-Tac-Toe Game
import random
import collections as c
import os
import time
#The game board
board = b = list(range(1,10))  # 'b' is a short alias used by the line checks below
rows = ['012', '345', '678']
columns = ['036', '147', '258']
X = ['048', '246']
def show_Board(): #Displays game board
print(board[0], '|', board[1], '|', board[2])
print('---------')
print(board[3], '|', board[4], '|', board[5])
print('---------')
print(board[6], '|', board[7], '|', board[8])
print('')
def getrow(space): #Gets the row of a space
for i in range(3):
if str(space) in rows[i]:
return(i)
def getcolumn(space): #Gets the column of a space
for i in range(3):
if str(space) in columns[i]:
return(i)
def getX(space): #Gets the diagonal of a space, and checks if center
if space == 4:
return(2)
for i in range(2):
if str(space) in X[i]:
return(i)
def check_Row(space): #Checks for X's and for empty spaces in row
RS = rows[getrow(space)]
ARS = []
if 'X' not in set(b[int(RS[i])] for i in range(3)):
for i in RS:
if board[int(i)] != 'O':
ARS.append(i)
return(ARS)
def check_Column(space): #Checks for X's and for empty spaces in column
CS = columns[getcolumn(space)]
ACS = []
if 'X' not in set(b[int(CS[i])] for i in range(3)):
for i in CS:
if board[int(i)] != 'O':
ACS.append(i)
return(ACS)
def check_Center(): #Checks for X's and for empty spaces in both diagonals
ACS = []
if 'X' not in set(b[int(X[0][i])] for i in range(3)):
for i in X[0]:
if board[int(i)] != 'O':
ACS.append(i)
if 'X' not in set(b[int(X[1][i])] for i in range(3)):
for i in X[1]:
if board[int(i)] != 'O':
ACS.append(i)
return(ACS)
def check_X(space): #Checks for X's and for empty spaces in diagonal
AXS = []
if getX(space) == 2:
for i in check_Center():
AXS.append(i)
return(AXS)
XS = X[getX(space)]
if 'X' not in set(b[int(XS[i])] for i in range(3)):
for i in XS:
if board[int(i)] != 'O':
AXS.append(i)
return(AXS)
def scan_Board(): #Checks for open spaces
openspaces = osp = []
for space in range(9):
if board[space] == 'O':
if check_Row(space) != None:
for i in check_Row(space):
osp.append(int(i))
if check_Column(space) != None:
for i in check_Column(space):
osp.append(int(i))
if getX(space) != None:
if check_X(space) != None:
for i in check_X(space):
osp.append(int(i))
if all(isinstance(spaces, int) for spaces in board):
osp.append(space)
return openspaces
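#NOTE: scan_Board scores each empty space by how many of the computer's open
#lines (rows, columns or diagonals holding an 'O' and no 'X') pass through it;
#a space listed three times lies on three such lines, so com_Move prefers it.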
def can_Win(player): #Checks if (player) has 2-in-a-row
for row in rows:
if list(board[int(row[i])] for i in range(3)).count(player) == 2:
for i in row:
if isinstance(board[int(i)], int):
return(int(i))
for column in columns:
if list(board[int(column[i])] for i in range(3)).count(player) == 2:
for i in column:
if isinstance(board[int(i)], int):
return(int(i))
for diag in X:
if list(board[int(diag[i])] for i in range(3)).count(player) == 2:
for i in diag:
if isinstance(board[int(i)], int):
return(int(i))
return(None)
def player_Move(mark): #Player's turn
while True:
while True:
try:
move = int(input("Select a spot: "))
break
except ValueError:
print("Input an available number from the board.")
        if move < 1 or move > 9:
            print("Number out of range. Choose a number from 1 to 9.")
            continue
else:
move = int(move) - 1
if board[move] != 'X' and board[move] != 'O':
board[move] = mark
break
else:
print("Spot already taken. Select another.")
continue
def com_Move(): #Computer's turn. Computer scans board and picks from the best possible spaces.
while True:
if can_Win('O') != None: #Go for the win if possible!
board[(can_Win('O'))] = 'O'
break
if can_Win('X') != None: #Block player from winning!
board[can_Win('X')] = 'O'
break
openspaces = osp = scan_Board()
bestspaces = bs = []
count = c.Counter(osp) #Counts how many 'points' each space has
#print(openspaces)
if not openspaces:
for i in range(9):
if isinstance(board[i], int):
board[i] = 'O'
break
break
if 3 in count.values():
for k,v in count.items():
if v == 3:
bs.append(k)
board[random.choice(bs)] = 'O'
break
if 2 in count.values():
for k,v in count.items():
if v == 2:
bs.append(k)
board[random.choice(bs)] = 'O'
break
else:
board[random.choice(osp)] = 'O'
break
def win_Check(): #Checks for 3-in-a-row
for row in rows:
lst = list(board[int(row[i])] for i in range(3))
if lst[1:] == lst[:-1]:
return True
for column in columns:
lst = list(board[int(column[i])] for i in range(3))
if lst[1:] == lst[:-1]:
return True
for diag in X:
lst = list(board[int(diag[i])] for i in range(3))
if lst[1:] == lst[:-1]:
return True
    return False
def player_Turn(mark, player):
os.system('cls')
print(player + "'s move...")
show_Board()
player_Move(mark)
def computer_Turn():
os.system('cls')
print("Computer's move...")
show_Board()
time.sleep(2)
os.system('cls')
com_Move()
#------------------------------------------------------------------------------
while True:
gamemode = input("PvP or PvC? ")
if gamemode != "PvP" and gamemode != "PvC":
print("Please choose a valid game mode.")
continue
break
if gamemode == 'PvC':
if random.randrange(0,100) > 50: #Randomly selects who starts
for i in range(5):
player_Turn('X', 'Player')
if win_Check() == True:
os.system('cls')
print("You won the game!")
show_Board()
break
if i == 4:
os.system('cls')
print("It's a cat's game!")
show_Board()
break
computer_Turn()
if win_Check() == True:
os.system('cls')
print("Computer won the game!")
show_Board()
break
else:
for i in range(5):
computer_Turn()
if win_Check() == True:
os.system('cls')
print("Computer won the game!")
show_Board()
break
if i == 4:
os.system('cls')
print("It's a cat's game!")
show_Board()
break
player_Turn('X', 'Player')
if win_Check() == True:
os.system('cls')
print("You won the game!")
show_Board()
break
if gamemode == 'PvP':
for i in range(5):
player_Turn('O', 'Player 1')
if win_Check() == True:
os.system('cls')
print("Player 1 won the game!")
show_Board()
break
if i == 4:
os.system('cls')
print("It's a cat's game!")
show_Board()
break
player_Turn('X', 'Player 2')
if win_Check() == True:
os.system('cls')
print("Player 2 won the game!")
show_Board()
break
input()
| mit | 9,108,046,094,650,863,000 | 29.804428 | 95 | 0.462386 | false |
Baldanos/Stegpy | plugins/color_information.py | 1 | 2454 | class info():
def __init__(self):
self.name="color_info"
self.description="Displays the color information about an image"
self.inputdata="image"
self.parameters = None
self.mode="command"
def process(self, image):
"""
displays color information about the file
"""
print 'File color mode : %s' % image.mode
print ''
if image.mode == 'P':
print ''
print 'Palette information :'
            # getpalette() returns a flat [r, g, b, r, g, b, ...] list, so
            # palette index i maps to the slice palette[i*3:i*3 + 3]
            palette = image.getpalette()
            # sort (count, index) pairs directly; a dict keyed by count would
            # silently drop palette entries used the same number of times
            entries = sorted(image.getcolors(), reverse=True)
            print '    Nb     Color        Times used'
            for number, color in entries:
                rgb = tuple(palette[color * 3:(color * 3) + 3])
                print '    %03s %15s (%s times)' % (color, rgb, number)
image = image.convert('RGB')
print 'Image colors : '
        # keep (count, color) pairs and sort them; keying a dict by count
        # would silently merge distinct colors used the same number of times
        color_counts = sorted(image.getcolors(image.size[0] * image.size[1]), reverse=True)
        all_colors = [c for _, c in color_counts]
        print '    Color          Times used'
        for number, color in color_counts:
            print '    %20s (%02s times)' % (color, number)
print ''
print 'Color statistics : '
        if image.mode == 'RGBA':
            print '    Red distribution : %s' % \
                    str(list(set([r for r, g, b, a in all_colors])))
            print '    Green distribution : %s' % \
                    str(list(set([g for r, g, b, a in all_colors])))
            print '    Blue distribution : %s' % \
                    str(list(set([b for r, g, b, a in all_colors])))
            print '    Alpha distribution : %s' % \
                    str(list(set([a for r, g, b, a in all_colors])))
        else:
            print '    Red distribution : %s' % \
                    str(list(set([r for r, g, b in all_colors])))
            print '    Green distribution : %s' % \
                    str(list(set([g for r, g, b in all_colors])))
            print '    Blue distribution : %s' % \
                    str(list(set([b for r, g, b in all_colors])))
return ''
def register():
return info()
| gpl-2.0 | 8,203,311,512,014,303,000 | 37.952381 | 100 | 0.484515 | false |
IntegerMan/Pi-MFD | PiMFD/UI/Checkboxes.py | 1 | 4091 | # coding=utf-8
"""
Contains checkbox style controls for manipulating pages
"""
from pygame.rect import Rect
from PiMFD.UI import Keycodes
from PiMFD.UI.Focus import FocusableWidget
from PiMFD.UI.Keycodes import is_enter_key
from PiMFD.UI.Panels import UIWidget, StackPanel
from PiMFD.UI.Rendering import render_rectangle
from PiMFD.UI.Text import TextBlock
__author__ = 'Matt Eland'
class CheckBoxGlyph(UIWidget):
"""
A checkbox style UI without a label associated with it. This is used by other controls to render a checkbox UI.
Use CheckBox instead if you're wanting to put this on a page.
"""
checked = False
render_focus = False
check_pad = 4
def __init__(self, display, page, checked=False):
super(CheckBoxGlyph, self).__init__(display, page)
self.checked = checked
def arrange(self):
rect_size = self.display.fonts.normal.size + self.check_pad
self.desired_size = rect_size, rect_size
return super(CheckBoxGlyph, self).arrange()
def render(self):
"""
Renders the glyph and returns its dimensions
:return: The dimensions of the glyph
"""
# Size Constants
rect_size = self.display.fonts.normal.size + self.check_pad
self.rect = Rect(self.pos[0], self.pos[1], self.desired_size[0], self.desired_size[1])
focus_color = self.display.color_scheme.get_focus_color(self.render_focus)
# Draw the border
render_rectangle(self.display, focus_color, self.rect)
# Draw checkmark (if checked)
if self.checked:
checked_rect = Rect(self.pos[0] + self.check_pad,
self.pos[1] + self.check_pad,
rect_size - (self.check_pad * 2),
rect_size - (self.check_pad * 2))
render_rectangle(self.display, focus_color, checked_rect, width=0)
# Update and return our dimensions
return self.set_dimensions_from_rect(self.rect)
class CheckBox(FocusableWidget):
"""
A CheckBox with an associated label.
"""
text = None
panel = None
label = None
glyph = None
checked = False
def __init__(self, display, page, label):
super(CheckBox, self).__init__(display, page)
self.text = label
self.label = TextBlock(display, page, label)
self.glyph = CheckBoxGlyph(display, page)
self.panel = StackPanel(display, page, is_horizontal=True)
self.panel.center_align = True
self.panel.children = [self.label, self.glyph]
def arrange(self):
self.desired_size = self.panel.arrange()
return super(CheckBox, self).arrange()
def render(self):
"""
Renders the checkbox with its current state
:return: The rectangle of the checkbox
"""
# Pass along our values to the children
self.label.text = self.text
self.glyph.checked = self.checked
# Render the panel's contents
self.panel.set_dimensions_from(self)
self.panel.render()
return self.set_dimensions_from(self.panel)
def got_focus(self):
"""
Occurs when the control gets focus
"""
self.label.is_highlighted = True
self.glyph.render_focus = True
super(CheckBox, self).got_focus()
def lost_focus(self):
"""
Occurs when the control loses focus
"""
self.label.is_highlighted = False
self.glyph.render_focus = False
super(CheckBox, self).lost_focus()
def handle_key(self, key):
"""
Handles a keypress
:param key: The keycode
:returns: True if the event was handled; otherwise False
"""
if is_enter_key(key) or key == Keycodes.KEY_SPACE:
            self.checked = not self.checked
self.state_changed()
return True
else:
return super(CheckBox, self).handle_key(key)
| gpl-2.0 | -1,053,093,297,687,374,000 | 26.641892 | 115 | 0.603275 | false |
gajim/gajim | gajim/session.py | 1 | 13176 | # Copyright (C) 2008-2014 Yann Leboulanger <asterix AT lagaule.org>
# Copyright (C) 2008 Brendan Taylor <whateley AT gmail.com>
# Jonathan Schleifer <js-gajim AT webkeks.org>
# Stephan Erb <steve-e AT h3c.de>
#
# This file is part of Gajim.
#
# Gajim is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; version 3 only.
#
# Gajim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gajim. If not, see <http://www.gnu.org/licenses/>.
import string
import random
import itertools
from gajim.common import helpers
from gajim.common import events
from gajim.common import app
from gajim.common import contacts
from gajim.common import ged
from gajim.common.helpers import AdditionalDataDict
from gajim.common.const import KindConstant
from gajim.gui.util import get_show_in_roster
from gajim.gui.util import get_show_in_systray
class ChatControlSession:
def __init__(self, conn, jid, thread_id, type_='chat'):
self.conn = conn
self.jid = jid
self.type_ = type_
self.resource = jid.resource
self.control = None
if thread_id:
self.received_thread_id = True
self.thread_id = thread_id
else:
self.received_thread_id = False
if type_ == 'normal':
self.thread_id = None
else:
self.thread_id = self.generate_thread_id()
self.loggable = True
self.last_send = 0
self.last_receive = 0
app.ged.register_event_handler('decrypted-message-received',
ged.PREGUI,
self._nec_decrypted_message_received)
def generate_thread_id(self):
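        # 32 random ASCII letters: itertools.repeat yields random.choice
        # 32 times, and each call draws one character for the thread id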
return ''.join(
[f(string.ascii_letters) for f in itertools.repeat(
random.choice, 32)]
)
def is_loggable(self):
return helpers.should_log(self.conn.name, self.jid.bare)
def get_to(self):
bare_jid = self.jid.bare
if not self.resource:
return bare_jid
return bare_jid + '/' + self.resource
def _nec_decrypted_message_received(self, obj):
"""
Dispatch a received <message> stanza
"""
if obj.session != self:
return
if obj.properties.is_muc_pm:
contact = app.contacts.get_gc_contact(
self.conn.name, obj.jid, obj.resource)
else:
contact = app.contacts.get_contact(
self.conn.name, obj.jid, obj.resource)
if self.resource != obj.resource:
self.resource = obj.resource
if self.control:
if isinstance(contact, contacts.GC_Contact):
self.control.gc_contact = contact
self.control.contact = contact.as_contact()
else:
self.control.contact = contact
if self.control.resource:
self.control.change_resource(self.resource)
if not obj.msgtxt:
return
log_type = KindConstant.CHAT_MSG_RECV
if obj.properties.is_sent_carbon:
log_type = KindConstant.CHAT_MSG_SENT
if self.is_loggable() and obj.msgtxt:
jid = obj.fjid
if not obj.properties.is_muc_pm:
jid = obj.jid
obj.msg_log_id = app.storage.archive.insert_into_logs(
self.conn.name,
jid,
obj.properties.timestamp,
log_type,
message=obj.msgtxt,
subject=obj.properties.subject,
additional_data=obj.additional_data,
stanza_id=obj.unique_id,
message_id=obj.properties.id)
if obj.properties.is_muc_pm and not obj.gc_control:
# This is a carbon of a PM from a MUC we are not currently
# joined. We log it silently without notification.
return True
if not obj.msgtxt: # empty message text
return True
if not self.control:
ctrl = app.interface.msg_win_mgr.search_control(obj.jid,
obj.conn.name, obj.resource)
if ctrl:
self.control = ctrl
self.control.set_session(self)
if isinstance(contact, contacts.GC_Contact):
self.control.gc_contact = contact
self.control.contact = contact.as_contact()
else:
self.control.contact = contact
if not obj.properties.is_muc_pm:
self.roster_message2(obj)
def roster_message2(self, obj):
"""
Display the message or show notification in the roster
"""
contact = None
jid = obj.jid
resource = obj.resource
fjid = jid
# Try to catch the contact with correct resource
if resource:
fjid = jid + '/' + resource
contact = app.contacts.get_contact(obj.conn.name, jid, resource)
highest_contact = app.contacts.get_contact_with_highest_priority(
obj.conn.name, jid)
if not contact:
# If there is another resource, it may be a message from an
# invisible resource
lcontact = app.contacts.get_contacts(obj.conn.name, jid)
if (len(lcontact) > 1 or (lcontact and lcontact[0].resource and \
lcontact[0].show != 'offline')) and jid.find('@') > 0:
contact = app.contacts.copy_contact(highest_contact)
contact.resource = resource
contact.priority = 0
contact.show = 'offline'
contact.status = ''
app.contacts.add_contact(obj.conn.name, contact)
else:
# Default to highest prio
fjid = jid
contact = highest_contact
if not contact:
# contact is not in roster
contact = app.interface.roster.add_to_not_in_the_roster(
obj.conn.name, jid, obj.properties.nickname)
if not self.control:
ctrl = app.interface.msg_win_mgr.search_control(obj.jid,
obj.conn.name, obj.resource)
if ctrl:
self.control = ctrl
self.control.set_session(self)
else:
fjid = jid
obj.popup = helpers.allow_popup_window(self.conn.name)
event_t = events.ChatEvent
event_type = 'message_received'
if self.control:
# We have a ChatControl open
obj.show_in_roster = False
obj.show_in_systray = False
do_event = False
elif obj.properties.is_sent_carbon:
# Its a Carbon Copied Message we sent
obj.show_in_roster = False
obj.show_in_systray = False
unread_events = app.events.get_events(
self.conn.name, fjid, types=['chat'])
read_ids = []
for msg in unread_events:
read_ids.append(msg.msg_log_id)
app.storage.archive.set_read_messages(read_ids)
app.events.remove_events(self.conn.name, fjid, types=['chat'])
do_event = False
else:
# Everything else
obj.show_in_roster = get_show_in_roster(event_type, self)
obj.show_in_systray = get_show_in_systray(event_type,
obj.conn.name,
contact.jid)
do_event = True
if do_event:
kind = obj.properties.type.value
event = event_t(
obj.msgtxt,
obj.properties.subject,
kind,
obj.properties.timestamp,
obj.resource,
obj.msg_log_id,
correct_id=obj.correct_id,
message_id=obj.properties.id,
session=self,
displaymarking=obj.displaymarking,
sent_forwarded=obj.properties.is_sent_carbon,
show_in_roster=obj.show_in_roster,
show_in_systray=obj.show_in_systray,
additional_data=obj.additional_data)
app.events.add_event(self.conn.name, fjid, event)
def roster_message(self, jid, msg, tim, msg_type='',
subject=None, resource='', msg_log_id=None, user_nick='',
displaymarking=None, additional_data=None):
"""
Display the message or show notification in the roster
"""
contact = None
fjid = jid
if additional_data is None:
additional_data = AdditionalDataDict()
# Try to catch the contact with correct resource
if resource:
fjid = jid + '/' + resource
contact = app.contacts.get_contact(self.conn.name, jid, resource)
highest_contact = app.contacts.get_contact_with_highest_priority(
self.conn.name, jid)
if not contact:
# If there is another resource, it may be a message from an invisible
# resource
lcontact = app.contacts.get_contacts(self.conn.name, jid)
if (len(lcontact) > 1 or (lcontact and lcontact[0].resource and \
lcontact[0].show != 'offline')) and jid.find('@') > 0:
contact = app.contacts.copy_contact(highest_contact)
contact.resource = resource
if resource:
fjid = jid + '/' + resource
contact.priority = 0
contact.show = 'offline'
contact.status = ''
app.contacts.add_contact(self.conn.name, contact)
else:
# Default to highest prio
fjid = jid
contact = highest_contact
if not contact:
# contact is not in roster
contact = app.interface.roster.add_to_not_in_the_roster(
self.conn.name, jid, user_nick)
if not self.control:
ctrl = app.interface.msg_win_mgr.get_control(fjid, self.conn.name)
if ctrl:
self.control = ctrl
self.control.set_session(self)
else:
fjid = jid
# Do we have a queue?
no_queue = len(app.events.get_events(self.conn.name, fjid)) == 0
popup = helpers.allow_popup_window(self.conn.name)
# We print if window is opened and it's not a single message
if self.control:
typ = ''
if msg_type == 'error':
typ = 'error'
self.control.add_message(msg,
typ,
tim=tim,
subject=subject,
displaymarking=displaymarking,
additional_data=additional_data)
if msg_log_id:
app.storage.archive.set_read_messages([msg_log_id])
return
# We save it in a queue
event_t = events.ChatEvent
event_type = 'message_received'
show_in_roster = get_show_in_roster(event_type, self)
show_in_systray = get_show_in_systray(event_type,
self.conn.name,
contact.jid)
event = event_t(msg, subject, msg_type, tim, resource,
msg_log_id, session=self,
displaymarking=displaymarking, sent_forwarded=False,
show_in_roster=show_in_roster, show_in_systray=show_in_systray,
additional_data=additional_data)
app.events.add_event(self.conn.name, fjid, event)
if popup:
if not self.control:
self.control = app.interface.new_chat(contact,
self.conn.name, session=self)
if app.events.get_events(self.conn.name, fjid):
self.control.read_queue()
else:
if no_queue: # We didn't have a queue: we change icons
app.interface.roster.draw_contact(jid, self.conn.name)
app.interface.roster.show_title() # we show the * or [n]
# Select the big brother contact in roster, it's visible because it has
# events.
family = app.contacts.get_metacontacts_family(self.conn.name, jid)
if family:
_nearby_family, bb_jid, bb_account = \
app.contacts.get_nearby_family_and_big_brother(family,
self.conn.name)
else:
bb_jid, bb_account = jid, self.conn.name
app.interface.roster.select_contact(bb_jid, bb_account)
| gpl-3.0 | -4,599,732,808,298,735,600 | 35.70195 | 81 | 0.544627 | false |
robinson96/GRAPE | vine/foreach.py | 1 | 2084 | import os
import option
import grapeGit as git
import utility
import grapeConfig
class ForEach(option.Option):
"""
Executes a command in the top level project, each submodule, and each nested subproject in this workspace.
Usage: grape-foreach [--quiet] [--noTopLevel] [--noSubprojects] [--noSubmodules] [--currentCWD] <cmd>
Options:
--quiet Quiets git's printout of "Entering submodule..."
--noTopLevel Does not call <cmd> in the workspace directory.
--noSubprojects Does not call <cmd> in any grape nested subprojects.
--noSubmodules Does not call <cmd> in any git submodules.
--currentCWD grape foreach normally starts work from the workspace top level directory. This flag
starts work from the current working directory.
Arguments:
<cmd> The cmd to execute.
"""
def __init__(self):
super(ForEach, self).__init__()
self._key = "foreach"
self._section = "Workspace"
def description(self):
return "runs a command in all projects in this workspace"
def execute(self,args):
cmd = args["<cmd>"]
retvals = utility.MultiRepoCommandLauncher(foreach, runInOuter = not args["--noTopLevel"],
skipSubmodules= args["--noSubmodules"],
runInSubprojects= not args["--noSubprojects"], globalArgs = args).launchFromWorkspaceDir(handleMRE=handleForeachMRE)
return retvals
def setDefaultConfig(self,config):
pass
def foreach(repo='', branch='', args={}):
cmd = args["<cmd>"]
with utility.cd(repo):
utility.executeSubProcess(cmd, repo, verbose = -1)
return True
def handleForeachMRE(mre):
for e1 in mre.exceptions():
try:
raise e1
except git.GrapeGitError as e:
utility.printMsg("Foreach failed.")
print e.gitCommand
print e.cwd
print e.gitOutput
return False
| bsd-3-clause | -205,728,636,987,969,500 | 33.163934 | 167 | 0.599808 | false |
hds-lab/coding-ml | msgvis/apps/api/urls.py | 1 | 2748 | from django.conf.urls import url
from msgvis.apps.api import views
from django.views.decorators.csrf import csrf_exempt
api_root_urls = {
'dataset': url(r'^dataset/$', csrf_exempt(views.DatasetView.as_view()), name='dataset'),
'message': url(r'^message/(?P<message_id>[0-9]+)$', csrf_exempt(views.MessageView.as_view()), name='message'),
'list_distribution': url(r'^list_distribution/(?P<dataset_id>[0-9]+)$', csrf_exempt(views.ListDistributionView.as_view()), name='list_distribution'),
'dictionary': url(r'^dictionary/$', csrf_exempt(views.DictionaryView.as_view()), name='dictionary'),
'svm': url(r'^svm/$', csrf_exempt(views.SVMResultView.as_view()), name='svm'),
'vector': url(r'^vector/(?P<message_id>[0-9]+)$', csrf_exempt(views.FeatureVectorView.as_view()), name='vector'),
'feature_list': url(r'^feature/$', csrf_exempt(views.UserFeatureView.as_view()), name='feature_list'),
'feature': url(r'^feature/(?P<feature_id>[0-9]+)/$', csrf_exempt(views.UserFeatureView.as_view()), name='feature'),
'distribution': url(r'^distribution/$', csrf_exempt(views.FeatureCodeDistributionView.as_view()), name='distribution'),
'assignment': url(r'^assignment/(?P<message_id>[0-9]+)$', csrf_exempt(views.CodeAssignmentView.as_view()), name='assignment'),
'definitions': url(r'^definition/$', csrf_exempt(views.CodeDefinitionView.as_view()), name='definitions'),
#'definition': url(r'^definition/(?P<code_id>[0-9]+)$', csrf_exempt(views.CodeDefinitionView.as_view()), name='definition'),
'code_messages': url(r'^code_messages/$', csrf_exempt(views.CodeMessageView.as_view()), name='code_messages'),
'some_messages': url(r'^some_messages/$', csrf_exempt(views.SomeMessageView.as_view()), name='some_messages'),
'all_coded_messages': url(r'^all_coded_messages/$', csrf_exempt(views.AllCodedMessageView.as_view()), name='all_coded_messages'),
'disagreement': url(r'^disagreement/(?P<message_id>[0-9]+)$', csrf_exempt(views.DisagreementIndicatorView.as_view()), name='disagreement'),
'pairwise': url(r'^pairwise/$', csrf_exempt(views.PairwiseConfusionMatrixView.as_view()), name='pairwise'),
'progress': url(r'^progress/$', csrf_exempt(views.ProgressView.as_view()), name='progress'),
'exp_progress': url(r'^exp_progress/(?P<exp_id>[0-9]+)/$', csrf_exempt(views.ExperimentProgressView.as_view()), name='exp_progress'),
'action-history': url(r'^history/$', views.ActionHistoryView.as_view(), name='action-history'),
'partners': url(r'^partners/$', views.PartnerView.as_view(), name='partners'),
'user': url(r'^user/$', views.UserView.as_view(), name='user'),
}
urlpatterns = api_root_urls.values() + [
url(r'^$', views.APIRoot.as_view(root_urls=api_root_urls)),
]
| mit | -6,190,470,258,981,440,000 | 77.514286 | 153 | 0.682678 | false |
tktrungna/leetcode | Python/zigzag-iterator.py | 1 | 1419 | """
QUESTION:
Given two 1d vectors, implement an iterator to return their elements alternately.
For example, given two 1d vectors:
v1 = [1, 2]
v2 = [3, 4, 5, 6]
By calling next repeatedly until hasNext returns false, the order of elements returned by next should be: [1, 3, 2, 4,
5, 6].
Follow up: What if you are given k 1d vectors? How well can your code be extended to such cases?
Clarification for the follow up question - Update (2015-09-18):
The "Zigzag" order is not clearly defined and is ambiguous for k > 2 cases. If "Zigzag" does not look right to you,
replace "Zigzag" with "Cyclic". For example, given the following input:
[1,2,3]
[4,5,6,7]
[8,9]
It should return [1,4,8,2,5,9,3,6,7].
ANSWER
Flatten the two vectors by interleaving them up front, then walk the merged
list. A sketch of the k-vector generalisation follows the class below.
"""
class ZigzagIterator(object):
def __init__(self, v1, v2):
"""
Initialize your data structure here.
:type v1: List[int]
:type v2: List[int]
"""
self.v = []
self.cur = 0
i = 0
while i < max(len(v1),len(v2)):
if i < len(v1): self.v += [v1[i]]
if i < len(v2): self.v += [v2[i]]
i += 1
def next(self):
self.cur += 1
return self.v[self.cur-1]
def hasNext(self):
return self.cur < len(self.v)
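# Sketch of the k-vector follow-up (illustrative, not part of the accepted
# solution above): round-robin over a queue of (position, vector) pairs
# instead of flattening, so next() stays O(1) and nothing is copied.
import collections
class CyclicIterator(object):
    def __init__(self, vectors):
        self.queue = collections.deque((0, v) for v in vectors if v)
    def next(self):
        i, v = self.queue.popleft()
        if i + 1 < len(v):
            # this vector still has elements pending, so rotate it to the back
            self.queue.append((i + 1, v))
        return v[i]
    def hasNext(self):
        return len(self.queue) > 0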
if __name__ == '__main__':
v1 = [1, 2]
v2 = [3, 4, 5, 6]
iter = ZigzagIterator(v1,v2)
while iter.hasNext():
print iter.next() | mit | 3,069,377,655,333,628,400 | 26.307692 | 118 | 0.589147 | false |
Snegovikufa/HPGL-GUI | hpgl_run/lvm_thread.py | 1 | 1909 | from geo_bsd import set_output_handler
from geo_bsd import set_progress_handler
from geo_bsd.geo import lvm_kriging
from PySide import QtCore
class LVMThread(QtCore.QThread):
propSignal = QtCore.Signal(object)
logMessage = QtCore.Signal(str)
progressMessage = QtCore.Signal(int)
def __init__(self, Prop, GridObject, Mean, EllipsoidRanges,
IntPoints, Variogram):
QtCore.QThread.__init__(self)
self.Prop = Prop
self.GridObject = GridObject
self.Mean = Mean
self.EllipsoidRanges = EllipsoidRanges
self.IntPoints = IntPoints
self.Variogram = Variogram
def run(self):
'''Runs thread'''
set_output_handler(self.OutputLog, None)
set_progress_handler(self.ProgressShow, None)
self.Result = lvm_kriging( self.Prop, self.GridObject,
self.Mean,
self.EllipsoidRanges,
self.IntPoints,
self.Variogram )
#self.emit(QtCore.SIGNAL("Result(PyQt_PyObject)"), self.Result)
self.propSignal.emit(self.Result)
def OutputLog(self, string, _):
'''Emits HPGL logs to main thread'''
#self.emit(QtCore.SIGNAL("msg(QString)"), QtCore.QString(self.StrForLog))
self.logMessage.emit(string)
return 0
def ProgressShow(self, stage, Percent, _):
'''Emits HPGL progress to main thread'''
self.Percent = Percent
self.stage = stage
if self.Percent == 0:
print self.stage,
elif self.Percent == -1:
print ""
else:
self.OutStr = int(self.Percent)
#self.emit(QtCore.SIGNAL("progress(QString)"),
# QtCore.QString(self.OutStr))
self.progressMessage.emit(self.OutStr)
return 0
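# Minimal wiring sketch (illustrative): `prop`, `grid`, `mean`, `ranges`,
# `points` and `variogram` are assumed to come from the calling application;
# only the signal hook-up is dictated by the class above.
def start_lvm(prop, grid, mean, ranges, points, variogram, on_result):
    thread = LVMThread(prop, grid, mean, ranges, points, variogram)
    thread.propSignal.connect(on_result)               # kriging result object
    thread.logMessage.connect(lambda msg: None)        # HPGL log lines
    thread.progressMessage.connect(lambda pct: None)   # progress percentages
    thread.start()
    return thread  # keep a reference so the thread is not garbage collected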
| gpl-2.0 | -2,951,198,958,696,396,000 | 34.351852 | 81 | 0.575694 | false |
benmishkanian/ASF-JIRA-mine | jiradb/employer.py | 1 | 3113 | import logging
from .schema import Contributor, AccountProject, ContributorAccount
from sqlalchemy import func, desc
log = logging.getLogger(__name__)
def getProjectCompaniesByCommits(session, project):
"""
Gets (organization, commitcount) for this project, ordered by commitcount descending. Organizations are obtained
from AccountProject.LinkedInEmployer.
:param session: The JIRADB session to query
:param project: The project for which commitcounts should be aggregated
:return: Organizations ranked by commit count for this project
"""
companiesByCommitsSubquery = session.query(AccountProject.LinkedInEmployer, func.sum(
AccountProject.BHCommitCount + AccountProject.NonBHCommitCount).label('commitcount')).filter(
AccountProject.project == project).group_by(AccountProject.LinkedInEmployer).subquery()
return session.query(companiesByCommitsSubquery).order_by(desc('commitcount'))
def getLikelyLinkedInEmployer(jiradb, contributorId):
"""
Gets a list of possible employers for the contributor based off of the employer of each of their accounts.
:param jiradb: JIRADB object
:param contributorId:
:return: a list of possible employer names for this contributor
"""
accountProjectRows = jiradb.session.query(Contributor, AccountProject.LinkedInEmployer,
AccountProject.project).join(ContributorAccount).join(
AccountProject).filter(Contributor.id == contributorId)
possibleEmployers = []
projects = []
for accountProjectRow in accountProjectRows:
if accountProjectRow.LinkedInEmployer not in possibleEmployers:
possibleEmployers.append(accountProjectRow.LinkedInEmployer)
if accountProjectRow.project not in projects:
projects.append(accountProjectRow.project)
if len(projects) == 1:
mainProject = projects[0]
elif len(projects) > 1:
# The main project is the one this person did the most commits to
countSubq = jiradb.session.query(AccountProject.project, func.sum(
AccountProject.BHCommitCount + AccountProject.NonBHCommitCount).label('commitcount')).join(
ContributorAccount).join(Contributor).filter(Contributor.id == contributorId).group_by(
AccountProject.project).subquery()
mainRow = jiradb.session.query(countSubq).order_by(desc('commitcount')).first()
        assert mainRow is not None, 'Found 0 projects for contributor {}'.format(contributorId)
mainProject = mainRow.project
else:
raise RuntimeError('contributor {} has no projects'.format(contributorId))
log.info('contributor # %s contributed to project(s): %s', contributorId, projects)
companyRankings = getProjectCompaniesByCommits(jiradb.session, mainProject)
for companyRanking in companyRankings:
if companyRanking.LinkedInEmployer in possibleEmployers:
return companyRanking.LinkedInEmployer
log.warning('%s has uncommon employer; taking first of: %s', contributorId, possibleEmployers)
return possibleEmployers[0]
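def print_top_employers(jiradb, project, limit=5):
    # Illustrative helper (not part of the mining pipeline): log the `limit`
    # organizations with the most commits to `project`.
    for row in getProjectCompaniesByCommits(jiradb.session, project).limit(limit):
        log.info('%s: %s commits', row.LinkedInEmployer, row.commitcount)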
| mit | -5,523,390,128,857,304,000 | 51.762712 | 116 | 0.733697 | false |
caterinaurban/Lyra | src/lyra/unittests/test_SignLattice.py | 1 | 3609 | """
Sign Lattice - Unit Tests
=========================
:Author: Jérôme Dohrau
"""
import unittest
from typing import List
from lyra.abstract_domains.numerical.sign_domain import SignLattice
from lyra.unittests.abstract_tests import AbstractTest
class TestSignLattice(AbstractTest.ArithmeticLatticeTest):
def default(self):
return SignLattice()
@staticmethod
def negative() -> SignLattice:
return SignLattice(True, False, False)
@staticmethod
def positive() -> SignLattice:
return SignLattice(False, False, True)
@staticmethod
def zero() -> SignLattice:
return SignLattice(False, True, False)
@staticmethod
def non_negative() -> SignLattice:
return SignLattice(False, True, True)
@staticmethod
def non_positive() -> SignLattice:
return SignLattice(True, True, False)
@staticmethod
def non_zero() -> SignLattice:
return SignLattice(True, False, True)
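    # The three positional flags encode which signs are admitted:
    # SignLattice(negative, zero, positive), e.g. non_zero == (True, False, True);
    # top and bottom presumably correspond to all flags set and none set.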
def elements(self) -> List[SignLattice]:
return [self.bottom(),
self.negative(), self.zero(), self.positive(),
self.non_negative(), self.non_zero(), self.non_positive(),
self.top()]
def test_less_equal(self):
super().test_less_equal()
self.assertTrue(self.negative().less_equal(self.non_positive()))
self.assertFalse(self.non_negative().less_equal(self.negative()))
self.assertTrue(self.positive().less_equal(self.non_negative()))
self.assertFalse(self.non_negative().less_equal(self.positive()))
self.assertFalse(self.zero().less_equal(self.non_zero()))
self.assertFalse(self.non_zero().less_equal(self.zero()))
def test_join(self):
super().test_join()
self.assertEqual(self.negative().join(self.positive()), self.non_zero())
self.assertEqual(self.non_negative().join(self.non_positive()), self.top())
def test_meet(self):
super().test_meet()
self.assertEqual(self.negative().meet(self.positive()), self.bottom())
self.assertEqual(self.non_negative().meet(self.non_positive()), self.zero())
def test_neg(self):
super().test_neg()
self.assertEqual(self.negative().neg(), self.positive())
self.assertEqual(self.positive().neg(), self.negative())
self.assertEqual(self.zero().neg(), self.zero())
self.assertEqual(self.non_negative().neg(), self.non_positive())
self.assertEqual(self.non_positive().neg(), self.non_negative())
self.assertEqual(self.non_zero().neg(), self.non_zero())
def test_add(self):
super().test_add()
self.assertEqual(self.positive().add(self.positive()), self.positive())
self.assertEqual(self.non_negative().add(self.zero()), self.non_negative())
self.assertEqual(self.negative().add(self.positive()), self.top())
def test_sub(self):
super().test_sub()
self.assertEqual(self.positive().sub(self.negative()), self.positive())
self.assertEqual(self.non_negative().sub(self.zero()), self.non_negative())
self.assertEqual(self.positive().sub(self.positive()), self.top())
def test_mult(self):
super().test_mult()
self.assertEqual(self.positive().mult(self.positive()), self.positive())
self.assertEqual(self.positive().mult(self.non_negative()), self.non_negative())
self.assertEqual(self.negative().mult(self.positive()), self.negative())
self.assertEqual(self.negative().mult(self.zero()), self.zero())
if __name__ == '__main__':
unittest.main()
| mpl-2.0 | -6,792,681,734,868,110,000 | 33.352381 | 88 | 0.636817 | false |
alphagov/digitalmarketplace-api | tests/main/views/test_frameworks.py | 1 | 59798 | import datetime
import mock
import pytest
from itertools import cycle
from flask import json
from freezegun import freeze_time
from sqlalchemy.exc import IntegrityError
from tests.bases import BaseApplicationTest, JSONUpdateTestMixin
from app.models import db, Framework, SupplierFramework, DraftService, User, FrameworkLot, AuditEvent, Brief
from tests.helpers import FixtureMixin
from app.main.views.frameworks import FRAMEWORK_UPDATE_WHITELISTED_ATTRIBUTES_MAP
class TestListFrameworks(BaseApplicationTest):
def test_all_frameworks_are_returned(self):
response = self.client.get('/frameworks')
data = json.loads(response.get_data())
assert response.status_code == 200
assert len(data['frameworks']) == len(Framework.query.all())
assert set(data['frameworks'][0].keys()) == set([
'allowDeclarationReuse',
'applicationsCloseAtUTC',
'clarificationQuestionsOpen',
'clarificationsCloseAtUTC',
'clarificationsPublishAtUTC',
'countersignerName',
'framework',
'family',
'frameworkAgreementDetails',
'frameworkAgreementVersion',
'frameworkExpiresAtUTC',
'frameworkLiveAtUTC',
'id',
'intentionToAwardAtUTC',
'lots',
'name',
'slug',
'status',
'variations',
'hasDirectAward',
'hasFurtherCompetition',
'isESignatureSupported',
])
class TestCreateFramework(BaseApplicationTest):
def framework(self, **kwargs):
return {
"frameworks": {
"slug": kwargs.get("slug", "example"),
"name": "Example",
"framework": "g-cloud",
"status": kwargs.get("status", "coming"),
"clarificationQuestionsOpen": kwargs.get("clarificationQuestionsOpen", False),
"lots": kwargs.get("lots", [
"saas", "paas", "iaas", "scs"
]),
"hasDirectAward": True,
"hasFurtherCompetition": False,
},
"updated_by": "example"
}
def teardown(self):
framework = Framework.query.filter(Framework.slug == "example").first()
if framework:
FrameworkLot.query.filter(FrameworkLot.framework_id == framework.id).delete()
Framework.query.filter(Framework.id == framework.id).delete()
db.session.commit()
super(TestCreateFramework, self).teardown()
def test_create_a_framework(self):
response = self.client.post("/frameworks",
data=json.dumps(self.framework()),
content_type="application/json")
assert response.status_code == 201
framework = Framework.query.filter(Framework.slug == "example").first()
assert framework.name == "Example"
assert len(framework.lots) == 4
def test_create_adds_audit_event(self):
framework_response = self.client.post(
"/frameworks",
data=json.dumps(self.framework()),
content_type="application/json",
)
audit_response = self.client.get("/audit-events")
framework_id = json.loads(framework_response.data)['frameworks']['id']
data = json.loads(audit_response.get_data(as_text=True))
assert len(data["auditEvents"]) == 1
assert data["auditEvents"][0] == {
'acknowledged': False,
'createdAt': mock.ANY,
'data': {
'update': {
'clarificationQuestionsOpen': False,
'framework': 'g-cloud',
'lots': [
'saas',
'paas',
'iaas',
'scs'
],
'name': 'Example',
'slug': 'example',
'status': 'coming',
'hasDirectAward': True,
'hasFurtherCompetition': False,
},
},
'id': mock.ANY,
'links': {'self': 'http://127.0.0.1:5000/audit-events'},
'objectId': framework_id,
'objectType': 'Framework',
'type': 'create_framework',
'user': 'example',
}
def test_create_fails_if_framework_already_exists(self):
self.client.post("/frameworks",
data=json.dumps(self.framework()),
content_type="application/json")
response = self.client.post("/frameworks",
data=json.dumps(self.framework()),
content_type="application/json")
assert response.status_code == 400
assert json.loads(response.get_data(as_text=True))["error"] == "Slug 'example' already in use"
def test_create_fails_if_status_is_invalid(self):
response = self.client.post("/frameworks",
data=json.dumps(self.framework(status="invalid")),
content_type="application/json")
assert response.status_code == 400
assert json.loads(response.get_data(as_text=True))["error"] == "Invalid status value 'invalid'"
def test_create_fails_if_clarification_questions_open_is_invalid(self):
response = self.client.post("/frameworks",
data=json.dumps(self.framework(clarificationQuestionsOpen="invalid")),
content_type="application/json")
assert response.status_code == 400
assert json.loads(response.get_data(as_text=True))["error"] == "Invalid framework"
def test_create_fails_if_lot_slug_is_invalid(self):
response = self.client.post("/frameworks",
data=json.dumps(self.framework(lots=["saas", "invalid", "bad"])),
content_type="application/json")
assert response.status_code == 400
assert json.loads(response.get_data(as_text=True))["error"] == "Invalid lot slugs: bad, invalid"
def test_create_fails_if_slug_is_invalid(self):
response = self.client.post("/frameworks",
data=json.dumps(self.framework(slug="this is/invalid")),
content_type="application/json")
assert response.status_code == 400
assert json.loads(response.get_data(as_text=True))["error"] == "Invalid slug value 'this is/invalid'"
def test_create_fails_if_direct_award_and_further_competition_false(self):
framework = self.framework()
framework['frameworks']['hasDirectAward'] = False
framework['frameworks']['hasFurtherCompetition'] = False
response = self.client.post("/frameworks",
data=json.dumps(framework),
content_type="application/json")
assert response.status_code == 400
assert json.loads(response.get_data(as_text=True))["error"] == "At least one of `hasDirectAward` or " \
"`hasFurtherCompetition` must be True"
def test_update_fails_if_direct_award_and_further_competition_both_false(self):
framework = self.framework(slug='example')
self.client.post("/frameworks", data=json.dumps(framework), content_type="application/json")
framework = {'frameworks': {'hasDirectAward': False, 'hasFurtherCompetition': False}, 'updated_by': 'test'}
        response = self.client.post('/frameworks/example', data=json.dumps(framework), content_type='application/json')
assert response.status_code == 400
assert json.loads(response.get_data(as_text=True))["error"] == "At least one of `hasDirectAward` or " \
"`hasFurtherCompetition` must be True"
class TestGetFramework(BaseApplicationTest):
def test_a_single_framework_is_returned(self):
response = self.client.get('/frameworks/g-cloud-7')
data = json.loads(response.get_data())
assert response.status_code == 200
assert data['frameworks']['slug'] == 'g-cloud-7'
assert 'status' in data['frameworks']
def test_framework_lots_are_returned(self):
response = self.client.get('/frameworks/g-cloud-7')
data = json.loads(response.get_data())
assert data['frameworks']['lots'] == [
{
u'id': 1,
u'name': u'Software as a Service',
u'slug': u'saas',
u'allowsBrief': False,
u'unitSingular': u'service',
u'oneServiceLimit': False,
u'unitPlural': u'services',
},
{
u'id': 2,
u'name': u'Platform as a Service',
u'slug': u'paas',
u'allowsBrief': False,
u'oneServiceLimit': False,
u'unitSingular': u'service',
u'unitPlural': u'services',
},
{
u'id': 3,
u'name': u'Infrastructure as a Service',
u'slug': u'iaas',
u'allowsBrief': False,
u'oneServiceLimit': False,
u'unitSingular': u'service',
u'unitPlural': u'services',
},
{
u'id': 4,
u'name': u'Specialist Cloud Services',
u'slug': u'scs',
u'allowsBrief': False,
u'oneServiceLimit': False,
u'unitSingular': u'service',
u'unitPlural': u'services',
}
]
def test_a_404_is_raised_if_it_does_not_exist(self):
response = self.client.get('/frameworks/biscuits-for-gov')
assert response.status_code == 404
class TestUpdateFramework(BaseApplicationTest, JSONUpdateTestMixin, FixtureMixin):
endpoint = '/frameworks/example'
method = 'post'
def setup(self):
super(TestUpdateFramework, self).setup()
self.framework_attributes_and_values_for_update = {
'id': 1,
'name': "Example Framework 2",
'slug': "example-framework-2",
'framework': "digital-outcomes-and-specialists",
'family': "digital-outcomes-and-specialists",
'frameworkAgreementDetails': {
"countersignerName": "Dan Saxby",
"frameworkAgreementVersion": "v1.0",
"variations": {
"banana": {
"createdAt": "2016-06-06T20:01:34.000000Z",
},
"toblerone": {
"createdAt": "2016-07-06T21:09:09.000000Z",
},
},
"lotOrder": ['iaas', 'scs', 'saas', 'paas'],
},
'status': "standstill",
'clarificationQuestionsOpen': False,
'lots': ['saas', 'paas', 'iaas', 'scs'],
'applicationsCloseAtUTC': '2023-04-11T16:00:00.000000Z',
'intentionToAwardAtUTC': '2023-04-25T00:00:00.000000Z',
'clarificationsCloseAtUTC': '2023-03-30T17:00:00.000000Z',
'clarificationsPublishAtUTC': '2023-04-04T17:00:00.000000Z',
'frameworkLiveAtUTC': '2023-05-01T00:00:00.000000Z',
'frameworkExpiresAtUTC': '2024-04-30T00:00:00.000000Z',
'allowDeclarationReuse': True,
'hasDirectAward': True,
'hasFurtherCompetition': False,
}
self.attribute_whitelist = FRAMEWORK_UPDATE_WHITELISTED_ATTRIBUTES_MAP.keys()
def post_framework_update(self, update):
return self.client.post(
'/frameworks/example-framework',
data=json.dumps({
'frameworks': update,
'updated_by': 'example user'
}),
content_type="application/json"
)
def test_returns_404_on_non_existent_framework(self, open_example_framework):
response = self.client.post(
'/frameworks/example-framework-2',
data=json.dumps({'frameworks': {
'status': 'expired'
}, 'updated_by': 'example user'}),
content_type="application/json"
)
assert response.status_code == 404
def test_can_update_whitelisted_fields(self, open_example_framework):
valid_attributes_and_values = {
key: value for key, value in self.framework_attributes_and_values_for_update.items()
if key in self.attribute_whitelist
}
for key, value in valid_attributes_and_values.items():
response = self.post_framework_update({
key: value
})
assert response.status_code == 200
post_data = json.loads(response.get_data())['frameworks']
# certain keys of `frameworkAgreementDetails` are un-nested and returned with other top-level keys
if isinstance(value, dict):
for nested_key, nested_value in value.items():
if nested_key in ("countersignerName", "frameworkAgreementVersion", "variations",):
assert post_data[nested_key] == nested_value
assert post_data[key] == value
# check the same data was actually persisted
get_data = json.loads(
self.client.get('/frameworks/example-framework').get_data()
)['frameworks']
assert post_data == get_data
def test_adds_audit_event(self, live_example_framework):
update_response = self.post_framework_update({'status': 'expired'})
framework_id = json.loads(update_response.data)['frameworks']['id']
audit_response = self.client.get("/audit-events")
data = json.loads(audit_response.get_data(as_text=True))
assert len(data["auditEvents"]) == 1
assert data["auditEvents"][0] == {
'acknowledged': False,
'createdAt': mock.ANY,
'data': {
'frameworkSlug': 'example-framework',
'update': {
'status': 'expired',
},
'framework_expires_at_utc set': "framework status set to 'expired'",
},
'id': mock.ANY,
'links': {'self': 'http://127.0.0.1:5000/audit-events'},
'objectId': framework_id,
'objectType': 'Framework',
'type': 'framework_update',
'user': 'example user',
}
def test_cannot_update_non_whitelisted_fields(self, open_example_framework):
invalid_attributes_and_values = {
key: value for key, value in self.framework_attributes_and_values_for_update.items()
if key not in self.attribute_whitelist
}
# add some random key
invalid_attributes_and_values.update({'beverage': 'Clamato'})
for key, value in invalid_attributes_and_values.items():
response = self.post_framework_update({
key: value
})
assert response.status_code == 400
data = json.loads(response.get_data())['error']
assert data == "Invalid keys for framework update: '{}'".format(key)
def test_cannot_update_framework_with_invalid_status(self, open_example_framework):
response = self.post_framework_update({
'status': 'invalid'
})
assert response.status_code == 400
data = json.loads(response.get_data())['error']
assert 'Invalid status value' in data
def test_passing_in_an_empty_update_is_a_failure(self, open_example_framework):
response = self.post_framework_update({})
assert response.status_code == 400
data = json.loads(response.get_data())['error']
assert data == "Framework update expects a payload"
def test_schema_validation_for_framework_agreement_details(self, open_example_framework):
invalid_framework_agreement_details = [
# frameworkAgreementVersion should be a string
{
'variations': {},
'frameworkAgreementVersion': 1,
},
# can't have a numeric lotDescription
{
'variations': {},
'frameworkAgreementVersion': "1",
'lotDescriptions': {"test-lot": 4321},
},
# can't have empty lotOrder
{
'variations': {},
'frameworkAgreementVersion': "1",
'lotOrder': [],
},
# frameworkAgreementVersion cannot be empty
{
'variations': {},
'frameworkAgreementVersion': "",
},
# variations should be an object
{
'variations': 1,
'frameworkAgreementVersion': "1.1.1",
},
# variations object must have 'createdAt' key
{
'frameworkAgreementVersion': "2",
'variations': {"created_at": "today"},
},
# countersignerName cannot be empty
{
'variations': {},
'frameworkAgreementVersion': "1",
'countersignerName': "",
},
# invalid key
{
'variations': {},
'frameworkAgreementVersion': "1",
'frameworkAgreementDessert': "Portuguese tart",
},
# empty update
{}
]
for invalid_value in invalid_framework_agreement_details:
response = self.post_framework_update({
'frameworkAgreementDetails': invalid_value
})
assert response.status_code == 400
@mock.patch('app.db.session.commit')
def test_update_framework_catches_db_errors(self, db_commit, open_example_framework):
db_commit.side_effect = IntegrityError("Could not commit", orig=None, params={})
valid_attributes_and_values = {
key: value for key, value in self.framework_attributes_and_values_for_update.items()
if key in self.attribute_whitelist
}
for key, value in valid_attributes_and_values.items():
response = self.post_framework_update({
key: value
})
assert response.status_code == 400
assert "Could not commit" in json.loads(response.get_data())["error"]
def test_timestamps_set_on_state_change_with_audit_data(self, open_example_framework):
updates = [
{'clarificationQuestionsOpen': False},
{'status': 'pending'},
{'status': 'live'},
{'status': 'expired'},
]
timestamp_keys = [
'clarificationsCloseAtUTC',
'applicationsCloseAtUTC',
'frameworkLiveAtUTC',
'frameworkExpiresAtUTC'
]
audit_data = [
{'clarifications_close_at_utc set': 'clarification questions closed'},
{'applications_close_at_utc set': "framework status set to 'pending'"},
{'framework_live_at_utc set': "framework status set to 'live'"},
{'framework_expires_at_utc set': "framework status set to 'expired'"},
]
for update, timestamp_key, data in zip(updates, timestamp_keys, audit_data):
update_timestamp = f'{datetime.datetime.utcnow().isoformat()}Z'
with freeze_time(update_timestamp):
self.post_framework_update(update)
response = self.client.get('/frameworks/example-framework')
framework = json.loads(response.get_data())['frameworks']
assert framework[timestamp_key] == update_timestamp
audit = AuditEvent.query.all()[-1]
assert audit.data == {
'frameworkSlug': 'example-framework',
'update': update,
**data,
}
def test_timestamps_not_updated_if_not_change_in_state(self, open_example_framework):
updates = [
{'clarificationQuestionsOpen': False},
{'status': 'pending'},
{'status': 'live'},
{'status': 'expired'},
]
timestamp_keys = [
'clarificationsCloseAtUTC',
'applicationsCloseAtUTC',
'frameworkLiveAtUTC',
'frameworkExpiresAtUTC'
]
for update, timestamp_key in zip(updates, timestamp_keys):
# Update the framework
self.post_framework_update(update)
check_time = datetime.datetime.utcnow()
response = self.client.get('/frameworks/example-framework')
framework = json.loads(response.get_data())['frameworks']
timestamp = framework[timestamp_key]
# Make sure a measurable amount of time has passed since last update
assert datetime.datetime.utcnow() > check_time
# Update the framework again, with the same values.
self.post_framework_update(update)
response = self.client.get('/frameworks/example-framework')
framework = json.loads(response.get_data())['frameworks']
# Make sure the timestamp hasn't changed.
assert framework[timestamp_key] == timestamp
class TestFrameworkStats(BaseApplicationTest, FixtureMixin):
def make_declaration(self, framework_id, supplier_ids, status=None):
db.session.query(
SupplierFramework
).filter(
SupplierFramework.framework_id == framework_id,
SupplierFramework.supplier_id.in_(supplier_ids)
).update({
SupplierFramework.declaration: {'status': status}
}, synchronize_session=False)
db.session.commit()
def register_framework_interest(self, framework_id, supplier_ids):
for supplier_id in supplier_ids:
db.session.add(
SupplierFramework(
framework_id=framework_id,
supplier_id=supplier_id,
declaration={}
)
)
db.session.commit()
def create_drafts(self, framework_id, supplier_id_counts):
framework = Framework.query.get(framework_id)
framework_lots = framework.lots
for supplier_id, unsub_count, sub_count in supplier_id_counts:
for ind, lot in zip(range(unsub_count + sub_count), cycle(framework_lots)):
if lot.one_service_limit and ind >= len(framework_lots):
# skip creating second+ services for one_service_limit lots
continue
db.session.add(
DraftService(
lot=lot,
framework_id=framework_id,
supplier_id=supplier_id,
data={},
status="not-submitted" if ind < unsub_count else "submitted",
lot_one_service_limit=lot.one_service_limit,
)
)
db.session.commit()
def create_users(self, supplier_ids, logged_in_at):
for supplier_id in supplier_ids:
db.session.add(
User(
name='supplier user',
email_address='supplier-{}@user.dmdev'.format(supplier_id),
password='testpassword',
active=True,
password_changed_at=datetime.datetime.utcnow(),
role='supplier',
supplier_id=supplier_id,
logged_in_at=logged_in_at
)
)
db.session.commit()
def setup_supplier_data(self):
self.setup_dummy_suppliers(30)
self.create_users(
[1, 2, 3, 4, 5],
logged_in_at=datetime.datetime.utcnow() - datetime.timedelta(days=1)
)
self.create_users(
[6, 7, 8, 9],
logged_in_at=datetime.datetime.utcnow() - datetime.timedelta(days=10)
)
self.create_users(
[10, 11],
logged_in_at=None
)
def setup_framework_data(self, framework_slug):
framework = Framework.query.filter(Framework.slug == framework_slug).first()
self.register_framework_interest(framework.id, range(20))
self.make_declaration(framework.id, [1, 3, 5, 7, 9, 11], status='started')
self.make_declaration(framework.id, [0, 2, 4, 6, 8, 10], status='complete')
self.create_drafts(framework.id, [
(1, 1, 2),
(2, 7, 15),
(3, 2, 2),
(14, 3, 7),
])
def setup_data(self, framework_slug):
self.setup_supplier_data()
self.setup_framework_data(framework_slug)
def test_stats(self):
self.setup_supplier_data()
self.setup_framework_data('g-cloud-7')
self.setup_framework_data('digital-outcomes-and-specialists')
response = self.client.get('/frameworks/g-cloud-7/stats')
assert json.loads(response.get_data()) == {
u'services': [
{u'count': 1, u'status': u'not-submitted',
u'declaration_made': False, u'lot': u'iaas'},
{u'count': 2, u'status': u'not-submitted',
u'declaration_made': True, u'lot': u'iaas'},
{u'count': 2, u'status': u'not-submitted',
u'declaration_made': False, u'lot': u'paas'},
{u'count': 2, u'status': u'not-submitted',
u'declaration_made': True, u'lot': u'paas'},
{u'count': 3, u'status': u'not-submitted',
u'declaration_made': False, u'lot': u'saas'},
{u'count': 2, u'status': u'not-submitted',
u'declaration_made': True, u'lot': u'saas'},
{u'count': 1, u'status': u'not-submitted',
u'declaration_made': True, u'lot': u'scs'},
{u'count': 3, u'status': u'submitted',
u'declaration_made': False, u'lot': u'iaas'},
{u'count': 3, u'status': u'submitted',
u'declaration_made': True, u'lot': u'iaas'},
{u'count': 3, u'status': u'submitted',
u'declaration_made': False, u'lot': u'paas'},
{u'count': 4, u'status': u'submitted',
u'declaration_made': True, u'lot': u'paas'},
{u'count': 2, u'status': u'submitted',
u'declaration_made': False, u'lot': u'saas'},
{u'count': 4, u'status': u'submitted',
u'declaration_made': True, u'lot': u'saas'},
{u'count': 3, u'status': u'submitted',
u'declaration_made': False, u'lot': u'scs'},
{u'count': 4, u'status': u'submitted',
u'declaration_made': True, u'lot': u'scs'},
],
u'interested_suppliers': [
{u'count': 7, u'declaration_status': None, u'has_completed_services': False},
{u'count': 1, u'declaration_status': None, u'has_completed_services': True},
{u'count': 5, u'declaration_status': 'complete', u'has_completed_services': False},
{u'count': 1, u'declaration_status': 'complete', u'has_completed_services': True},
{u'count': 4, u'declaration_status': 'started', u'has_completed_services': False},
{u'count': 2, u'declaration_status': 'started', u'has_completed_services': True},
],
u'supplier_users': [
{u'count': 4, u'recent_login': False},
{u'count': 2, u'recent_login': None},
{u'count': 5, u'recent_login': True},
]
}
def test_stats_are_for_g_cloud_7_only(self):
self.setup_data('g-cloud-6')
response = self.client.get('/frameworks/g-cloud-7/stats')
assert json.loads(response.get_data()) == {
u'interested_suppliers': [],
u'services': [],
u'supplier_users': [
{u'count': 4, u'recent_login': False},
{u'count': 2, u'recent_login': None},
{u'count': 5, u'recent_login': True},
]
}
def test_stats_handles_null_declarations(self):
self.setup_data('g-cloud-7')
framework = Framework.query.filter(Framework.slug == 'g-cloud-7').first()
db.session.query(
SupplierFramework
).filter(
SupplierFramework.framework_id == framework.id,
SupplierFramework.supplier_id.in_([0, 1])
).update({
SupplierFramework.declaration: None
}, synchronize_session=False)
db.session.commit()
response = self.client.get('/frameworks/g-cloud-7/stats')
assert response.status_code == 200
class TestGetFrameworkSuppliers(BaseApplicationTest, FixtureMixin):
def setup(self):
"""Sets up supplier frameworks as follows:
Suppliers with IDs 0-10 have a G-Cloud 8 SupplierFramework record ("have registered interest")
Supplier 0 has returned a G-Cloud 7 agreement but not G-Cloud 8
Suppliers 1 and 2 have drafts of G-Cloud 8 agreements
Suppliers 3, 4 and 5 have returned their G-Cloud 8 agreements
Supplier 4 and 9's agreements were put on hold
        Supplier 6 has been approved for countersignature but doesn't have a file yet
Suppliers 7, 8, 9 and 10 have countersigned agreements
Supplier 11 has nothing to do with anything or anyone
We use freeze_time to create a non-trivial ordering of creation/signing events in time, so that different
suppliers event timelines overlap in slightly complex ways, ensuring we test things like ordering properly.
"""
super(TestGetFrameworkSuppliers, self).setup()
with freeze_time("2016-10-09", tick=True):
self.setup_dummy_suppliers(12)
self.setup_dummy_user(id=123, role='supplier')
self.setup_dummy_user(id=321, role='admin-ccs-sourcing')
db.session.execute("UPDATE frameworks SET status='open' WHERE slug='g-cloud-7'")
db.session.execute("UPDATE frameworks SET status='open' WHERE slug='g-cloud-8'")
db.session.commit()
with freeze_time("2016-10-10", tick=True):
# Supplier zero is on G-Cloud 7
response = self.client.put(
'/suppliers/0/frameworks/g-cloud-7',
data=json.dumps({
'updated_by': 'example'
}),
content_type='application/json')
assert response.status_code == 201, response.get_data(as_text=True)
response = self.client.post(
'/suppliers/0/frameworks/g-cloud-7',
data=json.dumps({
'updated_by': 'example',
'frameworkInterest': {'onFramework': True}
}),
content_type='application/json')
assert response.status_code == 200, response.get_data(as_text=True)
response = self.client.post(
'/agreements',
data=json.dumps({
'updated_by': 'example',
'agreement': {'supplierId': 0, 'frameworkSlug': 'g-cloud-7'},
}),
content_type='application/json')
assert response.status_code == 201, response.get_data(as_text=True)
data = json.loads(response.get_data())
agreement_id = data['agreement']['id']
response = self.client.post(
'/agreements/{}'.format(agreement_id),
data=json.dumps({
'updated_by': 'example',
'agreement': {'signedAgreementPath': '/path-to-g-cloud-7.pdf'},
}),
content_type='application/json')
assert response.status_code == 200, response.get_data(as_text=True)
response = self.client.post(
'/agreements/{}/sign'.format(agreement_id),
data=json.dumps({
'updated_by': 'example',
'agreement': {},
}),
content_type='application/json')
assert response.status_code == 200, response.get_data(as_text=True)
# (Almost) everyone is on G-Cloud 8
for supplier_id in range(11):
with freeze_time(datetime.datetime(2016, 10, supplier_id + 2)):
response = self.client.put(
'/suppliers/{}/frameworks/g-cloud-8'.format(supplier_id),
data=json.dumps({
'updated_by': 'example'
}),
content_type='application/json')
assert response.status_code == 201, response.get_data(as_text=True)
with freeze_time(datetime.datetime(2016, 10, supplier_id + 2, 10)):
response = self.client.put(
'/suppliers/{}/frameworks/g-cloud-8/declaration'.format(supplier_id),
data=json.dumps({
'updated_by': 'example',
'declaration': {
"status": "complete",
"firstRegistered": "16/06/1904",
},
}),
content_type='application/json')
assert response.status_code == 201, response.get_data(as_text=True)
with freeze_time(datetime.datetime(2016, 10, supplier_id + 3)):
response = self.client.post(
'/suppliers/{}/frameworks/g-cloud-8'.format(supplier_id),
data=json.dumps({
'updated_by': 'example',
'frameworkInterest': {
'onFramework': True,
},
}),
content_type='application/json')
assert response.status_code == 200, response.get_data(as_text=True)
# Suppliers 1-10 have started to return a G-Cloud 8 agreement (created a draft)
agreement_ids = {}
for supplier_id in range(1, 11):
with freeze_time(datetime.datetime(2016, 11, (supplier_id + 1) * 2)):
response = self.client.post(
'/agreements',
data=json.dumps({
'updated_by': 'example',
'agreement': {'supplierId': supplier_id, 'frameworkSlug': 'g-cloud-8'},
}),
content_type='application/json'
)
assert response.status_code == 201, response.get_data(as_text=True)
data = json.loads(response.get_data())
agreement_ids[supplier_id] = data['agreement']['id']
        # (supplier 10 created a superfluous agreement which they then didn't use)
with freeze_time(datetime.datetime(2016, 11, 26)):
response = self.client.post(
'/agreements',
data=json.dumps({
'updated_by': 'example',
'agreement': {'supplierId': 10, 'frameworkSlug': 'g-cloud-8'},
}),
content_type='application/json'
)
assert response.status_code == 201, response.get_data(as_text=True)
for supplier_id in range(1, 11):
with freeze_time(datetime.datetime(2016, 11, (supplier_id + 1) * 2, 10)):
response = self.client.post(
'/agreements/{}'.format(agreement_ids[supplier_id]),
data=json.dumps({
'updated_by': 'example',
'agreement': {
'signedAgreementPath': 'path/to/agreement/{}.pdf'.format(supplier_id),
'signedAgreementDetails': {
'signerName': 'name_{}'.format(supplier_id),
'signerRole': 'job_{}'.format(supplier_id)
},
}
}),
content_type='application/json'
)
assert response.status_code == 200, response.get_data(as_text=True)
# Suppliers 3-10 have returned their G-Cloud 8 agreement
for supplier_id in range(3, 11):
with freeze_time(datetime.datetime(2016, 11, 30, 11 - supplier_id)):
response = self.client.post(
'/agreements/{}/sign'.format(agreement_ids[supplier_id]),
data=json.dumps({
'updated_by': 'example',
'agreement': {
'signedAgreementDetails': {
'uploaderUserId': 123,
},
},
}),
content_type='application/json'
)
assert response.status_code == 200, response.get_data(as_text=True)
# Supplier 4 and 9's agreements were put on hold (only 4 subsequently remained on hold)
for supplier_id in (4, 9,):
with freeze_time(datetime.datetime(2016, 11, 30, 12 - (supplier_id // 3))):
response = self.client.post(
'/agreements/{}/on-hold'.format(agreement_ids[supplier_id]),
data=json.dumps({'updated_by': 'example'}),
content_type='application/json'
)
assert response.status_code == 200, response.get_data(as_text=True)
# Suppliers 6-10 have been approved for countersignature
for supplier_id in range(6, 11):
with freeze_time(datetime.datetime(2016, 11, 30, 15 - supplier_id)):
response = self.client.post(
'/agreements/{}/approve'.format(agreement_ids[supplier_id]),
data=json.dumps({
'updated_by': 'example',
"agreement": {'userId': 321},
}),
content_type='application/json'
)
assert response.status_code == 200, response.get_data(as_text=True)
# Suppliers 7-10 have countersigned agreements
for supplier_id in range(7, 11):
with freeze_time(datetime.datetime(2016, 12, 25, 5 + supplier_id)):
response = self.client.post(
'/agreements/{}'.format(agreement_ids[supplier_id]),
data=json.dumps({
'updated_by': 'example',
'agreement': {
'countersignedAgreementPath': 'path/to/countersigned{}.pdf'.format(supplier_id)
}
}),
content_type='application/json'
)
assert response.status_code == 200, response.get_data(as_text=True)
def test_list_suppliers_combined(self, live_g8_framework):
# it would be nice to implement the following as individual tests, but the setup method is too expensive and has
        # a detrimental effect on test-run time. since this is a readonly endpoint we shouldn't be mutating state between
# calls anyway, and we're always testing the same state setup by the same setup routine, so a quick fix for now
# is to merge these into a combined supertest. they are still kept apart as separate methods to avoid locals
# leaking from one test to another and disguising broken tests.
# TODO perhaps fix db global teardown fixture so that db isn't mandatorily cleared after every test, allowing
# us to use shared-setup fixtures.
self._subtest_list_suppliers_related_to_a_framework()
self._subtest_list_suppliers_by_agreement_returned_false()
self._subtest_list_suppliers_by_agreement_returned_true()
self._subtest_list_suppliers_by_agreement_returned_false()
self._subtest_list_suppliers_by_status_signed()
self._subtest_list_suppliers_by_status_on_hold()
self._subtest_list_suppliers_by_status_approved()
self._subtest_list_suppliers_by_status_countersigned()
self._subtest_list_suppliers_by_multiple_statuses_1()
self._subtest_list_suppliers_by_multiple_statuses_2()
self._subtest_list_suppliers_by_multiple_statuses_and_agreement_returned_true()
self._subtest_list_suppliers_by_multiple_statuses_and_agreement_returned_false()
def _subtest_list_suppliers_related_to_a_framework(self):
# One G7 supplier
response = self.client.get('/frameworks/g-cloud-7/suppliers')
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (0,)
assert not any(
(sf.get("agreementDetails") or {}).get("uploaderUserEmail") for sf in data["supplierFrameworks"]
)
# Ten G8 suppliers
response = self.client.get('/frameworks/g-cloud-8/suppliers?with_users=true')
assert response.status_code == 200
data = json.loads(response.get_data())
# supplierFrameworks are returned in order of ID if they don't have a framework agreement
# returned, and from oldest to newest returned if they do
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (0, 1, 2, 10, 9, 8, 7, 6, 5, 4, 3,)
# this listing view should not include extended user information
assert not any(
(sf.get("agreementDetails") or {}).get("uploaderUserEmail") for sf in data["supplierFrameworks"]
)
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_agreement_returned_true(self):
response = self.client.get(
'/frameworks/g-cloud-8/suppliers?with_users=false&agreement_returned=true'
)
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (10, 9, 8, 7, 6, 5, 4, 3,)
assert all(sf["agreementReturnedAt"] for sf in data["supplierFrameworks"])
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_agreement_returned_false(self):
response = self.client.get(
'/frameworks/g-cloud-8/suppliers?agreement_returned=false'
)
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (0, 1, 2,)
assert all(sf['agreementReturnedAt'] is None for sf in data['supplierFrameworks'])
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_status_signed(self):
response = self.client.get('/frameworks/g-cloud-8/suppliers?status=signed&with_declarations=false')
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (5, 3,)
assert all(sf['agreementStatus'] == "signed" for sf in data['supplierFrameworks'])
assert not any('declaration' in sf for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_status_on_hold(self):
response = self.client.get('/frameworks/g-cloud-8/suppliers?status=on-hold')
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (4,)
assert all(sf['agreementStatus'] == "on-hold" for sf in data['supplierFrameworks'])
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_status_approved(self):
response = self.client.get('/frameworks/g-cloud-8/suppliers?status=approved')
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (6,)
assert all(sf['agreementStatus'] == "approved" for sf in data['supplierFrameworks'])
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_status_countersigned(self):
response = self.client.get('/frameworks/g-cloud-8/suppliers?status=countersigned')
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (10, 9, 8, 7,)
assert all(sf['agreementStatus'] == "countersigned" for sf in data['supplierFrameworks'])
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_multiple_statuses_1(self):
response = self.client.get('/frameworks/g-cloud-8/suppliers?status=approved,countersigned&with_users=true')
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (10, 9, 8, 7, 6,)
assert all(sf['agreementStatus'] in ("approved", "countersigned") for sf in data['supplierFrameworks'])
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_multiple_statuses_2(self):
response = self.client.get('/frameworks/g-cloud-8/suppliers?with_declarations=true&status=signed,approved')
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (6, 5, 3,)
assert all(sf['agreementStatus'] in ("approved", "signed") for sf in data['supplierFrameworks'])
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_multiple_statuses_and_agreement_returned_true(self):
response = self.client.get(
'/frameworks/g-cloud-8/suppliers?status=approved,countersigned&agreement_returned=true'
)
assert response.status_code == 200
data = json.loads(response.get_data())
assert tuple(sf["supplierId"] for sf in data["supplierFrameworks"]) == (10, 9, 8, 7, 6,)
assert all(sf['agreementStatus'] in ("approved", "countersigned") for sf in data['supplierFrameworks'])
assert all(sf["agreementReturnedAt"] for sf in data["supplierFrameworks"])
assert all(sf['declaration'] for sf in data['supplierFrameworks'])
def _subtest_list_suppliers_by_multiple_statuses_and_agreement_returned_false(self):
response = self.client.get(
'/frameworks/g-cloud-8/suppliers?status=approved,countersigned&agreement_returned=false'
)
assert response.status_code == 200
data = json.loads(response.get_data())
assert len(data['supplierFrameworks']) == 0
class TestGetFrameworkInterest(BaseApplicationTest, FixtureMixin):
def setup(self):
super(TestGetFrameworkInterest, self).setup()
self.register_g7_interest(5)
def register_g7_interest(self, num):
self.setup_dummy_suppliers(num)
for supplier_id in range(num):
db.session.add(
SupplierFramework(
framework_id=4,
supplier_id=supplier_id
)
)
db.session.commit()
def test_interested_suppliers_are_returned(self):
response = self.client.get('/frameworks/g-cloud-7/interest')
assert response.status_code == 200
data = json.loads(response.get_data())
assert data['interestedSuppliers'] == [0, 1, 2, 3, 4]
def test_a_404_is_raised_if_it_does_not_exist(self):
response = self.client.get('/frameworks/biscuits-for-gov/interest')
assert response.status_code == 404
class TestTransitionDosFramework(BaseApplicationTest, FixtureMixin):
def _setup_for_succesful_call(self):
self.setup_dummy_framework(
slug="digital-outcomes-and-specialists-2", framework_family="digital-outcomes-and-specialists",
status='live', id=101,
)
self.setup_dummy_framework(
slug="digital-outcomes-and-specialists-3", framework_family="digital-outcomes-and-specialists",
status='standstill', id=102,
)
self.setup_dummy_user()
for status in ("draft", "live", "withdrawn", "closed", "draft", "cancelled", "unsuccessful"):
self.setup_dummy_brief(
status=status,
framework_slug="digital-outcomes-and-specialists-2",
data={"some": "data"},
user_id=123,
)
def test_400s_if_invalid_updater_json(self):
response = self.client.post(
"/frameworks/transition-dos/sausage-cloud-6",
data=json.dumps({"not-updated": "correctly"}),
content_type="application/json"
)
data = response.get_data(as_text=True)
assert response.status_code == 400
assert json.loads(data)['error'] == "JSON validation error: 'updated_by' is a required property"
def test_400s_if_expiring_framework_not_in_request_body(self):
response = self.client.post(
"/frameworks/transition-dos/sausage-cloud-6",
data=json.dumps({"updated_by": "🤖"}),
content_type="application/json"
)
data = response.get_data(as_text=True)
assert response.status_code == 400
assert json.loads(data)['error'] == "Invalid JSON must have 'expiringFramework' keys"
def test_400s_if_going_live_framework_is_older_than_expiring_framework(self):
dos_2_id = self.setup_dummy_framework(
slug="digital-outcomes-and-specialists-2", framework_family="digital-outcomes-and-specialists", id=101,
)
response = self.client.post(
"/frameworks/transition-dos/digital-outcomes-and-specialists",
data=json.dumps({
"updated_by": "🤖",
"expiringFramework": "digital-outcomes-and-specialists-2",
}),
content_type="application/json"
)
data = response.get_data(as_text=True)
assert response.status_code == 400
assert json.loads(data)['error'] == \
f"'going_live_framework' ID ('5') must greater than'expiring_framework' ID ('{dos_2_id}')"
@pytest.mark.parametrize(
("going_live_slug", "going_live_family", "expiring_slug", "expiring_family"),
(
("g-cloud-10", "g-cloud", "digital-outcomes-and-specialists", "digital-outcomes-and-specialists",),
("digital-outcomes-and-specialists-2", "digital-outcomes-and-specialists", "g-cloud-10", "g-cloud",)
)
)
def test_400s_if_either_framework_has_wrong_family(
self, going_live_slug, going_live_family, expiring_slug, expiring_family,
):
self.setup_dummy_framework(
slug="g-cloud-10", framework_family="g-cloud", lots=[], id=101,
)
self.setup_dummy_framework(
slug="digital-outcomes-and-specialists-2", framework_family="digital-outcomes-and-specialists", id=102,
)
response = self.client.post(
f"/frameworks/transition-dos/{going_live_slug}",
data=json.dumps({
"updated_by": "🤖",
"expiringFramework": f"{expiring_slug}",
}),
content_type="application/json"
)
data = response.get_data(as_text=True)
assert response.status_code == 400
assert json.loads(data)['error'] == f"'going_live_framework' family: '{going_live_family}' and " \
f"'expiring_framework' family: '{expiring_family}' must both be 'digital-outcomes-and-specialists'"
@pytest.mark.parametrize('status', ('coming', 'open', 'pending', 'live', 'expired'))
def test_400s_if_going_live_framework_status_is_not_standstill(self, status):
self.setup_dummy_framework(
slug="digital-outcomes-and-specialists-2", framework_family="digital-outcomes-and-specialists",
status='live', id=101,
)
self.setup_dummy_framework(
slug="digital-outcomes-and-specialists-3", framework_family="digital-outcomes-and-specialists",
status=status, id=102,
)
response = self.client.post(
"/frameworks/transition-dos/digital-outcomes-and-specialists-3",
data=json.dumps({
"updated_by": "🤖",
"expiringFramework": "digital-outcomes-and-specialists-2",
}),
content_type="application/json"
)
data = response.get_data(as_text=True)
assert response.status_code == 400
assert json.loads(data)['error'] == f"'going_live_framework' status ({status}) must be 'standstill', and " \
"'expiring_framework' status (live) must be 'live'"
def test_success_does_all_the_right_things(self):
# Remove all audit events to make assertions easier later
AuditEvent.query.delete()
self._setup_for_succesful_call()
# keep track of brief ids for assertions later
draft_brief_ids = {
brief.id for brief in Brief.query.filter(Brief.framework_id == 101).all() if brief.status == 'draft'
}
assert len(draft_brief_ids) == 2
not_draft_brief_ids = {
brief.id for brief in Brief.query.filter(Brief.framework_id == 101).all() if brief.status != 'draft'
}
assert len(not_draft_brief_ids) == 5
with freeze_time('2018-09-03 17:09:56.999999'):
response = self.client.post(
"/frameworks/transition-dos/digital-outcomes-and-specialists-3",
data=json.dumps({
"updated_by": "🤖",
"expiringFramework": "digital-outcomes-and-specialists-2",
}),
content_type="application/json"
)
assert response.status_code == 200
# Assert that the correct briefs were transferred to the new live framework
expired_framework_briefs = Brief.query.filter(Brief.framework_id == 101).all()
new_live_framework_briefs = Brief.query.filter(Brief.framework_id == 102).all()
assert all(brief.status == "draft" for brief in new_live_framework_briefs)
assert {brief.id for brief in new_live_framework_briefs} == draft_brief_ids
assert all(brief.status != "draft" for brief in expired_framework_briefs)
assert {brief.id for brief in expired_framework_briefs} == not_draft_brief_ids
# Assert audit events were created for the brief changes
brief_audits = AuditEvent.query.filter(AuditEvent.type == "update_brief_framework_id").all()
assert len(brief_audits) == 2
assert all(
(audit.data["previousFrameworkId"], audit.data["newFrameworkId"]) == (101, 102) for audit in brief_audits
)
assert {audit.data["briefId"] for audit in brief_audits} == draft_brief_ids
# Assert the frameworks statuses were correctly changed and timestamps set
expired_framework = Framework.query.get(101)
new_live_framework = Framework.query.get(102)
assert expired_framework.status == "expired"
assert expired_framework.framework_expires_at_utc == datetime.datetime(2018, 9, 3, 17, 9, 56, 999999)
assert new_live_framework.status == "live"
assert new_live_framework.framework_live_at_utc == datetime.datetime(2018, 9, 3, 17, 9, 56, 999999)
# Assert audit events for the framework updates were created
framework_audits = AuditEvent.query.filter(AuditEvent.type == "framework_update").all()
assert len(framework_audits) == 2
assert {(audit.data["update"]["status"], audit.data["frameworkSlug"]) for audit in framework_audits} == \
{("expired", "digital-outcomes-and-specialists-2"), ("live", "digital-outcomes-and-specialists-3")}
# Assert the endpoint returns the new live framework to us
assert json.loads(response.get_data(as_text=True))["frameworks"]["slug"] == "digital-outcomes-and-specialists-3"
def test_audit_events_have_corresponding_timestamps(self):
AuditEvent.query.delete()
self._setup_for_succesful_call()
response = self.client.post(
"/frameworks/transition-dos/digital-outcomes-and-specialists-3",
data=json.dumps({
"updated_by": "🤖",
"expiringFramework": "digital-outcomes-and-specialists-2",
}),
content_type="application/json"
)
assert response.status_code == 200
framework_audit_events = AuditEvent.query.filter(AuditEvent.type == "framework_update").all()
assert all(audit.created_at == framework_audit_events[0].created_at for audit in framework_audit_events)
brief_audit_events = AuditEvent.query.filter(AuditEvent.type == "update_brief_framework_id").all()
assert all(audit.created_at == brief_audit_events[0].created_at for audit in brief_audit_events)
@pytest.mark.parametrize('commit_to_fail_on', ('frameworks', 'briefs'))
def test_integrity_errors_are_handled_and_changes_rolled_back(self, commit_to_fail_on):
from app.main.views.frameworks import db as app_db
commit_func = app_db.session.commit
# Using a generator here so that `commit_func` gets called when the commit mock is called, and not before.
def _side_effects(commit_to_fail_on):
if commit_to_fail_on == 'briefs':
yield commit_func()
raise IntegrityError("Could not commit", orig=None, params={})
self._setup_for_succesful_call()
with mock.patch("app.main.views.frameworks.db.session.commit") as commit_mock:
commit_mock.side_effect = _side_effects(commit_to_fail_on)
response = self.client.post(
"/frameworks/transition-dos/digital-outcomes-and-specialists-3",
data=json.dumps({
"updated_by": "🤖",
"expiringFramework": "digital-outcomes-and-specialists-2",
}),
content_type="application/json"
)
data = response.get_data(as_text=True)
assert response.status_code == 400
assert "Could not commit" in json.loads(data)["error"]
expiring_framework_briefs = Brief.query.filter(Brief.framework_id == 101).all()
going_live_framework_briefs = Brief.query.filter(Brief.framework_id == 102).all()
assert len(expiring_framework_briefs) == 7
assert not going_live_framework_briefs
if commit_to_fail_on == 'frameworks':
assert Framework.query.get(101).status == "live"
assert Framework.query.get(102).status == "standstill"
else:
assert Framework.query.get(101).status == "expired"
assert Framework.query.get(102).status == "live"
| mit | 1,454,688,783,343,137,500 | 43.083333 | 120 | 0.565552 | false |
andersjel/boxes | src/boxes/context.py | 1 | 1227 | """
boxes.context
-------------
.. autoclass:: Context()
:members:
"""
import symmath
import boxes.box
class Context:
"""
.. attribute:: system
The underlying :class:`~symmath.system.System` holding all equations
constraining the layout.
.. attribute:: is_solved
A :class:`bool` indicating whether :func:`solve` has been called.
"""
def __init__(self):
self.system = symmath.System()
self.num_symbols = 0
self.is_solved = False
def equate(self, x, y):
"""
Add a constraint setting *x == y*.
"""
self.system.equate(x, y)
def solve(self):
"""
Solve the layout. This function raises an error if the layout is not fully
defined.
"""
for n in range(self.num_symbols):
assert n in self.system.facts
self.is_solved = True
def sym(self):
"""
Create an expression (of type :class:`symmath.expr.Expr`) representing a
fresh symbol unused in this context.
"""
n = self.num_symbols
self.num_symbols += 1
return symmath.sym(n)
def box(self, *args, **kwargs):
"""
Construct a :class:`~boxes.box.Box` using this context.
"""
return boxes.box.Box(self, *args, **kwargs)
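# Usage sketch (illustrative addition, not part of the original module; it
# assumes symmath expressions support ordinary arithmetic with numbers, which
# is how boxes.box.Box uses them):
#
#   ctx = Context()
#   width, height = ctx.sym(), ctx.sym()
#   ctx.equate(width, 2 * height)   # fix the aspect ratio
#   ctx.equate(height, 100)         # pin the height
#   ctx.solve()                     # asserts every symbol is now determined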
| mit | 5,090,945,779,129,099,000 | 18.171875 | 80 | 0.599022 | false |
ingenieroariel/pinax | apps/tribes/views.py | 1 | 10389 | from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseForbidden, Http404
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.conf import settings
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
else:
notification = None
try:
from friends.models import Friendship
friends = True
except ImportError:
friends = False
try:
from threadedcomments.models import ThreadedComment
forums = True
except ImportError:
forums = False
try:
from wiki.models import Article
from wiki.views import get_ct
wiki = True
except ImportError:
wiki = False
from tribes.models import Tribe, Topic
from tribes.forms import *
from microblogging.models import TweetInstance
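# Correlated subqueries; fed to QuerySet.extra(select=...) below so the tribe
# listing can be ordered by topic or member count directly in SQL.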
TOPIC_COUNT_SQL = """
SELECT COUNT(*)
FROM tribes_topic
WHERE tribes_topic.tribe_id = tribes_tribe.id
"""
MEMBER_COUNT_SQL = """
SELECT COUNT(*)
FROM tribes_tribe_members
WHERE tribes_tribe_members.tribe_id = tribes_tribe.id
"""
from schedule.models import Calendar, CalendarRelation
def create(request, form_class=TribeForm, template_name="tribes/create.html"):
if request.user.is_authenticated() and request.method == "POST":
if request.POST["action"] == "create":
tribe_form = form_class(request.POST)
if tribe_form.is_valid():
tribe = tribe_form.save(commit=False)
tribe.creator = request.user
tribe.save()
tribe.members.add(request.user)
tribe.save()
# @@@ this is just temporary to give tribes a single calendar -- will revisit during whole
# tribe/project merge effort
calendar = Calendar(name = "%s Calendar" % tribe.name)
calendar.save()
CalendarRelation.objects.create_relation(calendar, tribe, distinction="default", inheritable=True)
if notification:
# @@@ might be worth having a shortcut for sending to all users
notification.send(User.objects.all(), "tribes_new_tribe", {"tribe": tribe}, queue=True)
if friends: # @@@ might be worth having a shortcut for sending to all friends
notification.send((x['friend'] for x in Friendship.objects.friends_for_user(tribe.creator)), "tribes_friend_tribe", {"tribe": tribe})
#return render_to_response("base.html", {
#}, context_instance=RequestContext(request))
return HttpResponseRedirect(tribe.get_absolute_url())
else:
tribe_form = form_class()
else:
tribe_form = form_class()
return render_to_response(template_name, {
"tribe_form": tribe_form,
}, context_instance=RequestContext(request))
def tribes(request, template_name="tribes/tribes.html", order=None):
tribes = Tribe.objects.filter(deleted=False)
search_terms = request.GET.get('search', '')
if search_terms:
tribes = (tribes.filter(name__icontains=search_terms) |
tribes.filter(description__icontains=search_terms))
if order == 'least_topics':
tribes = tribes.extra(select={'topic_count': TOPIC_COUNT_SQL})
tribes = tribes.order_by('topic_count')
elif order == 'most_topics':
tribes = tribes.extra(select={'topic_count': TOPIC_COUNT_SQL})
tribes = tribes.order_by('-topic_count')
elif order == 'least_members':
tribes = tribes.extra(select={'member_count': MEMBER_COUNT_SQL})
tribes = tribes.order_by('member_count')
elif order == 'most_members':
tribes = tribes.extra(select={'member_count': MEMBER_COUNT_SQL})
tribes = tribes.order_by('-member_count')
elif order == 'name_ascending':
tribes = tribes.order_by('name')
elif order == 'name_descending':
tribes = tribes.order_by('-name')
elif order == 'date_oldest':
tribes = tribes.order_by('-created')
elif order == 'date_newest':
tribes = tribes.order_by('created')
context = {
'tribes': tribes,
'search_terms': search_terms,
'order': order,
}
return render_to_response(
template_name,
context,
context_instance=RequestContext(request)
)
def delete(request, slug, redirect_url=None):
tribe = get_object_or_404(Tribe, slug=slug)
if not redirect_url:
redirect_url = reverse('tribe_list')
# @@@ eventually, we'll remove restriction that tribe.creator can't leave tribe but we'll still require tribe.members.all().count() == 1
if request.user.is_authenticated() and request.method == "POST" and request.user == tribe.creator and tribe.members.all().count() == 1:
tribe.deleted = True
tribe.save()
request.user.message_set.create(message="Tribe %s deleted." % tribe)
# @@@ no notification as the deleter must be the only member
return HttpResponseRedirect(redirect_url)
def your_tribes(request, template_name="tribes/your_tribes.html"):
return render_to_response(template_name, {
"tribes": Tribe.objects.filter(deleted=False, members=request.user).order_by("name"),
}, context_instance=RequestContext(request))
your_tribes = login_required(your_tribes)
def tribe(request, slug, form_class=TribeUpdateForm,
template_name="tribes/tribe.html"):
tribe = get_object_or_404(Tribe, slug=slug)
if tribe.deleted:
raise Http404
photos = tribe.photos.all()
if request.user.is_authenticated() and request.method == "POST":
if request.POST["action"] == "update" and request.user == tribe.creator:
tribe_form = form_class(request.POST, instance=tribe)
if tribe_form.is_valid():
tribe = tribe_form.save()
else:
tribe_form = form_class(instance=tribe)
if request.POST["action"] == "join":
tribe.members.add(request.user)
request.user.message_set.create(message="You have joined the tribe %s" % tribe.name)
if notification:
notification.send([tribe.creator], "tribes_created_new_member", {"user": request.user, "tribe": tribe})
notification.send(tribe.members.all(), "tribes_new_member", {"user": request.user, "tribe": tribe})
if friends: # @@@ might be worth having a shortcut for sending to all friends
notification.send((x['friend'] for x in Friendship.objects.friends_for_user(request.user)), "tribes_friend_joined", {"user": request.user, "tribe": tribe})
elif request.POST["action"] == "leave":
tribe.members.remove(request.user)
request.user.message_set.create(message="You have left the tribe %s" % tribe.name)
if notification:
pass # @@@
else:
tribe_form = form_class(instance=tribe)
topics = tribe.topics.all()[:5]
articles = Article.objects.filter(
content_type=get_ct(tribe),
object_id=tribe.id).order_by('-last_update')
total_articles = articles.count()
articles = articles[:5]
tweets = TweetInstance.objects.tweets_for(tribe).order_by("-sent")
are_member = request.user in tribe.members.all()
return render_to_response(template_name, {
"tribe_form": tribe_form,
"tribe": tribe,
"photos": photos,
"topics": topics,
"articles": articles,
"tweets": tweets,
"total_articles": total_articles,
"are_member": are_member,
}, context_instance=RequestContext(request))
def topics(request, slug, form_class=TopicForm,
template_name="tribes/topics.html"):
tribe = get_object_or_404(Tribe, slug=slug)
if tribe.deleted:
raise Http404
are_member = False
if request.user.is_authenticated():
are_member = request.user in tribe.members.all()
if request.method == "POST":
if request.user.is_authenticated():
if are_member:
topic_form = form_class(request.POST)
if topic_form.is_valid():
topic = topic_form.save(commit=False)
topic.tribe = tribe
topic.creator = request.user
topic.save()
request.user.message_set.create(message="You have started the topic %s" % topic.title)
if notification:
notification.send(tribe.members.all(), "tribes_new_topic", {"topic": topic})
topic_form = form_class() # @@@ is this the right way to reset it?
else:
request.user.message_set.create(message="You are not a member and so cannot start a new topic")
topic_form = form_class()
else:
return HttpResponseForbidden()
else:
topic_form = form_class()
return render_to_response(template_name, {
"tribe": tribe,
"topic_form": topic_form,
"are_member": are_member,
}, context_instance=RequestContext(request))
def topic(request, id, edit=False, template_name="tribes/topic.html"):
topic = get_object_or_404(Topic, id=id)
if topic.tribe.deleted:
raise Http404
if request.method == "POST" and edit == True and \
(request.user == topic.creator or request.user == topic.tribe.creator):
topic.body = request.POST["body"]
topic.save()
return HttpResponseRedirect(reverse('tribe_topic', args=[topic.id]))
return render_to_response(template_name, {
'topic': topic,
'edit': edit,
}, context_instance=RequestContext(request))
def topic_delete(request, pk):
topic = Topic.objects.get(pk=pk)
if topic.tribe.deleted:
raise Http404
if request.method == "POST" and (request.user == topic.creator or \
request.user == topic.tribe.creator):
if forums:
ThreadedComment.objects.all_for_object(topic).delete()
topic.delete()
return HttpResponseRedirect(request.POST["next"])
| mit | -3,486,418,312,111,164,400 | 38.804598 | 175 | 0.626624 | false |
peterhinch/Micropython-scheduler | switch.py | 1 | 2238 | # Switch class for Micropython and scheduler.
# Author: Peter Hinch
# Copyright Peter Hinch 2016 Released under the MIT license
import pyb
from usched import Timeout
# ************************************************** SWITCH CLASS ***************************************************
# The purpose of the switch object is to work with event driven programming using the scheduler. The switch is polled:
# there is usually no merit in using interrupts for a manually operated switch, especially with a scheduler using
# cooperative multi threading.
# A switch object will call user supplied open and close callback functions when the switch is operated. Polling is
# done by running a thread which runs forever and is done in a way to ensure debouncing.
# The switch is presumed to be wired from the assigned pin to ground.
class Switch(object):
DEBOUNCETIME = 0.02
def __init__(self, objSched, pinName, close_func=None, close_func_args=(), open_func=None, open_func_args=()):
self.pin = pyb.Pin(pinName, pyb.Pin.IN, pyb.Pin.PULL_UP) # Initialise for input, switch to ground
self.close_func = close_func
self.close_func_args = close_func_args
self.open_func = open_func
self.open_func_args = open_func_args
self.switchstate = self.pin.value() # Get initial state
objSched.add_thread(self.switchcheck()) # Thread runs forever
def __call__(self):
return self.switchstate # Return current state of switch (0 = pressed)
def switchcheck(self): # Generator object: thread which tests and debounces
wf = Timeout(Switch.DEBOUNCETIME)
while True:
state = self.pin.value()
if state != self.switchstate: # State has changed: act on it now.
self.switchstate = state
if state == 0 and self.close_func:
self.close_func(*self.close_func_args)
elif state == 1 and self.open_func:
self.open_func(*self.open_func_args)
yield wf() # Ignore further state changes until switch has settled
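# Usage sketch (illustrative, not from the original file; the pin name and the
# scheduler setup are assumptions based on the usched examples):
#
#   from usched import Sched
#
#   def closed():
#       print('switch closed')
#
#   def opened():
#       print('switch opened')
#
#   objSched = Sched()
#   Switch(objSched, 'X5', close_func=closed, open_func=opened)
#   objSched.run()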
| mit | 4,681,996,483,596,706,000 | 52.285714 | 118 | 0.604558 | false |
okrt/usbtransfer | mainui.py | 1 | 8944 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main.ui'
#
# Created: Sat Nov 12 05:33:33 2011
# by: PyQt4 UI code generator 4.8.5
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(340, 320)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(340, 320))
MainWindow.setMaximumSize(QtCore.QSize(340, 320))
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "USBTransfer", None, QtGui.QApplication.UnicodeUTF8))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/main/images/usbtransfer.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
MainWindow.setWindowOpacity(1.0)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.frame = QtGui.QFrame(self.centralwidget)
self.frame.setGeometry(QtCore.QRect(-1, -1, 342, 321))
self.frame.setStyleSheet(_fromUtf8("background-image: url(:/main/images/background.png);\n"
""))
self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.welcometext = QtGui.QLabel(self.frame)
self.welcometext.setGeometry(QtCore.QRect(10, 10, 241, 41))
self.welcometext.setStyleSheet(_fromUtf8("font: 30pt \"Myriad Pro\";\n"
"background-image:url(:/main/images/trans.png);\n"
"color: rgb(57, 57, 57);"))
self.welcometext.setText(QtGui.QApplication.translate("MainWindow", "Welcome", None, QtGui.QApplication.UnicodeUTF8))
self.welcometext.setObjectName(_fromUtf8("welcometext"))
self.widget = QtGui.QWidget(self.frame)
self.widget.setGeometry(QtCore.QRect(250, 0, 92, 92))
self.widget.setMinimumSize(QtCore.QSize(92, 92))
self.widget.setMaximumSize(QtCore.QSize(92, 92))
self.widget.setToolTip(_fromUtf8(""))
self.widget.setStyleSheet(_fromUtf8("background-image:url(:/main/images/usbicon.png);\n"
"border:none;"))
self.widget.setObjectName(_fromUtf8("widget"))
self.versionLabel = QtGui.QLabel(self.frame)
self.versionLabel.setGeometry(QtCore.QRect(110, 300, 221, 20))
self.versionLabel.setStyleSheet(_fromUtf8("background-image:url(:/main/images/trans.png);\n"
"\n"
"color.rgb(0, 0, 0);"))
self.versionLabel.setText("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#0000ff;\">USBTransfer v3.0.1</span><span style=\" font-size:8pt;\"> - oguzkirat.com</span></p></body></html>")
self.versionLabel.setObjectName(_fromUtf8("versionLabel"))
self.transferlabel = QtGui.QLabel(self.frame)
self.transferlabel.setGeometry(QtCore.QRect(10, 45, 231, 20))
self.transferlabel.setLayoutDirection(QtCore.Qt.LeftToRight)
self.transferlabel.setStyleSheet(_fromUtf8("background-image:url(:/main/images/trans.png);\n"
"color: rgb(57, 57, 57);\n"
""))
self.transferlabel.setText("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt;\">"+QtGui.QApplication.translate("MainWindow", "Transfer distributions to USB drive", None, QtGui.QApplication.UnicodeUTF8)+"</span></p></body></html>")
self.transferlabel.setObjectName(_fromUtf8("transferlabel"))
self.Btn_cddvd = QtGui.QPushButton(self.frame)
self.Btn_cddvd.setGeometry(QtCore.QRect(30, 134, 277, 44))
self.Btn_cddvd.setMinimumSize(QtCore.QSize(277, 44))
self.Btn_cddvd.setMaximumSize(QtCore.QSize(277, 44))
self.Btn_cddvd.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.Btn_cddvd.setToolTip(_fromUtf8(""))
self.Btn_cddvd.setStyleSheet(_fromUtf8("border:none;\n"
"background-image: url(:/main/images/cddvd.png);\n"
""))
self.Btn_cddvd.setText(QtGui.QApplication.translate("MainWindow", "CD or DVD", None, QtGui.QApplication.UnicodeUTF8))
self.Btn_cddvd.setObjectName(_fromUtf8("Btn_cddvd"))
self.Btn_isoimg = QtGui.QPushButton(self.frame)
self.Btn_isoimg.setGeometry(QtCore.QRect(30, 85, 277, 44))
self.Btn_isoimg.setMinimumSize(QtCore.QSize(277, 44))
self.Btn_isoimg.setMaximumSize(QtCore.QSize(277, 44))
self.Btn_isoimg.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.Btn_isoimg.setToolTip(_fromUtf8(""))
self.Btn_isoimg.setStyleSheet(_fromUtf8("border:none;\n"
"background-image: url(:/main/images/isoimg.png);\n"
""))
self.Btn_isoimg.setText(QtGui.QApplication.translate("MainWindow", "ISO or IMG Image", None, QtGui.QApplication.UnicodeUTF8))
self.Btn_isoimg.setObjectName(_fromUtf8("Btn_isoimg"))
self.toolslabel = QtGui.QLabel(self.frame)
self.toolslabel.setGeometry(QtCore.QRect(10, 179, 301, 16))
self.toolslabel.setStyleSheet(_fromUtf8("background-image:url(:/main/images/trans.png);\n"
"\n"
"color.rgb(0, 0, 0);"))
self.toolslabel.setText(QtGui.QApplication.translate("MainWindow", "Tools and Utilities", None, QtGui.QApplication.UnicodeUTF8))
self.toolslabel.setObjectName(_fromUtf8("toolslabel"))
self.Btn_download = QtGui.QPushButton(self.frame)
self.Btn_download.setGeometry(QtCore.QRect(30, 198, 277, 44))
self.Btn_download.setMinimumSize(QtCore.QSize(277, 44))
self.Btn_download.setMaximumSize(QtCore.QSize(277, 44))
self.Btn_download.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.Btn_download.setToolTip(_fromUtf8(""))
self.Btn_download.setStyleSheet(_fromUtf8("border:none;\n"
"background-image: url(:/main/images/download.png);\n"
""))
self.Btn_download.setText(QtGui.QApplication.translate("MainWindow", "Download a distribution", None, QtGui.QApplication.UnicodeUTF8))
self.Btn_download.setObjectName(_fromUtf8("Btn_download"))
self.Btn_format = QtGui.QPushButton(self.frame)
self.Btn_format.setGeometry(QtCore.QRect(30, 248, 277, 44))
self.Btn_format.setMinimumSize(QtCore.QSize(277, 44))
self.Btn_format.setMaximumSize(QtCore.QSize(277, 44))
self.Btn_format.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.Btn_format.setToolTip(_fromUtf8(""))
self.Btn_format.setStyleSheet(_fromUtf8("border:none;\n"
"background-image: url(:/main/images/format.png);\n"
""))
self.Btn_format.setText(QtGui.QApplication.translate("MainWindow", "Format drive", None, QtGui.QApplication.UnicodeUTF8))
self.Btn_format.setObjectName(_fromUtf8("Btn_format"))
self.wherelabel = QtGui.QLabel(self.frame)
self.wherelabel.setGeometry(QtCore.QRect(10, 63, 321, 16))
self.wherelabel.setStyleSheet(_fromUtf8("background-image:url(:/main/images/trans.png);\n"
"color.rgb(0, 0, 0);"))
self.wherelabel.setText(QtGui.QApplication.translate("MainWindow", "Where do you want to transfer from?", None, QtGui.QApplication.UnicodeUTF8))
self.wherelabel.setObjectName(_fromUtf8("wherelabel"))
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
pass
import resources_rc
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
MainWindow = QtGui.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
| gpl-2.0 | 3,045,485,479,885,092,400 | 56.703226 | 320 | 0.692867 | false |
maratsh/getsecure | secure.py | 1 | 1261 | # -*- coding: utf-8 -*-
import hashlib
from base64 import b64encode
from datetime import datetime, timedelta
from urllib.parse import urlparse
def secure_link(baselink, secret, period=30):
"""
:param baselink: base url for signing
:param secret: secret string shared only with web server
:param period: optional period in days
:return: signed link as str
"""
url = urlparse(baselink)
expires = int((datetime.now() + timedelta(days=period)).timestamp())
hashstring = '{e}{u} {s}'.format(e=expires, u=url.path, s=secret)
m = hashlib.md5()
m.update(bytes(hashstring, encoding='utf-8'))
protection_string = b64encode(m.digest(), altchars=b'-_').replace(b'=', b'').decode("ascii")
protected_link = '{b}?md5={p}&expires={e}'.format(b=baselink, p=protection_string, e=expires)
return protected_link
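# The check below is not part of the original module: it is a hypothetical
# server-side counterpart, shown for illustration, that mirrors how
# secure_link() builds its digest (nginx secure_link_md5-style).
def verify_link(protected_link, secret):
    """Recompute the digest for a link produced by secure_link() and check
    that it matches and has not expired (hypothetical helper)."""
    from urllib.parse import parse_qs
    url = urlparse(protected_link)
    params = parse_qs(url.query)
    expires = int(params['expires'][0])
    if datetime.now().timestamp() > expires:
        return False  # link already expired
    hashstring = '{e}{u} {s}'.format(e=expires, u=url.path, s=secret)
    m = hashlib.md5()
    m.update(bytes(hashstring, encoding='utf-8'))
    digest = b64encode(m.digest(), altchars=b'-_').replace(b'=', b'').decode('ascii')
    return digest == params['md5'][0]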
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Utility for securing expiring links')
parser.add_argument('baselink')
parser.add_argument('secret', type=str)
parser.add_argument('--period', default=30, type=int)
options = parser.parse_args()
link = secure_link(options.baselink, options.secret, options.period)
print(link)
| mit | -8,140,245,217,300,452,000 | 26.413043 | 97 | 0.674068 | false |
Chasvortex/caffe-gui-tool | __init__.py | 1 | 5980 | __author__ = 'H'
bl_info = {
'name': 'Caffe-Gui-Tool',
'author': 'Hugh Tomkins',
'location': 'Node view - Properties panel',
'category': 'Node View'
}
# To support reload properly, try to access a package var,
# if it's there, reload everything
if "bpy" in locals():
import imp
imp.reload(IOwriteprototxt)
imp.reload(IOcexp)
imp.reload(CGTNodes)
imp.reload(IOloadprototxt)
imp.reload(CGTArrangeHelper)
imp.reload(CGTGraph)
print("Reloaded multifiles")
else:
from . import IOwriteprototxt, CGTNodes, IOloadprototxt, CGTArrangeHelper, CGTGraph, IOcexp
print("Imported multifiles")
import bpy
import random
from bpy.props import *
def getactivefcurve():
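    """Return (activecurve, activeobject) for the single selected f-curve;
    (None, None) when no curve is selected and (False, False) when more than
    one curve is selected."""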
ncurves = 0
for object in bpy.context.selected_objects:
if object.animation_data:
if object.animation_data.action:
for curve in object.animation_data.action.fcurves.items():
if curve[1].select:
ncurves += 1
activeobject = object
activecurve = curve[1]
if ncurves == 1:
return activecurve, activeobject
elif ncurves == 0:
return None, None
else:
return False, False
def initSceneProperties():
bpy.types.Scene.traintest = bpy.props.StringProperty(
name="Train Test Prototxt",
default="",
description="Get the path to the data",
subtype='FILE_PATH'
)
bpy.types.Scene.solver = bpy.props.StringProperty(
name="Solver Prototxt",
default="",
description="Get the path to the data",
subtype='FILE_PATH'
)
bpy.types.Scene.deploy = bpy.props.StringProperty(
name="Deploy (optional) Prototxt",
default="",
description="Get the path to the data",
subtype='FILE_PATH'
)
bpy.types.Scene.savetempdata = bpy.props.StringProperty(
name="Log folder",
default="",
description="Folder in which to store saved graphs and log data",
subtype='DIR_PATH'
)
bpy.types.Scene.loadtempdata = bpy.props.StringProperty(
name="Log file to Load",
default="",
description="File to load tree and curve from",
subtype='FILE_PATH'
)
bpy.types.Scene.comment = bpy.props.StringProperty(
name="Comment",
default="",
description="Add a comment that helps identify the current experiment"
)
bpy.types.Scene.filecomment = bpy.props.StringProperty(
name="Filename",
default="",
description="Add a string to beginning of filename to describe experiment"
)
bpy.types.Scene.loadtree = bpy.props.BoolProperty(
name="Load Node tree",
default=1,
description="Load the node tree from .cexp"
)
bpy.types.Scene.loadloss = bpy.props.BoolProperty(
name="Load Loss graphs",
default=1,
description="Load the loss data from .cexp"
)
bpy.types.Scene.donetraining = bpy.props.IntProperty(default=1)
return
initSceneProperties()
class LoadDialogPanel(bpy.types.Panel):
bl_label = "Load Prototxt"
bl_space_type = "NODE_EDITOR"
bl_region_type = "UI"
def draw(self, context):
scn = context.scene
self.layout.prop(scn, "traintest")
self.layout.prop(scn, "solver")
self.layout.prop(scn, "deploy")
self.layout.operator("nodes.load_solver")
class RunDialogPanel(bpy.types.Panel):
bl_label = "Run Caffe"
bl_space_type = "NODE_EDITOR"
bl_region_type = "UI"
def draw(self, context):
scn = bpy.context.scene
self.layout.operator("nodes.make_solver")
self.layout.prop(scn, "savetempdata")
self.layout.prop(scn, "filecomment")
self.layout.prop(scn, "comment")
self.layout.operator("nodes.run_solver")
self.layout.operator("nodes.cancel_solver")
class GraphInfoPanel(bpy.types.Panel):
bl_label = "Selected loss plot"
bl_space_type = "GRAPH_EDITOR"
bl_region_type = "UI"
def draw(self, context):
activecurve, activeobject = getactivefcurve()
if activecurve == None:
self.layout.label("No curve selected")
elif not activecurve:
self.layout.label("Multiple curves selected")
self.layout.label("Select a single curve to view comments")
else:
try:
self.layout.label(activeobject["comment"])
except KeyError:
self.layout.label("No comment")
self.layout.operator("nodes.load_tree_from_curve")
if activeobject["originaltree"] != '':
self.layout.label("Original tree loaded to:")
self.layout.label(activeobject["originaltree"])
class CexpLoadPanel(bpy.types.Panel):
bl_label = "Load experiment"
bl_space_type = "NODE_EDITOR"
bl_region_type = "UI"
def draw(self, context):
scn = context.scene
self.layout.prop(scn, "loadtempdata")
self.layout.prop(scn, "loadtree")
self.layout.prop(scn, "loadloss")
self.layout.operator("nodes.load_trained_solver")
def register():
bpy.utils.register_class(RunDialogPanel)
bpy.utils.register_class(LoadDialogPanel)
bpy.utils.register_class(CexpLoadPanel)
bpy.utils.register_class(GraphInfoPanel)
# bpy.utils.register_module(__name__)
CGTArrangeHelper.register()
CGTGraph.register()
IOwriteprototxt.register()
CGTNodes.register()
IOloadprototxt.register()
IOcexp.register()
def unregister():
bpy.utils.unregister_class(RunDialogPanel)
bpy.utils.unregister_class(LoadDialogPanel)
bpy.utils.unregister_class(CexpLoadPanel)
bpy.utils.unregister_class(GraphInfoPanel)
CGTArrangeHelper.unregister()
CGTGraph.unregister()
IOwriteprototxt.unregister()
CGTNodes.unregister()
IOcexp.unregister()
# bpy.utils.unregister_module(__name__)
| unlicense | 7,756,089,098,321,866,000 | 29.824742 | 95 | 0.632441 | false |
Swind/TuringCoffee | src/utils/channel.py | 1 | 1035 | import msgpack
from nanomsg import (
PUB,
SUB,
SUB_SUBSCRIBE,
PAIR,
DONTWAIT,
Socket,
NanoMsgAPIError,
EAGAIN
)
class Channel(object):
type_map = {
'Sub': SUB,
'Pub': PUB,
'Pair': PAIR
}
def __init__(self, address, channel_type, is_server):
self.__socket = Socket(self.type_map[channel_type])
if is_server:
self.__socket.bind(address)
else:
self.__socket.connect(address)
if channel_type == 'Sub':
self.__socket.set_string_option(SUB, SUB_SUBSCRIBE, '')
def recv(self, blocking=True):
if blocking:
result = self.__socket.recv()
else:
try:
result = self.__socket.recv(flags=DONTWAIT)
except NanoMsgAPIError as error:
if error.errno == EAGAIN:
return None
return msgpack.unpackb(result)
def send(self, msg):
return self.__socket.send(msgpack.packb(msg))
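# Usage sketch (illustrative addition, not part of the original module; the
# address and payload are made-up examples):
#
#   pub = Channel('tcp://127.0.0.1:5555', 'Pub', is_server=True)
#   sub = Channel('tcp://127.0.0.1:5555', 'Sub', is_server=False)
#   pub.send({'temperature': 92.5})
#   print(sub.recv())                # blocks until a message arrives
#   print(sub.recv(blocking=False))  # returns None when nothing is queued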
| mit | -2,679,298,421,624,794,600 | 20.5625 | 71 | 0.527536 | false |
kedz/cuttsum | old/resource-install-scripts/text2lm_input.py | 1 | 3011 | import os
import argparse
import re
from collections import defaultdict
import gzip
import codecs
import sys
import nltk.data
from nltk.tokenize.punkt import PunktWordTokenizer
import multiprocessing as mp
def main():
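    """Fan the text files in `tdir` out across one job per CPU core, tokenise
    them into one sentence per line, then concatenate the per-job outputs
    into `ofile`."""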
tdir, ofile = parse_cmdline()
files = [fname for fname in os.listdir(tdir)]
nfiles = len(files)
cpus = mp.cpu_count()
    jobsize = max(1, nfiles / cpus)  # guard against a zero xrange step when there are fewer files than CPUs
jobs = []
tmpfiles = []
pid = 0
for i in xrange(0, nfiles, jobsize):
tmpfile = ofile+'_{}'.format(pid)
jobs.append((tdir, files[i:i+jobsize], tmpfile))
tmpfiles.append(tmpfile)
pid += 1
pool = mp.Pool(cpus)
x = pool.map_async(worker, jobs)
x.get()
pool.close()
pool.join()
from subprocess import call
if os.path.exists(ofile):
os.remove(ofile)
with open(ofile, 'a') as f:
for tmpfile in tmpfiles:
call(['cat', tmpfile], stdout=f)
os.remove(tmpfile)
print "Completed processing files in ", tdir
def worker(args):
sent_detector = nltk.data.load('tokenizers/punkt/english.pickle')
word_detector = PunktWordTokenizer()
def split_sentences(txt):
sents = sent_detector.tokenize(txt.strip(),
realign_boundaries=True)
for sent in sents:
sstr = u' '.join(word for word
in word_detector.tokenize(sent))
tkns = filter(None, sstr.split(u' '))
if len(tkns) > 0:
yield u' '.join(tkns)
tdir, txt_files, ofile = args
with codecs.open(ofile, 'w', 'utf-8') as of:
for fname in txt_files:
txt_file = os.path.join(tdir, fname)
with codecs.open(txt_file, 'r', 'utf-8') as f:
text = u' '.join(f.readlines())
for sent in split_sentences(text):
of.write(sent)
of.write(u'\n')
of.flush()
print 'Completed', txt_file
def parse_cmdline():
parser = argparse.ArgumentParser()
parser.add_argument('-t', '--text-dir',
help=u'Location of text data directory.',
type=unicode, required=True)
parser.add_argument('-of', '--output-file',
help=u'Output location to write text files.',
type=unicode, required=True)
args = parser.parse_args()
tdir = args.text_dir
ofile = args.output_file
if not os.path.exists(tdir) or not os.path.isdir(tdir):
sys.stderr.write((u'--text-dir argument {} either does not exist '
+ u'or is not a directory!\n').format(tdir))
sys.stderr.flush()
sys.exit()
odir = os.path.dirname(ofile)
if odir != '' and not os.path.exists(odir):
os.makedirs(odir)
return tdir, ofile
if __name__ == u'__main__':
main()
| apache-2.0 | -7,365,270,911,938,395,000 | 26.126126 | 84 | 0.538691 | false |
zooniverse/aggregation | engine/rectangle_clustering.py | 1 | 4950 | from __future__ import print_function
import clustering
import agglomerative
import networkx
import numpy as np
class AreaRectangleClustering(clustering.Cluster):
def __init__(self,shape,project,additional_params):
assert shape != "point"
clustering.Cluster.__init__(self,shape,project,additional_params)
self.rectangle = (shape == "rectangle") or (shape == "image")
def __overlap__(self,l1,l2):
"""
do two lines overlap? assume horizontal lines - if vertical then you need to flip coordinates before calling
:param l1:
:param l2:
:return:
"""
(l1_a,l1_b) = l1
(l2_a,l2_b) = l2
# l2 ends before l1 begins
if l2_b < l1_a:
return False
# l2 starts after l1 ends
elif l2_a > l1_b:
return False
else:
return True
def __overlap_graph__(self,markings):
"""
given a set of rectangle markings return a graph where each node corresponds to a rectangle
and an edge exists iff two rectangles overlap
:param markings:
:return:
"""
g = networkx.Graph()
g.add_nodes_from(range(len(markings)))
# go through each pair of rectangles and see if they overlap
for i,((x1,y1),_,(x2,y2),_) in enumerate(markings):
for j,((m1,n1),_,(m2,n2),_) in list(enumerate(markings))[i+1:]:
# do these rectangles overlap on the x axis?
overlap_x = self.__overlap__((x1,x2),(m1,m2))
if not overlap_x:
continue
# and on the y axis?
overlap_y = self.__overlap__((y1,y2),(n1,n2))
if not overlap_y:
continue
# we know that these rectangles overlap
g.add_edge(i,j)
return g
def __median_rectangles__(self,markings):
"""
given a set of rectangles (which should represent a clique)
create a "representative" rectangle based on median corners
:param markings:
:return:
"""
# don't assume that all rectangles will be in the same order
# e.g. don't assume that the first point is the lower left hand corner
maximum_x = [max(m[0][0],m[2][0]) for m in markings]
minimum_x = [min(m[0][0],m[2][0]) for m in markings]
maximum_y = [max(m[0][1],m[2][1]) for m in markings]
minimum_y = [min(m[0][1],m[2][1]) for m in markings]
x_top = np.median(maximum_x)
x_bot = np.median(minimum_x)
y_top = np.median(maximum_y)
y_bot = np.median(minimum_y)
return (x_top,y_top),(x_bot,y_bot)
def __cluster__(self,markings,user_ids,tools,reduced_markings,dimensions,subject_id):
"""
main clustering algorithm - works on a single per-subject basis
for rectangles, doesn't make use of reduced_markings
:param markings:
:param user_ids:
:param tools:
:param reduced_markings:
:param dimensions:
:param subject_id:
:return:
"""
# if empty markings, just return nothing
if markings == []:
return [],0
results = []
overlap_graph = self.__overlap_graph__(markings)
# each clique is a group of markings which all refer to the same region on the page
# go through each clique
for c in networkx.find_cliques(overlap_graph):
# ignore any clique with less than 3 markings in it
            if len(c) < 3:
continue
# get the specific markings in this clique and their corresponding tools
clique = [markings[i] for i in c]
tools_in_clique = [tools[i] for i in c]
# create the new cluster based on this clique
new_cluster = dict()
new_cluster["center"] = self.__median_rectangles__(clique)
new_cluster["cluster members"] = clique
new_cluster["users"] = [user_ids[i] for i in c]
# the tools used by each person with a rectangle in this cluster
new_cluster["tools"] = tools_in_clique
new_cluster["image area"] = None
results.append(new_cluster)
return results,0
class RectangleClustering(agglomerative.Agglomerative):
def __init__(self,shape,project,additional_params):
clustering.Cluster.__init__(self,shape,None,additional_params)
def __cluster_center__(self,pts):
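        """The cluster centre is the component-wise median of the member points."""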
center = list(np.median(pts,axis=0))
assert len(center) == 2
return center | apache-2.0 | -6,786,756,923,071,207,000 | 33.622378 | 116 | 0.570909 | false |
6-Degrees/Instagram_data | examples/user_interest_analysis/dictionary_testor.py | 1 | 7264 | from Instagram_Spider import *
from nltk.stem import WordNetLemmatizer
from nltk.corpus import words
from nltk.corpus import wordnet as wn
from nltk.corpus import wordnet_ic
import time
def load_dictionary(dict_name):
file = open(dict_name, 'r')
dict_data = json.load(file)
file.close()
return dict_data
def clean_up_string(old_string):
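    """Keep only ASCII letters and lowercase the result,
    e.g. 'Sunset_2016!' -> 'sunset'."""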
characters = 'QWERTYUIOPASDFGHJKLZXCVBNMqwertyuiopasdfghjklzxcvbnm'
new_string = ''
for char in old_string:
if char in characters:
new_string += char
return new_string.lower()
def combine_dictionary(official_word_list, dictionary):
official_word_list1 = list(official_word_list)
for category in dictionary:
word_list = dictionary[category]
for word in word_list:
official_word_list1.append(word)
official_word_list2 = set(official_word_list1)
return official_word_list2
def successful_rate(successful_list, fail_list):
successful_number = 0
fail_number = 0
for tag_pair in successful_list:
successful_number += tag_pair[1]
for tag_pair in fail_list:
fail_number += tag_pair[1]
my_rate = successful_number/(successful_number+fail_number)
return my_rate
def tag2word(tag_list):
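    """Greedily split each (tag, count) pair into known English words; returns
    the matched (word, count) pairs and the fragments that stayed unmatched."""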
result_list = list()
unsolved_list = list()
one_tenth = int(len(tag_list)/10)
current_number = 0
progress = 0
for tag_pair in tag_list:
current_number += 1
if current_number > one_tenth:
progress += 1
current_number = 0
print('finish ' + str(progress) + '0%')
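        # Greedy longest-prefix segmentation: repeatedly peel the longest
        # dictionary word off the front of the cleaned tag
        # (e.g. 'sunsetbeach' -> 'sunset' + 'beach').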
        tag = clean_up_string(tag_pair[0])
pos = len(tag)
while pos > 1:
word = wordnet_lemmatizer.lemmatize(tag[0:pos])
if word in wordlist:
result_list.append((word, tag_pair[1]))
tag = tag[pos:]
pos = len(tag)
else:
pos -= 1
if len(tag) > 1:
unsolved_list.append((tag, tag_pair[1]))
print('done...')
return result_list, unsolved_list
def analyze_words(my_words, dictionary):
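    """Bucket words into the tag dictionary's categories: direct membership
    first, then a WordNet Resnik-similarity fallback (brown_ic corpus) that
    can also grow the dictionary; returns per-category similarity scores, the
    recognition rate, the word distribution and category percentages."""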
similarity_dictionary = dict()
local_similarity_dictionary = dict()
distribution_dictionary = dict()
total_number = 0
valid_word_count = 0
for category in dictionary:
similarity_dictionary[category] = 0
local_similarity_dictionary[category] = 0
distribution_dictionary[category] = list()
distribution_dictionary['unknown'] = list()
one_tenth = int(len(my_words)/10)
current_number = 0
progress = 0
total_words = 0
for word_pair in my_words:
find_category = False
current_number += 1
if current_number > one_tenth:
progress += 1
current_number = 0
print('finish ' + str(progress) + '0%')
for category in dictionary:
if word_pair[0] in dictionary[category]:
if not find_category:
valid_word_count += 1
similarity_dictionary[category] += 10 * word_pair[1]
total_number += word_pair[1]
distribution_dictionary[category].append(word_pair)
find_category = True
if find_category:
continue
try:
word = wn.synsets(word_pair[0])[0]
total_number += word_pair[1]
valid_word_count += 1
except:
continue
for category in dictionary:
word_list = dictionary[category]
total_similarity = 0
total_categary_words = 0
for test_word in word_list:
try:
test = wn.synsets(test_word)[0]
except:
continue
try:
total_similarity += word.res_similarity(test, brown_ic)
total_categary_words += 1
except:
continue
if total_categary_words > 0:
similarity_dictionary[category] += word_pair[1] * total_similarity / total_categary_words
local_similarity_dictionary[category] = total_similarity / total_categary_words
final_category = 'others'
for category in local_similarity_dictionary:
if local_similarity_dictionary[category] > local_similarity_dictionary[final_category]:
final_category = category
if local_similarity_dictionary[final_category] > 2.5:
if local_similarity_dictionary[final_category] > 4:
if word_pair[0] not in dictionary[final_category]:
dictionary[final_category].append(word_pair[0])
find_category = True
distribution_dictionary[final_category].append(word_pair)
if not find_category:
distribution_dictionary['unknown'].append(word_pair)
for category in similarity_dictionary:
similarity_dictionary[category] /= total_number
recognition_rate = valid_word_count/len(my_words)
percentage_dictionary = dict()
for category in distribution_dictionary:
percentage_dictionary[category] = 0
for word_pair2 in distribution_dictionary[category]:
percentage_dictionary[category] += word_pair2[1]
total_words += word_pair2[1]
for category in percentage_dictionary:
percentage_dictionary[category] /= total_words
print('done...')
return similarity_dictionary, recognition_rate, distribution_dictionary, percentage_dictionary
test_user_list = ['bodybymark', 'fetchlightphoto', 'michael_alfuso', 'desgnarlais', 'thelifeasalex',
'carolina_dronz', 'luxweave', 'reshred', 'easyonthecheeks', 'socalwithkids']
wordlist = set(words.words())
wordnet_lemmatizer = WordNetLemmatizer()
brown_ic = wordnet_ic.ic('ic-brown.dat')
semcor_ic = wordnet_ic.ic('ic-semcor.dat')
my_dictionary = load_dictionary('Instagram_tag_dictionary.json')
wordlist = combine_dictionary(wordlist, my_dictionary)
spider = InstagramSpider()
start_time = time.time()
total_recognition_rate = 0
current_number = 0
for test_user in test_user_list:
current_number += 1
print('testing our dictionary on user: ' + test_user + '(' + str(current_number) + '/10)')
tag_data = spider.get_tag_from_user(test_user)
print('data got...')
words_from_tags, unsolved_data = tag2word(tag_list=tag_data)
print('analyzing words from tags from user: ' + test_user)
result, rate, distribute_result, percentage_result = analyze_words(my_words=words_from_tags,
dictionary=my_dictionary)
print("successful rate of fitting words into dictionary is:%.2f%%" % (rate * 100))
print('percentage result: ')
print(percentage_result)
recognize_rate = 1 - percentage_result['unknown']
print("our machine's current recognize rate is:%.2f%%" % (recognize_rate * 100))
total_recognition_rate += recognize_rate
average_recognition_rate = total_recognition_rate/10
print("our machine's current recognize rate is:%.2f%%" % (average_recognition_rate * 100))
print('used time: ' + str(time.time() - start_time))
print('end')
| gpl-3.0 | 5,556,061,869,338,022,000 | 37 | 105 | 0.615597 | false |
pcarbonn/pyDatalog | pyDatalog/examples/queens_N.py | 1 | 1551 |
from pyDatalog import pyDatalog
import time
@pyDatalog.program()
def _():
size=8
# when is it ok to have a queen in row X1 and another in row X2, separated by N columns
# this is memoized !
queens(N, X) <= (N>1) & queens(N-1, X[:-1]) & next_queen(N, X)
queens(1, X) <= (X1._in(range(size))) & (X[0]==X1)
next_queen(N, X) <= (N>2) & next_queen(N-1, X[1:]) & ok(X[0], N-1, X[-1])
next_queen(2, X) <= queens(1,(X1,)) & ok(X[0], 1, X1) & (X[1] == X1)
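    # ok: queens N columns apart must sit in different rows and off each other's diagonals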
ok(X1, N, X2) <= (X1 != X2) & (X1 != X2+N) & (X1 != X2-N)
start_time = time.time()
print(queens(size, (X0,X1,X2,X3,X4,X5,X6,X7)))
print("First datalog run in %f seconds" % (time.time() - start_time))
start = time.time()
for i in range(20):
# there is a warm-up period for the JIT --> let's compute it again
start_time = time.time()
datalog_count = len(queens(size, (X0,X1,X2,X3,X4,X5,X6,X7)).data)
datalog_time = (time.time() - start_time)
print(datalog_time)
print("Average : %s" % ((time.time() - start)/20))
print("%i solutions by datalog in %f seconds" % (datalog_count, datalog_time))
# pure python solution found on http://rosettacode.org/wiki/N-Queens#Python, for comparison purposes
from itertools import permutations
n = 8
cols = range(n)
def queens():
for vec in permutations(cols):
if n == len(set(vec[i]+i for i in cols)) \
== len(set(vec[i]-i for i in cols)):
#print ( vec )
pass
start_time = time.time()
queens()
python_time = time.time() - start_time
print("python : %f seconds" % python_time)
| lgpl-2.1 | 1,697,591,810,006,322,200 | 29.411765 | 100 | 0.602837 | false |
AstroPrint/AstroBox | src/astroprint/plugin/services/printer.py | 1 | 15165 | # coding=utf-8
__author__ = "AstroPrint Product Team <[email protected]>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2017 3DaGoGo, Inc - Released under terms of the AGPLv3 License"
from . import PluginService
from octoprint.events import Events
from octoprint.settings import settings
from astroprint.camera import cameraManager
from astroprint.network.manager import networkManager
from astroprint.boxrouter import boxrouterManager
from astroprint.cloud import astroprintCloud
from astroprint.printer.manager import printerManager
from astroprint.printerprofile import printerProfileManager
class PrinterService(PluginService):
_validEvents = [
		#watch the printer's status. Returns an object with a state and a value
		'printer_state_changed',
		#watch the printer's profile. Returns an object with a state and a value
		'printer_profile_changed',
		#watch the timelapse selected for photo capture while printing. Returns the frequency value.
		'print_capture_info_changed',
		#watch the temperature changes. Returns an object containing [tool0: actual, target - bed: actual, target]
		'temperature_changed',
		#watch the printing progress. Returns an object containing [completion, currentLayer, filamentConsumed, filepos, printTime, printTimeLeft]
'printing_progress_changed',
#watch the current printing state
'printing_state_changed',
#watch the printer comms
'printer_comms_changed',
#watch the printer bed status
"bed_cleared_changed",
#Watch for printer initiated prompt events
"printer_prompt_event"
]
def __init__(self):
super(PrinterService, self).__init__()
#printer status
self._eventManager.subscribe(Events.CONNECTED, self._onConnect)
self._eventManager.subscribe(Events.DISCONNECTED, self._onDisconnect)
self._eventManager.subscribe(Events.CONNECTING, self._onConnecting)
self._eventManager.subscribe(Events.HEATING_UP, self._onHeatingUp)
self._eventManager.subscribe(Events.TOOL_CHANGE, self._onToolChange)
self._eventManager.subscribe(Events.PRINTINGSPEED_CHANGE, self._onPrintingSpeedChange)
self._eventManager.subscribe(Events.PRINTINGFLOW_CHANGE, self._onPrintingFlowChange)
self._eventManager.subscribe(Events.PRINTERPROFILE_CHANGE, self._onPrintingProfileChange)
self._eventManager.subscribe(Events.COMMS_CHANGE, self._onPrinterCommsChange)
#bed
self._eventManager.subscribe(Events.BED_CLEARED_CHANGED, self._onPrinterClearChanged)
#temperature
self._eventManager.subscribe(Events.TEMPERATURE_CHANGE, self._onTemperatureChanged)
#printing progress
self._eventManager.subscribe(Events.PRINTING_PROGRESS, self._onPrintingProgressChanged)
#printing timelapse
self._eventManager.subscribe(Events.CAPTURE_INFO_CHANGED, self._onPrintCaptureInfoChanged)
#host commands (prompts)
self._eventManager.subscribe(Events.PRINTER_PROMPT, self._onPromptEvent)
#printing handling
self._eventManager.subscribe(Events.PRINT_STARTED, self._onPrintStarted)
self._eventManager.subscribe(Events.PRINT_DONE, self._onPrintDone)
self._eventManager.subscribe(Events.PRINT_FAILED, self._onPrintFailed)
self._eventManager.subscribe(Events.PRINT_CANCELLED, self._onPrintCancelled)
self._eventManager.subscribe(Events.PRINT_PAUSED, self._onPrintPaused)
self._eventManager.subscribe(Events.PRINT_RESUMED, self._onPrintResumed)
self._eventManager.subscribe(Events.ERROR, self._onPrintingError)
#REQUESTS
##Printer status
def getStatus(self):
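		# Snapshot of box + printer state: ids, hostname, current job, printer
		# profile/filament, operational/paused flags, camera presence and capabilities.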
printer = printerManager()
cm = cameraManager()
ppm = printerProfileManager()
cloudInstance = astroprintCloud()
fileName = None
if printer.isPrinting():
currentJob = printer.getCurrentJob()
fileName = currentJob["file"]["name"]
return {
'id': boxrouterManager().boxId,
'name': networkManager().getHostname(),
'orgId' : cloudInstance.orgId,
'groupId' : cloudInstance.groupId,
'printing': printer.isPrinting(),
'fileName': fileName,
'printerModel': ppm.data['printer_model'] if ppm.data['printer_model']['id'] else None,
'filament' : ppm.data['filament'],
'material': None,
'operational': printer.isOperational(),
'paused': printer.isPaused(),
'camera': cm.isCameraConnected(),
'isBedClear' : printer.isBedClear,
#'printCapture': cm.timelapseInfo,
'remotePrint': True,
'capabilities': ['remotePrint'] + cm.capabilities
}
##Printhead movement
def printerPrintheadCommand(self, data, callback):
pm = printerManager()
if not pm.isOperational() or pm.isPrinting():
# do not jog when a print job is running or we don't have a connection
callback("Printer is not operational or currently printing",True)
valid_axes = ["x", "y", "z"]
validated_values = {}
for axis in valid_axes:
try:
value = data[axis]
except:
value = None
if isinstance(value,(int,long,float)):
validated_values[axis] = value
if len(validated_values) <= 0:
self._logger.error('not a number')
callback('movement value is not a number',True)
else:
# execute the jog commands
for axis, value in validated_values.iteritems():
pm.jog(axis, value)
callback({'success': 'no_error'})
def printerHomeCommand(self,axes,callback):
pm = printerManager()
valid_axes = ["xy", "z"]
if not axes in valid_axes:
callback("Invalid axes: " + axes,True)
if axes == 'xy':
pm.home('x')
pm.home('y')
else:
pm.home('z')
callback({'success': 'no_error'})
def printerBabysteppingCommand(self, amount, callback):
pm = printerManager()
if not pm.isOperational():
# do not try baystepping when we don't have a connection
			callback('Printer is not operational', 409)
			return
if amount:
if not isinstance(amount, (int, long, float)):
callback("Not a number for amount: %r" % (amount), True)
validated_values = {}
validated_values['amount'] = amount
# execute the babystepping command
pm.babystepping(validated_values['amount'])
callback({'success': 'no_error'})
else:
callback('No amount provided', True)
def printerPrintingSpeed(self, data, callback):
pm = printerManager()
amount = data["amount"]
pm.printingSpeed(amount)
callback({'success': 'no_error'})
def printerFanSpeed(self, data, callback):
pm = printerManager()
speed = data["speed"]
tool = data["tool"]
pm.fan(tool, speed)
callback({'success': 'no_error'})
def sendComm(self, data ,callback):
pm = printerManager()
if not pm.allowTerminal:
			callback('Driver does not support terminal access',True)
			return
if not pm.isOperational():
			callback('No Printer connected',True)
			return
command = data['command']
if command:
pm.sendRawCommand(command)
callback({'success': 'no_error'})
else:
callback("Command is missing", True)
def startCommBroadcasting(self, data ,callback):
pm = printerManager()
if not pm.allowTerminal:
callback("Driver does not support terminal access", True)
pm.broadcastTraffic += 1
#Stop doing temperature reports
pm.doIdleTempReports = False
callback({'success': 'no_error'})
def stopCommBroadcasting(self, data ,callback):
pm = printerManager()
if not pm.allowTerminal:
callback("Driver does not support terminal access", True)
#Protect against negative values
pm.broadcastTraffic = max(0, pm.broadcastTraffic - 1)
if pm.broadcastTraffic == 0:
#Restore temperature reports
pm.doIdleTempReports = True
callback({'success': 'no_error'})
##Printer connection
def getConnection(self):
pm = printerManager()
state, port, baudrate = pm.getCurrentConnection()
current = {
"state": state,
"port": port,
"baudrate": baudrate
}
return { 'current': current, 'option': pm.getConnectionOptions() }
##Temperature
def getTemperature(self):
pm = printerManager()
tempData = pm.getCurrentTemperatures()
return tempData
def setTemperature(self,data,callback):
pm = printerManager()
if not pm.isOperational():
callback("Printer is not operational", True)
return
temperature = data['temperature']
element = data['element']
if not isinstance(temperature, (int, long, float)):
callback("Not a number: " + temperature, True)
return
# perform the actual temperature command
pm.setTemperature(element, temperature)
callback({'success': 'no_error'})
def extrude(self,data,callback):
pm = printerManager()
if not pm.isOperational():
callback("Printer is not operational", True)
return
if pm.isPrinting():
# do not extrude when a print job is running
callback("Printer is currently printing", True)
return
amount = data["amount"]
speed = data["speed"]
tool = data["tool"]
if not isinstance(amount, (int, long, float)):
callback("Not a number for extrusion amount: " + amount, True)
return
if speed and not isinstance(speed, (int, long, float)):
speed = None
pm.extrude(tool, amount, speed)
callback({'success': 'no_error'})
return
def getNumberOfExtruders(self,data,sendResponse=None):
ppm = printerProfileManager()
extruderCount = ppm.data.get('extruder_count')
if sendResponse:
sendResponse(extruderCount)
return extruderCount
def getSelectedExtruder(self, data, sendResponse= None):
pm = printerManager()
if pm.isConnected():
selectedTool = pm.getSelectedTool()
else:
selectedTool = None
if sendResponse:
sendResponse(selectedTool)
return selectedTool
def getPrintingSpeed(self, data, sendResponse= None):
pm = printerManager()
if pm.isConnected():
printingSpeed = int(pm.getPrintingSpeed())
else:
printingSpeed = None
if sendResponse:
sendResponse(printingSpeed)
return printingSpeed
def setPrintingSpeed(self, data, sendResponse= None):
pm = printerManager()
pm.setPrintingSpeed(int(data))
sendResponse({'success': 'no_error'})
return
def getPrintingFlow(self, data, sendResponse= None):
pm = printerManager()
if pm.isConnected():
printingFlow = int(pm.getPrintingFlow())
else:
printingFlow = None
if sendResponse:
sendResponse(printingFlow)
return printingFlow
def setPrintingFlow(self, data, sendResponse= None):
pm = printerManager()
pm.setPrintingFlow(int(data))
sendResponse({'success': 'no_error'})
return
def selectTool(self,data,sendResponse):
pm = printerManager()
pm.changeTool(int(data))
sendResponse({'success': 'no_error'})
return
def getPrintJobId(self, data, sendResponse):
pm = printerManager()
sendResponse(pm.currentPrintJobId)
def pause(self,data,sendResponse):
printerManager().togglePausePrint()
sendResponse({'success': 'no_error'})
def resume(self,data,sendResponse):
printerManager().togglePausePrint()
sendResponse({'success': 'no_error'})
def cancel(self,data,sendResponse):
sendResponse(printerManager().cancelPrint())
def setTimelapse(self,data,sendResponse):
freq = data['freq']
if freq:
cm = cameraManager()
if cm.timelapseInfo:
if not cm.update_timelapse(freq):
sendResponse('error_updating_timelapse',True)
return
else:
r = cm.start_timelapse(freq)
if r != 'success':
sendResponse('error_starting_timelapse',True)
return
else:
			sendResponse('error_no_frequency',True)
return
sendResponse({'success': 'no_error'})
def getTimelapse(self,data,sendResponse):
sendResponse(cameraManager().timelapseInfo)
def clearBed(self,data,sendResponse):
pm = printerManager()
pm.set_bed_clear(True)
sendResponse({'success': 'no_error'})
def loadFilament(self,data,callback):
pm = printerManager()
if not pm.isOperational():
callback("No Printer connected",True)
executedSuccess = pm.executeRoutine('loadFilament')
if not executedSuccess:
callback("LoadFilament routine executing failed",True)
else:
#callback("LoadFilament executed successfuly",True)
callback({'success': 'no_error'})
def unLoadFilament(self,data,callback):
pm = printerManager()
if not pm.isOperational():
callback("No Printer connected",True)
executedSuccess = pm.executeRoutine('unLoadFilament')
if not executedSuccess:
callback("UnLoadFilament routine executing failed",True)
else:
#callback("UnloadFilament executed successfuly",True)
callback({'success': 'no_error'})
def reportPromptChoice(self, choice, sendResponse):
pm = printerManager()
if choice is not None:
if choice != 'close':
pm.onPromptResponse(int(choice))
pm.promptManager.end_prompt()
sendResponse({'success': 'no_error'})
else:
sendResponse('invalid_choice',True)
#EVENTS
def _onConnect(self,event,value):
self.publishEvent('printer_state_changed', {"operational": True})
def _onConnecting(self,event,value):
self.publishEvent('printer_state_changed', {"connecting": True})
def _onDisconnect(self,event,value):
self.publishEvent('printer_state_changed', {"operational": False})
def _onToolChange(self,event,value):
self.publishEvent('printer_state_changed', {"tool": value})
def _onPrintingSpeedChange(self,event,value):
self.publishEvent('printer_state_changed', {"speed": value})
def _onPrinterCommsChange(self,event,value):
self.publishEvent('printer_comms_changed', value)
def _onPrinterClearChanged(self,event,value):
pm = printerManager()
self.publishEvent('bed_cleared_changed', {'isBedClear' : pm.isBedClear})
def _onPrintingFlowChange(self,event,value):
self.publishEvent('printer_state_changed', {"flow": value})
def _onHeatingUp(self,event,value):
self.publishEvent('printer_state_changed', {"heatingUp": value})
def _onPrintingProfileChange(self,event,data):
self.publishEvent('printer_profile_changed', data)
def _onTemperatureChanged(self,event,value):
self.publishEvent('temperature_changed', value)
def _onPrintingProgressChanged(self,event,value):
self.publishEvent('printing_progress_changed', value)
def _onPrintCaptureInfoChanged(self,event,value):
self.publishEvent('print_capture_info_changed',value)
def _onPrintStarted(self,event,value):
data = value
data['state'] = 'started'
self.publishEvent('printing_state_changed',data)
def _onPrintDone(self,event,value):
data = value
data['state'] = 'done'
self.publishEvent('printing_state_changed', data)
def _onPrintFailed(self,event,value):
data = value
data['state'] = 'failed'
self.publishEvent('printing_state_changed', data)
def _onPrintCancelled(self,event,value):
data = value
data['state'] = 'cancelled'
self.publishEvent('printing_state_changed', data)
def _onPrintPaused(self,event,value):
data = value
data['state'] = 'paused'
self.publishEvent('printing_state_changed', data)
def _onPrintResumed(self,event,value):
data = value
data['state'] = 'resumed'
self.publishEvent('printing_state_changed', data)
def _onPrintingError(self,event,value):
data = value
data['state'] = 'printing_error'
self.publishEvent('printing_state_changed', data)
def _onBedCleared(self,event,value):
data = value
data['state'] = 'printing_error'
self.publishEvent('printing_state_changed', data)
def _onPromptEvent(self, event, value):
self.publishEvent('printer_prompt_event', value)
| agpl-3.0 | -385,700,360,344,196,740 | 25.936057 | 137 | 0.724233 | false |
jhcepas/eggnog-mapper | eggnogmapper/annotation/pfam/pfam_modes.py | 1 | 4199 | ##
## CPCantalapiedra 2020
from collections import Counter
from ...emapperException import EmapperException
from ...utils import colorify
from .pfam_denovo import pfam_align_denovo
from .pfam_scan import pfam_align_parallel_scan
PFAM_REALIGN_NONE = 'none'
PFAM_REALIGN_REALIGN = 'realign'
PFAM_REALIGN_DENOVO = 'denovo'
def run_pfam_mode(pfam_search_mode, annots_generator, queries_fasta, resume, translate, trans_table,
cpu, num_servers, num_workers, cpus_per_worker, port, end_port,
temp_dir, pfam_file):
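    """Generator: align queries to PFAM domains (de novo or re-align mode) and
    yield the ((hit, annotation), exists) tuples with "PFAMs" updated."""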
##
# 1) Align queries to PFAMs
aligned_pfams = None
all_annotations = None
if pfam_search_mode == PFAM_REALIGN_DENOVO:
print(colorify("De novo scan of PFAM domains", 'lgreen'))
all_annotations, queries_pfams = load_all_annotations(annots_generator)
aligned_pfams = pfam_align_denovo(queries_pfams,
queries_fasta,
resume,
translate,
trans_table,
cpu,
num_servers,
num_workers,
cpus_per_worker,
port,
end_port,
temp_dir,
pfam_file)
elif pfam_search_mode == PFAM_REALIGN_REALIGN:
print(colorify("Re-aligning queries to PFAM domains from orthologs", 'lgreen'))
all_annotations, queries_pfams = load_all_annotations(annots_generator, with_pfam_only = True)
aligned_pfams = pfam_align_parallel_scan(queries_pfams,
queries_fasta,
resume,
translate,
trans_table,
cpu,
temp_dir,
pfam_file)
else:
raise EmapperException(f"Unrecognized pfam search mode {pfam_search_mode}.")
##
# 2) Add found pfams to annotations output
if aligned_pfams is not None and all_annotations is not None:
for (hit, annotation), exists in all_annotations:
# if --resume and annotation exists, skip pfam realignment
if exists == False and annotation is not None:
(query_name, best_hit_name, best_hit_evalue, best_hit_score,
annotations,
(og_name, og_cat, og_desc),
max_annot_lvl,
match_nog_names,
all_orthologies, annot_orthologs) = annotation
if query_name in aligned_pfams:
annotations["PFAMs"] = Counter(aligned_pfams[query_name])
else:
annotations["PFAMs"] = None
yield ((hit, annotation), exists)
return
##
def load_all_annotations(annots_generator, with_pfam_only = False):
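    """Collect all annotation tuples plus the (query, PFAM list) pairs that
    still need alignment; entries already written (--resume) are skipped."""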
all_annotations = []
queries_pfams = []
for (hit, annotation), exists in annots_generator:
all_annotations.append(((hit, annotation), exists))
# if --resume and annotation exists, skip pfam realignment
if exists == False and annotation is not None:
(query_name, best_hit_name, best_hit_evalue, best_hit_score,
annotations,
(og_name, og_cat, og_desc),
max_annot_lvl,
match_nog_names,
all_orthologies, annot_orthologs) = annotation
if "PFAMs" in annotations:
queries_pfams.append((query_name, list(annotations["PFAMs"])))
else:
if with_pfam_only == False:
queries_pfams.append((query_name, []))
return all_annotations, queries_pfams
## END
| gpl-2.0 | 1,073,159,795,631,545,100 | 36.159292 | 102 | 0.486783 | false |
registerguard/celebrate | celebrate/settings/base.py | 1 | 5226 | """
Django settings for celebrate project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(PROJECT_DIR)
# Get secrets
from sys import path
path.append(PROJECT_DIR)
import json
from django.core.exceptions import ImproperlyConfigured
with open(os.path.join(PROJECT_DIR, 'secrets.json')) as f:
secrets = json.loads(f.read())
def get_secret(setting, secrets=secrets):
try:
return secrets[setting]
except KeyError:
error_msg = "set the {0} environment variable".format(setting)
raise ImproperlyConfigured(error_msg)
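# secrets.json is expected to hold the keys read below (illustrative values only):
# {"SECRET_KEY": "...", "DB_ENGINE": "django.db.backends.postgresql",
#  "DB_NAME": "...", "DB_USER": "...", "DB_PASSWORD": "...", "DB_HOST": "...",
#  "DB_PORT": "...", "RECAPTCHA_PUBLIC_KEY": "...", "RECAPTCHA_PRIVATE_KEY": "..."}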
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = get_secret('SECRET_KEY')
# Application definition
INSTALLED_APPS = [
'blog',
'home',
'search',
'wagtail.api.v2',
'wagtail.contrib.modeladmin',
'wagtail.contrib.wagtailsitemaps',
'wagtail.contrib.wagtailstyleguide',
'wagtail.wagtailforms',
'wagtail.wagtailredirects',
'wagtail.wagtailembeds',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailsnippets',
'wagtail.wagtaildocs',
'wagtail.wagtailimages',
'wagtail.wagtailsearch',
'wagtail.wagtailadmin',
'wagtail.wagtailcore',
'crispy_forms',
'captcha',
'modelcluster',
'rest_framework',
'taggit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'wagtail.wagtailcore.middleware.SiteMiddleware',
'wagtail.wagtailredirects.middleware.RedirectMiddleware',
]
ROOT_URLCONF = 'celebrate.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(PROJECT_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.media',
'django.template.context_processors.request',
'django.template.context_processors.static',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'celebrate.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
'ENGINE': get_secret('DB_ENGINE'),
'NAME': get_secret('DB_NAME'),
'USER': get_secret('DB_USER'),
'PASSWORD': get_secret('DB_PASSWORD'),
'HOST': get_secret('DB_HOST'),
'PORT': get_secret('DB_PORT'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Los_Angeles'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATICFILES_DIRS = [
os.path.join(PROJECT_DIR, 'static'),
]
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Google reCAPTCHA
RECAPTCHA_PUBLIC_KEY = get_secret('RECAPTCHA_PUBLIC_KEY')
RECAPTCHA_PRIVATE_KEY = get_secret('RECAPTCHA_PRIVATE_KEY')
NOCAPTCHA = True
# Django Crispy forms setting
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# Wagtail settings
WAGTAIL_SITE_NAME = "celebrate"
# Base URL to use when referring to full URLs within the Wagtail admin backend -
# e.g. in notification emails. Don't include '/admin' or a trailing slash
BASE_URL = 'http://celebrate.registerguard.com'
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch',
'INDEX': 'celebrate',
},
}
WAGTAILIMAGES_FEATURE_DETECTION_ENABLED = True
| mit | 8,953,759,563,735,617,000 | 25.261307 | 80 | 0.684845 | false |
PiWare/kicad_library | script/fpgen/qfp.py | 1 | 1739 | import fp
from fp import cfg
class qfp(fp.base):
"""Generator for LQFP/TQFP/PQFP and other xQFP footprints"""
def __init__(self, name, model, description, tags, package_width, package_height, pad_width, pad_height, pad_grid, pad_distance_x, pad_distance_y, pad_count_x, pad_count_y):
super(qfp, self).__init__(name, model, description, tags)
if pad_count_x % 2 or pad_count_y % 2:
raise NameError("Pad count is odd!")
fp.base.add(self, fp.rectangle(cfg.FOOTPRINT_PACKAGE_LAYER, 0, 0, package_width, package_height, cfg.FOOTPRINT_PACKAGE_LINE_WIDTH, True))
pin = 1
y = pad_grid * -((float(pad_count_y) / 4) - 0.5)
x = pad_grid * -((float(pad_count_x) / 4) - 0.5)
fp.base.add(self, fp.circle(cfg.FOOTPRINT_PACKAGE_LAYER, x, y, x + 0.5, y, cfg.FOOTPRINT_PACKAGE_LINE_WIDTH))
for i in range(pad_count_y / 2):
fp.base.add(self, fp.pad(cfg.FOOTPRINT_SMD_LAYERS, pin, fp.technology.smd, fp.type.rect, -pad_distance_x / 2, y, pad_width, pad_height, 0, 90))
y += pad_grid
pin += 1
for i in range(pad_count_x / 2):
fp.base.add(self, fp.pad(cfg.FOOTPRINT_SMD_LAYERS, pin, fp.technology.smd, fp.type.rect, x, pad_distance_y / 2, pad_width, pad_height, 0, 0))
x += pad_grid
pin += 1
y = pad_grid * ((float(pad_count_y) / 4) - 0.5)
for i in range(pad_count_y / 2):
fp.base.add(self, fp.pad(cfg.FOOTPRINT_SMD_LAYERS, pin, fp.technology.smd, fp.type.rect, pad_distance_x / 2, y, pad_width, pad_height, 0, 90))
y -= pad_grid
pin += 1
x = pad_grid * ((float(pad_count_x) / 4) - 0.5)
for i in range(pad_count_x / 2):
fp.base.add(self, fp.pad(cfg.FOOTPRINT_SMD_LAYERS, pin, fp.technology.smd, fp.type.rect, x, -pad_distance_y / 2, pad_width, pad_height, 0, 0))
x -= pad_grid
pin += 1
| gpl-2.0 | 6,151,101,319,068,916,000 | 43.589744 | 174 | 0.646348 | false |
Aeolitus/Sephrasto | CharakterMain.py | 1 | 7106 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'CharakterMain.ui'
#
# Created by: PyQt5 UI code generator 5.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_formMain(object):
def setupUi(self, formMain):
formMain.setObjectName("formMain")
formMain.setWindowModality(QtCore.Qt.ApplicationModal)
formMain.resize(900, 541)
self.gridLayout = QtWidgets.QGridLayout(formMain)
self.gridLayout.setObjectName("gridLayout")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.checkReq = QtWidgets.QCheckBox(formMain)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkReq.sizePolicy().hasHeightForWidth())
self.checkReq.setSizePolicy(sizePolicy)
self.checkReq.setMinimumSize(QtCore.QSize(125, 0))
self.checkReq.setChecked(True)
self.checkReq.setObjectName("checkReq")
self.horizontalLayout_3.addWidget(self.checkReq)
self.buttonQuicksave = QtWidgets.QPushButton(formMain)
self.buttonQuicksave.setMinimumSize(QtCore.QSize(75, 0))
self.buttonQuicksave.setObjectName("buttonQuicksave")
self.horizontalLayout_3.addWidget(self.buttonQuicksave)
self.buttonSave = QtWidgets.QPushButton(formMain)
self.buttonSave.setMinimumSize(QtCore.QSize(75, 0))
self.buttonSave.setObjectName("buttonSave")
self.horizontalLayout_3.addWidget(self.buttonSave)
self.buttonSavePDF = QtWidgets.QPushButton(formMain)
self.buttonSavePDF.setMinimumSize(QtCore.QSize(100, 0))
self.buttonSavePDF.setMaximumSize(QtCore.QSize(16777214, 16777215))
self.buttonSavePDF.setObjectName("buttonSavePDF")
self.horizontalLayout_3.addWidget(self.buttonSavePDF)
self.gridLayout.addLayout(self.horizontalLayout_3, 3, 0, 1, 1)
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.tabs = QtWidgets.QTabWidget(formMain)
self.tabs.setTabPosition(QtWidgets.QTabWidget.North)
self.tabs.setElideMode(QtCore.Qt.ElideRight)
self.tabs.setDocumentMode(False)
self.tabs.setObjectName("tabs")
self.tab = QtWidgets.QWidget()
self.tab.setObjectName("tab")
self.tabs.addTab(self.tab, "")
self.tab_2 = QtWidgets.QWidget()
self.tab_2.setObjectName("tab_2")
self.tabs.addTab(self.tab_2, "")
self.horizontalLayout.addWidget(self.tabs)
self.verticalLayout.addLayout(self.horizontalLayout)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.label = QtWidgets.QLabel(formMain)
self.label.setObjectName("label")
self.horizontalLayout_2.addWidget(self.label)
self.spinEP = QtWidgets.QSpinBox(formMain)
self.spinEP.setAlignment(QtCore.Qt.AlignCenter)
self.spinEP.setButtonSymbols(QtWidgets.QAbstractSpinBox.PlusMinus)
self.spinEP.setMaximum(100000)
self.spinEP.setObjectName("spinEP")
self.horizontalLayout_2.addWidget(self.spinEP)
self.label_3 = QtWidgets.QLabel(formMain)
self.label_3.setObjectName("label_3")
self.horizontalLayout_2.addWidget(self.label_3)
self.spinSpent = QtWidgets.QSpinBox(formMain)
self.spinSpent.setFocusPolicy(QtCore.Qt.NoFocus)
self.spinSpent.setAlignment(QtCore.Qt.AlignCenter)
self.spinSpent.setReadOnly(True)
self.spinSpent.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
self.spinSpent.setMinimum(-100000)
self.spinSpent.setMaximum(100000)
self.spinSpent.setObjectName("spinSpent")
self.horizontalLayout_2.addWidget(self.spinSpent)
self.label_2 = QtWidgets.QLabel(formMain)
self.label_2.setObjectName("label_2")
self.horizontalLayout_2.addWidget(self.label_2)
self.spinRemaining = QtWidgets.QSpinBox(formMain)
self.spinRemaining.setFocusPolicy(QtCore.Qt.NoFocus)
self.spinRemaining.setAutoFillBackground(False)
self.spinRemaining.setAlignment(QtCore.Qt.AlignCenter)
self.spinRemaining.setReadOnly(True)
self.spinRemaining.setButtonSymbols(QtWidgets.QAbstractSpinBox.NoButtons)
self.spinRemaining.setMinimum(-100000)
self.spinRemaining.setMaximum(100000)
self.spinRemaining.setObjectName("spinRemaining")
self.horizontalLayout_2.addWidget(self.spinRemaining)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.gridLayout.addLayout(self.verticalLayout, 0, 0, 1, 1)
self.retranslateUi(formMain)
self.tabs.setCurrentIndex(1)
QtCore.QMetaObject.connectSlotsByName(formMain)
formMain.setTabOrder(self.tabs, self.spinEP)
formMain.setTabOrder(self.spinEP, self.spinSpent)
formMain.setTabOrder(self.spinSpent, self.spinRemaining)
formMain.setTabOrder(self.spinRemaining, self.checkReq)
formMain.setTabOrder(self.checkReq, self.buttonQuicksave)
formMain.setTabOrder(self.buttonQuicksave, self.buttonSave)
formMain.setTabOrder(self.buttonSave, self.buttonSavePDF)
def retranslateUi(self, formMain):
_translate = QtCore.QCoreApplication.translate
formMain.setWindowTitle(_translate("formMain", "Sephrasto - Charakter erstellen"))
self.checkReq.setText(_translate("formMain", "Voraussetzungen überprüfen"))
self.buttonQuicksave.setText(_translate("formMain", "Speichern"))
self.buttonSave.setText(_translate("formMain", "Speichern als..."))
self.buttonSavePDF.setText(_translate("formMain", "PDF erstellen"))
self.tabs.setTabText(self.tabs.indexOf(self.tab), _translate("formMain", "Tab 1"))
self.tabs.setTabText(self.tabs.indexOf(self.tab_2), _translate("formMain", "Tab 2"))
self.label.setText(_translate("formMain", " Total: "))
self.spinEP.setSuffix(_translate("formMain", " EP"))
self.label_3.setText(_translate("formMain", " Ausgegeben: "))
self.spinSpent.setSuffix(_translate("formMain", " EP"))
self.label_2.setText(_translate("formMain", " Verbleibend: "))
self.spinRemaining.setSuffix(_translate("formMain", " EP"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
formMain = QtWidgets.QWidget()
ui = Ui_formMain()
ui.setupUi(formMain)
formMain.show()
sys.exit(app.exec_())
| mit | 8,834,931,052,019,028,000 | 49.742857 | 114 | 0.70763 | false |
mganeva/mantid | scripts/reduction_workflow/instruments/sans/sns_command_interface.py | 1 | 6343 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
#pylint: disable=invalid-name,unused-import
"""
Command set for EQSANS reduction
"""
# Import the specific commands that we need - some of these are used in systemtests
from __future__ import (absolute_import, division, print_function)
from reduction_workflow.command_interface import *
# The following imports allow users to import this file and have all functionality automatically imported
# Do not remove these imports as it will break user scripts which rely on them
from .hfir_command_interface import DarkCurrent, NoDarkCurrent, NoNormalization # noqa: F401
from .hfir_command_interface import SolidAngle, NoSolidAngle # noqa: F401
from .hfir_command_interface import DirectBeamCenter, ScatteringBeamCenter # noqa: F401
from .hfir_command_interface import SetBeamCenter as BaseSetBeamCenter # noqa: F401
from .hfir_command_interface import SensitivityCorrection, SetSensitivityBeamCenter # noqa: F401
from .hfir_command_interface import SensitivityDirectBeamCenter, SensitivityScatteringBeamCenter # noqa: F401
from .hfir_command_interface import NoSensitivityCorrection, DivideByThickness # noqa: F401
from .hfir_command_interface import IQxQy, NoIQxQy, SaveIq, NoSaveIq, SaveIqAscii # noqa: F401
from .hfir_command_interface import DirectBeamTransmission, TransmissionDarkCurrent # noqa: F401
from .hfir_command_interface import ThetaDependentTransmission # noqa: F401
from .hfir_command_interface import SetTransmissionBeamCenter, TransmissionDirectBeamCenter # noqa: F401
from .hfir_command_interface import SetTransmission, NoTransmission # noqa: F401
from .hfir_command_interface import Background, NoBackground, NoBckTransmission # noqa: F401
from .hfir_command_interface import SetBckTransmission, BckDirectBeamTransmission # noqa: F401
from .hfir_command_interface import SetBckTransmissionBeamCenter, BckThetaDependentTransmission # noqa: F401
from .hfir_command_interface import BckTransmissionDirectBeamCenter, BckTransmissionDarkCurrent # noqa: F401
from .hfir_command_interface import SetSampleDetectorOffset, SetSampleDetectorDistance # noqa: F401
from .hfir_command_interface import Mask, MaskRectangle, MaskDetectors, MaskDetectorSide # noqa: F401
from .hfir_command_interface import SetAbsoluteScale, SetDirectBeamAbsoluteScale # noqa: F401
from .hfir_command_interface import Stitch # noqa: F401
from reduction_workflow.find_data import find_data
def EQSANS(keep_events=False, property_manager=None):
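    """Set up an EQSANS reduction: clears the singleton, registers the
    instrument, and applies solid-angle correction and azimuthal averaging by
    default; property_manager names an existing reduction table to reuse."""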
Clear()
ReductionSingleton().set_instrument("EQSANS",
"SetupEQSANSReduction",
"SANSReduction")
ReductionSingleton().reduction_properties["PreserveEvents"]=keep_events
SolidAngle()
AzimuthalAverage()
if property_manager is not None:
ReductionSingleton().set_reduction_table_name(property_manager)
def SetBeamCenter(x,y):
if x==0 and y==0:
ReductionSingleton().reduction_properties["UseConfigBeam"]=True
else:
BaseSetBeamCenter(x,y)
def TotalChargeNormalization(normalize_to_beam=True, beam_file=''):
if normalize_to_beam:
ReductionSingleton().reduction_properties["Normalisation"]="BeamProfileAndCharge"
ReductionSingleton().reduction_properties["MonitorReferenceFile"]=beam_file
else:
ReductionSingleton().reduction_properties["Normalisation"]="Charge"
def BeamMonitorNormalization(reference_flux_file):
reference_flux_file = find_data(reference_flux_file, instrument=ReductionSingleton().get_instrument())
ReductionSingleton().reduction_properties["Normalisation"]="Monitor"
ReductionSingleton().reduction_properties["MonitorReferenceFile"]=reference_flux_file
def PerformFlightPathCorrection(do_correction=True):
ReductionSingleton().reduction_properties["CorrectForFlightPath"]=do_correction
def SetTOFTailsCutoff(low_cut=0.0, high_cut=0.0):
ReductionSingleton().reduction_properties["LowTOFCut"]=low_cut
ReductionSingleton().reduction_properties["HighTOFCut"]=high_cut
def UseConfigTOFTailsCutoff(use_config=True):
ReductionSingleton().reduction_properties["UseConfigTOFCuts"]=use_config
def SkipTOFCorrection(skip=True):
ReductionSingleton().reduction_properties["SkipTOFCorrection"]=skip
def UseConfigMask(use_config=True):
ReductionSingleton().reduction_properties["UseConfigMask"]=use_config
def SetWavelengthStep(step=0.1):
ReductionSingleton().reduction_properties["WavelengthStep"]=step
def UseConfig(use_config=True):
ReductionSingleton().reduction_properties["UseConfig"]=use_config
def AzimuthalAverage(suffix="_Iq", n_bins=100, n_subpix=1, log_binning=False,
scale=True):
# Suffix is no longer used but kept for backward compatibility
# N_subpix is also no longer used
ReductionSingleton().reduction_properties["DoAzimuthalAverage"]=True
ReductionSingleton().reduction_properties["IQNumberOfBins"]=n_bins
ReductionSingleton().reduction_properties["IQLogBinning"]=log_binning
ReductionSingleton().reduction_properties["IQScaleResults"]=scale
def CombineTransmissionFits(combine_frames=True):
ReductionSingleton().reduction_properties["FitFramesTogether"]=combine_frames
def BckCombineTransmissionFits(combine_frames=True):
ReductionSingleton().reduction_properties["BckFitFramesTogether"]=combine_frames
def Resolution(sample_aperture_diameter=10.0):
ReductionSingleton().reduction_properties["ComputeResolution"]=True
ReductionSingleton().reduction_properties["SampleApertureDiameter"]=sample_aperture_diameter
def IndependentBinning(independent_binning=True):
ReductionSingleton().reduction_properties["IQIndependentBinning"]=independent_binning
def SetDetectorOffset(distance):
ReductionSingleton().reduction_properties["DetectorOffset"] = distance
def SetSampleOffset(distance):
ReductionSingleton().reduction_properties["SampleOffset"] = distance
def LoadNexusInstrumentXML(value=True):
ReductionSingleton().reduction_properties["LoadNexusInstrumentXML"] = value
| gpl-3.0 | -6,325,847,668,192,674,000 | 42.744828 | 110 | 0.78023 | false |
JohnStarich/dotfiles | python/johnstarich/power/__init__.py | 1 | 1519 | from johnstarich.power.parse import raw_power_info
from johnstarich.interval import Interval
from johnstarich.segment import segment, segment_default
power_status_mappings = {
'discharging': '🔥',
'charging': '⚡️',
'finishing charge': '🔋',
'charged': '🔋',
'AC attached': '🔌',
}
power_highlight_groups = ['battery_gradient', 'battery']
update_interval = Interval(10)
last_status = ''
last_gradient = 0
last_highlight_groups = power_highlight_groups
def power(pl, **kwargs):
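    # powerline re-renders segments frequently; serve the cached segment until
    # update_interval elapses so the underlying power query runs sparingly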
global last_status, last_gradient, last_highlight_groups
if not update_interval.should_run():
return segment(last_status, gradient_level=last_gradient,
highlight_groups=last_highlight_groups)
power = raw_power_info()
percentage = int(power['percentage'])
status = power['status']
time = power['time']
time = time.replace(" remaining", "")
if status == 'charged':
time = 'full'
elif time == '0:00' or time == '(no estimate)' or time == 'not charging':
time = '-:--'
if status in power_status_mappings:
status = power_status_mappings[status]
contents = '{s} {p}% ({t})'.format(
p=percentage,
s=status,
t=time,
)
update_interval.start()
last_status = contents
last_gradient = 100 - percentage
last_highlight_groups = power_highlight_groups
return segment(last_status, gradient_level=last_gradient,
highlight_groups=last_highlight_groups)
| apache-2.0 | -2,715,335,057,318,915,600 | 27.903846 | 77 | 0.634065 | false |
grimoirelab/sortinghat | tests/test_cmd_organizations.py | 1 | 19755 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2019 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Santiago Dueñas <[email protected]>
#
import sys
import unittest
if '..' not in sys.path:
sys.path.insert(0, '..')
from sortinghat import api
from sortinghat.command import CMD_SUCCESS
from sortinghat.cmd.organizations import Organizations
from sortinghat.exceptions import NotFoundError, CODE_ALREADY_EXISTS_ERROR, CODE_NOT_FOUND_ERROR
from tests.base import TestCommandCaseBase
REGISTRY_ORG_ALREADY_EXISTS_ERROR = "Error: organization 'Bitergium' already exists in the registry"
REGISTRY_DOM_ALREADY_EXISTS_ERROR = "Error: domain 'bitergia.com' already exists in the registry"
REGISTRY_ORG_NOT_FOUND_ERROR = "Error: Bitergium not found in the registry"
REGISTRY_ORG_NOT_FOUND_ERROR_ALT = "Error: LibreSoft not found in the registry"
REGISTRY_DOM_NOT_FOUND_ERROR = "Error: example.com not found in the registry"
REGISTRY_DOM_NOT_FOUND_ERROR_ALT = "Error: bitergia.com not found in the registry"
REGISTRY_EMPTY_OUTPUT = ""
REGISTRY_OUTPUT = """Bitergia\tbitergia.com *
Bitergia\tbitergia.net
Example\texample.com
Example\texample.net
Example\texample.org
LibreSoft"""
REGISTRY_OUTPUT_ALT = """Bitergia\tbitergia.net
Example\tbitergia.com
Example\texample.com
Example\texample.net
Example\texample.org
LibreSoft"""
REGISTRY_OUTPUT_EXAMPLE = """Example\texample.com
Example\texample.net
Example\texample.org
MyExample\tmyexample.com"""
REGISTRY_OUTPUT_EXAMPLE_ALT = """Example\texample.com
Example\texample.net"""
class TestOrgsCaseBase(TestCommandCaseBase):
"""Defines common setup and teardown methods orgs unit tests"""
cmd_klass = Organizations
def load_test_dataset(self):
pass
class TestOrgsCommand(TestOrgsCaseBase):
"""Organization command unit tests"""
def load_test_dataset(self):
self.cmd.add('Example')
self.cmd.add('Example', 'example.com')
self.cmd.add('Bitergia')
self.cmd.add('Bitergia', 'bitergia.net')
self.cmd.add('Bitergia', 'bitergia.com', is_top_domain=True)
self.cmd.add('LibreSoft')
self.cmd.add('Example', 'example.org')
self.cmd.add('Example', 'example.net')
def test_default_action(self):
"""Check whether when no action is given it runs --list"""
# Remove pre-loaded dataset
self.db.clear()
# Add some contents first
self.cmd.add('Example')
self.cmd.add('Example', 'example.com')
self.cmd.add('Example', 'example.org')
self.cmd.add('Example', 'example.net')
self.cmd.add('MyExample')
self.cmd.add('MyExample', 'myexample.com')
code = self.cmd.run()
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT_EXAMPLE)
def test_list_without_args(self):
"""Test list action with and without arguments"""
code = self.cmd.run('-l')
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT)
def test_list_with_args(self):
"""Test list action with arguments"""
# Add an extra organization
self.cmd.add('MyExample')
self.cmd.add('MyExample', 'myexample.com')
code = self.cmd.run('--list', 'Example')
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT_EXAMPLE)
def test_add_with_args(self):
"""Test add action"""
# Remove pre-loaded dataset
self.db.clear()
code = self.cmd.run('--add', 'LibreSoft')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('-a', 'Example')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('--add', 'Example', 'example.com')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('--add', 'Bitergia')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('-a', 'Bitergia', 'bitergia.net')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('--add', 'Example', 'example.org')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('--add', 'Bitergia', 'bitergia.com', '--top-domain')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('-a', 'Example', 'example.net')
self.assertEqual(code, CMD_SUCCESS)
self.cmd.run('--list')
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT)
def test_add_without_args(self):
"""Check when calling --add without args, it does not do anything"""
# Remove pre-loaded dataset
self.db.clear()
code = self.cmd.run('--add')
self.assertEqual(code, CMD_SUCCESS)
self.cmd.run('-l')
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_EMPTY_OUTPUT)
def test_add_with_overwrite_option(self):
"""Check whether it not fails running add with overwrite option"""
code = self.cmd.run('--add', 'Example', 'bitergia.com')
self.assertEqual(code, CODE_ALREADY_EXISTS_ERROR)
output = sys.stderr.getvalue().strip()
self.assertEqual(output, REGISTRY_DOM_ALREADY_EXISTS_ERROR)
code = self.cmd.run('--add', '--overwrite', 'Example', 'bitergia.com')
self.assertEqual(code, CMD_SUCCESS)
self.cmd.run('-l')
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT_ALT)
def test_delete_with_args(self):
"""Test delete action"""
# Delete contents
code = self.cmd.run('--delete', 'Bitergia', 'bitergia.com')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('-d', 'LibreSoft')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('--delete', 'Bitergia')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.run('-d', 'Example', 'example.org')
self.assertEqual(code, CMD_SUCCESS)
self.cmd.run('--list')
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT_EXAMPLE_ALT)
def test_delete_without_args(self):
"""Check when calling --delete without args, it does not do anything"""
code = self.cmd.run('--delete')
self.assertEqual(code, CMD_SUCCESS)
self.cmd.run('-l')
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT)
def test_run_mixing_actions(self):
"""Check how it works when mixing actions"""
# Remove pre-loaded dataset
self.db.clear()
self.cmd.run('--add', 'LibreSoft')
self.cmd.run('-a', 'LibreSoft', 'libresoft.es')
self.cmd.run('-a', 'Example')
self.cmd.run('--add', 'Example', 'example.org')
self.cmd.run('-d', 'Example', 'example.org')
self.cmd.run('--add', 'Bitergia')
self.cmd.run('-a', 'Bitergia', 'bitergia.net')
self.cmd.run('--delete', 'LibreSoft')
self.cmd.run('--add', 'Example', 'example.com')
self.cmd.run('--add', 'Bitergia', 'bitergia.com')
self.cmd.run('-a', 'Example', 'example.net')
self.cmd.run('--delete', 'Bitergia', 'bitergia.com')
self.cmd.run('-d', 'Bitergia')
self.cmd.run()
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT_EXAMPLE_ALT)
class TestOrgsAdd(TestOrgsCaseBase):
def test_add(self):
"""Check whether everything works ok when adding organizations and domains"""
code = self.cmd.add('Example')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.add('Example', 'example.com')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.add('Bitergia')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.add('Bitergia', 'bitergia.net')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.add('Bitergia', 'bitergia.com', is_top_domain=True)
self.assertEqual(code, CMD_SUCCESS)
        code = self.cmd.add('LibreSoft', '') # This will work like adding an organization
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.add('Example', 'example.org')
self.assertEqual(code, CMD_SUCCESS)
code = self.cmd.add('Example', 'example.net')
self.assertEqual(code, CMD_SUCCESS)
# List the registry and check the output
self.cmd.registry()
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT)
def test_existing_organization(self):
"""Check if it fails adding an organization that already exists"""
code1 = self.cmd.add('Bitergium')
self.assertEqual(code1, CMD_SUCCESS)
code2 = self.cmd.add('Bitergium')
self.assertEqual(code2, CODE_ALREADY_EXISTS_ERROR)
output = sys.stderr.getvalue().strip()
self.assertEqual(output, REGISTRY_ORG_ALREADY_EXISTS_ERROR)
def test_non_existing_organization(self):
"""Check if it fails adding domains to not existing organizations"""
code = self.cmd.add('Bitergium', 'bitergium.com')
self.assertEqual(code, CODE_NOT_FOUND_ERROR)
output = sys.stderr.getvalue().strip()
self.assertEqual(output, REGISTRY_ORG_NOT_FOUND_ERROR)
def test_existing_domain(self):
"""Check if it fails adding a domain that already exists"""
# Add a pair of organizations and domains first
self.cmd.add('Example')
self.cmd.add('Example', 'example.com')
self.cmd.add('Bitergia')
self.cmd.add('Bitergia', 'bitergia.com')
# Add 'bitergia.com' to 'Example' org
# It should print an error
code = self.cmd.add('Example', 'bitergia.com')
self.assertEqual(code, CODE_ALREADY_EXISTS_ERROR)
output = sys.stderr.getvalue().strip()
self.assertEqual(output, REGISTRY_DOM_ALREADY_EXISTS_ERROR)
def test_overwrite_domain(self):
"""Check whether it overwrites the old organization-domain relationship
and the top_domain flag"""
# Add a pair of organizations and domains first
self.cmd.add('Example')
self.cmd.add('Example', 'example.com')
self.cmd.add('Example', 'example.org')
self.cmd.add('Bitergia')
self.cmd.add('Bitergia', 'bitergia.com')
# Overwrite the relationship assigning the domain to a different
# company and top_domain flag
code = self.cmd.add('Bitergia', 'example.com',
is_top_domain=True, overwrite=True)
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_EMPTY_OUTPUT)
# Check if the domain has been assigned to Bitergia
orgs = api.registry(self.db)
org1 = orgs[0]
self.assertEqual(org1.name, 'Bitergia')
doms1 = org1.domains
doms1.sort(key=lambda x: x.domain)
self.assertEqual(len(doms1), 2)
dom = doms1[0]
self.assertEqual(dom.domain, 'bitergia.com')
dom = doms1[1]
self.assertEqual(dom.domain, 'example.com')
self.assertEqual(dom.is_top_domain, True)
org2 = orgs[1]
self.assertEqual(org2.name, 'Example')
doms2 = org2.domains
doms2.sort(key=lambda x: x.domain)
self.assertEqual(len(doms2), 1)
dom1 = doms2[0]
self.assertEqual(dom1.domain, 'example.org')
def test_none_organization(self):
"""Check behavior adding None organizations"""
code = self.cmd.add(None)
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_EMPTY_OUTPUT)
# The registry should be empty
orgs = api.registry(self.db)
self.assertEqual(len(orgs), 0)
def test_empty_organization(self):
"""Check behavior adding empty organizations"""
code = self.cmd.add('')
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_EMPTY_OUTPUT)
# The registry should be empty
orgs = api.registry(self.db)
self.assertEqual(len(orgs), 0)
class TestOrgsDelete(TestOrgsCaseBase):
def test_delete(self):
"""Check whether everything works ok when deleting organizations and domains"""
# First, add a set of organizations, including some domains
self.cmd.add('Example')
self.cmd.add('Example', 'example.com')
self.cmd.add('Example', 'example.org')
self.cmd.add('Example', 'example.net')
self.cmd.add('Bitergia')
self.cmd.add('Bitergia', 'bitergia.com')
self.cmd.add('LibreSoft')
self.cmd.add('Bitergium')
self.cmd.add('Bitergium', 'bitergium.com')
self.cmd.add('Bitergium', 'bitergium.net')
# Delete an organization
orgs = api.registry(self.db, 'Bitergia')
self.assertEqual(len(orgs), 1)
code = self.cmd.delete('Bitergia')
self.assertEqual(code, CMD_SUCCESS)
self.assertRaises(NotFoundError, api.registry,
self.db, 'Bitergia')
# Delete a domain
orgs = api.registry(self.db, 'Bitergium')
self.assertEqual(len(orgs[0].domains), 2)
code = self.cmd.delete('Bitergium', 'bitergium.com')
self.assertEqual(code, CMD_SUCCESS)
orgs = api.registry(self.db, 'Bitergium')
self.assertEqual(len(orgs[0].domains), 1)
# Delete organization with several domains
orgs = api.registry(self.db, 'Example')
self.assertEqual(len(orgs), 1)
code = self.cmd.delete('Example')
self.assertEqual(code, CMD_SUCCESS)
self.assertRaises(NotFoundError, api.registry,
self.db, 'Example')
# The final content of the registry should have
# two companies and one domain
orgs = api.registry(self.db)
self.assertEqual(len(orgs), 2)
org1 = orgs[0]
self.assertEqual(org1.name, 'Bitergium')
doms1 = org1.domains
self.assertEqual(len(doms1), 1)
self.assertEqual(doms1[0].domain, 'bitergium.net')
org2 = orgs[1]
self.assertEqual(org2.name, 'LibreSoft')
doms2 = org2.domains
self.assertEqual(len(doms2), 0)
def test_not_found_organization(self):
"""Check if it fails removing an organization that does not exists"""
# It should print an error when the registry is empty
code = self.cmd.delete('Bitergium')
self.assertEqual(code, CODE_NOT_FOUND_ERROR)
output = sys.stderr.getvalue().strip().split('\n')[0]
self.assertEqual(output, REGISTRY_ORG_NOT_FOUND_ERROR)
# Add a pair of organizations to check delete with a registry
# with contents
self.cmd.add('Example')
self.cmd.add('Bitergia')
self.cmd.add('Bitergia', 'bitergia.com')
# The error should be the same
code = self.cmd.delete('Bitergium')
self.assertEqual(code, CODE_NOT_FOUND_ERROR)
output = sys.stderr.getvalue().strip('\n').split('\n')[-1]
self.assertEqual(output, REGISTRY_ORG_NOT_FOUND_ERROR)
# It fails again, when trying to delete a domain from
        # an organization that does not exist
code = self.cmd.delete('LibreSoft', 'bitergium.com')
self.assertEqual(code, CODE_NOT_FOUND_ERROR)
output = sys.stderr.getvalue().strip('\n').split('\n')[-1]
self.assertEqual(output, REGISTRY_ORG_NOT_FOUND_ERROR_ALT)
# Nothing has been deleted from the registry
orgs = api.registry(self.db)
self.assertEqual(len(orgs), 2)
self.assertEqual(len(orgs[0].domains), 1)
self.assertEqual(len(orgs[1].domains), 0)
def test_not_found_domain(self):
"""Check if it fails removing an domain that does not exists"""
# Add a pair of organizations to check delete with a registry
# with contents
self.cmd.add('Example')
self.cmd.add('Bitergia')
self.cmd.add('Bitergia', 'bitergia.com')
code = self.cmd.delete('Example', 'example.com')
self.assertEqual(code, CODE_NOT_FOUND_ERROR)
output = sys.stderr.getvalue().strip().split('\n')[0]
self.assertEqual(output, REGISTRY_DOM_NOT_FOUND_ERROR)
        # It should also fail when the domain is assigned
        # to another organization
code = self.cmd.delete('Example', 'bitergia.com')
self.assertEqual(code, CODE_NOT_FOUND_ERROR)
output = sys.stderr.getvalue().strip().split('\n')[-1]
self.assertEqual(output, REGISTRY_DOM_NOT_FOUND_ERROR_ALT)
# Nothing has been deleted from the registry
orgs = api.registry(self.db)
self.assertEqual(len(orgs), 2)
self.assertEqual(len(orgs[0].domains), 1)
self.assertEqual(len(orgs[1].domains), 0)
class TestOrgsRegistry(TestOrgsCaseBase):
def load_test_dataset(self):
api.add_organization(self.db, 'Example')
api.add_domain(self.db, 'Example', 'example.com')
api.add_domain(self.db, 'Example', 'example.org')
api.add_domain(self.db, 'Example', 'example.net')
api.add_organization(self.db, 'Bitergia')
api.add_domain(self.db, 'Bitergia', 'bitergia.net')
api.add_domain(self.db, 'Bitergia', 'bitergia.com', is_top_domain=True)
api.add_organization(self.db, 'LibreSoft')
def test_registry(self):
"""Check registry output list"""
code = self.cmd.registry()
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT)
def test_registry_term(self):
"""Check if it returns the info about orgs using a search term"""
# Add an extra organization first
api.add_organization(self.db, 'MyExample')
api.add_domain(self.db, 'MyExample', 'myexample.com')
code = self.cmd.registry('Example')
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_OUTPUT_EXAMPLE)
def test_not_found_term(self):
"""Check whether it prints an error for not existing organizations"""
code = self.cmd.registry('Bitergium')
self.assertEqual(code, CODE_NOT_FOUND_ERROR)
output = sys.stderr.getvalue().strip()
self.assertEqual(output, REGISTRY_ORG_NOT_FOUND_ERROR)
def test_empty_registry(self):
"""Check output when the registry is empty"""
# Delete the contents of the database
self.db.clear()
code = self.cmd.registry()
self.assertEqual(code, CMD_SUCCESS)
output = sys.stdout.getvalue().strip()
self.assertEqual(output, REGISTRY_EMPTY_OUTPUT)
if __name__ == "__main__":
unittest.main(buffer=True, exit=False)
| gpl-3.0 | -8,525,998,975,963,093,000 | 34.087034 | 100 | 0.63253 | false |
SwarmTheVision/immutable | app/models.py | 1 | 6070 | from app import db
from datetime import datetime
from flask_bcrypt import generate_password_hash, check_password_hash
import shortuuid
class User(db.Model):
__tablename__ = "users"
id = db.Column('id',db.Integer , primary_key=True)
name = db.Column('name', db.String(200), index=True)
username = db.Column('username', db.String(200), unique=True , index=True)
    # NOTE: bcrypt hashes are 60 characters long; String(10) would truncate
    # them on length-enforcing backends, so the column is widened here.
    password = db.Column('password', db.String(128))
email = db.Column('email',db.String(50), unique=True , index=True)
created = db.Column('created' , db.DateTime)
modified = db.Column('modified' , db.DateTime)
birthday = db.Column('birthday' , db.DateTime)
birthplace = db.Column('birthplace',db.String(200))
avatar = db.Column('avatar',db.String(200))
parent_id = db.Column(db.Integer, db.ForeignKey('users.id'))
tree_level = db.Column('tree_level',db.Integer)
def __init__(self , username ,password , email):
self.username = username
pw_hash = generate_password_hash(password).decode('utf-8')
self.password = pw_hash
self.email = email
self.created = datetime.utcnow()
self.modified = datetime.utcnow()
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
return self.id
# return self.email
def __repr__(self):
return '<User %r>' % (self.username)
class UserPath(db.Model):
__tablename__ = "user_paths"
id = db.Column('id',db.Integer , primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
node_id = db.Column(db.Integer, db.ForeignKey('users.id'))
tree_level = db.Column('tree_level',db.Integer)
user = db.relationship(User, foreign_keys=[user_id])
node = db.relationship(User, foreign_keys=[node_id])
def __init__(self, user_id, node_id, tree_level):
self.user_id = user_id
self.node_id = node_id
self.tree_level = tree_level
def get_id(self):
return self.id
class SponsorRequest(db.Model):
__tablename__ = "sponsor_requests"
id = db.Column('id',db.Integer , primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
child_id = db.Column(db.Integer, db.ForeignKey('users.id'))
created = db.Column('created' , db.DateTime)
modified = db.Column('modified' , db.DateTime)
status = db.Column('status', db.Enum('waiting','approved', 'denied'))
user = db.relationship(User, foreign_keys=[user_id])
child = db.relationship(User, foreign_keys=[child_id])
def __init__(self, user_id, child_id):
self.user_id = user_id
self.child_id = child_id
self.status = 'waiting'
self.created = datetime.utcnow()
self.modified = datetime.utcnow()
def get_id(self):
return self.id
class Invite(db.Model):
__tablename__ = "invites"
id = db.Column('id',db.Integer , primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
email = db.Column('email',db.String(200))
created = db.Column('created' , db.DateTime)
modified = db.Column('modified' , db.DateTime)
status = db.Column('status', db.Enum('sent', 'opened', 'joined', 'canceled'))
code = db.Column('code',db.String(200))
user = db.relationship(User)
def __init__(self, user_id, email):
self.user_id = user_id
self.email = email
self.status = 'sent'
self.created = datetime.utcnow()
self.modified = datetime.utcnow()
self.code = str(shortuuid.uuid()[:8])
def get_id(self):
return self.id
class Avatar(db.Model):
__tablename__ = "avatars"
id = db.Column('id',db.Integer , primary_key=True)
created = db.Column('created' , db.DateTime)
modified = db.Column('modified' , db.DateTime)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
avatar_id = db.Column('avatar_id', db.Integer)
user = db.relationship(User)
def __init__(self):
self.created = datetime.utcnow()
self.modified = datetime.utcnow()
def get_id(self):
return self.id
def __repr__(self):
return '<Avatar %r>' % (self.avatar_id)
class Topic(db.Model):
__tablename__ = "topics"
id = db.Column('id',db.Integer , primary_key=True)
topic_text = db.Column('topic_text', db.Text, index=True)
created = db.Column('created' , db.DateTime)
modified = db.Column('modified' , db.DateTime)
creator_id = db.Column(db.Integer, db.ForeignKey('users.id'))
num_threads = db.Column('num_threads',db.Integer)
num_posts = db.Column('num_posts',db.Integer)
num_followers = db.Column('num_followers',db.Integer)
creator = db.relationship(User)
def __init__(self, topic_text):
self.topic_text = topic_text
self.created = datetime.utcnow()
self.modified = datetime.utcnow()
def get_id(self):
return self.id
def __repr__(self):
return '<Topic %r>' % (self.topic_text)
class Post(db.Model):
__tablename__ = "posts"
id = db.Column('id',db.Integer , primary_key=True)
post = db.Column('post' , db.Text)
created = db.Column('created' , db.DateTime)
modified = db.Column('modified' , db.DateTime)
topic_text = db.Column('topic_text', db.Text, index=True)
topic_id = db.Column(db.Integer, db.ForeignKey('topics.id'))
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
num_children = db.Column('num_children',db.Integer)
tree_level = db.Column('tree_level',db.Integer)
root_id = db.Column(db.Integer, db.ForeignKey('posts.id'))
parent_id = db.Column(db.Integer, db.ForeignKey('posts.id'))
second_id = db.Column(db.Integer, db.ForeignKey('posts.id'))
user = db.relationship(User)
def __init__(self, post):
self.post = post
self.created = datetime.utcnow()
def get_id(self):
return self.id
def __repr__(self):
return '<Post %r>' % (self.post)
| gpl-3.0 | 8,475,430,820,194,779,000 | 32.910615 | 81 | 0.621911 | false |
zhlinh/leetcode | 0063.Unique Paths II/solution.py | 1 | 1306 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: [email protected]
Version: 0.0.1
Created Time: 2016-02-22
Last_modify: 2016-02-22
******************************************
'''
'''
Follow up for "Unique Paths":
Now consider if some obstacles are added to the grids.
How many unique paths would there be?
An obstacle and empty space is marked as 1 and 0 respectively in the grid.
For example,
There is one obstacle in the middle of a 3x3 grid as illustrated below.
[
[0,0,0],
[0,1,0],
[0,0,0]
]
The total number of unique paths is 2.
Note: m and n will be at most 100.
'''
class Solution(object):
def uniquePathsWithObstacles(self, obstacleGrid):
"""
:type obstacleGrid: List[List[int]]
:rtype: int
"""
if not obstacleGrid or obstacleGrid[0][0] == 1:
return 0
m = len(obstacleGrid)
n = len(obstacleGrid[0])
dp = [[0 for _ in range(n+1)] for __ in range(m+1)]
# let the entry dp[1][1] be 1 later
dp[0][1] = 1
for i in range(1, m+1):
for j in range(1, n+1):
if not obstacleGrid[i-1][j-1]:
dp[i][j] = dp[i-1][j] + dp[i][j-1]
return dp[m][n]
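# A minimal self-check (added sketch, not part of the original LeetCode file),
# using the example grid from the docstring above.
if __name__ == '__main__':
    grid = [[0, 0, 0],
            [0, 1, 0],
            [0, 0, 0]]
    # two unique paths go around the single obstacle in the middle
    assert Solution().uniquePathsWithObstacles(grid) == 2
    print("ok")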
| apache-2.0 | -5,948,935,491,585,923,000 | 24.607843 | 74 | 0.527565 | false |
tijko/fdbus | fdbus/exceptions/exceptions.py | 1 | 8401 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from ..fdbus_h import *
class SocketError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.socket
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.socket error: %s" % self.msg
class ListenError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.listen
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.listen error: %s" % self.msg
class BindError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.bind
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.bind error: %s" % self.msg
class AcceptError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.accept
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.accept error: %s" % self.msg
class SendmsgError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.sendmsg
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.sendmsg error: %s" % self.msg
class SendError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.send
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.send error: %s" % self.msg
class ConnectError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.connect
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.connect error: %s" % self.msg
class RecvError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.recv
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.recv.error: %s" % self.msg
class RecvmsgError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.recvmsg
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.recvmsg error: %s" % self.msg
class ReadError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.read
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.read error: %s" % self.msg
class WriteError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.write
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.write error: %s" % self.msg
class OpenError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.open
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.open error: %s" % self.msg
class CloseError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.close
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.close error: %s" % self.msg
class LseekError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.lseek
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.lseek error: %s" % self.msg
class StatError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.stat
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.stat error: %s" % self.msg
class FileDescriptorError(Exception):
"""
Exception class raised in the event of an error returned from a call to
a FileDescriptor class method.
    This will be propagated from the above libc calls down to the callee.
"""
    # XXX: pass a more descriptive error for further propagation
def __init__(self, fdobj):
self.fdobj = fdobj
def __str__(self):
return "FileDescriptor error: %s" % repr(self.fdobj)
class MsghdrError(Exception):
"""
Exception class raised in the event of an error returned from a call to
a msghdr class.
This error is from supplying a file descriptor argument without a
command argument to specify an action the server is to take.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Msghdr error: %s" % self.msg
class InvalidCmdError(Exception):
"""
Exception class raised in the event of an invalid cmd passed through
a protocol message.
This error is from supplying a protocol function call with an invalid
command argument to specify an action the caller is to take.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "InvalidCmdError error: %s" % self.msg
class InvalidProtoError(Exception):
"""
Exception class raised in the event of an invalid protocol passed
through a protocol message.
This error is from supplying a protocol function call with an invalid
protocol argument to specify an action the caller is to take.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "InvalidProtoError error: %s" % self.msg
class UnlinkError(Exception):
"""
Exception class raised in the event of an error returned from a call to
libc.unlink.
The msg is propagated from the system call giving a more descriptive
reason for the fail.
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "libc.unlink error: %s" % self.msg
class UnknownDescriptorError(Exception):
"""
Exception class raised in the event of an error returned from a call to
a method that handles file descriptors.
    This error occurs whenever a reference to an unknown descriptor is made
    (i.e. it has not been created yet or has been removed).
"""
def __init__(self, name):
self.name = name
def __str__(self):
return "UnknownDescriptor: %s" % self.name
| mit | -4,846,153,122,253,493,000 | 22.336111 | 79 | 0.60588 | false |
laslabs/geospatial | geoengine_stock_quant/geo_stock_quant.py | 1 | 3227 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Dave Lasley
# Copyright: 2015 LasLabs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#from openerp import models, fields, api
import logging
from openerp.osv import fields, osv
from openerp.addons.base_geoengine import geo_model
from openerp.addons.base_geoengine import fields as geo_fields
_logger = logging.getLogger(__name__)
class StockQuant(geo_model.GeoModel):
"""Add geo_point to stock.quant """
_inherit = "stock.quant"
geo_point = geo_fields.GeoPoint('Address Coordinate',
readonly=True, store=True)
class StockMove(osv.osv):
_inherit = 'stock.move'
def write(self, cr, uid, ids, vals, context=None):
super(StockMove, self).write(cr, uid, ids, vals, context)
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'done':
if move.location_dest_id.usage == 'customer':
for quant in move.quant_ids:
                        _logger.info('Customer location quant geo_point - %s', quant.id)
try:
quant.geo_point = move.partner_id.geo_point
except Exception as e:
_logger.error(
'Unable to apply geo for customer %s\n%s\n',
move.partner_id.id, e
)
elif move.location_dest_id.usage == 'internal':
for quant in move.quant_ids:
_logger.info('Internal location quant geo_point - %s', quant.id)
try:
quant.geo_point = move.location_dest_id.partner_id.geo_point
except Exception as e:
_logger.error(
'Unable to apply geo for location %s\n%s\n',
move.location_dest_id.id, e
)
else:
_logger.error(
'Location type %s not implemented for geo tracking',
move.location_dest_id.usage
)
| agpl-3.0 | -501,661,473,294,209,340 | 38.851852 | 88 | 0.489619 | false |
ianblenke/awsebcli | ebcli/bundled/botocore/endpoint.py | 1 | 14636 | # Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import logging
import time
import threading
from botocore.vendored.requests.sessions import Session
from botocore.vendored.requests.utils import get_environ_proxies
from botocore.vendored import six
from botocore.exceptions import UnknownEndpointError
from botocore.awsrequest import AWSRequest
from botocore.compat import urljoin, filter_ssl_san_warnings
from botocore.utils import percent_encode_sequence
from botocore.hooks import first_non_none_response
from botocore.response import StreamingBody
from botocore import parsers
logger = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 60
NOT_SET = object()
filter_ssl_san_warnings()
def convert_to_response_dict(http_response, operation_model):
"""Convert an HTTP response object to a request dict.
This converts the requests library's HTTP response object to
a dictionary.
:type http_response: botocore.vendored.requests.model.Response
:param http_response: The HTTP response from an AWS service request.
:rtype: dict
:return: A response dictionary which will contain the following keys:
* headers (dict)
* status_code (int)
* body (string or file-like object)
"""
response_dict = {
'headers': http_response.headers,
'status_code': http_response.status_code,
}
if response_dict['status_code'] >= 300:
response_dict['body'] = http_response.content
elif operation_model.has_streaming_output:
response_dict['body'] = StreamingBody(
http_response.raw, response_dict['headers'].get('content-length'))
else:
response_dict['body'] = http_response.content
return response_dict
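# For illustration (hypothetical values, not part of the original module),
# a non-streaming 200 response would be converted to something like:
#   {'headers': {'content-type': 'application/x-amz-json-1.1', ...},
#    'status_code': 200,
#    'body': b'{"TableNames": []}'}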
class PreserveAuthSession(Session):
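    # Explanatory note (not in the original source): requests' default
    # Session.rebuild_auth strips the Authorization header on cross-host
    # redirects; overriding it with a no-op preserves the AWS signature.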
def rebuild_auth(self, prepared_request, response):
pass
class Endpoint(object):
"""
Represents an endpoint for a particular service in a specific
region. Only an endpoint can make requests.
:ivar service: The Service object that describes this endpoints
service.
:ivar host: The fully qualified endpoint hostname.
:ivar session: The session object.
"""
def __init__(self, region_name, host, user_agent,
endpoint_prefix, event_emitter, proxies=None, verify=True,
timeout=DEFAULT_TIMEOUT, response_parser_factory=None):
self._endpoint_prefix = endpoint_prefix
self._event_emitter = event_emitter
self._user_agent = user_agent
self.region_name = region_name
self.host = host
self.verify = verify
if proxies is None:
proxies = {}
self.proxies = proxies
self.http_session = PreserveAuthSession()
self.timeout = timeout
self._lock = threading.Lock()
if response_parser_factory is None:
response_parser_factory = parsers.ResponseParserFactory()
self._response_parser_factory = response_parser_factory
def __repr__(self):
return '%s(%s)' % (self._endpoint_prefix, self.host)
def make_request(self, operation_model, request_dict):
logger.debug("Making request for %s (verify_ssl=%s) with params: %s",
operation_model, self.verify, request_dict)
return self._send_request(request_dict, operation_model)
def create_request(self, params, operation_model=None):
request = self._create_request_object(params)
if operation_model:
event_name = 'request-created.{endpoint_prefix}.{op_name}'.format(
endpoint_prefix=self._endpoint_prefix,
op_name=operation_model.name)
self._event_emitter.emit(event_name, request=request,
operation_name=operation_model.name)
prepared_request = self.prepare_request(request)
return prepared_request
def _create_request_object(self, request_dict):
r = request_dict
user_agent = self._user_agent
headers = r['headers']
headers['User-Agent'] = user_agent
url = urljoin(self.host, r['url_path'])
if r['query_string']:
encoded_query_string = percent_encode_sequence(r['query_string'])
if '?' not in url:
url += '?%s' % encoded_query_string
else:
url += '&%s' % encoded_query_string
request = AWSRequest(method=r['method'], url=url,
data=r['body'],
headers=headers)
return request
def _encode_headers(self, headers):
# In place encoding of headers to utf-8 if they are unicode.
for key, value in headers.items():
if isinstance(value, six.text_type):
headers[key] = value.encode('utf-8')
def prepare_request(self, request):
self._encode_headers(request.headers)
return request.prepare()
def _send_request(self, request_dict, operation_model):
attempts = 1
request = self.create_request(request_dict, operation_model)
success_response, exception = self._get_response(
request, operation_model, attempts)
while self._needs_retry(attempts, operation_model,
success_response, exception):
attempts += 1
# If there is a stream associated with the request, we need
# to reset it before attempting to send the request again.
# This will ensure that we resend the entire contents of the
# body.
request.reset_stream()
# Create a new request when retried (including a new signature).
request = self.create_request(
request_dict, operation_model=operation_model)
success_response, exception = self._get_response(
request, operation_model, attempts)
if exception is not None:
raise exception
else:
return success_response
def _get_response(self, request, operation_model, attempts):
# This will return a tuple of (success_response, exception)
# and success_response is itself a tuple of
# (http_response, parsed_dict).
# If an exception occurs then the success_response is None.
# If no exception occurs then exception is None.
try:
logger.debug("Sending http request: %s", request)
http_response = self.http_session.send(
request, verify=self.verify,
stream=operation_model.has_streaming_output,
proxies=self.proxies, timeout=self.timeout)
except Exception as e:
logger.debug("Exception received when sending HTTP request.",
exc_info=True)
return (None, e)
# This returns the http_response and the parsed_data.
response_dict = convert_to_response_dict(http_response,
operation_model)
parser = self._response_parser_factory.create_parser(
operation_model.metadata['protocol'])
return ((http_response, parser.parse(response_dict,
operation_model.output_shape)),
None)
def _needs_retry(self, attempts, operation_model, response=None,
caught_exception=None):
event_name = 'needs-retry.%s.%s' % (self._endpoint_prefix,
operation_model.name)
responses = self._event_emitter.emit(
event_name, response=response, endpoint=self,
operation=operation_model, attempts=attempts,
caught_exception=caught_exception)
handler_response = first_non_none_response(responses)
if handler_response is None:
return False
else:
# Request needs to be retried, and we need to sleep
# for the specified number of times.
logger.debug("Response received to retry, sleeping for "
"%s seconds", handler_response)
time.sleep(handler_response)
return True
def _get_proxies(url):
# We could also support getting proxies from a config file,
# but for now proxy support is taken from the environment.
return get_environ_proxies(url)
def get_endpoint(service, region_name, endpoint_url, verify=None):
service_name = getattr(service, 'signing_name', service.endpoint_prefix)
endpoint_prefix = service.endpoint_prefix
session = service.session
event_emitter = session.get_component('event_emitter')
user_agent = session.user_agent()
return get_endpoint_complex(service_name, endpoint_prefix,
region_name, endpoint_url, verify, user_agent,
event_emitter)
def get_endpoint_complex(service_name, endpoint_prefix,
region_name, endpoint_url, verify,
user_agent, event_emitter,
response_parser_factory=None):
proxies = _get_proxies(endpoint_url)
verify = _get_verify_value(verify)
return Endpoint(
region_name, endpoint_url,
user_agent=user_agent,
endpoint_prefix=endpoint_prefix,
event_emitter=event_emitter,
proxies=proxies,
verify=verify,
response_parser_factory=response_parser_factory)
def _get_verify_value(verify):
# This is to account for:
# https://github.com/kennethreitz/requests/issues/1436
# where we need to honor REQUESTS_CA_BUNDLE because we're creating our
# own request objects.
# First, if verify is not None, then the user explicitly specified
# a value so this automatically wins.
if verify is not None:
return verify
# Otherwise use the value from REQUESTS_CA_BUNDLE, or default to
# True if the env var does not exist.
return os.environ.get('REQUESTS_CA_BUNDLE', True)
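# For illustration (hypothetical values, not part of the original module):
# with REQUESTS_CA_BUNDLE=/etc/ssl/ca.pem in the environment,
# _get_verify_value(None) returns '/etc/ssl/ca.pem', while an explicit
# verify=False still wins and disables certificate validation.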
class EndpointCreator(object):
def __init__(self, endpoint_resolver, configured_region, event_emitter,
user_agent):
self._endpoint_resolver = endpoint_resolver
self._configured_region = configured_region
self._event_emitter = event_emitter
self._user_agent = user_agent
def create_endpoint(self, service_model, region_name=None, is_secure=True,
endpoint_url=None, verify=None, credentials=None,
response_parser_factory=None,
signature_version=NOT_SET):
if region_name is None:
region_name = self._configured_region
# Use the endpoint resolver heuristics to build the endpoint url.
scheme = 'https' if is_secure else 'http'
try:
endpoint = self._endpoint_resolver.construct_endpoint(
service_model.endpoint_prefix,
region_name, scheme=scheme)
except UnknownEndpointError:
if endpoint_url is not None:
# If the user provides an endpoint_url, it's ok
# if the heuristics didn't find anything. We use the
# user provided endpoint_url.
endpoint = {'uri': endpoint_url, 'properties': {}}
else:
raise
region_name = self._determine_region_name(endpoint, region_name,
endpoint_url)
if endpoint_url is not None:
# If the user provides an endpoint url, we'll use that
# instead of what the heuristics rule gives us.
final_endpoint_url = endpoint_url
else:
final_endpoint_url = endpoint['uri']
return self._get_endpoint(service_model, region_name,
final_endpoint_url, verify,
response_parser_factory)
def _determine_region_name(self, endpoint_config, region_name=None,
endpoint_url=None):
# This is a helper function to determine region name to use.
# It will take into account whether the user passes in a region
# name, whether their is a rule in the endpoint JSON, or
# an endpoint url was provided.
# TODO: Once we completely move to clients. We will remove region
# as public attribute from endpoints and as a result move this helper
# function to clients becuase region is really only important for
# signing.
# We only support the credentialScope.region in the properties
# bag right now, so if it's available, it will override the
# provided region name.
region_name_override = endpoint_config['properties'].get(
'credentialScope', {}).get('region')
if endpoint_url is not None:
# If an endpoint_url is provided, do not use region name
# override if a region
# was provided by the user.
if region_name is not None:
region_name_override = None
if region_name_override is not None:
# Letting the heuristics rule override the region_name
# allows for having a default region of something like us-west-2
# for IAM, but we still will know to use us-east-1 for sigv4.
region_name = region_name_override
return region_name
def _get_endpoint(self, service_model, region_name, endpoint_url,
verify, response_parser_factory):
service_name = service_model.signing_name
endpoint_prefix = service_model.endpoint_prefix
        user_agent = self._user_agent
        event_emitter = self._event_emitter
return get_endpoint_complex(service_name, endpoint_prefix,
region_name, endpoint_url,
verify, user_agent, event_emitter,
response_parser_factory)
| apache-2.0 | -788,345,907,564,210,200 | 40.817143 | 78 | 0.619637 | false |
kozlowsm/Python | missingElementFinder.py | 1 | 1646 | import collections
# O(n log n) solution
def finder1(arr1,arr2):
arr1.sort()
arr2.sort()
#zip the arrays together to create tuples
for num1, num2 in zip(arr1,arr2):
#(1,1), (2,2), (3,4) - 3 is missing
#(1,1), (2,2), (3,3) (4,) - 4 is missing last element arr[-1]
if num1 != num2:
return num1
return arr1[-1]
# O(n) solution
def finder2(arr1, arr2):
#default dictionary, if the key doesnt exist, do not throw error
d = collections.defaultdict(int)
#count how many times each element shows up
for num in arr2:
d[num] += 1
#if 0 return num otherwise subtract 1
for num in arr1:
if d[num] == 0:
return num
else:
d[num] -= 1
# O(n) solution
#not the best approach numerically: summing can overflow (in languages
#with fixed-size ints) or lose precision if the numbers are floating point
def finder3(arr1, arr2):
sum1 = sum(arr1)
sum2 = sum(arr2)
missing = sum1 - sum2
return missing
# O(n) solution
#exclusive or XOR bit comparison
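#every value that appears in both arrays cancels itself out (x ^ x == 0),
#so the XOR of all the elements leaves exactly the missing number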
def finder4(arr1, arr2):
result = 0
for num in arr1+arr2:
result^= num
#print(result)
return result
# O(n) solution
def finder5(arr1, arr2):
count = {}
for num in arr1:
if str(num) not in count:
count[str(num)] = 1
for num in arr2:
if str(num) in count:
count[str(num)] += 1
for key, value in count.items():
if value != 2:
return int(key)
#testing solutions
lst1 = [i for i in range(1,21)]
lst2 = random.sample(lst1, len(lst1))
del lst2[random.randint(0,len(lst1)-1)]
print(finder1(lst1, lst2))
| mit | -2,510,109,725,343,111,700 | 20.946667 | 69 | 0.575942 | false |
appendif/microera | partner_protect_data/res_partner.py | 1 | 1376 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 MicroEra s.r.l.
# (<http://www.microera.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
class res_partner(osv.Model):
_inherit = "res.partner"
_columns = {
'protect_data': fields.boolean('Protect data', required=False,
groups="base.group_no_one,base.group_erp_manager,base.group_priviliged_users"),
}
_defaults = {'protect_data': lambda *a: 0}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -6,973,759,221,105,904,000 | 37.222222 | 118 | 0.600291 | false |
AmmsA/django-resumator | tests/test_settings.py | 1 | 1175 | import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DEBUG = True
ROOT_URLCONF = 'theresumator.urls'
SECRET_KEY = 'fake-key'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
"tests",
"resumator",
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'TEST_NAME': ':memory:',
}
}
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
MEDIA_ROOT = os.path.join(BASE_DIR, 'site_media')
MEDIA_URL = '/'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
'/var/www/static/',
)
| mit | -6,961,679,835,662,842,000 | 22.5 | 70 | 0.57617 | false |
AltSchool/django-scheduler | schedule/settings.py | 1 | 6376 | # Django settings for paquetin project.
from __future__ import absolute_import
import os
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'schedule.db', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = PROJECT_PATH + '/media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = '' # os.path.join(PROJECT_PATH, 'assets/')
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_PATH, 'assets/'),
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '1-%gfd@@8l$8r=ck_7^dy5_x!a0f5%qfj@ix#!xig(_2zq&b&2'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'schedule.urls'
TEMPLATES = (
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'DIRS': os.path.join(PROJECT_PATH, 'templates'),
},
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
'schedule',
# 'django_nose',
# 'django_coverage',
# 'south'
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'django': {
'handlers': ['console'],
'propagate': True,
'level': 'DEBUG',
},
}
}
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
# Tell nose to measure coverage on the 'foo' and 'bar' apps
NOSE_ARGS = [
'--with-coverage',
'--cover-package=schedule',
]
| bsd-3-clause | 4,367,741,345,235,764,700 | 31.530612 | 127 | 0.656211 | false |
702nADOS/sumo | tools/edgesInDistricts.py | 1 | 8190 | #!/usr/bin/env python
"""
@file edgesInDistricts.py
@author Daniel Krajzewicz
@author Michael Behrisch
@author Jakob Erdmann
@date 2007-07-26
@version $Id: edgesInDistricts.py 22608 2017-01-17 06:28:54Z behrisch $
Parsing a number of networks and taz (district) files with shapes
this script writes a taz file with all the edges which are inside
the relevant taz.
SUMO, Simulation of Urban MObility; see http://sumo.dlr.de/
Copyright (C) 2007-2017 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
from __future__ import print_function
from __future__ import absolute_import
import sys
import collections
from optparse import OptionParser
from xml.sax import parse
import sumolib
import random
# Computes the mapping between districts and edges, which is later
# written into the net. All members are "private".
class DistrictEdgeComputer:
def __init__(self, net):
self._net = net
self._districtEdges = collections.defaultdict(list)
self._edgeDistricts = collections.defaultdict(list)
self._invalidatedEdges = set()
def computeWithin(self, polygons, options):
districtBoxes = {}
for district in polygons:
districtBoxes[district.id] = district.getBoundingBox()
for idx, edge in enumerate(self._net.getEdges()):
shape = edge.getShape()
if edge.getSpeed() < options.maxspeed and edge.getSpeed() > options.minspeed and (options.internal or edge.getFunction() != "internal"):
if options.vclass is None or edge.allows(options.vclass):
if options.assign_from:
xmin, ymin = shape[0]
xmax, ymax = shape[0]
else:
xmin, ymin, xmax, ymax = edge.getBoundingBox()
for district in polygons:
dxmin, dymin, dxmax, dymax = districtBoxes[district.id]
if dxmin <= xmax and dymin <= ymax and dxmax >= xmin and dymax >= ymin:
if options.assign_from:
if sumolib.geomhelper.isWithin(shape[0], district.shape):
self._districtEdges[district].append(edge)
self._edgeDistricts[edge].append(district)
break
else:
for pos in shape:
if sumolib.geomhelper.isWithin(pos, district.shape):
self._districtEdges[
district].append(edge)
self._edgeDistricts[
edge].append(district)
break
if options.verbose and idx % 100 == 0:
sys.stdout.write("%s/%s\r" % (idx, len(self._net.getEdges())))
if options.complete:
for edge in self._edgeDistricts:
if len(self._edgeDistricts[edge]) > 1:
self._invalidatedEdges.add(edge)
def getEdgeDistrictMap(self):
result = {}
        for edge, districts in self._edgeDistricts.items():
if len(districts) == 1:
result[edge] = districts[0]
return result
def writeResults(self, options):
fd = open(options.output, "w")
fd.write("<tazs>\n")
for district, edges in sorted(self._districtEdges.items()):
filtered = [
edge for edge in edges if edge not in self._invalidatedEdges]
if len(filtered) == 0:
print("District '" + district.id + "' has no edges!")
else:
if options.weighted:
if options.shapeinfo:
fd.write(' <taz id="%s" shape="%s">\n' %
(district.id, district.getShapeString()))
else:
fd.write(' <taz id="%s">\n' % district.id)
for edge in filtered:
weight = edge.getSpeed() * edge.getLength()
fd.write(
' <tazSource id="%s" weight="%.2f"/>\n' % (edge.getID(), weight))
fd.write(
' <tazSink id="%s" weight="%.2f"/>\n' % (edge.getID(), weight))
fd.write(" </taz>\n")
else:
if options.shapeinfo:
fd.write(' <taz id="%s" shape="%s" edges="%s"/>\n' %
(district.id, district.getShapeString(), " ".join([e.getID() for e in filtered])))
else:
fd.write(' <taz id="%s" edges="%s"/>\n' %
(district.id, " ".join([e.getID() for e in filtered])))
fd.write("</tazs>\n")
fd.close()
def getTotalLength(self, edgeID):
edge = self._net.getEdge(edgeID)
return edge.getLength() * edge.getLaneNumber()
def fillOptions(optParser):
optParser.add_option("-v", "--verbose", action="store_true",
default=False, help="tell me what you are doing")
optParser.add_option("-c", "--complete", action="store_true",
default=False, help="assign edges only if they are not in more than one district")
optParser.add_option("-n", "--net-file",
help="read SUMO network from FILE (mandatory)", metavar="FILE")
optParser.add_option("-t", "--taz-files",
help="read districts from FILEs", metavar="FILE")
optParser.add_option("-o", "--output", default="districts.taz.xml",
help="write results to FILE (default: %default)", metavar="FILE")
optParser.add_option("-x", "--max-speed", type="float", dest="maxspeed",
default=1000.0, help="use lanes where speed is not greater than this (m/s) (default: %default)")
optParser.add_option("-m", "--min-speed", type="float", dest="minspeed",
default=0., help="use lanes where speed is greater than this (m/s) (default: %default)")
optParser.add_option("-w", "--weighted", action="store_true",
default=False, help="Weights sources/sinks by lane number and length")
optParser.add_option("-f", "--assign-from", action="store_true",
default=False, help="Assign the edge always to the district where the \"from\" node is located")
optParser.add_option("-i", "--internal", action="store_true",
default=False, help="Include internal edges in output")
optParser.add_option(
"-l", "--vclass", help="Include only edges allowing VCLASS")
optParser.add_option("-s", "--shapeinfo", action="store_true",
default=False, help="write also the shape info in the file")
if __name__ == "__main__":
optParser = OptionParser()
fillOptions(optParser)
(options, args) = optParser.parse_args()
if not options.net_file:
optParser.print_help()
optParser.exit("Error! Providing a network is mandatory")
if options.verbose:
print("Reading net '" + options.net_file + "'")
nets = options.net_file.split(",")
if len(nets) > 1:
print(
"Warning! Multiple networks specified. Parsing the first one for edges and tazs, the others for taz only.")
reader = DistrictEdgeComputer(sumolib.net.readNet(nets[0]))
tazFiles = nets + options.taz_files.split(",")
polyReader = sumolib.shapes.polygon.PolygonReader(True)
for tf in tazFiles:
parse(tf, polyReader)
if options.verbose:
print("Calculating")
reader.computeWithin(polyReader.getPolygons(), options)
if options.verbose:
print("Writing results")
reader.writeResults(options)
| gpl-3.0 | 132,449,332,623,357,950 | 46.34104 | 148 | 0.54823 | false |
joselamego/patchwork | patchwork/migrations/0016_add_delegation_rule_model.py | 1 | 1084 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('patchwork', '0015_remove_version_n_patches'),
]
operations = [
migrations.CreateModel(
name='DelegationRule',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('path', models.CharField(max_length=255)),
('priority', models.IntegerField(default=0)),
('project', models.ForeignKey(to='patchwork.Project')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-priority', 'path'],
},
),
migrations.AlterUniqueTogether(
name='delegationrule',
unique_together=set([('path', 'project')]),
),
]
| gpl-2.0 | 7,435,913,786,337,504,000 | 31.848485 | 114 | 0.563653 | false |
thetoine/eruditorg | erudit/apps/public/journal/urls_compat.py | 1 | 1861 | # -*- coding: utf-8 -*-
from django.conf.urls import url
from django.views.generic import RedirectView
from base.views import DummyView
unsupported_patterns = [
r'^iderudit/(?P<code>[\w-]+)$',
r'^revue/(?P<code>[\w-]+)/rss.xml$',
r'^revue/(?P<code>[\w-]+)/(?P<year>\d{4})/(?P<v>[\w-]+)/(?P<issue>[\w-]+)/?$',
r'^revue/(?P<code>[\w-]+)/(?P<year>\d{4})/(?P<v>[\w-]+)/(?P<issue>[\w-]+)/index.html?$',
(r'^revue/(?P<code>[\w-]+)/(?P<year>\d{4})/(?P<v>[\w-]+)/' +
     r'(?P<issue>[\w-]+)/(?P<article>[\w-]+).html?$'),
r'^revue/(?P<code>[\w-]+)/auteurs.html?$',
r'^revue/(?P<code>[\w-]+)/thematique.html?$',
r'^revue/(?P<code>[\w-]+)/apropos.html?$',
r'^revue/redirection/(?P<code>[\w-]+)/',
r'^culture/(?P<code>[\w-]+)/$',
r'^culture/(?P<code>[\w-]+)/index.html?$',
r'^culture/(?P<code>[\w-]+)/rss.xml$',
r'^culture/(?P<code>[\w-]+)/(?P<year>\d{4})/(?P<v>[\w-]+)/(?P<issue>[\w-]+)/?$',
r'^culture/(?P<code>[\w-]+)/(?P<year>\d{4})/(?P<v>[\w-]+)/(?P<issue>[\w-]+)/index.html?$',
(r'^culture/(?P<code>[\w-]+)/(?P<year>\d{4})/(?P<v>[\w-]+)/' +
     r'(?P<issue>[\w-]+)/(?P<article>[\w-]+).html?$'),
r'^culture/(?P<code>[\w-]+)/auteurs.html?$',
r'^culture/(?P<code>[\w-]+)/thematique.html?$',
r'^feuilletage/index.html?$',
r'^feuilletage_(?P<code1>[\w-]+)\.(?P<code2>[\w-]+)@(?P<id>[0-9]+)$',
(r'^feuilletage_(?P<code1>[\w-]+)\.(?P<code2>[\w-]+)@(?P<id>[0-9]+)'
     r'(?:\&(?P<key>[\w-]+)=(?P<val>[\w-]+))*$'),
]
urlpatterns = [
url(r'^revue/(?P<code>[\w-]+)/$',
RedirectView.as_view(pattern_name='journal:journal-detail', permanent=True)),
url(r'^revue/(?P<code>[\w-]+)/index.html?$',
RedirectView.as_view(pattern_name='journal:journal-detail', permanent=True)),
]
urlpatterns += [url(pattern_re, DummyView.as_view()) for pattern_re in unsupported_patterns]
| gpl-3.0 | -6,712,453,348,256,577,000 | 40.355556 | 94 | 0.496507 | false |
truemped/dopplr | dopplr/solr/query/dismax.py | 1 | 1819 | # vim: set fileencoding=utf-8 :
#
# Copyright (c) 2012 Retresco GmbH
# Copyright (c) 2011 Daniel Truemper <truemped at googlemail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
"""
The Solr (e)DisMax query.
For more information see: http://wiki.apache.org/solr/DisMaxQParserPlugin
"""
from dopplr.basequery import BaseQuery
class DisMax(BaseQuery):
"""
The dismax query.
"""
def __init__(self, qf, alt=None, mm=None, pf=None, ps=None, qs=None,
tie=None, bq=None, bf=None, edismax=True):
"""
Initialize the query values.
"""
self.__qf = qf
if edismax:
self.__deftype = 'edismax'
else:
self.__deftype = 'dismax'
self.__optional_params = {
'q.alt': alt,
'mm': mm,
'pf': pf,
'ps': ps,
'qs': qs,
'tie': tie,
'bq': bq,
'bf': bf
}
def get_params(self):
"""
Return the list of query params.
"""
params = []
params.append(('defType', self.__deftype))
params.append(('qf', self.__qf))
for p in self.__optional_params:
if self.__optional_params[p]:
params.append((p, self.__optional_params[p]))
return params
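# Hedged usage sketch (hypothetical field names, not part of the original module):
#
#   q = DisMax(qf='title^2 body', mm='75%', tie=0.1)
#   q.get_params()
#   # -> [('defType', 'edismax'), ('qf', 'title^2 body'), ...]
#   # (the remaining optional params follow in dict iteration order)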
| apache-2.0 | -4,771,037,861,120,512,000 | 26.984615 | 74 | 0.582188 | false |
labase/superpython | src/server/controllers/mig_util.py | 1 | 3989 | #! /usr/bin/env python
# -*- coding: UTF8 -*-
# This file is part of the SuperPython program
# Copyright 2013-2015 Carlo Oliveira <[email protected]>,
# `Labase <http://labase.selfip.org/>`__; `GPL <http://is.gd/3Udt>`__.
#
# SuperPython is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation (FSF), in version 2 of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
"""Controller handles routes starting with /RESOURCE_NAME.
Change this file's name and contents as appropriate to the
resources your app exposes to clients.
"""
__author__ = 'carlo'
import urllib
import json
from time import sleep
# USERS = "alpha bravo charlie echo golf hotel india" \
# " kilo lima mike november oscar papa quebec sierra uniform zulu".split()
USERS = "aries touro gemeos cancer leao virgem libra escorpiao sagitario capricornio aquario peixes".split()
class Util:
def __init__(self, user, passwd="nceufrj"):
self._token = 0
self._baseURL = "http://pyschool.net/"
self.user = user
self.passwd = passwd
def login(self):
        sleep(0.5)
        # send userid/password
        _userid = self.user
        _password = self.passwd
        _url = "http://pyschool.net/Auth?userid=%s&password=%s" % (_userid, _password)
        _fp = urllib.urlopen(_url)
        _data = _fp.read()
        _json = json.loads(_data)
        if _json['status'] == 'Okay':
            # import remote_storage_fs
            # fs = remote_storage_fs.GoogleDataStorage("/pyschool")
            self._token = _json['token']
            _cookie = 'token=%s|login_type=authenticate' % _json['token']
            return self
        print(self.user, _json)
        return self
def _remote_call(self, data):
# console.log("remote call", data)
data['token'] = self._token # add in token to call
_json = json.dumps({'data': data})
try:
_fp = urllib.urlopen(self._baseURL + "FS", _json)
return json.loads(_fp.read()) # returns a string (in json format)
except:
return {'status': 'Error',
'message': 'Network connectivity issues'}
def _read_file(self, filename):
""" retrieves file from storage, returns fileobj if successful,
return None if unsuccessful
"""
_json = self._remote_call({'command': 'read_file', 'filename': filename})
try:
# _f = FileObject.FileObject()
# _f=FileSystemBase.FileObject()
# _f = json.load(_json['fileobj'])#["contents"]
_f = _json['fileobj'] # ["contents"]
_f = json.loads(_f)[u'contents']
return {'status': 'Okay', 'fileobj': _f}
except Exception as e:
return {'status': 'Error', 'message': str(e), 'json': _json, 'file': filename}
def read_main_file(self, filename):
""" retrieves file from storage, returns fileobj if successful,
return None if unsuccessful
"""
try:
cnt = self._read_file("/pyschool/%s" % filename)
return cnt['fileobj']
except Exception as e:
return {'status': 'Error', 'message': str(e), 'json': cnt}
ps = Util('carlo', 'labase')
print(ps.login()._token)
#USERS =["alpha"]
files = [Util(fl).login().read_main_file("%s/%s.py" % (fl, "gripe")) for fl in USERS]
# print(ps._read_file("/pyschool/adv.py")['fileobj'])
for f in files:
print(f)
| gpl-2.0 | 7,276,171,206,063,403,000 | 35.018182 | 108 | 0.595406 | false |
allan-simon/xmpp-conformance-suite | xep-0045/iq_admin_without_item.py | 1 | 2036 | from sleekxmpp.exceptions import IqError
from sleekxmpp.exceptions import IqTimeout
from ConformanceUtils import init_test
from ConformanceUtils import print_test_description
from JoinMUCBot import JoinTestMUCBot
from config import SECOND_BOT
from config import SECOND_BOT_JID
from config import ROOM_JID
#TODO still need to add little more test to see if the set role
# is actually effective
class EchoBot(JoinTestMUCBot):
def __init__(self, jid, password, nick):
JoinTestMUCBot.__init__(self, jid, password, nick)
self.add_event_handler("got_offline", self.got_offline)
def other_participant_online(self, msg):
try:
self.make_set_role_iq(childtag="NOT-ITEM", role="none").send()
print("[fail]")
except IqError as e:
isCancel = e.iq['error']['type'] == 'cancel'
isBadRequest = e.iq['error']['condition'] == 'bad-request'
if isCancel and isBadRequest :
print("[pass]")
else:
print("[fail]")
except IqTimeout:
print("[fail]")
self.send_message(
mto=ROOM_JID,
mbody="disconnect %s" % SECOND_BOT,
mtype='groupchat'
)
def got_offline(self, presence):
# when the second disconnect we disconnect to
if presence['from'].bare == SECOND_BOT_JID:
self.disconnect()
class SecondBot(JoinTestMUCBot):
def __init__(self, jid, password, nick):
JoinTestMUCBot.__init__(self, jid, password, nick)
self.add_event_handler("groupchat_message", self.muc_message)
def muc_message(self, msg):
if msg['body'] == 'disconnect %s' % SECOND_BOT:
self.disconnect()
if __name__ == '__main__':
print_test_description(
"An admin iq with something different than a 'item' tag as child " +
"of query should return a bad-request error ..."
)
init_test(
class_first_bot = EchoBot,
class_second_bot = SecondBot
)
| mit | 5,037,214,174,934,995,000 | 27.676056 | 76 | 0.610511 | false |
UndeadBlow/giraffe | src/giraffe/tests.py | 2 | 10673 | from django.test import TestCase
from django.core.urlresolvers import reverse
from hippo.models import Feature, Feature_Type, Feature_Database
import json
from giraffe.features import blast
class BlastTest(TestCase):
def setUp(self):
ft_gene = Feature_Type(type='Gene')
ft_gene.save()
self.dna = 'ATTGCGGATCGCGAATGCGATCG'
self.pro = 'MKKKAPSI'
self.pro_dna = 'ATGAAGAAGAAAGCACCAAGCATA'
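        # pro_dna is the DNA sequence encoding self.pro (MKKKAPSI)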
self.feature1 = Feature(type=ft_gene, name='G1', sequence=self.dna)
self.feature1.as_dna()
self.feature1.save()
self.feature2 = Feature(type=ft_gene, name='G2', sequence=self.pro)
self.feature2.as_protein()
self.feature2.save()
self.feature_db = Feature_Database(name='test')
self.feature_db.save()
self.feature_db.features.add(self.feature1, self.feature2)
def test_blast(self):
self.feature_db.build()
query = 'G'*100+self.dna+'A'*40
feature_list = blast(query, self.feature_db)
self.assertEquals(len(feature_list), 1)
self.assertEquals(feature_list[0].name, self.feature1.name)
self.assertEquals(feature_list[0].query_start, 101)
self.assertEquals(feature_list[0].query_end, 100+len(self.dna))
self.assertEquals(feature_list[0].subject_start, 1)
self.assertEquals(feature_list[0].subject_end, len(self.dna))
def test_blast_evalue_threshold(self):
self.feature_db.build()
query = 'G'*100+self.dna+'A'*40
feature_list = blast(query, self.feature_db)
self.assertEquals(len(feature_list), 1)
feature_list = blast(query, self.feature_db, evalue_threshold=1E-50)
self.assertEquals(len(feature_list), 0)
def test_blast_identity_threshold(self):
self.feature_db.build()
q = self.dna
# make two changes
q = q[0:3]+'C'+q[4:6]+'C'+q[7:]
self.assertEquals(len(q), len(self.dna))
query = 'G'*100+q+'A'*40
feature_list = blast(query, self.feature_db, identity_threshold=None)
self.assertEquals(len(feature_list), 1)
self.assertEquals(feature_list[0].query_start, 101)
self.assertEquals(feature_list[0].query_end, 100+len(self.dna))
self.assertEquals(feature_list[0].subject_start, 1)
self.assertEquals(feature_list[0].subject_end, len(self.dna))
feature_list = blast(query, self.feature_db, identity_threshold=0.99)
self.assertEquals(len(feature_list), 0)
def test_blast_feature_threshold(self):
self.feature_db.build()
p = 0.8
n = int(len(self.dna)*p)
query = 'G'*100+self.dna[0:n]+'A'*40
feature_list = blast(query, self.feature_db, feature_threshold=None)
self.assertEquals(len(feature_list), 1)
self.assertEquals(feature_list[0].query_start, 101)
self.assertEquals(feature_list[0].query_end, 100+n)
self.assertEquals(feature_list[0].subject_start, 1)
self.assertEquals(feature_list[0].subject_end, n)
feature_list = blast(query, self.feature_db, feature_threshold=p)
self.assertEquals(len(feature_list), 0)
def test_get_feature_from_blast_result(self):
self.feature_db.build()
query = 'G'*100+self.dna+'A'*40
feature_list = blast(query, self.feature_db)
self.assertEquals(len(feature_list), 1)
self.assertEquals(feature_list[0].feature_id, self.feature1.id)
def test_returns_one_result_from_across_circular_boundary(self):
self.feature_db.build()
q = 'G'*100+self.dna+'A'*40
query = q[110:]+q[0:110]
feature_list = blast(query, self.feature_db)
# if we don't remove truncated features across circular boundary, we'd see
# 2 results, one for truncated feature, one for full feature
self.assertEquals(len(feature_list), 1)
def test_returns_correct_coordinates_across_circular_boundary(self):
self.feature_db.build()
q = 'G'*100+self.dna+'A'*40
query = q[110:]+q[0:110]
feature_list = blast(query, self.feature_db)
self.assertEquals(feature_list[0].query_start, len(q)-10+1)
self.assertEquals(feature_list[0].query_end, len(self.dna)-10)
self.assertEquals(feature_list[0].subject_start, 1)
self.assertEquals(feature_list[0].subject_end, len(self.dna))
class IntegrationTest(TestCase):
def setUp(self):
ft_gene = Feature_Type(type='Gene')
ft_gene.save()
self.dna = 'ATTGCGGATCGCGAATGCGATCG'
self.pro = 'MKKKAPSI'
self.pro_dna = 'ATGAAGAAGAAAGCACCAAGCATA'
self.feature1 = Feature(type=ft_gene, name='G1', sequence=self.dna)
self.feature1.as_dna()
self.feature1.save()
self.feature2 = Feature(type=ft_gene, name='G2', sequence=self.pro)
self.feature2.as_protein()
self.feature2.save()
self.feature_db = Feature_Database(name='test')
self.feature_db.save()
self.feature_db.features.add(self.feature1, self.feature2)
def test_build_db(self):
self.feature_db.build()
def test_finds_orf_in_both_directions_and_across_boundary(self):
self.feature_db.build()
query = 'G'*100+'ATG'+'C'*3000+'TAG'+'CTA'+'G'*1800+'CAT'
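        # layout: 100 G's, a forward ORF (ATG + 3000 C's + TAG), then the
        # reverse complement of a second ORF (CTA + 1800 G's + CAT); a third
        # ORF spans the circular boundary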
data = {'db': self.feature_db.name, 'sequence': query}
url = reverse('giraffe-analyze')
resp = self.client.post(url, data)
self.assertEquals(resp.status_code, 200)
res = json.loads(resp.content)
self.assertEquals(res[0], len(query))
self.assertItemsEqual(res[1],
[
{'orf_frame': 1,
'layer': 'ORFs',
'type_id': 10,
'subject_start': 1,
'subject_end': 3+3000+3,
'query_start': 100+1,
'query_end': 100+3+3000+3,
'label': 'ORF frame 2',
'name': 'ORF frame 2'},
{'orf_frame': 0,
'layer': 'ORFs',
'type_id': 10,
'subject_start': 3+1800+3,
'subject_end': 1,
'query_start': 100+3+3000+3+1,
'query_end': 100+3+3000+3+3+1800+3,
'label': 'ORF frame 1',
'name': 'ORF frame 1'},
            # across the circular boundary: the trailing 'AT' plus the leading
            # 'G' form an ATG; this ORF then runs to the first stop codon
            # after the C*3000 stretch
{'orf_frame': 2,
'layer': 'ORFs',
'type_id': 10,
'subject_start': 1,
'subject_end': 3+99+3+3000+3,
'query_start': len(query)-2+1,
'query_end': 100+3+3000+3,
'label': 'ORF frame 3',
'name': 'ORF frame 3'}
]
)
def test_does_not_find_orf_across_boundary_if_not_in_circular_mode(self):
self.feature_db.build()
query = 'G'*100+'ATG'+'C'*3000+'TAG'+'CTA'+'G'*1800+'CAT'
data = {'db': self.feature_db.name, 'sequence': query, 'circular': 0}
url = reverse('giraffe-analyze')
resp = self.client.post(url, data)
self.assertEquals(resp.status_code, 200)
res = json.loads(resp.content)
self.assertEquals(res[0], len(query))
self.assertItemsEqual(res[1],
[
{'orf_frame': 1,
'layer': 'ORFs',
'type_id': 10,
'subject_start': 1,
'subject_end': 3+3000+3,
'query_start': 100+1,
'query_end': 100+3+3000+3,
'label': 'ORF frame 2',
'name': 'ORF frame 2'},
{'orf_frame': 0,
'layer': 'ORFs',
'type_id': 10,
'subject_start': 3+1800+3,
'subject_end': 1,
'query_start': 100+3+3000+3+1,
'query_end': 100+3+3000+3+3+1800+3,
'label': 'ORF frame 1',
'name': 'ORF frame 1'}
]
)
def test_find_tags_within_orf(self):
self.feature_db.build()
query = 'G'*100+'ATG'+'C'*3000+'CAT'*6+'TAG'
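        # 'CAT'*6 encodes six histidine codons, i.e. a 6xHIS tag inside the ORF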
data = {'db': self.feature_db.name, 'sequence': query, 'circular': 0}
url = reverse('giraffe-analyze')
resp = self.client.post(url, data)
self.assertEquals(resp.status_code, 200)
res = json.loads(resp.content)
self.assertEquals(res[0], len(query))
self.assertItemsEqual(res[1],
[
{'orf_frame': 1,
'layer': 'ORFs',
'type_id': 10,
'subject_start': 1,
'subject_end': 3+3000+3*6+3,
'query_start': 100+1,
'query_end': 100+3+3000+3*6+3,
'label': 'ORF frame 2',
'name': 'ORF frame 2'},
{'layer': 'Detected Features',
'type_id': 1,
'subject_start': 1,
'subject_end': 3*6,
'query_start': 100+3+3000+1,
'query_end': 100+3+3000+3*6,
'label': '6xHIS',
'name': '6xHIS'}
]
)
def test_blastn(self):
self.feature_db.build()
data = {'db': self.feature_db.name, 'sequence': self.dna}
url = reverse('giraffe-analyze')
resp = self.client.post(url, data)
self.assertEquals(resp.status_code, 200)
res = json.loads(resp.content)
self.assertEquals(res[0], len(self.dna))
self.assertItemsEqual(res[1],
[{"layer": "Detected Features",
"name": "G1",
"type_id": 5,
"label": "G1",
"query_start": 1,
"query_end": 23,
"subject_start": 1,
"subject_end": 23,
"evalue": 6.01355e-11,
"identities": 23,
"alignment": { "query": "ATTGCGGATCGCGAATGCGATCG",
"match": "|||||||||||||||||||||||",
"subject": "ATTGCGGATCGCGAATGCGATCG"}},
{"layer": "Restriction Enzymes",
"name": "NruI", "type_id": 4, "elucidate": "TCG^_CGA", "label": "NruI",
"query_start": 9, "cut": 11, "query_end": 14, "subject_start": 1, "subject_end": 6},
{"layer": "Restriction Enzymes",
"name": "ClaI", "type_id": 4, "elucidate": "AT^CG_AT", "label": "ClaI",
"query_start": 20, "cut": 21, "query_end": 2, "subject_start": 1, "subject_end": 6},
]
)
def test_blastx(self):
self.feature_db.build()
q = 'A'+self.pro_dna+'T'
data = {'db': self.feature_db.name, 'sequence': q}
url = reverse('giraffe-analyze')
resp = self.client.post(url, data)
self.assertEquals(resp.status_code, 200)
res = json.loads(resp.content)
self.assertEquals(res[0], len(q))
self.assertItemsEqual(res[1], [
{"layer": "Detected Features",
"query_start": 2,
"query_end": 25,
"subject_start" : 1,
"subject_end" : 8,
"name": "G2",
"type_id": 5,
"label": "G2",
"alignment": {"query": "MKKKAPSI", "match": "MKKKAPSI", "subject": "MKKKAPSI"},
"evalue": 5.42133e-5,
"identities": 8
}
])
def test_blast2(self):
data = {'subject': self.dna, 'query': self.dna[0:22]+'T'}
url = reverse('blast2')
resp = self.client.post(url, data)
self.assertEquals(resp.status_code, 200)
res = json.loads(resp.content)
self.assertItemsEqual(res, [
{ "subject_start": 1, "subject_end": 22,
"query_start": 1, "query_end": 22,
"evalue": 5.81644e-07,
"query": "ATTGCGGATCGCGAATGCGATC",
"match": "||||||||||||||||||||||",
"subject": "ATTGCGGATCGCGAATGCGATC" },
])
| mit | -2,156,599,013,070,376,000 | 30.29912 | 92 | 0.598239 | false |
cowhite/pinterest_clone_django | pinterest_clone/urls.py | 1 | 1044 | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from dajaxice.core import dajaxice_autodiscover, dajaxice_config
dajaxice_autodiscover()
from core.views import ProfileView, HomeView
urlpatterns = patterns("",
#url(r"^$", TemplateView.as_view(template_name="homepage.html"), name="home"),
url(r"^$", HomeView.as_view(), name="home"),
url(r"^admin/", include(admin.site.urls)),
url(dajaxice_config.dajaxice_url, include('dajaxice.urls')),
url(r"^account/", include("account.urls")),
url(r"^(?P<username>[a-zA-Z0-9_]+)/$", ProfileView.as_view(),
name="user-profile"),
url(r'^avatar/', include('avatar.urls')),
url(r"^", include("core.urls")),
)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
| mit | -2,700,705,664,800,419,000 | 32.677419 | 82 | 0.712644 | false |
entpy/email2SMS_idle | email2SMS_idle/idle/gmail/gmail.py | 1 | 5981 | import re, imaplib2, logging
from mailbox import Mailbox
from utf import encode as encode_utf7, decode as decode_utf7
from exceptions import *
# Get an instance of a logger
logger = logging.getLogger(__name__)
class Gmail():
# GMail IMAP defaults
GMAIL_IMAP_HOST = 'imap.gmail.com'
GMAIL_IMAP_PORT = 993
# GMail SMTP defaults
# TODO: implement SMTP functions
GMAIL_SMTP_HOST = "smtp.gmail.com"
GMAIL_SMTP_PORT = 587
def __init__(self):
self.username = None
self.password = None
self.access_token = None
self.imap = None
self.smtp = None
self.logged_in = False
self.mailboxes = {}
self.current_mailbox = None
# self.connect()
def connect(self, raise_errors=True):
# try:
# self.imap = imaplib2.IMAP4_SSL(self.GMAIL_IMAP_HOST, self.GMAIL_IMAP_PORT)
# except socket.error:
# if raise_errors:
# raise Exception('Connection failure.')
# self.imap = None
        # set a timeout for new connections
        # socket.setdefaulttimeout(1)
self.imap = imaplib2.IMAP4_SSL(self.GMAIL_IMAP_HOST, self.GMAIL_IMAP_PORT)
# self.smtp = smtplib.SMTP(self.server,self.port)
# self.smtp.set_debuglevel(self.debug)
# self.smtp.ehlo()
# self.smtp.starttls()
# self.smtp.ehlo()
return self.imap
def fetch_mailboxes(self):
response, mailbox_list = self.imap.list()
if response == 'OK':
for mailbox in mailbox_list:
mailbox_name = mailbox.split('"/"')[-1].replace('"', '').strip()
mailbox = Mailbox(self)
mailbox.external_name = mailbox_name
self.mailboxes[mailbox_name] = mailbox
def use_mailbox(self, mailbox):
if mailbox:
self.imap.select(mailbox)
self.current_mailbox = mailbox
def mailbox(self, mailbox_name):
if mailbox_name not in self.mailboxes:
mailbox_name = encode_utf7(mailbox_name)
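            # IMAP mailbox names use modified UTF-7, so encode before the lookup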
mailbox = self.mailboxes.get(mailbox_name)
if mailbox and not self.current_mailbox == mailbox_name:
self.use_mailbox(mailbox_name)
return mailbox
def create_mailbox(self, mailbox_name):
mailbox = self.mailboxes.get(mailbox_name)
if not mailbox:
self.imap.create(mailbox_name)
mailbox = Mailbox(self, mailbox_name)
self.mailboxes[mailbox_name] = mailbox
return mailbox
def delete_mailbox(self, mailbox_name):
mailbox = self.mailboxes.get(mailbox_name)
if mailbox:
self.imap.delete(mailbox_name)
del self.mailboxes[mailbox_name]
def login(self, username, password):
self.username = username
self.password = password
if not self.imap:
logger.debug("connessione via imap")
self.connect()
try:
imap_login = self.imap.login(self.username, self.password)
self.logged_in = (imap_login and imap_login[0] == 'OK')
logger.debug("logged_in status: " + str(self.logged_in))
if self.logged_in:
logger.debug("elenco di tutte le mailbox")
self.fetch_mailboxes()
else:
logger.debug("non sono connesso: " + str(imap_login[0]))
except imaplib2.IMAP4.error:
logger.error("errore di autenticazione: " + str(AuthenticationError))
raise AuthenticationError
# smtp_login(username, password)
return self.logged_in
def authenticate(self, username, access_token):
self.username = username
self.access_token = access_token
if not self.imap:
self.connect()
try:
auth_string = 'user=%s\1auth=Bearer %s\1\1' % (username, access_token)
imap_auth = self.imap.authenticate('XOAUTH2', lambda x: auth_string)
self.logged_in = (imap_auth and imap_auth[0] == 'OK')
if self.logged_in:
self.fetch_mailboxes()
except imaplib2.IMAP4.error:
raise AuthenticationError
return self.logged_in
def logout(self):
self.imap.close()
self.imap.logout()
self.logged_in = False
logger.info("Logout da IMAP effettuato con successo")
def label(self, label_name):
return self.mailbox(label_name)
def find(self, mailbox_name="[Gmail]/All Mail", **kwargs):
box = self.mailbox(mailbox_name)
return box.mail(**kwargs)
def copy(self, uid, to_mailbox, from_mailbox=None):
if from_mailbox:
self.use_mailbox(from_mailbox)
self.imap.uid('COPY', uid, to_mailbox)
def fetch_multiple_messages(self, messages):
fetch_str = ','.join(messages.keys())
response, results = self.imap.uid('FETCH', fetch_str, '(BODY.PEEK[] FLAGS X-GM-THRID X-GM-MSGID X-GM-LABELS)')
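        # imaplib returns alternating (header, body) tuples and ')' markers;
        # the regex below pulls the UID out of each header string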
for index in xrange(len(results) - 1):
raw_message = results[index]
if re.search(r'UID (\d+)', raw_message[0]):
uid = re.search(r'UID (\d+)', raw_message[0]).groups(1)[0]
messages[uid].parse(raw_message)
return messages
def labels(self, require_unicode=False):
keys = self.mailboxes.keys()
if require_unicode:
keys = [decode_utf7(key) for key in keys]
return keys
def inbox(self):
return self.mailbox("INBOX")
def spam(self):
return self.mailbox("[Gmail]/Spam")
def starred(self):
return self.mailbox("[Gmail]/Starred")
def all_mail(self):
return self.mailbox("[Gmail]/All Mail")
def sent_mail(self):
return self.mailbox("[Gmail]/Sent Mail")
def important(self):
return self.mailbox("[Gmail]/Important")
def mail_domain(self):
return self.username.split('@')[-1]
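# Minimal usage sketch (assumes a valid Gmail account; the method names are
# the ones defined above):
#   g = Gmail()
#   if g.login('user@gmail.com', 'password'):
#       inbox = g.inbox()
#       g.logout()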
| gpl-3.0 | -4,133,375,767,475,737,600 | 29.515306 | 118 | 0.587527 | false |