columns: input — string (0 to 2.17k chars) · instruction — string (18 to 2.94k chars) · output — string (47 to 3.36k chars)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import sockjsroom

# Setup library
setup(
    # Pypi name
    name = "sockjsroom",
    # Release version
    version = sockjsroom.__version__,
    # Associated package
    packages = find_packages(),
    # Author
    author = "Deisss",
    author_email = "[email protected]",
    # Package description
    description = "Sockjs-tornado multi room system",
    long_description = open('README.md').read(),
    # Require sockjs-tornado
    install_requires = ["tornado", "sockjs-tornado"],
    # Add MANIFEST.in
    include_package_data = True,
    # Github url
    url = "https://github.com/Deisss/python-sockjsroom",
    # Metadata
    classifiers=[
        "Programming Language :: Python",
        "Development Status :: 1 - Planning",
        "License :: MIT Licence",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2.6",
        "Topic :: Communications",
    ],
)
Switch to OSI license for Pypi
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import sockjsroom

# Setup library
setup(
    # Pypi name
    name = "sockjsroom",
    # Release version
    version = sockjsroom.__version__,
    # Associated package
    packages = find_packages(),
    # Author
    author = "Deisss",
    author_email = "[email protected]",
    # Package description
    description = "Sockjs-tornado multi room system",
    long_description = open('README.md').read(),
    # Require sockjs-tornado
    install_requires = ["tornado", "sockjs-tornado"],
    # Add MANIFEST.in
    include_package_data = True,
    # Github url
    url = "https://github.com/Deisss/python-sockjsroom",
    # Metadata
    classifiers=[
        "Programming Language :: Python",
        "Development Status :: 1 - Planning",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2.6",
        "Topic :: Communications",
    ],
)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import codecs

from setuptools import setup, find_packages

setup(
    name='Venom',
    version='1.0.0a1',
    packages=find_packages(exclude=['*tests*']),
    url='https://github.com/biosustain/venom',
    license='MIT',
    author='Lars Schöning',
    author_email='[email protected]',
    description='Venom is an upcoming RPC framework for Python',
    long_description=codecs.open('README.rst', encoding='utf-8').read(),
    test_suite='nose.collector',
    tests_require=[
        'aiohttp',
        'nose'
    ],
    install_requires=[],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    zip_safe=False,
    extras_require={
        'docs': ['sphinx'],
        'aiohttp': ['aiohttp'],
        'grpc': ['grpcio'],
        'zmq': ['pyzmq'],
    }
)
Add 'ujson' requirement for tests & aiohttp
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import codecs

from setuptools import setup, find_packages

setup(
    name='venom',
    version='1.0.0a1',
    packages=find_packages(exclude=['*tests*']),
    url='https://github.com/biosustain/venom',
    license='MIT',
    author='Lars Schöning',
    author_email='[email protected]',
    description='Venom is an upcoming RPC framework for Python',
    long_description=codecs.open('README.rst', encoding='utf-8').read(),
    test_suite='nose.collector',
    tests_require=[
        'aiohttp',
        'ujson',
        'nose'
    ],
    install_requires=[],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    zip_safe=False,
    extras_require={
        'docs': ['sphinx'],
        'aiohttp': ['aiohttp', 'ujson'],
        'grpc': ['grpcio'],
        'zmq': ['pyzmq'],
    }
)
import os
from distutils.core import setup


def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


def package_data(package):
    package_data = []
    for dirpath, dirnames, filenames in os.walk(
            os.path.join(os.path.dirname(__file__), package)):
        for i, dirname in enumerate(dirnames):
            if dirname.startswith('.'):
                del dirnames[i]
        if '__init__.py' in filenames:
            continue
        elif filenames:
            for f in filenames:
                package_data.append(
                    os.path.join(dirpath[len(package)+len(os.sep):], f))
    return {package: package_data}


setup(
    name='django-linkcheck',
    version='0.1.0',
    description="A Django app that will analyze and report on links in any "
                "model that you register with it.",
    long_description=read('README.rst'),
    author='Andy Baker',
    author_email='[email protected]',
    license='BSD',
    url='http://github.com/andybak/django-linkcheck/',
    packages=[
        'linkcheck',
        'linkcheck.management',
        'linkcheck.management.commands',
    ],
    package_data=package_data('linkcheck'),
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ],
)
Use static definition for package data.
import os
from distutils.core import setup


def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(
    name='django-linkcheck',
    version='0.1.0',
    description="A Django app that will analyze and report on links in any "
                "model that you register with it.",
    long_description=read('README.rst'),
    author='Andy Baker',
    author_email='[email protected]',
    license='BSD',
    url='http://github.com/andybak/django-linkcheck/',
    packages=[
        'linkcheck',
        'linkcheck.management',
        'linkcheck.management.commands',
    ],
    package_data={
        'linkcheck': [
            'templates/linkcheck/*.html',
            'templates/linkcheck/*.xhtml',
        ]
    },
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ],
)
from distutils.core import setup

from setuptools import find_packages

VERSION = '0.1.4'

setup(
    name='spockbot',
    description='High level Python framework for building Minecraft '
                'clients and bots',
    license='MIT',
    long_description=open('README.rst').read(),
    version=VERSION,
    url='https://github.com/SpockBotMC/SpockBot',
    packages=find_packages(exclude=['tests', 'tests.*']),
    install_requires=[
        'cryptography >= 0.9',
        'minecraft_data == 0.4.0',
        'six',
    ],
    keywords=['minecraft'],
    classifiers=[
        'License :: OSI Approved :: MIT License',
    ]
)
Increase version number so we can start adding 1.9 support
from distutils.core import setup

from setuptools import find_packages

VERSION = '0.2.0'

setup(
    name='spockbot',
    description='High level Python framework for building Minecraft '
                'clients and bots',
    license='MIT',
    long_description=open('README.rst').read(),
    version=VERSION,
    url='https://github.com/SpockBotMC/SpockBot',
    packages=find_packages(exclude=['tests', 'tests.*']),
    install_requires=[
        'cryptography >= 0.9',
        'minecraft_data == 0.4.0',
        'six',
    ],
    keywords=['minecraft'],
    classifiers=[
        'License :: OSI Approved :: MIT License',
    ]
)
import os
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

from pypvwatts.__version__ import VERSION

setup(
    name='pypvwatts',
    version=VERSION,
    author='Miguel Paolino',
    author_email='[email protected]',
    url='https://github.com/mpaolino/pypvwatts',
    download_url='https://github.com/mpaolino/pypvwatts/archive/master.zip',
    description='Python wrapper for NREL PVWatts\'s API.',
    long_description=open('README.md').read(),
    packages=['pypvwatts'],
    provides=['pypvwatts'],
    requires=['requests'],
    install_requires=['requests >= 2.1.0'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'License :: OSI Approved :: MIT License',
        'Topic :: Internet',
        'Topic :: Internet :: WWW/HTTP',
    ],
    keywords='nrel pvwatts pypvwatts',
    license='MIT',
)
Make sure we require at least python 2.7
import os
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

from pypvwatts.__version__ import VERSION

setup(
    name='pypvwatts',
    version=VERSION,
    author='Miguel Paolino',
    author_email='[email protected]',
    url='https://github.com/mpaolino/pypvwatts',
    download_url='https://github.com/mpaolino/pypvwatts/archive/master.zip',
    description='Python wrapper for NREL PVWatts\'s API.',
    long_description=open('README.md').read(),
    packages=['pypvwatts'],
    provides=['pypvwatts'],
    requires=['requests'],
    install_requires=['requests >= 2.1.0'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'License :: OSI Approved :: MIT License',
        'Topic :: Internet',
        'Topic :: Internet :: WWW/HTTP',
    ],
    keywords='nrel pvwatts pypvwatts',
    license='MIT',
    python_requires=">=2.7",
)
from setuptools import find_packages
import os.path as op

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

here = op.abspath(op.dirname(__file__))

# Get metadata from the AFQ/version.py file:
ver_file = op.join(here, 'AFQ', 'version.py')
with open(ver_file) as f:
    exec(f.read())

REQUIRES = []
with open(op.join(here, 'requirements.txt')) as f:
    l = f.readline()[:-1]
    while l:
        REQUIRES.append(l)
        l = f.readline()[:-1]

with open(op.join(here, 'README.md'), encoding='utf-8') as f:
    LONG_DESCRIPTION = f.read()

opts = dict(name=NAME,
            maintainer=MAINTAINER,
            maintainer_email=MAINTAINER_EMAIL,
            description=DESCRIPTION,
            long_description=LONG_DESCRIPTION,
            url=URL,
            download_url=DOWNLOAD_URL,
            license=LICENSE,
            classifiers=CLASSIFIERS,
            author=AUTHOR,
            author_email=AUTHOR_EMAIL,
            platforms=PLATFORMS,
            packages=find_packages(),
            install_requires=REQUIRES,
            scripts=SCRIPTS,
            version=VERSION,
            python_requires=PYTHON_REQUIRES)

if __name__ == '__main__':
    setup(**opts)
Make this one-char variable name a two-char.
from setuptools import find_packages
import os.path as op

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

here = op.abspath(op.dirname(__file__))

# Get metadata from the AFQ/version.py file:
ver_file = op.join(here, 'AFQ', 'version.py')
with open(ver_file) as f:
    exec(f.read())

REQUIRES = []
with open(op.join(here, 'requirements.txt')) as f:
    ll = f.readline()[:-1]
    while ll:
        REQUIRES.append(ll)
        ll = f.readline()[:-1]

with open(op.join(here, 'README.md'), encoding='utf-8') as f:
    LONG_DESCRIPTION = f.read()

opts = dict(name=NAME,
            maintainer=MAINTAINER,
            maintainer_email=MAINTAINER_EMAIL,
            description=DESCRIPTION,
            long_description=LONG_DESCRIPTION,
            url=URL,
            download_url=DOWNLOAD_URL,
            license=LICENSE,
            classifiers=CLASSIFIERS,
            author=AUTHOR,
            author_email=AUTHOR_EMAIL,
            platforms=PLATFORMS,
            packages=find_packages(),
            install_requires=REQUIRES,
            scripts=SCRIPTS,
            version=VERSION,
            python_requires=PYTHON_REQUIRES)

if __name__ == '__main__':
    setup(**opts)
from setuptools import setup

BLOCKS = [
    'mentoring = mentoring:MentoringBlock',
    'answer = mentoring:AnswerBlock',
    'quizz = mentoring:QuizzBlock',
    'tip = mentoring:QuizzTipBlock',
]

setup(
    name='xblock-mentoring',
    version='0.1',
    description='XBlock - Mentoring',
    packages=['mentoring'],
    entry_points={
        'xblock.v1': BLOCKS,
        'xmodule.v1': BLOCKS,
    }
)
Remove hack that registered the XBlock as a XModule for the LMS
from setuptools import setup

BLOCKS = [
    'mentoring = mentoring:MentoringBlock',
    'answer = mentoring:AnswerBlock',
    'quizz = mentoring:QuizzBlock',
    'tip = mentoring:QuizzTipBlock',
]

setup(
    name='xblock-mentoring',
    version='0.1',
    description='XBlock - Mentoring',
    packages=['mentoring'],
    entry_points={
        'xblock.v1': BLOCKS,
    }
)
from setuptools import setup, find_packages

setup(
    name='autobuilder',
    version='1.0.2',
    packages=find_packages(),
    license='MIT',
    author='Matt Madison',
    author_email='[email protected]',
    entry_points={
        'console_scripts': [
            'update-sstate-mirror = autobuilder.scripts.update_sstate_mirror:main',
            'update-downloads = autobuilder.scripts.update_downloads:main',
            'install-sdk = autobuilder.scripts.install_sdk:main',
            'autorev-report = autobuilder.scripts.autorev_report:main'
        ]
    },
    include_package_data=True,
    package_data={
        'autobuilder': ['templates/*.txt']
    },
    install_requires=['buildbot[tls]>=1.4.0',
                      'buildbot-worker>=1.4.0',
                      'buildbot-www>=1.4.0',
                      'buildbot-console-view>=1.4.0',
                      'buildbot-grid-view>=1.4.0',
                      'buildbot-waterfall-view>=1.4.0'
                      'buildbot-badges>=1.4.0',
                      'boto3', 'botocore',
                      'twisted']
)
Add 'treq' as a requirement for GitHubStatusPush.
from setuptools import setup, find_packages

setup(
    name='autobuilder',
    version='1.0.3',
    packages=find_packages(),
    license='MIT',
    author='Matt Madison',
    author_email='[email protected]',
    entry_points={
        'console_scripts': [
            'update-sstate-mirror = autobuilder.scripts.update_sstate_mirror:main',
            'update-downloads = autobuilder.scripts.update_downloads:main',
            'install-sdk = autobuilder.scripts.install_sdk:main',
            'autorev-report = autobuilder.scripts.autorev_report:main'
        ]
    },
    include_package_data=True,
    package_data={
        'autobuilder': ['templates/*.txt']
    },
    install_requires=['buildbot[tls]>=1.4.0',
                      'buildbot-worker>=1.4.0',
                      'buildbot-www>=1.4.0',
                      'buildbot-console-view>=1.4.0',
                      'buildbot-grid-view>=1.4.0',
                      'buildbot-waterfall-view>=1.4.0'
                      'buildbot-badges>=1.4.0',
                      'boto3', 'botocore',
                      'treq',
                      'twisted']
)
#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'pid'

if TRAJECTORY == 'linear':
    SIMULATION_TIME_IN_SECONDS = 80.0
elif TRAJECTORY == 'circular':
    SIMULATION_TIME_IN_SECONDS = 120.0
elif TRAJECTORY == 'squared':
    SIMULATION_TIME_IN_SECONDS = 160.0

DELTA_T = 0.1  # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)

# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90

# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
Change simulation time of linear trajectory to 60 seconds
#!/usr/bin/env python
TRAJECTORY = 'linear'
CONTROLLER = 'pid'

if TRAJECTORY == 'linear':
    SIMULATION_TIME_IN_SECONDS = 60.0
elif TRAJECTORY == 'circular':
    SIMULATION_TIME_IN_SECONDS = 120.0
elif TRAJECTORY == 'squared':
    SIMULATION_TIME_IN_SECONDS = 160.0

DELTA_T = 0.1  # this is the sampling time
STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T)

# control constants
K_X = 0.90
K_Y = 0.90
K_THETA = 0.90

# PID control constants
K_P_V = 0.2
K_I_V = 1.905
K_D_V = 0.00
K_P_W = 0.45
K_I_W = 1.25
K_D_W = 0.000
from st2actions.runners.pythonrunner import Action

import requests


class TravisCI(Action):
    def __init__(self, config):
        super(TravisCI, self).__init__(config)

    def _init_header(self):
        travis_header = {
            'User_Agent': self.config['User-Agent'],
            'Accept': self.config['Accept'],
            'Host': self.config['Host'],
        }
        return travis_header

    def _auth_header(self):
        _HEADERS = self._init_header()
        _HEADERS['Authorization'] = self.config["Authorization"]
        _HEADERS['Content-Type'] = self.config["Content-Type"]
        return _HEADERS

    def _perform_request(self, uri, method, data=None, requires_auth=False):
        if method == "GET":
            if requires_auth:
                _HEADERS = self._auth_header()
            else:
                _HEADERS = self._init_header()
            response = requests.get(uri, headers=_HEADERS)
        elif method == "POST":
            _HEADERS = self._auth_header
            response = requests.post(uri, headers=_HEADERS)
        elif method == "PUT":
            _HEADERS = self._auth_header()
            _HEADERS['Authorization'] = self.config["Authorization"]
            _HEADERS['Content-Type'] = self.config["Content-Type"]
            response = requests.put(uri, data=data, headers=_HEADERS)
        return response
Remove unnecessary values from the config - those should just be constants.
import requests

from st2actions.runners.pythonrunner import Action

API_URL = 'https://api.travis-ci.org'
HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json'
HEADERS_HOST = ''


class TravisCI(Action):
    def __init__(self, config):
        super(TravisCI, self).__init__(config)

    def _get_auth_headers(self):
        headers = {}
        headers['Authorization'] = self.config["Authorization"]
        headers['Content-Type'] = self.config["Content-Type"]
        return headers

    def _perform_request(self, uri, method, data=None, requires_auth=False):
        if method == "GET":
            if requires_auth:
                headers = self._get_auth_headers()
            else:
                headers = {}
            response = requests.get(uri, headers=headers)
        elif method == 'POST':
            headers = self._get_auth_headers()
            response = requests.post(uri, headers=headers)
        elif method == 'PUT':
            headers = self._get_auth_headers()
            response = requests.put(uri, data=data, headers=headers)
        return response
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
from okupy.login.views import *

admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'okupy.views.home', name='home'),
    # url(r'^okupy/', include('okupy.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
    url(r'^$', mylogin)
)
Remove comments, properly enable admin panel
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
from okupy.login.views import *

admin.autodiscover()

urlpatterns = patterns('',
    url(r'^$', mylogin),
    url(r'^admin/', include(admin.site.urls)),
)
from devbot import chat


def call(message: str, name, protocol, cfg, commands):
    if ' ' in message:
        chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
        return
    chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
    chat.say_wrap('/msg {}'.format(message),
                  'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
    chat.say('/nlip GlobalChat {}'.format(message))
Fix missing command crash with gadd
from devbot import chat


def call(message: str, name, protocol, cfg, commands):
    if message is '':
        chat.say('/msg {} {}'.format(name, commands['help']['globaladd'].format('globaladd')))
        return
    if ' ' in message:
        chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
        return
    chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
    chat.say_wrap('/msg {}'.format(message),
                  'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
    chat.say('/nlip GlobalChat {}'.format(message))
# -*- coding: utf-8 -*-
import os
import sys
from robot.variables import GLOBAL_VARIABLES
from robot.api import logger
from keywordgroup import KeywordGroup

class _LoggingKeywords(KeywordGroup):

    # Private

    def _debug(self, message):
        logger.debug(message)

    def _get_log_dir(self):
        logfile = GLOBAL_VARIABLES['${LOG FILE}']
        if logfile != 'NONE':
            return os.path.dirname(logfile)
        return GLOBAL_VARIABLES['${OUTPUTDIR}']

    def _html(self, message):
        logger.info(message, True, False)

    def _info(self, message):
        logger.info(message)

    def _log(self, message, level='INFO'):
        level = level.upper()
        if (level == 'INFO'):
            self._info(message)
        elif (level == 'DEBUG'):
            self._debug(message)
        elif (level == 'WARN'):
            self._warn(message)
        elif (level == 'HTML'):
            self._html(message)

    def _log_list(self, items, what='item'):
        msg = ['Altogether %d %s%s.' % (len(items), what, ['s',''][len(items)==1])]
        for index, item in enumerate(items):
            msg.append('%d: %s' % (index+1, item))
        self._info('\n'.join(msg))
        return items

    def _warn(self, message):
        logger.warn(message)
Update BuiltIn library reference for RF 2.9 compatibility
# -*- coding: utf-8 -*-
import os
import sys
from robot.libraries.BuiltIn import BuiltIn
from robot.api import logger
from keywordgroup import KeywordGroup

class _LoggingKeywords(KeywordGroup):

    # Private

    def _debug(self, message):
        logger.debug(message)

    def _get_log_dir(self):
        variables = BuiltIn().get_variables()
        logfile = variables['${LOG FILE}']
        if logfile != 'NONE':
            return os.path.dirname(logfile)
        return variables['${OUTPUTDIR}']

    def _html(self, message):
        logger.info(message, True, False)

    def _info(self, message):
        logger.info(message)

    def _log(self, message, level='INFO'):
        level = level.upper()
        if (level == 'INFO'):
            self._info(message)
        elif (level == 'DEBUG'):
            self._debug(message)
        elif (level == 'WARN'):
            self._warn(message)
        elif (level == 'HTML'):
            self._html(message)

    def _log_list(self, items, what='item'):
        msg = ['Altogether %d %s%s.' % (len(items), what, ['s',''][len(items)==1])]
        for index, item in enumerate(items):
            msg.append('%d: %s' % (index+1, item))
        self._info('\n'.join(msg))
        return items

    def _warn(self, message):
        logger.warn(message)
"""Views for Zinnia entries""" from django.views.generic.dates import BaseDateDetailView from zinnia.models.entry import Entry from zinnia.views.mixins.archives import ArchiveMixin from zinnia.views.mixins.entry_protection import EntryProtectionMixin from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin class EntryDateDetail(ArchiveMixin, EntryArchiveTemplateResponseMixin, CallableQuerysetMixin, BaseDateDetailView): """ Mixin combinating: - ArchiveMixin configuration centralizing conf for archive views - EntryArchiveTemplateResponseMixin to provide a custom templates depending on the date - BaseDateDetailView to retrieve the entry with date and slug - CallableQueryMixin to defer the execution of the *queryset* property when imported """ queryset = Entry.published.on_site class EntryDetail(EntryProtectionMixin, EntryDateDetail): """ Detailled view archive view for an Entry with password and login protections. """
Implement the EntryPreviewMixin in the EntryDetail view
"""Views for Zinnia entries""" from django.views.generic.dates import BaseDateDetailView from zinnia.models.entry import Entry from zinnia.views.mixins.archives import ArchiveMixin from zinnia.views.mixins.entry_preview import EntryPreviewMixin from zinnia.views.mixins.entry_protection import EntryProtectionMixin from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin class EntryDateDetail(ArchiveMixin, EntryArchiveTemplateResponseMixin, CallableQuerysetMixin, BaseDateDetailView): """ Mixin combinating: - ArchiveMixin configuration centralizing conf for archive views - EntryArchiveTemplateResponseMixin to provide a custom templates depending on the date - BaseDateDetailView to retrieve the entry with date and slug - CallableQueryMixin to defer the execution of the *queryset* property when imported """ queryset = Entry.published.on_site class EntryDetail(EntryPreviewMixin, EntryProtectionMixin, EntryDateDetail): """ Detailled archive view for an Entry with password and login protections and restricted preview. """
from . import settings, utils

from django.conf import settings as django_settings


class Config(object):
    """
    The global config wrapper that handles the backend.
    """

    def __init__(self):
        super(Config, self).__setattr__(
            '_backend', utils.import_module_attr(settings.BACKEND)())

    def __getattr__(self, key):
        try:
            default = settings.CONFIG[key][0]
        except KeyError:
            raise AttributeError(key)
        result = self._backend.get(key)

        # use Django settings as primary source of default
        # for example DEBUG if is in django settings will be set as default
        if hasattr(django_settings, key):
            return getattr(django_settings, key, result)

        return result or default

    def __setattr__(self, key, value):
        if key not in settings.CONFIG:
            raise AttributeError(key)
        self._backend.set(key, value)

    def __dir__(self):
        return settings.CONFIG.keys()
Use leonardo settings instead of django settings.
from . import settings, utils

from leonardo import settings as django_settings


class Config(object):
    """
    The global config wrapper that handles the backend.
    """

    def __init__(self):
        super(Config, self).__setattr__(
            '_backend', utils.import_module_attr(settings.BACKEND)())

    def get_default(self, key):
        try:
            default = settings.CONFIG[key][0]
        except KeyError:
            raise AttributeError(key)
        return default

    def __getattr__(self, key):
        result = self._backend.get(key)

        # use Django settings as primary source of default
        # for example DEBUG if is in django settings will be set as default
        return getattr(django_settings, key, result or self.get_default(key))

    def __setattr__(self, key, value):
        if key not in settings.CONFIG:
            raise AttributeError(key)
        self._backend.set(key, value)

    def __dir__(self):
        return settings.CONFIG.keys()
class BaseBackend(object):
    """
    A base class for backend plugins.
    """

    def __init__(self, config):
        pass

    def reset(self):
        """
        Resets the training data to a blank slate.
        """
        raise NotImplementedError()

    def get_key(self, classifier, key, default=None):
        """
        Gets the value held by the classifier, key composite key.
        If it doesn't exist, return default.
        """
        raise NotImplementedError()

    def get_key_list(self, classifier, keys, default=None):
        """
        Given a list of key, classifier pairs get all values.
        If key, classifier doesn't exist, return default.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.get_key(classifier, key, default) for key in keys]

    def set_key_list(self, classifier, key_value_pairs):
        """
        Given a list of pairs of key, value and a classifier set them all.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.set_key(classifier, key, value)
                for key, value in key_value_pairs]

    def set_key(self, classifier, key, value):
        """
        Set the value held by the classifier, key composite key.
        """
        raise NotImplementedError()
Add classifier type to the base class
from itertools import izip, repeat


class BaseBackend(object):
    """
    A base class for backend plugins.
    """

    def __init__(self, config):
        pass

    def reset(self):
        """
        Resets the training data to a blank slate.
        """
        raise NotImplementedError()

    def get_key(self, classification_type, classifier, key, default=None):
        """
        Gets the value held by the classifier, key composite key.
        If it doesn't exist, return default.
        """
        raise NotImplementedError()

    def get_key_list(self, classification_type, classifier, keys, default=None):
        """
        Given a list of key, classifier pairs get all values.
        If key, classifier doesn't exist, return default.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.get_key(classification_type, classifier, key, default)
                for classifier, key in izip(repeat(classifier), keys)]

    def set_key_list(self, classification_type, classifier, key_value_pairs):
        """
        Given a list of pairs of key, value and a classifier set them all.
        Subclasses can override this to make more efficient queries for bulk
        requests.
        """
        return [self.set_key(classification_type, classifier, key, value)
                for classifier, (key, value) in izip(repeat(classifier),
                                                     key_value_pairs)]

    def set_key(self, classification_type, classifier, key, value):
        """
        Set the value held by the classifier, key composite key.
        """
        raise NotImplementedError()
class OneOf:
    def __init__(self, members):
        self.members = members

    def __call__(self, candidate):
        if candidate in self.members:
            return True
        return "%s not in %s" % (candidate, self.members)

    def __repr__(self):
        return "one of %s" % ', '.join(self.members)


def oneof(*members):
    return OneOf(members)


class InRange:
    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, candidate):
        if self.start <= candidate <= self.end:
            return True
        return "%s not between %s and %s" % (candidate, self.start, self.end)

    def __repr__(self):
        return "between %s and %s" % (self.start, self.end)


def inrange(start, end):
    return InRange(start, end)
Fix problem rendering oneof() predicate when the members aren't strings
class OneOf:
    def __init__(self, members):
        self.members = members

    def __call__(self, candidate):
        if candidate in self.members:
            return True
        return "%s not in %s" % (candidate, self.members)

    def __repr__(self):
        return "one of %s" % ', '.join(map(repr, self.members))


def oneof(*members):
    return OneOf(members)


class InRange:
    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, candidate):
        if self.start <= candidate <= self.end:
            return True
        return "%s not between %s and %s" % (candidate, self.start, self.end)

    def __repr__(self):
        return "between %s and %s" % (self.start, self.end)


def inrange(start, end):
    return InRange(start, end)
import sys
import ctypes as _ct

if sys.version_info < (3,):
    integer_types = (int, long,)
    range = xrange
else:
    integer_types = (int,)
    long = int
    # xrange = range

C_WCHAR2CHAR = lambda x: bytes(str(x).encode('ascii'))
C_WCHAR2CHAR.__doc__ = 'Convert str to bytes'

C_CHAR2WCHAR = lambda x: str(x.decode('ascii'))
C_CHAR2WCHAR.__doc__ = 'Convert bytes to str'

C_MKSTR = lambda x: _ct.create_string_buffer(C_WCHAR2CHAR(x))
C_MKSTR.__doc__ = 'alias to ctypes.create_string_buffer, make sure bytes are provided'
Add missing rpnpy.range reference for Python 3. Signed-off-by: Stephane_Chamberland <[email protected]> (cherry picked from commit 23860277c006d9635dedcaaa5e065c7aad199d8c)
import sys
import ctypes as _ct

if sys.version_info < (3,):
    integer_types = (int, long,)
    range = xrange
else:
    integer_types = (int,)
    long = int
    range = range

C_WCHAR2CHAR = lambda x: bytes(str(x).encode('ascii'))
C_WCHAR2CHAR.__doc__ = 'Convert str to bytes'

C_CHAR2WCHAR = lambda x: str(x.decode('ascii'))
C_CHAR2WCHAR.__doc__ = 'Convert bytes to str'

C_MKSTR = lambda x: _ct.create_string_buffer(C_WCHAR2CHAR(x))
C_MKSTR.__doc__ = 'alias to ctypes.create_string_buffer, make sure bytes are provided'
Add eul2quat function. Currently only accept scalar. Need to do input checking.
import numpy as np

def eul2quat(rotAngle1,rotAngle2,rotAngle3,
             input_unit='rad',rotation_sequence='ZYX'):
    """
    """
    """
    # INPUT CHECK
    rotAngle1 = np.array(rotAngle1)
    rotAngle2 = np.array(rotAngle2)
    rotAngle3 = np.array(rotAngle3)
    if(len(rotAngle1.shape)==0):
        rotAngle1.reshape(1,)
    if(len(rotAngle2.shape)==0):
        rotAngle2.reshape(1,)
    if(len(rotAngle3.shape)==0):
        rotAngle3.reshape(1,)
    if(len(rotAngle1.shape)==2)
        rotAngle1.shape[0]
    """
    if(input_unit=='deg'):
        rotAngle1 = np.deg2rad(rotAngle1)
        rotAngle2 = np.deg2rad(rotAngle2)
        rotAngle3 = np.deg2rad(rotAngle3)

    rotAngle1 /= 2.0
    rotAngle2 /= 2.0
    rotAngle3 /= 2.0

    if(rotation_sequence=='ZYX'):
        q0 = np.cos(rotAngle1)*np.cos(rotAngle2)*np.cos(rotAngle3) + \
             np.sin(rotAngle1)*np.sin(rotAngle2)*np.sin(rotAngle3)

        qvec = np.zeros(3)
        qvec[0] = np.cos(rotAngle1)*np.cos(rotAngle2)*np.sin(rotAngle3) - \
                  np.sin(rotAngle1)*np.sin(rotAngle2)*np.cos(rotAngle3)
        qvec[1] = np.cos(rotAngle1)*np.sin(rotAngle2)*np.cos(rotAngle3) + \
                  np.sin(rotAngle1)*np.cos(rotAngle2)*np.sin(rotAngle3)
        qvec[2] = np.sin(rotAngle1)*np.cos(rotAngle2)*np.cos(rotAngle3) - \
                  np.cos(rotAngle1)*np.sin(rotAngle2)*np.sin(rotAngle3)

    return q0, qvec
import os


class Searcher(object):
    """Searches files in dirs for specified string."""

    def __init__(self, currentdir, string_to_search_for):
        self.currentdir = currentdir
        self.string_to_search_for = string_to_search_for
        self.get_files_in_currentdir()

    def search_files_in_dir_for_string(self, file_list):
        for f in file_list:
            self.search_file_for_string(f)

    def get_files_in_currentdir(self):
        # TODO implement iterator file; iterate lines
        file_list = []
        for f in os.listdir(self.currentdir):
            if not os.path.isdir(f):
                file_list.append(f)
        if self.string_to_search_for:
            self.search_files_in_dir_for_string(file_list)
        else:
            for f in file_list:
                print f

    def search_file_for_string(self, f):
        with open(f) as f:
            for line in f:
                if self.string_to_search_for in line:
                    print 'now'

    def search_subdir(self):
        pass
Add basic output of found lines
import os

from clint.textui import colored


class Searcher(object):
    """Searches files in dirs for specified string."""

    def __init__(self, currentdir, string_to_search_for):
        self.currentdir = currentdir
        self.string_to_search_for = string_to_search_for
        self.get_files_in_currentdir()

    def get_files_in_currentdir(self):
        # TODO implement iterator file; iterate lines
        file_list = []
        for f in os.listdir(self.currentdir):
            if not os.path.isdir(f):
                file_list.append(f)
        if self.string_to_search_for:
            matched_file_dict = self.search_files_in_dir_for_string(file_list)
            if matched_file_dict:
                self.print_nicely(matched_file_dict)
        else:
            for f in file_list:
                print f

    def search_files_in_dir_for_string(self, file_list):
        matched_file_dict = {}
        for f in file_list:
            matched_line_dict = self.search_file_for_string(f)
            if matched_line_dict:
                matched_file_dict[f] = matched_line_dict
        return matched_file_dict

    def search_file_for_string(self, f):
        matched_line_dict = {}
        with open(f) as f:
            for index, line in enumerate(f):
                if self.string_to_search_for in line:
                    matched_line_dict[index] = line
        return matched_line_dict

    def print_nicely(self, matched_file_dict):
        for key, value in matched_file_dict.iteritems():
            for k, v in value.iteritems():
                print (colored.magenta('./' + key + ':', True, False) + str(k) + ':' + v),
from pyramid.view import view_config
from pyramid.httpexceptions import (
    HTTPNotFound,
)


@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
    if 'page_id' in request.matchdict:
        data = request.kimochi.page(request.matchdict['page_id'])
    else:
        data = request.kimochi.page('1')

    return data


@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
    data = request.kimochi.gallery(request.matchdict['gallery_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data


@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
    data = request.kimochi.gallery_image(request.matchdict['gallery_id'],
                                         request.matchdict['image_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data
Use 'index' as the default page alias for lookups
from pyramid.view import view_config
from pyramid.httpexceptions import (
    HTTPNotFound,
)


@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
    if 'page_id' in request.matchdict:
        data = request.kimochi.page(request.matchdict['page_id'])
    else:
        data = request.kimochi.page('index')

    return data


@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
    data = request.kimochi.gallery(request.matchdict['gallery_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data


@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
    data = request.kimochi.gallery_image(request.matchdict['gallery_id'],
                                         request.matchdict['image_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data
from distutils.core import setup

setup(
    name = 'jsuite',
    packages = ['jsuite'], # this must be the same as the name above
    version = '0.3.0',
    scripts=['bin/jsuite'],
    install_requires = [
        'lxml==3.6.4',
        'clint==0.5.1'
    ],
    description = 'Parsing and manipulation tools for JATS XML files.',
    author = 'Dipanjan Mukherjee',
    author_email = '[email protected]',
    url = 'https://github.com/schatten/jsuite', # use the URL to the github repo
    download_url = 'https://github.com/schatten/jsuite/tarball/0.1', # I'll explain this in a second
    keywords = ['xml', 'parsing', 'JATS', 'tools'], # arbitrary keywords
    classifiers = [],
)
v0.4.0: Add count function across entire document
from distutils.core import setup

setup(
    name = 'jsuite',
    packages = ['jsuite'], # this must be the same as the name above
    version = '0.4.0',
    scripts=['bin/jsuite'],
    install_requires = [
        'lxml==3.6.4',
        'clint==0.5.1'
    ],
    description = 'Parsing and manipulation tools for JATS XML files.',
    author = 'Dipanjan Mukherjee',
    author_email = '[email protected]',
    url = 'https://github.com/schatten/jsuite', # use the URL to the github repo
    download_url = 'https://github.com/schatten/jsuite/tarball/0.1', # I'll explain this in a second
    keywords = ['xml', 'parsing', 'JATS', 'tools'], # arbitrary keywords
    classifiers = [],
)
from setuptools import setup

setup(name='mordecai',
      version='2.0.0a1',
      description='Full text geoparsing and event geocoding',
      url='http://github.com/openeventdata/mordecai/',
      author='Andy Halterman',
      author_email='[email protected]',
      license='MIT',
      packages=['mordecai'],
      keywords = ['geoparsing', 'nlp', 'geocoding', 'toponym resolution'],
      include_package_data=True,
      package_data = {'data': ['admin1CodesASCII.json',
                               'countries.json',
                               'nat_df.csv',
                               'stopword_country_names.json'],
                      'models' : ['country_model.h5',
                                  'rank_model.h5']}
      )
Update README and fix typos
from setuptools import setup

setup(name='mordecai',
      version='2.0.0a2',
      description='Full text geoparsing and event geocoding',
      url='http://github.com/openeventdata/mordecai/',
      author='Andy Halterman',
      author_email='[email protected]',
      license='MIT',
      packages=['mordecai'],
      keywords = ['geoparsing', 'nlp', 'geocoding', 'toponym resolution'],
      include_package_data=True,
      package_data = {'data': ['admin1CodesASCII.json',
                               'countries.json',
                               'nat_df.csv',
                               'stopword_country_names.json'],
                      'models' : ['country_model.h5',
                                  'rank_model.h5']}
      )
import time


def get_events(conn, stackname):
    """Get the events in batches and return in chronological order"""
    next = None
    event_list = []
    while 1:
        events = conn.describe_stack_events(stackname, next)
        event_list.append(events)
        if events.next_token is None:
            break
        next = events.next_token
        time.sleep(1)
    return reversed(sum(event_list, []))


def tail(conn, stack_name):
    """Show and then tail the event log"""
    def tail_print(e):
        print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id))

    # First dump the full list of events in chronological order and keep
    # track of the events we've seen already
    seen = set()
    initial_events = get_events(conn, stack_name)
    for e in initial_events:
        tail_print(e)
        seen.add(e.event_id)

    # Now keep looping through and dump the new events
    while 1:
        events = get_events(conn, stack_name)
        for e in events:
            if e.event_id not in seen:
                tail_print(e)
                seen.add(e.event_id)
        time.sleep(5)
Support a custom logging function and sleep time within tail
import time


def _tail_print(e):
    print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id))


def get_events(conn, stackname):
    """Get the events in batches and return in chronological order"""
    next = None
    event_list = []
    while 1:
        events = conn.describe_stack_events(stackname, next)
        event_list.append(events)
        if events.next_token is None:
            break
        next = events.next_token
        time.sleep(1)
    return reversed(sum(event_list, []))


def tail(conn, stack_name, log_func=_tail_print, sleep_time=5):
    """Show and then tail the event log"""
    # First dump the full list of events in chronological order and keep
    # track of the events we've seen already
    seen = set()
    initial_events = get_events(conn, stack_name)
    for e in initial_events:
        log_func(e)
        seen.add(e.event_id)

    # Now keep looping through and dump the new events
    while 1:
        events = get_events(conn, stack_name)
        for e in events:
            if e.event_id not in seen:
                log_func(e)
                seen.add(e.event_id)
        time.sleep(sleep_time)
from django.conf.urls import url

from ..models import Tag
from ..utils import DetailView
from ..views import (
    TagCreate, TagDelete, TagList,
    TagPageList, TagUpdate)

urlpatterns = [
    url(r'^$',
        TagList.as_view(),
        name='organizer_tag_list'),
    url(r'^create/$',
        TagCreate.as_view(),
        name='organizer_tag_create'),
    url(r'^(?P<page_number>\d+)/$',
        TagPageList.as_view(),
        name='organizer_tag_page'),
    url(r'^(?P<slug>[\w\-]+)/$',
        DetailView.as_view(
            context_object_name='tag',
            model=Tag,
            template_name=(
                'organizer/tag_detail.html')),
        name='organizer_tag_detail'),
    url(r'^(?P<slug>[\w-]+)/delete/$',
        TagDelete.as_view(),
        name='organizer_tag_delete'),
    url(r'^(?P<slug>[\w\-]+)/update/$',
        TagUpdate.as_view(),
        name='organizer_tag_update'),
]
Ch17: Revert to Tag Detail URL pattern.
from django.conf.urls import url

from ..views import (
    TagCreate, TagDelete, TagDetail,
    TagList, TagPageList, TagUpdate)

urlpatterns = [
    url(r'^$',
        TagList.as_view(),
        name='organizer_tag_list'),
    url(r'^create/$',
        TagCreate.as_view(),
        name='organizer_tag_create'),
    url(r'^(?P<page_number>\d+)/$',
        TagPageList.as_view(),
        name='organizer_tag_page'),
    url(r'^(?P<slug>[\w\-]+)/$',
        TagDetail.as_view(),
        name='organizer_tag_detail'),
    url(r'^(?P<slug>[\w-]+)/delete/$',
        TagDelete.as_view(),
        name='organizer_tag_delete'),
    url(r'^(?P<slug>[\w\-]+)/update/$',
        TagUpdate.as_view(),
        name='organizer_tag_update'),
]
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from cryptography.hazmat.backends import openssl
from cryptography.hazmat.bindings.commoncrypto.binding import (
    Binding as CommonCryptoBinding
)

_ALL_BACKENDS = [openssl.backend]

if CommonCryptoBinding.is_available():
    from cryptography.hazmat.backends import commoncrypto
    _ALL_BACKENDS.append(commoncrypto.backend)


def default_backend():
    return openssl.backend
Make the default backend be a multi-backend
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from cryptography.hazmat.backends import openssl
from cryptography.hazmat.backends.multibackend import MultiBackend
from cryptography.hazmat.bindings.commoncrypto.binding import (
    Binding as CommonCryptoBinding
)

_ALL_BACKENDS = [openssl.backend]

if CommonCryptoBinding.is_available():
    from cryptography.hazmat.backends import commoncrypto
    _ALL_BACKENDS.append(commoncrypto.backend)

_default_backend = MultiBackend(_ALL_BACKENDS)


def default_backend():
    return _default_backend
from django.contrib import admin
from registration.models import RegistrationProfile


class RegistrationAdmin(admin.ModelAdmin):
    list_display = ('__unicode__', 'activation_key_expired')
    search_fields = ('user__username', 'user__first_name')


admin.site.register(RegistrationProfile, RegistrationAdmin)
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
from django.contrib import admin
from registration.models import RegistrationProfile


class RegistrationAdmin(admin.ModelAdmin):
    list_display = ('__unicode__', 'activation_key_expired')
    raw_id_fields = ['user']
    search_fields = ('user__username', 'user__first_name')


admin.site.register(RegistrationProfile, RegistrationAdmin)
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic.base import TemplateView

urlpatterns = [
    url(r'^admin_tools/', include('admin_tools.urls')),
    url(r'^admin/', include(admin.site.urls)),

    # Simply show the master template.
    url(r'^$', TemplateView.as_view(template_name='demo.html')),
]

# NOTE: The staticfiles_urlpatterns also discovers static files (ie. no need to run collectstatic). Both the static
# folder and the media folder are only served via Django if DEBUG = True.
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Add missing configuration for DjDT
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic.base import TemplateView

urlpatterns = [
    url(r'^admin_tools/', include('admin_tools.urls')),
    url(r'^admin/', include(admin.site.urls)),

    # Simply show the master template.
    url(r'^$', TemplateView.as_view(template_name='demo.html')),
]

# NOTE: The staticfiles_urlpatterns also discovers static files (ie. no need to run collectstatic). Both the static
# folder and the media folder are only served via Django if DEBUG = True.
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

if settings.DEBUG:
    import debug_toolbar
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
from behave import *


@given(u'"{text}" backend added')
def given_backend(context, text):
    backends = context.client.list_backends()
    for backend in backends:
        if text in backend['title']:
            return


@when(u'I list backends')
def list_backends(context):
    context.backends = context.client.list_backends()
Rename Behave steps for api tests
from behave import *


@given(u'"{text}" backend added through api')
def given_backend(context, text):
    backends = context.client.list_backends()
    for backend in backends:
        if text in backend['title']:
            return


@when(u'I list backends')
def list_backends(context):
    context.backends = context.client.list_backends()
"""Suggest creating symbolic link if hard link is not allowed. Example: > ln barDir barLink ln: ‘barDir’: hard link not allowed for directory --> ln -s barDir barLink """ import re from thefuck.specific.sudo import sudo_support @sudo_support def match(command): return (command.stderr.endswith("hard link not allowed for directory") and command.script.startswith("ln ")) @sudo_support def get_new_command(command): return re.sub(r'^ln ', 'ln -s ', command.script)
Fix encoding error in source file example
# -*- coding: utf-8 -*- """Suggest creating symbolic link if hard link is not allowed. Example: > ln barDir barLink ln: ‘barDir’: hard link not allowed for directory --> ln -s barDir barLink """ import re from thefuck.specific.sudo import sudo_support @sudo_support def match(command): return (command.stderr.endswith("hard link not allowed for directory") and command.script.startswith("ln ")) @sudo_support def get_new_command(command): return re.sub(r'^ln ', 'ln -s ', command.script)
from string import ascii_lowercase

import django
import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()

from breach.models import Target, Victim

endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9

target_1 = Target(
    endpoint=endpoint,
    prefix=prefix,
    alphabet=alphabet,
    secretlength=secretlength
)
target_1.save()

print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)

snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'

victim_1 = Victim(
    target=target_1,
    snifferendpoint=snifferendpoint,
    sourceip=sourceip,
    # method='serial'
)
victim_1.save()

print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
Update dimkarakostas population with alignmentalphabet
from string import ascii_lowercase

import django
import os
import string

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()

from breach.models import Target, Victim

endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9

target_1 = Target(
    endpoint=endpoint,
    prefix=prefix,
    alphabet=alphabet,
    secretlength=secretlength,
    alignmentalphabet=string.ascii_uppercase
)
target_1.save()

print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)

snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'

victim_1 = Victim(
    target=target_1,
    snifferendpoint=snifferendpoint,
    sourceip=sourceip,
    # method='serial'
)
victim_1.save()

print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
""" NOTE: this API is WIP and has not yet been approved. Do not use this API without talking to Christina or Andy. For more information, see: https://openedx.atlassian.net/wiki/display/TNL/User+API """ from rest_framework.views import APIView from rest_framework.response import Response from rest_framework import status from rest_framework import permissions from django.db import transaction from django.utils.translation import ugettext as _ from openedx.core.lib.api.authentication import ( SessionAuthenticationAllowInactiveUser, OAuth2AuthenticationAllowInactiveUser, ) from openedx.core.lib.api.parsers import MergePatchParser from openedx.core.lib.api.permissions import IsUserInUrlOrStaff from ..errors import UserNotFound, UserNotAuthorized class PermissionsView(APIView): authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser) parser_classes = (MergePatchParser,) def get(self, request): """ GET /api/user/v1/ """ try: is_staff = request.user.is_staff except UserNotAuthorized: return Response(status=status.HTTP_403_FORBIDDEN) except UserNotFound: return Response(status=status.HTTP_404_NOT_FOUND) return Response(is_staff)
Remove unused import and redundant comment
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status

from openedx.core.lib.api.authentication import (
    SessionAuthenticationAllowInactiveUser,
    OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.parsers import MergePatchParser
from ..errors import UserNotFound, UserNotAuthorized


class PermissionsView(APIView):
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser,
                              SessionAuthenticationAllowInactiveUser)
    parser_classes = (MergePatchParser,)

    def get(self, request):
        """
        GET /api/user/v1/
        """
        try:
            is_staff = request.user.is_staff
        except UserNotAuthorized:
            return Response(status=status.HTTP_403_FORBIDDEN)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)

        return Response(is_staff)
Set up basic structure of code
def heap_sort(arr):
    """
    Heapsort
    Complexity: O(n log(n))
    """
    pass


def heapify(arr):
    pass


array = [1,5,65,23,57,1232,-1,-5,-2,242,100,4,423,2,564,9,0,10,43,64]
print(array)
heap_sort(array)
print(array)
import vx

import math
import os
import sys

_tick_functions = []

def _register_tick_function(f, front=False):
    if front:
        _tick_functions.insert(0, f)
    else:
        _tick_functions.append(f)

def _tick():
    for f in _tick_functions:
        f()

vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
vx.files = sys.argv[1:]

import utils
import scheduler
import keybindings
import windows
import prompt

def _default_start():
    if len(vx.files) == 0:
        win = vx.window(vx.rows, vx.cols, 0, 0)
        win.blank()
        win.focus()
    else:
        d = math.floor(vx.rows / (len(vx.files)))
        y = 0
        for f in vx.files:
            win = vx.window(d, vx.cols, y, 0)
            win.attach_file(f)
            y += d
            win.focus()
vx.default_start = _default_start

sys.path.append(os.path.expanduser('~/.python'))
import rc
Fix a crash if there is no ~/.python/rc.py
import vx

import math
import os
import sys

_tick_functions = []

def _register_tick_function(f, front=False):
    if front:
        _tick_functions.insert(0, f)
    else:
        _tick_functions.append(f)

def _tick():
    for f in _tick_functions:
        f()

vx.my_vx = _tick
vx.register_tick_function = _register_tick_function
vx.files = sys.argv[1:]

import utils
import scheduler
import keybindings
import windows
import prompt

def _default_start():
    if len(vx.files) == 0:
        win = vx.window(vx.rows, vx.cols, 0, 0)
        win.blank()
        win.focus()
    else:
        d = math.floor(vx.rows / (len(vx.files)))
        y = 0
        for f in vx.files:
            win = vx.window(d, vx.cols, y, 0)
            win.attach_file(f)
            y += d
            win.focus()
vx.default_start = _default_start

sys.path.append(os.path.expanduser('~/.python'))
try:
    import rc
except ImportError:
    pass # just means there was no ~/.python/rc module
""" gargoyle ~~~~~~~~ :copyright: (c) 2010 DISQUS. :license: Apache License 2.0, see LICENSE for more details. """ __all__ = ('gargoyle', 'ConditionSet', 'autodiscover', 'VERSION') try: VERSION = __import__('pkg_resources') \ .get_distribution('gargoyle').version except Exception, e: VERSION = 'unknown' from gargoyle.manager import gargoyle def autodiscover(): """ Auto-discover INSTALLED_APPS admin.py modules and fail silently when not present. This forces an import on them to register any admin bits they may want. """ import copy from django.conf import settings from django.utils.importlib import import_module for app in settings.INSTALLED_APPS: # Attempt to import the app's gargoyle module. before_import_registry = copy.copy(gargoyle._registry) try: import_module('%s.gargoyle' % app) except: # Reset the model registry to the state before the last import as # this import will have to reoccur on the next request and this # could raise NotRegistered and AlreadyRegistered exceptions gargoyle._registry = before_import_registry # load builtins __import__('gargoyle.builtins')
Use python import lib (django import lib will be removed in 1.9).
""" gargoyle ~~~~~~~~ :copyright: (c) 2010 DISQUS. :license: Apache License 2.0, see LICENSE for more details. """ __all__ = ('gargoyle', 'ConditionSet', 'autodiscover', 'VERSION') try: VERSION = __import__('pkg_resources') \ .get_distribution('gargoyle').version except Exception, e: VERSION = 'unknown' from gargoyle.manager import gargoyle def autodiscover(): """ Auto-discover INSTALLED_APPS admin.py modules and fail silently when not present. This forces an import on them to register any admin bits they may want. """ import copy from django.conf import settings from importlib import import_module for app in settings.INSTALLED_APPS: # Attempt to import the app's gargoyle module. before_import_registry = copy.copy(gargoyle._registry) try: import_module('%s.gargoyle' % app) except: # Reset the model registry to the state before the last import as # this import will have to reoccur on the next request and this # could raise NotRegistered and AlreadyRegistered exceptions gargoyle._registry = before_import_registry # load builtins __import__('gargoyle.builtins')
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import Required, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""

    email = StringField("Email", validators=[Required(), Email(), Length(1, 32)])
    username = StringField("Username", validators=[Required(), Length(1, 32)])
    password = PasswordField("Password", validators=[Required(), Length(1, 32)])
Use DataRequired to validate form
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""

    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""

    email = StringField("Username", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")
import sys
import os


def run():
    base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    ## FIXME: this is kind of crude; if we could create a fake pip
    ## module, then exec into it and update pip.__path__ properly, we
    ## wouldn't have to update sys.path:
    sys.path.insert(0, base)

    import pip
    return pip.main()


if __name__ == '__main__':
    run()
Make sure exit code is used in -E situation
import sys
import os


def run():
    base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    ## FIXME: this is kind of crude; if we could create a fake pip
    ## module, then exec into it and update pip.__path__ properly, we
    ## wouldn't have to update sys.path:
    sys.path.insert(0, base)

    import pip
    return pip.main()


if __name__ == '__main__':
    exit = run()
    if exit:
        sys.exit(exit)
from zope.interface import implementer

from .interfaces import (
    IAuthSourceService,
)


@implementer(IAuthSourceService)
class SessionAuthSource(object):
    """ An authentication source that uses the current session """

    vary = ()
    value_key = 'sanity.value'

    def __init__(self, context, request):
        self.request = request
        self.session = request.session

        return self

    def get_value(self):
        return self.session.get(value_key, [None, None])

    def headers_remember(self, value):
        self.session[value_key] = value
        return []

    def headers_forget(self):
        if value_key in self.session:
            del self.session[value_key]
        return []
Add a cookie based authentication source
from webob.cookies import (
    SignedCookieProfile,
    SignedSerializer,
    )

from zope.interface import implementer

from .interfaces import (
    IAuthSourceService,
    )


@implementer(IAuthSourceService)
class SessionAuthSource(object):
    """ An authentication source that uses the current session """
    vary = ()
    value_key = 'sanity.value'

    def __init__(self, context, request):
        self.request = request
        self.session = request.session

    def get_value(self):
        return self.session.get(self.value_key, [None, None])

    def headers_remember(self, value):
        self.session[self.value_key] = value
        return []

    def headers_forget(self):
        if self.value_key in self.session:
            del self.session[self.value_key]
        return []


def CookieAuthSourceFactory(
    secret,
    cookie_name='auth',
    secure=False,
    max_age=None,
    httponly=False,
    path="/",
    domains=None,
    timeout=None,
    reissue_time=None,
    debug=False,
    hashalg='sha512',
    ):
    """ An authentication source that uses a unique cookie """

    @implementer(IAuthSourceService)
    class CookieAuthSource(object):
        def __init__(self, context, request):
            self.domains = domains

            if self.domains is None:
                self.domains = []
                self.domains.append(request.domain)

            self.cookie = SignedCookieProfile(
                secret,
                'authsanity',
                cookie_name,
                secure=secure,
                max_age=max_age,
                httponly=httponly,
                path=path,
                domains=domains,
                hashalg=hashalg,
                )
            # Bind the cookie to the current request
            self.cookie = self.cookie.bind(request)

        def get_value(self):
            return self.cookie.get_value()

        def headers_remember(self, value):
            return self.cookie.get_headers(value, domains=self.domains)

        def headers_forget(self):
            return self.cookie.get_headers('', max_age=0)

    return CookieAuthSource
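The factory above configures a class through a closure rather than through instance attributes. Reduced to a hedged standalone sketch (the names here are illustrative, not part of the library):

def make_greeter(greeting):
    class Greeter(object):
        def __init__(self, name):
            self.name = name

        def greet(self):
            return '%s, %s!' % (greeting, self.name)
    return Greeter

Greeter = make_greeter('Hello')
print(Greeter('world').greet())  # -> Hello, world!

This keeps per-deployment settings (secret, cookie flags) out of the per-request constructor, which only ever receives context and request.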
# coding=utf-8 """ DCRM - Darwin Cydia Repository Manager Copyright (C) 2017 WU Zheng <[email protected]> & 0xJacky <[email protected]> This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Notice: You have used class-based views, that's awesome. If not necessary, you can try function-based views. You may add lines above as license. """ from django.views.generic import ListView from WEIPDCRM.models.package import Package class ChartView(ListView): model = Package context_object_name = 'package_list' ordering = '-download_times' template_name = 'frontend/chart.html' def get_queryset(self): """ Get 24 packages ordering by download times. :return: QuerySet """ queryset = super(ChartView, self).get_queryset().all()[:24] return queryset
Fix privileges of package frontend.
# coding=utf-8 """ DCRM - Darwin Cydia Repository Manager Copyright (C) 2017 WU Zheng <[email protected]> & 0xJacky <[email protected]> This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Notice: You have used class-based views, that's awesome. If not necessary, you can try function-based views. You may add lines above as license. """ from django.views.generic import ListView from WEIPDCRM.models.package import Package class ChartView(ListView): model = Package context_object_name = 'package_list' ordering = '-download_count' template_name = 'frontend/chart.html' def get_queryset(self): """ Get 24 packages ordering by download times. :return: QuerySet """ queryset = super(ChartView, self).get_queryset().all()[:24] return queryset
from datetime import datetime import feedparser import time from newswall.providers.base import ProviderBase class Provider(ProviderBase): def update(self): feed = feedparser.parse(self.config['source']) for entry in feed['entries']: self.create_story(entry.link, title=entry.title, body=entry.description, timestamp=datetime.fromtimestamp(time.mktime(entry.date_parsed)), )
Add RSS Feed Provider docs
""" RSS Feed Provider ================= Required configuration keys:: { "provider": "newswall.providers.feed", "source": "http://twitter.com/statuses/user_timeline/feinheit.rss" } """ from datetime import datetime import feedparser import time from newswall.providers.base import ProviderBase class Provider(ProviderBase): def update(self): feed = feedparser.parse(self.config['source']) for entry in feed['entries']: self.create_story(entry.link, title=entry.title, body=entry.description, timestamp=datetime.fromtimestamp(time.mktime(entry.date_parsed)), )
from __future__ import absolute_import, division, print_function from .common import Benchmark import numpy as np class Random(Benchmark): params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5', 'poisson 10'] def setup(self, name): items = name.split() name = items.pop(0) params = [float(x) for x in items] self.func = getattr(np.random, name) self.params = tuple(params) + ((100, 100),) def time_rng(self, name): self.func(*self.params) class Shuffle(Benchmark): def setup(self): self.a = np.arange(100000) def time_100000(self): np.random.shuffle(self.a)
ENH: Add benchmark tests for numpy.random.randint. This add benchmarks randint. There is one set of benchmarks for the default dtype, 'l', that can be tracked back, and another set for the new dtypes 'bool', 'uint8', 'uint16', 'uint32', and 'uint64'.
from __future__ import absolute_import, division, print_function from .common import Benchmark import numpy as np from numpy.lib import NumpyVersion class Random(Benchmark): params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5', 'poisson 10'] def setup(self, name): items = name.split() name = items.pop(0) params = [float(x) for x in items] self.func = getattr(np.random, name) self.params = tuple(params) + ((100, 100),) def time_rng(self, name): self.func(*self.params) class Shuffle(Benchmark): def setup(self): self.a = np.arange(100000) def time_100000(self): np.random.shuffle(self.a) class Randint(Benchmark): def time_randint_fast(self): """Compare to uint32 below""" np.random.randint(0, 2**30, size=10**5) def time_randint_slow(self): """Compare to uint32 below""" np.random.randint(0, 2**30 + 1, size=10**5) class Randint_dtype(Benchmark): high = { 'bool': 1, 'uint8': 2**7, 'uint16': 2**15, 'uint32': 2**31, 'uint64': 2**63 } param_names = ['dtype'] params = ['bool', 'uint8', 'uint16', 'uint32', 'uint64'] def setup(self, name): if NumpyVersion(np.__version__) < '1.11.0.dev0': raise NotImplementedError def time_randint_fast(self, name): high = self.high[name] np.random.randint(0, high, size=10**5, dtype=name) def time_randint_slow(self, name): high = self.high[name] np.random.randint(0, high + 1, size=10**5, dtype=name)
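A rough standalone timing of the same fast/slow boundary, without the asv harness (hedged: a power-of-two range maps cleanly onto the generator's masking strategy, while one past it forces extra rejection sampling):

import timeit
import numpy as np

fast = timeit.timeit(lambda: np.random.randint(0, 2**30, size=10**5),
                     number=200)
slow = timeit.timeit(lambda: np.random.randint(0, 2**30 + 1, size=10**5),
                     number=200)
print('fast: %.3fs  slow: %.3fs' % (fast, slow))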
# PMUtil.py # Phenotype microarray utility functions # # Author: Daniel A Cuevas # Created on 27 Jan. 2015 # Updated on 27 Jan. 2015 from __future__ import absolute_import, division, print_function import sys import time import datetime def timeStamp(): '''Return time stamp''' t = time.time() fmt = '[%Y-%m-%d %H:%M:%S]' return datetime.datetime.fromtimestamp(t).strftime(fmt) def printStatus(msg): '''Print status message''' print('{} {}'.format(timeStamp(), msg), file=sys.stderr) sys.stderr.flush()
Exit method. - (New) Added exit method.
# PMUtil.py # Phenotype microarray utility functions # # Author: Daniel A Cuevas # Created on 27 Jan 2015 # Updated on 20 Aug 2015 from __future__ import absolute_import, division, print_function import sys import time import datetime def timeStamp(): '''Return time stamp''' t = time.time() fmt = '[%Y-%m-%d %H:%M:%S]' return datetime.datetime.fromtimestamp(t).strftime(fmt) def printStatus(msg): '''Print status message''' print('{} {}'.format(timeStamp(), msg), file=sys.stderr) sys.stderr.flush() def exitScript(num=1): '''Exit script''' sys.exit(num)
#!/usr/bin/env python # -*- coding: utf-8 -*- from optparse import make_option from optparse import OptionParser from scaffolder.core.template import TemplateManager from scaffolder.core.commands import BaseCommand class InstallCommand(BaseCommand): option_list = BaseCommand.option_list + ( make_option( "-t", "--target", dest="target_dir", default='~/.cookiejar', help='Project Templates directory.', metavar="TEMPLATES_DIR" ), ) def __init__(self, name, help='', aliases=(), stdout=None, stderr=None): help = 'install: Installs a Project Template.' parser = OptionParser( version=self.get_version(), option_list=self.get_option_list(), usage='\n %prog {0} ACTION [OPTIONS]'.format(name) ) aliases = ('tmp',) BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases) def run(self, *args, **options): src = args[0] tgt = options.get('target_dir') manager = TemplateManager() manager.install(src=src, dest=tgt)
InstallCommand: Use get_minion_path to get default dir.
#!/usr/bin/env python # -*- coding: utf-8 -*- from optparse import make_option from optparse import OptionParser from scaffolder import get_minion_path from scaffolder.core.template import TemplateManager from scaffolder.core.commands import BaseCommand class InstallCommand(BaseCommand): option_list = BaseCommand.option_list + ( make_option( "-t", "--target", dest="target_dir", default=get_minion_path('weaver'), help='Project Templates directory.', metavar="TEMPLATES_DIR" ), ) def __init__(self, name, help='', aliases=(), stdout=None, stderr=None): help = 'install: Installs a Project Template.' parser = OptionParser( version=self.get_version(), option_list=self.get_option_list(), usage='\n %prog {0} ACTION [OPTIONS]'.format(name) ) aliases = ('tmp',) BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases) def run(self, *args, **options): src = args[0] tgt = options.get('target_dir') manager = TemplateManager() manager.install(src=src, dest=tgt)
from conans import ConanFile, CMake class GrpccbConan(ConanFile): name = "grpc_cb_core" version = "0.2" license = "Apache-2.0" url = "https://github.com/jinq0123/grpc_cb_core" description = "C++ gRPC core library with callback interface." settings = "os", "compiler", "build_type", "arch" options = {"shared": [True, False]} default_options = "shared=False" requires = "grpc/1.17.2@inexorgame/stable", generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing build_requires = "PremakeGen/0.1@memsharded/testing" exports_sources = "src*", "include*", "CMakeLists.txt" def build(self): cmake = CMake(self) self.run('cmake %s %s' % (self.source_folder, cmake.command_line)) self.run("cmake --build . %s" % cmake.build_config) def package(self): self.copy("include/*") self.copy("*.lib", dst="lib", keep_path=False) self.copy("*.dll", dst="bin", keep_path=False) self.copy("*.dylib*", dst="lib", keep_path=False) self.copy("*.so", dst="lib", keep_path=False) self.copy("*.a", dst="lib", keep_path=False) def package_info(self): self.cpp_info.libs = ["grpc_cb_core"]
Fix update remote to ConanCenter and grpc to highest buildable/supported version
from conans import ConanFile, CMake class GrpccbConan(ConanFile): name = "grpc_cb_core" version = "0.2" license = "Apache-2.0" url = "https://github.com/jinq0123/grpc_cb_core" description = "C++ gRPC core library with callback interface." settings = "os", "compiler", "build_type", "arch" options = {"shared": [True, False]} default_options = "shared=False" requires = "grpc/1.44.0@", generators = "cmake", "premake" # The builtin premake generator exports_sources = "src*", "include*", "CMakeLists.txt" def build(self): cmake = CMake(self) self.run('cmake %s %s' % (self.source_folder, cmake.command_line)) self.run("cmake --build . %s" % cmake.build_config) def package(self): self.copy("include/*") self.copy("*.lib", dst="lib", keep_path=False) self.copy("*.dll", dst="bin", keep_path=False) self.copy("*.dylib*", dst="lib", keep_path=False) self.copy("*.so", dst="lib", keep_path=False) self.copy("*.a", dst="lib", keep_path=False) def package_info(self): self.cpp_info.libs = ["grpc_cb_core"]
from conans import ConanFile from conans.tools import download, unzip import os VERSION = "0.0.7" class CMakeModuleCommonConan(ConanFile): name = "cmake-module-common" version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION) generators = "cmake" url = "http://github.com/polysquare/cmake-module-common" license = "MIT" def source(self): zip_name = "cmake-module-common.zip" download("https://github.com/polysquare/" "cmake-module-common/archive/{version}.zip" "".format(version="v" + VERSION), zip_name) unzip(zip_name) os.unlink(zip_name) def package(self): self.copy(pattern="Find*.cmake", dst="", src="cmake-module-common-" + VERSION, keep_path=True) self.copy(pattern="*.cmake", dst="cmake/cmake-module-common", src="cmake-module-common-" + VERSION, keep_path=True)
conan: Make cmake-unit, cmake-linter-cmake and style-linter-cmake normal deps
from conans import ConanFile from conans.tools import download, unzip import os VERSION = "0.0.7" class CMakeModuleCommonConan(ConanFile): name = "cmake-module-common" version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION) generators = "cmake" url = "http://github.com/polysquare/cmake-module-common" license = "MIT" requires = ("cmake-unit/master@smspillaz/cmake-unit", "cmake-linter-cmake/master@smspillaz/cmake-linter-cmake", "style-linter-cmake/master@smspillaz/style-linter-cmake") def source(self): zip_name = "cmake-module-common.zip" download("https://github.com/polysquare/" "cmake-module-common/archive/{version}.zip" "".format(version="v" + VERSION), zip_name) unzip(zip_name) os.unlink(zip_name) def package(self): self.copy(pattern="Find*.cmake", dst="", src="cmake-module-common-" + VERSION, keep_path=True) self.copy(pattern="*.cmake", dst="cmake/cmake-module-common", src="cmake-module-common-" + VERSION, keep_path=True)
# Copyright 2015 MongoDB, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import time from mockupdb import MockupDB, wait_until from pymongo import MongoClient from tests import unittest class TestInitialIsMaster(unittest.TestCase): def test_initial_ismaster(self): server = MockupDB() server.run() self.addCleanup(server.stop) start = time.time() client = MongoClient(server.uri) self.addCleanup(client.close) # A single ismaster is enough for the client to be connected. self.assertIsNone(client.address) server.receives('ismaster').ok() wait_until(lambda: client.address is not None, 'update address', timeout=1) # At least 10 seconds before next heartbeat. server.receives('ismaster').ok() self.assertGreaterEqual(time.time() - start, 10) if __name__ == '__main__': unittest.main()
Update for PYTHON 985: MongoClient properties now block until connected.
# Copyright 2015 MongoDB, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import time from mockupdb import MockupDB, wait_until from pymongo import MongoClient from tests import unittest class TestInitialIsMaster(unittest.TestCase): def test_initial_ismaster(self): server = MockupDB() server.run() self.addCleanup(server.stop) start = time.time() client = MongoClient(server.uri) self.addCleanup(client.close) # A single ismaster is enough for the client to be connected. self.assertFalse(client.nodes) server.receives('ismaster').ok(ismaster=True) wait_until(lambda: client.nodes, 'update nodes', timeout=1) # At least 10 seconds before next heartbeat. server.receives('ismaster').ok(ismaster=True) self.assertGreaterEqual(time.time() - start, 10) if __name__ == '__main__': unittest.main()
from uuid import UUID from werkzeug.routing import BaseConverter class UUIDConverter(BaseConverter): to_python = UUID to_url = str
Drop dashes from download urls.
from uuid import UUID from werkzeug.routing import BaseConverter class UUIDConverter(BaseConverter): to_python = UUID def to_url(self, obj): return str(obj).replace('-', '')
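A hedged usage sketch (werkzeug required; the converter's Map argument is irrelevant to this illustration, so None is passed):

from uuid import UUID

conv = UUIDConverter(None)
u = UUID('12345678-1234-5678-1234-567812345678')
print(conv.to_url(u))  # -> 12345678123456781234567812345678
print(conv.to_python('12345678123456781234567812345678') == u)  # True

UUID() accepts hex with or without dashes, so round-tripping still works after the dashes are dropped from URLs.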
"""curses The main package for curses support for Python. Normally used by importing the package, and perhaps a particular module inside it. import curses from curses import textpad curses.initwin() ... """ __revision__ = "$Id$" from _curses import * from curses.wrapper import wrapper
Add wrapper for initscr() to copy the ACS_ and LINES,COLS bindings
"""curses The main package for curses support for Python. Normally used by importing the package, and perhaps a particular module inside it. import curses from curses import textpad curses.initwin() ... """ __revision__ = "$Id$" from _curses import * from curses.wrapper import wrapper # Some constants, most notably the ACS_* ones, are only added to the C # _curses module's dictionary after initscr() is called. (Some # versions of SGI's curses don't define values for those constants # until initscr() has been called.) This wrapper function calls the # underlying C initscr(), and then copies the constants from the # _curses module to the curses package's dictionary. Don't do 'from # curses import *' if you'll be needing the ACS_* constants. def initscr(): import _curses, curses stdscr = _curses.initscr() for key, value in _curses.__dict__.items(): if key[0:4] == 'ACS_' or key in ('LINES', 'COLS'): setattr(curses, key, value) return stdscr
from flask.json import JSONEncoder from bson import json_util from mongoengine.base import BaseDocument from mongoengine import QuerySet def _make_encoder(superclass): class MongoEngineJSONEncoder(superclass): ''' A JSONEncoder which provides serialization of MongoEngine documents and querysets. ''' def default(self, obj): if isinstance(obj, BaseDocument): return json_util._json_convert(obj.to_mongo()) elif isinstance(obj, QuerySet): return json_util._json_convert(obj.as_pymongo()) return superclass.default(self, obj) return MongoEngineJSONEncoder MongoEngineJSONEncoder = _make_encoder(JSONEncoder) def overide_json_encoder(app): ''' A function to dynamically create a new MongoEngineJSONEncoder class based upon a custom base class. This function allows us to combine MongoEngine serialization with any changes to Flask's JSONEncoder which a user may have made prior to calling init_app. NOTE: This does not cover situations where users override an instance's json_encoder after calling init_app. ''' app.json_encoder = _make_encoder(app.json_encoder)
Support older versions of MongoEngine
from flask.json import JSONEncoder from bson import json_util from mongoengine.base import BaseDocument try: from mongoengine.base import BaseQuerySet except ImportError as ie: # support mongoengine < 0.7 from mongoengine.queryset import QuerySet as BaseQuerySet def _make_encoder(superclass): class MongoEngineJSONEncoder(superclass): ''' A JSONEncoder which provides serialization of MongoEngine documents and queryset objects. ''' def default(self, obj): if isinstance(obj, BaseDocument): return json_util._json_convert(obj.to_mongo()) elif isinstance(obj, BaseQuerySet): return json_util._json_convert(obj.as_pymongo()) return superclass.default(self, obj) return MongoEngineJSONEncoder MongoEngineJSONEncoder = _make_encoder(JSONEncoder) def overide_json_encoder(app): ''' A function to dynamically create a new MongoEngineJSONEncoder class based upon a custom base class. This function allows us to combine MongoEngine serialization with any changes to Flask's JSONEncoder which a user may have made prior to calling init_app. NOTE: This does not cover situations where users override an instance's json_encoder after calling init_app. ''' app.json_encoder = _make_encoder(app.json_encoder)
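A hedged wiring sketch (assumes Flask and mongoengine are installed; the overide_json_encoder spelling is kept exactly as defined above):

from flask import Flask

app = Flask(__name__)
overide_json_encoder(app)
# app.json_encoder now serializes documents and querysets via json_util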
import sys
import hashlib

def e(s):
    if type(s) == str:
        return s
    return s.encode('utf-8')

def d(s):
    if type(s) == unicode:
        return s
    return unicode(s, 'utf-8')

def mkid(s):
    return hashlib.sha1(e(s)).hexdigest()[:2*4]

class Logger(object):
    def __init__(self):
        self._mode = 'INFO'

    def progress(self, message):
        if not sys.stderr.isatty():
            return
        if self._mode == 'PROGRESS':
            print >>sys.stderr, '\r',
        print >>sys.stderr, message,
        self._mode = 'PROGRESS'

    def info(self, message):
        if self._mode == 'PROGRESS':
            print >>sys.stderr
        print >>sys.stderr, message
        self._mode = 'INFO'
Handle logging unicode messages in python2. Former-commit-id: 257d94eb71d5597ff52a18ec1530d73496901ef4
import sys
import hashlib

def e(s):
    if type(s) == str:
        return s
    return s.encode('utf-8')

def d(s):
    if type(s) == unicode:
        return s
    return unicode(s, 'utf-8')

def mkid(s):
    return hashlib.sha1(e(s)).hexdigest()[:2*4]

class Logger(object):
    def __init__(self):
        self._mode = 'INFO'

    def progress(self, message):
        message = e(message)
        if not sys.stderr.isatty():
            return
        if self._mode == 'PROGRESS':
            print >>sys.stderr, '\r',
        print >>sys.stderr, message,
        self._mode = 'PROGRESS'

    def info(self, message):
        message = e(message)
        if self._mode == 'PROGRESS':
            print >>sys.stderr
        print >>sys.stderr, message
        self._mode = 'INFO'
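A hedged, Python 2-only sketch of the failure mode the e() calls guard against: printing a unicode object to a piped (ASCII-encoded) stderr raises UnicodeEncodeError, while the pre-encoded UTF-8 byte string prints fine.

log = Logger()
log.info(u'r\xe9sum\xe9 parsed')  # encoded to UTF-8 bytes before printing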
import argparse import scanner import numpy as np import cv2 from decode import db @db.loader('frame') def load_frames(buf, metadata): return np.frombuffer(buf, dtype=np.uint8) \ .reshape((metadata.height,metadata.width,3)) def extract_frames(args): job = load_frames(args['dataset'], 'edr') video_paths = job._dataset.video_data.original_video_paths for (vid, frames) in job.as_frame_list(): video_path = video_paths[int(vid)] inp = cv2.VideoCapture(video_path) assert(inp.isOpened()) video_frame_num = -1 for (frame_num, buf) in frames: while video_frame_num != frame_num: _, video_frame = inp.read() video_frame_num += 1 scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR) frame_diff = (scanner_frame - video_frame).sum() if frame_diff != 0: print('Frame {} does not match!'.format(frame_num)) if __name__ == "__main__": p = argparse.ArgumentParser(description='Extract JPEG frames from videos') p.add_argument('dataset', type=str) extract_frames(p.parse_args().__dict__)
Write out concatenated frame on decode test failure
import argparse import scanner import numpy as np import cv2 from decode import db @db.loader('frame') def load_frames(buf, metadata): return np.frombuffer(buf, dtype=np.uint8) \ .reshape((metadata.height,metadata.width,3)) def extract_frames(args): job = load_frames(args['dataset'], 'edr') video_paths = job._dataset.video_data.original_video_paths for (vid, frames) in job.as_frame_list(): video_path = video_paths[int(vid)] inp = cv2.VideoCapture(video_path) assert(inp.isOpened()) video_frame_num = -1 for (frame_num, buf) in frames: while video_frame_num != frame_num: _, video_frame = inp.read() video_frame_num += 1 scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR) frame_diff = np.abs(scanner_frame - video_frame) if frame_diff.sum() != 0: print('Frame {} does not match!'.format(frame_num)) cv2.imwrite('decode_frames_' + str(frame_num) + '.jpg', np.concatenate( (scanner_frame, video_frame, frame_diff), 1)) if __name__ == "__main__": p = argparse.ArgumentParser(description='Extract JPEG frames from videos') p.add_argument('dataset', type=str) extract_frames(p.parse_args().__dict__)
# -*- coding: utf-8 -*- """ Simple urls for use in testing the gcframe app. """ from __future__ import unicode_literals # The defaults module is deprecated in Django 1.5, but necessary to # support Django 1.3. drop ``.defaults`` when dropping 1.3 support. from django.conf.urls.defaults import patterns, url from .views import normal, framed, exempt urlpatterns = patterns('', url(r'normal/$', normal, name='gcframe-test-normal'), url(r'framed/$', framed, name='gcframe-test-framed'), url(r'exempt/$', exempt, name='gcframe-test-exempt'), )
Handle a Django deprecation properly. Should have done this in commit cb4eae7b7.
# -*- coding: utf-8 -*- """ Simple urls for use in testing the gcframe app. """ from __future__ import unicode_literals try: from django.conf.urls import patterns, url except ImportError: # Django 1.3 from django.conf.urls.defaults import patterns, url from .views import normal, framed, exempt urlpatterns = patterns('', url(r'normal/$', normal, name='gcframe-test-normal'), url(r'framed/$', framed, name='gcframe-test-framed'), url(r'exempt/$', exempt, name='gcframe-test-exempt'), )
import inspect from .random import random from .circular import circular from .force_directed import fruchterman_reingold _layout_map = { 'random': random, 'circular': circular, 'force_directed': fruchterman_reingold, 'spring_layout': fruchterman_reingold } AVAILABLE_LAYOUTS = _layout_map.keys() def get_layout(name, *args, **kwargs): """ Retrieve a graph layout Some graph layouts accept extra options. Please refer to their documentation for more information. Parameters ---------- name : string The name of the layout. The variable `AVAILABLE_LAYOUTS` contains all available layouts. *args Positional arguments which are passed to the layout. **kwargs Keyword arguments which are passed to the layout. Returns ------- layout : callable The callable generator which will calculate the graph layout """ if name not in _layout_map: raise KeyError( "Graph layout '{}' not found. Should be one of {}".format( name, ", ".join(AVAILABLE_LAYOUTS) ) ) layout = _layout_map[name] if inspect.isclass(layout): layout = layout(*args, **kwargs) return layout
Make sure AVAILABLE_LAYOUTS is a tuple

dict.keys() returns a view object in Python 3, not a list.
import inspect from .random import random from .circular import circular from .force_directed import fruchterman_reingold _layout_map = { 'random': random, 'circular': circular, 'force_directed': fruchterman_reingold, 'spring_layout': fruchterman_reingold } AVAILABLE_LAYOUTS = tuple(_layout_map.keys()) def get_layout(name, *args, **kwargs): """ Retrieve a graph layout Some graph layouts accept extra options. Please refer to their documentation for more information. Parameters ---------- name : string The name of the layout. The variable `AVAILABLE_LAYOUTS` contains all available layouts. *args Positional arguments which are passed to the layout. **kwargs Keyword arguments which are passed to the layout. Returns ------- layout : callable The callable generator which will calculate the graph layout """ if name not in _layout_map: raise KeyError( "Graph layout '{}' not found. Should be one of {}".format( name, ", ".join(AVAILABLE_LAYOUTS) ) ) layout = _layout_map[name] if inspect.isclass(layout): layout = layout(*args, **kwargs) return layout
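The motivation in isolation: in Python 3, dict.keys() returns a live view, so freezing it with tuple() yields a stable, reusable sequence.

d = {'a': 1}
view = d.keys()
frozen = tuple(d.keys())
d['b'] = 2
print(list(view))  # ['a', 'b'] -- the view tracks the dict
print(frozen)      # ('a',)     -- the tuple does not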
from django.conf.urls import include, url from django.core import urlresolvers from django.utils.translation import ugettext_lazy as _ from wagtail.wagtailcore import hooks from wagtail.wagtailadmin.menu import MenuItem from wagtail.wagtailsites import urls def register_admin_urls(): return [ url(r'^sites/', include(urls)), ] hooks.register('register_admin_urls', register_admin_urls) def construct_main_menu(request, menu_items): if request.user.is_superuser: menu_items.append( MenuItem(_('Sites'), urlresolvers.reverse('wagtailsites_index'), classnames='icon icon-site', order=602) ) hooks.register('construct_main_menu', construct_main_menu)
Move Sites to the settings menu (and use decorator syntax for hooks)
from django.conf.urls import include, url
from django.core import urlresolvers
from django.utils.translation import ugettext_lazy as _

from wagtail.wagtailcore import hooks
from wagtail.wagtailadmin.menu import MenuItem

from wagtail.wagtailsites import urls


@hooks.register('register_admin_urls')
def register_admin_urls():
    return [
        url(r'^sites/', include(urls)),
    ]


class SitesMenuItem(MenuItem):
    def is_shown(self, request):
        return request.user.is_superuser


@hooks.register('register_settings_menu_item')
def register_sites_menu_item():
    return SitesMenuItem(_('Sites'), urlresolvers.reverse('wagtailsites_index'),
                         classnames='icon icon-site', order=602)
import echonest from artists.models import Artist from echonest.models import SimilarResponse from users.models import User from .models import (GeneralArtist, UserSimilarity, Similarity, update_similarities) def add_new_similarities(artist, force_update=False): similarities = [] responses = SimilarResponse.objects.filter( normalized_name=artist.normalized_name) if responses.exists() and not force_update: return # Echo Nest similarities already added user = User.objects.get(email='echonest') artist_names = echonest.get_similar(artist.name) cc_artists = Artist.objects.filter(name__in=artist_names) for cc_artist in cc_artists: kwargs = dict( cc_artist=cc_artist, other_artist=artist, ) UserSimilarity.objects.get_or_create(defaults={'weight': 1}, user=user, **kwargs) similarities.append(Similarity.objects.get_or_create(**kwargs)[0]) update_similarities(similarities) def get_similar(name): artist, _ = GeneralArtist.objects.get_or_create( normalized_name=name.upper(), defaults={'name': name}) add_new_similarities(artist) return Artist.objects.filter(similarity__other_artist=artist, similarity__weight__gt=0)
Order similar artist results properly
from django.db.models import Q import echonest from artists.models import Artist from echonest.models import SimilarResponse from users.models import User from .models import (GeneralArtist, UserSimilarity, Similarity, update_similarities) def add_new_similarities(artist, force_update=False): similarities = [] responses = SimilarResponse.objects.filter( normalized_name=artist.normalized_name) if responses.exists() and not force_update: return # Echo Nest similarities already added user = User.objects.get(email='echonest') artist_names = echonest.get_similar(artist.name) cc_artists = Artist.objects.filter(name__in=artist_names) for cc_artist in cc_artists: kwargs = dict( cc_artist=cc_artist, other_artist=artist, ) UserSimilarity.objects.get_or_create(defaults={'weight': 1}, user=user, **kwargs) similarities.append(Similarity.objects.get_or_create(**kwargs)[0]) update_similarities(similarities) def get_similar(name): artist, _ = GeneralArtist.objects.get_or_create( normalized_name=name.upper(), defaults={'name': name}) add_new_similarities(artist) similar = Q(similarity__other_artist=artist, similarity__weight__gt=0) return Artist.objects.filter(similar).order_by('-similarity__weight')
from django import template

class InvalidParamsError(template.TemplateSyntaxError):
    ''' Custom exception class to distinguish usual TemplateSyntaxErrors
    and validation errors for templatetags introduced by ``validate_params``
    function'''
    pass

def validate_params(bits, arguments_count, keyword_positions):
    '''
    Raises exception if passed params (`bits`) do not match signature.
    Signature is defined by `arguments_count` (acceptable number of params) and
    keyword_positions (dictionary with positions in keys and keywords in values,
    for ex. {2:'by', 4:'of', 5:'type', 7:'as'}).
    '''
    if len(bits) != arguments_count+1:
        raise InvalidTagParamsError("'%s' tag takes %d arguments" % (bits[0], arguments_count,))

    for pos in keyword_positions:
        value = keyword_positions[pos]
        if bits[pos] != value:
            raise InvalidTagParamsError("argument #%d to '%s' tag must be '%s'" % (pos, bits[0], value))
Fix typo/bug in validate_params function
from django import template

class InvalidParamsError(template.TemplateSyntaxError):
    ''' Custom exception class to distinguish usual TemplateSyntaxErrors
    and validation errors for templatetags introduced by ``validate_params``
    function'''
    pass

def validate_params(bits, arguments_count, keyword_positions):
    '''
    Raises exception if passed params (`bits`) do not match signature.
    Signature is defined by `arguments_count` (acceptable number of params) and
    keyword_positions (dictionary with positions in keys and keywords in values,
    for ex. {2:'by', 4:'of', 5:'type', 7:'as'}).
    '''
    if len(bits) != arguments_count+1:
        raise InvalidParamsError("'%s' tag takes %d arguments" % (bits[0], arguments_count,))

    for pos in keyword_positions:
        value = keyword_positions[pos]
        if bits[pos] != value:
            raise InvalidParamsError("argument #%d to '%s' tag must be '%s'" % (pos, bits[0], value))
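A hedged usage sketch (the tag name and bits here are illustrative): a templatetag would pass the already-split token contents.

bits = ['render_widget', 'foo', 'by', 'bar']
validate_params(bits, 3, {2: 'by'})  # matches the signature; passes silently
try:
    validate_params(bits, 2, {})
except InvalidParamsError as exc:
    print(exc)  # 'render_widget' tag takes 2 arguments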
# -*- coding: utf-8 -*- from __future__ import absolute_import # Import python libs import os # Import third party libs import yaml import logging # Import salt libs import salt.utils log = logging.getLogger(__name__) def shell(): ''' Return the default shell to use on this system ''' # Provides: # shell return {'shell': os.environ.get('SHELL', '/bin/sh')} def config(): ''' Return the grains set in the grains file ''' if 'conf_file' not in __opts__: return {} if os.path.isdir(__opts__['conf_file']): gfn = os.path.join( __opts__['conf_file'], 'grains' ) else: gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'grains' ) if os.path.isfile(gfn): with salt.utils.fopen(gfn, 'rb') as fp_: try: return yaml.safe_load(fp_.read()) except Exception: log.warning("Bad syntax in grains file! Skipping.") return {} return {}
Allow proxy minions to load static grains Add the `__proxyenabled__` global var so the extra grains are loaded. Inside the `config` function of the extra grains check if the minion is a proxy, then try loading from <conf_file>/proxy.d/<proxy id>/grains.
# -*- coding: utf-8 -*- from __future__ import absolute_import # Import python libs import os # Import third party libs import yaml import logging # Import salt libs import salt.utils __proxyenabled__ = ['*'] log = logging.getLogger(__name__) def shell(): ''' Return the default shell to use on this system ''' # Provides: # shell return {'shell': os.environ.get('SHELL', '/bin/sh')} def config(): ''' Return the grains set in the grains file ''' if 'conf_file' not in __opts__: return {} if os.path.isdir(__opts__['conf_file']): if salt.utils.is_proxy(): gfn = os.path.join( __opts__['conf_file'], 'proxy.d', __opts__['id'], 'grains' ) else: gfn = os.path.join( __opts__['conf_file'], 'grains' ) else: if salt.utils.is_proxy(): gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'proxy.d', __opts__['id'], 'grains' ) else: gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'grains' ) if os.path.isfile(gfn): log.debug('Loading static grains from %s', gfn) with salt.utils.fopen(gfn, 'rb') as fp_: try: return yaml.safe_load(fp_.read()) except Exception: log.warning("Bad syntax in grains file! Skipping.") return {} return {}
from lib import BaseTest class ShowMirror1Test(BaseTest): """ show mirror: regular mirror """ fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"] runCmd = "aptly mirror show mirror1" class ShowMirror2Test(BaseTest): """ show mirror: missing mirror """ runCmd = "aptly mirror show mirror-xx" expectedCode = 1 class ShowMirror3Test(BaseTest): """ show mirror: regular mirror with packages """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib"
Remove updated at while comparing.
from lib import BaseTest import re class ShowMirror1Test(BaseTest): """ show mirror: regular mirror """ fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"] runCmd = "aptly mirror show mirror1" class ShowMirror2Test(BaseTest): """ show mirror: missing mirror """ runCmd = "aptly mirror show mirror-xx" expectedCode = 1 class ShowMirror3Test(BaseTest): """ show mirror: regular mirror with packages """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib" outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", s)
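The normalisation in isolation, as a hedged sketch of what outputMatchPrepare strips before output comparison (the sample text is illustrative):

import re

out = "Archive: wheezy\nLast update: 2014-01-01 10:00:00 UTC\nPackages: 12\n"
print(re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", out))
# -> Archive: wheezy
#    Packages: 12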
#!/usr/bin/env python def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('signal', parent_package, top_path) config.add_data_dir('tests') config.add_extension('sigtools', sources=['sigtoolsmodule.c', 'firfilter.c','medianfilter.c'], depends = ['sigtools.h'] ) config.add_extension('spline', sources = ['splinemodule.c','S_bspline_util.c','D_bspline_util.c', 'C_bspline_util.c','Z_bspline_util.c','bspline_util.c'], ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
Add newsig.c as a dependency to sigtools module.
#!/usr/bin/env python def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('signal', parent_package, top_path) config.add_data_dir('tests') config.add_extension('sigtools', sources=['sigtoolsmodule.c', 'firfilter.c','medianfilter.c'], depends = ['sigtools.h', 'newsig.c'] ) config.add_extension('spline', sources = ['splinemodule.c','S_bspline_util.c','D_bspline_util.c', 'C_bspline_util.c','Z_bspline_util.c','bspline_util.c'], ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
import argparse import yaml def parse_arguments_based_on_yaml(yaml_file): with open(yaml_file) as f: yaml_data = yaml.load(f) # to start with, support only a single parameter key = list(yaml_data.keys())[0] value = yaml_data[key] parser = argparse.ArgumentParser() parser.add_argument("-{}".format(key), default=value) args = parser.parse_args() return args
Implement creating arguments for multiple strings
import argparse import yaml def parse_arguments_based_on_yaml(yaml_file): with open(yaml_file) as f: yaml_data = yaml.load(f) parser = argparse.ArgumentParser() for key, value in yaml_data.items(): parser.add_argument("-{}".format(key), default=value) args = parser.parse_args() return args
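A hedged usage sketch (assumes PyYAML is installed; the file name and keys are illustrative). Each YAML key becomes a single-dash option whose default is the YAML value:

with open('defaults.yaml', 'w') as f:
    f.write('lr: 0.1\nepochs: 5\n')

args = parse_arguments_based_on_yaml('defaults.yaml')
print(args.lr, args.epochs)  # -> 0.1 5 (when the script is run with no flags)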
#!/usr/bin/env python from fabric.api import env, run, sudo, task from fabric.context_managers import cd, prefix env.use_ssh_config = True home = '~/jarvis2' @task def pull_code(): with cd(home): run('git pull --rebase') @task def update_dependencies(): with prefix('workon jarvis2'): run('pip install --use-mirrors -r %s/requirements.txt' % (home,)) @task def restart_server(): sudo('/etc/init.d/uwsgi restart', pty=False) @task def restart_client(): run('pkill -x midori') @task(default=True) def deploy(update_deps=False): pull_code() if update_deps: update_dependencies() restart_server() restart_client() @task def full_deploy(): deploy(True)
Add task for pushing code with rsync
#!/usr/bin/env python from fabric.api import env, run, sudo, task from fabric.context_managers import cd, prefix from fabric.contrib.project import rsync_project env.use_ssh_config = True home = '~/jarvis2' @task def pull_code(): with cd(home): run('git pull --rebase') @task def push_code(): rsync_project(local_dir='.', remote_dir=home, exclude=('.git', '.vagrant'), extra_opts='--filter=":- .gitignore"') @task def update_dependencies(): with prefix('workon jarvis2'): run(('pip install --quiet --use-mirrors --upgrade' ' -r {home}/requirements.txt').format(home=home)) @task def restart_server(): sudo('/etc/init.d/uwsgi restart', pty=False) @task def restart_client(): run('pkill -x midori') @task(default=True) def deploy(update_deps=False): push_code() if update_deps: update_dependencies() restart_server() restart_client() @task def full_deploy(): deploy(True)
# -*- coding: utf-8 -*- from south.db import db from south.v2 import SchemaMigration class Migration(SchemaMigration): def forwards(self, orm): db.rename_table('system_key_value', 'system_kv') def backwards(self, orm): db.rename_table('system_kv', 'system_key_value')
Add ORM freeze thing to SystemKeyValue migration
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): db.rename_table('system_key_value', 'system_kv') def backwards(self, orm): db.rename_table('system_kv', 'system_key_value') models = { 'system.system': { 'Meta': {'object_name': 'System', 'db_table': "'system'"}, 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'system.systemkeyvalue': { 'Meta': {'unique_together': "(('key', 'value', 'system'),)", 'object_name': 'SystemKeyValue', 'db_table': "'system_kv'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_quoted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['system.System']"}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'}) } } complete_apps = ['system']
"""Markdown Exporter class""" #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from IPython.config import Config from .templateexporter import TemplateExporter #----------------------------------------------------------------------------- # Classes #----------------------------------------------------------------------------- class MarkdownExporter(TemplateExporter): """ Exports to a markdown document (.md) """ def _file_extension_default(self): return 'md' def _template_file_default(self): return 'markdown' output_mimetype = 'text/markdown' def _raw_mimetypes_default(self): return ['text/markdown', 'text/html', ''] @property def default_config(self): c = Config({ 'NbConvertBase': { 'display_data_priority': ['html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'] }, 'ExtractOutputPreprocessor': { 'enabled':True} }) c.merge(super(MarkdownExporter,self).default_config) return c
Revert "Removed Javascript from Markdown by adding display priority to def config." This reverts commit 58e05f9625c60f8deba9ddf1c74dba73e8ea7dd1.
"""Markdown Exporter class""" #----------------------------------------------------------------------------- # Copyright (c) 2013, the IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from IPython.config import Config from .templateexporter import TemplateExporter #----------------------------------------------------------------------------- # Classes #----------------------------------------------------------------------------- class MarkdownExporter(TemplateExporter): """ Exports to a markdown document (.md) """ def _file_extension_default(self): return 'md' def _template_file_default(self): return 'markdown' output_mimetype = 'text/markdown' def _raw_mimetypes_default(self): return ['text/markdown', 'text/html', ''] @property def default_config(self): c = Config({'ExtractOutputPreprocessor':{'enabled':True}}) c.merge(super(MarkdownExporter,self).default_config) return c
#!/usr/bin/python3 from bs4 import BeautifulSoup import requests r = requests.get("https://projects.archlinux.org/") soup = BeautifulSoup(r.text) repos = soup.select(".sublevel-repo a") repo_names = [] for repo in repos: repo_name = repo.string if repo_name[-4:] == ".git": repo_name = repo_name[:-4] repo_names.append(repo_name) with open("projects.txt", mode = "w", encoding = "utf-8") as projects_file: for repo_name in repo_names: projects_file.write(repo_name + "\n")
Update project downloader to do diffs before overwriting
#!/usr/bin/python3 from bs4 import BeautifulSoup import requests import simplediff from pprint import pprint r = requests.get("https://projects.archlinux.org/") soup = BeautifulSoup(r.text) repos = soup.select(".sublevel-repo a") with open("projects.txt", mode = "r", encoding = "utf-8") as projects_file: cur_repos = projects_file.readlines() new_repos = [] for repo in repos: repo_name = repo.string if repo_name[-4:] == ".git": repo_name = repo_name[:-4] new_repos.append(repo_name + "\n") repo_diff = simplediff.string_diff(''.join(cur_repos), ''.join(new_repos)) added = [] removed = [] for (diff_type, values) in repo_diff: if diff_type == "+": added.extend(values) elif diff_type == "-": removed.extend(values) if added: print("Added:") pprint(added) if removed: print("Removed:") pprint(removed) if added or removed: with open("projects.txt", mode = "w", encoding = "utf-8") as projects_file: for repo_name in new_repos: projects_file.write(repo_name) else: print("No projects were added or removed.")
# flake8: noqa __all__ = [ 'ExtensionManager', 'EnabledExtensionManager', 'NamedExtensionManager', 'HookManager', 'DriverManager', ] from .extension import ExtensionManager from .enabled import EnabledExtensionManager from .named import NamedExtensionManager from .hook import HookManager from .driver import DriverManager import logging # Configure a NullHandler for our log messages in case # the app we're used from does not set up logging. LOG = logging.getLogger('stevedore') if hasattr(logging, 'NullHandler'): LOG.addHandler(logging.NullHandler()) else: class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None LOG.addHandler(NullHandler())
Remove work around for NullHandler logging module added NullHandler in Python 2.7, we have dropped Python 2.6 support now, so don't need the work around any more. Change-Id: Ib6fdbc2f92cd66f4846243221e696f1b1fa712df
# flake8: noqa __all__ = [ 'ExtensionManager', 'EnabledExtensionManager', 'NamedExtensionManager', 'HookManager', 'DriverManager', ] from .extension import ExtensionManager from .enabled import EnabledExtensionManager from .named import NamedExtensionManager from .hook import HookManager from .driver import DriverManager import logging # Configure a NullHandler for our log messages in case # the app we're used from does not set up logging. LOG = logging.getLogger('stevedore') LOG.addHandler(logging.NullHandler())
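The effect in isolation, as a hedged sketch: with a NullHandler attached to the library's root logger, records from child loggers are swallowed unless the application configures logging itself.

import logging

logging.getLogger('stevedore').addHandler(logging.NullHandler())
logging.getLogger('stevedore.demo').warning('dropped quietly')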
import random def eightball(): return random.choice(("It is certain", "It is decidedly so", "Without a doubt", "Yes, definitely", "You may rely on it", "As I see it, yes", "Most likely", "Outlook good", "Yes", "Signs point to yes", "Reply hazy try again", "Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again", "Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"))
Fix eightball method response grammar
import random def eightball(): return random.choice(("It is certain", "It is decidedly so", "Without a doubt", "Yes, definitely", "You may rely on it", "As I see it, yes", "Most likely", "Outlook good", "Yes", "Signs point to yes", "Reply hazy; try again", "Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again", "Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"))
import os if os.getenv('OPENSHIFT_REPO_DIR'): from .staging import * elif os.getenv('TRAVIS_CI'): from .testing import * else: from .development import *
[fix] Use production settings in Heroku
import os if os.getenv('OPENSHIFT_REPO_DIR'): from .staging import * elif os.getenv('TRAVIS_CI'): from .testing import * elif os.getenv('HEROKU'): from .production import * else: from .development import *
from django.conf.urls import patterns, url from django.contrib.auth.decorators import login_required from django_auto_filter.views_django_auto_filter_new import DjangoAutoFilterNew from djangoautoconf.model_utils.model_attr_utils import model_enumerator from ufs_tools.string_tools import class_name_to_low_case def add_filter_to_url_for(urlpatterns, models): for model in model_enumerator(models): urlpatterns += patterns('', url(r'^models/%s/' % class_name_to_low_case(model.__name__), DjangoAutoFilterNew.as_view(model_class=model))) def get_filter_urls(models, template_name=None): url_list = [] for model in model_enumerator(models): param_dict = {"model": model} if template_name is not None: param_dict["template_name"] = template_name url_list.append(url(r'^model/%s/' % class_name_to_low_case(model.__name__), login_required(DjangoAutoFilterNew.as_view(**param_dict)))) p = patterns('', *url_list) return p
Fix attribute from model_class to model issue.
from django.conf.urls import patterns, url from django.contrib.auth.decorators import login_required from django_auto_filter.views_django_auto_filter_new import DjangoAutoFilterNew from djangoautoconf.model_utils.model_attr_utils import model_enumerator from ufs_tools.string_tools import class_name_to_low_case def add_filter_to_url_for(urlpatterns, models): for model in model_enumerator(models): urlpatterns += patterns('', url(r'^models/%s/' % class_name_to_low_case(model.__name__), DjangoAutoFilterNew.as_view(model=model))) def get_filter_urls(models, template_name=None): url_list = [] for model in model_enumerator(models): param_dict = {"model": model} if template_name is not None: param_dict["template_name"] = template_name url_list.append(url(r'^model/%s/' % class_name_to_low_case(model.__name__), login_required(DjangoAutoFilterNew.as_view(**param_dict)))) p = patterns('', *url_list) return p
from motobot import hook from time import sleep @hook('PING') def handle_ping(bot, message): """ Handle the server's pings. """ bot.send('PONG :' + message.params[-1]) @hook('NOTICE') def handle_notice(bot, message): """ Use the notice message to identify and register to the server. """ if not bot.identified: bot.send('USER MotoBot localhost localhost MotoBot') bot.send('NICK ' + bot.nick) sleep(2) if bot.nickserv_password is not None: bot.send('PRIVMSG nickserv :identify ' + bot.nickserv_password) sleep(2) for channel in bot.channels: bot.send('JOIN ' + channel) bot.identified = True @hook('INVITE') def handle_invite(bot, message): """ Join a channel when invited. """ bot.join(message.params[-1]) @hook('ERROR') def handle_error(bot, message): """ Handle an error message from the server. """ bot.connected = bot.identified = False
Change connect command to 439
from motobot import hook from time import sleep @hook('PING') def handle_ping(bot, message): """ Handle the server's pings. """ bot.send('PONG :' + message.params[-1]) @hook('439') def handle_notice(bot, message): """ Use the notice message to identify and register to the server. """ if not bot.identified: bot.send('USER MotoBot localhost localhost MotoBot') bot.send('NICK ' + bot.nick) sleep(2) if bot.nickserv_password is not None: bot.send('PRIVMSG nickserv :identify ' + bot.nickserv_password) sleep(2) for channel in bot.channels: bot.send('JOIN ' + channel) bot.identified = True @hook('INVITE') def handle_invite(bot, message): """ Join a channel when invited. """ bot.join(message.params[-1]) @hook('ERROR') def handle_error(bot, message): """ Handle an error message from the server. """ bot.connected = bot.identified = False
# Create your views here.
from django.http import HttpResponse
from lingcod.raster_stats.models import zonal_stats, RasterDataset, ZonalStatsCache
from django.core import serializers
from django.contrib.gis.geos import fromstr

def stats_for_geom(request, raster_name):
    # Confirm that we have a valid polygon geometry
    if 'geom_txt' in request.REQUEST:
        geom_txt = str(request.REQUEST['geom_txt'])
    else:
        return HttpResponse("Must supply a geom_txt parameter", status=404)

    try:
        geom = fromstr(geom_txt)
    except:
        return HttpResponse("Must supply a parsable geom_txt parameter (wkt or json)", status=404)

    # Confirm raster with name exists
    try:
        raster = RasterDataset.objects.get(name=raster_name)
    except:
        return HttpResponse("No raster with name of %s" % raster_name, status=404)

    #TODO check if continuous
    zonal = zonal_stats(geom, raster)
    zonal.save()
    zqs = ZonalStatsCache.objects.filter(pk=zonal.pk)
    data = serializers.serialize("json", zqs)
    return HttpResponse(data, mimetype='application/javascript')

def raster_list(request):
    rasts = RasterDataset.objects.all()
    data = serializers.serialize("json", rasts)
    return HttpResponse(data, mimetype='application/javascript')
Exclude certain fields from json serialization in raster_stats web service
# Create your views here.
from django.http import HttpResponse
from lingcod.raster_stats.models import zonal_stats, RasterDataset, ZonalStatsCache
from django.core import serializers
from django.contrib.gis.geos import fromstr

def stats_for_geom(request, raster_name):
    # Confirm that we have a valid polygon geometry
    if 'geom_txt' in request.REQUEST:
        geom_txt = str(request.REQUEST['geom_txt'])
    else:
        return HttpResponse("Must supply a geom_txt parameter", status=404)

    try:
        geom = fromstr(geom_txt)
    except:
        return HttpResponse("Must supply a parsable geom_txt parameter (wkt or json)", status=404)

    # Confirm raster with name exists
    try:
        raster = RasterDataset.objects.get(name=raster_name)
    except:
        return HttpResponse("No raster with name of %s" % raster_name, status=404)

    #TODO check if continuous
    zonal = zonal_stats(geom, raster)
    zonal.save()
    zqs = ZonalStatsCache.objects.filter(pk=zonal.pk)
    data = serializers.serialize("json", zqs, fields=('avg','min','max','median','mode','stdev','nulls','pixels','date_modified','raster'))
    return HttpResponse(data, mimetype='application/json')

def raster_list(request):
    rasts = RasterDataset.objects.all()
    data = serializers.serialize("json", rasts, fields=('name','type'))
    return HttpResponse(data, mimetype='application/json')
import json from datawire.views.util import JSONEncoder class Store(object): def __init__(self, url): self.url = url def store(self, frame): urn = frame.get('urn') data = json.dumps(frame, cls=JSONEncoder) return self._store(urn, data) def load(self, urn): data = self._load(urn) if data is not None: data = json.loads(data) return data
Fix encoding of store serialisation.
import json from datawire.views.util import JSONEncoder class Store(object): def __init__(self, url): self.url = url def store(self, frame): urn = frame.get('urn') data = JSONEncoder().encode(frame) return self._store(urn, data) def load(self, urn): data = self._load(urn) if data is not None: data = json.loads(data) return data
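One plausible reason direct instantiation matters, as a hedged sketch (not taken from datawire's encoder): json.dumps() constructs the given cls with its own keyword defaults, which can override defaults a subclass sets in __init__; calling the encoder directly preserves them.

import json

class CompactEncoder(json.JSONEncoder):
    def __init__(self, **kw):
        kw.setdefault('separators', (',', ':'))
        super(CompactEncoder, self).__init__(**kw)

print(json.dumps({'a': 1}, cls=CompactEncoder))  # {"a": 1} -- dumps' defaults win
print(CompactEncoder().encode({'a': 1}))         # {"a":1}  -- compact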
""" Definition of the plugin. """ from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool from . import models @plugin_pool.register class ImagePlugin(ContentPlugin): model = models.ImageItem category = _('Image') render_template = 'icekit/plugins/image/default.html' raw_id_fields = ['image', ]
Implement per-app/model template overrides for ImagePlugin.
""" Definition of the plugin. """ from django.utils.translation import ugettext_lazy as _ from django.template import loader from fluent_contents.extensions import ContentPlugin, plugin_pool from . import models @plugin_pool.register class ImagePlugin(ContentPlugin): model = models.ImageItem category = _('Image') raw_id_fields = ['image', ] def get_render_template(self, request, instance, **kwargs): template = loader.select_template([ 'icekit/plugins/image/%s_%s.html' % ( type(instance.parent)._meta.app_label, type(instance.parent)._meta.model_name ), 'icekit/plugins/image/%s.html' % type( instance.parent)._meta.app_label, 'icekit/plugins/image/default.html']) return template.name
from setuptools import setup, find_packages setup( name='zeit.push', version='1.21.0.dev0', author='gocept, Zeit Online', author_email='[email protected]', url='http://www.zeit.de/', description="Sending push notifications through various providers", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'fb', 'gocept.testing', 'grokcore.component', 'mock', 'pytz', 'requests', 'setuptools', 'tweepy', 'urbanairship >= 1.0', 'zc.sourcefactory', 'zeit.cms >= 2.102.0.dev0', 'zeit.content.article', 'zeit.content.image', 'zeit.objectlog', 'zope.app.appsetup', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.schema', ], entry_points={ 'console_scripts': [ 'facebook-access-token = zeit.push.facebook:create_access_token', 'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation', ], 'fanstatic.libraries': [ 'zeit_push=zeit.push.browser.resources:lib', ], }, )
ZON-4007: Declare dependency (belongs to commit:6791185)
from setuptools import setup, find_packages setup( name='zeit.push', version='1.21.0.dev0', author='gocept, Zeit Online', author_email='[email protected]', url='http://www.zeit.de/', description="Sending push notifications through various providers", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'fb', 'gocept.testing', 'grokcore.component', 'mock', 'pytz', 'requests', 'setuptools', 'tweepy', 'urbanairship >= 1.0', 'zc.sourcefactory', 'zeit.cms >= 2.102.0.dev0', 'zeit.content.article', 'zeit.content.image', 'zeit.content.text', 'zeit.objectlog', 'zope.app.appsetup', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.schema', ], entry_points={ 'console_scripts': [ 'facebook-access-token = zeit.push.facebook:create_access_token', 'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation', ], 'fanstatic.libraries': [ 'zeit_push=zeit.push.browser.resources:lib', ], }, )
#!/usr/bin/env python #encoding:utf-8 #author:dbr/Ben #project:tvnamer #repository:http://github.com/dbr/tvnamer #license:Creative Commons GNU GPL v2 # http://creativecommons.org/licenses/GPL/2.0/ """Test tvnamer's filename parser """ import os import sys from copy import copy import unittest sys.path.append(os.path.join(os.path.abspath(sys.path[0]), "..")) from utils import FileParser from test_files import files def check_test(curtest): """Runs test case, used by test_generator """ parser = FileParser(curtest['input']) theep = parser.parse() assert theep.seriesname.lower() == curtest['seriesname'].lower() assert theep.seasonnumber == curtest['seasonnumber'] assert theep.episodenumber == curtest['episodenumber'] def test_generator(): """Generates test for each test case in test_files.py """ for category, testcases in files.items(): for testindex, curtest in enumerate(testcases): cur_tester = lambda x: check_test(x) cur_tester.description = '%s_%d' % (category, testindex) yield (cur_tester, curtest) if __name__ == '__main__': import nose nose.main()
Fix utility being picked up as test, display expected-and-got values in assertion error
#!/usr/bin/env python
#encoding:utf-8
#author:dbr/Ben
#project:tvnamer
#repository:http://github.com/dbr/tvnamer
#license:Creative Commons GNU GPL v2
# http://creativecommons.org/licenses/GPL/2.0/

"""Test tvnamer's filename parser
"""

import os
import sys
from copy import copy
import unittest
sys.path.append(os.path.join(os.path.abspath(sys.path[0]), ".."))
from utils import FileParser
from test_files import files

def check_case(curtest):
    """Runs test case, used by test_generator
    """
    parser = FileParser(curtest['input'])
    theep = parser.parse()

    assert theep.seriesname.lower() == curtest['seriesname'].lower(), \
        "%s == %s" % (theep.seriesname.lower(), curtest['seriesname'].lower())

    assert theep.seasonnumber == curtest['seasonnumber'], \
        "%s == %s" % (theep.seasonnumber, curtest['seasonnumber'])

    assert theep.episodenumber == curtest['episodenumber'], \
        "%s == %s" % (theep.episodenumber, curtest['episodenumber'])

def test_generator():
    """Generates test for each test case in test_files.py
    """
    for category, testcases in files.items():
        for testindex, curtest in enumerate(testcases):
            cur_tester = lambda x: check_case(x)
            cur_tester.description = '%s_%d' % (category, testindex)
            yield (cur_tester, curtest)

if __name__ == '__main__':
    import nose
    nose.main()
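Aside: the assert form matters here. Writing assert (cond, "msg") asserts a two-element tuple, which is always truthy, so the test can never fail; the message must follow the condition after a comma, outside any parentheses. A runnable illustration:

# A non-empty tuple is always truthy, so the parenthesized form never fails.
assert (1 == 2, "this assertion passes silently -- the tuple is truthy")

try:
    assert 1 == 2, "this is the form that actually fails"
except AssertionError as err:
    print(err)  # prints: this is the form that actually fails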
# -*- coding: utf-8 -*- # # Debian Changes Bot # Copyright (C) 2008 Chris Lamb <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from DebianDevelChangesBot import Message class Popcon(Message): FIELDS = ('package', 'inst', 'vote', 'old', 'recent', 'nofiles') def format(self): msg = "Popcon for [package]%d[reset] - " % self.package for field in ('inst', 'vote', 'old', 'recent', 'nofiles'): msg += "[category]%s[/category]: %s " % (field, getattr(self, field)) msg += u"- [url]http://qa.debian.org/developer.php?popcon=%s[/url]" % self.package return msg
Correct typing issues in string interp. Signed-off-by: Chris Lamb <[email protected]>
# -*- coding: utf-8 -*- # # Debian Changes Bot # Copyright (C) 2008 Chris Lamb <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from DebianDevelChangesBot import Message class Popcon(Message): FIELDS = ('package', 'inst', 'vote', 'old', 'recent', 'nofiles') def format(self): msg = "Popcon for [package]%s[reset] - " % self.package for field in ('inst', 'vote', 'old', 'recent', 'nofiles'): msg += "[category]%s[/category]: %d " % (field, getattr(self, field)) msg += u"- [url]http://qa.debian.org/developer.php?popcon=%s[/url]" % self.package return msg
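Aside: the %s/%d swap in the output above is load-bearing. %s stringifies any value, while %d requires a number and raises TypeError otherwise, so a package name needs %s and the integer popcon counts need %d. A quick illustration:

print("Popcon for %s" % "gnupg")   # fine: %s calls str() on anything
print("inst: %d" % 1234)           # fine: %d with an int
try:
    print("Popcon for %d" % "gnupg")
except TypeError as err:
    print(err)  # %d format: a number is required, not str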
from app.na_celery.email_tasks import send_emails class WhenProcessingSendEmailsTask: def it_calls_send_email_to_task(self, mocker, db, db_session, sample_admin_user, sample_email): mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email') send_emails(sample_email.id) assert mock_send_email.call_args[0][0] == '[email protected]' assert mock_send_email.call_args[0][1] == 'workshop: test title' def it_sends_an_email_to_members_up_to_email_limit(self): pass def it_does_not_send_an_email_if_not_between_start_and_expiry(self): pass def it_sends_email_with_correct_template(self): pass
Update email task test for members
from app.na_celery.email_tasks import send_emails class WhenProcessingSendEmailsTask: def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member): mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200) send_emails(sample_email.id) assert mock_send_email.call_args[0][0] == sample_member.email assert mock_send_email.call_args[0][1] == 'workshop: test title' def it_sends_an_email_to_members_up_to_email_limit(self): pass def it_does_not_send_an_email_if_not_between_start_and_expiry(self): pass def it_sends_email_with_correct_template(self): pass
""" Shortcuts for common development check tasks """ from __future__ import unicode_literals from invoke import task @task(name='blacken', iterable=['folder']) def blacken(c, line_length=79, folder=None): """Run black on the current source""" folders = ['.'] if not folder else folder black_command_line = "black -l {0}".format(line_length) cmd = "find {0} -name '*.py' | xargs {1}".format( " ".join(folders), black_command_line ) c.run(cmd, pty=True)
Add a 'blacken/folders' configuration flag for blacken
""" Shortcuts for common development check tasks """ from __future__ import unicode_literals from invoke import task @task(name='blacken', iterable=['folder']) def blacken(c, line_length=79, folder=None): """Run black on the current source""" default_folders = ["."] configured_folders = c.config.get("blacken", {}).get("folders", default_folders) folders = configured_folders if not folder else folder black_command_line = "black -l {0}".format(line_length) cmd = "find {0} -name '*.py' | xargs {1}".format( " ".join(folders), black_command_line ) c.run(cmd, pty=True)
# encoding: utf-8 """ Utility functions to help testing. """ from unittest.mock import Mock class dummy(object): def __init__(self): self.components = {} def get(self, name, default): if name not in self.components: self.components[name] = Mock() return self.components[name] class dummy_master(object): def __init__(self): setattr(self, "__components", dummy())
Add facade to mocked components
# encoding: utf-8

"""
Utility functions to help testing.
"""

from unittest.mock import Mock


class dummy(object):
    def __init__(self):
        self.components = {}

    def get(self, name, default=None):
        if name not in self.components:
            self.components[name] = Mock()
        return self.components[name]


class dummy_master(object):
    def __init__(self):
        setattr(self, "__components", dummy())

    def get(self, name):
        # setattr() stored the attribute under the literal name
        # "__components"; writing self.__components here would be
        # name-mangled to _dummy_master__components and fail, so
        # fetch it with getattr() and delegate to dummy.get().
        return getattr(self, "__components").get(name)
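Aside: the getattr() in the facade above is needed because of Python's name mangling. A __name attribute reference written inside a class body is rewritten to _ClassName__name at compile time, but setattr() with a plain string stores the attribute under the literal name, so the two spellings address different attributes. A minimal demonstration:

class Demo(object):
    def __init__(self):
        setattr(self, "__components", {})   # stored literally as '__components'

    def mangled(self):
        return self.__components            # compiled as self._Demo__components

d = Demo()
print(getattr(d, "__components"))  # {} -- found under the literal name
try:
    d.mangled()
except AttributeError as err:
    print(err)  # 'Demo' object has no attribute '_Demo__components'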
from mongoengine import Document, DateTimeField, EmailField, IntField, \ ReferenceField, StringField import datetime, enum class Priority(enum.IntEnum): LOW = 0, MIDDLE = 1, HIGH = 2 """ This defines the basic model for a Task as we want it to be stored in the MongoDB. """ class Task(Document): title = StringField(max_length=150, required=True) description = StringField(max_length=800, required=True) creator = EmailField(max_length=120, required=True) assigne = EmailField(max_length=120, required=True) created_at = DateTimeField(default=datetime.datetime.now, required=True) closed_at = DateTimeField(required=False) status = IntField(default=0, required=True) priority = IntField(default=Priority.LOW, required=True)
Remove closed_at field from Task model
from mongoengine import Document, DateTimeField, EmailField, IntField, \ ReferenceField, StringField import datetime, enum class Priority(enum.IntEnum): LOW = 0, MIDDLE = 1, HIGH = 2 """ This defines the basic model for a Task as we want it to be stored in the MongoDB. """ class Task(Document): title = StringField(max_length=150, required=True) description = StringField(max_length=800, required=True) creator = EmailField(max_length=120, required=True) assigne = EmailField(max_length=120, required=True) created_at = DateTimeField(default=datetime.datetime.now, required=True) status = IntField(default=0, required=True) priority = IntField(default=Priority.LOW, required=True)
#!/usr/bin/env python # A hacky script to do dynamic snippets. import sys import os import datetime snippet_map = { 'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '), 'time' : datetime.datetime.now().strftime('%I:%M%p '), } keys = '\n'.join(snippet_map.keys()) result = os.popen('printf "%s" | rofi -dmenu ' % keys) selected_key = result.read().strip() os.system('xdotool type --clearmodifiers -- "%s"' % str(snippet_map[selected_key]))
Update snippet script to work with newlines.
#!/usr/bin/env python # A hacky script to do dynamic snippets. import sys import os import datetime snippet_map = { 'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '), 'time' : datetime.datetime.now().strftime('%I:%M%p '), 'sign' : 'Best,\nSameer', } keys = '\n'.join(snippet_map.keys()) result = os.popen('printf "%s" | rofi -dmenu ' % keys) selected_key = result.read().strip() os.system('sleep 0.1; xdotool type --clearmodifiers "$(printf "%s")"' % str(snippet_map[selected_key]))
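Aside: an alternative sketch that sidesteps the shell-quoting problem entirely by handing the snippet to xdotool as an argument vector, so no shell ever re-interprets the string and embedded newlines survive as-is. This assumes xdotool is installed and is not the approach the script above takes:

import subprocess

def type_snippet(text):
    # Argument-list form: no shell involved, newlines pass through intact.
    subprocess.call(['xdotool', 'type', '--clearmodifiers', '--', text])

# Example (requires xdotool and a running X session):
# type_snippet('Best,\nSameer')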
# Get a reference to the Application class. from web.core import Application # This is our WSGI application instance. app = Application("Hi.") # If we're run as the "main script", serve our application over HTTP. if __name__ == "__main__": app.serve('wsgiref')
Make use of root object and WebCore extensions. The Wiki root object is now the root of the sample application and the following extensions have been configured: - [`AnnotationExtension`](https://github.com/marrow/WebCore/blob/develop/web/ext/annotation.py) - [`DebugExtension`](https://github.com/marrow/WebCore/blob/develop/web/ext/debug.py) - [`SerializationExtension`](https://github.com/marrow/WebCore/blob/develop/web/ext/serialize.py) - [`DBExtension`](https://github.com/marrow/WebCore/blob/develop/web/ext/db.py) The default databse has been configured to use the [`marrow.mongo`](https://github.com/marrow/mongo) WebCore adapter, connecting to a MongoDB database named `test` on the local machine.
# Get a reference to the Application class. from web.core import Application # Get references to web framework extensions. from web.ext.annotation import AnnotationExtension from web.ext.debug import DebugExtension from web.ext.serialize import SerializationExtension from web.ext.db import DBExtension # Get a reference to our database connection adapter. from web.db.mongo import MongoDBConnection # Get a reference to our Wiki root object. from web.app.wiki.root import Wiki # This is our WSGI application instance. app = Application(Wiki, extensions=[ AnnotationExtension(), DebugExtension(), SerializationExtension(), DBExtension(MongoDBConnection("mongodb://localhost/test")), ]) # If we're run as the "main script", serve our application over HTTP. if __name__ == "__main__": app.serve('wsgiref')
from django.core.management import call_command from django.core.management.commands.syncdb import Command as SyncDBCommand class Command(SyncDBCommand): def handle_noargs(self, **options): # Run migrations first if options.get('database'): databases = [options.get('database')] else: databases = None call_command("upgradedb", do_execute=True, databases=databases, interactive=options.get('interactive'), verbosity=options.get('verbosity'), ) # Follow up with a syncdb on anything that wasnt included in migrations # (this catches things like test-only models) super(Command, self).handle_noargs(**options)
Update style to be consistent with project
from django.core.management import call_command from django.core.management.commands.syncdb import Command as SyncDBCommand class Command(SyncDBCommand): def handle_noargs(self, **options): # Run migrations first if options.get("database"): databases = [options.get("database")] else: databases = None call_command("upgradedb", do_execute=True, databases=databases, interactive=options.get("interactive"), verbosity=options.get("verbosity"), ) # Follow up with a syncdb on anything that wasnt included in migrations # (this catches things like test-only models) super(Command, self).handle_noargs(**options)
import logging from followthemoney import model from ingestors.ingestor import Ingestor from ingestors.support.temp import TempFileSupport from ingestors.support.shell import ShellSupport from ingestors.support.ole import OLESupport from ingestors.directory import DirectoryIngestor log = logging.getLogger(__name__) class OutlookPSTIngestor(Ingestor, TempFileSupport, OLESupport, ShellSupport): MIME_TYPES = ['application/vnd.ms-outlook'] EXTENSIONS = ['pst', 'ost', 'pab'] BASE_SCORE = 5 COMMAND_TIMEOUT = 12 * 60 * 60 def ingest(self, file_path, entity): entity.schema = model.get('Package') self.extract_ole_metadata(file_path, entity) temp_dir = self.make_empty_directory() try: self.exec_command('readpst', '-e', # make subfolders, files per message '-D', # include deleted '-r', # recursive structure '-8', # utf-8 where possible '-b', '-q', # quiet '-o', temp_dir, file_path) self.manager.delegate(DirectoryIngestor, temp_dir, entity) except Exception: log.exception("Failed to unpack PST.") # Handle partially extracted archives. self.manager.delegate(DirectoryIngestor, temp_dir, entity) raise
Make outlook emit single files
import logging from followthemoney import model from ingestors.ingestor import Ingestor from ingestors.support.temp import TempFileSupport from ingestors.support.shell import ShellSupport from ingestors.support.ole import OLESupport from ingestors.directory import DirectoryIngestor log = logging.getLogger(__name__) class OutlookPSTIngestor(Ingestor, TempFileSupport, OLESupport, ShellSupport): MIME_TYPES = ['application/vnd.ms-outlook'] EXTENSIONS = ['pst', 'ost', 'pab'] BASE_SCORE = 5 COMMAND_TIMEOUT = 12 * 60 * 60 def ingest(self, file_path, entity): entity.schema = model.get('Package') self.extract_ole_metadata(file_path, entity) temp_dir = self.make_empty_directory() try: self.exec_command('readpst', '-e', # make subfolders, files per message '-S', # single files '-D', # include deleted # '-r', # recursive structure '-8', # utf-8 where possible '-cv', # export vcards # '-q', # quiet '-o', temp_dir, file_path) self.manager.delegate(DirectoryIngestor, temp_dir, entity) except Exception: log.exception("Failed to unpack PST.") # Handle partially extracted archives. self.manager.delegate(DirectoryIngestor, temp_dir, entity) raise
from tenyksservice import TenyksService, run_service class AFK(TenyksService): direct_only = False irc_message_filters = { 'depart': [r'^(?i)(xopa|away|afk|brb)'], 'return': [r'^(?i)(xoka|back)'] } def handle_depart(self, data, match): self.logger.debug('{nick} went AFK.'.format(nick=data['nick'])) self.send('{nick} is now AFK.'.format(nick=data['nick']), data) def handle_return(self, data, match): self.logger.debug('{nick} is no longer AFK.'.format(nick=data['nick'])) self.send('{nick} is no longer AFK.'.format(nick=data['nick']), data) def main(): run_service(AFK) if __name__ == '__main__': main()
Add ability to query if a user is away
from tenyksservice import TenyksService, run_service away = {} class AFK(TenyksService): direct_only = False irc_message_filters = { 'depart': [r'^(?i)(xopa|away|afk|brb)'], 'return': [r'^(?i)(xoka|back)'], 'query': [r'(?P<nick>(.*))\?$'] } def handle_depart(self, data, match): self.logger.debug('{nick} went AFK.'.format(nick=data['nick'])) self.send('{nick} is now AFK.'.format(nick=data['nick']), data) away[data['nick']] = True def handle_return(self, data, match): self.logger.debug('{nick} is no longer AFK.'.format(nick=data['nick'])) self.send('{nick} is no longer AFK.'.format(nick=data['nick']), data) away[data['nick']] = False def handle_query(self, data, match): nick = match.groupdict()['nick'] if nick in away: status = 'AFK' if away[nick] else 'present' self.logger.debug('{nick} is currently {status}'.format(nick=nick, status=status)) self.send('{nick} is currently {status}.'.format(nick=nick, status=status), data) else: self.logger.debug('{nick}\' status is unknown.'.format(nick=nick)) self.send('{nick}\'s status is unknown.'.format(nick=nick), data) def main(): run_service(AFK) if __name__ == '__main__': main()
#import pygame.midi.Output from pygame.midi import Output class Output(Output):#pygame.midi.Output): def set_pan(self, pan, channel): assert (0 <= channel <= 15) assert pan <= 127 self.write_short(0xB0 + channel, 0x0A, pan)
Add Volume and Pitch methods
#import pygame.midi.Output
from pygame.midi import Output

class Output(Output):#pygame.midi.Output):
    def set_pan(self, pan, channel):
        assert (0 <= channel <= 15)
        assert pan <= 127
        self.write_short(0xB0 + channel, 0x0A, pan)

    def set_volume(self, volume, channel):
        assert (0 <= channel <= 15)
        assert volume <= 127
        self.write_short(0xB0 + channel, 0x07, volume)

    def set_pitch(self, pitch, channel):
        assert (0 <= channel <= 15)
        assert pitch <= (2**14-1)
        # the 7 least significant bits come into the first data byte,
        # the 7 most significant bits come into the second data byte
        pitch_lsb = pitch & 127
        pitch_msb = (pitch >> 7) & 127
        self.write_short(0xE0 + channel, pitch_lsb, pitch_msb)
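Aside: a round-trip check of the 14-bit split used in set_pitch above. A MIDI pitch-bend message carries the low 7 bits in the first data byte (LSB) and the high 7 bits in the second (MSB):

def split14(value):
    # Split a 14-bit value into (lsb, msb), each a 7-bit data byte.
    assert 0 <= value <= 2**14 - 1
    return value & 127, (value >> 7) & 127

def join14(lsb, msb):
    return (msb << 7) | lsb

for value in (0, 8192, 2**14 - 1):   # 8192 is the pitch-bend center
    lsb, msb = split14(value)
    assert join14(lsb, msb) == value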
from cement.ext.ext_argparse import expose from clowder.cli.abstract_base_controller import AbstractBaseController class DiffController(AbstractBaseController): class Meta: label = 'diff' stacked_on = 'base' stacked_type = 'nested' description = 'Show git diff for projects' @expose(help="second-controller default command", hide=True) def default(self): print("Inside SecondController.default()")
Add `clowder diff` logic to Cement controller
from cement.ext.ext_argparse import expose from clowder.cli.abstract_base_controller import AbstractBaseController from clowder.commands.util import ( filter_groups, filter_projects_on_project_names, run_group_command, run_project_command ) from clowder.util.decorators import ( print_clowder_repo_status, valid_clowder_yaml_required ) class DiffController(AbstractBaseController): class Meta: label = 'diff' stacked_on = 'base' stacked_type = 'nested' description = 'Show git diff for projects' @expose(help="second-controller default command", hide=True) @valid_clowder_yaml_required @print_clowder_repo_status def default(self): if self.app.pargs.projects is None: groups = filter_groups(self.clowder.groups, self.app.pargs.groups) for group in groups: run_group_command(group, [], 'diff') return projects = filter_projects_on_project_names(self.clowder.groups, self.app.pargs.projects) for project in projects: run_project_command(project, [], 'diff')
import sge from . import config from . import game from . import player from . import rooms def initialize(config): """Load assets and initialize the game objects""" sge.game = game.Game( width=config.GAME_WINDOW_WIDTH, height=config.GAME_WINDOW_HEIGHT, fps=config.GAME_FPS, window_text=config.GAME_WINDOW_TITLE) player_obj = player.Player( config.PLAYER_SPRITES, sge.game.width / 2, sge.game.height / 2) sge.game.start_room = rooms.ScrollableLevel( player=player_obj, width=10000, ruler=True) sge.game.mouse_visible = False def run(): """Start the game running""" sge.game.start() if __name__ == '__main__': initialize(config) run()
Startup: Make test room shorter for ease of testing
import sge from . import config from . import game from . import player from . import rooms def initialize(config): """Load assets and initialize the game objects""" sge.game = game.Game( width=config.GAME_WINDOW_WIDTH, height=config.GAME_WINDOW_HEIGHT, fps=config.GAME_FPS, window_text=config.GAME_WINDOW_TITLE) player_obj = player.Player( config.PLAYER_SPRITES, sge.game.width / 2, sge.game.height / 2) sge.game.start_room = rooms.ScrollableLevel( player=player_obj, width=2000, ruler=True) sge.game.mouse_visible = False def run(): """Start the game running""" sge.game.start() if __name__ == '__main__': initialize(config) run()
#! /usr/bin/env python from __future__ import print_function import os import sys import json def pretty_print(equation): print(equation["description"]["terse"]) eqn_dict = equation["unicode-pretty-print"] equation_text = eqn_dict["multiline"] for line in equation_text: print(line) if "parameters" in eqn_dict: print("where:") for param, param_dict in eqn_dict["parameters"].iteritems(): label = param_dict["label"] print(param,'=',label) def main(query): here = sys.path[0] json_dir = os.path.join(here, 'equajson') for filename in os.listdir(json_dir): if not filename.endswith('.json'): continue filepath = os.path.join(json_dir, filename) with open(filepath) as json_file: try: equation = json.load(json_file) except ValueError: sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name)) continue # try the next file description = equation["description"]["verbose"] if query.lower() in description.lower(): pretty_print(equation) if __name__ == '__main__': num_args = len(sys.argv) - 1 if num_args != 1: sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n') sys.exit(1) main(sys.argv[1])
Add a line between outputs.
#! /usr/bin/env python from __future__ import print_function import os import sys import json def pretty_print(equation): print(equation["description"]["terse"]) eqn_dict = equation["unicode-pretty-print"] equation_text = eqn_dict["multiline"] for line in equation_text: print(line) if "parameters" in eqn_dict: print("where:") for param, param_dict in eqn_dict["parameters"].iteritems(): label = param_dict["label"] print(param,'=',label) def main(query): here = sys.path[0] json_dir = os.path.join(here, 'equajson') for filename in os.listdir(json_dir): if not filename.endswith('.json'): continue filepath = os.path.join(json_dir, filename) with open(filepath) as json_file: try: equation = json.load(json_file) except ValueError: sys.stderr.write("Invalid JSON for file: `{}'\n".format(json_file.name)) continue # try the next file description = equation["description"]["verbose"] if query.lower() in description.lower(): pretty_print(equation) print() if __name__ == '__main__': num_args = len(sys.argv) - 1 if num_args != 1: sys.stderr.write("Usage: python "+sys.argv[0]+" query"+'\n') sys.exit(1) main(sys.argv[1])
# -*- coding: utf-8 -*- from openerp import fields, models class Session(models.Model): _name = 'openacademy.session' name = fields.Char(required=True) start_date = fields.Date() duration = fields.Float(digits=(6, 2), help="Duration in days") seats = fields.Integer(string="Number of seats") instructor_id = fields.Many2one('res.partner', string="Instructor") course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True) attendee_ids = fields.Many2many('res.partner', string="Attendees")
[REF] openacademy: Add domain or and ilike
# -*- coding: utf-8 -*- from openerp import fields, models class Session(models.Model): _name = 'openacademy.session' name = fields.Char(required=True) start_date = fields.Date() duration = fields.Float(digits=(6, 2), help="Duration in days") seats = fields.Integer(string="Number of seats") instructor_id = fields.Many2one('res.partner', string="Instructor", domain=['|', ('instructor', '=', True), ('category_id.name', 'ilike', "Teacher")]) course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True) attendee_ids = fields.Many2many('res.partner', string="Attendees")
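Aside: the domain in the output above uses Odoo's prefix ("Polish") notation, where '|' ORs the next two terms. A toy evaluator, purely illustrative and not Odoo's own code, showing how the list reads:

def eval_domain(domain, record):
    # Recursively consume one term starting at index i; return (bool, next_i).
    def term(i):
        tok = domain[i]
        if tok == '|':
            lhs, i = term(i + 1)
            rhs, i = term(i)
            return lhs or rhs, i
        field, op, value = tok
        if op == '=':
            return record.get(field) == value, i + 1
        if op == 'ilike':
            return value.lower() in str(record.get(field, '')).lower(), i + 1
        raise ValueError(op)
    result, _ = term(0)
    return result

domain = ['|', ('instructor', '=', True), ('category_id.name', 'ilike', 'Teacher')]
print(eval_domain(domain, {'instructor': False, 'category_id.name': 'Head Teacher'}))  # True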
# dmntest.py from .core import Daemon class TestDmn(Daemon): pass def main(): td = TestDmn('testd') td.start()
Work on daemons and TCP servers.
# dmntest.py #from .core import Daemon #class TestDmn(Daemon): # pass #def main(): # td = TestDmn('testd') # td.start()
from plugin import Plugin from utils import create_logger from config import OwnerID class Jacob_Noah(Plugin): is_global = True log = create_logger('noah_stuff') async def on_message(self, message, pfx): if message.content.startswith('<@' + self.client.id + '> kish meh'): await self.client.send_typing(message.channel) cmd_name = 'Jacob Noah Meme' self.log.info('User %s [%s] on server %s [%s], used the ' + cmd_name + ' command on #%s channel', message.author, message.author.id, message.server.name, message.server.id, message.channel) if message.author.id == OwnerID: await self.client.send_message(message.channel, 'Of course <@' + OwnerID + '>, my love, anything for you! Chu~') else: await self.client.send_message(message.channel, 'Ew <@' + OwnerID + '>... Would kindly piss off...')
Fix for the noah thing
from plugin import Plugin from utils import create_logger from config import OwnerID class Jacob_Noah(Plugin): is_global = True log = create_logger('noah_stuff') async def on_message(self, message, pfx): if message.content.startswith('<@' + self.client.user.id + '> kish meh'): await self.client.send_typing(message.channel) cmd_name = 'Jacob Noah Meme' self.log.info('User %s [%s] on server %s [%s], used the ' + cmd_name + ' command on #%s channel', message.author, message.author.id, message.server.name, message.server.id, message.channel) if message.author.id == OwnerID: await self.client.send_message(message.channel, 'Of course <@' + OwnerID + '>, my love, anything for you! Chu~') else: await self.client.send_message(message.channel, 'Ew <@' + OwnerID + '>... Would kindly piss off...')
from rest_framework.pagination import PageNumberPagination from rest_framework.response import Response class QuotesResultsSetPagination(PageNumberPagination): page_size = 10 page_size_query_param = 'page_size' max_page_size = 10000 def get_paginated_response(self, data): return Response({ 'pages': { 'next': self.page.next_page_number() if self.page.has_next() else None, 'previous': self.page.previous_page_number() if self.page.has_previous() else None }, 'count': self.page.paginator.count, 'results': data })
Change page size to 25
from rest_framework.pagination import PageNumberPagination from rest_framework.response import Response class QuotesResultsSetPagination(PageNumberPagination): page_size = 25 page_size_query_param = 'page_size' max_page_size = 10000 def get_paginated_response(self, data): return Response({ 'pages': { 'next': self.page.next_page_number() if self.page.has_next() else None, 'previous': self.page.previous_page_number() if self.page.has_previous() else None }, 'count': self.page.paginator.count, 'results': data })
# Generated by Django 2.2.28 on 2022-04-20 13:05 from django.db import ( migrations, models, ) class Migration(migrations.Migration): dependencies = [ ('accelerator', '0098_update_startup_update_20220408_0441'), ] operations = [ migrations.AddField( model_name='program', name='hubspot_url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='program', name='program_image', field=models.ImageField(null=True, upload_to=''), ), ]
[AC-9452] Fix image field import and migration
# Generated by Django 2.2.28 on 2022-04-20 13:05 import sorl.thumbnail.fields from django.db import ( migrations, models, ) class Migration(migrations.Migration): dependencies = [ ('accelerator', '0098_update_startup_update_20220408_0441'), ] operations = [ migrations.AddField( model_name='program', name='hubspot_url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='program', name='program_image', field=sorl.thumbnail.fields.ImageField( null=True, upload_to='program_images'), ), ]
__author__ = 'mark'
Work on SQL example set
# -*- coding: utf-8 -*-

"""
This example shows some examples of how to access data in SQL databases using
blaze. It walks through how blaze syntax corresponds to SQL queries.

Select Queries
--------------
"""

from __future__ import absolute_import, division, print_function

import sqlite3 as db

from blaze.io.sql import connect, from_table
from blaze import dshape

def create_sqlite_table():
    data = [
        (4,  "Gilbrecht", 17),
        (8,  "Bertrand", 48),
        (16, "Janssen", 32),
    ]

    conn = db.connect(":memory:")
    c = conn.cursor()
    c.execute('''create table MyTable
    (id INTEGER, name TEXT, age INTEGER)''')
    c.executemany("""insert into MyTable
                  values (?, ?, ?)""", data)
    conn.commit()
    c.close()
    return conn

conn = create_sqlite_table()

# Describe the columns. Note: typically you would describe column
# with variables for the column size, e.g. dshape('a, int32')
id = from_table('MyTable', 'id', dshape('3, int32'), conn)
name_col = from_table('MyTable', 'name', dshape('3, int32'), conn)
age_col = from_table('MyTable', 'age', dshape('3, int32'), conn)

table = Table([id, name_col, age_col]) # TODO: Better interface

def select():
    """
    SELECT *
    FROM MyTable
    WHERE MyTable.id > 5
    """
    print(table[table.id > 5])

def select_ordered():
    """
    SELECT *
    FROM MyTable
    WHERE MyTable.id > 5
    ORDER BY MyTable.age
    """
    print(index(table, table.id > 5, order=table.age))

def groupby():
    """
    SELECT *
    FROM MyTable
    WHERE MyTable.age > 10 AND MyTable.age < 20
    GROUP BY MyTable.age
    ORDER BY COUNT(*)
    """
    teenagers = index(table, table.age > 10 & table.age < 20)
    print(groupby(teenagers, table.age, order=count(table)))

def aggregate():
    """
    SELECT AVG(age)
    FROM MyTable
    WHERE MyTable.id > 5
    """
    print(avg(age_col[id > 5]))
# -*- coding: utf-8 -*- import string from django.db import models class Blacklist(models.Model): domain = models.CharField(max_length=255, unique=True, null=True) def __unicode__(self): return self.domain class URL(models.Model): hashcode = models.CharField(max_length=10, unique=True, db_index=True, null=True) longurl = models.CharField(max_length=1024, unique=True, db_index=True, null=True) views = models.IntegerField(default=0) ip = models.GenericIPAddressField(null=True) data = models.DateTimeField(auto_now_add=True, null=True) def save(self, *args, **kwargs): if URL.objects.count(): last = URL.objects.latest('id').pk + 1 alphabet = string.digits + string.ascii_lowercase base36 = '' while last != 0: last, i = divmod(last, len(alphabet)) base36 = alphabet[i] + base36 self.hashcode = base36 else: self.hashcode = '1' return super(URL, self).save(*args, **kwargs) def short_url(self, request): return ''.join([ request.scheme, '://', request.get_host(), '/', self.hashcode, ]) def __unicode__(self): return ' - '.join([self.hashcode, self.longurl])
Fix hashcode generation for existing URLs
# -*- coding: utf-8 -*- import string from django.db import models class Blacklist(models.Model): domain = models.CharField(max_length=255, unique=True, null=True) def __unicode__(self): return self.domain class URL(models.Model): hashcode = models.CharField(max_length=10, unique=True, db_index=True, null=True) longurl = models.CharField(max_length=1024, unique=True, db_index=True, null=True) views = models.IntegerField(default=0) ip = models.GenericIPAddressField(null=True) data = models.DateTimeField(auto_now_add=True, null=True) def save(self, *args, **kwargs): if not self.pk: if URL.objects.count(): last = URL.objects.latest('id').pk + 1 alphabet = string.digits + string.ascii_lowercase base36 = '' while last != 0: last, i = divmod(last, len(alphabet)) base36 = alphabet[i] + base36 self.hashcode = base36 else: self.hashcode = '1' return super(URL, self).save(*args, **kwargs) def short_url(self, request): return ''.join([ request.scheme, '://', request.get_host(), '/', self.hashcode, ]) def __unicode__(self): return ' - '.join([self.hashcode, self.longurl])
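Aside: the base36 loop in the save() above, extracted as a pure function with a decoder for a round-trip sanity check (the helper names are illustrative):

import string

ALPHABET = string.digits + string.ascii_lowercase   # 0-9 then a-z, 36 symbols

def to_base36(number):
    if number == 0:
        return '0'
    digits = ''
    while number:
        number, i = divmod(number, 36)
        digits = ALPHABET[i] + digits
    return digits

def from_base36(text):
    # int() already understands bases up to 36 with this alphabet.
    return int(text, 36)

for n in (1, 35, 36, 123456):
    assert from_base36(to_base36(n)) == n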
#!/usr/bin/env python # # Delete all secure policies. # import os import sys import json sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..')) from sdcclient import SdSecureClient def usage(): print('usage: %s <sysdig-token>' % sys.argv[0]) print('You can find your token at https://secure.sysdig.com/#/settings/user') sys.exit(1) # # Parse arguments # if len(sys.argv) != 2: usage() sdc_token = sys.argv[1] # # Instantiate the SDC client # sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') # Get a list of policyIds ok, res = sdclient.list_policies() policies = [] if not ok: print(res) sys.exit(1) else: policies = res[1]['policies'] for policy in policies: print("deleting policy: " + str(policy['id'])) ok, res = sdclient.delete_policy_id(policy['id']) if not ok: print(res) sys.exit(1)
Fix legacy use of action result
#!/usr/bin/env python # # Delete all secure policies. # import os import sys import json sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..')) from sdcclient import SdSecureClient def usage(): print('usage: %s <sysdig-token>' % sys.argv[0]) print('You can find your token at https://secure.sysdig.com/#/settings/user') sys.exit(1) # # Parse arguments # if len(sys.argv) != 2: usage() sdc_token = sys.argv[1] # # Instantiate the SDC client # sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com') # Get a list of policyIds ok, res = sdclient.list_policies() policies = [] if not ok: print(res) sys.exit(1) else: policies = res['policies'] for policy in policies: print("deleting policy: " + str(policy['id'])) ok, res = sdclient.delete_policy_id(policy['id']) if not ok: print(res) sys.exit(1)
from unittest import TestCase from prudent.sequence import Sequence class SequenceTest(TestCase): def setUp(self): self.seq = Sequence([1, 2, 3]) def test_getitem(self): assert self.seq[0] == 1 self.seq[2] assert self.seq[2] == 3 def test_len(self): assert len(self.seq) == 0 self.seq[2] assert len(self.seq) == 3 def test_iter(self): for _ in range(2): assert list(self.seq) == [1, 2, 3]
Test that IndexError is raised when appropriate
from unittest import TestCase from prudent.sequence import Sequence class SequenceTest(TestCase): def setUp(self): self.seq = Sequence([1, 2, 3]) def test_getitem(self): assert self.seq[0] == 1 assert self.seq[2] == 3 def test_getitem_raises_indexerror(self): self.assertRaises(IndexError, lambda: self.seq[3]) def test_len_returns_current_size(self): assert len(self.seq) == 0 self.seq[2] assert len(self.seq) == 3 def test_iter_preserves_elems(self): for _ in range(2): assert list(self.seq) == [1, 2, 3]
#!/usr/bin/python """destroyer-runner.py - Run the main application""" import sys import subprocess if __name__ == '__main__': subprocess.call(['python', './destroyer/destroyer.py'] + [str(arg) for arg in sys.argv[1:]])
Update with working code to run destroyer
#!/usr/bin/python """destroyer-runner.py - Run the main application""" from destroyer.destroyer import main if __name__ == '__main__': main()
import logging import pydicom logger = logging.getLogger(__name__) class UID(pydicom.uid.UID): """Unique DICOM identifier with a highdicom-specific UID prefix.""" def __new__(cls: type) -> str: prefix = '1.2.826.0.1.3680043.10.511.3.' identifier = pydicom.uid.generate_uid(prefix=prefix) return super().__new__(cls, identifier)
Fix typing for UID class
import logging from typing import Type, TypeVar import pydicom logger = logging.getLogger(__name__) T = TypeVar('T', bound='UID') class UID(pydicom.uid.UID): """Unique DICOM identifier with a highdicom-specific UID prefix.""" def __new__(cls: Type[T]) -> T: prefix = '1.2.826.0.1.3680043.10.511.3.' identifier = pydicom.uid.generate_uid(prefix=prefix) return super().__new__(cls, identifier)
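Aside: the point of the bound TypeVar in the output above is that subclass constructors come back typed as the subclass rather than the base. A minimal self-contained sketch of the same pattern (Identifier is a made-up stand-in, not the highdicom class):

from typing import Type, TypeVar

T = TypeVar('T', bound='Identifier')

class Identifier(str):
    def __new__(cls: Type[T]) -> T:
        # Annotating cls as Type[T] ties the return type to the actual class.
        return super().__new__(cls, 'id-value')

class SpecialIdentifier(Identifier):
    pass

sid = SpecialIdentifier()  # a type checker infers SpecialIdentifier, not Identifier
print(type(sid).__name__, sid)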
""" Definition of the plugin. """ from django.apps import apps from django.conf import settings from django.db.models.loading import get_model from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
Update Blog model and content item matching
""" Definition of the plugin. """ from django.apps import apps from django.conf import settings from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]