""" Paypal IPN: three steps validation to ensure data correctness - step 1: return an empty HTTP 200 response -> will be done at the end by returning '' - step 2: POST the complete, unaltered message back to Paypal (preceded by cmd=_notify-validate), with same encoding - step 3: paypal send either VERIFIED or INVALID (single word) Once data is validated, process it. """ res = False new_post = dict(post, cmd='_notify-validate') urequest = urllib2.Request("https://www.sandbox.paypal.com/cgi-bin/webscr", werkzeug.url_encode(new_post)) uopen = urllib2.urlopen(urequest) resp = uopen.read() if resp == 'VERIFIED': _logger.info('Paypal: validated data') cr, uid, context = request.cr, SUPERUSER_ID, request.context res = request.registry['payment.transaction'].form_feedback(cr, uid, post, 'paypal', context=context) elif resp == 'INVALID': _logger.warning('Paypal: answered INVALID on data verification') else: _logger.warning('Paypal: unrecognized paypal answer, received %s instead of VERIFIED or INVALID' % resp.text) return res @http.route('/payment/paypal/ipn/', type='http', auth='none', methods=['POST']) def paypal_ipn(self, **post): """ Paypal IPN. """ _logger.info('Beginning Paypal IPN form_feedback with post data %s', pprint.pformat(post)) # debug self.paypal_validate_data(**post) return '' @http.route('/payment/paypal/dpn', type='http', auth="none", methods=['POST']) def paypal_dpn(self, **post): """ Paypal DPN """ _logger.info('Beginning Paypal DPN form_feedback with post data %s', pprint.pformat(post)) # debug return_url = self._get_return_url(**post) self.paypal_validate_data(**post) return werkzeug.utils.redirect(return_url) @http.route('/payment/paypal/cancel', type='http', auth="none") def paypal_cancel(self, **post): """ When the user cancels its Paypal payment: GET on this route """ cr, uid, context = request.cr, SUPERUSER_ID, request.context _logger.info('Beginning Paypal cancel with post data %s', pprint.pformat(post)) # debug return_url = self._get_return_url(**post) return werkzeug.utils.redirect(return_url) #!/usr/bin/env python # # VMEncryption extension # # Copyright 2015 Microsoft Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import subprocess import os import os.path import shlex import sys from subprocess import * class CommandExecuter(object): """description of class""" def __init__(self, logger): self.logger = logger def Execute(self, command_to_execute): self.logger.log("Executing:" + command_to_execute) args = shlex.split(command_to_execute) proc = Popen(args) returnCode = proc.wait() return returnCode def RunGetOutput(self, command_to_execute): try: output=subprocess.check_output(command_to_execute,stderr=subprocess.STDOUT,shell=True) return 0,output.decode('latin-1') except subprocess.CalledProcessError as e : self.logger.log('CalledProcessError. Error Code is ' + str(e.returncode) ) self.logger.log('CalledProcessError. Command string was ' + e.cmd ) self.logger.log('CalledProcessError. Command result was ' + (e.output[:-1]).decode('latin-1')) return e.returncode,e.output.decode('latin-1') from django.contrib.gis.gdal.error import OGRException #### OGRGeomType #### class OGRGeomType(object): "Encapulates OGR Geometry Types." wkb25bit = -2147483648 # Dictionary of acceptable OGRwkbGeometryType s and their string names. _types = {0 : 'Unknown', 1 : 'Point', 2 : 'LineString', 3 : 'Polygon', 4 : 'MultiPoint', 5 : 'MultiLineString', 6 : 'MultiPolygon', 7 : 'GeometryCollection', 100 : 'None', 101 : 'LinearRing', 1 + wkb25bit: 'Point25D', 2 + wkb25bit: 'LineString25D', 3 + wkb25bit: 'Polygon25D', 4 + wkb25bit: 'MultiPoint25D', 5 + wkb25bit : 'MultiLineString25D', 6 + wkb25bit : 'MultiPolygon25D', 7 + wkb25bit : 'GeometryCollection25D', } # Reverse type dictionary, keyed by lower-case of the name. _str_types = dict([(v.lower(), k) for k, v in _types.items()]) def __init__(self, type_input): "Figures out the correct OGR Type based upon the input." if isinstance(type_input, OGRGeomType): num = type_input.num elif isinstance(type_input, basestring): type_input = type_input.lower() if type_input == 'geometry': type_input='unknown' num = self._str_types.get(type_input, None) if num is None: raise OGRException('Invalid OGR String Type "%s"' % type_input) elif isinstance(type_input, int): if not type_input in self._types: raise OGRException('Invalid OGR Integer Type: %d' % type_input) num = type_input else: raise TypeError('Invalid OGR input type given.') # Setting the OGR geometry type number. self.num = num def __str__(self): "Returns the value of the name property." return self.name def __eq__(self, other): """ Does an equivalence test on the OGR type with the given other OGRGeomType, the short-hand string, or the integer. """ if isinstance(other, OGRGeomType): return self.num == other.num elif isinstance(other, basestring): return self.name.lower() == other.lower() elif isinstance(other, int): return self.num == other else: return False def __ne__(self, other): return not (self == other) @property def name(self): "Returns a short-hand string form of the OGR Geometry type." return self._types[self.num] @property def django(self): "Returns the Django GeometryField for this OGR Type." s = self.name.replace('25D', '') if s in ('LinearRing', 'None'): return None elif s == 'Unknown': s = 'Geometry' return s + 'Field' """praw constants.""" import sys __version__ = '5.2.1.dev0' API_PATH = { 'about_edited': 'r/{subreddit}/about/edited/', 'about_log': 'r/{subreddit}/about/log/', 'about_modqueue': 'r/{subreddit}/about/modqueue/', 'about_reports': 'r/{subreddit}/about/reports/', 'about_spam': 'r/{subreddit}/about/spam/', 'about_sticky': 'r/{subreddit}/about/sticky/', 'about_stylesheet': 'r/{subreddit}/about/stylesheet/', 'about_traffic': 'r/{subreddit}/about/traffic/', 'about_unmoderated': 'r/{subreddit}/about/unmoderated/', 'accept_mod_invite': 'r/{subreddit}/api/accept_moderator_invite', 'approve': 'api/approve/', 'block': 'api/block', 'block_user': '/api/block_user/', 'blocked': 'prefs/blocked/', 'collapse': 'api/collapse_message/', 'comment': 'api/comment/', 'comment_replies': 'message/comments/', 'compose': 'api/compose/', 'contest_mode': 'api/set_contest_mode/', 'del': 'api/del/', 'deleteflair': 'r/{subreddit}/api/deleteflair', 'delete_sr_banner': 'r/{subreddit}/api/delete_sr_banner', 'delete_sr_header': 'r/{subreddit}/api/delete_sr_header', 'delete_sr_icon': 'r/{subreddit}/api/delete_sr_icon', 'delete_sr_image': 'r/{subreddit}/api/delete_sr_img', 'distinguish': 'api/distinguish/', 'domain': 'domain/{domain}/', 'duplicates': 'duplicates/{submission_id}/', 'edit': 'api/editusertext/', 'flair': 'r/{subreddit}/api/flair/', 'flairconfig': 'r/{subreddit}/api/flairconfig/', 'flaircsv': 'r/{subreddit}/api/flaircsv/', 'flairlist': 'r/{subreddit}/api/flairlist/', 'flairselector': 'r/{subreddit}/api/flairselector/', 'flairtemplate': 'r/{subreddit}/api/flairtemplate/', 'flairtemplateclear': 'r/{subreddit}/api/clearflairtemplates/', 'flairtemplatedelete': 'r/{subreddit}/api/deleteflairtemplate/', 'friend': 'r/{subreddit}/api/friend/', 'friend_v1': 'api/v1/me/friends/{user}', 'friends': 'api/v1/me/friends/', 'gild_thing': 'api/v1/gold/gild/{fullname}/', 'gild_user': 'api/v1/gold/give/{username}/', 'hide': 'api/hide/', 'ignore_reports': 'api/ignore_reports/', 'inbox': 'message/inbox/', 'info': 'api/info/', 'karma': 'api/v1/me/karma', 'leavecontributor': 'api/leavecontributor', 'leavemoderator': 'api/leavemoderator', 'link_flair': 'r/{subreddit}/api/link_flair', 'list_banned': 'r/{subreddit}/about/banned/', 'list_contributor': 'r/{subreddit}/about/contributors/', 'list_moderator': 'r/{subreddit}/about/moderators/', 'list_muted': 'r/{subreddit}/about/muted/', 'list_wikibanned': 'r/{subreddit}/about/wikibanned/', 'list_wikicontributor': 'r/{subreddit}/about/wikicontributors/', 'live_accept_invite': 'api/live/{id}/accept_contributor_invite', 'live_add_update': 'api/live/{id}/update', 'live_close': 'api/live/{id}/close_thread', 'live_contributors': 'live/{id}/contributors', 'live_discussions': 'live/{id}/discussions', 'live_focus': 'live/{thread_id}/updates/{update_id}', 'live_info': 'api/live/by_id/{ids}', 'live_invite': 'api/live/{id}/invite_contributor', 'live_leave': 'api/live/{id}/leave_contributor', 'live_now': 'api/live/happening_now', 'live_remove_update': 'api/live/{id}/delete_update', 'live_remove_contrib': 'api/live/{id}/rm_contributor', 'live_remove_invite': 'api/live/{id}/rm_contributor_invite', 'live_report': 'api/live/{id}/report', 'live_strike': 'api/live/{id}/strike_update', 'live_update_perms': 'api/live/{id}/set_contributor_permissions', 'live_update_thread': 'api/live/{id}/edit', 'live_updates': 'live/{id}', 'liveabout': 'api/live/{id}/about/', 'livecreate': 'api/live/create', 'lock': 'api/lock/', 'me': 'api/v1/me', 'mentions': 'message/mentions', 'message': 'message/messages/{id}/', 'messages': 'message/messages/', 'moderator_messages': 'r/{subreddit}/message/moderator/', 'moderator_unread': 'r/{subreddit}/message/moderator/unread/', 'morechildren': 'api/morechildren/', 'my_contributor': 'subreddits/mine/contributor/', 'my_moderator': 'subreddits/mine/moderator/', 'my_multireddits': 'api/multi/mine/', 'my_subreddits': 'subreddits/mine/subscriber/', 'marknsfw': 'api/marknsfw/', 'modmail_archive': 'api/mod/conversations/{id}/archive', 'modmail_bulk_read': 'api/mod/conversations/bulk/read', 'modmail_conversation': 'api/mod/conversations/{id}', 'modmail_conversations': 'api/mod/conversations/', 'modmail_highlight': 'api/mod/conversations/{id}/highlight', 'modmail_mute': 'api/mod/conversations/{id}/mute', 'modmail_read': 'api/mod/conversations/read', 'modmail_subreddits': 'api/mod/conversations/subreddits', 'modmail_unarchive': 'api/mod/conversations/{id}/unarchive', 'modmail_unmute': 'api/mod/conversations/{id}/unmute', 'modmail_unread': 'api/mod/conversations/unread', 'modmail_unread_count': 'api/mod/conversations/unread/count', 'multireddit': 'user/{user}/m/{multi}/', 'multireddit_api': 'api/multi/user/{user}/m/{multi}/', 'multireddit_base': 'api/multi/', 'multireddit_copy': 'api/multi/copy/', 'multireddit_rename': 'api/multi/rename/', 'multireddit_update': 'api/multi/user/{user}/m/{multi}/r/{subreddit}', 'multireddit_user': 'api/multi/user/{user}/', 'mute_sender': 'api/mute_message_author/', 'quarantine_opt_in': 'api/quarantine_optin', 'quarantine_opt_out': 'api/quarantine_optout', 'read_message': 'api/read_message/', 'remove': 'api/remove/', 'report': 'api/report/', 'rules': 'r/{subreddit}/about/rules', 'save': 'api/save/', 'search': 'r/{subreddit}/search/', 'select_flair': 'r/{subreddit}/api/selectflair/', 'sendreplies': 'api/sendreplies', 'sent': 'message/sent/', 'setpermissions': 'r/{subreddit}/api/setpermissions/', 'spoiler': 'api/spoiler/', 'site_admin': 'api/site_admin/', 'sticky_submission': 'api/set_subreddit_sticky/', 'sub_recommended': 'api/recommend/sr/{subreddits}', 'submission': 'comments/{id}/', 'submission_replies': 'message/selfreply/', 'submit': 'api/submit/', 'subreddit': 'r/{subreddit}/', 'subreddit_about': 'r/{subreddit}/about/', 'subreddit_filter': ('api/filter/user/{user}/f/{special}/' 'r/{subreddit}'), 'subreddit_filter_list': 'api/filter/user/{user}/f/{special}', 'subreddit_random': 'r/{subreddit}/random/', 'subreddit_settings': 'r/{subreddit}/about/edit/', 'subreddit_stylesheet': 'r/{subreddit}/api/subreddit_stylesheet/', 'subreddits_by_topic': 'api/subreddits_by_topic', 'subreddits_default': 'subreddits/default/', 'subreddits_gold': 'subreddits/gold/', 'subreddits_new': 'subreddits/new/', 'subreddits_popular': 'subreddits/popular/', 'subreddits_name_search': 'api/search_reddit_names/', 'subreddits_search': 'subreddits/search/', 'subscribe': 'api/subscribe/', 'suggested_sort': 'api/set_suggested_sort/', 'uncollapse': 'api/uncollapse_message/', 'unfriend': 'r/{subreddit}/api/unfriend/', 'unhide': 'api/unhide/', 'unignore_reports': 'api/unignore_reports/', 'unlock': 'api/unlock/', 'unmarknsfw': 'api/unmarknsfw/', 'unmute_sender': 'api/unmute_message_author/', 'unread': 'message/unread/', 'unread_message': 'api/unread_message/', 'unsave': 'api/unsave/', 'unspoiler': 'api/unspoiler/', 'upload_image': 'r/{subreddit}/api/upload_sr_img', 'user': 'user/{user}/', 'user_about': 'user/{user}/about/', 'vote': 'api/vote/', 'wiki_edit': 'r/{subreddit}/api/wiki/edit/', 'wiki_page': 'r/{subreddit}/wiki/{page}', 'wiki_page_editor': 'r/{subreddit}/api/wiki/alloweditor/{method}', 'wiki_page_revisions': 'r/{subreddit}/wiki/revisions/{page}', 'wiki_page_settings': 'r/{subreddit}/wiki/settings/{page}', 'wiki_pages': 'r/{subreddit}/wiki/pages/', 'wiki_revisions': 'r/{subreddit}/wiki/revisions/'} JPEG_HEADER = b'\xff\xd8\xff' MAX_IMAGE_SIZE = 512000 MIN_PNG_SIZE = 67 MIN_JPEG_SIZE = 128 PNG_HEADER = b'\x89\x50\x4e\x47\x0d\x0a\x1a\x0a' USER_AGENT_FORMAT = '{{}} PRAW/{}'.format(__version__) # pylint: disable=import-error,no-name-in-module,unused-import if sys.version_info.major == 2: import ConfigParser as configparser # NOQA from urlparse import urljoin, urlparse # NOQA else: import configparser # NOQA from urllib.parse import urljoin, urlparse # NOQA #!/usr/bin/env python # -*- coding: utf-8 -*- import datetime import logging import pprint import time from django.conf import settings from django.forms.models import fields_for_model from django.http import QueryDict from django.utils.functional import cached_property from django.utils.http import urlencode from six.moves.urllib.request import urlopen from paypal.pro.signals import payment_was_successful, recurring_cancel, recurring_suspend, recurring_reactivate, payment_profile_created from paypal.pro.models import PayPalNVP from paypal.pro.exceptions import PayPalFailure USER = settings.PAYPAL_WPP_USER PASSWORD = settings.PAYPAL_WPP_PASSWORD SIGNATURE = settings.PAYPAL_WPP_SIGNATURE VERSION = 116.0 BASE_PARAMS = dict(USER=USER, PWD=PASSWORD, SIGNATURE=SIGNATURE, VERSION=VERSION) ENDPOINT = "https://api-3t.paypal.com/nvp" SANDBOX_ENDPOINT = "https://api-3t.sandbox.paypal.com/nvp" log = logging.getLogger(__file__) def paypal_time(time_obj=None): """Returns a time suitable for PayPal time fields.""" if time_obj is None: time_obj = time.gmtime() return time.strftime(PayPalNVP.TIMESTAMP_FORMAT, time_obj) def paypaltime2datetime(s): """Convert a PayPal time string to a DateTime.""" return datetime.datetime(*(time.strptime(s, PayPalNVP.TIMESTAMP_FORMAT)[:6])) class PayPalError(TypeError): """Error thrown when something be wrong.""" class PayPalWPP(object): """ Wrapper class for the PayPal Website Payments Pro. Website Payments Pro Integration Guide: https://cms.paypal.com/cms_content/US/en_US/files/developer/PP_WPP_IntegrationGuide.pdf Name-Value Pair API Developer Guide and Reference: https://cms.paypal.com/cms_content/US/en_US/files/developer/PP_NVPAPI_DeveloperGuide.pdf """ def __init__(self, request, params=BASE_PARAMS): """Required - USER / PWD / SIGNATURE / VERSION""" self.request = request if getattr(settings, 'PAYPAL_TEST', True): self.endpoint = SANDBOX_ENDPOINT else: self.endpoint = ENDPOINT self.signature_values = params self.signature = urlencode(self.signature_values) + "&" @cached_property def NVP_FIELDS(self): # Put this onto class and load lazily, because in some cases there is an # import order problem if we put it at module level. return list(fields_for_model(PayPalNVP).keys()) def doDirectPayment(self, params): """Call PayPal DoDirectPayment method.""" defaults = {"method": "DoDirectPayment", "paymentaction": "Sale"} required = ["creditcardtype", "acct", "expdate", "cvv2", "ipaddress", "firstname", "lastname", "street", "city", "state", "countrycode", "zip", "amt", ] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) payment_was_successful.send(sender=nvp_obj, **params) # @@@ Could check cvv2match / avscode are both 'X' or '0' # qd = django.http.QueryDict(nvp_obj.response) # if qd.get('cvv2match') not in ['X', '0']: # nvp_obj.set_flag("Invalid cvv2match: %s" % qd.get('cvv2match') # if qd.get('avscode') not in ['X', '0']: # nvp_obj.set_flag("Invalid avscode: %s" % qd.get('avscode') return nvp_obj def setExpressCheckout(self, params): """ Initiates an Express Checkout transaction. Optionally, the SetExpressCheckout API operation can set up billing agreements for reference transactions and recurring payments. Returns a NVP instance - check for token and payerid to continue! """ if "amt" in params: import warnings warnings.warn("'amt' has been deprecated. 'paymentrequest_0_amt' " "should be used instead.", DeprecationWarning) # Make a copy so we don't change things unexpectedly params = params.copy() params.update({'paymentrequest_0_amt': params['amt']}) del params['amt'] if self._is_recurring(params): params = self._recurring_setExpressCheckout_adapter(params) defaults = {"method": "SetExpressCheckout", "noshipping": 1} required = ["returnurl", "cancelurl", "paymentrequest_0_amt"] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) return nvp_obj def doExpressCheckoutPayment(self, params): """ Check the dude out: """ if "amt" in params: import warnings warnings.warn("'amt' has been deprecated. 'paymentrequest_0_amt' " "should be used instead.", DeprecationWarning) # Make a copy so we don't change things unexpectedly params = params.copy() params.update({'paymentrequest_0_amt': params['amt']}) del params['amt'] defaults = {"method": "DoExpressCheckoutPayment", "paymentaction": "Sale"} required = ["returnurl", "cancelurl", "paymentrequest_0_amt", "token", "payerid"] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) payment_was_successful.send(sender=nvp_obj, **params) return nvp_obj def createRecurringPaymentsProfile(self, params, direct=False): """ Set direct to True to indicate that this is being called as a directPayment. Returns True PayPal successfully creates the profile otherwise False. """ defaults = {"method": "CreateRecurringPaymentsProfile"} required = ["profilestartdate", "billingperiod", "billingfrequency", "amt"] # Direct payments require CC data if direct: required + ["creditcardtype", "acct", "expdate", "firstname", "lastname"] else: required + ["token", "payerid"] nvp_obj = self._fetch(params, required, defaults) # Flag if profile_type != ActiveProfile if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) payment_profile_created.send(sender=nvp_obj, **params) return nvp_obj def getExpressCheckoutDetails(self, params): defaults = {"method": "GetExpressCheckoutDetails"} required = ["token"] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) return nvp_obj def setCustomerBillingAgreement(self, params): raise DeprecationWarning def createBillingAgreement(self, params): """ Create a billing agreement for future use, without any initial payment """ defaults = {"method": "CreateBillingAgreement"} required = ["token"] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) return nvp_obj def getTransactionDetails(self, params): defaults = {"method": "GetTransactionDetails"} required = ["transactionid"] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) return nvp_obj def massPay(self, params): raise NotImplementedError def getRecurringPaymentsProfileDetails(self, params): raise NotImplementedError def updateRecurringPaymentsProfile(self, params): defaults = {"method": "UpdateRecurringPaymentsProfile"} required = ["profileid"] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) return nvp_obj def billOutstandingAmount(self, params): raise NotImplementedError def manangeRecurringPaymentsProfileStatus(self, params, fail_silently=False): """ Requires `profileid` and `action` params. Action must be either "Cancel", "Suspend", or "Reactivate". """ defaults = {"method": "ManageRecurringPaymentsProfileStatus"} required = ["profileid", "action"] nvp_obj = self._fetch(params, required, defaults) # TODO: This fail silently check should be using the error code, but its not easy to access if not nvp_obj.flag or ( fail_silently and nvp_obj.flag_info == 'Invalid profile status for cancel action; profile should be active or suspended'): if params['action'] == 'Cancel': recurring_cancel.send(sender=nvp_obj) elif params['action'] == 'Suspend': recurring_suspend.send(sender=nvp_obj) elif params['action'] == 'Reactivate': recurring_reactivate.send(sender=nvp_obj) else: raise PayPalFailure(nvp_obj.flag_info) return nvp_obj def refundTransaction(self, params): raise NotImplementedError def doReferenceTransaction(self, params): """ Process a payment from a buyer's account, identified by a previous transaction. The `paymentaction` param defaults to "Sale", but may also contain the values "Authorization" or "Order". """ defaults = {"method": "DoReferenceTransaction", "paymentaction": "Sale"} required = ["referenceid", "amt"] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info) return nvp_obj def _is_recurring(self, params): """Returns True if the item passed is a recurring transaction.""" return 'billingfrequency' in params def _recurring_setExpressCheckout_adapter(self, params): """ The recurring payment interface to SEC is different than the recurring payment interface to ECP. This adapts a normal call to look like a SEC call. """ params['l_billingtype0'] = "RecurringPayments" params['l_billingagreementdescription0'] = params['desc'] REMOVE = ["billingfrequency", "billingperiod", "profilestartdate", "desc"] for k in params.keys(): if k in REMOVE: del params[k] return params def _fetch(self, params, required, defaults): """Make the NVP request and store the response.""" defaults.update(params) pp_params = self._check_and_update_params(required, defaults) pp_string = self.signature + urlencode(pp_params) response = self._request(pp_string) response_params = self._parse_response(response) if getattr(settings, 'PAYPAL_DEBUG', settings.DEBUG): log.debug('PayPal Request:\n%s\n', pprint.pformat(defaults)) log.debug('PayPal Response:\n%s\n', pprint.pformat(response_params)) # Gather all NVP parameters to pass to a new instance. nvp_params = {} tmpd = defaults.copy() tmpd.update(response_params) for k, v in tmpd.items(): if k in self.NVP_FIELDS: nvp_params[str(k)] = v # PayPal timestamp has to be formatted. if 'timestamp' in nvp_params: nvp_params['timestamp'] = paypaltime2datetime(nvp_params['timestamp']) nvp_obj = PayPalNVP(**nvp_params) nvp_obj.init(self.request, params, response_params) nvp_obj.save() return nvp_obj def _request(self, data): """Moved out to make testing easier.""" return urlopen(self.endpoint, data.encode("ascii")).read() def _check_and_update_params(self, required, params): """ Ensure all required parameters were passed to the API call and format them correctly. """ for r in required: if r not in params: raise PayPalError("Missing required param: %s" % r) # Upper case all the parameters for PayPal. return (dict((k.upper(), v) for k, v in params.items())) def _parse_response(self, response): """Turn the PayPal response into a dict""" q = QueryDict(response, encoding='UTF-8').dict() return {k.lower(): v for k,v in q.items()} # Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Test cases for operators with no arguments.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.compiler.tests.xla_test import XLATestCase from tensorflow.python.framework import constant_op from tensorflow.python.ops import control_flow_ops from tensorflow.python.platform import googletest class NullaryOpsTest(XLATestCase): def _testNullary(self, op, expected): with self.test_session() as session: with self.test_scope(): output = op() result = session.run(output) self.assertAllClose(result, expected, rtol=1e-3) def testNoOp(self): with self.test_session(): with self.test_scope(): output = control_flow_ops.no_op() # This should not crash. output.run() def testConstants(self): constants = [ np.float32(42), np.array([], dtype=np.float32), np.array([1, 2], dtype=np.float32), np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32), np.array([[[1, 2], [3, 4], [5, 6]], [[10, 20], [30, 40], [50, 60]]], dtype=np.float32), np.array([[[]], [[]]], dtype=np.float32), np.array([[[[1]]]], dtype=np.float32), ] for c in constants: self._testNullary(lambda c=c: constant_op.constant(c), expected=c) if __name__ == "__main__": googletest.main() # coding=utf-8 import os import unittest import mock from conans.client.tools import env class ToolsEnvTest(unittest.TestCase): def test_environment_append_variables(self): with mock.patch.dict('os.environ', {}),\ env.environment_append({'env_var1': 'value', 'env_var2': 'value2'}): self.assertEqual(os.environ['env_var1'], 'value') self.assertEqual(os.environ['env_var2'], 'value2') def test_environment_append_variables_without_values(self): with mock.patch.dict('os.environ', {'env_var1': 'value', 'env_var2': 'value2'}),\ env.environment_append({}): self.assertEqual(os.environ['env_var1'], 'value') self.assertEqual(os.environ['env_var2'], 'value2') def test_environment_append_overwriting(self): with mock.patch.dict('os.environ', {'env_var1': 'value'}),\ env.environment_append({'env_var1': 'new_value'}): self.assertEqual(os.environ['env_var1'], 'new_value') def test_environment_append_list(self): with mock.patch.dict('os.environ', {}),\ env.environment_append({'env_var1': ['value1', 'value2']}): self.assertEqual(os.environ['env_var1'], 'value1' + os.pathsep + 'value2') def test_environment_append_unsetting_some_variables(self): with mock.patch.dict('os.environ', {'env_var1': 'value'}),\ env.environment_append({'env_var1': None, 'env_var2': 'value2'}): self.assertNotIn('env_var1', os.environ) self.assertEqual(os.environ['env_var2'], 'value2') def test_environment_append_unsetting_all_variables(self): with mock.patch.dict('os.environ', {'env_var1': 'value', 'env_var2': 'value2'}),\ env.environment_append({'env_var1': None}): self.assertNotIn('env_var1', os.environ) def test_environment_append_unsetting_non_existing_variables(self): with mock.patch.dict('os.environ', {'env_var2': 'value2'}),\ env.environment_append({'env_var1': None}): self.assertNotIn('env_var1', os.environ) # Kills a process by process name # # Uses the Performance Data Helper to locate the PID, then kills it. # Will only kill the process if there is only one process of that name # (eg, attempting to kill "Python.exe" will only work if there is only # one Python.exe running. (Note that the current process does not # count - ie, if Python.exe is hosting this script, you can still kill # another Python.exe (as long as there is only one other Python.exe) # Really just a demo for the win32pdh(util) module, which allows you # to get all sorts of information about a running process and many # other aspects of your system. import win32api, win32pdhutil, win32con, sys def killProcName(procname): # Change suggested by Dan Knierim, who found that this performed a # "refresh", allowing us to kill processes created since this was run # for the first time. try: win32pdhutil.GetPerformanceAttributes('Process','ID Process',procname) except: pass pids = win32pdhutil.FindPerformanceAttributesByName(procname) # If _my_ pid in there, remove it! try: pids.remove(win32api.GetCurrentProcessId()) except ValueError: pass if len(pids)==0: result = "Can't find %s" % procname elif len(pids)>1: result = "Found too many %s's - pids=`%s`" % (procname,pids) else: handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0,pids[0]) win32api.TerminateProcess(handle,0) win32api.CloseHandle(handle) result = "" return result if __name__ == '__main__': if len(sys.argv)>1: for procname in sys.argv[1:]: result = killProcName(procname) if result: print result print "Dumping all processes..." win32pdhutil.ShowAllProcesses() else: print "Killed %s" % procname else: print "Usage: killProcName.py procname ..." ''' Project Euler Problem 9 A Pythagorean triplet is a set of three natural numbers, a < b < c, for which, a**2 + b**2 = c**2 For example, 3**2 + 4**2 = 9 + 16 = 25 = 5**2. There exists exactly one Pythagorean triplet for which a + b + c = 1000. Find the product abc. Link : https://projecteuler.net/problem=9 ''' from operator import mul class Pythagoras(object): def __init__(self, sides=[]): self.sides = sides def get_sides_from_sum(self, psum=0): if psum <= 0 : print "Error: Pythagorean sum cannot be less than 0" return None for b in range(int(psum/5), int(psum/2)): a = ((((psum**2)/2) - (psum * b)))/(psum - b) c = psum - a - b if c < 0 : continue print a, b, c if ((a**2) + (b**2)) == (c**2): self.sides = [a, b, c] print self.sides return self.sides return None def get_product(self, sides=[]): if self.sides == [] and sides: self.sides = sides product = reduce(mul, self.sides) return product def main(): pythagoras = Pythagoras() print "Special Pythagorean Triplets" if pythagoras.get_sides_from_sum(1000): print "Product is ", pythagoras.get_product() if __name__ == "__main__": main() # Copyright (c) 2001-2010 Twisted Matrix Laboratories. # See LICENSE for details. """ Tests for ssh/transport.py and the classes therein. """ try: import pyasn1 except ImportError: pyasn1 = None try: import Crypto.Cipher.DES3 except ImportError: Crypto = None if pyasn1 is not None and Crypto is not None: dependencySkip = None from twisted.conch.ssh import transport, common, keys, factory from twisted.conch.test import keydata else: if pyasn1 is None: dependencySkip = "can't run w/o PyASN1" elif Crypto is None: dependencySkip = "can't run w/o PyCrypto" class transport: # fictional modules to make classes work class SSHTransportBase: pass class SSHServerTransport: pass class SSHClientTransport: pass class factory: class SSHFactory: pass from twisted.trial import unittest from twisted.internet import defer from twisted.protocols import loopback from twisted.python import randbytes from twisted.python.reflect import qual from twisted.python.hashlib import md5, sha1 from twisted.conch.ssh import service from twisted.test import proto_helpers from twisted.conch.error import ConchError class MockTransportBase(transport.SSHTransportBase): """ A base class for the client and server protocols. Stores the messages it receieves instead of ignoring them. @ivar errors: a list of tuples: (reasonCode, description) @ivar unimplementeds: a list of integers: sequence number @ivar debugs: a list of tuples: (alwaysDisplay, message, lang) @ivar ignoreds: a list of strings: ignored data """ def connectionMade(self): """ Set up instance variables. """ transport.SSHTransportBase.connectionMade(self) self.errors = [] self.unimplementeds = [] self.debugs = [] self.ignoreds = [] self.gotUnsupportedVersion = None def _unsupportedVersionReceived(self, remoteVersion): """ Intercept unsupported version call. @type remoteVersion: C{str} """ self.gotUnsupportedVersion = remoteVersion return transport.SSHTransportBase._unsupportedVersionReceived(self, remoteVersion) def receiveError(self, reasonCode, description): """ Store any errors received. @type reasonCode: C{int} @type description: C{str} """ self.errors.append((reasonCode, description)) def receiveUnimplemented(self, seqnum): """ Store any unimplemented packet messages. @type seqnum: C{int} """ self.unimplementeds.append(seqnum) def receiveDebug(self, alwaysDisplay, message, lang): """ Store any debug messages. @type alwaysDisplay: C{bool} @type message: C{str} @type lang: C{str} """ self.debugs.append((alwaysDisplay, message, lang)) def ssh_IGNORE(self, packet): """ Store any ignored data. @type packet: C{str} """ self.ignoreds.append(packet) class MockCipher(object): """ A mocked-up version of twisted.conch.ssh.transport.SSHCiphers. """ outCipType = 'test' encBlockSize = 6 inCipType = 'test' decBlockSize = 6 inMACType = 'test' outMACType = 'test' verifyDigestSize = 1 usedEncrypt = False usedDecrypt = False outMAC = (None, '', '', 1) inMAC = (None, '', '', 1) keys = () def encrypt(self, x): """ Called to encrypt the packet. Simply record that encryption was used and return the data unchanged. """ self.usedEncrypt = True if (len(x) % self.encBlockSize) != 0: raise RuntimeError("length %i modulo blocksize %i is not 0: %i" % (len(x), self.encBlockSize, len(x) % self.encBlockSize)) return x def decrypt(self, x): """ Called to decrypt the packet. Simply record that decryption was used and return the data unchanged. """ self.usedDecrypt = True if (len(x) % self.encBlockSize) != 0: raise RuntimeError("length %i modulo blocksize %i is not 0: %i" % (len(x), self.decBlockSize, len(x) % self.decBlockSize)) return x def makeMAC(self, outgoingPacketSequence, payload): """ Make a Message Authentication Code by sending the character value of the outgoing packet. """ return chr(outgoingPacketSequence) def verify(self, incomingPacketSequence, packet, macData): """ Verify the Message Authentication Code by checking that the packet sequence number is the same. """ return chr(incomingPacketSequence) == macData def setKeys(self, ivOut, keyOut, ivIn, keyIn, macIn, macOut): """ Record the keys. """ self.keys = (ivOut, keyOut, ivIn, keyIn, macIn, macOut) class MockCompression: """ A mocked-up compression, based on the zlib interface. Instead of compressing, it reverses the data and adds a 0x66 byte to the end. """ def compress(self, payload): return payload[::-1] # reversed def decompress(self, payload): return payload[:-1][::-1] def flush(self, kind): return '\x66' class MockService(service.SSHService): """ A mocked-up service, based on twisted.conch.ssh.service.SSHService. @ivar started: True if this service has been started. @ivar stopped: True if this service has been stopped. """ name = "MockService" started = False stopped = False protocolMessages = {0xff: "MSG_TEST", 71: "MSG_fiction"} def logPrefix(self): return "MockService" def serviceStarted(self): """ Record that the service was started. """ self.started = True def serviceStopped(self): """ Record that the service was stopped. """ self.stopped = True def ssh_TEST(self, packet): """ A message that this service responds to. """ self.transport.sendPacket(0xff, packet) class MockFactory(factory.SSHFactory): """ A mocked-up factory based on twisted.conch.ssh.factory.SSHFactory. """ services = { 'ssh-userauth': MockService} def getPublicKeys(self): """ Return the public keys that authenticate this server. """ return { 'ssh-rsa': keys.Key.fromString(keydata.publicRSA_openssh), 'ssh-dsa': keys.Key.fromString(keydata.publicDSA_openssh)} def getPrivateKeys(self): """ Return the private keys that authenticate this server. """ return { 'ssh-rsa': keys.Key.fromString(keydata.privateRSA_openssh), 'ssh-dsa': keys.Key.fromString(keydata.privateDSA_openssh)} def getPrimes(self): """ Return the Diffie-Hellman primes that can be used for the diffie-hellman-group-exchange-sha1 key exchange. """ return { 1024: ((2, transport.DH_PRIME),), 2048: ((3, transport.DH_PRIME),), 4096: ((5, 7),)} class MockOldFactoryPublicKeys(MockFactory): """ The old SSHFactory returned mappings from key names to strings from getPublicKeys(). We return those here for testing. """ def getPublicKeys(self): """ We used to map key types to public key blobs as strings. """ keys = MockFactory.getPublicKeys(self) for name, key in keys.items()[:]: keys[name] = key.blob() return keys class MockOldFactoryPrivateKeys(MockFactory): """ The old SSHFactory returned mappings from key names to PyCrypto key objects from getPrivateKeys(). We return those here for testing. """ def getPrivateKeys(self): """ We used to map key types to PyCrypto key objects. """ keys = MockFactory.getPrivateKeys(self) for name, key in keys.items()[:]: keys[name] = key.keyObject return keys class TransportTestCase(unittest.TestCase): """ Base class for transport test cases. """ klass = None if Crypto is None: skip = "cannot run w/o PyCrypto" if pyasn1 is None: skip = "cannot run w/o PyASN1" def setUp(self): self.transport = proto_helpers.StringTransport() self.proto = self.klass() self.packets = [] def secureRandom(len): """ Return a consistent entropy value """ return '\x99' * len self.oldSecureRandom = randbytes.secureRandom randbytes.secureRandom = secureRandom def stubSendPacket(messageType, payload): self.packets.append((messageType, payload)) self.proto.makeConnection(self.transport) # we just let the kex packet go into the transport self.proto.sendPacket = stubSendPacket def tearDown(self): randbytes.secureRandom = self.oldSecureRandom self.oldSecureRandom = None class BaseSSHTransportTestCase(TransportTestCase): """ Test TransportBase. It implements the non-server/client specific parts of the SSH transport protocol. """ klass = MockTransportBase def test_sendVersion(self): """ Test that the first thing sent over the connection is the version string. """ # the other setup was done in the setup method self.assertEquals(self.transport.value().split('\r\n', 1)[0], "SSH-2.0-Twisted") def test_sendPacketPlain(self): """ Test that plain (unencrypted, uncompressed) packets are sent correctly. The format is:: uint32 length (including type and padding length) byte padding length byte type bytes[length-padding length-2] data bytes[padding length] padding """ proto = MockTransportBase() proto.makeConnection(self.transport) self.transport.clear() message = ord('A') payload = 'BCDEFG' proto.sendPacket(message, payload) value = self.transport.value() self.assertEquals(value, '\x00\x00\x00\x0c\x04ABCDEFG\x99\x99\x99\x99') def test_sendPacketEncrypted(self): """ Test that packets sent while encryption is enabled are sent correctly. The whole packet should be encrypted. """ proto = MockTransportBase() proto.makeConnection(self.transport) proto.currentEncryptions = testCipher = MockCipher() message = ord('A') payload = 'BC' self.transport.clear() proto.sendPacket(message, payload) self.assertTrue(testCipher.usedEncrypt) value = self.transport.value() self.assertEquals(value, '\x00\x00\x00\x08\x04ABC\x99\x99\x99\x99\x01') def test_sendPacketCompressed(self): """ Test that packets sent while compression is enabled are sent correctly. The packet type and data should be encrypted. """ proto = MockTransportBase() proto.makeConnection(self.transport) proto.outgoingCompression = MockCompression() self.transport.clear() proto.sendPacket(ord('A'), 'B') value = self.transport.value() self.assertEquals( value, '\x00\x00\x00\x0c\x08BA\x66\x99\x99\x99\x99\x99\x99\x99\x99') def test_sendPacketBoth(self): """ Test that packets sent while compression and encryption are enabled are sent correctly. The packet type and data should be compressed and then the whole packet should be encrypted. """ proto = MockTransportBase() proto.makeConnection(self.transport) proto.currentEncryptions = testCipher = MockCipher() proto.outgoingCompression = MockCompression() message = ord('A') payload = 'BC' self.transport.clear() proto.sendPacket(message, payload) value = self.transport.value() self.assertEquals( value, '\x00\x00\x00\x0e\x09CBA\x66\x99\x99\x99\x99\x99\x99\x99\x99\x99' '\x01') def test_getPacketPlain(self): """ Test that packets are retrieved correctly out of the buffer when no encryption is enabled. """ proto = MockTransportBase() proto.makeConnection(self.transport) self.transport.clear() proto.sendPacket(ord('A'), 'BC') proto.buf = self.transport.value() + 'extra' self.assertEquals(proto.getPacket(), 'ABC') self.assertEquals(proto.buf, 'extra') def test_getPacketEncrypted(self): """ Test that encrypted packets are retrieved correctly. See test_sendPacketEncrypted. """ proto = MockTransportBase() proto.sendKexInit = lambda: None # don't send packets proto.makeConnection(self.transport) self.transport.clear() proto.currentEncryptions = testCipher = MockCipher() proto.sendPacket(ord('A'), 'BCD') value = self.transport.value() proto.buf = value[:MockCipher.decBlockSize] self.assertEquals(proto.getPacket(), None) self.assertTrue(testCipher.usedDecrypt) self.assertEquals(proto.first, '\x00\x00\x00\x0e\x09A') proto.buf += value[MockCipher.decBlockSize:] self.assertEquals(proto.getPacket(), 'ABCD') self.assertEquals(proto.buf, '') def test_getPacketCompressed(self): """ Test that compressed packets are retrieved correctly. See test_sendPacketCompressed. """ proto = MockTransportBase() proto.makeConnection(self.transport) self.transport.clear() proto.outgoingCompression = MockCompression() proto.incomingCompression = proto.outgoingCompression proto.sendPacket(ord('A'), 'BCD') proto.buf = self.transport.value() self.assertEquals(proto.getPacket(), 'ABCD') def test_getPacketBoth(self): """ Test that compressed and encrypted packets are retrieved correctly. See test_sendPacketBoth. """ proto = MockTransportBase() proto.sendKexInit = lambda: None proto.makeConnection(self.transport) self.transport.clear() proto.currentEncryptions = testCipher = MockCipher() proto.outgoingCompression = MockCompression() proto.incomingCompression = proto.outgoingCompression proto.sendPacket(ord('A'), 'BCDEFG') proto.buf = self.transport.value() self.assertEquals(proto.getPacket(), 'ABCDEFG') def test_ciphersAreValid(self): """ Test that all the supportedCiphers are valid. """ ciphers = transport.SSHCiphers('A', 'B', 'C', 'D') iv = key = '\x00' * 16 for cipName in self.proto.supportedCiphers: self.assertTrue(ciphers._getCipher(cipName, iv, key)) def test_sendKexInit(self): """ Test that the KEXINIT (key exchange initiation) message is sent correctly. Payload:: bytes[16] cookie string key exchange algorithms string public key algorithms string outgoing ciphers string incoming ciphers string outgoing MACs string incoming MACs string outgoing compressions string incoming compressions bool first packet follows uint32 0 """ value = self.transport.value().split('\r\n', 1)[1] self.proto.buf = value packet = self.proto.getPacket() self.assertEquals(packet[0], chr(transport.MSG_KEXINIT)) self.assertEquals(packet[1:17], '\x99' * 16) (kex, pubkeys, ciphers1, ciphers2, macs1, macs2, compressions1, compressions2, languages1, languages2, buf) = common.getNS(packet[17:], 10) self.assertEquals(kex, ','.join(self.proto.supportedKeyExchanges)) self.assertEquals(pubkeys, ','.join(self.proto.supportedPublicKeys)) self.assertEquals(ciphers1, ','.join(self.proto.supportedCiphers)) self.assertEquals(ciphers2, ','.join(self.proto.supportedCiphers)) self.assertEquals(macs1, ','.join(self.proto.supportedMACs)) self.assertEquals(macs2, ','.join(self.proto.supportedMACs)) self.assertEquals(compressions1, ','.join(self.proto.supportedCompressions)) self.assertEquals(compressions2, ','.join(self.proto.supportedCompressions)) self.assertEquals(languages1, ','.join(self.proto.supportedLanguages)) self.assertEquals(languages2, ','.join(self.proto.supportedLanguages)) self.assertEquals(buf, '\x00' * 5) def test_sendDebug(self): """ Test that debug messages are sent correctly. Payload:: bool always display string debug message string language """ self.proto.sendDebug("test", True, 'en') self.assertEquals( self.packets, [(transport.MSG_DEBUG, "\x01\x00\x00\x00\x04test\x00\x00\x00\x02en")]) def test_receiveDebug(self): """ Test that debug messages are received correctly. See test_sendDebug. """ self.proto.dispatchMessage( transport.MSG_DEBUG, '\x01\x00\x00\x00\x04test\x00\x00\x00\x02en') self.assertEquals(self.proto.debugs, [(True, 'test', 'en')]) def test_sendIgnore(self): """ Test that ignored messages are sent correctly. Payload:: string ignored data """ self.proto.sendIgnore("test") self.assertEquals( self.packets, [(transport.MSG_IGNORE, '\x00\x00\x00\x04test')]) def test_receiveIgnore(self): """ Test that ignored messages are received correctly. See test_sendIgnore. """ self.proto.dispatchMessage(transport.MSG_IGNORE, 'test') self.assertEquals(self.proto.ignoreds, ['test']) def test_sendUnimplemented(self): """ Test that unimplemented messages are sent correctly. Payload:: uint32 sequence number """ self.proto.sendUnimplemented() self.assertEquals( self.packets, [(transport.MSG_UNIMPLEMENTED, '\x00\x00\x00\x00')]) def test_receiveUnimplemented(self): """ Test that unimplemented messages are received correctly. See test_sendUnimplemented. """ self.proto.dispatchMessage(transport.MSG_UNIMPLEMENTED, '\x00\x00\x00\xff') self.assertEquals(self.proto.unimplementeds, [255]) def test_sendDisconnect(self): """ Test that disconnection messages are sent correctly. Payload:: uint32 reason code string reason description string language """ disconnected = [False] def stubLoseConnection(): disconnected[0] = True self.transport.loseConnection = stubLoseConnection self.proto.sendDisconnect(0xff, "test") self.assertEquals( self.packets, [(transport.MSG_DISCONNECT, "\x00\x00\x00\xff\x00\x00\x00\x04test\x00\x00\x00\x00")]) self.assertTrue(disconnected[0]) def test_receiveDisconnect(self): """ Test that disconnection messages are received correctly. See test_sendDisconnect. """ disconnected = [False] def stubLoseConnection(): disconnected[0] = True self.transport.loseConnection = stubLoseConnection self.proto.dispatchMessage(transport.MSG_DISCONNECT, '\x00\x00\x00\xff\x00\x00\x00\x04test') self.assertEquals(self.proto.errors, [(255, 'test')]) self.assertTrue(disconnected[0]) def test_dataReceived(self): """ Test that dataReceived parses packets and dispatches them to ssh_* methods. """ kexInit = [False] def stubKEXINIT(packet): kexInit[0] = True self.proto.ssh_KEXINIT = stubKEXINIT self.proto.dataReceived(self.transport.value()) self.assertTrue(self.proto.gotVersion) self.assertEquals(self.proto.ourVersionString, self.proto.otherVersionString) self.assertTrue(kexInit[0]) def test_service(self): """ Test that the transport can set the running service and dispatches packets to the service's packetReceived method. """ service = MockService() self.proto.setService(service) self.assertEquals(self.proto.service, service) self.assertTrue(service.started) self.proto.dispatchMessage(0xff, "test") self.assertEquals(self.packets, [(0xff, "test")]) service2 = MockService() self.proto.setService(service2) self.assertTrue(service2.started) self.assertTrue(service.stopped) self.proto.connectionLost(None) self.assertTrue(service2.stopped) def test_avatar(self): """ Test that the transport notifies the avatar of disconnections. """ disconnected = [False] def logout(): disconnected[0] = True self.proto.logoutFunction = logout self.proto.avatar = True self.proto.connectionLost(None) self.assertTrue(disconnected[0]) def test_isEncrypted(self): """ Test that the transport accurately reflects its encrypted status. """ self.assertFalse(self.proto.isEncrypted('in')) self.assertFalse(self.proto.isEncrypted('out')) self.assertFalse(self.proto.isEncrypted('both')) self.proto.currentEncryptions = MockCipher() self.assertTrue(self.proto.isEncrypted('in')) self.assertTrue(self.proto.isEncrypted('out')) self.assertTrue(self.proto.isEncrypted('both')) self.proto.currentEncryptions = transport.SSHCiphers('none', 'none', 'none', 'none') self.assertFalse(self.proto.isEncrypted('in')) self.assertFalse(self.proto.isEncrypted('out')) self.assertFalse(self.proto.isEncrypted('both')) self.assertRaises(TypeError, self.proto.isEncrypted, 'bad') def test_isVerified(self): """ Test that the transport accurately reflects its verified status. """ self.assertFalse(self.proto.isVerified('in')) self.assertFalse(self.proto.isVerified('out')) self.assertFalse(self.proto.isVerified('both')) self.proto.currentEncryptions = MockCipher() self.assertTrue(self.proto.isVerified('in')) self.assertTrue(self.proto.isVerified('out')) self.assertTrue(self.proto.isVerified('both')) self.proto.currentEncryptions = transport.SSHCiphers('none', 'none', 'none', 'none') self.assertFalse(self.proto.isVerified('in')) self.assertFalse(self.proto.isVerified('out')) self.assertFalse(self.proto.isVerified('both')) self.assertRaises(TypeError, self.proto.isVerified, 'bad') def test_loseConnection(self): """ Test that loseConnection sends a disconnect message and closes the connection. """ disconnected = [False] def stubLoseConnection(): disconnected[0] = True self.transport.loseConnection = stubLoseConnection self.proto.loseConnection() self.assertEquals(self.packets[0][0], transport.MSG_DISCONNECT) self.assertEquals(self.packets[0][1][3], chr(transport.DISCONNECT_CONNECTION_LOST)) def test_badVersion(self): """ Test that the transport disconnects when it receives a bad version. """ def testBad(version): self.packets = [] self.proto.gotVersion = False disconnected = [False] def stubLoseConnection(): disconnected[0] = True self.transport.loseConnection = stubLoseConnection for c in version + '\r\n': self.proto.dataReceived(c) self.assertTrue(disconnected[0]) self.assertEquals(self.packets[0][0], transport.MSG_DISCONNECT) self.assertEquals( self.packets[0][1][3], chr(transport.DISCONNECT_PROTOCOL_VERSION_NOT_SUPPORTED)) testBad('SSH-1.5-OpenSSH') testBad('SSH-3.0-Twisted') testBad('GET / HTTP/1.1') def test_dataBeforeVersion(self): """ Test that the transport ignores data sent before the version string. """ proto = MockTransportBase() proto.makeConnection(proto_helpers.StringTransport()) data = ("""here's some stuff beforehand here's some other stuff """ + proto.ourVersionString + "\r\n") [proto.dataReceived(c) for c in data] self.assertTrue(proto.gotVersion) self.assertEquals(proto.otherVersionString, proto.ourVersionString) def test_compatabilityVersion(self): """ Test that the transport treats the compatbility version (1.99) as equivalent to version 2.0. """ proto = MockTransportBase() proto.makeConnection(proto_helpers.StringTransport()) proto.dataReceived("SSH-1.99-OpenSSH\n") self.assertTrue(proto.gotVersion) self.assertEquals(proto.otherVersionString, "SSH-1.99-OpenSSH") def test_supportedVersionsAreAllowed(self): """ If an unusual SSH version is received and is included in C{supportedVersions}, an unsupported version error is not emitted. """ proto = MockTransportBase() proto.supportedVersions = ("9.99", ) proto.makeConnection(proto_helpers.StringTransport()) proto.dataReceived("SSH-9.99-OpenSSH\n") self.assertFalse(proto.gotUnsupportedVersion) def test_unsupportedVersionsCallUnsupportedVersionReceived(self): """ If an unusual SSH version is received and is not included in C{supportedVersions}, an unsupported version error is emitted. """ proto = MockTransportBase() proto.supportedVersions = ("2.0", ) proto.makeConnection(proto_helpers.StringTransport()) proto.dataReceived("SSH-9.99-OpenSSH\n") self.assertEquals("9.99", proto.gotUnsupportedVersion) def test_badPackets(self): """ Test that the transport disconnects with an error when it receives bad packets. """ def testBad(packet, error=transport.DISCONNECT_PROTOCOL_ERROR): self.packets = [] self.proto.buf = packet self.assertEquals(self.proto.getPacket(), None) self.assertEquals(len(self.packets), 1) self.assertEquals(self.packets[0][0], transport.MSG_DISCONNECT) self.assertEquals(self.packets[0][1][3], chr(error)) testBad('\xff' * 8) # big packet testBad('\x00\x00\x00\x05\x00BCDE') # length not modulo blocksize oldEncryptions = self.proto.currentEncryptions self.proto.currentEncryptions = MockCipher() testBad('\x00\x00\x00\x08\x06AB123456', # bad MAC transport.DISCONNECT_MAC_ERROR) self.proto.currentEncryptions.decrypt = lambda x: x[:-1] testBad('\x00\x00\x00\x08\x06BCDEFGHIJK') # bad decryption self.proto.currentEncryptions = oldEncryptions self.proto.incomingCompression = MockCompression() def stubDecompress(payload): raise Exception('bad compression') self.proto.incomingCompression.decompress = stubDecompress testBad('\x00\x00\x00\x04\x00BCDE', # bad decompression transport.DISCONNECT_COMPRESSION_ERROR) self.flushLoggedErrors() def test_unimplementedPackets(self): """ Test that unimplemented packet types cause MSG_UNIMPLEMENTED packets to be sent. """ seqnum = self.proto.incomingPacketSequence def checkUnimplemented(seqnum=seqnum): self.assertEquals(self.packets[0][0], transport.MSG_UNIMPLEMENTED) self.assertEquals(self.packets[0][1][3], chr(seqnum)) self.proto.packets = [] seqnum += 1 self.proto.dispatchMessage(40, '') checkUnimplemented() transport.messages[41] = 'MSG_fiction' self.proto.dispatchMessage(41, '') checkUnimplemented() self.proto.dispatchMessage(60, '') checkUnimplemented() self.proto.setService(MockService()) self.proto.dispatchMessage(70, '') checkUnimplemented() self.proto.dispatchMessage(71, '') checkUnimplemented() def test_getKey(self): """ Test that _getKey generates the correct keys. """ self.proto.sessionID = 'EF' k1 = sha1('AB' + 'CD' + 'K' + self.proto.sessionID).digest() k2 = sha1('ABCD' + k1).digest() self.assertEquals(self.proto._getKey('K', 'AB', 'CD'), k1 + k2) def test_multipleClasses(self): """ Test that multiple instances have distinct states. """ proto = self.proto proto.dataReceived(self.transport.value()) proto.currentEncryptions = MockCipher() proto.outgoingCompression = MockCompression() proto.incomingCompression = MockCompression() proto.setService(MockService()) proto2 = MockTransportBase() proto2.makeConnection(proto_helpers.StringTransport()) proto2.sendIgnore('') self.failIfEquals(proto.gotVersion, proto2.gotVersion) self.failIfEquals(proto.transport, proto2.transport) self.failIfEquals(proto.outgoingPacketSequence, proto2.outgoingPacketSequence) self.failIfEquals(proto.incomingPacketSequence, proto2.incomingPacketSequence) self.failIfEquals(proto.currentEncryptions, proto2.currentEncryptions) self.failIfEquals(proto.service, proto2.service) class ServerAndClientSSHTransportBaseCase: """ Tests that need to be run on both the server and the client. """ def checkDisconnected(self, kind=None): """ Helper function to check if the transport disconnected. """ if kind is None: kind = transport.DISCONNECT_PROTOCOL_ERROR self.assertEquals(self.packets[-1][0], transport.MSG_DISCONNECT) self.assertEquals(self.packets[-1][1][3], chr(kind)) def connectModifiedProtocol(self, protoModification, kind=None): """ Helper function to connect a modified protocol to the test protocol and test for disconnection. """ if kind is None: kind = transport.DISCONNECT_KEY_EXCHANGE_FAILED proto2 = self.klass() protoModification(proto2) proto2.makeConnection(proto_helpers.StringTransport()) self.proto.dataReceived(proto2.transport.value()) if kind: self.checkDisconnected(kind) return proto2 def test_disconnectIfCantMatchKex(self): """ Test that the transport disconnects if it can't match the key exchange """ def blankKeyExchanges(proto2): proto2.supportedKeyExchanges = [] self.connectModifiedProtocol(blankKeyExchanges) def test_disconnectIfCantMatchKeyAlg(self): """ Like test_disconnectIfCantMatchKex, but for the key algorithm. """ def blankPublicKeys(proto2): proto2.supportedPublicKeys = [] self.connectModifiedProtocol(blankPublicKeys) def test_disconnectIfCantMatchCompression(self): """ Like test_disconnectIfCantMatchKex, but for the compression. """ def blankCompressions(proto2): proto2.supportedCompressions = [] self.connectModifiedProtocol(blankCompressions) def test_disconnectIfCantMatchCipher(self): """ Like test_disconnectIfCantMatchKex, but for the encryption. """ def blankCiphers(proto2): proto2.supportedCiphers = [] self.connectModifiedProtocol(blankCiphers) def test_disconnectIfCantMatchMAC(self): """ Like test_disconnectIfCantMatchKex, but for the MAC. """ def blankMACs(proto2): proto2.supportedMACs = [] self.connectModifiedProtocol(blankMACs) class ServerSSHTransportTestCase(ServerAndClientSSHTransportBaseCase, TransportTestCase): """ Tests for the SSHServerTransport. """ klass = transport.SSHServerTransport def setUp(self): TransportTestCase.setUp(self) self.proto.factory = MockFactory() self.proto.factory.startFactory() def tearDown(self): TransportTestCase.tearDown(self) self.proto.factory.stopFactory() del self.proto.factory def test_KEXINIT(self): """ Test that receiving a KEXINIT packet sets up the correct values on the server. """ self.proto.dataReceived( 'SSH-2.0-Twisted\r\n\x00\x00\x01\xd4\t\x14' '\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99' '\x99\x00\x00\x00=diffie-hellman-group1-sha1,diffie-hellman-g' 'roup-exchange-sha1\x00\x00\x00\x0fssh-dss,ssh-rsa\x00\x00\x00' '\x85aes128-ctr,aes128-cbc,aes192-ctr,aes192-cbc,aes256-ctr,ae' 's256-cbc,cast128-ctr,cast128-cbc,blowfish-ctr,blowfish-cbc,3d' 'es-ctr,3des-cbc\x00\x00\x00\x85aes128-ctr,aes128-cbc,aes192-c' 'tr,aes192-cbc,aes256-ctr,aes256-cbc,cast128-ctr,cast128-cbc,b' 'lowfish-ctr,blowfish-cbc,3des-ctr,3des-cbc\x00\x00\x00\x12hma' 'c-md5,hmac-sha1\x00\x00\x00\x12hmac-md5,hmac-sha1\x00\x00\x00' '\tnone,zlib\x00\x00\x00\tnone,zlib\x00\x00\x00\x00\x00\x00' '\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x99\x99\x99\x99' '\x99') self.assertEquals(self.proto.kexAlg, 'diffie-hellman-group1-sha1') self.assertEquals(self.proto.keyAlg, 'ssh-dss') self.assertEquals(self.proto.outgoingCompressionType, 'none') self.assertEquals(self.proto.incomingCompressionType, 'none') ne = self.proto.nextEncryptions self.assertEquals(ne.outCipType, 'aes128-ctr') self.assertEquals(ne.inCipType, 'aes128-ctr') self.assertEquals(ne.outMACType, 'hmac-md5') self.assertEquals(ne.inMACType, 'hmac-md5') def test_ignoreGuessPacketKex(self): """ The client is allowed to send a guessed key exchange packet after it sends the KEXINIT packet. However, if the key exchanges do not match, that guess packet must be ignored. This tests that the packet is ignored in the case of the key exchange method not matching. """ kexInitPacket = '\x00' * 16 + ( ''.join([common.NS(x) for x in [','.join(y) for y in [self.proto.supportedKeyExchanges[::-1], self.proto.supportedPublicKeys, self.proto.supportedCiphers, self.proto.supportedCiphers, self.proto.supportedMACs, self.proto.supportedMACs, self.proto.supportedCompressions, self.proto.supportedCompressions, self.proto.supportedLanguages, self.proto.supportedLanguages]]])) + ( '\xff\x00\x00\x00\x00') self.proto.ssh_KEXINIT(kexInitPacket) self.assertTrue(self.proto.ignoreNextPacket) self.proto.ssh_DEBUG("\x01\x00\x00\x00\x04test\x00\x00\x00\x00") self.assertTrue(self.proto.ignoreNextPacket) self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x08\x00') self.assertFalse(self.proto.ignoreNextPacket) self.assertEquals(self.packets, []) self.proto.ignoreNextPacket = True self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x08\x00' * 3) self.assertFalse(self.proto.ignoreNextPacket) self.assertEquals(self.packets, []) def test_ignoreGuessPacketKey(self): """ Like test_ignoreGuessPacketKex, but for an incorrectly guessed public key format. """ kexInitPacket = '\x00' * 16 + ( ''.join([common.NS(x) for x in [','.join(y) for y in [self.proto.supportedKeyExchanges, self.proto.supportedPublicKeys[::-1], self.proto.supportedCiphers, self.proto.supportedCiphers, self.proto.supportedMACs, self.proto.supportedMACs, self.proto.supportedCompressions, self.proto.supportedCompressions, self.proto.supportedLanguages, self.proto.supportedLanguages]]])) + ( '\xff\x00\x00\x00\x00') self.proto.ssh_KEXINIT(kexInitPacket) self.assertTrue(self.proto.ignoreNextPacket) self.proto.ssh_DEBUG("\x01\x00\x00\x00\x04test\x00\x00\x00\x00") self.assertTrue(self.proto.ignoreNextPacket) self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x08\x00') self.assertFalse(self.proto.ignoreNextPacket) self.assertEquals(self.packets, []) self.proto.ignoreNextPacket = True self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x08\x00' * 3) self.assertFalse(self.proto.ignoreNextPacket) self.assertEquals(self.packets, []) def test_KEXDH_INIT(self): """ Test that the KEXDH_INIT packet causes the server to send a KEXDH_REPLY with the server's public key and a signature. """ self.proto.supportedKeyExchanges = ['diffie-hellman-group1-sha1'] self.proto.supportedPublicKeys = ['ssh-rsa'] self.proto.dataReceived(self.transport.value()) e = pow(transport.DH_GENERATOR, 5000, transport.DH_PRIME) self.proto.ssh_KEX_DH_GEX_REQUEST_OLD(common.MP(e)) y = common.getMP('\x00\x00\x00\x40' + '\x99' * 64)[0] f = common._MPpow(transport.DH_GENERATOR, y, transport.DH_PRIME) sharedSecret = common._MPpow(e, y, transport.DH_PRIME) h = sha1() h.update(common.NS(self.proto.ourVersionString) * 2) h.update(common.NS(self.proto.ourKexInitPayload) * 2) h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob())) h.update(common.MP(e)) h.update(f) h.update(sharedSecret) exchangeHash = h.digest() signature = self.proto.factory.privateKeys['ssh-rsa'].sign( exchangeHash) self.assertEquals( self.packets, [(transport.MSG_KEXDH_REPLY, common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) + f + common.NS(signature)), (transport.MSG_NEWKEYS, '')]) def test_KEX_DH_GEX_REQUEST_OLD(self): """ Test that the KEX_DH_GEX_REQUEST_OLD message causes the server to reply with a KEX_DH_GEX_GROUP message with the correct Diffie-Hellman group. """ self.proto.supportedKeyExchanges = [ 'diffie-hellman-group-exchange-sha1'] self.proto.supportedPublicKeys = ['ssh-rsa'] self.proto.dataReceived(self.transport.value()) self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x04\x00') self.assertEquals( self.packets, [(transport.MSG_KEX_DH_GEX_GROUP, common.MP(transport.DH_PRIME) + '\x00\x00\x00\x01\x02')]) self.assertEquals(self.proto.g, 2) self.assertEquals(self.proto.p, transport.DH_PRIME) def test_KEX_DH_GEX_REQUEST_OLD_badKexAlg(self): """ Test that if the server recieves a KEX_DH_GEX_REQUEST_OLD message and the key exchange algorithm is not 'diffie-hellman-group1-sha1' or 'diffie-hellman-group-exchange-sha1', we raise a ConchError. """ self.proto.kexAlg = None self.assertRaises(ConchError, self.proto.ssh_KEX_DH_GEX_REQUEST_OLD, None) def test_KEX_DH_GEX_REQUEST(self): """ Test that the KEX_DH_GEX_REQUEST message causes the server to reply with a KEX_DH_GEX_GROUP message with the correct Diffie-Hellman group. """ self.proto.supportedKeyExchanges = [ 'diffie-hellman-group-exchange-sha1'] self.proto.supportedPublicKeys = ['ssh-rsa'] self.proto.dataReceived(self.transport.value()) self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x04\x00\x00\x00\x08\x00' + '\x00\x00\x0c\x00') self.assertEquals( self.packets, [(transport.MSG_KEX_DH_GEX_GROUP, common.MP(transport.DH_PRIME) + '\x00\x00\x00\x01\x03')]) self.assertEquals(self.proto.g, 3) self.assertEquals(self.proto.p, transport.DH_PRIME) def test_KEX_DH_GEX_INIT_after_REQUEST(self): """ Test that the KEX_DH_GEX_INIT message after the client sends KEX_DH_GEX_REQUEST causes the server to send a KEX_DH_GEX_INIT message with a public key and signature. """ self.test_KEX_DH_GEX_REQUEST() e = pow(self.proto.g, 3, self.proto.p) y = common.getMP('\x00\x00\x00\x80' + '\x99' * 128)[0] f = common._MPpow(self.proto.g, y, self.proto.p) sharedSecret = common._MPpow(e, y, self.proto.p) h = sha1() h.update(common.NS(self.proto.ourVersionString) * 2) h.update(common.NS(self.proto.ourKexInitPayload) * 2) h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob())) h.update('\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00\x0c\x00') h.update(common.MP(self.proto.p)) h.update(common.MP(self.proto.g)) h.update(common.MP(e)) h.update(f) h.update(sharedSecret) exchangeHash = h.digest() self.proto.ssh_KEX_DH_GEX_INIT(common.MP(e)) self.assertEquals( self.packets[1], (transport.MSG_KEX_DH_GEX_REPLY, common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) + f + common.NS(self.proto.factory.privateKeys['ssh-rsa'].sign( exchangeHash)))) def test_KEX_DH_GEX_INIT_after_REQUEST_OLD(self): """ Test that the KEX_DH_GEX_INIT message after the client sends KEX_DH_GEX_REQUEST_OLD causes the server to sent a KEX_DH_GEX_INIT message with a public key and signature. """ self.test_KEX_DH_GEX_REQUEST_OLD() e = pow(self.proto.g, 3, self.proto.p) y = common.getMP('\x00\x00\x00\x80' + '\x99' * 128)[0] f = common._MPpow(self.proto.g, y, self.proto.p) sharedSecret = common._MPpow(e, y, self.proto.p) h = sha1() h.update(common.NS(self.proto.ourVersionString) * 2) h.update(common.NS(self.proto.ourKexInitPayload) * 2) h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob())) h.update('\x00\x00\x04\x00') h.update(common.MP(self.proto.p)) h.update(common.MP(self.proto.g)) h.update(common.MP(e)) h.update(f) h.update(sharedSecret) exchangeHash = h.digest() self.proto.ssh_KEX_DH_GEX_INIT(common.MP(e)) self.assertEquals( self.packets[1:], [(transport.MSG_KEX_DH_GEX_REPLY, common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) + f + common.NS(self.proto.factory.privateKeys['ssh-rsa'].sign( exchangeHash))), (transport.MSG_NEWKEYS, '')]) def test_keySetup(self): """ Test that _keySetup sets up the next encryption keys. """ self.proto.nextEncryptions = MockCipher() self.proto._keySetup('AB', 'CD') self.assertEquals(self.proto.sessionID, 'CD') self.proto._keySetup('AB', 'EF') self.assertEquals(self.proto.sessionID, 'CD') self.assertEquals(self.packets[-1], (transport.MSG_NEWKEYS, '')) newKeys = [self.proto._getKey(c, 'AB', 'EF') for c in 'ABCDEF'] self.assertEquals( self.proto.nextEncryptions.keys, (newKeys[1], newKeys[3], newKeys[0], newKeys[2], newKeys[5], newKeys[4])) def test_NEWKEYS(self): """ Test that NEWKEYS transitions the keys in nextEncryptions to currentEncryptions. """ self.test_KEXINIT() self.proto.nextEncryptions = transport.SSHCiphers('none', 'none', 'none', 'none') self.proto.ssh_NEWKEYS('') self.assertIdentical(self.proto.currentEncryptions, self.proto.nextEncryptions) self.assertIdentical(self.proto.outgoingCompression, None) self.assertIdentical(self.proto.incomingCompression, None) self.proto.outgoingCompressionType = 'zlib' self.proto.ssh_NEWKEYS('') self.failIfIdentical(self.proto.outgoingCompression, None) self.proto.incomingCompressionType = 'zlib' self.proto.ssh_NEWKEYS('') self.failIfIdentical(self.proto.incomingCompression, None) def test_SERVICE_REQUEST(self): """ Test that the SERVICE_REQUEST message requests and starts a service. """ self.proto.ssh_SERVICE_REQUEST(common.NS('ssh-userauth')) self.assertEquals(self.packets, [(transport.MSG_SERVICE_ACCEPT, common.NS('ssh-userauth'))]) self.assertEquals(self.proto.service.name, 'MockService') def test_disconnectNEWKEYSData(self): """ Test that NEWKEYS disconnects if it receives data. """ self.proto.ssh_NEWKEYS("bad packet") self.checkDisconnected() def test_disconnectSERVICE_REQUESTBadService(self): """ Test that SERVICE_REQUESTS disconnects if an unknown service is requested. """ self.proto.ssh_SERVICE_REQUEST(common.NS('no service')) self.checkDisconnected(transport.DISCONNECT_SERVICE_NOT_AVAILABLE) class ClientSSHTransportTestCase(ServerAndClientSSHTransportBaseCase, TransportTestCase): """ Tests for SSHClientTransport. """ klass = transport.SSHClientTransport def test_KEXINIT(self): """ Test that receiving a KEXINIT packet sets up the correct values on the client. The way algorithms are picks is that the first item in the client's list that is also in the server's list is chosen. """ self.proto.dataReceived( 'SSH-2.0-Twisted\r\n\x00\x00\x01\xd4\t\x14' '\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99' '\x99\x00\x00\x00=diffie-hellman-group1-sha1,diffie-hellman-g' 'roup-exchange-sha1\x00\x00\x00\x0fssh-dss,ssh-rsa\x00\x00\x00' '\x85aes128-ctr,aes128-cbc,aes192-ctr,aes192-cbc,aes256-ctr,ae' 's256-cbc,cast128-ctr,cast128-cbc,blowfish-ctr,blowfish-cbc,3d' 'es-ctr,3des-cbc\x00\x00\x00\x85aes128-ctr,aes128-cbc,aes192-c' 'tr,aes192-cbc,aes256-ctr,aes256-cbc,cast128-ctr,cast128-cbc,b' 'lowfish-ctr,blowfish-cbc,3des-ctr,3des-cbc\x00\x00\x00\x12hma' 'c-md5,hmac-sha1\x00\x00\x00\x12hmac-md5,hmac-sha1\x00\x00\x00' '\tzlib,none\x00\x00\x00\tzlib,none\x00\x00\x00\x00\x00\x00' '\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x99\x99\x99\x99' '\x99') self.assertEquals(self.proto.kexAlg, 'diffie-hellman-group-exchange-sha1') self.assertEquals(self.proto.keyAlg, 'ssh-rsa') self.assertEquals(self.proto.outgoingCompressionType, 'none') self.assertEquals(self.proto.incomingCompressionType, 'none') ne = self.proto.nextEncryptions self.assertEquals(ne.outCipType, 'aes256-ctr') self.assertEquals(ne.inCipType, 'aes256-ctr') self.assertEquals(ne.outMACType, 'hmac-sha1') self.assertEquals(ne.inMACType, 'hmac-sha1') def verifyHostKey(self, pubKey, fingerprint): """ Mock version of SSHClientTransport.verifyHostKey. """ self.calledVerifyHostKey = True self.assertEquals(pubKey, self.blob) self.assertEquals(fingerprint.replace(':', ''), md5(pubKey).hexdigest()) return defer.succeed(True) def setUp(self): TransportTestCase.setUp(self) self.blob = keys.Key.fromString(keydata.publicRSA_openssh).blob() self.privObj = keys.Key.fromString(keydata.privateRSA_openssh) self.calledVerifyHostKey = False self.proto.verifyHostKey = self.verifyHostKey def test_notImplementedClientMethods(self): """ verifyHostKey() should return a Deferred which fails with a NotImplementedError exception. connectionSecure() should raise NotImplementedError(). """ self.assertRaises(NotImplementedError, self.klass().connectionSecure) def _checkRaises(f): f.trap(NotImplementedError) d = self.klass().verifyHostKey(None, None) return d.addCallback(self.fail).addErrback(_checkRaises) def test_KEXINIT_groupexchange(self): """ Test that a KEXINIT packet with a group-exchange key exchange results in a KEX_DH_GEX_REQUEST_OLD message.. """ self.proto.supportedKeyExchanges = [ 'diffie-hellman-group-exchange-sha1'] self.proto.dataReceived(self.transport.value()) self.assertEquals(self.packets, [(transport.MSG_KEX_DH_GEX_REQUEST_OLD, '\x00\x00\x08\x00')]) def test_KEXINIT_group1(self): """ Like test_KEXINIT_groupexchange, but for the group-1 key exchange. """ self.proto.supportedKeyExchanges = ['diffie-hellman-group1-sha1'] self.proto.dataReceived(self.transport.value()) self.assertEquals(common.MP(self.proto.x)[5:], '\x99' * 64) self.assertEquals(self.packets, [(transport.MSG_KEXDH_INIT, self.proto.e)]) def test_KEXINIT_badKexAlg(self): """ Test that the client raises a ConchError if it receives a KEXINIT message bug doesn't have a key exchange algorithm that we understand. """ self.proto.supportedKeyExchanges = ['diffie-hellman-group2-sha1'] data = self.transport.value().replace('group1', 'group2') self.assertRaises(ConchError, self.proto.dataReceived, data) def test_KEXDH_REPLY(self): """ Test that the KEXDH_REPLY message verifies the server. """ self.test_KEXINIT_group1() sharedSecret = common._MPpow(transport.DH_GENERATOR, self.proto.x, transport.DH_PRIME) h = sha1() h.update(common.NS(self.proto.ourVersionString) * 2) h.update(common.NS(self.proto.ourKexInitPayload) * 2) h.update(common.NS(self.blob)) h.update(self.proto.e) h.update('\x00\x00\x00\x01\x02') # f h.update(sharedSecret) exchangeHash = h.digest() def _cbTestKEXDH_REPLY(value): self.assertIdentical(value, None) self.assertEquals(self.calledVerifyHostKey, True) self.assertEquals(self.proto.sessionID, exchangeHash) signature = self.privObj.sign(exchangeHash) d = self.proto.ssh_KEX_DH_GEX_GROUP( (common.NS(self.blob) + '\x00\x00\x00\x01\x02' + common.NS(signature))) d.addCallback(_cbTestKEXDH_REPLY) return d def test_KEX_DH_GEX_GROUP(self): """ Test that the KEX_DH_GEX_GROUP message results in a KEX_DH_GEX_INIT message with the client's Diffie-Hellman public key. """ self.test_KEXINIT_groupexchange() self.proto.ssh_KEX_DH_GEX_GROUP( '\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02') self.assertEquals(self.proto.p, 15) self.assertEquals(self.proto.g, 2) self.assertEquals(common.MP(self.proto.x)[5:], '\x99' * 40) self.assertEquals(self.proto.e, common.MP(pow(2, self.proto.x, 15))) self.assertEquals(self.packets[1:], [(transport.MSG_KEX_DH_GEX_INIT, self.proto.e)]) def test_KEX_DH_GEX_REPLY(self): """ Test that the KEX_DH_GEX_REPLY message results in a verified server. """ self.test_KEX_DH_GEX_GROUP() sharedSecret = common._MPpow(3, self.proto.x, self.proto.p) h = sha1() h.update(common.NS(self.proto.ourVersionString) * 2) h.update(common.NS(self.proto.ourKexInitPayload) * 2) h.update(common.NS(self.blob)) h.update('\x00\x00\x08\x00\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02') h.update(self.proto.e) h.update('\x00\x00\x00\x01\x03') # f h.update(sharedSecret) exchangeHash = h.digest() def _cbTestKEX_DH_GEX_REPLY(value): self.assertIdentical(value, None) self.assertEquals(self.calledVerifyHostKey, True) self.assertEquals(self.proto.sessionID, exchangeHash) signature = self.privObj.sign(exchangeHash) d = self.proto.ssh_KEX_DH_GEX_REPLY( common.NS(self.blob) + '\x00\x00\x00\x01\x03' + common.NS(signature)) d.addCallback(_cbTestKEX_DH_GEX_REPLY) return d def test_keySetup(self): """ Test that _keySetup sets up the next encryption keys. """ self.proto.nextEncryptions = MockCipher() self.proto._keySetup('AB', 'CD') self.assertEquals(self.proto.sessionID, 'CD') self.proto._keySetup('AB', 'EF') self.assertEquals(self.proto.sessionID, 'CD') self.assertEquals(self.packets[-1], (transport.MSG_NEWKEYS, '')) newKeys = [self.proto._getKey(c, 'AB', 'EF') for c in 'ABCDEF'] self.assertEquals(self.proto.nextEncryptions.keys, (newKeys[0], newKeys[2], newKeys[1], newKeys[3], newKeys[4], newKeys[5])) def test_NEWKEYS(self): """ Test that NEWKEYS transitions the keys from nextEncryptions to currentEncryptions. """ self.test_KEXINIT() secure = [False] def stubConnectionSecure(): secure[0] = True self.proto.connectionSecure = stubConnectionSecure self.proto.nextEncryptions = transport.SSHCiphers('none', 'none', 'none', 'none') self.proto.ssh_NEWKEYS('') self.failIfIdentical(self.proto.currentEncryptions, self.proto.nextEncryptions) self.proto.nextEncryptions = MockCipher() self.proto._keySetup('AB', 'EF') self.assertIdentical(self.proto.outgoingCompression, None) self.assertIdentical(self.proto.incomingCompression, None) self.assertIdentical(self.proto.currentEncryptions, self.proto.nextEncryptions) self.assertTrue(secure[0]) self.proto.outgoingCompressionType = 'zlib' self.proto.ssh_NEWKEYS('') self.failIfIdentical(self.proto.outgoingCompression, None) self.proto.incomingCompressionType = 'zlib' self.proto.ssh_NEWKEYS('') self.failIfIdentical(self.proto.incomingCompression, None) def test_SERVICE_ACCEPT(self): """ Test that the SERVICE_ACCEPT packet starts the requested service. """ self.proto.instance = MockService() self.proto.ssh_SERVICE_ACCEPT('\x00\x00\x00\x0bMockService') self.assertTrue(self.proto.instance.started) def test_requestService(self): """ Test that requesting a service sends a SERVICE_REQUEST packet. """ self.proto.requestService(MockService()) self.assertEquals(self.packets, [(transport.MSG_SERVICE_REQUEST, '\x00\x00\x00\x0bMockService')]) def test_disconnectKEXDH_REPLYBadSignature(self): """ Test that KEXDH_REPLY disconnects if the signature is bad. """ self.test_KEXDH_REPLY() self.proto._continueKEXDH_REPLY(None, self.blob, 3, "bad signature") self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED) def test_disconnectGEX_REPLYBadSignature(self): """ Like test_disconnectKEXDH_REPLYBadSignature, but for DH_GEX_REPLY. """ self.test_KEX_DH_GEX_REPLY() self.proto._continueGEX_REPLY(None, self.blob, 3, "bad signature") self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED) def test_disconnectNEWKEYSData(self): """ Test that NEWKEYS disconnects if it receives data. """ self.proto.ssh_NEWKEYS("bad packet") self.checkDisconnected() def test_disconnectSERVICE_ACCEPT(self): """ Test that SERVICE_ACCEPT disconnects if the accepted protocol is differet from the asked-for protocol. """ self.proto.instance = MockService() self.proto.ssh_SERVICE_ACCEPT('\x00\x00\x00\x03bad') self.checkDisconnected() class SSHCiphersTestCase(unittest.TestCase): """ Tests for the SSHCiphers helper class. """ if Crypto is None: skip = "cannot run w/o PyCrypto" if pyasn1 is None: skip = "cannot run w/o PyASN1" def test_init(self): """ Test that the initializer sets up the SSHCiphers object. """ ciphers = transport.SSHCiphers('A', 'B', 'C', 'D') self.assertEquals(ciphers.outCipType, 'A') self.assertEquals(ciphers.inCipType, 'B') self.assertEquals(ciphers.outMACType, 'C') self.assertEquals(ciphers.inMACType, 'D') def test_getCipher(self): """ Test that the _getCipher method returns the correct cipher. """ ciphers = transport.SSHCiphers('A', 'B', 'C', 'D') iv = key = '\x00' * 16 for cipName, (modName, keySize, counter) in ciphers.cipherMap.items(): cip = ciphers._getCipher(cipName, iv, key) if cipName == 'none': self.assertIsInstance(cip, transport._DummyCipher) else: self.assertTrue(str(cip).startswith('<' + modName)) def test_getMAC(self): """ Test that the _getMAC method returns the correct MAC. """ ciphers = transport.SSHCiphers('A', 'B', 'C', 'D') key = '\x00' * 64 for macName, mac in ciphers.macMap.items(): mod = ciphers._getMAC(macName, key) if macName == 'none': self.assertIdentical(mac, None) else: self.assertEquals(mod[0], mac) self.assertEquals(mod[1], Crypto.Cipher.XOR.new('\x36').encrypt(key)) self.assertEquals(mod[2], Crypto.Cipher.XOR.new('\x5c').encrypt(key)) self.assertEquals(mod[3], len(mod[0]().digest())) def test_setKeysCiphers(self): """ Test that setKeys sets up the ciphers. """ key = '\x00' * 64 cipherItems = transport.SSHCiphers.cipherMap.items() for cipName, (modName, keySize, counter) in cipherItems: encCipher = transport.SSHCiphers(cipName, 'none', 'none', 'none') decCipher = transport.SSHCiphers('none', cipName, 'none', 'none') cip = encCipher._getCipher(cipName, key, key) bs = cip.block_size encCipher.setKeys(key, key, '', '', '', '') decCipher.setKeys('', '', key, key, '', '') self.assertEquals(encCipher.encBlockSize, bs) self.assertEquals(decCipher.decBlockSize, bs) enc = cip.encrypt(key[:bs]) enc2 = cip.encrypt(key[:bs]) if counter: self.failIfEquals(enc, enc2) self.assertEquals(encCipher.encrypt(key[:bs]), enc) self.assertEquals(encCipher.encrypt(key[:bs]), enc2) self.assertEquals(decCipher.decrypt(enc), key[:bs]) self.assertEquals(decCipher.decrypt(enc2), key[:bs]) def test_setKeysMACs(self): """ Test that setKeys sets up the MACs. """ key = '\x00' * 64 for macName, mod in transport.SSHCiphers.macMap.items(): outMac = transport.SSHCiphers('none', 'none', macName, 'none') inMac = transport.SSHCiphers('none', 'none', 'none', macName) outMac.setKeys('', '', '', '', key, '') inMac.setKeys('', '', '', '', '', key) if mod: ds = mod().digest_size else: ds = 0 self.assertEquals(inMac.verifyDigestSize, ds) if mod: mod, i, o, ds = outMac._getMAC(macName, key) seqid = 0 data = key packet = '\x00' * 4 + key if mod: mac = mod(o + mod(i + packet).digest()).digest() else: mac = '' self.assertEquals(outMac.makeMAC(seqid, data), mac) self.assertTrue(inMac.verify(seqid, data, mac)) class CounterTestCase(unittest.TestCase): """ Tests for the _Counter helper class. """ if Crypto is None: skip = "cannot run w/o PyCrypto" if pyasn1 is None: skip = "cannot run w/o PyASN1" def test_init(self): """ Test that the counter is initialized correctly. """ counter = transport._Counter('\x00' * 8 + '\xff' * 8, 8) self.assertEquals(counter.blockSize, 8) self.assertEquals(counter.count.tostring(), '\x00' * 8) def test_count(self): """ Test that the counter counts incrementally and wraps at the top. """ counter = transport._Counter('\x00', 1) self.assertEquals(counter(), '\x01') self.assertEquals(counter(), '\x02') [counter() for i in range(252)] self.assertEquals(counter(), '\xff') self.assertEquals(counter(), '\x00') class TransportLoopbackTestCase(unittest.TestCase): """ Test the server transport and client transport against each other, """ if Crypto is None: skip = "cannot run w/o PyCrypto" if pyasn1 is None: skip = "cannot run w/o PyASN1" def _runClientServer(self, mod): """ Run an async client and server, modifying each using the mod function provided. Returns a Deferred called back when both Protocols have disconnected. @type mod: C{func} @rtype: C{defer.Deferred} """ factory = MockFactory() server = transport.SSHServerTransport() server.factory = factory factory.startFactory() server.errors = [] server.receiveError = lambda code, desc: server.errors.append(( code, desc)) client = transport.SSHClientTransport() client.verifyHostKey = lambda x, y: defer.succeed(None) client.errors = [] client.receiveError = lambda code, desc: client.errors.append(( code, desc)) client.connectionSecure = lambda: client.loseConnection() server = mod(server) client = mod(client) def check(ignored, server, client): name = repr([server.supportedCiphers[0], server.supportedMACs[0], server.supportedKeyExchanges[0], server.supportedCompressions[0]]) self.assertEquals(client.errors, []) self.assertEquals(server.errors, [( transport.DISCONNECT_CONNECTION_LOST, "user closed connection")]) if server.supportedCiphers[0] == 'none': self.assertFalse(server.isEncrypted(), name) self.assertFalse(client.isEncrypted(), name) else: self.assertTrue(server.isEncrypted(), name) self.assertTrue(client.isEncrypted(), name) if server.supportedMACs[0] == 'none': self.assertFalse(server.isVerified(), name) self.assertFalse(client.isVerified(), name) else: self.assertTrue(server.isVerified(), name) self.assertTrue(client.isVerified(), name) d = loopback.loopbackAsync(server, client) d.addCallback(check, server, client) return d def test_ciphers(self): """ Test that the client and server play nicely together, in all the various combinations of ciphers. """ deferreds = [] for cipher in transport.SSHTransportBase.supportedCiphers + ['none']: def setCipher(proto): proto.supportedCiphers = [cipher] return proto deferreds.append(self._runClientServer(setCipher)) return defer.DeferredList(deferreds, fireOnOneErrback=True) def test_macs(self): """ Like test_ciphers, but for the various MACs. """ deferreds = [] for mac in transport.SSHTransportBase.supportedMACs + ['none']: def setMAC(proto): proto.supportedMACs = [mac] return proto deferreds.append(self._runClientServer(setMAC)) return defer.DeferredList(deferreds, fireOnOneErrback=True) def test_keyexchanges(self): """ Like test_ciphers, but for the various key exchanges. """ deferreds = [] for kex in transport.SSHTransportBase.supportedKeyExchanges: def setKeyExchange(proto): proto.supportedKeyExchanges = [kex] return proto deferreds.append(self._runClientServer(setKeyExchange)) return defer.DeferredList(deferreds, fireOnOneErrback=True) def test_compressions(self): """ Like test_ciphers, but for the various compressions. """ deferreds = [] for compression in transport.SSHTransportBase.supportedCompressions: def setCompression(proto): proto.supportedCompressions = [compression] return proto deferreds.append(self._runClientServer(setCompression)) return defer.DeferredList(deferreds, fireOnOneErrback=True) class RandomNumberTestCase(unittest.TestCase): """ Tests for the random number generator L{_getRandomNumber} and private key generator L{_generateX}. """ skip = dependencySkip def test_usesSuppliedRandomFunction(self): """ L{_getRandomNumber} returns an integer constructed directly from the bytes returned by the random byte generator passed to it. """ def random(bytes): # The number of bytes requested will be the value of each byte # we return. return chr(bytes) * bytes self.assertEquals( transport._getRandomNumber(random, 32), 4 << 24 | 4 << 16 | 4 << 8 | 4) def test_rejectsNonByteMultiples(self): """ L{_getRandomNumber} raises L{ValueError} if the number of bits passed to L{_getRandomNumber} is not a multiple of 8. """ self.assertRaises( ValueError, transport._getRandomNumber, None, 9) def test_excludesSmall(self): """ If the random byte generator passed to L{_generateX} produces bytes which would result in 0 or 1 being returned, these bytes are discarded and another attempt is made to produce a larger value. """ results = [chr(0), chr(1), chr(127)] def random(bytes): return results.pop(0) * bytes self.assertEquals( transport._generateX(random, 8), 127) def test_excludesLarge(self): """ If the random byte generator passed to L{_generateX} produces bytes which would result in C{(2 ** bits) - 1} being returned, these bytes are discarded and another attempt is made to produce a smaller value. """ results = [chr(255), chr(64)] def random(bytes): return results.pop(0) * bytes self.assertEquals( transport._generateX(random, 8), 64) class OldFactoryTestCase(unittest.TestCase): """ The old C{SSHFactory.getPublicKeys}() returned mappings of key names to strings of key blobs and mappings of key names to PyCrypto key objects from C{SSHFactory.getPrivateKeys}() (they could also be specified with the C{publicKeys} and C{privateKeys} attributes). This is no longer supported by the C{SSHServerTransport}, so we warn the user if they create an old factory. """ if Crypto is None: skip = "cannot run w/o PyCrypto" if pyasn1 is None: skip = "cannot run w/o PyASN1" def test_getPublicKeysWarning(self): """ If the return value of C{getPublicKeys}() isn't a mapping from key names to C{Key} objects, then warn the user and convert the mapping. """ sshFactory = MockOldFactoryPublicKeys() self.assertWarns(DeprecationWarning, "Returning a mapping from strings to strings from" " getPublicKeys()/publicKeys (in %s) is deprecated. Return " "a mapping from strings to Key objects instead." % (qual(MockOldFactoryPublicKeys),), factory.__file__, sshFactory.startFactory) self.assertEquals(sshFactory.publicKeys, MockFactory().getPublicKeys()) def test_getPrivateKeysWarning(self): """ If the return value of C{getPrivateKeys}() isn't a mapping from key names to C{Key} objects, then warn the user and convert the mapping. """ sshFactory = MockOldFactoryPrivateKeys() self.assertWarns(DeprecationWarning, "Returning a mapping from strings to PyCrypto key objects from" " getPrivateKeys()/privateKeys (in %s) is deprecated. Return" " a mapping from strings to Key objects instead." % (qual(MockOldFactoryPrivateKeys),), factory.__file__, sshFactory.startFactory) self.assertEquals(sshFactory.privateKeys, MockFactory().getPrivateKeys()) def test_publicKeysWarning(self): """ If the value of the C{publicKeys} attribute isn't a mapping from key names to C{Key} objects, then warn the user and convert the mapping. """ sshFactory = MockOldFactoryPublicKeys() sshFactory.publicKeys = sshFactory.getPublicKeys() self.assertWarns(DeprecationWarning, "Returning a mapping from strings to strings from" " getPublicKeys()/publicKeys (in %s) is deprecated. Return " "a mapping from strings to Key objects instead." % (qual(MockOldFactoryPublicKeys),), factory.__file__, sshFactory.startFactory) self.assertEquals(sshFactory.publicKeys, MockFactory().getPublicKeys()) def test_privateKeysWarning(self): """ If the return value of C{privateKeys} attribute isn't a mapping from key names to C{Key} objects, then warn the user and convert the mapping. """ sshFactory = MockOldFactoryPrivateKeys() sshFactory.privateKeys = sshFactory.getPrivateKeys() self.assertWarns(DeprecationWarning, "Returning a mapping from strings to PyCrypto key objects from" " getPrivateKeys()/privateKeys (in %s) is deprecated. Return" " a mapping from strings to Key objects instead." % (qual(MockOldFactoryPrivateKeys),), factory.__file__, sshFactory.startFactory) self.assertEquals(sshFactory.privateKeys, MockFactory().getPrivateKeys()) """ Views related to course tabs """ from student.auth import has_course_author_access from util.json_request import expect_json, JsonResponse from django.http import HttpResponseNotFound from django.contrib.auth.decorators import login_required from django.core.exceptions import PermissionDenied from django.views.decorators.csrf import ensure_csrf_cookie from django.views.decorators.http import require_http_methods from edxmako.shortcuts import render_to_response from xmodule.modulestore.django import modulestore from xmodule.modulestore import ModuleStoreEnum from xmodule.tabs import CourseTabList, CourseTab, InvalidTabsException, StaticTab from opaque_keys.edx.keys import CourseKey, UsageKey from ..utils import get_lms_link_for_item __all__ = ['tabs_handler'] @expect_json @login_required @ensure_csrf_cookie @require_http_methods(("GET", "POST", "PUT")) def tabs_handler(request, course_key_string): """ The restful handler for static tabs. GET html: return page for editing static tabs json: not supported PUT or POST json: update the tab order. It is expected that the request body contains a JSON-encoded dict with entry "tabs". The value for "tabs" is an array of tab locators, indicating the desired order of the tabs. Creating a tab, deleting a tab, or changing its contents is not supported through this method. Instead use the general xblock URL (see item.xblock_handler). """ course_key = CourseKey.from_string(course_key_string) if not has_course_author_access(request.user, course_key): raise PermissionDenied() course_item = modulestore().get_course(course_key) if 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'): if request.method == 'GET': raise NotImplementedError('coming soon') else: if 'tabs' in request.json: return reorder_tabs_handler(course_item, request) elif 'tab_id_locator' in request.json: return edit_tab_handler(course_item, request) else: raise NotImplementedError('Creating or changing tab content is not supported.') elif request.method == 'GET': # assume html # get all tabs from the tabs list: static tabs (a.k.a. user-created tabs) and built-in tabs # present in the same order they are displayed in LMS tabs_to_render = [] for tab in CourseTabList.iterate_displayable(course_item, inline_collections=False): if isinstance(tab, StaticTab): # static tab needs its locator information to render itself as an xmodule static_tab_loc = course_key.make_usage_key('static_tab', tab.url_slug) tab.locator = static_tab_loc tabs_to_render.append(tab) return render_to_response('edit-tabs.html', { 'context_course': course_item, 'tabs_to_render': tabs_to_render, 'lms_link': get_lms_link_for_item(course_item.location), }) else: return HttpResponseNotFound() def reorder_tabs_handler(course_item, request): """ Helper function for handling reorder of tabs request """ # Tabs are identified by tab_id or locators. # The locators are used to identify static tabs since they are xmodules. # Although all tabs have tab_ids, newly created static tabs do not know # their tab_ids since the xmodule editor uses only locators to identify new objects. requested_tab_id_locators = request.json['tabs'] # original tab list in original order old_tab_list = course_item.tabs # create a new list in the new order new_tab_list = [] for tab_id_locator in requested_tab_id_locators: tab = get_tab_by_tab_id_locator(old_tab_list, tab_id_locator) if tab is None: return JsonResponse( {"error": "Tab with id_locator '{0}' does not exist.".format(tab_id_locator)}, status=400 ) new_tab_list.append(tab) # the old_tab_list may contain additional tabs that were not rendered in the UI because of # global or course settings. so add those to the end of the list. non_displayed_tabs = set(old_tab_list) - set(new_tab_list) new_tab_list.extend(non_displayed_tabs) # validate the tabs to make sure everything is Ok (e.g., did the client try to reorder unmovable tabs?) try: CourseTabList.validate_tabs(new_tab_list) except InvalidTabsException, exception: return JsonResponse( {"error": "New list of tabs is not valid: {0}.".format(str(exception))}, status=400 ) # persist the new order of the tabs course_item.tabs = new_tab_list modulestore().update_item(course_item, request.user.id) return JsonResponse() def edit_tab_handler(course_item, request): """ Helper function for handling requests to edit settings of a single tab """ # Tabs are identified by tab_id or locator tab_id_locator = request.json['tab_id_locator'] # Find the given tab in the course tab = get_tab_by_tab_id_locator(course_item.tabs, tab_id_locator) if tab is None: return JsonResponse( {"error": "Tab with id_locator '{0}' does not exist.".format(tab_id_locator)}, status=400 ) if 'is_hidden' in request.json: # set the is_hidden attribute on the requested tab tab.is_hidden = request.json['is_hidden'] modulestore().update_item(course_item, request.user.id) else: raise NotImplementedError('Unsupported request to edit tab: {0}'.format(request.json)) return JsonResponse() def get_tab_by_tab_id_locator(tab_list, tab_id_locator): """ Look for a tab with the specified tab_id or locator. Returns the first matching tab. """ if 'tab_id' in tab_id_locator: tab = CourseTabList.get_tab_by_id(tab_list, tab_id_locator['tab_id']) elif 'tab_locator' in tab_id_locator: tab = get_tab_by_locator(tab_list, tab_id_locator['tab_locator']) return tab def get_tab_by_locator(tab_list, usage_key_string): """ Look for a tab with the specified locator. Returns the first matching tab. """ tab_location = UsageKey.from_string(usage_key_string) item = modulestore().get_item(tab_location) static_tab = StaticTab( name=item.display_name, url_slug=item.location.name, ) return CourseTabList.get_tab_by_id(tab_list, static_tab.tab_id) # "primitive" tab edit functions driven by the command line. # These should be replaced/deleted by a more capable GUI someday. # Note that the command line UI identifies the tabs with 1-based # indexing, but this implementation code is standard 0-based. def validate_args(num, tab_type): "Throws for the disallowed cases." if num <= 1: raise ValueError('Tabs 1 and 2 cannot be edited') if tab_type == 'static_tab': raise ValueError('Tabs of type static_tab cannot be edited here (use Studio)') def primitive_delete(course, num): "Deletes the given tab number (0 based)." tabs = course.tabs validate_args(num, tabs[num].get('type', '')) del tabs[num] # Note for future implementations: if you delete a static_tab, then Chris Dodge # points out that there's other stuff to delete beyond this element. # This code happens to not delete static_tab so it doesn't come up. modulestore().update_item(course, ModuleStoreEnum.UserID.primitive_command) def primitive_insert(course, num, tab_type, name): "Inserts a new tab at the given number (0 based)." validate_args(num, tab_type) new_tab = CourseTab.from_json({u'type': unicode(tab_type), u'name': unicode(name)}) tabs = course.tabs tabs.insert(num, new_tab) modulestore().update_item(course, ModuleStoreEnum.UserID.primitive_command) # -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . # ############################################################################## #---------------------------------------------------------- # Init Sales #---------------------------------------------------------- import sale_analytic_plans # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: from .base import * from .constructors.jsmath import Math from .constructors.jsdate import Date from .constructors.jsobject import Object from .constructors.jsfunction import Function from .constructors.jsstring import String from .constructors.jsnumber import Number from .constructors.jsboolean import Boolean from .constructors.jsregexp import RegExp from .constructors.jsarray import Array from .constructors.jsarraybuffer import ArrayBuffer from .constructors.jsint8array import Int8Array from .constructors.jsuint8array import Uint8Array from .constructors.jsuint8clampedarray import Uint8ClampedArray from .constructors.jsint16array import Int16Array from .constructors.jsuint16array import Uint16Array from .constructors.jsint32array import Int32Array from .constructors.jsuint32array import Uint32Array from .constructors.jsfloat32array import Float32Array from .constructors.jsfloat64array import Float64Array from .prototypes.jsjson import JSON from .host.console import console from .host.jseval import Eval from .host.jsfunctions import parseFloat, parseInt, isFinite, \ isNaN, escape, unescape, encodeURI, decodeURI, encodeURIComponent, decodeURIComponent # Now we have all the necessary items to create global environment for script __all__ = [ 'Js', 'PyJsComma', 'PyJsStrictEq', 'PyJsStrictNeq', 'PyJsException', 'PyJsBshift', 'Scope', 'PyExceptionToJs', 'JsToPyException', 'JS_BUILTINS', 'appengine', 'set_global_object', 'JsRegExp', 'PyJsException', 'PyExceptionToJs', 'JsToPyException', 'PyJsSwitchException' ] # these were defined in base.py builtins = ( 'true', 'false', 'null', 'undefined', 'Infinity', 'NaN', 'console', 'String', 'Number', 'Boolean', 'RegExp', 'Math', 'Date', 'Object', 'Function', 'Array', 'Int8Array', 'Uint8Array', 'Uint8ClampedArray', 'Int16Array', 'Uint16Array', 'Int32Array', 'Uint32Array', 'Float32Array', 'Float64Array', 'ArrayBuffer', 'parseFloat', 'parseInt', 'isFinite', 'isNaN', 'escape', 'unescape', 'encodeURI', 'decodeURI', 'encodeURIComponent', 'decodeURIComponent', ) #Array, Function, JSON, Error is done later :) # also some built in functions like eval... def set_global_object(obj): obj.IS_CHILD_SCOPE = False this = This({}) this.own = obj.own this.prototype = obj.prototype PyJs.GlobalObject = this # make this available obj.register('this') obj.put('this', this) # also add window and set it to be a global object for compatibility obj.register('window') obj.put('window', this) scope = dict(zip(builtins, [globals()[e] for e in builtins])) # Now add errors: for name, error in ERRORS.items(): scope[name] = error #add eval scope['eval'] = Eval scope['JSON'] = JSON JS_BUILTINS = dict((k, v) for k, v in scope.items()) # Copyright 2013 Kylin, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.utils.translation import ugettext_lazy as _ from horizon import exceptions from horizon import forms from horizon import workflows from openstack_dashboard.api import base from openstack_dashboard.api import cinder from openstack_dashboard.api import nova from openstack_dashboard.usage import quotas ALL_NOVA_QUOTA_FIELDS = quotas.NOVA_QUOTA_FIELDS + quotas.MISSING_QUOTA_FIELDS class UpdateDefaultQuotasAction(workflows.Action): ifcb_label = _("Injected File Content Bytes") ifpb_label = _("Length of Injected File Path") injected_file_content_bytes = forms.IntegerField(min_value=-1, label=ifcb_label) metadata_items = forms.IntegerField(min_value=-1, label=_("Metadata Items")) ram = forms.IntegerField(min_value=-1, label=_("RAM (MB)")) floating_ips = forms.IntegerField(min_value=-1, label=_("Floating IPs")) key_pairs = forms.IntegerField(min_value=-1, label=_("Key Pairs")) injected_file_path_bytes = forms.IntegerField(min_value=-1, label=ifpb_label) instances = forms.IntegerField(min_value=-1, label=_("Instances")) security_group_rules = forms.IntegerField(min_value=-1, label=_("Security Group Rules")) injected_files = forms.IntegerField(min_value=-1, label=_("Injected Files")) cores = forms.IntegerField(min_value=-1, label=_("VCPUs")) security_groups = forms.IntegerField(min_value=-1, label=_("Security Groups")) gigabytes = forms.IntegerField( min_value=-1, label=_("Total Size of Volumes and Snapshots (GB)")) snapshots = forms.IntegerField(min_value=-1, label=_("Volume Snapshots")) volumes = forms.IntegerField(min_value=-1, label=_("Volumes")) def __init__(self, request, *args, **kwargs): super(UpdateDefaultQuotasAction, self).__init__(request, *args, **kwargs) disabled_quotas = quotas.get_disabled_quotas(request) for field in disabled_quotas: if field in self.fields: self.fields[field].required = False self.fields[field].widget = forms.HiddenInput() class Meta(object): name = _("Default Quotas") slug = 'update_default_quotas' help_text = _("From here you can update the default quotas " "(max limits).") class UpdateDefaultQuotasStep(workflows.Step): action_class = UpdateDefaultQuotasAction contributes = (quotas.QUOTA_FIELDS + quotas.MISSING_QUOTA_FIELDS) class UpdateDefaultQuotas(workflows.Workflow): slug = "update_default_quotas" name = _("Update Default Quotas") finalize_button_name = _("Update Defaults") success_message = _('Default quotas updated.') failure_message = _('Unable to update default quotas.') success_url = "horizon:admin:defaults:index" default_steps = (UpdateDefaultQuotasStep,) def handle(self, request, data): # Update the default quotas. # `fixed_ips` update for quota class is not supported by novaclient nova_data = dict([(key, data[key]) for key in ALL_NOVA_QUOTA_FIELDS if key != 'fixed_ips']) try: nova.default_quota_update(request, **nova_data) if base.is_service_enabled(request, 'volume'): cinder_data = dict([(key, data[key]) for key in quotas.CINDER_QUOTA_FIELDS]) cinder.default_quota_update(request, **cinder_data) except Exception: exceptions.handle(request, _('Unable to update default quotas.')) return True """ The lookup API This demonstrates features of the database API. """ from __future__ import unicode_literals from django.db import models from django.utils import six from django.utils.encoding import python_2_unicode_compatible class Alarm(models.Model): desc = models.CharField(max_length=100) time = models.TimeField() def __str__(self): return '%s (%s)' % (self.time, self.desc) class Author(models.Model): name = models.CharField(max_length=100) class Meta: ordering = ('name', ) @python_2_unicode_compatible class Article(models.Model): headline = models.CharField(max_length=100) pub_date = models.DateTimeField() author = models.ForeignKey(Author, models.SET_NULL, blank=True, null=True) class Meta: ordering = ('-pub_date', 'headline') def __str__(self): return self.headline class Tag(models.Model): articles = models.ManyToManyField(Article) name = models.CharField(max_length=100) class Meta: ordering = ('name', ) @python_2_unicode_compatible class Season(models.Model): year = models.PositiveSmallIntegerField() gt = models.IntegerField(null=True, blank=True) def __str__(self): return six.text_type(self.year) @python_2_unicode_compatible class Game(models.Model): season = models.ForeignKey(Season, models.CASCADE, related_name='games') home = models.CharField(max_length=100) away = models.CharField(max_length=100) def __str__(self): return "%s at %s" % (self.away, self.home) @python_2_unicode_compatible class Player(models.Model): name = models.CharField(max_length=100) games = models.ManyToManyField(Game, related_name='players') def __str__(self): return self.name # To test __search lookup a fulltext index is needed. This # is only available when using MySQL 5.6, or when using MyISAM # tables. As 5.6 isn't common yet, lets use MyISAM table for # testing. The table is manually created by the test method. class MyISAMArticle(models.Model): headline = models.CharField(max_length=100) class Meta: db_table = 'myisam_article' managed = False #!/usr/bin/python import sqlite3 class Database: def __init__(self): self.connect() def connect(self): self.conn = sqlite3.connect('lifebot.db') def get_cursor(self): # A bit weird for now but trying to figure out SQLite try: return self.conn.cursor() except Exception, e: self.connect() return self.conn.cursor() def create_table(self, query): c = self.get_cursor() c.execute(query) self.conn.commit() self.conn.close() def get(self, query, args = None): if args is None: args = tuple() c = self.get_cursor() c.execute(query, args) return c.fetchone() def get_all(self, query, args = None): if args is None: args = tuple() c = self.get_cursor() c.execute(query, args) return c.fetchall() def insert(self, query, args = None): if args is None: args = tuple() c = self.get_cursor() c.execute(query, args) self.conn.commit() return c.lastrowid """ Django settings for cattrack project. Generated by 'django-admin startproject' using Django 1.9.5. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import datetime import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '76kw_-twd)u$+_ltr&r_%z&cw^7yepzg@$rm&d%lvox7lb7ra&' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'django_filters', 'corsheaders', 'ctrack', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'corsheaders.middleware.CorsMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'cattrack.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'cattrack.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = [ os.path.join(BASE_DIR, "client/dist/prod"), os.path.join(BASE_DIR, "static"), ] CTRACK_CATEGORISER = 'SklearnCategoriser' CTRACK_CATEGORISER_FILE = 'categoriser.pkl' REST_FRAMEWORK = { 'DEFAULT_AUTHENTICATION_CLASSES': [ 'rest_framework.authentication.SessionAuthentication', 'rest_framework.authentication.BasicAuthentication', 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', ], 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.IsAuthenticated' ], 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination', 'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',), } JWT_AUTH = { 'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=600), 'JWT_ALLOW_REFRESH': True, } CORS_ORIGIN_ALLOW_ALL = True # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # import time from tests.unit import unittest from boto.dynamodb.layer2 import Layer2 from boto.dynamodb.table import Table from boto.dynamodb.schema import Schema class TestDynamoDBTable(unittest.TestCase): dynamodb = True def setUp(self): self.dynamodb = Layer2() self.schema = Schema.create(('foo', 'N'), ('bar', 'S')) self.table_name = 'testtable%s' % int(time.time()) def create_table(self, table_name, schema, read_units, write_units): result = self.dynamodb.create_table(table_name, schema, read_units, write_units) self.addCleanup(self.dynamodb.delete_table, result) return result def assertAllEqual(self, *items): first = items[0] for item in items[1:]: self.assertEqual(first, item) def test_table_retrieval_parity(self): created_table = self.dynamodb.create_table( self.table_name, self.schema, 1, 1) created_table.refresh(wait_for_active=True) retrieved_table = self.dynamodb.get_table(self.table_name) constructed_table = self.dynamodb.table_from_schema(self.table_name, self.schema) # All three tables should have the same name # and schema attributes. self.assertAllEqual(created_table.name, retrieved_table.name, constructed_table.name) self.assertAllEqual(created_table.schema, retrieved_table.schema, constructed_table.schema) # However for create_time, status, read/write units, # only the created/retrieved table will have equal # values. self.assertEqual(created_table.create_time, retrieved_table.create_time) self.assertEqual(created_table.status, retrieved_table.status) self.assertEqual(created_table.read_units, retrieved_table.read_units) self.assertEqual(created_table.write_units, retrieved_table.write_units) # The constructed table will have values of None. self.assertIsNone(constructed_table.create_time) self.assertIsNone(constructed_table.status) self.assertIsNone(constructed_table.read_units) self.assertIsNone(constructed_table.write_units) # -*- coding: utf-8 -*- ## ## $Rev: 137 $ ## $Release: 0.6.2 $ ## copyright(c) 2007-2008 kuwata-lab.com all rights reserved. ## ## Permission is hereby granted, free of charge, to any person obtaining ## a copy of this software and associated documentation files (the ## "Software"), to deal in the Software without restriction, including ## without limitation the rights to use, copy, modify, merge, publish, ## distribute, sublicense, and/or sell copies of the Software, and to ## permit persons to whom the Software is furnished to do so, subject to ## the following conditions: ## ## The above copyright notice and this permission notice shall be ## included in all copies or substantial portions of the Software. ## ## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, ## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF ## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND ## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE ## LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION ## OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION ## WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ## """Very fast and light-weight template engine based embedded Python. pyTenjin is similar to PHP or eRuby (embedded Ruby). * '' represents python statement. * '#{...}' represents python expression. * '${...}' represents python expression with escaping. And it provides the following features. * Layout template and nested template * Including other template files * Template caching * Capturing See help of tenjin.Template and tenjin.Engine for details. """ __revision__ = "$Rev: 137 $"[6:-2] __release__ = "0.6.2" __license__ = "MIT License" __all__ = ['Template', 'Engine', 'helpers', 'html', ] import re, sys, os, time, marshal ## ## utilities ## try: import fcntl def _lock_file(file, content): fcntl.flock(file.fileno(), fcntl.LOCK_EX) except ImportError, ex: try: import msvcrt def _lock_file(file, content): msvcrt.locking(file.fileno(), msvcrt.LK_LOCK, len(content)) except ImportError, ex: def _lock_file(file, content): pass def _write_file_with_lock(filename, content): f = None try: f = open(filename, 'wb') _lock_file(f, content) f.write(content) finally: if f: f.close() def _create_module(module_name): """ex. mod = _create_module('tenjin.util')""" import new mod = new.module(module_name.split('.')[-1]) sys.modules[module_name] = mod return mod ## ## helper method's module ## def _create_helpers_module(): def to_str(val): """Convert value into string. Return '' if val is None. ex. >>> to_str(None) '' >>> to_str("foo") 'foo' >>> to_str(u"\u65e5\u672c\u8a9e") u'\u65e5\u672c\u8a9e' >>> to_str(123) '123' """ if val is None: return '' if isinstance(val, str): return val if isinstance(val, unicode): return val return str(val) def generate_tostrfunc(encoding): """Generate 'to_str' function which encodes unicode to str. ex. import tenjin from tenjin.helpers import escape to_str = tenjin.generate_tostrfunc('utf-8') engine = tenjin.Engine() context = { 'items': [u'AAA', u'BBB', u'CCC'] } output = engine.render('example.pyhtml') print output """ def to_str(val): if val is None: return '' if isinstance(val, str): return val if isinstance(val, unicode): return val.encode(encoding) return str(val) return to_str def echo(string): """add string value into _buf. this is equivarent to '#{string}'.""" frame = sys._getframe(1) context = frame.f_locals context['_buf'].append(string) def start_capture(varname=None): """ start capturing with name. ex. list.rbhtml ex. layout.rbhtml Capture Example #{itemlist} """ frame = sys._getframe(1) context = frame.f_locals context['_buf_tmp'] = context['_buf'] context['_capture_varname'] = varname context['_buf'] = [] def stop_capture(store_to_context=True): """ stop capturing and return the result of capturing. if store_to_context is True then the result is stored into _context[varname]. """ frame = sys._getframe(1) context = frame.f_locals result = ''.join(context['_buf']) context['_buf'] = context.pop('_buf_tmp') varname = context.pop('_capture_varname') if varname: context[varname] = result if store_to_context: context['_context'][varname] = result return result def captured_as(name): """ helper method for layout template. if captured string is found then append it to _buf and return True, else return False. """ frame = sys._getframe(1) context = frame.f_locals if context.has_key(name): _buf = context['_buf'] _buf.append(context[name]) return True return False def _p(arg): """ex. '/show/'+_p("item['id']") => "/show/#{item['id']}" """ return '<`#%s#`>' % arg # decoded into #{...} by preprocessor def _P(arg): """ex. '%s' % _P("item['id']") => "${item['id']}" """ return '<`$%s$`>' % arg # decoded into ${...} by preprocessor def _decode_params(s): """decode <`#...#`> and <`$...$`> into #{...} and ${...}""" from urllib import unquote dct = { 'lt':'<', 'gt':'>', 'amp':'&', 'quot':'"', '#039':"'", } def unescape(s): #return s.replace('<', '<').replace('>', '>').replace('"', '"').replace(''', "'").replace('&', '&') return re.sub(r'&(lt|gt|quot|amp|#039);', lambda m: dct[m.group(1)], s) s = re.sub(r'%3C%60%23(.*?)%23%60%3E', lambda m: '#{%s}' % unquote(m.group(1)), s) s = re.sub(r'%3C%60%24(.*?)%24%60%3E', lambda m: '${%s}' % unquote(m.group(1)), s) s = re.sub(r'<`#(.*?)#`>', lambda m: '#{%s}' % unescape(m.group(1)), s) s = re.sub(r'<`\$(.*?)\$`>', lambda m: '${%s}' % unescape(m.group(1)), s) s = re.sub(r'<`#(.*?)#`>', r'#{\1}', s) s = re.sub(r'<`\$(.*?)\$`>', r'${\1}', s) return s mod = _create_module('tenjin.helpers') mod.to_str = to_str mod.generate_tostrfunc = generate_tostrfunc mod.echo = echo mod.start_capture = start_capture mod.stop_capture = stop_capture mod.captured_as = captured_as mod._p = _p mod._P = _P mod._decode_params = _decode_params mod.__all__ = ['escape', 'to_str', 'echo', 'generate_tostrfunc', 'start_capture', 'stop_capture', 'captured_as', '_p', '_P', '_decode_params', ] return mod helpers = _create_helpers_module() del _create_helpers_module generate_tostrfunc = helpers.generate_tostrfunc ## ## module for html ## def _create_html_module(): to_str = helpers.to_str _escape_table = { '&': '&', '<': '<', '>': '>', '"': '"' } _escape_pattern = re.compile(r'[&<>"]') _escape_callable = lambda m: _escape_table[m.group(0)] def escape_xml(s): """Escape '&', '<', '>', '"' into '&', '<', '>', '"'. """ return _escape_pattern.sub(_escape_callable, s) #return s.replace('&','&').replace('<','<').replace('>','>').replace('"','"') def tagattr(name, expr, value=None, escape=True): """return empty string when expr is false value, ' name="value"' when value is specified, or ' name="expr"' when value is None. ex. >>> tagattr('size', 20) ' size="20"' >>> tagattr('size', 0) '' >>> tagattr('checked', True, 'checked') ' checked="checked"' >>> tagattr('checked', False, 'checked') '' """ if not expr: return '' if value is None: value = to_str(expr) else: value = to_str(value) if escape: value = escape_xml(value) return ' %s="%s"' % (name, value) def checked(expr): """return ' checked="checked"' if expr is true.""" return expr and ' checked="checked"' or '' #return attr('checked', expr, 'checked') def selected(expr): """return ' selected="selected"' if expr is true.""" return expr and ' selected="selected"' or '' #return attr('selected', expr, 'selected') def disabled(expr): """return ' disabled="disabled"' if expr is true.""" return expr and ' disabled="disabled"' or '' #return attr('disabled, expr, 'disabled') def nl2br(text): """replace "\n" to "
\n" and return it.""" if not text: return '' return text.replace('\n', '
\n') def text2html(text): """(experimental) escape xml characters, replace "\n" to "
\n", and return it.""" if not text: return '' return nl2br(escape_xml(text).replace(' ', '  ')) mod = _create_module('tenjin.helpers.html') mod._escape_table = _escape_table mod.escape_xml = escape_xml mod.escape = escape_xml mod.tagattr = tagattr mod.checked = checked mod.selected = selected mod.disabled = disabled mod.nl2br = nl2br mod.text2html = text2html return mod helpers.html = _create_html_module() del _create_html_module helpers.escape = helpers.html.escape_xml ## ## Template class ## class Template(object): """Convert and evaluate embedded python string. Notation: * '' means python statement code. * '#{...}' means python expression code. * '${...}' means python escaped expression code. ex. example.pyhtml
${item}
ex. >>> filename = 'example.pyhtml' >>> import tenjin >>> from tenjin.helpers import escape, to_str >>> template = tenjin.Template(filename) >>> script = template.script >>> ## or >>> #template = tenjin.Template() >>> #script = template.convert_file(filename) >>> ## or >>> #template = tenjin.Template() >>> #input = open(filename).read() >>> #script = template.convert(input, filename) # filename is optional >>> print script >>> context = {'items': ['','bar&bar','"baz"']} >>> output = template.render(context) >>> print output
<foo>
bar&bar
"baz"
""" ## default value of attributes filename = None encoding = None escapefunc = 'escape' tostrfunc = 'to_str' indent = 4 preamble = None postamble = None # "_buf = []" smarttrim = None # "print ''.join(_buf)" args = None def __init__(self, filename=None, encoding=None, escapefunc=None, tostrfunc=None, indent=None, preamble=None, postamble=None, smarttrim=None): """Initailizer of Template class. filename:str (=None) Filename to convert (optional). If None, no convert. encoding:str (=None) Encoding name. If specified, template string is converted into unicode object internally. Template.render() returns str object if encoding is None, else returns unicode object if encoding name is specified. escapefunc:str (='escape') Escape function name. tostrfunc:str (='to_str') 'to_str' function name. indent:int (=4) Indent width. preamble:str or bool (=None) Preamble string which is inserted into python code. If true, '_buf = []' is used insated. postamble:str or bool (=None) Postamble string which is appended to python code. If true, 'print "".join(_buf)' is used instead. smarttrim:bool (=None) If True then "
\\n#{_context}\\n
" is parsed as "
\\n#{_context}
". """ if encoding is not None: self.encoding = encoding if escapefunc is not None: self.escapefunc = escapefunc if tostrfunc is not None: self.tostrfunc = tostrfunc if indent is not None: self.indent = indent if preamble is not None: self.preamble = preamble if postamble is not None: self.postamble = postamble if smarttrim is not None: self.smarttrim = smarttrim # if preamble is True: self.preamble = "_buf = []" if postamble is True: self.postamble = "print ''.join(_buf)" if filename: self.convert_file(filename) else: self._reset() def _reset(self, input=None, filename=None): self._spaces = '' self.script = None self.bytecode = None self.input = input self.filename = filename if input != None: i = input.find("\n") if i < 0: self.newline = "\n" # or None elif len(input) >= 2 and input[i-1] == "\r": self.newline = "\r\n" else: self.newline = "\n" def before_convert(self, buf): #buf.append('_buf = []; ') if self.preamble: buf.append(self.preamble) buf.append(self.input.startswith('>> import tenjin >>> from tenjin.helpers import escape, to_str >>> template = tenjin.Template() >>> filename = 'example.html' >>> input = open(filename).read() >>> script = template.convert(input, filename) # filename is optional >>> print script """ if self.encoding and isinstance(input, str): input = input.decode(self.encoding) self._reset(input, filename) buf = [] self.before_convert(buf) self.parse_stmts(buf, input) self.after_convert(buf) script = ''.join(buf) self.script = script return script def compile_stmt_pattern(pi): return re.compile(r'<\?%s( |\t|\r?\n)(.*?) ?\?>([ \t]*\r?\n)?' % pi, re.S) STMT_PATTERN = compile_stmt_pattern('py') compile_stmt_pattern = staticmethod(compile_stmt_pattern) def stmt_pattern(self): return Template.STMT_PATTERN def parse_stmts(self, buf, input): if not input: return rexp = self.stmt_pattern() is_bol = True index = 0 for m in rexp.finditer(input): mspace, code, rspace = m.groups() #mspace, close, rspace = m.groups() #code = input[m.start()+4+len(mspace):m.end()-len(close)-(rspace and len(rspace) or 0)] text = input[index:m.start()] index = m.end() ## detect spaces at beginning of line lspace = None if text == '': if is_bol: lspace = '' elif text[-1] == '\n': lspace = '' else: rindex = text.rfind('\n') if rindex < 0: if is_bol and text.isspace(): lspace = text text = '' else: s = text[rindex+1:] if s.isspace(): lspace = s text = text[:rindex+1] #is_bol = rspace is not None ## add text, spaces, and statement self.parse_exprs(buf, text, is_bol) is_bol = rspace is not None if lspace: buf.append(lspace) if mspace != " ": #buf.append(mspace) buf.append(mspace == "\t" and "\t" or "\n") # don't append "\r\n"! if code: code = self.statement_hook(code) self.add_stmt(buf, code) self._set_spaces(code, lspace, mspace) if rspace: #buf.append(rspace) buf.append("\n") # don't append "\r\n"! rest = input[index:] if rest: self.parse_exprs(buf, rest) def statement_hook(self, stmt): """expand macros and parse '#@ARGS' in a statement.""" ## macro expantion #macro_pattern = r'^(\s*)(\w+)\((.*?)\);?\s*$'; #m = re.match(macro_pattern, stmt) #if m: # lspace, name, arg = m.group(1), m.group(2), m.group(3) # handler = self.get_macro_handler(name) # return handler is None and stmt or lspace + handler(arg) ## arguments declaration if self.args is None: args_pattern = r'^ *#@ARGS(?:[ \t]+(.*?))?$' m = re.match(args_pattern, stmt) if m: arr = (m.group(1) or '').split(',') args = []; declares = [] for s in arr: arg = s.strip() if not s: continue if not re.match('^[a-zA-Z_]\w*$', arg): raise ValueError("%s: invalid template argument." % arg) args.append(arg) declares.append("%s = _context.get('%s'); " % (arg, arg)) self.args = args return ''.join(declares) ## return stmt #MACRO_HANDLER_TABLE = { # "echo": # lambda arg: "_buf.append(%s); " % arg, # "include": # lambda arg: "_buf.append(_context['_engine'].render(%s, _context, layout=False)); " % arg, # "start_capture": # lambda arg: "_buf_bkup = _buf; _buf = []; _capture_varname = %s; " % arg, # "stop_capture": # lambda arg: "_context[_capture_varname] = ''.join(_buf); _buf = _buf_bkup; ", # "start_placeholder": # lambda arg: "if (_context[%s]) _buf.push(_context[%s]); else:" % (arg, arg), # "stop_placeholder": # lambda arg: "#endif", #} # #def get_macro_handler(name): # return MACRO_HANDLER_TABLE.get(name) EXPR_PATTERN = re.compile(r'([#$])\{(.*?)\}', re.S) def expr_pattern(self): return Template.EXPR_PATTERN def get_expr_and_escapeflag(self, match): return match.group(2), match.group(1) == '$' def parse_exprs(self, buf, input, is_bol=False): if not input: return if self._spaces: buf.append(self._spaces) self.start_text_part(buf) rexp = self.expr_pattern() smarttrim = self.smarttrim nl = self.newline nl_len = len(nl) pos = 0 for m in rexp.finditer(input): start = m.start() text = input[pos:start] pos = m.end() expr, flag_escape = self.get_expr_and_escapeflag(m) # if text: self.add_text(buf, text) #if text[-1] == "\n": # buf.append("\n") # if self._spaces: # buf.append(self._spaces) self.add_expr(buf, expr, flag_escape) # if smarttrim: flag_bol = text.endswith(nl) or not text and (start > 0 or is_bol) if flag_bol and not flag_escape and input[pos:pos+nl_len] == nl: pos += nl_len buf.append("\n") if smarttrim: if buf and buf[-1] == "\n": buf.pop() rest = input[pos:] if rest: self.add_text(buf, rest, True) self.stop_text_part(buf) if input[-1] == '\n': buf.append("\n") def start_text_part(self, buf): buf.append("_buf.extend((") def stop_text_part(self, buf): buf.append("));") _quote_rexp = re.compile(r"(['\\\\])") def add_text(self, buf, text, encode_newline=False): if not text: return; if self.encoding: buf.append("u'''") else: buf.append("'''") #text = re.sub(r"(['\\\\])", r"\\\1", text) text = Template._quote_rexp.sub(r"\\\1", text) if not encode_newline or text[-1] != "\n": buf.append(text) buf.append("''', ") elif len(text) >= 2 and text[-2] == "\r": buf.append(text[0:-2]) buf.append("\\r\\n''', ") else: buf.append(text[0:-1]) buf.append("\\n''', ") _add_text = add_text def add_expr(self, buf, code, flag_escape=None): if not code or code.isspace(): return if flag_escape is None: buf.append(code); buf.append(", "); elif flag_escape is False: buf.extend((self.tostrfunc, "(", code, "), ")) else: buf.extend((self.escapefunc, "(", self.tostrfunc, "(", code, ")), ")) def add_stmt(self, buf, code): if self.newline == "\r\n": code = code.replace("\r\n", "\n") buf.append(code) #if code[-1] != '\n': # buf.append(self.newline) def _set_spaces(self, code, lspace, mspace): if lspace: if mspace == " ": code = lspace + code elif mspace == "\t": code = lspace + "\t" + code #i = code.rstrip().rfind("\n") #if i < 0: # i == -1 # i = 0 #else: # i += 1 i = code.rstrip().rfind("\n") + 1 indent = 0 n = len(code) ch = None while i < n: ch = code[i] if ch == " ": indent += 1 elif ch == "\t": indent += 8 else: break i += 1 if ch: if code.rstrip()[-1] == ':': indent += self.indent self._spaces = ' ' * indent def render(self, context=None, globals=None, _buf=None): """Evaluate python code with context dictionary. If _buf is None then return the result of evaluation as str, else return None. context:dict (=None) Context object to evaluate. If None then new dict is created. globals:dict (=None) Global object. If None then globals() is used. _buf:list (=None) If None then new list is created. ex. >>> import tenjin >>> from tenjin.helpers import escape, to_str >>> template = tenjin.Template('example.pyhtml') >>> context = {'items': ['foo','bar','baz'], 'title': 'example'} >>> output = template.evaluate(context) >>> print output, """ if context is None: locals = context = {} elif self.args is None: locals = context.copy() else: locals = {} if context.has_key('_engine'): context.get('_engine').hook_context(locals) locals['_context'] = context if globals is None: globals = sys._getframe(1).f_globals bufarg = _buf if _buf is None: _buf = [] locals['_buf'] = _buf if not self.bytecode: self.compile() exec self.bytecode in globals, locals if bufarg is None: s = ''.join(_buf) #if self.encoding: # s = s.encode(self.encoding) return s else: return None def compile(self): """compile self.script into self.bytecode""" self.bytecode = compile(self.script, self.filename or '(tenjin)', 'exec') ## ## preprocessor class ## class Preprocessor(Template): STMT_PATTERN = Template.compile_stmt_pattern('PY') def stmt_pattern(self): return Preprocessor.STMT_PATTERN EXPR_PATTERN = re.compile(r'([#$])\{\{(.*?)\}\}', re.S) def expr_pattern(self): return Preprocessor.EXPR_PATTERN #def get_expr_and_escapeflag(self, match): # return match.group(2), match.group(1) == '$' def add_expr(self, buf, code, flag_escape=None): if not code or code.isspace(): return code = "_decode_params(%s)" % code Template.add_expr(self, buf, code, flag_escape) ## ## template engine class ## class Engine(object): """Engine class of templates. ex. >>> ## create engine >>> import tenjin >>> from tenjin.helpers import * >>> prefix = 'user_' >>> postfix = '.pyhtml' >>> layout = 'layout.pyhtml' >>> path = ['views'] >>> engine = tenjin.Engine(prefix=prefix, postfix=postfix, ... layout=layout, path=path, encoding='utf-8') >>> ## evaluate template(='views/user_create.pyhtml') with context object. >>> ## (layout template (='views/layout.pyhtml') are used.) >>> context = {'title': 'Create User', 'user': user} >>> print engine.render(':create', context) >>> ## evaluate template without layout template. >>> print engine.render(':create', context, layout=False) In template file, the followings are available. * include(template_name, append_to_buf=True) : Include other template * _content : Result of evaluating template (available only in layout file). ex. file 'layout.pyhtml':
#{_content}
""" ## default value of attributes prefix = '' postfix = '' layout = None templateclass = Template path = None cache = False preprocess = False def __init__(self, prefix=None, postfix=None, layout=None, path=None, cache=None, preprocess=None, templateclass=None, **kwargs): """Initializer of Engine class. prefix:str (='') Prefix string used to convert template short name to template filename. postfix:str (='') Postfix string used to convert template short name to template filename. layout:str (=None) Default layout template name. path:list of str(=None) List of directory names which contain template files. cache:bool (=True) Cache converted python code into file. preprocess:bool(=False) Activate preprocessing or not. templateclass:class (=Template) Template class which engine creates automatically. kwargs:dict Options for Template class constructor. See document of Template.__init__() for details. """ if prefix: self.prefix = prefix if postfix: self.postfix = postfix if layout: self.layout = layout if templateclass: self.templateclass = templateclass if path is not None: self.path = path if cache is not None: self.cache = cache if preprocess is not None: self.preprocess = preprocess self.kwargs = kwargs self.templates = {} # template_name => Template object def to_filename(self, template_name): """Convert template short name to filename. ex. >>> engine = tenjin.Engine(prefix='user_', postfix='.pyhtml') >>> engine.to_filename('list') 'list' >>> engine.to_filename(':list') 'user_list.pyhtml' """ if template_name[0] == ':' : return self.prefix + template_name[1:] + self.postfix return template_name def find_template_file(self, template_name): """Find template file and return it's filename. When template file is not found, IOError is raised. """ filename = self.to_filename(template_name) if self.path: for dirname in self.path: filepath = dirname + os.path.sep + filename if os.path.isfile(filepath): return filepath else: if os.path.isfile(filename): return filename raise IOError('%s: filename not found (path=%s).' % (filename, repr(self.path))) def register_template(self, template_name, template): """Register an template object.""" if not hasattr(template, 'timestamp'): template.timestamp = None # or time.time() self.templates[template_name] = template def load_cachefile(self, cache_filename, template): """load marshaled cache file""" #template.bytecode = marshal.load(open(cache_filename, 'rb')) dct = marshal.load(open(cache_filename, 'rb')) template.args = dct['args'] template.script = dct['script'] template.bytecode = dct['bytecode'] def _load_cachefile_for_script(self, cache_filename, template): s = open(cache_filename).read() if s.startswith('#@ARGS '): pos = s.find("\n") args_str = s[len('#@ARGS '):pos] template.args = args_str and args_str.split(', ') or [] s = s[pos+1:] else: template.args = None if template.encoding: #s = s.decode(template.encoding) s = s.decode('utf-8') template.script = s template.compile() def store_cachefile(self, cache_filename, template): """store template into marshal file""" dct = { 'args': template.args, 'script': template.script, 'bytecode': template.bytecode } _write_file_with_lock(cache_filename, marshal.dumps(dct)) def _store_cachefile_for_script(self, cache_filename, template): s = template.script if template.encoding and isinstance(s, unicode): s = s.encode(template.encoding) #s = s.encode('utf-8') if template.args is not None: s = "#@ARGS %s\n%s" % (', '.join(template.args), s) _write_file_with_lock(cache_filename, s) def cachename(self, filename): return os.path.join(os.path.expanduser('~'),'.rst2pdf', os.path.basename(filename) + '.cache') def create_template(self, filename, _context, _globals): """Read template file and create template object.""" template = self.templateclass(None, **self.kwargs) template.timestamp = time.time() cache_filename = self.cachename(filename) getmtime = os.path.getmtime if not self.cache: input = self.read_template_file(filename, _context, _globals) template.convert(input, filename) #template.compile() elif os.path.exists(cache_filename) and getmtime(cache_filename) >= getmtime(filename): #Tenjin.logger.info("** debug: %s: cache found." % filename) template.filename = filename self.load_cachefile(cache_filename, template) if template.bytecode is None: template.compile() else: #Tenjin.logger.info("** debug: %s: cache not found." % filename) input = self.read_template_file(filename, _context, _globals) template.convert(input, filename) template.compile() self.store_cachefile(cache_filename, template) return template def read_template_file(self, filename, _context, _globals): if not self.preprocess: return open(filename).read() if _context is None: _context = {} if not _context.has_key('_engine'): self.hook_context(_context) if _globals is None: _globals = sys._getframe(2).f_globals preprocessor = Preprocessor(filename) return preprocessor.render(_context, globals=_globals) def get_template(self, template_name, _context=None, _globals=None): """Return template object. If template object has not registered, template engine creates and registers template object automatically. """ template = self.templates.get(template_name) t = template if t is None or t.timestamp and t.filename and t.timestamp < os.path.getmtime(t.filename): filename = self.find_template_file(template_name) # context and globals are passed only for preprocessing if _globals is None: _globals = sys._getframe(1).f_globals template = self.create_template(filename, _context, _globals) self.register_template(template_name, template) return template def include(self, template_name, append_to_buf=True): """Evaluate template using current local variables as context. template_name:str Filename (ex. 'user_list.pyhtml') or short name (ex. ':list') of template. append_to_buf:boolean (=True) If True then append output into _buf and return None, else return stirng output. ex. #{include('file.pyhtml', False)} """ frame = sys._getframe(1) locals = frame.f_locals globals = frame.f_globals assert locals.has_key('_context') context = locals['_context'] # context and globals are passed to get_template() only for preprocessing. template = self.get_template(template_name, context, globals) if append_to_buf: _buf = locals['_buf'] else: _buf = None return template.render(context, globals, _buf=_buf) def render(self, template_name, context=None, globals=None, layout=True): """Evaluate template with layout file and return result of evaluation. template_name:str Filename (ex. 'user_list.pyhtml') or short name (ex. ':list') of template. context:dict (=None) Context object to evaluate. If None then new dict is used. globals:dict (=None) Global context to evaluate. If None then globals() is used. layout:str or Bool(=True) If True, the default layout name specified in constructor is used. If False, no layout template is used. If str, it is regarded as layout template name. If temlate object related with the 'template_name' argument is not exist, engine generates a template object and register it automatically. """ if context is None: context = {} if globals is None: globals = sys._getframe(1).f_globals self.hook_context(context) while True: # context and globals are passed to get_template() only for preprocessing template = self.get_template(template_name, context, globals) content = template.render(context, globals) layout = context.pop('_layout', layout) if layout is True or layout is None: layout = self.layout if not layout: break template_name = layout layout = False context['_content'] = content context.pop('_content', None) return content def hook_context(self, context): context['_engine'] = self #context['render'] = self.render context['include'] = self.include """Cache object The Cache object is used to manage a set of cache files and their associated backend. The backends can be rotated on the fly by specifying an alternate type when used. Advanced users can add new backends in beaker.backends """ import warnings import beaker.container as container import beaker.util as util from beaker.exceptions import BeakerException, InvalidCacheBackendError import beaker.ext.memcached as memcached import beaker.ext.database as database import beaker.ext.sqla as sqla import beaker.ext.google as google # Initialize the basic available backends clsmap = { 'memory':container.MemoryNamespaceManager, 'dbm':container.DBMNamespaceManager, 'file':container.FileNamespaceManager, 'ext:memcached':memcached.MemcachedNamespaceManager, 'ext:database':database.DatabaseNamespaceManager, 'ext:sqla': sqla.SqlaNamespaceManager, 'ext:google': google.GoogleNamespaceManager, } # Initialize the cache region dict cache_regions = {} cache_managers = {} try: import pkg_resources # Load up the additional entry point defined backends for entry_point in pkg_resources.iter_entry_points('beaker.backends'): try: NamespaceManager = entry_point.load() name = entry_point.name if name in clsmap: raise BeakerException("NamespaceManager name conflict,'%s' " "already loaded" % name) clsmap[name] = NamespaceManager except (InvalidCacheBackendError, SyntaxError): # Ignore invalid backends pass except: import sys from pkg_resources import DistributionNotFound # Warn when there's a problem loading a NamespaceManager if not isinstance(sys.exc_info()[1], DistributionNotFound): import traceback from StringIO import StringIO tb = StringIO() traceback.print_exc(file=tb) warnings.warn("Unable to load NamespaceManager entry point: '%s': " "%s" % (entry_point, tb.getvalue()), RuntimeWarning, 2) except ImportError: pass def cache_region(region, *deco_args): """Decorate a function to cache itself using a cache region The region decorator requires arguments if there are more than 2 of the same named function, in the same module. This is because the namespace used for the functions cache is based on the functions name and the module. Example:: # Add cache region settings to beaker: beaker.cache.cache_regions.update(dict_of_config_region_options)) @cache_region('short_term', 'some_data') def populate_things(search_term, limit, offset): return load_the_data(search_term, limit, offset) return load('rabbits', 20, 0) .. note:: The function being decorated must only be called with positional arguments. """ cache = [None] def decorate(func): namespace = util.func_namespace(func) def cached(*args): reg = cache_regions[region] if not reg.get('enabled', True): return func(*args) if not cache[0]: if region not in cache_regions: raise BeakerException('Cache region not configured: %s' % region) cache[0] = Cache._get_cache(namespace, reg) cache_key = " ".join(map(str, deco_args + args)) def go(): return func(*args) return cache[0].get_value(cache_key, createfunc=go) cached._arg_namespace = namespace cached._arg_region = region return cached return decorate def region_invalidate(namespace, region, *args): """Invalidate a cache region namespace or decorated function This function only invalidates cache spaces created with the cache_region decorator. :param namespace: Either the namespace of the result to invalidate, or the cached function reference :param region: The region the function was cached to. If the function was cached to a single region then this argument can be None :param args: Arguments that were used to differentiate the cached function as well as the arguments passed to the decorated function Example:: # Add cache region settings to beaker: beaker.cache.cache_regions.update(dict_of_config_region_options)) def populate_things(invalidate=False): @cache_region('short_term', 'some_data') def load(search_term, limit, offset): return load_the_data(search_term, limit, offset) # If the results should be invalidated first if invalidate: region_invalidate(load, None, 'some_data', 'rabbits', 20, 0) return load('rabbits', 20, 0) """ if callable(namespace): if not region: region = namespace._arg_region namespace = namespace._arg_namespace if not region: raise BeakerException("Region or callable function " "namespace is required") else: region = cache_regions[region] cache = Cache._get_cache(namespace, region) cache_key = " ".join(str(x) for x in args) cache.remove_value(cache_key) class Cache(object): """Front-end to the containment API implementing a data cache. :param namespace: the namespace of this Cache :param type: type of cache to use :param expire: seconds to keep cached data :param expiretime: seconds to keep cached data (legacy support) :param starttime: time when cache was cache was """ def __init__(self, namespace, type='memory', expiretime=None, starttime=None, expire=None, **nsargs): try: cls = clsmap[type] if isinstance(cls, InvalidCacheBackendError): raise cls except KeyError: raise TypeError("Unknown cache implementation %r" % type) self.namespace = cls(namespace, **nsargs) self.expiretime = expiretime or expire self.starttime = starttime self.nsargs = nsargs @classmethod def _get_cache(cls, namespace, kw): key = namespace + str(kw) try: return cache_managers[key] except KeyError: cache_managers[key] = cache = cls(namespace, **kw) return cache def put(self, key, value, **kw): self._get_value(key, **kw).set_value(value) set_value = put def get(self, key, **kw): """Retrieve a cached value from the container""" return self._get_value(key, **kw).get_value() get_value = get def remove_value(self, key, **kw): mycontainer = self._get_value(key, **kw) if mycontainer.has_current_value(): mycontainer.clear_value() remove = remove_value def _get_value(self, key, **kw): if isinstance(key, unicode): key = key.encode('ascii', 'backslashreplace') if 'type' in kw: return self._legacy_get_value(key, **kw) kw.setdefault('expiretime', self.expiretime) kw.setdefault('starttime', self.starttime) return container.Value(key, self.namespace, **kw) @util.deprecated("Specifying a " "'type' and other namespace configuration with cache.get()/put()/etc. " "is deprecated. Specify 'type' and other namespace configuration to " "cache_manager.get_cache() and/or the Cache constructor instead.") def _legacy_get_value(self, key, type, **kw): expiretime = kw.pop('expiretime', self.expiretime) starttime = kw.pop('starttime', None) createfunc = kw.pop('createfunc', None) kwargs = self.nsargs.copy() kwargs.update(kw) c = Cache(self.namespace.namespace, type=type, **kwargs) return c._get_value(key, expiretime=expiretime, createfunc=createfunc, starttime=starttime) def clear(self): """Clear all the values from the namespace""" self.namespace.remove() # dict interface def __getitem__(self, key): return self.get(key) def __contains__(self, key): return self._get_value(key).has_current_value() def has_key(self, key): return key in self def __delitem__(self, key): self.remove_value(key) def __setitem__(self, key, value): self.put(key, value) class CacheManager(object): def __init__(self, **kwargs): """Initialize a CacheManager object with a set of options Options should be parsed with the :func:`~beaker.util.parse_cache_config_options` function to ensure only valid options are used. """ self.kwargs = kwargs self.regions = kwargs.pop('cache_regions', {}) # Add these regions to the module global cache_regions.update(self.regions) def get_cache(self, name, **kwargs): kw = self.kwargs.copy() kw.update(kwargs) return Cache._get_cache(name, kw) def get_cache_region(self, name, region): if region not in self.regions: raise BeakerException('Cache region not configured: %s' % region) kw = self.regions[region] return Cache._get_cache(name, kw) def region(self, region, *args): """Decorate a function to cache itself using a cache region The region decorator requires arguments if there are more than 2 of the same named function, in the same module. This is because the namespace used for the functions cache is based on the functions name and the module. Example:: # Assuming a cache object is available like: cache = CacheManager(dict_of_config_options) def populate_things(): @cache.region('short_term', 'some_data') def load(search_term, limit, offset): return load_the_data(search_term, limit, offset) return load('rabbits', 20, 0) .. note:: The function being decorated must only be called with positional arguments. """ return cache_region(region, *args) def region_invalidate(self, namespace, region, *args): """Invalidate a cache region namespace or decorated function This function only invalidates cache spaces created with the cache_region decorator. :param namespace: Either the namespace of the result to invalidate, or the name of the cached function :param region: The region the function was cached to. If the function was cached to a single region then this argument can be None :param args: Arguments that were used to differentiate the cached function as well as the arguments passed to the decorated function Example:: # Assuming a cache object is available like: cache = CacheManager(dict_of_config_options) def populate_things(invalidate=False): @cache.region('short_term', 'some_data') def load(search_term, limit, offset): return load_the_data(search_term, limit, offset) # If the results should be invalidated first if invalidate: cache.region_invalidate(load, None, 'some_data', 'rabbits', 20, 0) return load('rabbits', 20, 0) """ return region_invalidate(namespace, region, *args) if callable(namespace): if not region: region = namespace._arg_region namespace = namespace._arg_namespace if not region: raise BeakerException("Region or callable function " "namespace is required") else: region = self.regions[region] cache = self.get_cache(namespace, **region) cache_key = " ".join(str(x) for x in args) cache.remove_value(cache_key) def cache(self, *args, **kwargs): """Decorate a function to cache itself with supplied parameters :param args: Used to make the key unique for this function, as in region() above. :param kwargs: Parameters to be passed to get_cache(), will override defaults Example:: # Assuming a cache object is available like: cache = CacheManager(dict_of_config_options) def populate_things(): @cache.cache('mycache', expire=15) def load(search_term, limit, offset): return load_the_data(search_term, limit, offset) return load('rabbits', 20, 0) .. note:: The function being decorated must only be called with positional arguments. """ cache = [None] key = " ".join(str(x) for x in args) def decorate(func): namespace = util.func_namespace(func) def cached(*args): if not cache[0]: cache[0] = self.get_cache(namespace, **kwargs) cache_key = key + " " + " ".join(str(x) for x in args) def go(): return func(*args) return cache[0].get_value(cache_key, createfunc=go) cached._arg_namespace = namespace return cached return decorate def invalidate(self, func, *args, **kwargs): """Invalidate a cache decorated function This function only invalidates cache spaces created with the cache decorator. :param func: Decorated function to invalidate :param args: Used to make the key unique for this function, as in region() above. :param kwargs: Parameters that were passed for use by get_cache(), note that this is only required if a ``type`` was specified for the function Example:: # Assuming a cache object is available like: cache = CacheManager(dict_of_config_options) def populate_things(invalidate=False): @cache.cache('mycache', type="file", expire=15) def load(search_term, limit, offset): return load_the_data(search_term, limit, offset) # If the results should be invalidated first if invalidate: cache.invalidate(load, 'mycache', 'rabbits', 20, 0, type="file") return load('rabbits', 20, 0) """ namespace = func._arg_namespace cache = self.get_cache(namespace, **kwargs) cache_key = " ".join(str(x) for x in args) cache.remove_value(cache_key) # coding=utf-8 # Copyright 2018 Sascha Schirra # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" A ND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.. from ropper.gadget import Category from ropper.common.error import * from ropper.common.utils import * from ropper.rop import Ropper from ropper.arch import x86 from ropper.ropchain.ropchain import * from ropper.loaders.loader import Type from ropper.loaders.pe import PE from ropper.loaders.elf import ELF from ropper.loaders.mach_o import MachO from ropper.loaders.raw import Raw from ropper.gadget import Gadget from re import match from filebytes.pe import ImageDirectoryEntry import itertools import math import sys if sys.version_info.major == 2: range = xrange class RopChainX86(RopChain): MAX_QUALI = 7 def _printHeader(self): toReturn = '' toReturn += ('#!/usr/bin/env python\n') toReturn += ('# Generated by ropper ropchain generator #\n') toReturn += ('from struct import pack\n') toReturn += ('\n') toReturn += ('p = lambda x : pack(\'I\', x)\n') toReturn += ('\n') return toReturn def _printRebase(self): toReturn = '' for binary,section in self._usedBinaries: imageBase = Gadget.IMAGE_BASES[binary] toReturn += ('IMAGE_BASE_%d = %s # %s\n' % (self._usedBinaries.index((binary, section)),toHex(imageBase , 4), binary)) toReturn += ('rebase_%d = lambda x : p(x + IMAGE_BASE_%d)\n\n'% (self._usedBinaries.index((binary, section)),self._usedBinaries.index((binary, section)))) return toReturn @classmethod def name(cls): return '' @classmethod def availableGenerators(cls): return [RopChainX86System, RopChainX86Mprotect, RopChainX86VirtualProtect] @classmethod def archs(self): return [x86] def _createDependenceChain(self, gadgets): """ gadgets - list with tuples tuple contains: - method to create chaingadget - list with arguments - dict with named arguments - list with registers which are not allowed to override in the gadget """ failed = [] cur_len = 0 cur_chain = '' counter = 0 failed_chains={} max_perm = math.factorial(len(gadgets)) for x in itertools.permutations(gadgets): counter += 1 self._printMessage('[*] Try permuation %d / %d' % (counter, max_perm)) found = False for y in failed: if x[:len(y)] == y: found = True break if found: continue try: fail = [] chain2 = '' dontModify = [] badRegs = [] c = 0 for idx in range(len(x)): g = x[idx] if idx != 0: badRegs.extend(x[idx-1][3]) dontModify.extend(g[3]) fail.append(g) chain2 += g[0](*g[1], badRegs=badRegs, dontModify=dontModify,**g[2])[0] cur_chain += chain2 break except RopChainError as e: failed_chains[chain2] = fail failed.append(tuple(fail)) else: self._printMessage('Cannot create chain which fills all registers') fail_tmp = None fail_max = [] chain_tmp = None for chain,fail in failed_chains.items(): if len(fail) > len(fail_max): fail_max = fail chain_tmp = chain cur_chain = '# Filled registers: ' for fa in fail_max[:-1]: cur_chain += (fa[2]['reg']) + ', ' cur_chain += '\n' cur_chain += chain_tmp # print('Impossible to create complete chain') return cur_chain def _isModifiedOrDereferencedAccess(self, gadget, dontModify): regs = [] for line in gadget.lines[1:]: line = line[1] if '[' in line: return True if dontModify: m = match('[a-z]+ (e?[abcds][ixlh]),?.*', line) if m and m.group(1) in dontModify: return True return False def _paddingNeededFor(self, gadget): regs = [] for idx in range(1,len(gadget.lines)): line = gadget.lines[idx][1] matched = match('^pop (...)$', line) if matched: regs.append(matched.group(1)) return regs def _printRopInstruction(self, gadget, padding=True, number=None, value=None): value_first = False toReturn = ('rop += rebase_%d(%s) # %s\n' % (self._usedBinaries.index((gadget.fileName, gadget.section)),toHex(gadget.lines[0][0],4), gadget.simpleString())) if number is not None: toReturn +=self._printPaddingInstruction(number) if padding: regs = self._paddingNeededFor(gadget) if len(regs) > 0: dst = gadget.category[2]['dst'] search = '^pop (%s)$' % dst first_line = gadget.lines[0][1] if match(search, first_line): value_first = True padding_str = '' for i in range(len(regs)): padding_str +=self._printPaddingInstruction() if value_first: toReturn += value toReturn += padding_str else: toReturn += padding_str if value: toReturn += value return toReturn def _printAddString(self, string): return ('rop += \'%s\'\n' % string) def _printRebasedAddress(self, addr, comment='', idx=0): return ('rop += rebase_%d(%s)\n' % (idx,addr)) def _printPaddingInstruction(self, addr='0xdeadbeef'): return ('rop += p(%s)\n' % addr) def _containsZeroByte(self, addr): return self.containsBadbytes(addr) def _createZeroByteFillerForSub(self, number): start = 0x01010101 for i in range(start, 0x0f0f0f0f): if not self._containsZeroByte(i) and not self._containsZeroByte(i+number): return i raise RopChainError("Could not create Number for substract gadget") def _createZeroByteFillerForAdd(self, number): start = 0x01010101 for i in range(start, 0x0f0f0f0f): if not self._containsZeroByte(i) and not self._containsZeroByte(number-i): return i raise RopChainError("Could not create Number for addition gadget") def _find(self, category, reg=None, srcdst='dst', badDst=[], badSrc=None, dontModify=None, srcEqDst=False, switchRegs=False ): quali = 1 while quali < RopChainX86System.MAX_QUALI: for binary in self._binaries: for gadget in self._gadgets[binary]: if gadget.category[0] == category and gadget.category[1] == quali: if badSrc and (gadget.category[2]['src'] in badSrc \ or gadget.affected_regs.intersection(badSrc)): continue if badDst and (gadget.category[2]['dst'] in badDst \ or gadget.affected_regs.intersection(badDst)): continue if not gadget.lines[len(gadget.lines)-1][1].strip().endswith('ret') or 'esp' in gadget.simpleString(): continue if srcEqDst and (not (gadget.category[2]['dst'] == gadget.category[2]['src'])): continue elif not srcEqDst and 'src' in gadget.category[2] and (gadget.category[2]['dst'] == gadget.category[2]['src']): continue if self._isModifiedOrDereferencedAccess(gadget, dontModify): continue if reg: if gadget.category[2][srcdst] == reg: self._updateUsedBinaries(gadget) return gadget elif switchRegs: other = 'src' if srcdst == 'dst' else 'dst' if gadget.category[2][other] == reg: self._updateUsedBinaries(gadget) return gadget else: self._updateUsedBinaries(gadget) return gadget quali += 1 def _createWriteStringWhere(self, what, where, reg=None, dontModify=[], idx=0): badRegs = [] badDst = [] while True: popReg = self._find(Category.LOAD_REG, reg=reg, badDst=badRegs, dontModify=dontModify) if not popReg: raise RopChainError('Cannot build writewhatwhere gadget!') write4 = self._find(Category.WRITE_MEM, reg=popReg.category[2]['dst'], badDst= badDst, srcdst='src') if not write4: badRegs.append(popReg.category[2]['dst']) continue else: popReg2 = self._find(Category.LOAD_REG, reg=write4.category[2]['dst'], dontModify=[popReg.category[2]['dst']]+dontModify) if not popReg2: badDst.append(write4.category[2]['dst']) continue else: break; if len(what) % 4 > 0: what += ' ' * (4 - len(what) % 4) toReturn = '' for index in range(0,len(what),4): part = what[index:index+4] toReturn += self._printRopInstruction(popReg,False) toReturn += self._printAddString(part) regs = self._paddingNeededFor(popReg) for i in range(len(regs)): toReturn +=self._printPaddingInstruction() toReturn += self._printRopInstruction(popReg2, False) toReturn += self._printRebasedAddress(toHex(where+index,4), idx=idx) regs = self._paddingNeededFor(popReg2) for i in range(len(regs)): toReturn +=self._printPaddingInstruction() toReturn += self._printRopInstruction(write4) return (toReturn,popReg.category[2]['dst'], popReg2.category[2]['dst']) def _createWriteRegValueWhere(self, what, where, dontModify=[], idx=0): badRegs = [] badDst = [] while True: write4 = self._find(Category.WRITE_MEM, reg=what, badDst=badDst, dontModify=dontModify, srcdst='src') if not write4: raise RopChainError('Cannot build writewhatwhere gadget!') else: popReg2 = self._find(Category.LOAD_REG, reg=write4.category[2]['dst'], dontModify=[what]+dontModify) if not popReg2: badDst.append(write4.category[2]['dst']) continue else: break; toReturn = self._printRopInstruction(popReg2, False) toReturn += self._printRebasedAddress(toHex(where,4), idx=idx) regs = self._paddingNeededFor(popReg2) for i in range(len(regs)): toReturn +=self._printPaddingInstruction() toReturn += self._printRopInstruction(write4) return (toReturn,what, popReg2.category[2]['dst']) def _createLoadRegValueFrom(self, what, from_reg, dontModify=[], idx=0): try: return self._createLoadRegValueFromMov(what, from_reg, dontModify, idx) except RopChainError: return self._createLoadRegValueFromXchg(what, from_reg, dontModify, idx) def _createLoadRegValueFromMov(self, what, from_reg, dontModify=[], idx=0): badRegs = [] badDst = [] while True: load4 = self._find(Category.LOAD_MEM, reg=what, badDst=badDst, dontModify=dontModify, srcdst='dst') if not load4: raise RopChainError('Cannot build loadwhere gadget!') else: popReg2 = self._find(Category.LOAD_REG, reg=load4.category[2]['src'], dontModify=[what,load4.category[2]['src']]+dontModify) if not popReg2: badDst.append(load4.category[2]['src']) continue else: break; value = self._printPaddingInstruction(toHex(from_reg,4)) toReturn = self._printRopInstruction(popReg2, False, value=value) regs = self._paddingNeededFor(popReg2) for i in range(len(regs)): toReturn +=self._printPaddingInstruction() toReturn += self._printRopInstruction(load4) return (toReturn,what, popReg2.category[2]['dst']) def _createLoadRegValueFromXchg(self, what, from_reg, dontModify=[], idx=0): badRegs = [] badDst = [] while True: load4 = self._find(Category.XCHG_REG, reg=what, badDst=badDst, dontModify=dontModify, srcdst='src') if not load4: raise RopChainError('Cannot build loadwhere gadget!') else: mov = self._find(Category.LOAD_MEM, reg=load4.category[2]['dst'], badDst=badDst, dontModify=[load4.category[2]['dst']]+dontModify, srcdst='dst') if not mov: badDst.append(load4.category[2]['dst']) continue popReg2 = self._find(Category.LOAD_REG, reg=mov.category[2]['src'], dontModify=[what,load4.category[2]['src']]+dontModify) if not popReg2: badDst.append(load4.category[2]['src']) continue else: break; toReturn = self._printRopInstruction(popReg2, False) toReturn += self._printPaddingInstruction(toHex(from_reg,4)) regs = self._paddingNeededFor(popReg2) for i in range(len(regs)): toReturn +=self._printPaddingInstruction() toReturn += self._printRopInstruction(mov) toReturn += self._printRopInstruction(load4) return (toReturn,what, popReg2.category[2]['dst']) def _createNumberSubtract(self, number, reg=None, badRegs=None, dontModify=None): if not badRegs: badRegs=[] while True: sub = self._find(Category.SUB_REG, reg=reg, badDst=badRegs, badSrc=badRegs, dontModify=dontModify) if not sub: raise RopChainError('Cannot build number with subtract gadget for reg %s!' % reg) popSrc = self._find(Category.LOAD_REG, reg=sub.category[2]['src'], dontModify=dontModify) if not popSrc: badRegs.append=[sub.category[2]['src']] continue popDst = self._find(Category.LOAD_REG, reg=sub.category[2]['dst'], dontModify=[sub.category[2]['src']]+dontModify) if not popDst: badRegs.append=[sub.category[2]['dst']] continue else: break; filler = self._createZeroByteFillerForSub(number) toReturn = self._printRopInstruction(popSrc, False) toReturn += self._printPaddingInstruction(toHex(filler,4)) regs = self._paddingNeededFor(popSrc) for i in range(len(regs)): toReturn += self._printPaddingInstruction() toReturn += self._printRopInstruction(popDst, False) toReturn += self._printPaddingInstruction(toHex(filler+number,4)) regs = self._paddingNeededFor(popDst) for i in range(len(regs)): toReturn += self._printPaddingInstruction() toReturn += self._printRopInstruction(sub) return (toReturn, popDst.category[2]['dst'],popSrc.category[2]['dst']) def _createNumberAddition(self, number, reg=None, badRegs=None, dontModify=None): if not badRegs: badRegs=[] while True: sub = self._find(Category.ADD_REG, reg=reg, badDst=badRegs, badSrc=badRegs, dontModify=dontModify) if not sub: raise RopChainError('Cannot build number with addition gadget for reg %s!' % reg) popSrc = self._find(Category.LOAD_REG, reg=sub.category[2]['src'], dontModify=dontModify) if not popSrc: badRegs.append=[sub.category[2]['src']] continue popDst = self._find(Category.LOAD_REG, reg=sub.category[2]['dst'], dontModify=[sub.category[2]['src']]+dontModify) if not popDst: badRegs.append(sub.category[2]['dst']) continue else: break; filler = self._createZeroByteFillerForAdd(number) toReturn = self._printRopInstruction(popSrc, False) toReturn += self._printPaddingInstruction(toHex(filler,4)) regs = self._paddingNeededFor(popSrc) for i in range(len(regs)): toReturn += self._printPaddingInstruction() toReturn += self._printRopInstruction(popDst, False) toReturn += self._printPaddingInstruction(toHex(number - filler,4)) regs = self._paddingNeededFor(popDst) for i in range(len(regs)): toReturn += self._printPaddingInstruction() toReturn += self._printRopInstruction(sub) return (toReturn, popDst.category[2]['dst'],popSrc.category[2]['dst']) def _createNumberPop(self, number, reg=None, badRegs=None, dontModify=None): if self._containsZeroByte(0xffffffff): raise RopChainError("Cannot write value with pop -1 and inc gadgets, because there are badbytes in the negated number") while True: popReg = self._find(Category.LOAD_REG, reg=reg, badDst=badRegs,dontModify=dontModify) if not popReg: raise RopChainError('Cannot build number with xor gadget!') incReg = self._find(Category.INC_REG, reg=popReg.category[2]['dst'], dontModify=dontModify) if not incReg: if not badRegs: badRegs = [] badRegs.append(popReg.category[2]['dst']) else: break value = self._printPaddingInstruction(toHex(0xffffffff,4)) toReturn = self._printRopInstruction(popReg, value=value) for i in range(number+1): toReturn += self._printRopInstruction(incReg) return (toReturn ,popReg.category[2]['dst'],) def _createNumberXOR(self, number, reg=None, badRegs=None, dontModify=None): while True: clearReg = self._find(Category.CLEAR_REG, reg=reg, badDst=badRegs, badSrc=badRegs,dontModify=dontModify, srcEqDst=True) if not clearReg: raise RopChainError('Cannot build number with xor gadget!') if number > 0: incReg = self._find(Category.INC_REG, reg=clearReg.category[2]['src'], dontModify=dontModify) if not incReg: if not badRegs: badRegs = [] badRegs.append(clearReg.category[2]['src']) else: break else: break toReturn = self._printRopInstruction(clearReg) for i in range(number): toReturn += self._printRopInstruction(incReg) return (toReturn, clearReg.category[2]['dst'],) def _createNumberXchg(self, number, reg=None, badRegs=None, dontModify=None): xchg = self._find(Category.XCHG_REG, reg=reg, badDst=badRegs, dontModify=dontModify) if not xchg: raise RopChainError('Cannot build number gadget with xchg!') other = xchg.category[2]['src'] if xchg.category[2]['dst'] else xchg.category[2]['dst'] toReturn = self._createNumber(number, other, badRegs, dontModify)[0] toReturn += self._printRopInstruction(xchg) return (toReturn, reg, other) def _createNumberNeg(self, number, reg=None, badRegs=None, dontModify=None): if number == 0: raise RopChainError('Cannot build number gadget with neg if number is 0!') if self._containsZeroByte((~number)+1): raise RopChainError("Cannot use neg gadget, because there are badbytes in the negated number") neg = self._find(Category.NEG_REG, reg=reg, badDst=badRegs, dontModify=dontModify) if not neg: raise RopChainError('Cannot build number gadget with neg!') pop = self._find(Category.LOAD_REG, reg=reg, badDst=badRegs, dontModify=dontModify) if not pop: raise RopChainError('Cannot build number gadget with neg!') value = self._printPaddingInstruction(toHex((~number)+1)) # two's complement toReturn = self._printRopInstruction(pop, value=value) toReturn += self._printRopInstruction(neg) return (toReturn, reg,) def _createNumber(self, number, reg=None, badRegs=None, dontModify=None, xchg=True): try: if self._containsZeroByte(number): try: return self._createNumberNeg(number, reg, badRegs,dontModify) except RopChainError as e: if number < 0x50: try: return self._createNumberXOR(number, reg, badRegs,dontModify) except RopChainError: try: return self._createNumberPop(number, reg, badRegs,dontModify) except RopChainError: try: return self._createNumberSubtract(number, reg, badRegs,dontModify) except RopChainError: return self._createNumberAddition(number, reg, badRegs,dontModify) else : try: return self._createNumberSubtract(number, reg, badRegs,dontModify) except RopChainError: return self._createNumberAddition(number, reg, badRegs,dontModify) else: popReg =self._find(Category.LOAD_REG, reg=reg, badDst=badRegs,dontModify=dontModify) if not popReg: raise RopChainError('Cannot build number gadget!') toReturn = self._printRopInstruction(popReg, padding=True, number=toHex(number,4)) return (toReturn , popReg.category[2]['dst']) except RopChainError as e: return self._createNumberXchg(number, reg, badRegs, dontModify) def _createAddress(self, address, reg=None, badRegs=None, dontModify=None): popReg = self._find(Category.LOAD_REG, reg=reg, badDst=badRegs,dontModify=dontModify) if not popReg: raise RopChainError('Cannot build address gadget!') toReturn = '' toReturn += self._printRopInstruction(popReg,False) toReturn += self._printRebasedAddress(toHex(address, 4), idx=self._usedBinaries.index((popReg.fileName, popReg.section))) regs = self._paddingNeededFor(popReg) for i in range(len(regs)): toReturn +=self._printPaddingInstruction() return (toReturn,popReg.category[2]['dst']) def _createSyscall(self, reg=None, badRegs=None, dontModify=None): syscall = self._find(Category.SYSCALL, reg=None, badDst=None, dontModify=dontModify) if not syscall: raise RopChainError('Cannot build syscall gadget!') toReturn = '' toReturn += self._printRopInstruction(syscall) return (toReturn,) def _createOpcode(self, opcode): return self._printRopInstruction(self._searchOpcode(opcode)) def _searchOpcode(self, opcode): r = Ropper() gadgets = [] for binary in self._binaries: for section in binary.executableSections: vaddr = section.virtualAddress gadgets.extend(r.searchOpcode(binary,opcode=opcode,disass=True)) if len(gadgets) > 0: for gadget in gadgets: if not gadget: continue if not self.containsBadbytes(gadget.IMAGE_BASES.get(gadget.fileName,0) + gadget.lines[0][0]): self._updateUsedBinaries(gadget) return gadget else: raise RopChainError('Cannot create gadget for opcode: %s' % opcode) def create(self): pass class RopChainX86System(RopChainX86): @classmethod def usableTypes(self): return (ELF, Raw) @classmethod def name(cls): return 'execve' def _createCommand(self, what, where, reg=None, dontModify=[], idx=0): if len(what) % 4 > 0: what = '/' * (4 - len(what) % 4) + what return self._createWriteStringWhere(what,where, idx=idx) def create(self, options={}): cmd = options.get('cmd') address = options.get('address') if not cmd: cmd = '/bin/sh' if len(cmd.split(' ')) > 1: raise RopChainError('No argument support for execve commands') self._printMessage('ROPchain Generator for syscall execve:\n') self._printMessage('\nwrite command into data section\neax 0xb\nebx address to cmd\necx address to null\nedx address to null\n') chain = self._printHeader() gadgets = [] can_create_command = False chain_tmp = '\n' if address is None: section = self._binaries[0].getSection('.data') length = math.ceil(float(len(cmd))/4) * 4 nulladdress = section.offset+length try: cmdaddress = section.offset chain_tmp += self._createCommand(cmd,cmdaddress)[0] can_create_command = True except RopChainError as e: self._printMessage('Cannot create gadget: writewhatwhere') self._printMessage('Use 0x41414141 as command address. Please replace that value.') cmdaddress = 0x41414141 if can_create_command: badregs = [] while True: c = '' ret = self._createNumber(0x0, badRegs=badregs) c += ret[0] try: c += self._createWriteRegValueWhere(ret[1], nulladdress)[0] chain_tmp += c break except BaseException as e: #raise e badregs.append(ret[1]) gadgets.append((self._createAddress, [cmdaddress],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh'])) gadgets.append((self._createAddress, [nulladdress],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch'])) gadgets.append((self._createAddress, [nulladdress],{'reg':'edx'},['edx', 'dx', 'dl', 'dh'])) gadgets.append((self._createNumber, [0xb],{'reg':'eax'},['eax', 'ax', 'al', 'ah'])) if address is not None and not can_create_command: if type(address) is str: cmdaddress = int(address, 16) nulladdress = options.get('nulladdress') if nulladdress is None: self._printMessage('No address to a null bytes was given, 0x42424242 is used instead.') self._printMessage('Please replace that value.') nulladdress = 0x42424242 elif type(nulladdress) is str: nulladdress = int(nulladdress,16) gadgets.append((self._createNumber, [cmdaddress],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh'])) gadgets.append((self._createNumber, [nulladdress],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch'])) gadgets.append((self._createNumber, [nulladdress],{'reg':'edx'},['edx', 'dx', 'dl', 'dh'])) gadgets.append((self._createNumber, [0xb],{'reg':'eax'},['eax', 'ax', 'al', 'ah'])) self._printMessage('Try to create chain which fills registers without delete content of previous filled registers') chain_tmp += self._createDependenceChain(gadgets) try: self._printMessage('Look for syscall gadget') chain_tmp += self._createSyscall()[0] self._printMessage('syscall gadget found') except RopChainError: try: self._printMessage('No syscall gadget found!') self._printMessage('Look for int 0x80 opcode') chain_tmp += self._createOpcode('cd80') self._printMessage('int 0x80 opcode found') except RopChainError: try: self._printMessage('No int 0x80 opcode found') self._printMessage('Look for call gs:[0x10] opcode') chain_tmp += self._createOpcode('65ff1510000000') self._printMessage('call gs:[0x10] found') except RopChainError: chain_tmp += '# INSERT SYSCALL GADGET HERE\n' self._printMessage('No call gs:[0x10] opcode found') chain += self._printRebase() chain += 'rop = \'\'\n' chain += chain_tmp chain += 'print rop' return chain class RopChainX86Mprotect(RopChainX86): """ Builds a ropchain for mprotect syscall eax 0x7b ebx address ecx size edx 0x7 -> RWE """ @classmethod def usableTypes(self): return (ELF, Raw) @classmethod def name(cls): return 'mprotect' def _createJmp(self, reg=['esp']): r = Ropper() gadgets = [] for binary in self._binaries: for section in binary.executableSections: vaddr = section.virtualAddress gadgets.extend( r.searchJmpReg(binary,reg)) if len(gadgets) > 0: self._updateUsedBinaries(gadgets[0]) return self._printRopInstruction(gadgets[0]) else: return None def create(self, options={}): address = options.get('address') size = options.get('size') if not address: raise RopChainError('Missing parameter: address') if not size: raise RopChainError('Missing parameter: size') if not match('0x[0-9a-fA-F]{1,8}', address): raise RopChainError('Parameter address have to have the following format: ') if not match('0x[0-9a-fA-F]+', size): raise RopChainError('Parameter size have to have the following format: ') address = int(address, 16) size = int(size, 16) self._printMessage('ROPchain Generator for syscall mprotect:\n') self._printMessage('eax 0x7b\nebx address\necx size\nedx 0x7 -> RWE\n') chain = self._printHeader() chain += 'shellcode = \'\\xcc\'*100\n\n' gadgets = [] gadgets.append((self._createNumber, [address],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh'])) gadgets.append((self._createNumber, [size],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch'])) gadgets.append((self._createNumber, [0x7],{'reg':'edx'},['edx', 'dx', 'dl', 'dh'])) gadgets.append((self._createNumber, [0x7d],{'reg':'eax'},['eax', 'ax', 'al', 'ah'])) self._printMessage('Try to create chain which fills registers without delete content of previous filled registers') chain_tmp = '' chain_tmp += self._createDependenceChain(gadgets) try: self._printMessage('Look for syscall gadget') chain_tmp += self._createSyscall()[0] self._printMessage('syscall gadget found') except RopChainError: chain_tmp += '\n# ADD HERE SYSCALL GADGET\n\n' self._printMessage('No syscall gadget found!') self._printMessage('Look for jmp esp') jmp_esp = self._createJmp() if jmp_esp: self._printMessage('jmp esp found') chain_tmp += jmp_esp else: self._printMessage('no jmp esp found') chain_tmp += '\n# ADD HERE JMP ESP\n\n' chain += self._printRebase() chain += '\nrop = \'\'\n' chain += chain_tmp chain += 'rop += shellcode\n\n' chain += 'print(rop)\n' return chain class RopChainX86VirtualProtect(RopChainX86): """ Builds a ropchain for a VirtualProtect call using pushad eax 0x90909090 ecx old protection (writable addr) edx 0x40 (RWE) ebx size esp address ebp return address (jmp esp) esi pointer to VirtualProtect edi ret (rop nop) """ @classmethod def usableTypes(self): return (PE, Raw) @classmethod def name(cls): return 'virtualprotect' def _createPushad(self): pushad = self._find(Category.PUSHAD) if pushad: return self._printRopInstruction(pushad) else: self._printMessage('No pushad found!') return '# Add here PUSHAD gadget!' def _createJmp(self, reg=['esp']): r = Ropper() gadgets = [] for binary in self._binaries: for section in binary.executableSections: vaddr = section.offset gadgets.extend( r.searchJmpReg(binary,reg)) if len(gadgets) > 0: self._updateUsedBinaries(gadgets[0]) return gadgets[0] else: return None def __extract(self, param): if (not match('0x[0-9a-fA-F]{1,8},0x[0-9a-fA-F]+', param)) and (not match('0x[0-9a-fA-F]+', param)): raise RopChainError('Parameter have to have the following format: ') return (None, int(param, 16)) def __getVirtualProtectEntry(self): for binary in self._binaries: if binary.type == Type.PE: imports = binary._binary.dataDirectory[ImageDirectoryEntry.IMPORT] if not imports: return None for descriptorData in imports: for thunk in descriptorData.importAddressTable: if thunk.importByName and thunk.importByName.name == 'VirtualProtect': return thunk.rva, binary.imageBase else: self._printMessage('File is not a PE file.') return None def create(self, options={}): self._printMessage('Ropchain Generator for VirtualProtect:\n') self._printMessage('eax 0x90909090\necx old protection (writable addr)\nedx 0x40 (RWE)\nebx size\nesp address\nebp return address (jmp esp)\nesi pointer to VirtualProtect\nedi ret (rop nop)\n') image_base = 0 address = options.get('address') given = False if not address: virtual_protect = self.__getVirtualProtectEntry() if virtual_protect: address, image_base = virtual_protect if not address: self._printMessage('No IAT-Entry for VirtualProtect found!') raise RopChainError('No IAT-Entry for VirtualProtect found and no address is given') else: if address: if not match('0x[0-9a-fA-F]{1,8}', address): raise RopChainError('Parameter address have to have the following format: ') address = int(address, 16) given = True writeable_ptr = self._binaries[0].getWriteableSection().offset for i in range(0,0x10000,4): if not self.containsBadbytes((writeable_ptr + i) & 0xffff,2): writeable_ptr += i break jmp_esp = self._createJmp() ret_addr = self._searchOpcode('c3') chain = self._printHeader() chain += '\n\nshellcode = \'\\xcc\'*100\n\n' gadgets = [] to_extend = [] chain_tmp = '' got_jmp_esp = False try: self._printMessage('Try to create gadget to fill esi with content of IAT address: 0x%x' % (address + image_base)) chain_tmp += self._createLoadRegValueFrom('esi', address+image_base)[0] gadgets.append((self._createNumber, [0x90909090],{'reg':'eax'},['eax', 'ax', 'ah', 'al','esi','si'])) to_extend = ['esi','si'] if jmp_esp: gadgets.append((self._createAddress, [jmp_esp.lines[0][0]],{'reg':'ebp'},['ebp', 'bp']+to_extend)) got_jmp_esp = True except RopChainError: self._printMessage('Cannot create fill esi gadget!') self._printMessage('Try to create this chain:\n') self._printMessage('eax Pointer to VirtualProtect\necx old protection (writable addr)\nedx 0x40 (RWE)\nebx size\nesp address\nebp return address (pop ebp;ret)\nesi pointer to jmp [eax]\nedi ret (rop nop)\n') jmp_eax = self._searchOpcode('ff20') # jmp [eax] gadgets.append((self._createAddress, [jmp_eax.lines[0][0]],{'reg':'esi'},['esi','si'])) gadgets.append((self._createNumber, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al'])) pop_ebp = self._searchOpcode('5dc3') if pop_ebp: gadgets.append((self._createAddress, [pop_ebp.lines[0][0]],{'reg':'ebp'},['ebp', 'bp']+to_extend)) gadgets.append((self._createNumber, [0x1],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh']+to_extend)) gadgets.append((self._createAddress, [writeable_ptr],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch']+to_extend)) gadgets.append((self._createNumber, [0x40],{'reg':'edx'},['edx', 'dx', 'dh', 'dl']+to_extend)) gadgets.append((self._createAddress, [ret_addr.lines[0][0]],{'reg':'edi'},['edi', 'di']+to_extend)) self._printMessage('Try to create chain which fills registers without delete content of previous filled registers') chain_tmp += self._createDependenceChain(gadgets) self._printMessage('Look for pushad gadget') chain_tmp += self._createPushad() if not got_jmp_esp and jmp_esp: chain_tmp += self._printRopInstruction(jmp_esp) chain += self._printRebase() chain += 'rop = \'\'\n' chain += chain_tmp chain += 'rop += shellcode\n\n' chain += 'print(rop)\n' return chain # class RopChainX86VirtualAlloc(RopChainX86): # """ # Builds a ropchain for a VirtualProtect call using pushad # eax 0x90909090 # ecx old protection (writable addr) # edx 0x40 (RWE) # ebx size # esp address # ebp return address (jmp esp) # esi pointer to VirtualProtect # edi ret (rop nop) # """ # @classmethod # def name(cls): # return 'virtualalloc' # def _createPushad(self): # pushad = self._find(Category.PUSHAD) # if pushad: # return self._printRopInstruction(pushad) # else: # self._printer.printInfo('No pushad found!') # return '# Add here PUSHAD gadget!' # def _createJmp(self, reg=['esp']): # r = Ropper() # gadgets = [] # for binary in self._binaries: # for section in binary.executableSections: # vaddr = section.offset # gadgets.extend( # r.searchJmpReg(self._binaries[0],reg)) # if len(gadgets) > 0: # if (gadgets[0]._binary, gadgets[0]._section) not in self._usedBinaries: # self._usedBinaries.append((gadgets[0]._binary, gadgets[0]._section)) # return gadgets[0] # else: # return None # def __extract(self, param): # if (not match('0x[0-9a-fA-F]{1,8},0x[0-9a-fA-F]+', param)) and (not match('0x[0-9a-fA-F]+', param)): # raise RopChainError('Parameter have to have the following format: , or ') # split = param.split(',') # if len(split) == 2: # if isHex(split[1]): # return (int(split[0], 16), int(split[1], 16)) # else: # return (None, int(split[0], 16)) # def __getVirtualProtectEntry(self): # for binary in self._binaries: # if binary.type == Type.PE: # s = binary._binary.dataDirectory[ImageDirectoryEntry.IMPORT] # for thunk in s.importNameTable: # if thunk.importByName.name == 'VirtualAlloc': # return thunk.rva + binary.imageBase # else: # self._printer.printError('File is not a PE file.') # return None # def create(self, param=None): # if not param: # raise RopChainError('Missing parameter: address,size or size') # self._printer.printInfo('Ropchain Generator for VirtualProtect:\n') # self._printer.println('eax 0x90909090\necx old protection (writable addr)\nedx 0x40 (RWE)\nebx size\nesp address\nebp return address (jmp esp)\nesi pointer to VirtualProtect\nedi ret (rop nop)\n') # address, size = self.__extract(param) # given = False # if not address: # address = self.__getVirtualProtectEntry() # if not address: # self._printer.printError('No IAT-Entry for VirtualProtect found!') # raise RopChainError('No IAT-Entry for VirtualProtect found and no address is given') # else: # given = True # jmp_esp = self._createJmp() # ret_addr = self._searchOpcode('c3') # chain = self._printHeader() # chain += '\n\nshellcode = \'\\xcc\'*100\n\n' # gadgets = [] # to_extend = [] # chain_tmp = '' # try: # self._printer.printInfo('Try to create gadget to fill esi with content of IAT address: %s' % address) # chain_tmp += self._createLoadRegValueFrom('esi', address)[0] # if given: # gadgets.append((self._createNumber, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al','esi','si'])) # else: # gadgets.append((self._createAddress, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al','esi','si'])) # to_extend = ['esi','si'] # except: # self._printer.printInfo('Cannot create fill esi gadget!') # self._printer.printInfo('Try to create this chain:\n') # self._printer.println('eax Pointer to VirtualProtect\necx old protection (writable addr)\nedx 0x40 (RWE)\nebx size\nesp address\nebp return address (jmp esp)\nesi pointer to jmp [eax]\nedi ret (rop nop)\n') # jmp_eax = self._searchOpcode('ff20') # jmp [eax] # gadgets.append((self._createAddress, [jmp_eax.lines[0][0]],{'reg':'esi'},['esi','si'])) # if given: # gadgets.append((self._createNumber, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al'])) # else: # gadgets.append((self._createAddress, [address],{'reg':'eax'},['eax', 'ax', 'ah', 'al'])) # gadgets.append((self._createNumber, [size],{'reg':'ebx'},['ebx', 'bx', 'bl', 'bh']+to_extend)) # gadgets.append((self._createNumber, [0x40],{'reg':'ecx'},['ecx', 'cx', 'cl', 'ch']+to_extend)) # if jmp_esp: # gadgets.append((self._createAddress, [jmp_esp.lines[0][0]],{'reg':'ebp'},['ebp', 'bp']+to_extend)) # gadgets.append((self._createNumber, [0x1000],{'reg':'edx'},['edx', 'dx', 'dh', 'dl']+to_extend)) # gadgets.append((self._createAddress, [ret_addr.lines[0][0]],{'reg':'edi'},['edi', 'di']+to_extend)) # self._printer.printInfo('Try to create chain which fills registers without delete content of previous filled registers') # chain_tmp += self._createDependenceChain(gadgets) # self._printer.printInfo('Look for pushad gadget') # chain_tmp += self._createPushad() # chain += self._printRebase() # chain += 'rop = \'\'\n' # chain += chain_tmp # chain += 'rop += shellcode\n\n' # chain += 'print(rop)\n' # return chain # Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import logging from telemetry.page import shared_page_state class WebGLSupportedSharedState(shared_page_state.SharedPageState): def CanRunOnBrowser(self, browser_info, page): assert hasattr(page, 'skipped_gpus') if not browser_info.HasWebGLSupport(): logging.warning('Browser does not support webgl, skipping test') return False # Check the skipped GPUs list. # Requires the page provide a "skipped_gpus" property. browser = browser_info.browser system_info = browser.GetSystemInfo() if system_info: gpu_info = system_info.gpu gpu_vendor = self._GetGpuVendorString(gpu_info) if gpu_vendor in page.skipped_gpus: return False return True def _GetGpuVendorString(self, gpu_info): if gpu_info: primary_gpu = gpu_info.devices[0] if primary_gpu: vendor_string = primary_gpu.vendor_string.lower() vendor_id = primary_gpu.vendor_id if vendor_string: return vendor_string.split(' ')[0] elif vendor_id == 0x10DE: return 'nvidia' elif vendor_id == 0x1002: return 'amd' elif vendor_id == 0x8086: return 'intel' elif vendor_id == 0x15AD: return 'vmware' return 'unknown_gpu' from collections import deque import unittest from test import support, seq_tests import gc import weakref import copy import pickle from io import StringIO import random import struct BIG = 100000 def fail(): raise SyntaxError yield 1 class BadCmp: def __eq__(self, other): raise RuntimeError class MutateCmp: def __init__(self, deque, result): self.deque = deque self.result = result def __eq__(self, other): self.deque.clear() return self.result class TestBasic(unittest.TestCase): def test_basics(self): d = deque(range(-5125, -5000)) d.__init__(range(200)) for i in range(200, 400): d.append(i) for i in reversed(range(-200, 0)): d.appendleft(i) self.assertEqual(list(d), list(range(-200, 400))) self.assertEqual(len(d), 600) left = [d.popleft() for i in range(250)] self.assertEqual(left, list(range(-200, 50))) self.assertEqual(list(d), list(range(50, 400))) right = [d.pop() for i in range(250)] right.reverse() self.assertEqual(right, list(range(150, 400))) self.assertEqual(list(d), list(range(50, 150))) def test_maxlen(self): self.assertRaises(ValueError, deque, 'abc', -1) self.assertRaises(ValueError, deque, 'abc', -2) it = iter(range(10)) d = deque(it, maxlen=3) self.assertEqual(list(it), []) self.assertEqual(repr(d), 'deque([7, 8, 9], maxlen=3)') self.assertEqual(list(d), [7, 8, 9]) self.assertEqual(d, deque(range(10), 3)) d.append(10) self.assertEqual(list(d), [8, 9, 10]) d.appendleft(7) self.assertEqual(list(d), [7, 8, 9]) d.extend([10, 11]) self.assertEqual(list(d), [9, 10, 11]) d.extendleft([8, 7]) self.assertEqual(list(d), [7, 8, 9]) d = deque(range(200), maxlen=10) d.append(d) support.unlink(support.TESTFN) fo = open(support.TESTFN, "w") try: fo.write(str(d)) fo.close() fo = open(support.TESTFN, "r") self.assertEqual(fo.read(), repr(d)) finally: fo.close() support.unlink(support.TESTFN) d = deque(range(10), maxlen=None) self.assertEqual(repr(d), 'deque([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])') fo = open(support.TESTFN, "w") try: fo.write(str(d)) fo.close() fo = open(support.TESTFN, "r") self.assertEqual(fo.read(), repr(d)) finally: fo.close() support.unlink(support.TESTFN) def test_maxlen_zero(self): it = iter(range(100)) deque(it, maxlen=0) self.assertEqual(list(it), []) it = iter(range(100)) d = deque(maxlen=0) d.extend(it) self.assertEqual(list(it), []) it = iter(range(100)) d = deque(maxlen=0) d.extendleft(it) self.assertEqual(list(it), []) def test_maxlen_attribute(self): self.assertEqual(deque().maxlen, None) self.assertEqual(deque('abc').maxlen, None) self.assertEqual(deque('abc', maxlen=4).maxlen, 4) self.assertEqual(deque('abc', maxlen=2).maxlen, 2) self.assertEqual(deque('abc', maxlen=0).maxlen, 0) with self.assertRaises(AttributeError): d = deque('abc') d.maxlen = 10 def test_count(self): for s in ('', 'abracadabra', 'simsalabim'*500+'abc'): s = list(s) d = deque(s) for letter in 'abcdefghijklmnopqrstuvwxyz': self.assertEqual(s.count(letter), d.count(letter), (s, d, letter)) self.assertRaises(TypeError, d.count) # too few args self.assertRaises(TypeError, d.count, 1, 2) # too many args class BadCompare: def __eq__(self, other): raise ArithmeticError d = deque([1, 2, BadCompare(), 3]) self.assertRaises(ArithmeticError, d.count, 2) d = deque([1, 2, 3]) self.assertRaises(ArithmeticError, d.count, BadCompare()) class MutatingCompare: def __eq__(self, other): self.d.pop() return True m = MutatingCompare() d = deque([1, 2, 3, m, 4, 5]) m.d = d self.assertRaises(RuntimeError, d.count, 3) # test issue11004 # block advance failed after rotation aligned elements on right side of block d = deque([None]*16) for i in range(len(d)): d.rotate(-1) d.rotate(1) self.assertEqual(d.count(1), 0) self.assertEqual(d.count(None), 16) def test_comparisons(self): d = deque('xabc'); d.popleft() for e in [d, deque('abc'), deque('ab'), deque(), list(d)]: self.assertEqual(d==e, type(d)==type(e) and list(d)==list(e)) self.assertEqual(d!=e, not(type(d)==type(e) and list(d)==list(e))) args = map(deque, ('', 'a', 'b', 'ab', 'ba', 'abc', 'xba', 'xabc', 'cba')) for x in args: for y in args: self.assertEqual(x == y, list(x) == list(y), (x,y)) self.assertEqual(x != y, list(x) != list(y), (x,y)) self.assertEqual(x < y, list(x) < list(y), (x,y)) self.assertEqual(x <= y, list(x) <= list(y), (x,y)) self.assertEqual(x > y, list(x) > list(y), (x,y)) self.assertEqual(x >= y, list(x) >= list(y), (x,y)) def test_contains(self): n = 200 d = deque(range(n)) for i in range(n): self.assertTrue(i in d) self.assertTrue((n+1) not in d) # Test detection of mutation during iteration d = deque(range(n)) d[n//2] = MutateCmp(d, False) with self.assertRaises(RuntimeError): n in d # Test detection of comparison exceptions d = deque(range(n)) d[n//2] = BadCmp() with self.assertRaises(RuntimeError): n in d def test_extend(self): d = deque('a') self.assertRaises(TypeError, d.extend, 1) d.extend('bcd') self.assertEqual(list(d), list('abcd')) d.extend(d) self.assertEqual(list(d), list('abcdabcd')) def test_add(self): d = deque() e = deque('abc') f = deque('def') self.assertEqual(d + d, deque()) self.assertEqual(e + f, deque('abcdef')) self.assertEqual(e + e, deque('abcabc')) self.assertEqual(e + d, deque('abc')) self.assertEqual(d + e, deque('abc')) self.assertIsNot(d + d, deque()) self.assertIsNot(e + d, deque('abc')) self.assertIsNot(d + e, deque('abc')) g = deque('abcdef', maxlen=4) h = deque('gh') self.assertEqual(g + h, deque('efgh')) with self.assertRaises(TypeError): deque('abc') + 'def' def test_iadd(self): d = deque('a') d += 'bcd' self.assertEqual(list(d), list('abcd')) d += d self.assertEqual(list(d), list('abcdabcd')) def test_extendleft(self): d = deque('a') self.assertRaises(TypeError, d.extendleft, 1) d.extendleft('bcd') self.assertEqual(list(d), list(reversed('abcd'))) d.extendleft(d) self.assertEqual(list(d), list('abcddcba')) d = deque() d.extendleft(range(1000)) self.assertEqual(list(d), list(reversed(range(1000)))) self.assertRaises(SyntaxError, d.extendleft, fail()) def test_getitem(self): n = 200 d = deque(range(n)) l = list(range(n)) for i in range(n): d.popleft() l.pop(0) if random.random() < 0.5: d.append(i) l.append(i) for j in range(1-len(l), len(l)): assert d[j] == l[j] d = deque('superman') self.assertEqual(d[0], 's') self.assertEqual(d[-1], 'n') d = deque() self.assertRaises(IndexError, d.__getitem__, 0) self.assertRaises(IndexError, d.__getitem__, -1) def test_index(self): for n in 1, 2, 30, 40, 200: d = deque(range(n)) for i in range(n): self.assertEqual(d.index(i), i) with self.assertRaises(ValueError): d.index(n+1) # Test detection of mutation during iteration d = deque(range(n)) d[n//2] = MutateCmp(d, False) with self.assertRaises(RuntimeError): d.index(n) # Test detection of comparison exceptions d = deque(range(n)) d[n//2] = BadCmp() with self.assertRaises(RuntimeError): d.index(n) # Test start and stop arguments behavior matches list.index() elements = 'ABCDEFGHI' nonelement = 'Z' d = deque(elements * 2) s = list(elements * 2) for start in range(-5 - len(s)*2, 5 + len(s) * 2): for stop in range(-5 - len(s)*2, 5 + len(s) * 2): for element in elements + 'Z': try: target = s.index(element, start, stop) except ValueError: with self.assertRaises(ValueError): d.index(element, start, stop) else: self.assertEqual(d.index(element, start, stop), target) def test_index_bug_24913(self): d = deque('A' * 3) with self.assertRaises(ValueError): i = d.index("Hello world", 0, 4) def test_insert(self): # Test to make sure insert behaves like lists elements = 'ABCDEFGHI' for i in range(-5 - len(elements)*2, 5 + len(elements) * 2): d = deque('ABCDEFGHI') s = list('ABCDEFGHI') d.insert(i, 'Z') s.insert(i, 'Z') self.assertEqual(list(d), s) def test_insert_bug_26194(self): data = 'ABC' d = deque(data, maxlen=len(data)) with self.assertRaises(IndexError): d.insert(2, None) elements = 'ABCDEFGHI' for i in range(-len(elements), len(elements)): d = deque(elements, maxlen=len(elements)+1) d.insert(i, 'Z') if i >= 0: self.assertEqual(d[i], 'Z') else: self.assertEqual(d[i-1], 'Z') def test_imul(self): for n in (-10, -1, 0, 1, 2, 10, 1000): d = deque() d *= n self.assertEqual(d, deque()) self.assertIsNone(d.maxlen) for n in (-10, -1, 0, 1, 2, 10, 1000): d = deque('a') d *= n self.assertEqual(d, deque('a' * n)) self.assertIsNone(d.maxlen) for n in (-10, -1, 0, 1, 2, 10, 499, 500, 501, 1000): d = deque('a', 500) d *= n self.assertEqual(d, deque('a' * min(n, 500))) self.assertEqual(d.maxlen, 500) for n in (-10, -1, 0, 1, 2, 10, 1000): d = deque('abcdef') d *= n self.assertEqual(d, deque('abcdef' * n)) self.assertIsNone(d.maxlen) for n in (-10, -1, 0, 1, 2, 10, 499, 500, 501, 1000): d = deque('abcdef', 500) d *= n self.assertEqual(d, deque(('abcdef' * n)[-500:])) self.assertEqual(d.maxlen, 500) def test_mul(self): d = deque('abc') self.assertEqual(d * -5, deque()) self.assertEqual(d * 0, deque()) self.assertEqual(d * 1, deque('abc')) self.assertEqual(d * 2, deque('abcabc')) self.assertEqual(d * 3, deque('abcabcabc')) self.assertIsNot(d * 1, d) self.assertEqual(deque() * 0, deque()) self.assertEqual(deque() * 1, deque()) self.assertEqual(deque() * 5, deque()) self.assertEqual(-5 * d, deque()) self.assertEqual(0 * d, deque()) self.assertEqual(1 * d, deque('abc')) self.assertEqual(2 * d, deque('abcabc')) self.assertEqual(3 * d, deque('abcabcabc')) d = deque('abc', maxlen=5) self.assertEqual(d * -5, deque()) self.assertEqual(d * 0, deque()) self.assertEqual(d * 1, deque('abc')) self.assertEqual(d * 2, deque('bcabc')) self.assertEqual(d * 30, deque('bcabc')) def test_setitem(self): n = 200 d = deque(range(n)) for i in range(n): d[i] = 10 * i self.assertEqual(list(d), [10*i for i in range(n)]) l = list(d) for i in range(1-n, 0, -1): d[i] = 7*i l[i] = 7*i self.assertEqual(list(d), l) def test_delitem(self): n = 500 # O(n**2) test, don't make this too big d = deque(range(n)) self.assertRaises(IndexError, d.__delitem__, -n-1) self.assertRaises(IndexError, d.__delitem__, n) for i in range(n): self.assertEqual(len(d), n-i) j = random.randrange(-len(d), len(d)) val = d[j] self.assertIn(val, d) del d[j] self.assertNotIn(val, d) self.assertEqual(len(d), 0) def test_reverse(self): n = 500 # O(n**2) test, don't make this too big data = [random.random() for i in range(n)] for i in range(n): d = deque(data[:i]) r = d.reverse() self.assertEqual(list(d), list(reversed(data[:i]))) self.assertIs(r, None) d.reverse() self.assertEqual(list(d), data[:i]) self.assertRaises(TypeError, d.reverse, 1) # Arity is zero def test_rotate(self): s = tuple('abcde') n = len(s) d = deque(s) d.rotate(1) # verify rot(1) self.assertEqual(''.join(d), 'eabcd') d = deque(s) d.rotate(-1) # verify rot(-1) self.assertEqual(''.join(d), 'bcdea') d.rotate() # check default to 1 self.assertEqual(tuple(d), s) for i in range(n*3): d = deque(s) e = deque(d) d.rotate(i) # check vs. rot(1) n times for j in range(i): e.rotate(1) self.assertEqual(tuple(d), tuple(e)) d.rotate(-i) # check that it works in reverse self.assertEqual(tuple(d), s) e.rotate(n-i) # check that it wraps forward self.assertEqual(tuple(e), s) for i in range(n*3): d = deque(s) e = deque(d) d.rotate(-i) for j in range(i): e.rotate(-1) # check vs. rot(-1) n times self.assertEqual(tuple(d), tuple(e)) d.rotate(i) # check that it works in reverse self.assertEqual(tuple(d), s) e.rotate(i-n) # check that it wraps backaround self.assertEqual(tuple(e), s) d = deque(s) e = deque(s) e.rotate(BIG+17) # verify on long series of rotates dr = d.rotate for i in range(BIG+17): dr() self.assertEqual(tuple(d), tuple(e)) self.assertRaises(TypeError, d.rotate, 'x') # Wrong arg type self.assertRaises(TypeError, d.rotate, 1, 10) # Too many args d = deque() d.rotate() # rotate an empty deque self.assertEqual(d, deque()) def test_len(self): d = deque('ab') self.assertEqual(len(d), 2) d.popleft() self.assertEqual(len(d), 1) d.pop() self.assertEqual(len(d), 0) self.assertRaises(IndexError, d.pop) self.assertEqual(len(d), 0) d.append('c') self.assertEqual(len(d), 1) d.appendleft('d') self.assertEqual(len(d), 2) d.clear() self.assertEqual(len(d), 0) def test_underflow(self): d = deque() self.assertRaises(IndexError, d.pop) self.assertRaises(IndexError, d.popleft) def test_clear(self): d = deque(range(100)) self.assertEqual(len(d), 100) d.clear() self.assertEqual(len(d), 0) self.assertEqual(list(d), []) d.clear() # clear an empty deque self.assertEqual(list(d), []) def test_remove(self): d = deque('abcdefghcij') d.remove('c') self.assertEqual(d, deque('abdefghcij')) d.remove('c') self.assertEqual(d, deque('abdefghij')) self.assertRaises(ValueError, d.remove, 'c') self.assertEqual(d, deque('abdefghij')) # Handle comparison errors d = deque(['a', 'b', BadCmp(), 'c']) e = deque(d) self.assertRaises(RuntimeError, d.remove, 'c') for x, y in zip(d, e): # verify that original order and values are retained. self.assertTrue(x is y) # Handle evil mutator for match in (True, False): d = deque(['ab']) d.extend([MutateCmp(d, match), 'c']) self.assertRaises(IndexError, d.remove, 'c') self.assertEqual(d, deque()) def test_repr(self): d = deque(range(200)) e = eval(repr(d)) self.assertEqual(list(d), list(e)) d.append(d) self.assertIn('...', repr(d)) def test_print(self): d = deque(range(200)) d.append(d) try: support.unlink(support.TESTFN) fo = open(support.TESTFN, "w") print(d, file=fo, end='') fo.close() fo = open(support.TESTFN, "r") self.assertEqual(fo.read(), repr(d)) finally: fo.close() support.unlink(support.TESTFN) def test_init(self): self.assertRaises(TypeError, deque, 'abc', 2, 3); self.assertRaises(TypeError, deque, 1); def test_hash(self): self.assertRaises(TypeError, hash, deque('abc')) def test_long_steadystate_queue_popleft(self): for size in (0, 1, 2, 100, 1000): d = deque(range(size)) append, pop = d.append, d.popleft for i in range(size, BIG): append(i) x = pop() if x != i - size: self.assertEqual(x, i-size) self.assertEqual(list(d), list(range(BIG-size, BIG))) def test_long_steadystate_queue_popright(self): for size in (0, 1, 2, 100, 1000): d = deque(reversed(range(size))) append, pop = d.appendleft, d.pop for i in range(size, BIG): append(i) x = pop() if x != i - size: self.assertEqual(x, i-size) self.assertEqual(list(reversed(list(d))), list(range(BIG-size, BIG))) def test_big_queue_popleft(self): pass d = deque() append, pop = d.append, d.popleft for i in range(BIG): append(i) for i in range(BIG): x = pop() if x != i: self.assertEqual(x, i) def test_big_queue_popright(self): d = deque() append, pop = d.appendleft, d.pop for i in range(BIG): append(i) for i in range(BIG): x = pop() if x != i: self.assertEqual(x, i) def test_big_stack_right(self): d = deque() append, pop = d.append, d.pop for i in range(BIG): append(i) for i in reversed(range(BIG)): x = pop() if x != i: self.assertEqual(x, i) self.assertEqual(len(d), 0) def test_big_stack_left(self): d = deque() append, pop = d.appendleft, d.popleft for i in range(BIG): append(i) for i in reversed(range(BIG)): x = pop() if x != i: self.assertEqual(x, i) self.assertEqual(len(d), 0) def test_roundtrip_iter_init(self): d = deque(range(200)) e = deque(d) self.assertNotEqual(id(d), id(e)) self.assertEqual(list(d), list(e)) def test_pickle(self): for d in deque(range(200)), deque(range(200), 100): for i in range(pickle.HIGHEST_PROTOCOL + 1): s = pickle.dumps(d, i) e = pickle.loads(s) self.assertNotEqual(id(e), id(d)) self.assertEqual(list(e), list(d)) self.assertEqual(e.maxlen, d.maxlen) def test_pickle_recursive(self): for d in deque('abc'), deque('abc', 3): d.append(d) for i in range(pickle.HIGHEST_PROTOCOL + 1): e = pickle.loads(pickle.dumps(d, i)) self.assertNotEqual(id(e), id(d)) self.assertEqual(id(e[-1]), id(e)) self.assertEqual(e.maxlen, d.maxlen) def test_iterator_pickle(self): orig = deque(range(200)) data = [i*1.01 for i in orig] for proto in range(pickle.HIGHEST_PROTOCOL + 1): # initial iterator itorg = iter(orig) dump = pickle.dumps((itorg, orig), proto) it, d = pickle.loads(dump) for i, x in enumerate(data): d[i] = x self.assertEqual(type(it), type(itorg)) self.assertEqual(list(it), data) # running iterator next(itorg) dump = pickle.dumps((itorg, orig), proto) it, d = pickle.loads(dump) for i, x in enumerate(data): d[i] = x self.assertEqual(type(it), type(itorg)) self.assertEqual(list(it), data[1:]) # empty iterator for i in range(1, len(data)): next(itorg) dump = pickle.dumps((itorg, orig), proto) it, d = pickle.loads(dump) for i, x in enumerate(data): d[i] = x self.assertEqual(type(it), type(itorg)) self.assertEqual(list(it), []) # exhausted iterator self.assertRaises(StopIteration, next, itorg) dump = pickle.dumps((itorg, orig), proto) it, d = pickle.loads(dump) for i, x in enumerate(data): d[i] = x self.assertEqual(type(it), type(itorg)) self.assertEqual(list(it), []) def test_deepcopy(self): mut = [10] d = deque([mut]) e = copy.deepcopy(d) self.assertEqual(list(d), list(e)) mut[0] = 11 self.assertNotEqual(id(d), id(e)) self.assertNotEqual(list(d), list(e)) def test_copy(self): mut = [10] d = deque([mut]) e = copy.copy(d) self.assertEqual(list(d), list(e)) mut[0] = 11 self.assertNotEqual(id(d), id(e)) self.assertEqual(list(d), list(e)) for i in range(5): for maxlen in range(-1, 6): s = [random.random() for j in range(i)] d = deque(s) if maxlen == -1 else deque(s, maxlen) e = d.copy() self.assertEqual(d, e) self.assertEqual(d.maxlen, e.maxlen) self.assertTrue(all(x is y for x, y in zip(d, e))) def test_copy_method(self): mut = [10] d = deque([mut]) e = d.copy() self.assertEqual(list(d), list(e)) mut[0] = 11 self.assertNotEqual(id(d), id(e)) self.assertEqual(list(d), list(e)) def test_reversed(self): for s in ('abcd', range(2000)): self.assertEqual(list(reversed(deque(s))), list(reversed(s))) def test_reversed_new(self): klass = type(reversed(deque())) for s in ('abcd', range(2000)): self.assertEqual(list(klass(deque(s))), list(reversed(s))) def test_gc_doesnt_blowup(self): import gc # This used to assert-fail in deque_traverse() under a debug # build, or run wild with a NULL pointer in a release build. d = deque() for i in range(100): d.append(1) gc.collect() def test_container_iterator(self): # Bug #3680: tp_traverse was not implemented for deque iterator objects class C(object): pass for i in range(2): obj = C() ref = weakref.ref(obj) if i == 0: container = deque([obj, 1]) else: container = reversed(deque([obj, 1])) obj.x = iter(container) del obj, container gc.collect() self.assertTrue(ref() is None, "Cycle was not collected") check_sizeof = support.check_sizeof @support.cpython_only def test_sizeof(self): BLOCKLEN = 64 basesize = support.calcvobjsize('2P4nP') blocksize = struct.calcsize('P%dPP' % BLOCKLEN) self.assertEqual(object.__sizeof__(deque()), basesize) check = self.check_sizeof check(deque(), basesize + blocksize) check(deque('a'), basesize + blocksize) check(deque('a' * (BLOCKLEN - 1)), basesize + blocksize) check(deque('a' * BLOCKLEN), basesize + 2 * blocksize) check(deque('a' * (42 * BLOCKLEN)), basesize + 43 * blocksize) class TestVariousIteratorArgs(unittest.TestCase): def test_constructor(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (seq_tests.Sequence, seq_tests.IterFunc, seq_tests.IterGen, seq_tests.IterFuncStop, seq_tests.itermulti, seq_tests.iterfunc): self.assertEqual(list(deque(g(s))), list(g(s))) self.assertRaises(TypeError, deque, seq_tests.IterNextOnly(s)) self.assertRaises(TypeError, deque, seq_tests.IterNoNext(s)) self.assertRaises(ZeroDivisionError, deque, seq_tests.IterGenExc(s)) def test_iter_with_altered_data(self): d = deque('abcdefg') it = iter(d) d.pop() self.assertRaises(RuntimeError, next, it) def test_runtime_error_on_empty_deque(self): d = deque() it = iter(d) d.append(10) self.assertRaises(RuntimeError, next, it) class Deque(deque): pass class DequeWithBadIter(deque): def __iter__(self): raise TypeError class TestSubclass(unittest.TestCase): def test_basics(self): d = Deque(range(25)) d.__init__(range(200)) for i in range(200, 400): d.append(i) for i in reversed(range(-200, 0)): d.appendleft(i) self.assertEqual(list(d), list(range(-200, 400))) self.assertEqual(len(d), 600) left = [d.popleft() for i in range(250)] self.assertEqual(left, list(range(-200, 50))) self.assertEqual(list(d), list(range(50, 400))) right = [d.pop() for i in range(250)] right.reverse() self.assertEqual(right, list(range(150, 400))) self.assertEqual(list(d), list(range(50, 150))) d.clear() self.assertEqual(len(d), 0) def test_copy_pickle(self): d = Deque('abc') e = d.__copy__() self.assertEqual(type(d), type(e)) self.assertEqual(list(d), list(e)) e = Deque(d) self.assertEqual(type(d), type(e)) self.assertEqual(list(d), list(e)) for proto in range(pickle.HIGHEST_PROTOCOL + 1): s = pickle.dumps(d, proto) e = pickle.loads(s) self.assertNotEqual(id(d), id(e)) self.assertEqual(type(d), type(e)) self.assertEqual(list(d), list(e)) d = Deque('abcde', maxlen=4) e = d.__copy__() self.assertEqual(type(d), type(e)) self.assertEqual(list(d), list(e)) e = Deque(d) self.assertEqual(type(d), type(e)) self.assertEqual(list(d), list(e)) for proto in range(pickle.HIGHEST_PROTOCOL + 1): s = pickle.dumps(d, proto) e = pickle.loads(s) self.assertNotEqual(id(d), id(e)) self.assertEqual(type(d), type(e)) self.assertEqual(list(d), list(e)) def test_pickle_recursive(self): for proto in range(pickle.HIGHEST_PROTOCOL + 1): for d in Deque('abc'), Deque('abc', 3): d.append(d) e = pickle.loads(pickle.dumps(d, proto)) self.assertNotEqual(id(e), id(d)) self.assertEqual(type(e), type(d)) self.assertEqual(e.maxlen, d.maxlen) dd = d.pop() ee = e.pop() self.assertEqual(id(ee), id(e)) self.assertEqual(e, d) d.x = d e = pickle.loads(pickle.dumps(d, proto)) self.assertEqual(id(e.x), id(e)) for d in DequeWithBadIter('abc'), DequeWithBadIter('abc', 2): self.assertRaises(TypeError, pickle.dumps, d, proto) def test_weakref(self): d = deque('gallahad') p = weakref.proxy(d) self.assertEqual(str(p), str(d)) d = None self.assertRaises(ReferenceError, str, p) def test_strange_subclass(self): class X(deque): def __iter__(self): return iter([]) d1 = X([1,2,3]) d2 = X([4,5,6]) d1 == d2 # not clear if this is supposed to be True or False, # but it used to give a SystemError class SubclassWithKwargs(deque): def __init__(self, newarg=1): deque.__init__(self) class TestSubclassWithKwargs(unittest.TestCase): def test_subclass_with_kwargs(self): # SF bug #1486663 -- this used to erroneously raise a TypeError SubclassWithKwargs(newarg=1) class TestSequence(seq_tests.CommonTest): type2test = deque def test_getitem(self): # For now, bypass tests that require slicing pass def test_getslice(self): # For now, bypass tests that require slicing pass def test_subscript(self): # For now, bypass tests that require slicing pass def test_free_after_iterating(self): # For now, bypass tests that require slicing self.skipTest("Exhausted deque iterator doesn't free a deque") #============================================================================== libreftest = """ Example from the Library Reference: Doc/lib/libcollections.tex >>> from collections import deque >>> d = deque('ghi') # make a new deque with three items >>> for elem in d: # iterate over the deque's elements ... print(elem.upper()) G H I >>> d.append('j') # add a new entry to the right side >>> d.appendleft('f') # add a new entry to the left side >>> d # show the representation of the deque deque(['f', 'g', 'h', 'i', 'j']) >>> d.pop() # return and remove the rightmost item 'j' >>> d.popleft() # return and remove the leftmost item 'f' >>> list(d) # list the contents of the deque ['g', 'h', 'i'] >>> d[0] # peek at leftmost item 'g' >>> d[-1] # peek at rightmost item 'i' >>> list(reversed(d)) # list the contents of a deque in reverse ['i', 'h', 'g'] >>> 'h' in d # search the deque True >>> d.extend('jkl') # add multiple elements at once >>> d deque(['g', 'h', 'i', 'j', 'k', 'l']) >>> d.rotate(1) # right rotation >>> d deque(['l', 'g', 'h', 'i', 'j', 'k']) >>> d.rotate(-1) # left rotation >>> d deque(['g', 'h', 'i', 'j', 'k', 'l']) >>> deque(reversed(d)) # make a new deque in reverse order deque(['l', 'k', 'j', 'i', 'h', 'g']) >>> d.clear() # empty the deque >>> d.pop() # cannot pop from an empty deque Traceback (most recent call last): File "", line 1, in -toplevel- d.pop() IndexError: pop from an empty deque >>> d.extendleft('abc') # extendleft() reverses the input order >>> d deque(['c', 'b', 'a']) >>> def delete_nth(d, n): ... d.rotate(-n) ... d.popleft() ... d.rotate(n) ... >>> d = deque('abcdef') >>> delete_nth(d, 2) # remove the entry at d[2] >>> d deque(['a', 'b', 'd', 'e', 'f']) >>> def roundrobin(*iterables): ... pending = deque(iter(i) for i in iterables) ... while pending: ... task = pending.popleft() ... try: ... yield next(task) ... except StopIteration: ... continue ... pending.append(task) ... >>> for value in roundrobin('abc', 'd', 'efgh'): ... print(value) ... a d e b f c g h >>> def maketree(iterable): ... d = deque(iterable) ... while len(d) > 1: ... pair = [d.popleft(), d.popleft()] ... d.append(pair) ... return list(d) ... >>> print(maketree('abcdefgh')) [[[['a', 'b'], ['c', 'd']], [['e', 'f'], ['g', 'h']]]] """ #============================================================================== __test__ = {'libreftest' : libreftest} def test_main(verbose=None): import sys test_classes = ( TestBasic, TestVariousIteratorArgs, TestSubclass, TestSubclassWithKwargs, TestSequence, ) support.run_unittest(*test_classes) # verify reference counting if verbose and hasattr(sys, "gettotalrefcount"): import gc counts = [None] * 5 for i in range(len(counts)): support.run_unittest(*test_classes) gc.collect() counts[i] = sys.gettotalrefcount() print(counts) # doctests from test import test_deque support.run_doctest(test_deque, verbose) if __name__ == "__main__": test_main(verbose=True) from decimal import Decimal from django.template.defaultfilters import pluralize from django.test import SimpleTestCase class FunctionTests(SimpleTestCase): def test_integers(self): self.assertEqual(pluralize(1), '') self.assertEqual(pluralize(0), 's') self.assertEqual(pluralize(2), 's') def test_floats(self): self.assertEqual(pluralize(0.5), 's') self.assertEqual(pluralize(1.5), 's') def test_decimals(self): self.assertEqual(pluralize(Decimal(1)), '') self.assertEqual(pluralize(Decimal(0)), 's') self.assertEqual(pluralize(Decimal(2)), 's') def test_lists(self): self.assertEqual(pluralize([1]), '') self.assertEqual(pluralize([]), 's') self.assertEqual(pluralize([1, 2, 3]), 's') def test_suffixes(self): self.assertEqual(pluralize(1, 'es'), '') self.assertEqual(pluralize(0, 'es'), 'es') self.assertEqual(pluralize(2, 'es'), 'es') self.assertEqual(pluralize(1, 'y,ies'), 'y') self.assertEqual(pluralize(0, 'y,ies'), 'ies') self.assertEqual(pluralize(2, 'y,ies'), 'ies') self.assertEqual(pluralize(0, 'y,ies,error'), '') from __future__ import unicode_literals import inspect import functools from functools import partial as bind from . import sexp from .sexp import key, sym import collections # ############################# DATA STRUCTURES ############################## class ActiveRecord(object): @classmethod def parse_list(cls, raw): if not raw: return [] if type(raw[0]) == type(key(":key")): m = sexp.sexp_to_key_map(raw) field = ":" + cls.__name__.lower() + "s" return [cls.parse(raw) for raw in (m[field] if field in m else [])] else: return [cls.parse(raw) for raw in raw] @classmethod def parse(cls, raw): """Parse a data type from a raw data structure""" if not raw: return None value_map = sexp.sexp_to_key_map(raw) self = cls() populate = getattr(self, "populate") populate(value_map) return self def unparse(self): raise Exception("abstract method: ActiveRecord.unparse - on " + str(this)) def __str__(self): return str(self.__dict__) class Note(ActiveRecord): def populate(self, m): self.message = m[":msg"] self.file_name = m[":file"] self.severity = m[":severity"] self.start = m[":beg"] self.end = m[":end"] self.line = m[":line"] self.col = m[":col"] class CompletionInfoList(ActiveRecord): @classmethod def create(cls, prefix, completions): self = CompletionInfoList() self.prefix = prefix self.completions = completions return self def populate(self, m): self.prefix = m[":prefix"] self.completions = CompletionInfo.parse_list(m[":completions"]) class CompletionSignature(ActiveRecord): """A completion signature consists of the parameter 'sections' which is a list of name to type) and a 'result' type. n.b. these are user readable rather than programmtic for presentation to the user. # sections: List[List[(String, String)]], # result: String """ def __init__(self, sections, result): self.sections = sections self.result = result @classmethod def from_raw(cls, data): # this hacky is all because () in both false and and empty list # the parser cannot tell, so hack it until we move to jerk sections_raw = data[0] if(data[0] is not False) else [] sections = [] for s in sections_raw: if not s: sections.append([]) else: sections.append(s) result = data[1] return CompletionSignature(sections, result) def __repr__(self): return 'CompletionSignature("{str(self.sections)}", "{self.result}")'.format(self=self) class CompletionInfo(ActiveRecord): def populate(self, m): self.name = m[":name"] self.signature = CompletionSignature.from_raw(m[":type-sig"]) self.is_callable = bool(m[":is-callable"]) if ":is-callable" in m else False self.type_id = m[":type-id"] self.to_insert = m[":to-insert"] if ":to-insert" in m else None def __repr__(self): return 'CompletionInfo("{self.name}", "{self.signature}", {self.is_callable}, {self.type_id}, ...)'.format( self=self) class SourcePosition(ActiveRecord): def populate(self, m): # [:type, line, :file, # '/workspace/ensime-test-project/.ensime_cache/dep-src/source-jars/java/io/PrintStream.java', :line, 697] # [:type, offset, :file, '/workspace/ensime-test-project/src/main/scala/Foo.scala', :offset, 150] self.type_str = str(m[":type"]) self.file_name = m[":file"] if ":file" in m else None self.line = m[":line"] if ":line" in m else None self.offset = m[":offset"] if ":offset" in m else None self.is_line = self.type_str == "line" self.is_offset = self.type_str == "offset" self.is_empty = self.type_str == "empty" class SymbolInfo(ActiveRecord): def populate(self, m): self.name = m[":name"] self.type = TypeInfo.parse(m[":type"]) self.decl_pos = SourcePosition.parse(m[":decl-pos"]) if ":decl-pos" in m else None self.is_callable = bool(m[":is-callable"]) if ":is-callable" in m else False self.owner_type_id = m[":owner-type-id"] if ":owner-type-id" in m else None class TypeInfo(ActiveRecord): def populate(self, m): self.name = m[":name"] self.type_id = m[":type-id"] isArrowType = bool(m[":arrow-type"]) if ":arrow-type" in m else False if isArrowType: self.arrow_type = True self.result_type = TypeInfo.parse(m[":result-type"]) self.param_sections = ParamSectionInfo.parse_list(m[":param-sections"]) if ":param-sections" in m else [] else: # Basic type self.arrow_type = False self.full_name = m[":full-name"] if ":full-name" in m else None self.decl_as = m[":decl-as"] if ":decl-as" in m else None self.decl_pos = SourcePosition.parse(m[":pos"]) if ":pos" in m else None self.type_args = TypeInfo.parse_list(m[":type-args"]) if ":type-args" in m else [] self.outer_type_id = m[":outer-type-id"] if ":outer-type-id" in m else None self.members = Member.parse_list(m[":members"]) if ":members" in m else [] class SymbolSearchResults(ActiveRecord): # we override parse here because raw contains a List of SymbolSearchResult # typehe ActiveRecord parse method expects raw to contain an object at this point # and calls sexp_to_key_map @classmethod def parse(cls, raw): if not raw: return None self = cls() self.populate(raw) return self def populate(self, m): self.results = SymbolSearchResult.parse_list(m) class SymbolSearchResult(ActiveRecord): def populate(self, m): self.name = m[":name"] self.local_name = m[":local-name"] self.decl_as = m[":decl-as"] if ":decl-as" in m else None self.pos = SourcePosition.parse(m[":pos"]) if ":pos" in m else None class RefactorResult(ActiveRecord): def populate(self, m): self.status = str(m[":status"]) self.procedure_id = m[":procedure-id"] if self.status == "success": self.done = True pass elif self.status == "failure": self.done = False self.reason = m[":reason"] class Member(ActiveRecord): def populate(self, m): pass class ParamSectionInfo(ActiveRecord): def populate(self, m): self.is_implicit = bool(m[":is-implicit"]) if ":is-implicit" in m else False if ":params" in m and m[":params"]: keyed_params = [{':param-name': p[0], ':param-type': p[1]} for p in m[":params"]] self.params = [Param(kp) for kp in keyed_params] else: self.params = [] class Param: def __init__(self, m): self.param_name = m[":param-name"] self.param_type = TypeInfo.parse(m[":param-type"]) class DebugEvent(ActiveRecord): def populate(self, m): self.type = str(m[":type"]) if self.type == "output": self.body = m[":body"] elif self.type == "step": self.thread_id = m[":thread-id"] self.thread_name = m[":thread-name"] self.file_name = m[":file"] self.line = m[":line"] elif self.type == "breakpoint": self.thread_id = m[":thread-id"] self.thread_name = m[":thread-name"] self.file_name = m[":file"] self.line = m[":line"] elif self.type == "death": pass elif self.type == "start": pass elif self.type == "disconnect": pass elif self.type == "exception": self.exception_id = m[":exception"] self.thread_id = m[":thread-id"] self.thread_name = m[":thread-name"] self.file_name = m[":file"] self.line = m[":line"] elif self.type == "threadStart": self.thread_id = m[":thread-id"] elif self.type == "threadDeath": self.thread_id = m[":thread-id"] else: raise Exception("unexpected debug event of type " + str(self.type) + ": " + str(m)) class DebugKickoffResult(ActiveRecord): def __bool__(self): return not self.error def populate(self, m): status = m[":status"] if status == "success": self.error = False elif status == "error": self.error = True self.code = m[":error-code"] self.details = m[":details"] else: raise Exception("unexpected status: " + str(status)) class DebugBacktrace(ActiveRecord): def populate(self, m): self.frames = DebugStackFrame.parse_list(m[":frames"]) if ":frames" in m else [] self.thread_id = m[":thread-id"] self.thread_name = m[":thread-name"] class SourceFileInfo(ActiveRecord): def populate(self, m): self.file = m[":file"] self.contents = m[":contents"] if ":contents" in m else None self.contents_in = m[":contents-in"] if ":contents-in" in m else None def __init__(self, file_name, contents=None, contents_in=None): self.file = file_name self.contents = contents self.contents_in = contents_in def unparse(self): base = [key(":file"), self.file] if self.contents is not None: base.extend([key(":contents"), self.contents]) if self.contents_in is not None: base.extend([key(":contents-in"), self.contents_in]) return [base] class DebugStackFrame(ActiveRecord): def populate(self, m): self.index = m[":index"] self.locals = DebugStackLocal.parse_list(m[":locals"]) if ":locals" in m else [] self.num_args = m[":num-args"] self.class_name = m[":class-name"] self.method_name = m[":method-name"] self.pc_location = DebugSourcePosition.parse(m[":pc-location"]) self.this_object_id = m[":this-object-id"] class DebugSourcePosition(ActiveRecord): def populate(self, m): self.file_name = m[":file"] self.line = m[":line"] class DebugStackLocal(ActiveRecord): def populate(self, m): self.index = m[":index"] self.name = m[":name"] self.summary = m[":summary"] self.type_name = m[":type-name"] class DebugValue(ActiveRecord): def populate(self, m): self.type = m[":val-type"] self.type_name = m[":type-name"] self.length = m[":length"] if ":length" in m else None self.element_type_name = m[":element-type-name"] if ":element-type-name" in m else None self.summary = m[":summary"] if ":summary" in m else None self.object_id = m[":object-id"] if ":object-id" in m else None self.fields = DebugObjectField.parse_list(m[":fields"]) if ":fields" in m else [] if str(self.type) == "null" or str(self.type) == "prim" or str(self.type) == "obj" or str( self.type) == "str" or str(self.type) == "arr": pass else: raise Exception("unexpected debug value of type " + str(self.type) + ": " + str(m)) class DebugObjectField(ActiveRecord): def populate(self, m): self.index = m[":index"] self.name = m[":name"] self.summary = m[":summary"] self.type_name = m[":type-name"] class DebugLocation(ActiveRecord): def populate(self, m): self.type = str(m[":type"]) if self.type == "reference": self.object_id = m[":object-id"] elif self.type == "element": self.object_id = m[":object-id"] self.index = m[":index"] elif self.type == "field": self.object_id = m[":object-id"] self.field = m[":field"] elif self.type == "slot": self.thread_id = m[":thread-id"] self.frame = m[":frame"] self.offset = m[":offset"] else: raise Exception("unexpected debug location of type " + str(self.type) + ": " + str(m)) class DebugLocationReference(DebugLocation): def __init__(self, object_id): self.object_id = object_id def unparse(self): return [[key(":type"), sym("reference"), key(":object-id"), self.object_id]] class DebugLocationElement(DebugLocation): def __init__(self, object_id, index): self.object_id = object_id self.index = index def unparse(self): return [[key(":type"), sym("element"), key(":object-id"), self.object_id, key(":index"), self.index]] class DebugLocationField(DebugLocation): def __init__(self, object_id, field): self.object_id = object_id self.field = field def unparse(self): return [[key(":type"), sym("field"), key(":object-id"), self.object_id, key(":field"), self.field]] class DebugLocationSlot(DebugLocation): def __init__(self, thread_id, frame, offset): self.thread_id = thread_id self.frame = frame self.offset = offset def unparse(self): return [ [key(":type"), sym("slot"), key(":thread-id"), self.thread_id, key(":frame"), self.frame, key(":offset"), self.offset]] # ############################# REMOTE PROCEDURES ############################## def _mk_req(func, *args, **kwargs): if kwargs: raise Exception("kwargs are not supported by the RPC proxy") req = [] def translate_name(name): if name.startswith("_"): name = name[1:] name = name.replace("_", "-") return name req.append(sym("swank:" + translate_name(func.__name__))) (spec_args, spec_varargs, spec_keywords, spec_defaults) = inspect.getargspec(func) if spec_varargs: raise Exception("varargs in signature of " + str(func)) if spec_keywords: raise Exception("keywords in signature of " + str(func)) if len(spec_args) != len(args): if len(args) < len(spec_args) and len(args) + len(spec_defaults) >= len(spec_args): # everything is fine. we can use default values for parameters to provide arguments to the call args += spec_defaults[len(spec_defaults) - len(spec_args) + len(args):] else: preamble = "argc mismatch in signature of " + str(func) + ": " expected = "expected " + str(len(spec_args)) + " args " + str(spec_args) + ", " actual = "actual " + str(len(args)) + " args " + str(args) + " with types " + str([type(a) for a in args]) raise Exception(preamble + expected + actual) for arg in args[1:]: # strip off self if hasattr(arg, "unparse"): argreq = arg.unparse() else: argreq = [arg] req.extend(argreq) return req def async_rpc(*args): parser = args[0] if args else lambda raw: raw def wrapper(func): def wrapped(*args, **kwargs): self = args[0] if isinstance(args[-1], collections.Callable): on_complete = args[-1] args = args[:-1] else: on_complete = None req = _mk_req(func, *args, **kwargs) def callback(payload): data = parser(payload) if on_complete: on_complete(data) self.env.controller.client.async_req(req, callback, call_back_into_ui_thread=True) return wrapped return wrapper def sync_rpc(*args): parser = args[0] if args else lambda raw: raw def wrapper(func): def wrapped(*args, **kwargs): self = args[0] req = _mk_req(func, *args, **kwargs) timeout = self.env.settings.get("timeout_" + func.__name__) raw = self.env.controller.client.sync_req(req, timeout=timeout) return parser(raw) return wrapped return wrapper class Rpc(object): def __init__(self, env): self.env = env @sync_rpc() def shutdown_server(self): pass @async_rpc() def typecheck_file(self, file): pass @async_rpc() def typecheck_all(self): pass @async_rpc() def patch_source(self, file_name, edits): pass @sync_rpc(CompletionInfoList.parse) def completions(self, file_name, position, max_results, case_sensitive, reload_from_disk): pass @async_rpc(TypeInfo.parse) def type_at_point(self, file_name, position): pass @async_rpc(SymbolInfo.parse) def symbol_at_point(self, file_name, position): pass @async_rpc(SymbolInfo.parse) def symbol_by_name(self, symbol, token, t): pass @async_rpc(SymbolSearchResults.parse_list) def import_suggestions(self, file_name, position, type_names, max_results): pass @async_rpc(RefactorResult.parse) def prepare_refactor(self, procedure_id, refactor_type, parameters, require_confirmation): pass @async_rpc() def exec_refactor(self, procedure_id, refactor_type): pass @async_rpc() def debug_set_break(self, file_name, line): pass @async_rpc() def debug_clear_break(self, file_name, line): pass @async_rpc() def debug_clear_all_breaks(self): pass @async_rpc(DebugKickoffResult.parse) def _debug_start(self, command_line): pass @async_rpc(DebugKickoffResult.parse) def _debug_attach(self, host, port): pass def debug_start(self, launch, breakpoints, on_complete=None): def set_breakpoints(breakpoints, status): if status: if breakpoints: self.debug_set_break(breakpoints[0].file_name, breakpoints[0].line, bind(set_breakpoints, breakpoints[1:])) else: if launch.main_class: self._debug_start(launch.command_line, on_complete) elif launch.remote_address: self._debug_attach(launch.remote_host, launch.remote_port, on_complete) else: raise Exception("unsupported launch: " + str(launch)) elif on_complete: on_complete(status) def clear_breakpoints(): def callback(status): if status: set_breakpoints(breakpoints, status) elif on_complete: on_complete(status) self.debug_clear_all_breaks(callback) clear_breakpoints() @async_rpc() def debug_stop(self): pass @async_rpc() def debug_step(self, thread_id): pass @async_rpc() def debug_next(self, thread_id): pass @async_rpc() def debug_continue(self, thread_id): pass @sync_rpc(DebugBacktrace.parse) def debug_backtrace(self, thread_id, first_frame=0, num_frames=-1): pass @sync_rpc(DebugValue.parse) def debug_value(self, debug_location): pass @sync_rpc() def debug_to_string(self, thread_id, debug_location): pass # -*- coding: utf-8 -*- """ pygments.lexers ~~~~~~~~~~~~~~~ Pygments lexers. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import sys import types import fnmatch from os.path import basename from pygments.lexers._mapping import LEXERS from pygments.modeline import get_filetype_from_buffer from pygments.plugin import find_plugin_lexers from pygments.util import ClassNotFound, bytes __all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class', 'guess_lexer'] + LEXERS.keys() _lexer_cache = {} def _load_lexers(module_name): """ Load a lexer (and all others in the module too). """ mod = __import__(module_name, None, None, ['__all__']) for lexer_name in mod.__all__: cls = getattr(mod, lexer_name) _lexer_cache[cls.name] = cls def get_all_lexers(): """ Return a generator of tuples in the form ``(name, aliases, filenames, mimetypes)`` of all know lexers. """ for item in LEXERS.itervalues(): yield item[1:] for lexer in find_plugin_lexers(): yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes def find_lexer_class(name): """ Lookup a lexer class by name. Return None if not found. """ if name in _lexer_cache: return _lexer_cache[name] # lookup builtin lexers for module_name, lname, aliases, _, _ in LEXERS.itervalues(): if name == lname: _load_lexers(module_name) return _lexer_cache[name] # continue with lexers from setuptools entrypoints for cls in find_plugin_lexers(): if cls.name == name: return cls def get_lexer_by_name(_alias, **options): """ Get a lexer by an alias. """ # lookup builtin lexers for module_name, name, aliases, _, _ in LEXERS.itervalues(): if _alias in aliases: if name not in _lexer_cache: _load_lexers(module_name) return _lexer_cache[name](**options) # continue with lexers from setuptools entrypoints for cls in find_plugin_lexers(): if _alias in cls.aliases: return cls(**options) raise ClassNotFound('no lexer for alias %r found' % _alias) def get_lexer_for_filename(_fn, code=None, **options): """ Get a lexer for a filename. If multiple lexers match the filename pattern, use ``analyze_text()`` to figure out which one is more appropriate. """ matches = [] fn = basename(_fn) for modname, name, _, filenames, _ in LEXERS.itervalues(): for filename in filenames: if fnmatch.fnmatch(fn, filename): if name not in _lexer_cache: _load_lexers(modname) matches.append((_lexer_cache[name], filename)) for cls in find_plugin_lexers(): for filename in cls.filenames: if fnmatch.fnmatch(fn, filename): matches.append((cls, filename)) if sys.version_info > (3,) and isinstance(code, bytes): # decode it, since all analyse_text functions expect unicode code = code.decode('latin1') def get_rating(info): cls, filename = info # explicit patterns get a bonus bonus = '*' not in filename and 0.5 or 0 # The class _always_ defines analyse_text because it's included in # the Lexer class. The default implementation returns None which # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py # to find lexers which need it overridden. if code: return cls.analyse_text(code) + bonus return cls.priority + bonus if matches: matches.sort(key=get_rating) #print "Possible lexers, after sort:", matches return matches[-1][0](**options) raise ClassNotFound('no lexer for filename %r found' % _fn) def get_lexer_for_mimetype(_mime, **options): """ Get a lexer for a mimetype. """ for modname, name, _, _, mimetypes in LEXERS.itervalues(): if _mime in mimetypes: if name not in _lexer_cache: _load_lexers(modname) return _lexer_cache[name](**options) for cls in find_plugin_lexers(): if _mime in cls.mimetypes: return cls(**options) raise ClassNotFound('no lexer for mimetype %r found' % _mime) def _iter_lexerclasses(): """ Return an iterator over all lexer classes. """ for key in sorted(LEXERS): module_name, name = LEXERS[key][:2] if name not in _lexer_cache: _load_lexers(module_name) yield _lexer_cache[name] for lexer in find_plugin_lexers(): yield lexer def guess_lexer_for_filename(_fn, _text, **options): """ Lookup all lexers that handle those filenames primary (``filenames``) or secondary (``alias_filenames``). Then run a text analysis for those lexers and choose the best result. usage:: >>> from pygments.lexers import guess_lexer_for_filename >>> guess_lexer_for_filename('hello.html', '<%= @foo %>') >>> guess_lexer_for_filename('hello.html', '

{{ title|e }}

') >>> guess_lexer_for_filename('style.css', 'a { color: }') """ fn = basename(_fn) primary = None matching_lexers = set() for lexer in _iter_lexerclasses(): for filename in lexer.filenames: if fnmatch.fnmatch(fn, filename): matching_lexers.add(lexer) primary = lexer for filename in lexer.alias_filenames: if fnmatch.fnmatch(fn, filename): matching_lexers.add(lexer) if not matching_lexers: raise ClassNotFound('no lexer for filename %r found' % fn) if len(matching_lexers) == 1: return matching_lexers.pop()(**options) result = [] for lexer in matching_lexers: rv = lexer.analyse_text(_text) if rv == 1.0: return lexer(**options) result.append((rv, lexer)) result.sort() if not result[-1][0] and primary is not None: return primary(**options) return result[-1][1](**options) def guess_lexer(_text, **options): """ Guess a lexer by strong distinctions in the text (eg, shebang). """ # try to get a vim modeline first ft = get_filetype_from_buffer(_text) if ft is not None: try: return get_lexer_by_name(ft, **options) except ClassNotFound: pass best_lexer = [0.0, None] for lexer in _iter_lexerclasses(): rv = lexer.analyse_text(_text) if rv == 1.0: return lexer(**options) if rv > best_lexer[0]: best_lexer[:] = (rv, lexer) if not best_lexer[0] or best_lexer[1] is None: raise ClassNotFound('no lexer matching the text found') return best_lexer[1](**options) class _automodule(types.ModuleType): """Automatically import lexers.""" def __getattr__(self, name): info = LEXERS.get(name) if info: _load_lexers(info[0]) cls = _lexer_cache[info[1]] setattr(self, name, cls) return cls raise AttributeError(name) oldmod = sys.modules['pygments.lexers'] newmod = _automodule('pygments.lexers') newmod.__dict__.update(oldmod.__dict__) sys.modules['pygments.lexers'] = newmod del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types #! /usr/bin/env python # -*- coding: utf-8 -*- import codecs try: from setuptools import setup, find_packages, Command except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages, Command long_description = codecs.open("README.rst", "r", "utf-8").read() setup( name="html-tree-diff", version="0.1.2", description="Structure-aware diff for html and xml documents", author="Christian Oudard", author_email="christian.oudard@gmail.com", url="http://github.com/christian-oudard/htmltreediff/", platforms=["any"], license="BSD", packages=find_packages(), scripts=[], zip_safe=False, install_requires=['lxml', 'html5lib'], cmdclass={}, classifiers=[ "Development Status :: 3 - Alpha", "Programming Language :: Python", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Topic :: Text Processing :: Markup :: HTML", "Topic :: Text Processing :: Markup :: XML", ], long_description=long_description, ) # -*- coding: utf-8 -*- """ Offline editing Tests. WFS-T tests need using QGIS Server through qgis_wrapped_server.py. This is an integration test for QGIS Desktop WFS-T provider and QGIS Server WFS-T that check if QGIS offline editing works with a WFS-T endpoint. The test uses testdata/wfs_transactional/wfs_transactional.qgs and three initially empty shapefiles layers with points, lines and polygons. The point layer is used in the test From build dir, run: ctest -R PyQgsOfflineEditingWFS -V .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ from builtins import str __author__ = 'Alessandro Pasotti' __date__ = '05/15/2016' __copyright__ = 'Copyright 2016, The QGIS Project' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import os import sys import re import subprocess from shutil import copytree, rmtree import tempfile from time import sleep from utilities import unitTestDataPath, waitServer from qgis.core import QgsVectorLayer from qgis.testing import ( start_app, unittest, ) from offlineditingtestbase import OfflineTestBase try: QGIS_SERVER_OFFLINE_PORT = os.environ['QGIS_SERVER_OFFLINE_PORT'] except: QGIS_SERVER_OFFLINE_PORT = '0' # Auto qgis_app = start_app() class TestWFST(unittest.TestCase, OfflineTestBase): # To fake the WFS cache! counter = 0 @classmethod def setUpClass(cls): """Run before all tests""" cls.port = QGIS_SERVER_OFFLINE_PORT # Create tmp folder cls.temp_path = tempfile.mkdtemp() cls.testdata_path = cls.temp_path + '/' + 'wfs_transactional' + '/' copytree(unitTestDataPath('wfs_transactional') + '/', cls.temp_path + '/' + 'wfs_transactional') cls.project_path = cls.temp_path + '/' + 'wfs_transactional' + '/' + \ 'wfs_transactional.qgs' assert os.path.exists(cls.project_path), "Project not found: %s" % \ cls.project_path # Clean env just to be sure env_vars = ['QUERY_STRING', 'QGIS_PROJECT_FILE'] for ev in env_vars: try: del os.environ[ev] except KeyError: pass # Clear all test layers cls._clearLayer(cls._getLayer('test_point')) os.environ['QGIS_SERVER_PORT'] = str(cls.port) cls.server_path = os.path.dirname(os.path.realpath(__file__)) + \ '/qgis_wrapped_server.py' @classmethod def tearDownClass(cls): """Run after all tests""" rmtree(cls.temp_path) def setUp(self): """Run before each test.""" self.server = subprocess.Popen([sys.executable, self.server_path], env=os.environ, stdout=subprocess.PIPE) line = self.server.stdout.readline() self.port = int(re.findall(b':(\d+)', line)[0]) assert self.port != 0 # Wait for the server process to start assert waitServer('http://127.0.0.1:%s' % self.port), "Server is not responding!" self._setUp() def tearDown(self): """Run after each test.""" # Clear test layer self._clearLayer(self._getOnlineLayer('test_point')) # Kill the server self.server.terminate() self.server.wait() del self.server # Delete the sqlite db os.unlink(os.path.join(self.temp_path, 'offlineDbFile.sqlite')) self._tearDown() def _getOnlineLayer(self, type_name, layer_name=None): """ Return a new WFS layer, overriding the WFS cache """ if layer_name is None: layer_name = 'wfs_' + type_name parms = { 'srsname': 'EPSG:4326', 'typename': type_name, 'url': 'http://127.0.0.1:%s/%s/?map=%s' % (self.port, self.counter, self.project_path), 'version': 'auto', 'table': '', #'sql': '', } self.counter += 1 uri = ' '.join([("%s='%s'" % (k, v)) for k, v in list(parms.items())]) wfs_layer = QgsVectorLayer(uri, layer_name, 'WFS') assert wfs_layer.isValid() return wfs_layer @classmethod def _getLayer(cls, layer_name): """ Layer factory (return the backend layer), provider specific """ path = cls.testdata_path + layer_name + '.shp' layer = QgsVectorLayer(path, layer_name, "ogr") assert layer.isValid() return layer if __name__ == '__main__': unittest.main() # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from nipype.testing import assert_equal from nipype.interfaces.freesurfer.model import Label2Vol def test_Label2Vol_inputs(): input_map = dict(annot_file=dict(argstr='--annot %s', copyfile=False, mandatory=True, requires=('subject_id', 'hemi'), xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), ), aparc_aseg=dict(argstr='--aparc+aseg', mandatory=True, xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), ), args=dict(argstr='%s', ), environ=dict(nohash=True, usedefault=True, ), fill_thresh=dict(argstr='--fillthresh %.f', ), hemi=dict(argstr='--hemi %s', ), identity=dict(argstr='--identity', xor=('reg_file', 'reg_header', 'identity'), ), ignore_exception=dict(nohash=True, usedefault=True, ), invert_mtx=dict(argstr='--invertmtx', ), label_file=dict(argstr='--label %s...', copyfile=False, mandatory=True, xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), ), label_hit_file=dict(argstr='--hits %s', ), label_voxel_volume=dict(argstr='--labvoxvol %f', ), map_label_stat=dict(argstr='--label-stat %s', ), native_vox2ras=dict(argstr='--native-vox2ras', ), proj=dict(argstr='--proj %s %f %f %f', requires=('subject_id', 'hemi'), ), reg_file=dict(argstr='--reg %s', xor=('reg_file', 'reg_header', 'identity'), ), reg_header=dict(argstr='--regheader %s', xor=('reg_file', 'reg_header', 'identity'), ), seg_file=dict(argstr='--seg %s', copyfile=False, mandatory=True, xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), ), subject_id=dict(argstr='--subject %s', ), subjects_dir=dict(), surface=dict(argstr='--surf %s', ), template_file=dict(argstr='--temp %s', mandatory=True, ), terminal_output=dict(nohash=True, ), vol_label_file=dict(argstr='--o %s', genfile=True, ), ) inputs = Label2Vol.input_spec() for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(inputs.traits()[key], metakey), value def test_Label2Vol_outputs(): output_map = dict(vol_label_file=dict(), ) outputs = Label2Vol.output_spec() for key, metadata in output_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(outputs.traits()[key], metakey), value # The Hazard Library # Copyright (C) 2012-2017 GEM Foundation # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . """ Module :mod:`openquake.hazardlib.source.area` defines :class:`AreaSource`. """ from openquake.hazardlib.source.base import ParametricSeismicSource from openquake.hazardlib.source.point import ( PointSource, angular_distance, KM_TO_DEGREES) from openquake.hazardlib.geo.utils import cross_idl class MultiPointSource(ParametricSeismicSource): """ MultiPointSource class, used to describe point sources with different MFDs and the same rupture_mesh_spacing, magnitude_scaling_relationship, rupture_aspect_ratio, temporal_occurrence_model, upper_seismogenic_depth, lower_seismogenic_depth, nodal_plane_distribution, hypocenter_distribution """ MODIFICATIONS = set(()) RUPTURE_WEIGHT = 1 / 10. def __init__(self, source_id, name, tectonic_region_type, mfd, rupture_mesh_spacing, magnitude_scaling_relationship, rupture_aspect_ratio, temporal_occurrence_model, # point-specific parameters (excluding location) upper_seismogenic_depth, lower_seismogenic_depth, nodal_plane_distribution, hypocenter_distribution, mesh): assert len(mfd) == len(mesh), (len(mfd), len(mesh)) super(MultiPointSource, self).__init__( source_id, name, tectonic_region_type, mfd, rupture_mesh_spacing, magnitude_scaling_relationship, rupture_aspect_ratio, temporal_occurrence_model) self.upper_seismogenic_depth = upper_seismogenic_depth self.lower_seismogenic_depth = lower_seismogenic_depth self.nodal_plane_distribution = nodal_plane_distribution self.hypocenter_distribution = hypocenter_distribution self.mesh = mesh self.max_radius = 0 def __iter__(self): for i, (mfd, point) in enumerate(zip(self.mfd, self.mesh)): name = '%s:%s' % (self.source_id, i) ps = PointSource( name, name, self.tectonic_region_type, mfd, self.rupture_mesh_spacing, self.magnitude_scaling_relationship, self.rupture_aspect_ratio, self.temporal_occurrence_model, self.upper_seismogenic_depth, self.lower_seismogenic_depth, point, self.nodal_plane_distribution, self.hypocenter_distribution) yield ps def iter_ruptures(self): """ Yield the ruptures of the underlying point sources """ for ps in self: for rupture in ps.iter_ruptures(): yield rupture def count_ruptures(self): """ See :meth:`openquake.hazardlib.source.base.BaseSeismicSource.count_ruptures` for description of parameters and return value. """ return (len(self.get_annual_occurrence_rates()) * len(self.nodal_plane_distribution.data) * len(self.hypocenter_distribution.data)) def filter_sites_by_distance_to_source(self, integration_distance, sites): """Do not filter""" return sites def get_rupture_enclosing_polygon(self, dilation=0): """No polygon""" def get_bounding_box(self, maxdist): """ Bounding box containing all points, enlarged by the maximum distance and the maximum rupture projection radius (upper limit). """ maxradius = self._get_max_rupture_projection_radius() min_lon = self.mesh.lons.min() max_lon = self.mesh.lons.max() if cross_idl(min_lon, max_lon): min_lon, max_lon = max_lon, min_lon + 360 min_lat = self.mesh.lats.min() max_lat = self.mesh.lats.max() a1 = (maxdist + maxradius) * KM_TO_DEGREES a2 = max(angular_distance(maxdist + maxradius, min_lat), angular_distance(maxdist + maxradius, max_lat)) return min_lon - a2, min_lat - a1, max_lon + a2, max_lat + a1 _get_rupture_dimensions = PointSource.__dict__['_get_rupture_dimensions'] _get_max_rupture_projection_radius = PointSource.__dict__[ '_get_max_rupture_projection_radius'] #!/bin/env python # -*- coding: utf-8 -*- #要填json坑,前面写的代码,json部分是网上找的,还没有完全理解,尤其是相关的字符串编码没有实践 #抽空了解下从xls文件读取数据的库 #xls -> json -> xml 是我的思路,当然也可以尝试下直接xls -> xml #主要还是比较看重json的应用。有时候感觉看了别人的代码,不自己用另一种方式实现,(即使变得复杂啰嗦)还是别人的代码 #导入模块 import xlrd #这个是系统自带的,如果安装lxml遇到问题可以使用这个 import xml.etree.ElementTree as ET from xml.dom import minidom def read_xls(filename): data = xlrd.open_workbook(filename) table = data.sheet_by_index(0) #通过索引获取xls文件第0个sheet nrows = table.nrows d = {} for i in range(nrows): d[str(i+1)] = table.row_values(i)[1:] #取编号后的数据,以列表形式存在字典对应的值中 return d def write_xml(d): doc = minidom.Document() root = doc.createElement("root") doc.appendChild(root) students = doc.createElement("students") root.appendChild(students) students.appendChild(doc.createComment(' 学生信息表\n "id" : [名字, 数学, 语文, 英文]')) for i in d: d[i][0] = d[i][0].encode('utf-8') #有一种无奈叫做我懒得玩了,python2你是个好人 content = doc.createTextNode(str(d)) students.appendChild(content) f = file("student.xml","w") doc.writexml(f) f.close() def main(): d = read_xls('student.xls') print(d) write_xml(d) if __name__ == '__main__': main() #!/usr/bin/python2 # (C) Copyright IBM Corporation 2004 # All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # on the rights to use, copy, modify, merge, publish, distribute, sub # license, and/or sell copies of the Software, and to permit persons to whom # the Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice (including the next # paragraph) shall be included in all copies or substantial portions of the # Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL # IBM AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # Authors: # Ian Romanick import gl_XML import license import sys, getopt class PrintGlTable(gl_XML.gl_print_base): def __init__(self, es=False): gl_XML.gl_print_base.__init__(self) self.es = es self.header_tag = '_GLAPI_TABLE_H_' self.name = "gl_table.py (from Mesa)" self.license = license.bsd_license_template % ( \ """Copyright (C) 1999-2003 Brian Paul All Rights Reserved. (C) Copyright IBM Corporation 2004""", "BRIAN PAUL, IBM") return def printBody(self, api): for f in api.functionIterateByOffset(): arg_string = f.get_parameter_string() print ' %s (GLAPIENTRYP %s)(%s); /* %d */' % (f.return_type, f.name, arg_string, f.offset) def printRealHeader(self): print '#ifndef GLAPIENTRYP' print '# ifndef GLAPIENTRY' print '# define GLAPIENTRY' print '# endif' print '' print '# define GLAPIENTRYP GLAPIENTRY *' print '#endif' print '' print '' print 'struct _glapi_table' print '{' return def printRealFooter(self): print '};' return class PrintRemapTable(gl_XML.gl_print_base): def __init__(self, es=False): gl_XML.gl_print_base.__init__(self) self.es = es self.header_tag = '_GLAPI_DISPATCH_H_' self.name = "gl_table.py (from Mesa)" self.license = license.bsd_license_template % ("(C) Copyright IBM Corporation 2005", "IBM") return def printRealHeader(self): print """ /* this file should not be included directly in mesa */ /** * \\file glapidispatch.h * Macros for handling GL dispatch tables. * * For each known GL function, there are 3 macros in this file. The first * macro is named CALL_FuncName and is used to call that GL function using * the specified dispatch table. The other 2 macros, called GET_FuncName * can SET_FuncName, are used to get and set the dispatch pointer for the * named function in the specified dispatch table. */ """ return def printBody(self, api): print '#define CALL_by_offset(disp, cast, offset, parameters) \\' print ' (*(cast (GET_by_offset(disp, offset)))) parameters' print '#define GET_by_offset(disp, offset) \\' print ' (offset >= 0) ? (((_glapi_proc *)(disp))[offset]) : NULL' print '#define SET_by_offset(disp, offset, fn) \\' print ' do { \\' print ' if ( (offset) < 0 ) { \\' print ' /* fprintf( stderr, "[%s:%u] SET_by_offset(%p, %d, %s)!\\n", */ \\' print ' /* __func__, __LINE__, disp, offset, # fn); */ \\' print ' /* abort(); */ \\' print ' } \\' print ' else { \\' print ' ( (_glapi_proc *) (disp) )[offset] = (_glapi_proc) fn; \\' print ' } \\' print ' } while(0)' print '' functions = [] abi_functions = [] alias_functions = [] count = 0 for f in api.functionIterateByOffset(): if not f.is_abi(): functions.append( [f, count] ) count += 1 else: abi_functions.append( f ) if self.es: # remember functions with aliases if len(f.entry_points) > 1: alias_functions.append(f) for f in abi_functions: print '#define CALL_%s(disp, parameters) (*((disp)->%s)) parameters' % (f.name, f.name) print '#define GET_%s(disp) ((disp)->%s)' % (f.name, f.name) print '#define SET_%s(disp, fn) ((disp)->%s = fn)' % (f.name, f.name) print '' print '#if !defined(_GLAPI_USE_REMAP_TABLE)' print '' for [f, index] in functions: print '#define CALL_%s(disp, parameters) (*((disp)->%s)) parameters' % (f.name, f.name) print '#define GET_%s(disp) ((disp)->%s)' % (f.name, f.name) print '#define SET_%s(disp, fn) ((disp)->%s = fn)' % (f.name, f.name) print '' print '#else' print '' print '#define driDispatchRemapTable_size %u' % (count) print 'extern int driDispatchRemapTable[ driDispatchRemapTable_size ];' print '' for [f, index] in functions: print '#define %s_remap_index %u' % (f.name, index) print '' for [f, index] in functions: arg_string = gl_XML.create_parameter_string( f.parameters, 0 ) cast = '%s (GLAPIENTRYP)(%s)' % (f.return_type, arg_string) print '#define CALL_%s(disp, parameters) CALL_by_offset(disp, (%s), driDispatchRemapTable[%s_remap_index], parameters)' % (f.name, cast, f.name) print '#define GET_%s(disp) GET_by_offset(disp, driDispatchRemapTable[%s_remap_index])' % (f.name, f.name) print '#define SET_%s(disp, fn) SET_by_offset(disp, driDispatchRemapTable[%s_remap_index], fn)' % (f.name, f.name) print '' print '#endif /* !defined(_GLAPI_USE_REMAP_TABLE) */' if alias_functions: print '' print '/* define aliases for compatibility */' for f in alias_functions: for name in f.entry_points: if name != f.name: print '#define CALL_%s(disp, parameters) CALL_%s(disp, parameters)' % (name, f.name) print '#define GET_%s(disp) GET_%s(disp)' % (name, f.name) print '#define SET_%s(disp, fn) SET_%s(disp, fn)' % (name, f.name) print '' print '#if defined(_GLAPI_USE_REMAP_TABLE)' for f in alias_functions: for name in f.entry_points: if name != f.name: print '#define %s_remap_index %s_remap_index' % (name, f.name) print '#endif /* defined(_GLAPI_USE_REMAP_TABLE) */' print '' return def show_usage(): print "Usage: %s [-f input_file_name] [-m mode] [-c]" % sys.argv[0] print " -m mode Mode can be 'table' or 'remap_table'." print " -c Enable compatibility with OpenGL ES." sys.exit(1) if __name__ == '__main__': file_name = "gl_API.xml" try: (args, trail) = getopt.getopt(sys.argv[1:], "f:m:c") except Exception,e: show_usage() mode = "table" es = False for (arg,val) in args: if arg == "-f": file_name = val elif arg == "-m": mode = val elif arg == "-c": es = True if mode == "table": printer = PrintGlTable(es) elif mode == "remap_table": printer = PrintRemapTable(es) else: show_usage() api = gl_XML.parse_GL_API( file_name ) printer.Print( api ) """ Module for formatting output data in Latex. """ from abc import ABC, abstractmethod from typing import Iterator, List, Optional, Sequence, Tuple, Type, Union import numpy as np from pandas.core.dtypes.generic import ABCMultiIndex from pandas.io.formats.format import DataFrameFormatter def _split_into_full_short_caption( caption: Optional[Union[str, Tuple[str, str]]] ) -> Tuple[str, str]: """Extract full and short captions from caption string/tuple. Parameters ---------- caption : str or tuple, optional Either table caption string or tuple (full_caption, short_caption). If string is provided, then it is treated as table full caption, while short_caption is considered an empty string. Returns ------- full_caption, short_caption : tuple Tuple of full_caption, short_caption strings. """ if caption: if isinstance(caption, str): full_caption = caption short_caption = "" else: try: full_caption, short_caption = caption except ValueError as err: msg = "caption must be either a string or a tuple of two strings" raise ValueError(msg) from err else: full_caption = "" short_caption = "" return full_caption, short_caption class RowStringConverter(ABC): r"""Converter for dataframe rows into LaTeX strings. Parameters ---------- formatter : `DataFrameFormatter` Instance of `DataFrameFormatter`. multicolumn: bool, optional Whether to use \multicolumn macro. multicolumn_format: str, optional Multicolumn format. multirow: bool, optional Whether to use \multirow macro. """ def __init__( self, formatter: DataFrameFormatter, multicolumn: bool = False, multicolumn_format: Optional[str] = None, multirow: bool = False, ): self.fmt = formatter self.frame = self.fmt.frame self.multicolumn = multicolumn self.multicolumn_format = multicolumn_format self.multirow = multirow self.clinebuf: List[List[int]] = [] self.strcols = self._get_strcols() self.strrows = list(zip(*self.strcols)) def get_strrow(self, row_num: int) -> str: """Get string representation of the row.""" row = self.strrows[row_num] is_multicol = ( row_num < self.column_levels and self.fmt.header and self.multicolumn ) is_multirow = ( row_num >= self.header_levels and self.fmt.index and self.multirow and self.index_levels > 1 ) is_cline_maybe_required = is_multirow and row_num < len(self.strrows) - 1 crow = self._preprocess_row(row) if is_multicol: crow = self._format_multicolumn(crow) if is_multirow: crow = self._format_multirow(crow, row_num) lst = [] lst.append(" & ".join(crow)) lst.append(" \\\\") if is_cline_maybe_required: cline = self._compose_cline(row_num, len(self.strcols)) lst.append(cline) return "".join(lst) @property def _header_row_num(self) -> int: """Number of rows in header.""" return self.header_levels if self.fmt.header else 0 @property def index_levels(self) -> int: """Integer number of levels in index.""" return self.frame.index.nlevels @property def column_levels(self) -> int: return self.frame.columns.nlevels @property def header_levels(self) -> int: nlevels = self.column_levels if self.fmt.has_index_names and self.fmt.show_index_names: nlevels += 1 return nlevels def _get_strcols(self) -> List[List[str]]: """String representation of the columns.""" if self.fmt.frame.empty: strcols = [[self._empty_info_line]] else: strcols = self.fmt.get_strcols() # reestablish the MultiIndex that has been joined by get_strcols() if self.fmt.index and isinstance(self.frame.index, ABCMultiIndex): out = self.frame.index.format( adjoin=False, sparsify=self.fmt.sparsify, names=self.fmt.has_index_names, na_rep=self.fmt.na_rep, ) # index.format will sparsify repeated entries with empty strings # so pad these with some empty space def pad_empties(x): for pad in reversed(x): if pad: break return [x[0]] + [i if i else " " * len(pad) for i in x[1:]] gen = (pad_empties(i) for i in out) # Add empty spaces for each column level clevels = self.frame.columns.nlevels out = [[" " * len(i[-1])] * clevels + i for i in gen] # Add the column names to the last index column cnames = self.frame.columns.names if any(cnames): new_names = [i if i else "{}" for i in cnames] out[self.frame.index.nlevels - 1][:clevels] = new_names # Get rid of old multiindex column and add new ones strcols = out + strcols[1:] return strcols @property def _empty_info_line(self): return ( f"Empty {type(self.frame).__name__}\n" f"Columns: {self.frame.columns}\n" f"Index: {self.frame.index}" ) def _preprocess_row(self, row: Sequence[str]) -> List[str]: """Preprocess elements of the row.""" if self.fmt.escape: crow = _escape_symbols(row) else: crow = [x if x else "{}" for x in row] if self.fmt.bold_rows and self.fmt.index: crow = _convert_to_bold(crow, self.index_levels) return crow def _format_multicolumn(self, row: List[str]) -> List[str]: r""" Combine columns belonging to a group to a single multicolumn entry according to self.multicolumn_format e.g.: a & & & b & c & will become \multicolumn{3}{l}{a} & b & \multicolumn{2}{l}{c} """ row2 = row[: self.index_levels] ncol = 1 coltext = "" def append_col(): # write multicolumn if needed if ncol > 1: row2.append( f"\\multicolumn{{{ncol:d}}}{{{self.multicolumn_format}}}" f"{{{coltext.strip()}}}" ) # don't modify where not needed else: row2.append(coltext) for c in row[self.index_levels :]: # if next col has text, write the previous if c.strip(): if coltext: append_col() coltext = c ncol = 1 # if not, add it to the previous multicolumn else: ncol += 1 # write last column name if coltext: append_col() return row2 def _format_multirow(self, row: List[str], i: int) -> List[str]: r""" Check following rows, whether row should be a multirow e.g.: becomes: a & 0 & \multirow{2}{*}{a} & 0 & & 1 & & 1 & b & 0 & \cline{1-2} b & 0 & """ for j in range(self.index_levels): if row[j].strip(): nrow = 1 for r in self.strrows[i + 1 :]: if not r[j].strip(): nrow += 1 else: break if nrow > 1: # overwrite non-multirow entry row[j] = f"\\multirow{{{nrow:d}}}{{*}}{{{row[j].strip()}}}" # save when to end the current block with \cline self.clinebuf.append([i + nrow - 1, j + 1]) return row def _compose_cline(self, i: int, icol: int) -> str: """ Create clines after multirow-blocks are finished. """ lst = [] for cl in self.clinebuf: if cl[0] == i: lst.append(f"\n\\cline{{{cl[1]:d}-{icol:d}}}") # remove entries that have been written to buffer self.clinebuf = [x for x in self.clinebuf if x[0] != i] return "".join(lst) class RowStringIterator(RowStringConverter): """Iterator over rows of the header or the body of the table.""" @abstractmethod def __iter__(self) -> Iterator[str]: """Iterate over LaTeX string representations of rows.""" class RowHeaderIterator(RowStringIterator): """Iterator for the table header rows.""" def __iter__(self) -> Iterator[str]: for row_num in range(len(self.strrows)): if row_num < self._header_row_num: yield self.get_strrow(row_num) class RowBodyIterator(RowStringIterator): """Iterator for the table body rows.""" def __iter__(self) -> Iterator[str]: for row_num in range(len(self.strrows)): if row_num >= self._header_row_num: yield self.get_strrow(row_num) class TableBuilderAbstract(ABC): """ Abstract table builder producing string representation of LaTeX table. Parameters ---------- formatter : `DataFrameFormatter` Instance of `DataFrameFormatter`. column_format: str, optional Column format, for example, 'rcl' for three columns. multicolumn: bool, optional Use multicolumn to enhance MultiIndex columns. multicolumn_format: str, optional The alignment for multicolumns, similar to column_format. multirow: bool, optional Use multirow to enhance MultiIndex rows. caption: str, optional Table caption. short_caption: str, optional Table short caption. label: str, optional LaTeX label. position: str, optional Float placement specifier, for example, 'htb'. """ def __init__( self, formatter: DataFrameFormatter, column_format: Optional[str] = None, multicolumn: bool = False, multicolumn_format: Optional[str] = None, multirow: bool = False, caption: Optional[str] = None, short_caption: Optional[str] = None, label: Optional[str] = None, position: Optional[str] = None, ): self.fmt = formatter self.column_format = column_format self.multicolumn = multicolumn self.multicolumn_format = multicolumn_format self.multirow = multirow self.caption = caption self.short_caption = short_caption self.label = label self.position = position def get_result(self) -> str: """String representation of LaTeX table.""" elements = [ self.env_begin, self.top_separator, self.header, self.middle_separator, self.env_body, self.bottom_separator, self.env_end, ] result = "\n".join([item for item in elements if item]) trailing_newline = "\n" result += trailing_newline return result @property @abstractmethod def env_begin(self) -> str: """Beginning of the environment.""" @property @abstractmethod def top_separator(self) -> str: """Top level separator.""" @property @abstractmethod def header(self) -> str: """Header lines.""" @property @abstractmethod def middle_separator(self) -> str: """Middle level separator.""" @property @abstractmethod def env_body(self) -> str: """Environment body.""" @property @abstractmethod def bottom_separator(self) -> str: """Bottom level separator.""" @property @abstractmethod def env_end(self) -> str: """End of the environment.""" class GenericTableBuilder(TableBuilderAbstract): """Table builder producing string representation of LaTeX table.""" @property def header(self) -> str: iterator = self._create_row_iterator(over="header") return "\n".join(list(iterator)) @property def top_separator(self) -> str: return "\\toprule" @property def middle_separator(self) -> str: return "\\midrule" if self._is_separator_required() else "" @property def env_body(self) -> str: iterator = self._create_row_iterator(over="body") return "\n".join(list(iterator)) def _is_separator_required(self) -> bool: return bool(self.header and self.env_body) @property def _position_macro(self) -> str: r"""Position macro, extracted from self.position, like [h].""" return f"[{self.position}]" if self.position else "" @property def _caption_macro(self) -> str: r"""Caption macro, extracted from self.caption. With short caption: \caption[short_caption]{caption_string}. Without short caption: \caption{caption_string}. """ if self.caption: return "".join( [ r"\caption", f"[{self.short_caption}]" if self.short_caption else "", f"{{{self.caption}}}", ] ) return "" @property def _label_macro(self) -> str: r"""Label macro, extracted from self.label, like \label{ref}.""" return f"\\label{{{self.label}}}" if self.label else "" def _create_row_iterator(self, over: str) -> RowStringIterator: """Create iterator over header or body of the table. Parameters ---------- over : {'body', 'header'} Over what to iterate. Returns ------- RowStringIterator Iterator over body or header. """ iterator_kind = self._select_iterator(over) return iterator_kind( formatter=self.fmt, multicolumn=self.multicolumn, multicolumn_format=self.multicolumn_format, multirow=self.multirow, ) def _select_iterator(self, over: str) -> Type[RowStringIterator]: """Select proper iterator over table rows.""" if over == "header": return RowHeaderIterator elif over == "body": return RowBodyIterator else: msg = f"'over' must be either 'header' or 'body', but {over} was provided" raise ValueError(msg) class LongTableBuilder(GenericTableBuilder): """Concrete table builder for longtable. >>> from pandas import DataFrame >>> from pandas.io.formats import format as fmt >>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]}) >>> formatter = fmt.DataFrameFormatter(df) >>> builder = LongTableBuilder(formatter, caption='a long table', ... label='tab:long', column_format='lrl') >>> table = builder.get_result() >>> print(table) \\begin{longtable}{lrl} \\caption{a long table} \\label{tab:long}\\\\ \\toprule {} & a & b \\\\ \\midrule \\endfirsthead \\caption[]{a long table} \\\\ \\toprule {} & a & b \\\\ \\midrule \\endhead \\midrule \\multicolumn{3}{r}{{Continued on next page}} \\\\ \\midrule \\endfoot \\bottomrule \\endlastfoot 0 & 1 & b1 \\\\ 1 & 2 & b2 \\\\ \\end{longtable} """ @property def env_begin(self) -> str: first_row = ( f"\\begin{{longtable}}{self._position_macro}{{{self.column_format}}}" ) elements = [first_row, f"{self._caption_and_label()}"] return "\n".join([item for item in elements if item]) def _caption_and_label(self) -> str: if self.caption or self.label: double_backslash = "\\\\" elements = [f"{self._caption_macro}", f"{self._label_macro}"] caption_and_label = "\n".join([item for item in elements if item]) caption_and_label += double_backslash return caption_and_label else: return "" @property def middle_separator(self) -> str: iterator = self._create_row_iterator(over="header") # the content between \endfirsthead and \endhead commands # mitigates repeated List of Tables entries in the final LaTeX # document when dealing with longtable environments; GH #34360 elements = [ "\\midrule", "\\endfirsthead", f"\\caption[]{{{self.caption}}} \\\\" if self.caption else "", self.top_separator, self.header, "\\midrule", "\\endhead", "\\midrule", f"\\multicolumn{{{len(iterator.strcols)}}}{{r}}" "{{Continued on next page}} \\\\", "\\midrule", "\\endfoot\n", "\\bottomrule", "\\endlastfoot", ] if self._is_separator_required(): return "\n".join(elements) return "" @property def bottom_separator(self) -> str: return "" @property def env_end(self) -> str: return "\\end{longtable}" class RegularTableBuilder(GenericTableBuilder): """Concrete table builder for regular table. >>> from pandas import DataFrame >>> from pandas.io.formats import format as fmt >>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]}) >>> formatter = fmt.DataFrameFormatter(df) >>> builder = RegularTableBuilder(formatter, caption='caption', label='lab', ... column_format='lrc') >>> table = builder.get_result() >>> print(table) \\begin{table} \\centering \\caption{caption} \\label{lab} \\begin{tabular}{lrc} \\toprule {} & a & b \\\\ \\midrule 0 & 1 & b1 \\\\ 1 & 2 & b2 \\\\ \\bottomrule \\end{tabular} \\end{table} """ @property def env_begin(self) -> str: elements = [ f"\\begin{{table}}{self._position_macro}", "\\centering", f"{self._caption_macro}", f"{self._label_macro}", f"\\begin{{tabular}}{{{self.column_format}}}", ] return "\n".join([item for item in elements if item]) @property def bottom_separator(self) -> str: return "\\bottomrule" @property def env_end(self) -> str: return "\n".join(["\\end{tabular}", "\\end{table}"]) class TabularBuilder(GenericTableBuilder): """Concrete table builder for tabular environment. >>> from pandas import DataFrame >>> from pandas.io.formats import format as fmt >>> df = DataFrame({"a": [1, 2], "b": ["b1", "b2"]}) >>> formatter = fmt.DataFrameFormatter(df) >>> builder = TabularBuilder(formatter, column_format='lrc') >>> table = builder.get_result() >>> print(table) \\begin{tabular}{lrc} \\toprule {} & a & b \\\\ \\midrule 0 & 1 & b1 \\\\ 1 & 2 & b2 \\\\ \\bottomrule \\end{tabular} """ @property def env_begin(self) -> str: return f"\\begin{{tabular}}{{{self.column_format}}}" @property def bottom_separator(self) -> str: return "\\bottomrule" @property def env_end(self) -> str: return "\\end{tabular}" class LatexFormatter: r""" Used to render a DataFrame to a LaTeX tabular/longtable environment output. Parameters ---------- formatter : `DataFrameFormatter` longtable : bool, default False Use longtable environment. column_format : str, default None The columns format as specified in `LaTeX table format `__ e.g 'rcl' for 3 columns multicolumn : bool, default False Use \multicolumn to enhance MultiIndex columns. multicolumn_format : str, default 'l' The alignment for multicolumns, similar to `column_format` multirow : bool, default False Use \multirow to enhance MultiIndex rows. caption : str or tuple, optional Tuple (full_caption, short_caption), which results in \caption[short_caption]{full_caption}; if a single string is passed, no short caption will be set. label : str, optional The LaTeX label to be placed inside ``\label{}`` in the output. position : str, optional The LaTeX positional argument for tables, to be placed after ``\begin{}`` in the output. See Also -------- HTMLFormatter """ def __init__( self, formatter: DataFrameFormatter, longtable: bool = False, column_format: Optional[str] = None, multicolumn: bool = False, multicolumn_format: Optional[str] = None, multirow: bool = False, caption: Optional[Union[str, Tuple[str, str]]] = None, label: Optional[str] = None, position: Optional[str] = None, ): self.fmt = formatter self.frame = self.fmt.frame self.longtable = longtable self.column_format = column_format self.multicolumn = multicolumn self.multicolumn_format = multicolumn_format self.multirow = multirow self.caption, self.short_caption = _split_into_full_short_caption(caption) self.label = label self.position = position def to_string(self) -> str: """ Render a DataFrame to a LaTeX tabular, longtable, or table/tabular environment output. """ return self.builder.get_result() @property def builder(self) -> TableBuilderAbstract: """Concrete table builder. Returns ------- TableBuilder """ builder = self._select_builder() return builder( formatter=self.fmt, column_format=self.column_format, multicolumn=self.multicolumn, multicolumn_format=self.multicolumn_format, multirow=self.multirow, caption=self.caption, short_caption=self.short_caption, label=self.label, position=self.position, ) def _select_builder(self) -> Type[TableBuilderAbstract]: """Select proper table builder.""" if self.longtable: return LongTableBuilder if any([self.caption, self.label, self.position]): return RegularTableBuilder return TabularBuilder @property def column_format(self) -> Optional[str]: """Column format.""" return self._column_format @column_format.setter def column_format(self, input_column_format: Optional[str]) -> None: """Setter for column format.""" if input_column_format is None: self._column_format = ( self._get_index_format() + self._get_column_format_based_on_dtypes() ) elif not isinstance(input_column_format, str): raise ValueError( f"column_format must be str or unicode, " f"not {type(input_column_format)}" ) else: self._column_format = input_column_format def _get_column_format_based_on_dtypes(self) -> str: """Get column format based on data type. Right alignment for numbers and left - for strings. """ def get_col_type(dtype): if issubclass(dtype.type, np.number): return "r" return "l" dtypes = self.frame.dtypes._values return "".join(map(get_col_type, dtypes)) def _get_index_format(self) -> str: """Get index column format.""" return "l" * self.frame.index.nlevels if self.fmt.index else "" def _escape_symbols(row: Sequence[str]) -> List[str]: """Carry out string replacements for special symbols. Parameters ---------- row : list List of string, that may contain special symbols. Returns ------- list list of strings with the special symbols replaced. """ return [ ( x.replace("\\", "\\textbackslash ") .replace("_", "\\_") .replace("%", "\\%") .replace("$", "\\$") .replace("#", "\\#") .replace("{", "\\{") .replace("}", "\\}") .replace("~", "\\textasciitilde ") .replace("^", "\\textasciicircum ") .replace("&", "\\&") if (x and x != "{}") else "{}" ) for x in row ] def _convert_to_bold(crow: Sequence[str], ilevels: int) -> List[str]: """Convert elements in ``crow`` to bold.""" return [ f"\\textbf{{{x}}}" if j < ilevels and x.strip() not in ["", "{}"] else x for j, x in enumerate(crow) ] if __name__ == "__main__": import doctest doctest.testmod() """Test the Profiler config flow.""" from datetime import timedelta import os from unittest.mock import patch from homeassistant import setup from homeassistant.components.profiler import ( CONF_SCAN_INTERVAL, CONF_SECONDS, CONF_TYPE, SERVICE_DUMP_LOG_OBJECTS, SERVICE_MEMORY, SERVICE_START, SERVICE_START_LOG_OBJECTS, SERVICE_STOP_LOG_OBJECTS, ) from homeassistant.components.profiler.const import DOMAIN import homeassistant.util.dt as dt_util from tests.common import MockConfigEntry, async_fire_time_changed async def test_basic_usage(hass, tmpdir): """Test we can setup and the service is registered."""