hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
7abf11f925b844b5454b62dd7ac6f2549d89bd2c | 9,139 | # frozen_string_literal: true
# -----------------------------------------------------------------------------
#
# GEOS toplevel interface
#
# -----------------------------------------------------------------------------
module RGeo
module Geos
class << self
# Returns true if the CAPI GEOS implementation is supported.
def capi_supported?
CAPI_SUPPORTED
end
# Returns true if the FFI GEOS implementation is supported.
def ffi_supported?
FFI_SUPPORTED
end
# Returns true if any GEOS implementation is supported.
# If this returns false, GEOS features are not available at all.
def supported?
FFI_SUPPORTED || CAPI_SUPPORTED
end
# Returns true if the given feature is a CAPI GEOS feature, or if
# the given factory is a CAPI GEOS factory.
def is_capi_geos?(object)
CAPI_SUPPORTED &&
(CAPIFactory === object || CAPIGeometryMethods === object ||
ZMFactory === object && CAPIFactory === object.z_factory ||
ZMGeometryMethods === object && CAPIGeometryMethods === object.z_geometry)
end
# Returns true if the given feature is an FFI GEOS feature, or if
# the given factory is an FFI GEOS factory.
def is_ffi_geos?(object)
FFI_SUPPORTED &&
(FFIFactory === object || FFIGeometryMethods === object ||
ZMFactory === object && FFIFactory === object.z_factory ||
ZMGeometryMethods === object && FFIGeometryMethods === object.z_geometry)
end
# Returns true if the given feature is a GEOS feature, or if the given
# factory is a GEOS factory. Does not distinguish between CAPI and FFI.
def is_geos?(object)
CAPI_SUPPORTED && (CAPIFactory === object || CAPIGeometryMethods === object) ||
FFI_SUPPORTED && (FFIFactory === object || FFIGeometryMethods === object) ||
ZMFactory === object || ZMGeometryMethods === object
end
# Returns the GEOS library version as a string of the format "x.y.z".
# Returns nil if GEOS is not available.
def version
unless defined?(@version)
if RGeo::Geos::CAPI_SUPPORTED
@version = RGeo::Geos::CAPIFactory._geos_version.freeze
elsif RGeo::Geos::FFI_SUPPORTED
@version = ::Geos::FFIGeos.GEOSversion.sub(/-CAPI-.*$/, "").freeze
else
@version = nil
end
end
@version
end
# The preferred native interface. This is the native interface
# used by default when a factory is created.
# Supported values are <tt>:capi</tt> and <tt>:ffi</tt>.
#
# This is set automatically when RGeo loads, to <tt>:capi</tt>
# if the CAPI interface is available, otherwise to <tt>:ffi</tt>
# if FFI is available, otherwise to nil if no GEOS interface is
# available. You can override this setting if you want to prefer
# FFI over CAPI.
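#
# Illustrative usage, not part of the original source (assumes an FFI
# build of GEOS is available):
#
#   RGeo::Geos.preferred_native_interface = :ffi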
attr_accessor :preferred_native_interface
# Returns a factory for the GEOS implementation.
# Returns nil if the GEOS implementation is not supported.
#
# Note that GEOS does not natively support 4-dimensional data
# (i.e. both z and m values). However, RGeo's GEOS wrapper does
# provide a 4-dimensional factory that utilizes an extra native
# GEOS object to handle the extra coordinate. Hence, a factory
# configured with both Z and M support will work, but will be
# slower than a 2-dimensional or 3-dimensional factory.
#
# Options include:
#
# [<tt>:native_interface</tt>]
# Specifies which native interface to use. Possible values are
# <tt>:capi</tt> and <tt>:ffi</tt>. The default is the value
# of the preferred_native_interface.
# [<tt>:uses_lenient_multi_polygon_assertions</tt>]
# If set to true, assertion checking on MultiPolygon is disabled.
# This may speed up creation of MultiPolygon objects, at the
# expense of not doing the proper checking for OGC MultiPolygon
# compliance. See RGeo::Feature::MultiPolygon for details on
# the MultiPolygon assertions. Default is false. Also called
# <tt>:lenient_multi_polygon_assertions</tt>.
# [<tt>:buffer_resolution</tt>]
# The resolution of buffers around geometries created by this
# factory. This controls the number of line segments used to
# approximate curves. The default is 1, which causes, for
# example, the buffer around a point to be approximated by a
# 4-sided polygon. A resolution of 2 would cause that buffer
# to be approximated by an 8-sided polygon. The exact behavior
# for different kinds of buffers is defined by GEOS.
# [<tt>:srid</tt>]
# Set the SRID returned by geometries created by this factory.
# Default is 0.
# [<tt>:proj4</tt>]
# The coordinate system in Proj4 format, either as a
# CoordSys::Proj4 object or as a string or hash representing the
# proj4 format. Optional.
# [<tt>:coord_sys</tt>]
# The coordinate system in OGC form, either as a subclass of
# CoordSys::CS::CoordinateSystem, or as a string in WKT format.
# Optional.
# [<tt>:srs_database</tt>]
# Optional. If provided, the object should respond to #get and
# #clear_cache. If both this and an SRID are
# provided, they are used to look up the proj4 and coord_sys
# objects from a spatial reference system database.
# [<tt>:has_z_coordinate</tt>]
# Support <tt>z_coordinate</tt>. Default is false.
# [<tt>:has_m_coordinate</tt>]
# Support <tt>m_coordinate</tt>. Default is false.
# [<tt>:wkt_parser</tt>]
# Configure the parser for WKT. You may either pass a hash of
# configuration parameters for WKRep::WKTParser.new, or the
# special value <tt>:geos</tt>, indicating to use the native
# GEOS parser. Default is the empty hash, indicating the default
# configuration for WKRep::WKTParser.
# Note that the special <tt>:geos</tt> value is not supported for
# ZM factories, since GEOS currently can't handle ZM natively.
# [<tt>:wkb_parser</tt>]
# Configure the parser for WKB. You may either pass a hash of
# configuration parameters for WKRep::WKBParser.new, or the
# special value <tt>:geos</tt>, indicating to use the native
# GEOS parser. Default is the empty hash, indicating the default
# configuration for WKRep::WKBParser.
# Note that the special <tt>:geos</tt> value is not supported for
# ZM factories, since GEOS currently can't handle ZM natively.
# [<tt>:wkt_generator</tt>]
# Configure the generator for WKT. You may either pass a hash of
# configuration parameters for WKRep::WKTGenerator.new, or the
# special value <tt>:geos</tt>, indicating to use the native
# GEOS generator. Default is <tt>{:convert_case => :upper}</tt>.
# Note that the special <tt>:geos</tt> value is not supported for
# ZM factories, since GEOS currently can't handle ZM natively.
# [<tt>:wkb_generator</tt>]
# Configure the generator for WKB. You may either pass a hash of
# configuration parameters for WKRep::WKBGenerator.new, or the
# special value <tt>:geos</tt>, indicating to use the native
# GEOS generator. Default is the empty hash, indicating the
# default configuration for WKRep::WKBGenerator.
# Note that the special <tt>:geos</tt> value is not supported for
# ZM factories, since GEOS currently can't handle ZM natively.
# [<tt>:auto_prepare</tt>]
# Request an auto-prepare strategy. Supported values are
# <tt>:simple</tt> and <tt>:disabled</tt>. The former (which is
# the default) generates a prepared geometry the second time an
# operation that would benefit from it is called. The latter
# never automatically generates a prepared geometry (unless you
# generate one explicitly using the <tt>prepare!</tt> method).
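#
# Illustrative sketch, not part of the original source (the SRID and
# option values are arbitrary placeholders):
#
#   factory = RGeo::Geos.factory(srid: 4326, has_z_coordinate: true)
#   point = factory.point(1.0, 2.0, 3.0) if factory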
def factory(opts = {})
if supported?
native_interface = opts[:native_interface] || Geos.preferred_native_interface
if opts[:has_z_coordinate] && opts[:has_m_coordinate]
ZMFactory.new(opts)
elsif native_interface == :ffi
FFIFactory.new(opts)
else
CAPIFactory.create(opts)
end
end
end
# Returns a Feature::FactoryGenerator that creates Geos-backed
# factories. The given options are used as the default options.
#
# A common case for this is to provide the <tt>:srs_database</tt>
# as a default. Then, the factory generator need only be passed
# an SRID and it will automatically fetch the appropriate Proj4
# and CoordSys objects.
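#
# Illustrative sketch, not part of the original source (+srs_db+ stands
# in for any object responding to #get and #clear_cache):
#
#   gen = RGeo::Geos.factory_generator(srs_database: srs_db)
#   factory = gen.call(srid: 4326)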
def factory_generator(defaults = {})
proc { |c| factory(defaults.merge(c)) }
end
end
end
end
| 44.580488 | 87 | 0.632892 |
62c3239f8e17660667de221646d23178b2cfe94a | 720 | # Filters added to this controller apply to all controllers in the application.
# Likewise, all the methods added will be available for all controllers.
class ApplicationController < ActionController::Base
helper :all # include all helpers, all the time
# See ActionController::RequestForgeryProtection for details
# Uncomment the :secret if you're not using the cookie session store
protect_from_forgery # :secret => '65740677282e9a28e5b09740b73ec574'
# See ActionController::Base for details
# Uncomment this to filter the contents of submitted sensitive data parameters
# from your application log (in this case, all fields with names like "password").
# filter_parameter_logging :password
end
| 45 | 85 | 0.786111 |
e2a4175f0810f4bcbf350df36833259289494c78 | 1,247 | require 'spec_helper'
describe "schedule resource" do
let(:client) { Mbta::Client.new( api_key: ENV["API_KEY"] ) }
describe "schedulebystop" do
let(:stop) { "West Roxbury" }
it "retrieves the schedule" do
VCR.use_cassette("mbta_schedule_schedule_by_stop") do
client.schedule_by_stop(stop).tap do |data|
expect(data["mode"][0]["route_type"]).to eq("2")
expect(data["mode"][0].keys).to eq(["route_type", "mode_name", "route"])
end
end
end
end
describe "schedulebyroute" do
let(:route) { "CR-Needham" }
it "retrieves the schedule" do
VCR.use_cassette("mbta_schedule_schedule_by_route") do
client.schedule_by_route(route).tap do |data|
expect(data["direction"][0]["direction_name"]).to eq("Outbound")
expect(data["direction"][1]["direction_name"]).to eq("Inbound")
end
end
end
end
describe "schedulebytrip" do
let(:trip) { "CR-Needham-CR-Saturday-Needham-Dec14-1611" }
it "retrieves the schedule" do
VCR.use_cassette("mbta_schedule_schedule_by_trip") do
client.schedule_by_trip(trip).tap do |data|
expect(data["stop"].length).to eq(12)
end
end
end
end
end
| 25.979167 | 82 | 0.631115 |
7acbea3a13ddab106cf0fc6a64b723815697ed6b | 670 |
Pod::Spec.new do |s|
s.name = "PinCodeTextField"
s.version = "0.3.0"
s.summary = "Simple pin code text input based on UIKeyInput"
s.description = <<-DESC
Simple pin code text input with underlines for each character placeholder
DESC
s.homepage = "https://github.com/tkach/PinCodeTextField"
s.license = { :type => "MIT" }
s.author = { "Alex Tkachenko" => "[email protected]" }
s.platform = :ios, "14.0"
s.source = { :git => "https://github.com/tkach/PinCodeTextField.git", :tag => "#{s.version}" }
s.source_files = ["Pod/*.{swift}", "Pod/**/*.{swift}" ]
end
| 27.916667 | 102 | 0.562687 |
f7f8dc5a621cac8af9f05c6332a35feaaf6d7255 | 671 | require 'minitest/autorun'
require 'mockdata/names'
class MockdataTest < Minitest::Test
def test_names
assert Mockdata::Names::CAR_BRANDS.include? Mockdata::Names.car_brand
assert Mockdata::Names::COMPANY_NAMES.include? Mockdata::Names.company
assert Mockdata::Names::FILE_EXTENSIONS.include? Mockdata::Names.file_extension
assert Mockdata::Names::BUSINESS_TYPES.include? Mockdata::Names.business_type
assert Mockdata::Names::PROJECT_NAMES.include? Mockdata::Names.project
assert Mockdata::Names::HOTEL_NAMES.include? Mockdata::Names.hotel
assert Mockdata::Names::OCCUPATIONS.include? Mockdata::Names.occupation
end
end
| 44.733333 | 83 | 0.767511 |
39a731adffcb7cc3be1dc49a2de41d04e853ffda | 21,374 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/googleapis/googleapis/blob/master/google/ads/google_ads/v1/services/campaign_draft_service.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google
# engineers.
require "json"
require "pathname"
require "google/gax"
require "google/gax/operation"
require "google/longrunning/operations_client"
require "google/ads/google_ads/v1/services/campaign_draft_service_pb"
require "google/ads/google_ads/v1/services/credentials"
module Google
module Ads
module GoogleAds
module V1
module Services
# Service to manage campaign drafts.
#
# @!attribute [r] campaign_draft_service_stub
# @return [Google::Ads::GoogleAds::V1::Services::CampaignDraftService::Stub]
class CampaignDraftServiceClient
attr_reader :campaign_draft_service_stub
# The default address of the service.
SERVICE_ADDRESS = "googleads.googleapis.com".freeze
# The default port of the service.
DEFAULT_SERVICE_PORT = 443
# The default set of gRPC interceptors.
GRPC_INTERCEPTORS = []
DEFAULT_TIMEOUT = 30
PAGE_DESCRIPTORS = {
"list_campaign_draft_async_errors" => Google::Gax::PageDescriptor.new(
"page_token",
"next_page_token",
"errors")
}.freeze
private_constant :PAGE_DESCRIPTORS
# The scopes needed to make gRPC calls to all of the methods defined in
# this service.
ALL_SCOPES = [
].freeze
class OperationsClient < Google::Longrunning::OperationsClient
self::SERVICE_ADDRESS = CampaignDraftServiceClient::SERVICE_ADDRESS
self::GRPC_INTERCEPTORS = CampaignDraftServiceClient::GRPC_INTERCEPTORS
end
CAMPAIGN_DRAFT_PATH_TEMPLATE = Google::Gax::PathTemplate.new(
"customers/{customer}/campaignDrafts/{campaign_draft}"
)
private_constant :CAMPAIGN_DRAFT_PATH_TEMPLATE
# Returns a fully-qualified campaign_draft resource name string.
# @param customer [String]
# @param campaign_draft [String]
# @return [String]
def self.campaign_draft_path customer, campaign_draft
CAMPAIGN_DRAFT_PATH_TEMPLATE.render(
:"customer" => customer,
:"campaign_draft" => campaign_draft
)
end
# @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc]
# Provides the means for authenticating requests made by the client. This parameter can
# be many types.
# A `Google::Auth::Credentials` uses the properties of its represented keyfile for
# authenticating requests made by this client.
# A `String` will be treated as the path to the keyfile to be used for the construction of
# credentials for this client.
# A `Hash` will be treated as the contents of a keyfile to be used for the construction of
# credentials for this client.
# A `GRPC::Core::Channel` will be used to make calls through.
# A `GRPC::Core::ChannelCredentials` for the setting up the RPC client. The channel credentials
# should already be composed with a `GRPC::Core::CallCredentials` object.
# A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the
# metadata for requests, generally, to give OAuth credentials.
# @param scopes [Array<String>]
# The OAuth scopes for this service. This parameter is ignored if
# an updater_proc is supplied.
# @param client_config [Hash]
# A Hash for call options for each method. See
# Google::Gax#construct_settings for the structure of
# this data. Falls back to the default config if not specified
# or the specified config is missing data points.
# @param timeout [Numeric]
# The default timeout, in seconds, for calls made through this client.
# @param metadata [Hash]
# Default metadata to be sent with each request. This can be overridden on a per call basis.
# @param exception_transformer [Proc]
# An optional proc that intercepts any exceptions raised during an API call to inject
# custom error handling.
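# @example Constructing a client directly (illustrative sketch, not from the original source; the keyfile path is a placeholder)
#   require "google/ads/google_ads"
#
#   campaign_draft_client = Google::Ads::GoogleAds::V1::Services::CampaignDraftServiceClient.new(
#     credentials: "/path/to/keyfile.json",
#     timeout: 30
#   )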
def initialize \
credentials: nil,
scopes: ALL_SCOPES,
client_config: {},
timeout: DEFAULT_TIMEOUT,
metadata: nil,
exception_transformer: nil,
lib_name: nil,
lib_version: ""
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "google/gax/grpc"
require "google/ads/google_ads/v1/services/campaign_draft_service_services_pb"
credentials ||= Google::Ads::GoogleAds::V1::Services::Credentials.default
@operations_client = OperationsClient.new(
credentials: credentials,
scopes: scopes,
client_config: client_config,
timeout: timeout,
lib_name: lib_name,
lib_version: lib_version,
metadata: metadata,
)
if credentials.is_a?(String) || credentials.is_a?(Hash)
updater_proc = Google::Ads::GoogleAds::V1::Services::Credentials.new(credentials).updater_proc
end
if credentials.is_a?(GRPC::Core::Channel)
channel = credentials
end
if credentials.is_a?(GRPC::Core::ChannelCredentials)
chan_creds = credentials
end
if credentials.is_a?(Proc)
updater_proc = credentials
end
if credentials.is_a?(Google::Auth::Credentials)
updater_proc = credentials.updater_proc
end
package_version = Gem.loaded_specs['google-ads-googleads'].version.version
google_api_client = "gl-ruby/#{RUBY_VERSION}"
google_api_client << " #{lib_name}/#{lib_version}" if lib_name
google_api_client << " gapic/#{package_version} gax/#{Google::Gax::VERSION}"
google_api_client << " grpc/#{GRPC::VERSION}"
google_api_client.freeze
headers = { :"x-goog-api-client" => google_api_client }
headers.merge!(metadata) unless metadata.nil?
client_config_file = Pathname.new(__dir__).join(
"campaign_draft_service_client_config.json"
)
defaults = client_config_file.open do |f|
Google::Gax.construct_settings(
"google.ads.googleads.v1.services.CampaignDraftService",
JSON.parse(f.read),
client_config,
Google::Gax::Grpc::STATUS_CODE_NAMES,
timeout,
page_descriptors: PAGE_DESCRIPTORS,
errors: Google::Gax::Grpc::API_ERRORS,
metadata: headers
)
end
# Allow overriding the service path/port in subclasses.
service_path = self.class::SERVICE_ADDRESS
port = self.class::DEFAULT_SERVICE_PORT
interceptors = self.class::GRPC_INTERCEPTORS
@campaign_draft_service_stub = Google::Gax::Grpc.create_stub(
service_path,
port,
chan_creds: chan_creds,
channel: channel,
updater_proc: updater_proc,
scopes: scopes,
interceptors: interceptors,
&Google::Ads::GoogleAds::V1::Services::CampaignDraftService::Stub.method(:new)
)
@get_campaign_draft = Google::Gax.create_api_call(
@campaign_draft_service_stub.method(:get_campaign_draft),
defaults["get_campaign_draft"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'resource_name' => request.resource_name}
end
)
@mutate_campaign_drafts = Google::Gax.create_api_call(
@campaign_draft_service_stub.method(:mutate_campaign_drafts),
defaults["mutate_campaign_drafts"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'customer_id' => request.customer_id}
end
)
@promote_campaign_draft = Google::Gax.create_api_call(
@campaign_draft_service_stub.method(:promote_campaign_draft),
defaults["promote_campaign_draft"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'campaign_draft' => request.campaign_draft}
end
)
@list_campaign_draft_async_errors = Google::Gax.create_api_call(
@campaign_draft_service_stub.method(:list_campaign_draft_async_errors),
defaults["list_campaign_draft_async_errors"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'resource_name' => request.resource_name}
end
)
end
# Service calls
# Returns the requested campaign draft in full detail.
#
# @param resource_name [String]
# The resource name of the campaign draft to fetch.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g., timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Ads::GoogleAds::V1::Resources::CampaignDraft]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Ads::GoogleAds::V1::Resources::CampaignDraft]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/ads/google_ads"
#
# campaign_draft_client = Google::Ads::GoogleAds::CampaignDraft.new(version: :v1)
# formatted_resource_name = Google::Ads::GoogleAds::V1::Services::CampaignDraftServiceClient.campaign_draft_path("[CUSTOMER]", "[CAMPAIGN_DRAFT]")
# response = campaign_draft_client.get_campaign_draft(formatted_resource_name)
def get_campaign_draft \
resource_name,
options: nil,
&block
req = {
resource_name: resource_name
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Ads::GoogleAds::V1::Services::GetCampaignDraftRequest)
@get_campaign_draft.call(req, options, &block)
end
# Creates, updates, or removes campaign drafts. Operation statuses are
# returned.
#
# @param customer_id [String]
# The ID of the customer whose campaign drafts are being modified.
# @param operations [Array<Google::Ads::GoogleAds::V1::Services::CampaignDraftOperation | Hash>]
# The list of operations to perform on individual campaign drafts.
# A hash of the same form as `Google::Ads::GoogleAds::V1::Services::CampaignDraftOperation`
# can also be provided.
# @param partial_failure [true, false]
# If true, successful operations will be carried out and invalid
# operations will return errors. If false, all operations will be carried
# out in one transaction if and only if they are all valid.
# Default is false.
# @param validate_only [true, false]
# If true, the request is validated but not executed. Only errors are
# returned, not results.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g., timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Ads::GoogleAds::V1::Services::MutateCampaignDraftsResponse]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Ads::GoogleAds::V1::Services::MutateCampaignDraftsResponse]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/ads/google_ads"
#
# campaign_draft_client = Google::Ads::GoogleAds::CampaignDraft.new(version: :v1)
#
# # TODO: Initialize `customer_id`:
# customer_id = ''
#
# # TODO: Initialize `operations`:
# operations = []
# response = campaign_draft_client.mutate_campaign_drafts(customer_id, operations)
def mutate_campaign_drafts \
customer_id,
operations,
partial_failure: nil,
validate_only: nil,
options: nil,
&block
req = {
customer_id: customer_id,
operations: operations,
partial_failure: partial_failure,
validate_only: validate_only
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Ads::GoogleAds::V1::Services::MutateCampaignDraftsRequest)
@mutate_campaign_drafts.call(req, options, &block)
end
# Promotes the changes in a draft back to the base campaign.
#
# This method returns a Long Running Operation (LRO) indicating if the
# Promote is done. Use [Operations.GetOperation] to poll the LRO until it
# is done. Only a done status is returned in the response. See the status
# in the Campaign Draft resource to determine if the promotion was
# successful. If the LRO failed, use
# {Google::Ads::GoogleAds::V1::Services::CampaignDraftService::ListCampaignDraftAsyncErrors CampaignDraftService::ListCampaignDraftAsyncErrors} to view the list of
# error reasons.
#
# @param campaign_draft [String]
# The resource name of the campaign draft to promote.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g., timeout,
# retries, etc.
# @return [Google::Gax::Operation]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/ads/google_ads"
#
# campaign_draft_client = Google::Ads::GoogleAds::CampaignDraft.new(version: :v1)
# formatted_campaign_draft = Google::Ads::GoogleAds::V1::Services::CampaignDraftServiceClient.campaign_draft_path("[CUSTOMER]", "[CAMPAIGN_DRAFT]")
#
# # Register a callback during the method call.
# operation = campaign_draft_client.promote_campaign_draft(formatted_campaign_draft) do |op|
# raise op.results.message if op.error?
# op_results = op.results
# # Process the results.
#
# metadata = op.metadata
# # Process the metadata.
# end
#
# # Or use the return value to register a callback.
# operation.on_done do |op|
# raise op.results.message if op.error?
# op_results = op.results
# # Process the results.
#
# metadata = op.metadata
# # Process the metadata.
# end
#
# # Manually reload the operation.
# operation.reload!
#
# # Or block until the operation completes, triggering callbacks on
# # completion.
# operation.wait_until_done!
def promote_campaign_draft \
campaign_draft,
options: nil
req = {
campaign_draft: campaign_draft
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Ads::GoogleAds::V1::Services::PromoteCampaignDraftRequest)
operation = Google::Gax::Operation.new(
@promote_campaign_draft.call(req, options),
@operations_client,
Google::Protobuf::Empty,
Google::Protobuf::Empty,
call_options: options
)
operation.on_done { |operation| yield(operation) } if block_given?
operation
end
# Returns all errors that occurred during CampaignDraft promote. Throws an
# error if called before campaign draft is promoted.
# Supports standard list paging.
#
# @param resource_name [String]
# The name of the campaign draft from which to retrieve the async errors.
# @param page_size [Integer]
# The maximum number of resources contained in the underlying API
# response. If page streaming is performed per-resource, this
# parameter does not affect the return value. If page streaming is
# performed per-page, this determines the maximum number of
# resources in a page.
# @param options [Google::Gax::CallOptions]
# Overrides the default settings for this call, e.g., timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Gax::PagedEnumerable<Google::Rpc::Status>]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Gax::PagedEnumerable<Google::Rpc::Status>]
# An enumerable of Google::Rpc::Status instances.
# See Google::Gax::PagedEnumerable documentation for other
# operations such as per-page iteration or access to the response
# object.
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/ads/google_ads"
#
# campaign_draft_client = Google::Ads::GoogleAds::CampaignDraft.new(version: :v1)
# formatted_resource_name = Google::Ads::GoogleAds::V1::Services::CampaignDraftServiceClient.campaign_draft_path("[CUSTOMER]", "[CAMPAIGN_DRAFT]")
#
# # Iterate over all results.
# campaign_draft_client.list_campaign_draft_async_errors(formatted_resource_name).each do |element|
# # Process element.
# end
#
# # Or iterate over results one page at a time.
# campaign_draft_client.list_campaign_draft_async_errors(formatted_resource_name).each_page do |page|
# # Process each page at a time.
# page.each do |element|
# # Process element.
# end
# end
def list_campaign_draft_async_errors \
resource_name,
page_size: nil,
options: nil,
&block
req = {
resource_name: resource_name,
page_size: page_size
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Ads::GoogleAds::V1::Services::ListCampaignDraftAsyncErrorsRequest)
@list_campaign_draft_async_errors.call(req, options, &block)
end
end
end
end
end
end
end
| 47.183223 | 175 | 0.584308 |
214460aea3318acb0657ce2afe5588c11f5d71d6 | 7,198 | # frozen_string_literal: true
class WorkQueue::AppealSerializer
include FastJsonapi::ObjectSerializer
extend Helpers::AppealHearingHelper
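# Illustrative usage sketch, not part of the original source (+appeal+ and
# +current_user+ are placeholders): this serializer is typically rendered
# with the requesting user passed through params, since several attributes
# below require params[:user].
#
#   WorkQueue::AppealSerializer.new(appeal, params: { user: current_user }).serializable_hash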
attribute :assigned_attorney
attribute :assigned_judge
attribute :issues do |object|
object.request_issues.active_or_decided_or_withdrawn.includes(:remand_reasons).map do |issue|
{
id: issue.id,
program: issue.benefit_type,
description: issue.description,
notes: issue.notes,
diagnostic_code: issue.contested_rating_issue_diagnostic_code,
remand_reasons: issue.remand_reasons,
closed_status: issue.closed_status,
decision_date: issue.decision_date
}
end
end
attribute :status
attribute :decision_issues do |object, params|
if params[:user].nil?
fail Caseflow::Error::MissingRequiredProperty, message: "Params[:user] is required"
end
decision_issues = AppealDecisionIssuesPolicy.new(appeal: object, user: params[:user]).visible_decision_issues
decision_issues.uniq.map do |issue|
{
id: issue.id,
disposition: issue.disposition,
description: issue.description,
benefit_type: issue.benefit_type,
remand_reasons: issue.remand_reasons,
diagnostic_code: issue.diagnostic_code,
request_issue_ids: issue.request_decision_issues.pluck(:request_issue_id)
}
end
end
attribute :nod_date_updates do |object|
object.nod_date_updates.map do |nod_date_update|
WorkQueue::NodDateUpdateSerializer.new(nod_date_update).serializable_hash[:data][:attributes]
end
end
attribute :can_edit_request_issues do |object, params|
AppealRequestIssuesPolicy.new(user: params[:user], appeal: object).editable?
end
attribute(:hearings) do |object, params|
# For substitution appeals after death dismissal, we need to show hearings from the source appeal
# in addition to those on the new/target appeal; this avoids copying them to new appeal stream
associated_hearings = []
if object.substitution_appeal?
associated_hearings = hearings(object.appellant_substitution.source_appeal, params)
end
associated_hearings + hearings(object, params)
end
attribute :withdrawn, &:withdrawn?
attribute :removed, &:removed?
attribute :overtime, &:overtime?
attribute :veteran_appellant_deceased, &:veteran_appellant_deceased?
attribute :assigned_to_location
attribute :distributed_to_a_judge, &:distributed_to_a_judge?
attribute :completed_hearing_on_previous_appeal? do
false
end
attribute :appellant_is_not_veteran
attribute :appellant_full_name do |object|
object.claimant&.name
end
attribute :appellant_first_name do |object|
object.claimant&.first_name
end
attribute :appellant_middle_name do |object|
object.claimant&.middle_name
end
attribute :appellant_last_name do |object|
object.claimant&.last_name
end
attribute :appellant_suffix do |object|
object.claimant&.suffix
end
attribute :appellant_date_of_birth do |object|
object.claimant&.date_of_birth
end
attribute :appellant_address do |object|
object.claimant&.address
end
attribute :appellant_phone_number do |object|
object.claimant.is_a?(OtherClaimant) ? object.claimant&.phone_number : nil
end
attribute :appellant_email_address do |object|
object.claimant&.email_address
end
attribute :appellant_tz, &:appellant_tz
attribute :appellant_relationship, &:appellant_relationship
attribute :appellant_type do |appeal|
appeal.claimant&.type
end
attribute :appellant_party_type do |appeal|
appeal.claimant.is_a?(OtherClaimant) ? appeal.claimant&.party_type : nil
end
attribute :unrecognized_appellant_id do |appeal|
appeal.claimant.is_a?(OtherClaimant) ? appeal.claimant&.unrecognized_appellant&.id : nil
end
attribute :has_poa do |appeal|
appeal.claimant&.power_of_attorney
end
attribute :cavc_remand do |object|
if object.cavc_remand
WorkQueue::CavcRemandSerializer.new(object.cavc_remand).serializable_hash[:data][:attributes]
end
end
attribute :remand_source_appeal_id do |appeal|
appeal.cavc_remand&.source_appeal&.uuid
end
attribute :remand_judge_name do |appeal|
appeal.cavc_remand&.source_appeal&.reviewing_judge_name
end
attribute :appellant_substitution do |object|
if object.appellant_substitution
WorkQueue::AppellantSubstitutionSerializer.new(object.appellant_substitution)
.serializable_hash[:data][:attributes]
end
end
attribute :substitutions do |object|
object.substitutions.map do |substitution|
WorkQueue::AppellantSubstitutionSerializer.new(substitution).serializable_hash[:data][:attributes]
end
end
attribute :veteran_death_date
attribute :veteran_file_number
attribute :veteran_full_name do |object|
object.veteran ? object.veteran.name.formatted(:readable_full) : "Cannot locate"
end
attribute :closest_regional_office
attribute :closest_regional_office_label
attribute(:available_hearing_locations) { |object| available_hearing_locations(object) }
attribute :external_id, &:uuid
attribute :type
attribute :vacate_type
attribute :aod, &:advanced_on_docket?
attribute :docket_name
attribute :docket_number
attribute :docket_range_date
attribute :decision_date
attribute :nod_date, &:receipt_date
attribute :withdrawal_date
attribute :certification_date do
nil
end
attribute :paper_case do
false
end
attribute :regional_office do
end
attribute :caseflow_veteran_id do |object|
object.veteran ? object.veteran.id : nil
end
attribute :document_id do |object|
object.latest_attorney_case_review&.document_id
end
attribute :attorney_case_review_id do |object|
object.latest_attorney_case_review&.id
end
attribute :attorney_case_rewrite_details do |object|
if FeatureToggle.enabled?(:overtime_revamp, user: RequestStore.store[:current_user])
{
note_from_attorney: object.latest_attorney_case_review&.note,
untimely_evidence: object.latest_attorney_case_review&.untimely_evidence
}
else
{
overtime: object.latest_attorney_case_review&.overtime,
note_from_attorney: object.latest_attorney_case_review&.note,
untimely_evidence: object.latest_attorney_case_review&.untimely_evidence
}
end
end
attribute :can_edit_document_id do |object, params|
AmaDocumentIdPolicy.new(
user: params[:user],
case_review: object.latest_attorney_case_review
).editable?
end
attribute :readable_hearing_request_type, &:readable_current_hearing_request_type
attribute :readable_original_hearing_request_type, &:readable_original_hearing_request_type
attribute :docket_switch do |object|
if object.docket_switch
WorkQueue::DocketSwitchSerializer.new(object.docket_switch).serializable_hash[:data][:attributes]
end
end
attribute :switched_dockets do |object|
object.switched_dockets.map do |docket_switch|
WorkQueue::DocketSwitchSerializer.new(docket_switch).serializable_hash[:data][:attributes]
end
end
end
| 28.117188 | 113 | 0.755765 |
38dc003c9b3b57f171c26059a07ced55a049071b | 404 | #
# Cookbook Name:: newrelic
# Attributes:: dotnet-agent
#
# Copyright 2012-2014, Escape Studios
#
default['newrelic']['dotnet-agent']['https_download'] = 'https://download.newrelic.com/dot_net_agent/release/x64'
default['newrelic']['dotnet-agent']['install_level'] = '1'
default['newrelic']['dotnet-agent']['dotnet_recipe'] = 'ms_dotnet4'
default['newrelic']['dotnet-agent']['agent_action'] = :install
| 33.666667 | 113 | 0.722772 |
1a2c5ccca7de72e2013c3ffce888eaf07bdda483 | 403 | # frozen_string_literal: true
class AddDefaultTargetProject < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
def up
with_lock_retries do
add_column :project_settings, :mr_default_target_self, :boolean, default: false, null: false
end
end
def down
with_lock_retries do
remove_column :project_settings, :mr_default_target_self
end
end
end
| 22.388889 | 98 | 0.756824 |
0179a7cf896034252e088c93c5fa175e7f3640ee | 402 | class CreateRepositories < ActiveRecord::Migration[5.1]
def change
create_table :repositories do |t|
t.string :name
t.string :owner
t.index [:name, :owner], unique: true
end
add_reference :pull_requests, :repository, foreign_key: true
add_reference :review_rules, :repository, foreign_key: true
add_reference :settings, :repository, foreign_key: true
end
end
| 28.714286 | 64 | 0.711443 |
03a14acac084f260f82e02e3d356eb9d43f1e12d | 1,180 | require "test_helper"
class Edition::RoleAppointmentsTest < ActiveSupport::TestCase
test "re-drafting an edition with role appointments copies the appointments" do
appointments = [
create(:role_appointment),
create(:role_appointment),
]
published = create(:published_news_article, role_appointments: appointments)
assert_equal appointments, published.create_draft(create(:user)).role_appointments
end
test "editions with ministerial role appointments include them in their search info" do
minister = create(:ministerial_role)
appointment = create(:role_appointment, role: minister)
news_article = create(:news_article, role_appointments: [appointment])
assert_equal [appointment.person.slug], news_article.search_index["people"]
assert_equal [appointment.role.slug], news_article.search_index["roles"]
end
test "editions with non-ministerial role appointments don't include the role in the search info" do
appointment = create(:role_appointment, role: create(:judge_role))
news_article = create(:news_article, role_appointments: [appointment])
assert_equal [], news_article.search_index["roles"]
end
end
| 42.142857 | 101 | 0.766949 |
878312849f0824a6ef434b2f6fe9ce07acea7f36 | 369 | module Mutant
class Mutator
class Node
# Abstract mutator for literal AST nodes
class Literal < self
include AbstractType
private
# Emit values
#
# @return [undefined]
def emit_values
values.each(&method(:emit_type))
end
end # Literal
end # Node
end # Mutator
end # Mutant
| 18.45 | 46 | 0.571816 |
5d3e856ddcefde45c67ad7d9d6eb18277d1248bb | 684 | class AddInviteDataToMember < ActiveRecord::Migration
def up
add_column :members, :created_by_id, :integer
add_column :members, :invite_email, :string
add_column :members, :invite_token, :string
add_column :members, :invite_accepted_at, :datetime
change_column :members, :user_id, :integer, null: true
add_index :members, :invite_token, unique: true
end
def down
remove_index :members, :invite_token
change_column :members, :user_id, :integer, null: false
remove_column :members, :invite_accepted_at
remove_column :members, :invite_token
remove_column :members, :invite_email
remove_column :members, :created_by_id
end
end
| 28.5 | 59 | 0.741228 |
ac9f7a70900e4cf1a1b0562365f6bfc6d4efc823 | 3,040 | =begin
#ESP Documentation
#The Evident Security Platform API (version 2.0) is designed to allow users granular control over their Amazon Web Service security experience by allowing them to review alerts, monitor signatures, and create custom signatures.
OpenAPI spec version: v2_sdk
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for ESP::CustomSignatureDefinition
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'CustomSignatureDefinition' do
before do
# run before each test
@instance = ESP::CustomSignatureDefinition.new
end
after do
# run after each test
end
describe 'test an instance of CustomSignatureDefinition' do
it 'should create an instance of CustomSignatureDefinition' do
expect(@instance).to be_instance_of(ESP::CustomSignatureDefinition)
end
end
describe 'test attribute "id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
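# Illustrative only, not part of the generated spec (assumes attributes
# default to nil on a newly built model):
# expect(@instance.id).to be_nil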
end
end
describe 'test attribute "code"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "updated_at"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "created_at"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "version_number"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "language"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "status"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "custom_signature"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "custom_signature_id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "results"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "result_ids"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 29.803922 | 227 | 0.722039 |
7995bcfc4442201d2473a3609efb8469c07784aa | 906 | class Redis
class Store < self
include Ttl, Interface
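# Illustrative construction, not part of the original source (option
# values are placeholders; remaining options are handed to the underlying
# Redis client via super):
#
#   store = Redis::Store.new(namespace: "app", marshalling: true)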
def initialize(options = { })
super
_extend_marshalling options
_extend_namespace options
end
def self.rails3? #:nodoc:
defined?(::Rails) && ::Rails::VERSION::MAJOR == 3
end
def self.rails31? #:nodoc:
defined?(::Rails) && ::Rails::VERSION::MAJOR == 3 && ::Rails::VERSION::MINOR == 1
end
def reconnect
@client.reconnect
end
def to_s
"Redis Client connected to #{@client.host}:#{@client.port} against DB #{@client.db}"
end
private
def _extend_marshalling(options)
@marshalling = !(options[:marshalling] === false) # HACK - TODO delegate to Factory
extend Marshalling if @marshalling
end
def _extend_namespace(options)
@namespace = options[:namespace]
extend Namespace if @namespace
end
end
end
| 22.65 | 91 | 0.613687 |
ed969df977a0dbad59e7eacdcdad40066aee2590 | 5,164 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe User, type: :model do
let(:user) { FactoryBot.create(:user) }
let(:post) { FactoryBot.create(:post) }
let(:other_post) { FactoryBot.create(:post) }
let(:friend_1) { FactoryBot.create(:user) }
let(:friend_2) { FactoryBot.create(:user) }
it 'is valid with a name, email, and password' do
expect(FactoryBot.build(:user)).to be_valid
end
it 'is invalid without a name' do
user = FactoryBot.build(:user, name: nil)
user.valid?
expect(user.errors[:name]).to include("can't be blank")
end
it 'is invalid without an email address' do
user = FactoryBot.build(:user, email: nil)
user.valid?
expect(user.errors[:email]).to include("can't be blank")
end
it 'is invalid with a duplicate email address' do
FactoryBot.create(:user, email: '[email protected]')
user = FactoryBot.build(:user, email: '[email protected]')
user.valid?
expect(user.errors[:email]).to include('has already been taken')
end
describe '#posts' do
it 'brings all posts' do
expect(user.posts).to be_empty
FactoryBot.create_list(:post, 3, user_id: user.id)
expect(user.posts.count).to eq 3
end
end
context '@comments' do
it 'brings all comments' do
expect(user.comments).to be_empty
FactoryBot.create_list(:comment, 5, user_id: user.id, post_id: post.id)
expect(user.comments.count).to eq 5
end
end
describe '#likes_given' do
it 'brings all likes given' do
expect(user.likes_given).to be_empty
FactoryBot.create_list(:like, 75, user_id: user.id, post_id: post.id)
expect(user.likes_given.count).to eq 75
end
end
describe '#friends' do
it 'brings all friends' do
expect(user.friends).to be_empty
user.friendships.create(friend_id: friend_1.id).confirm_friend
expect(user.friends).not_to be_empty
user.friendships.create(friend_id: friend_2.id).confirm_friend
expect(user.friends.count).to eq 2
end
end
describe '#pending_requests' do
it 'brings all pending_requests as Friendship model' do
expect(user.pending_requests).to be_empty
friend_1.friendships.create(friend_id: user.id)
friend_2.friendships.create(friend_id: user.id)
expect(user.pending_requests.count).to eq 2
expect(user.pending_requests.first).to be_a Friendship
expect(user.pending_requests.second).to be_a Friendship
end
end
describe '#pending_friends' do
it 'brings all pending requests as User model' do
expect(user.pending_friends).to be_empty
friend_1.friendships.create(friend_id: user.id)
friend_2.friendships.create(friend_id: user.id)
expect(user.pending_friends.count).to eq 2
expect(user.pending_friends.first).to be_a described_class
expect(user.pending_friends.second).to be_a described_class
end
end
describe '#sent_requests' do
it 'brings all sent request as Friendship model' do
expect(user.sent_requests).to be_empty
user.friendships.create(friend_id: friend_1.id)
user.friendships.create(friend_id: friend_2.id)
expect(user.sent_requests.count).to eq 2
expect(user.sent_requests.first).to be_a Friendship
expect(user.sent_requests.second).to be_a Friendship
end
end
describe '#sent_friends' do
it 'brings all sent requests as User model' do
expect(user.sent_friends).to be_empty
user.friendships.create(friend_id: friend_1.id)
expect(user.sent_friends.count).to eq 1
expect(user.sent_friends.first).to be_a described_class
end
end
describe '#liked?' do
it 'returns true if a user liked a specific post' do
user.likes_given.create(post_id: post.id)
expect(user.liked?(post)).to eq true
expect(user.liked?(other_post)).to eq false
end
end
describe '#sent_request?' do
it 'returns true if a user sent request given a specific user' do
user.friendships.create(friend_id: friend_1.id)
expect(user.sent_request?(friend_1)).to eq true
expect(user.sent_request?(friend_2)).to eq false
end
end
describe '#pending_request?' do
it 'returns true if a user receive request given a specific user' do
friend_1.friendships.create(friend_id: user.id)
expect(user.pending_request?(friend_1)).to eq true
expect(user.pending_request?(friend_2)).to eq false
end
end
describe '#friend?' do
it 'returns true if a user has friendship confirmed with another user' do
user.friendships.create(friend_id: friend_1.id).confirm_friend
user.friendships.create(friend_id: friend_2.id)
expect(user.friend?(friend_1)).to eq true
expect(user.friend?(friend_2)).to eq false
end
end
describe '#feed' do
it 'return all posts from the user and his friends' do
FactoryBot.create_list(:post, 7, user_id: user.id)
FactoryBot.create_list(:post, 9, user_id: friend_1.id)
FactoryBot.create_list(:post, 20, user_id: friend_2.id)
user.friendships.create(friend_id: friend_1.id).confirm_friend
expect(user.feed.count).to eq 16
end
end
end
| 33.532468 | 77 | 0.701975 |
38dcc2a7e2174bfe45de3d4f4b492324a910650d | 1,384 | # Copyright 2011-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require 'spec_helper'
module AWS
class SimpleWorkflow
describe ActivityType do
config_attributes = [
:default_task_heartbeat_timeout,
:default_task_schedule_to_close_timeout,
:default_task_schedule_to_start_timeout,
:default_task_start_to_close_timeout,
]
it_should_behave_like("a simple workflow type", config_attributes) do
let(:config) { stub_config }
let(:client) { config.simple_workflow_client }
let(:domain) { Domain.new('domain-name', :config => config) }
let(:type) { ActivityType.new(domain, 'name', 'version') }
let(:type_class) { ActivityType }
let(:describe_method) { :describe_activity_type }
let(:deprecate_method) { :deprecate_activity_type }
end
end
end
end
| 33.756098 | 78 | 0.700145 |
bf776dde4ac44e219b584604659aaa972d5dcedc | 1,579 | module SessionsHelper
#logs in the given user
#:user_id is an arbitrary key
def log_in(user)
session[:user_id] = user.id
end
#returns true if the given user is the current user
def current_user?(user)
user == current_user
end
#returns the current logged-in user (if any)
#Returns the user corresponding to the remember token cookie
def current_user
if (user_id = session[:user_id])
@current_user ||= User.find_by(id: session[:user_id])
elsif (user_id = cookies.signed[:user_id])
#raise #The tests still pass, so this branch is currently untested.
user = User.find_by(id: user_id)
if user && user.authenticated?(:remember, cookies[:remember_token])
log_in user
@current_user = user
end
end
end
#returns true if the user is logged in, false otherwise
def logged_in?
!current_user.nil?
end
#forgets a persistent session
def forget(user)
user.forget
cookies.delete(:user_id)
cookies.delete(:remember_token)
end
#logs out the current user
def log_out
forget(current_user)
session.delete(:user_id)
@current_user = nil
end
#Remembers a user in a persistent session
def remember(user)
user.remember
cookies.permanent.signed[:user_id] = user.id
cookies.permanent[:remember_token] = user.remember_token
end
#Redirects to stored location (or to the default)
def redirect_back_or(default)
redirect_to(session[:forwarding_url] || default)
session.delete(:forwarding_url)
end
#stores the url trying to be accessed
def store_location
session[:forwarding_url] = request.original_url if request.get?
end
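# Illustrative flow, not part of the original source (the controller and
# action names are assumptions): a before-action can call store_location
# before redirecting an unauthenticated visitor to the login page; after a
# successful login the sessions controller can then call
#
#   log_in user
#   remember user
#   redirect_back_or user
#
# to send the user back to the originally requested URL.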
end
| 26.316667 | 70 | 0.744775 |
e9c5219c395f3b43e5937f2c4c392e460e9fc465 | 622 | require 'simplecov'
require 'rspec'
require 'rspec/its'
require 'webmock/rspec'
require 'vcr'
require 'jortt'
SimpleCov.start
if ENV['CI'] == 'true'
require 'codecov'
SimpleCov.formatter = SimpleCov::Formatter::Codecov
end
VCR.configure do |c|
c.cassette_library_dir = "spec/fixtures/vcr_cassettes"
c.hook_into :webmock
c.configure_rspec_metadata!
c.default_cassette_options = { record: :once }
c.before_record do |i|
i.response.headers.delete('Set-Cookie')
i.request.headers.delete('Authorization')
end
end
ENV['JORTT_CLIENT_ID'] ||= 'client-id'
ENV['JORTT_CLIENT_SECRET'] ||= 'client-secret'
| 22.214286 | 56 | 0.731511 |
7a70fa9aae6a85aa71ef783a1efd8616d0422b6d | 138 | require 'test_helper'
class Protorails::Test < ActiveSupport::TestCase
test "truth" do
assert_kind_of Module, Protorails
end
end
| 17.25 | 48 | 0.76087 |
eda2b9f653e9bf348ad9549a9c6db4f95fe90e3c | 2,973 | require 'rails_helper'
feature 'Visitor signs up with email address' do
scenario 'visitor can sign up with valid email address' do
email = '[email protected]'
sign_up_with(email)
expect(page).to have_content t('notices.signed_up_but_unconfirmed.first_paragraph_start')
expect(page).to have_content t('notices.signed_up_but_unconfirmed.first_paragraph_end')
expect(page).to have_content email
expect(Funnel::Registration::TotalSubmittedCount.call).to eq(1)
expect(Funnel::Registration::TotalRegisteredCount.call).to eq(0)
end
scenario 'visitor cannot sign up with invalid email address' do
sign_up_with('bogus')
expect_email_invalid(page)
end
scenario 'visitor cannot sign up with email with invalid domain name' do
invalid_addresses = [
'[email protected]',
'[email protected]',
]
allow(ValidateEmail).to receive(:mx_valid?).and_return(false)
invalid_addresses.each do |email|
sign_up_with(email)
expect_email_invalid(page)
end
end
scenario 'visitor cannot sign up with empty email address' do
sign_up_with('')
expect_email_invalid(page)
end
context 'user signs up and sets password, tries to sign up again' do
scenario 'sends email saying someone tried to sign up with their email address' do
user = create(:user)
expect { sign_up_with(user.email) }.
to change { ActionMailer::Base.deliveries.count }.by(1)
expect(last_email.html_part.body).to have_content(
t('user_mailer.signup_with_your_email.intro', app: APP_NAME),
)
end
end
scenario 'taken to profile page after sign up flow complete' do
visit sign_up_email_path
sign_up_and_2fa_ial1_user
expect(Funnel::Registration::TotalSubmittedCount.call).to eq(1)
expect(Funnel::Registration::TotalRegisteredCount.call).to eq(1)
expect(current_path).to eq account_path
end
it 'returns a bad request if the email contains invalid bytes' do
suppress_output do
sign_up_with("test@\xFFbar\xF8.com")
expect(page).to have_content 'Bad request'
end
end
it 'throttles sending confirmations after user submitted and then resumes after wait period' do
email = '[email protected]'
sign_up_with(email)
starting_count = unread_emails_for(email).size
max_attempts = Figaro.env.reg_unconfirmed_email_max_attempts.to_i
max_attempts.times do |i|
sign_up_with(email)
expect(unread_emails_for(email).size).to eq(starting_count + i + 1)
end
expect(unread_emails_for(email).size).to eq(starting_count + max_attempts)
sign_up_with(email)
expect(unread_emails_for(email).size).to eq(starting_count + max_attempts)
window_in_minutes = Figaro.env.reg_unconfirmed_email_window_in_minutes.to_i + 1
Timecop.travel(Time.zone.now + window_in_minutes.minutes) do
sign_up_with(email)
expect(unread_emails_for(email).size).to eq(starting_count + max_attempts + 1)
end
end
end
| 33.033333 | 97 | 0.731248 |
ab3b78d3d8c557fba8fe3bc0c91c0f81d6809beb | 39 | module Blogger
VERSION = "0.0.1"
end
| 9.75 | 19 | 0.666667 |
21c0049a40d76c0d4d49b95a5b1275f02bbef2ac | 527 | package "gmetad"
service "gmetad" do
enabled true
end
provide_service ("#{node[:cluster_name]}-gmetad")
cluster_nodes = { node['cluster_name'] => [ node['ip-address'] ] }
template "/etc/ganglia/gmetad.conf" do
source "gmetad.conf.erb"
backup false
owner "ganglia"
group "ganglia"
mode 0644
variables(:cluster_nodes => cluster_nodes, :clusters => [ node['cluster_name'] ])
notifies :restart, resources(:service => "gmetad")
end
directory "/var/lib/ganglia/rrds" do
owner "ganglia"
group "ganglia"
end
| 19.518519 | 83 | 0.696395 |
08e96c02f8c17900e87664b3f23f68f6f4af15fc | 2,576 | class Dropdown < UnscoredQuestion
include ActionView::Helpers
validates :alternatives, presence: true
def edit(_count)
html = '<td align="center"><a rel="nofollow" data-method="delete" href="/questions/' + id.to_s + '">Remove</a></td>'
html += '<td><input size="6" value="' + seq.to_s
html += '" name="question[' + id.to_s + '][seq]" id="question_' + id.to_s + '_seq" type="text"></td>'
html += '<td><textarea cols="50" rows="1" name="question[' + id.to_s + '][txt]"'
html += ' id="question_' + id.to_s + '_txt" placeholder="Edit question content here">' + txt + '</textarea></td>'
html += '<td><input size="10" disabled="disabled" value="' + type
html += '" name="question[' + id.to_s + '][type]" id="question_' + id.to_s + '_type" type="text"></td>'
html += '<td><!--placeholder (UnscoredQuestion does not need weight)--></td>'
html += '<td> alternatives <input size="8" value="' + alternatives
html += '" name="question[' + id.to_s + '][alternatives]" id="question_' + id.to_s + '_alternatives" type="text"></td>'
safe_join(['<tr>'.html_safe, '</tr>'.html_safe], html.html_safe)
end
def view_question_text
html = '<TD align="left"> ' + txt + ' </TD>'
html += '<TD align="left">' + type + '</TD>'
html += '<td align="center">' + weight.to_s + '</TD><TD align="center">—</TD>'
safe_join(['<TR>'.html_safe, '</TR>'.html_safe], html.html_safe)
end
def complete(count, answer = nil)
html = '<p style="width: 80%;"><label for="responses_' + count.to_s + '"">' + txt + ' </label>'
html += '<input id="responses_' + count.to_s + '_score" name="responses[' + count.to_s + '][score]" type="hidden" value="" style="min-width: 100px;">'
html += '<select id="responses_' + count.to_s + '_comments" label=' + txt + ' name="responses[' + count.to_s + '][comment]">'
alternatives = self.alternatives.split('|')
html += complete_for_alternatives(alternatives, answer)
html += '</select></p>'
html.html_safe
end
def complete_for_alternatives(alternatives, answer)
html = ''
alternatives.each do |alternative|
html += '<option value="' + alternative.to_s + '"'
html += ' selected' if !answer.nil? && (answer.comments == alternative)
html += '>' + alternative.to_s + '</option>'
end
html
end
def view_completed_question(count, answer)
html = '<b>' + count.to_s + '. ' + txt + '</b>'
html += '<BR>    ' + answer.comments.to_s
safe_join([''.html_safe, ''.html_safe], html.html_safe)
end
end
| 42.933333 | 154 | 0.600932 |
61ab9430a6dfe12ed4254aa6db009de040424ded | 15,898 | module EneBuildings
module Zip
# ZipFile is modeled after java.util.zip.ZipFile from the Java SDK.
# The most important methods are those inherited from
# ZipCentralDirectory for accessing information about the entries in
# the archive and methods such as get_input_stream and
# get_output_stream for reading from and writing entries to the
# archive. The class includes a few convenience methods such as
# #extract for extracting entries to the filesystem, and #remove,
# #replace, #rename and #mkdir for making simple modifications to
# the archive.
#
# Modifications to a zip archive are not committed until #commit or
# #close is called. The method #open accepts a block following
# the pattern from File.open offering a simple way to
# automatically close the archive when the block returns.
#
# The following example opens zip archive <code>my.zip</code>
# (creating it if it doesn't exist) and adds an entry
# <code>first.txt</code> and a directory entry <code>a_dir</code>
# to it.
#
# require 'zip'
#
# Zip::File.open("my.zip", Zip::File::CREATE) {
# |zipfile|
# zipfile.get_output_stream("first.txt") { |f| f.puts "Hello from ZipFile" }
# zipfile.mkdir("a_dir")
# }
#
# The next example reopens <code>my.zip</code> writes the contents of
# <code>first.txt</code> to standard out and deletes the entry from
# the archive.
#
# require 'zip'
#
# Zip::File.open("my.zip", Zip::File::CREATE) {
# |zipfile|
# puts zipfile.read("first.txt")
# zipfile.remove("first.txt")
# }
#
# ZipFileSystem offers an alternative API that emulates ruby's
# interface for accessing the filesystem, i.e. the File and Dir classes.
class File < CentralDirectory
CREATE = 1
SPLIT_SIGNATURE = 0x08074b50
ZIP64_EOCD_SIGNATURE = 0x06064b50
MAX_SEGMENT_SIZE = 3_221_225_472
MIN_SEGMENT_SIZE = 65_536
DATA_BUFFER_SIZE = 8192
IO_METHODS = [:tell, :seek, :read, :close]
attr_reader :name
# default -> false
attr_accessor :restore_ownership
# default -> false
attr_accessor :restore_permissions
# default -> true
attr_accessor :restore_times
# Returns the zip files comment, if it has one
attr_accessor :comment
# Opens a zip archive. Pass true as the second parameter to create
# a new archive if it doesn't exist already.
def initialize(file_name, create = nil, buffer = false, options = {})
super()
@name = file_name
@comment = ''
@create = create
case
when !buffer && ::File.size?(file_name)
@create = nil
@file_permissions = ::File.stat(file_name).mode
::File.open(name, 'rb') do |f|
read_from_stream(f)
end
when create
@file_permissions = create_file_permissions
@entry_set = EntrySet.new
when ::File.zero?(file_name)
raise Error, "File #{file_name} has zero size. Did you mean to pass the create flag?"
else
raise Error, "File #{file_name} not found"
end
@stored_entries = @entry_set.dup
@stored_comment = @comment
@restore_ownership = options[:restore_ownership] || false
@restore_permissions = options[:restore_permissions] || true
@restore_times = options[:restore_times] || true
end
class << self
# Same as #new. If a block is passed the ZipFile object is passed
# to the block and is automatically closed afterwards just as with
# ruby's builtin File.open method.
def open(file_name, create = nil)
zf = Zip::File.new(file_name, create)
return zf unless block_given?
begin
yield zf
ensure
zf.close
end
end
# Same as #open, but outputs data to a buffer instead of a file.
def add_buffer
io = ::StringIO.new('')
zf = Zip::File.new(io, true, true)
yield zf
zf.write_buffer(io)
end
# Like #open, but reads zip archive contents from a String or open IO
# stream, and outputs data to a buffer.
# (This can be used to extract data from a
# downloaded zip archive without first saving it to disk.)
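# A minimal usage sketch (assumes +zip_data+ is a String holding the bytes
# of a zip archive, e.g. read with ::File.binread); the block form mirrors
# #open:
#
#   Zip::File.open_buffer(zip_data) do |zip_file|
#     zip_file.each { |entry| puts entry.name }
#   end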
def open_buffer(io, options = {})
unless IO_METHODS.map { |method| io.respond_to?(method) }.all? || io.is_a?(String)
raise "Zip::File.open_buffer expects a String or IO-like argument (responds to #{IO_METHODS.join(', ')}). Found: #{io.class}"
end
if io.is_a?(::String)
require 'stringio'
io = ::StringIO.new(io)
elsif io.respond_to?(:binmode)
# https://github.com/rubyzip/rubyzip/issues/119
io.binmode
end
zf = Zip::File.new(io, true, true, options)
zf.read_from_stream(io)
yield zf
begin
zf.write_buffer(io)
rescue IOError => e
raise unless e.message == 'not opened for writing'
end
end
# Iterates over the contents of the ZipFile. This is more efficient
# than using a ZipInputStream since this method simply iterates
# through the entries in the central directory structure in the archive
# whereas ZipInputStream jumps through the entire archive accessing the
# local entry headers (which contain the same information as the
# central directory).
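# For example (hypothetical archive name), listing entry names without
# reading any entry data:
#
#   Zip::File.foreach("my.zip") { |entry| puts entry.name }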
def foreach(aZipFileName, &block)
open(aZipFileName) do |zipFile|
zipFile.each(&block)
end
end
def get_segment_size_for_split(segment_size)
case
when MIN_SEGMENT_SIZE > segment_size
MIN_SEGMENT_SIZE
when MAX_SEGMENT_SIZE < segment_size
MAX_SEGMENT_SIZE
else
segment_size
end
end
def get_partial_zip_file_name(zip_file_name, partial_zip_file_name)
partial_zip_file_name = zip_file_name.sub(/#{::File.basename(zip_file_name)}\z/,
partial_zip_file_name + ::File.extname(zip_file_name)) unless partial_zip_file_name.nil?
partial_zip_file_name ||= zip_file_name
partial_zip_file_name
end
def get_segment_count_for_split(zip_file_size, segment_size)
(zip_file_size / segment_size).to_i + (zip_file_size % segment_size == 0 ? 0 : 1)
end
def put_split_signature(szip_file, segment_size)
signature_packed = [SPLIT_SIGNATURE].pack('V')
szip_file << signature_packed
segment_size - signature_packed.size
end
#
# TODO: Make the code more understandable
#
def save_splited_part(zip_file, partial_zip_file_name, zip_file_size, szip_file_index, segment_size, segment_count)
ssegment_size = zip_file_size - zip_file.pos
ssegment_size = segment_size if ssegment_size > segment_size
szip_file_name = "#{partial_zip_file_name}.#{format('%03d', szip_file_index)}"
::File.open(szip_file_name, 'wb') do |szip_file|
if szip_file_index == 1
ssegment_size = put_split_signature(szip_file, segment_size)
end
chunk_bytes = 0
until ssegment_size == chunk_bytes || zip_file.eof?
segment_bytes_left = ssegment_size - chunk_bytes
buffer_size = segment_bytes_left < DATA_BUFFER_SIZE ? segment_bytes_left : DATA_BUFFER_SIZE
chunk = zip_file.read(buffer_size)
chunk_bytes += buffer_size
szip_file << chunk
# Progress info for tracking the split
yield segment_count, szip_file_index, chunk_bytes, ssegment_size if block_given?
end
end
end
# Splits an archive into parts with segment size
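# Usage sketch (hypothetical file name; the segment size is clamped to the
# MIN_SEGMENT_SIZE..MAX_SEGMENT_SIZE range by get_segment_size_for_split):
#
#   Zip::File.split("big.zip", Zip::File::MIN_SEGMENT_SIZE, false)
#   # => number of segments written, or nil if no split was needed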
def split(zip_file_name, segment_size = MAX_SEGMENT_SIZE, delete_zip_file = true, partial_zip_file_name = nil)
raise Error, "File #{zip_file_name} not found" unless ::File.exist?(zip_file_name)
raise Errno::ENOENT, zip_file_name unless ::File.readable?(zip_file_name)
zip_file_size = ::File.size(zip_file_name)
segment_size = get_segment_size_for_split(segment_size)
return if zip_file_size <= segment_size
segment_count = get_segment_count_for_split(zip_file_size, segment_size)
# Checking for correct zip structure
open(zip_file_name) {}
partial_zip_file_name = get_partial_zip_file_name(zip_file_name, partial_zip_file_name)
szip_file_index = 0
::File.open(zip_file_name, 'rb') do |zip_file|
until zip_file.eof?
szip_file_index += 1
save_splited_part(zip_file, partial_zip_file_name, zip_file_size, szip_file_index, segment_size, segment_count)
end
end
::File.delete(zip_file_name) if delete_zip_file
szip_file_index
end
end
# Returns an input stream to the specified entry. If a block is passed
# the stream object is passed to the block and the stream is automatically
# closed afterwards just as with ruby's builtin File.open method.
def get_input_stream(entry, &aProc)
get_entry(entry).get_input_stream(&aProc)
end
# Returns an output stream to the specified entry. If entry is not an instance
# of Zip::Entry, a new Zip::Entry will be initialized using the arguments
# specified. If a block is passed the stream object is passed to the block and
# the stream is automatically closed afterwards just as with ruby's builtin
# File.open method.
def get_output_stream(entry, permission_int = nil, comment = nil, extra = nil, compressed_size = nil, crc = nil, compression_method = nil, size = nil, time = nil, &aProc)
new_entry =
if entry.kind_of?(Entry)
entry
else
Entry.new(@name, entry.to_s, comment, extra, compressed_size, crc, compression_method, size, time)
end
if new_entry.directory?
raise ArgumentError,
"cannot open stream to directory entry - '#{new_entry}'"
end
new_entry.unix_perms = permission_int
zip_streamable_entry = StreamableStream.new(new_entry)
@entry_set << zip_streamable_entry
zip_streamable_entry.get_output_stream(&aProc)
end
# Returns the name of the zip archive
def to_s
@name
end
# Returns a string containing the contents of the specified entry
def read(entry)
get_input_stream(entry) { |is| is.read }
end
# Convenience method for adding the contents of a file to the archive
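# For example (assuming +zipfile+ is an open Zip::File and the paths are
# hypothetical), this stores README.md under the entry name "docs/readme.txt":
#
#   zipfile.add("docs/readme.txt", "README.md")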
def add(entry, src_path, &continue_on_exists_proc)
continue_on_exists_proc ||= proc { Zip.continue_on_exists_proc }
check_entry_exists(entry, continue_on_exists_proc, 'add')
new_entry = entry.kind_of?(Zip::Entry) ? entry : Zip::Entry.new(@name, entry.to_s)
new_entry.gather_fileinfo_from_srcpath(src_path)
new_entry.dirty = true
@entry_set << new_entry
end
# Removes the specified entry.
def remove(entry)
@entry_set.delete(get_entry(entry))
end
# Renames the specified entry.
def rename(entry, new_name, &continue_on_exists_proc)
foundEntry = get_entry(entry)
check_entry_exists(new_name, continue_on_exists_proc, 'rename')
@entry_set.delete(foundEntry)
foundEntry.name = new_name
@entry_set << foundEntry
end
# Replaces the specified entry with the contents of srcPath (from
# the file system).
def replace(entry, srcPath)
check_file(srcPath)
remove(entry)
add(entry, srcPath)
end
# Extracts entry to file dest_path.
def extract(entry, dest_path, &block)
block ||= proc { Zip.on_exists_proc }
found_entry = get_entry(entry)
found_entry.extract(dest_path, &block)
end
# Commits changes that have been made since the previous commit to
# the zip archive.
def commit
return unless commit_required?
on_success_replace do |tmp_file|
Zip::OutputStream.open(tmp_file) do |zos|
@entry_set.each do |e|
e.write_to_zip_output_stream(zos)
e.dirty = false
e.clean_up
end
zos.comment = comment
end
true
end
initialize(name)
end
# Writes any changes to the given buffer (instead of a file) and returns it
def write_buffer(io = ::StringIO.new(''))
Zip::OutputStream.write_buffer(io) do |zos|
@entry_set.each { |e| e.write_to_zip_output_stream(zos) }
zos.comment = comment
end
end
# Closes the zip file, committing any changes that have been made.
def close
commit
end
# Returns true if any changes have been made to this archive since
# the previous commit
def commit_required?
@entry_set.each do |e|
return true if e.dirty
end
@comment != @stored_comment || @entry_set != @stored_entries || @create == Zip::File::CREATE
end
# Searches for entry with the specified name. Returns nil if
# no entry is found. See also get_entry
def find_entry(entry_name)
@entry_set.find_entry(entry_name)
end
# Searches for entries given a glob
def glob(*args, &block)
@entry_set.glob(*args, &block)
end
# Searches for an entry just as find_entry, but throws Errno::ENOENT
# if no entry is found.
def get_entry(entry)
selected_entry = find_entry(entry)
raise Errno::ENOENT, entry unless selected_entry
selected_entry.restore_ownership = @restore_ownership
selected_entry.restore_permissions = @restore_permissions
selected_entry.restore_times = @restore_times
selected_entry
end
# Creates a directory
def mkdir(entryName, permissionInt = 0755)
raise Errno::EEXIST, "File exists - #{entryName}" if find_entry(entryName)
entryName = entryName.dup.to_s
entryName << '/' unless entryName.end_with?('/')
@entry_set << Zip::StreamableDirectory.new(@name, entryName, nil, permissionInt)
end
private
def directory?(newEntry, srcPath)
srcPathIsDirectory = ::File.directory?(srcPath)
if newEntry.directory? && !srcPathIsDirectory
raise ArgumentError,
"entry name '#{newEntry}' indicates directory entry, but " \
"'#{srcPath}' is not a directory"
elsif !newEntry.directory? && srcPathIsDirectory
newEntry.name += '/'
end
newEntry.directory? && srcPathIsDirectory
end
def check_entry_exists(entryName, continue_on_exists_proc, procedureName)
continue_on_exists_proc ||= proc { Zip.continue_on_exists_proc }
return unless @entry_set.include?(entryName)
if continue_on_exists_proc.call
remove get_entry(entryName)
else
raise Zip::EntryExistsError,
procedureName + " failed. Entry #{entryName} already exists"
end
end
def check_file(path)
raise Errno::ENOENT, path unless ::File.readable?(path)
end
def on_success_replace
tmp_filename = create_tmpname
if yield tmp_filename
::File.rename(tmp_filename, name)
::File.chmod(@file_permissions, name) if defined?(@file_permissions)
end
ensure
::File.unlink(tmp_filename) if ::File.exist?(tmp_filename)
end
def create_tmpname
dirname, basename = ::File.split(name)
::Dir::Tmpname.create(basename, dirname) do |tmpname|
opts = {perm: 0600, mode: ::File::CREAT | ::File::WRONLY | ::File::EXCL}
# Use ::File explicitly so this resolves to Ruby's File, not the enclosing Zip::File class.
f = ::File.open(tmpname, opts)
f.close
end
end
def create_file_permissions
Zip::RUNNING_ON_WINDOWS ? 0644 : 0666 - ::File.umask
end
end
end
# Copyright (C) 2002, 2003 Thomas Sondergaard
# rubyzip is free software; you can redistribute it and/or
# modify it under the terms of the ruby license.
end | 36.463303 | 175 | 0.655869 |
d55120a9e35c3484915f3c87364a30b3bf34ab55 | 837 | #
# This file is auto-generated, do not edit
#
module RecombeeApiClient
require_relative 'request'
require_relative '../errors'
##
#Gets the list of all the item properties in your database.
#
class ListItemProperties < ApiRequest
attr_accessor :timeout
attr_accessor :ensure_https
##
#
def initialize()
@timeout = 100000
@ensure_https = false
end
# HTTP method
def method
:get
end
# Values of body parameters as a Hash
def body_parameters
p = Hash.new
p
end
# Values of query parameters as a Hash.
# name of parameter => value of the parameter
def query_parameters
params = {}
params
end
# Relative path to the endpoint
def path
"/{databaseId}/items/properties/list/"
end
end
end
| 17.4375 | 61 | 0.624851 |
6169f95e0a3fff5a32bb3565cf2a237fa2bec777 | 1,014 | #
#
#
class PublishedProgrammeItem < ApplicationRecord
self.primary_key = :programmme_item_id
belongs_to :format
belongs_to :programmme_item
has_many :published_programme_assignments, dependent: :destroy do
# get the people with the given role
def role(role)
where(['programme_assignment_role_type_id = ?', role.id])
.order('published_programme_assignments.sort_order asc')
end
# get the people with the given roles
def roles(role_ids)
where(['programme_assignment_role_type_id in (?)', role_ids])
.order('published_programme_assignments.sort_order asc')
end
end
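# Usage sketch (assumes +item+ is a PublishedProgrammeItem, +moderator+ a
# ProgrammeAssignmentRoleType record, and +role_ids+ an array of such ids):
#
#   item.published_programme_assignments.role(moderator)
#   item.published_programme_assignments.roles(role_ids)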
has_many :people, through: :published_programme_assignments
enum visibility: {
is_public: 'public',
is_private: 'private'
}
acts_as_taggable
def self.only_public
where(visibility: 'public')
end
def self.only_private
where(visibility: 'private')
end
def public?
visibility == 'public'
end
def private?
visibility == 'private'
end
end
| 21.125 | 67 | 0.708087 |
7afb5464c8e85925bab5b52c79875d45503afb07 | 9,799 | require 'abstract_unit'
require 'action_dispatch/http/upload'
require 'action_controller/metal/strong_parameters'
class ParametersPermitTest < ActiveSupport::TestCase
def assert_filtered_out(params, key)
assert !params.has_key?(key), "key #{key.inspect} has not been filtered out"
end
setup do
@params = ActionController::Parameters.new(
person: {
age: '32',
name: {
first: 'David',
last: 'Heinemeier Hansson'
},
addresses: [{city: 'Chicago', state: 'Illinois'}]
}
)
@struct_fields = []
%w(0 1 12).each do |number|
['', 'i', 'f'].each do |suffix|
@struct_fields << "sf(#{number}#{suffix})"
end
end
end
def walk_permitted params
params.each do |k,v|
case v
when ActionController::Parameters
walk_permitted v
when Array
v.each { |x| walk_permitted x }
end
end
end
test 'iteration should not impact permit' do
hash = {"foo"=>{"bar"=>{"0"=>{"baz"=>"hello", "zot"=>"1"}}}}
params = ActionController::Parameters.new(hash)
walk_permitted params
sanitized = params[:foo].permit(bar: [:baz])
assert_equal({"0"=>{"baz"=>"hello"}}, sanitized[:bar].to_unsafe_h)
end
test 'if nothing is permitted, the hash becomes empty' do
params = ActionController::Parameters.new(id: '1234')
permitted = params.permit
assert permitted.permitted?
assert permitted.empty?
end
test 'key: permitted scalar values' do
values = ['a', :a, nil]
values += [0, 1.0, 2**128, BigDecimal.new(1)]
values += [true, false]
values += [Date.today, Time.now, DateTime.now]
values += [STDOUT, StringIO.new, ActionDispatch::Http::UploadedFile.new(tempfile: __FILE__),
Rack::Test::UploadedFile.new(__FILE__)]
values.each do |value|
params = ActionController::Parameters.new(id: value)
permitted = params.permit(:id)
assert_equal value, permitted[:id]
@struct_fields.each do |sf|
params = ActionController::Parameters.new(sf => value)
permitted = params.permit(:sf)
assert_equal value, permitted[sf]
end
end
end
test 'key: unknown keys are filtered out' do
params = ActionController::Parameters.new(id: '1234', injected: 'injected')
permitted = params.permit(:id)
assert_equal '1234', permitted[:id]
assert_filtered_out permitted, :injected
end
test 'key: arrays are filtered out' do
[[], [1], ['1']].each do |array|
params = ActionController::Parameters.new(id: array)
permitted = params.permit(:id)
assert_filtered_out permitted, :id
@struct_fields.each do |sf|
params = ActionController::Parameters.new(sf => array)
permitted = params.permit(:sf)
assert_filtered_out permitted, sf
end
end
end
test 'key: hashes are filtered out' do
[{}, {foo: 1}, {foo: 'bar'}].each do |hash|
params = ActionController::Parameters.new(id: hash)
permitted = params.permit(:id)
assert_filtered_out permitted, :id
@struct_fields.each do |sf|
params = ActionController::Parameters.new(sf => hash)
permitted = params.permit(:sf)
assert_filtered_out permitted, sf
end
end
end
test 'key: non-permitted scalar values are filtered out' do
params = ActionController::Parameters.new(id: Object.new)
permitted = params.permit(:id)
assert_filtered_out permitted, :id
@struct_fields.each do |sf|
params = ActionController::Parameters.new(sf => Object.new)
permitted = params.permit(:sf)
assert_filtered_out permitted, sf
end
end
test 'key: it is not assigned if not present in params' do
params = ActionController::Parameters.new(name: 'Joe')
permitted = params.permit(:id)
assert !permitted.has_key?(:id)
end
test 'key to empty array: empty arrays pass' do
params = ActionController::Parameters.new(id: [])
permitted = params.permit(id: [])
assert_equal [], permitted[:id]
end
test 'do not break params filtering on nil values' do
params = ActionController::Parameters.new(a: 1, b: [1, 2, 3], c: nil)
permitted = params.permit(:a, c: [], b: [])
assert_equal 1, permitted[:a]
assert_equal [1, 2, 3], permitted[:b]
assert_equal nil, permitted[:c]
end
test 'key to empty array: arrays of permitted scalars pass' do
[['foo'], [1], ['foo', 'bar'], [1, 2, 3]].each do |array|
params = ActionController::Parameters.new(id: array)
permitted = params.permit(id: [])
assert_equal array, permitted[:id]
end
end
test 'key to empty array: permitted scalar values do not pass' do
['foo', 1].each do |permitted_scalar|
params = ActionController::Parameters.new(id: permitted_scalar)
permitted = params.permit(id: [])
assert_filtered_out permitted, :id
end
end
test 'key to empty array: arrays of non-permitted scalar do not pass' do
[[Object.new], [[]], [[1]], [{}], [{id: '1'}]].each do |non_permitted_scalar|
params = ActionController::Parameters.new(id: non_permitted_scalar)
permitted = params.permit(id: [])
assert_filtered_out permitted, :id
end
end
test "fetch raises ParameterMissing exception" do
e = assert_raises(ActionController::ParameterMissing) do
@params.fetch :foo
end
assert_equal :foo, e.param
end
test "fetch with a default value of a hash does not mutate the object" do
params = ActionController::Parameters.new({})
params.fetch :foo, {}
assert_equal nil, params[:foo]
end
test 'hashes in array values get wrapped' do
params = ActionController::Parameters.new(foo: [{}, {}])
params[:foo].each do |hash|
assert !hash.permitted?
end
end
# Strong params has an optimization to avoid looping every time you read
# a key whose value is an array and building a new object. We check that
# optimization here.
test 'arrays are converted at most once' do
params = ActionController::Parameters.new(foo: [{}])
assert_same params[:foo], params[:foo]
end
# Strong params has an internal cache to avoid duplicated loops in the most
# common usage pattern. See the docs of the method `converted_arrays`.
#
# This test checks that if we push a hash to an array (in-place modification)
# the cache does not get fooled, the hash is still wrapped as strong params,
# and not permitted.
test 'mutated arrays are detected' do
params = ActionController::Parameters.new(users: [{id: 1}])
permitted = params.permit(users: [:id])
permitted[:users] << {injected: 1}
assert_not permitted[:users].last.permitted?
end
test "fetch doesnt raise ParameterMissing exception if there is a default" do
assert_equal "monkey", @params.fetch(:foo, "monkey")
assert_equal "monkey", @params.fetch(:foo) { "monkey" }
end
test "not permitted is sticky beyond merges" do
assert !@params.merge(a: "b").permitted?
end
test "permitted is sticky beyond merges" do
@params.permit!
assert @params.merge(a: "b").permitted?
end
test "modifying the parameters" do
@params[:person][:hometown] = "Chicago"
@params[:person][:family] = { brother: "Jonas" }
assert_equal "Chicago", @params[:person][:hometown]
assert_equal "Jonas", @params[:person][:family][:brother]
end
test "permit state is kept on a dup" do
@params.permit!
assert_equal @params.permitted?, @params.dup.permitted?
end
test "permit is recursive" do
@params.permit!
assert @params.permitted?
assert @params[:person].permitted?
assert @params[:person][:name].permitted?
assert @params[:person][:addresses][0].permitted?
end
test "permitted takes a default value when Parameters.permit_all_parameters is set" do
begin
ActionController::Parameters.permit_all_parameters = true
params = ActionController::Parameters.new({ person: {
age: "32", name: { first: "David", last: "Heinemeier Hansson" }
}})
assert params.slice(:person).permitted?
assert params[:person][:name].permitted?
ensure
ActionController::Parameters.permit_all_parameters = false
end
end
test "permitting parameters as an array" do
assert_equal "32", @params[:person].permit([ :age ])[:age]
end
test "to_h returns empty hash on unpermitted params" do
assert @params.to_h.is_a? Hash
assert_not @params.to_h.is_a? ActionController::Parameters
assert @params.to_h.empty?
end
test "to_h returns converted hash on permitted params" do
@params.permit!
assert @params.to_h.is_a? Hash
assert_not @params.to_h.is_a? ActionController::Parameters
assert_equal @params.to_hash, @params.to_h
end
test "to_h returns converted hash when .permit_all_parameters is set" do
begin
ActionController::Parameters.permit_all_parameters = true
params = ActionController::Parameters.new(crab: "Senjougahara Hitagi")
assert params.to_h.is_a? Hash
assert_not @params.to_h.is_a? ActionController::Parameters
assert_equal({ "crab" => "Senjougahara Hitagi" }, params.to_h)
ensure
ActionController::Parameters.permit_all_parameters = false
end
end
test "to_h returns always permitted parameter on unpermitted params" do
params = ActionController::Parameters.new(
controller: "users",
action: "create",
user: {
name: "Sengoku Nadeko"
}
)
assert_equal({ "controller" => "users", "action" => "create" }, params.to_h)
end
test "to_unsafe_h returns unfiltered params" do
assert @params.to_h.is_a? Hash
assert_not @params.to_h.is_a? ActionController::Parameters
assert_equal @params.to_hash, @params.to_unsafe_h
end
end
| 31.508039 | 96 | 0.666292 |
6aa930d88329d44ba99e9c9dac7101643981cea1 | 1,008 | # coding: utf-8
Gem::Specification.new do |spec|
spec.name = 'agency-jekyll-theme'
spec.version = '1.0.0'
spec.authors = ["Klaudia Alvarez"]
spec.email = '[email protected]'
spec.summary = "Agency Jekyll Theme is a jekyll theme gem, based on Agency theme created by Start Bootstrap."
spec.description = "Agency Jekyll Theme is a single-page theme. It features several content sections, a responsive portfolio grid with hover effects, full page portfolio item modals, a responsive timeline, and a contact form."
spec.homepage = "http://github.com/laklau/agency-jekyll-theme"
spec.license = 'MIT'
spec.files = `git ls-files -z`.split("\x0").select { |f| f.match(%r{^(assets|_layouts|_includes|_sass|_data|vendor|LICENSE|README.md|index.md|screenshot.png)}i) }
spec.add_development_dependency "jekyll", "~> 3.3"
spec.add_development_dependency "bundler", ">= 2.1.0"
spec.add_development_dependency "rake", ">= 12.3.3"
end
| 53.052632 | 230 | 0.688492 |
ffebb823caeac765b22d0c0517a148d5132bc465 | 596 | # lib/gemwarrior/entities/items/throne.rb
# Entity::Item::Throne
require_relative '../item'
module Gemwarrior
class Throne < Item
def initialize
super
self.name = 'throne'
self.name_display = 'Throne'
self.description = 'Made of what appears to be unfulfilled desires and latent, flawed happiness, the well-crafted seat still looks kinda comfy. The wizard Emerald sits in it, glaring at you.'
end
def use(world)
puts 'Your words fall on deaf chairs. Emerald continues to stare at you.'
{ type: nil, data: nil }
end
end
end
| 27.090909 | 198 | 0.672819 |
ab9ec4a432de09d90c9426bd3e27a79597c3ca15 | 3,425 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright:: Copyright 2011, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example deactivates all active ad units. To determine which ad units
# exist, run get_all_ad_units.rb or get_inventory_tree.rb.
require 'dfp_api'
API_VERSION = :v201605
def deactivate_ad_units()
# Get DfpApi instance and load configuration from ~/dfp_api.yml.
dfp = DfpApi::Api.new
# To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
# the configuration file or provide your own logger:
# dfp.logger = Logger.new('dfp_xml.log')
# Get the InventoryService.
inventory_service = dfp.service(:InventoryService, API_VERSION)
# Create statement text to select active ad units.
statement = DfpApi::FilterStatement.new(
'WHERE status = :status',
[
{:key => 'status',
:value => {:value => 'ACTIVE', :xsi_type => 'TextValue'}}
]
)
ad_unit_ids = []
begin
# Get ad units by statement.
page = inventory_service.get_ad_units_by_statement(statement.toStatement())
if page[:results]
page[:results].each_with_index do |ad_unit, index|
puts ("%d) Ad unit with ID: %d, status: %s and name: %s will be " +
"deactivated.") % [index + statement.offset, ad_unit[:id],
ad_unit[:status], ad_unit[:name]]
ad_unit_ids << ad_unit[:id]
end
end
statement.offset += DfpApi::SUGGESTED_PAGE_LIMIT
end while statement.offset < page[:total_result_set_size]
puts "Number of ad units to be deactivated: %d" % ad_unit_ids.size
if !ad_unit_ids.empty?
# Modify statement for action. Note, the values are still present.
statement = DfpApi::FilterStatement.new(
"WHERE status = :status AND id in (%s)" % ad_unit_ids.join(', '),
[
{:key => 'status',
:value => {:value => 'ACTIVE', :xsi_type => 'TextValue'}}
]
)
# Perform action.
result = inventory_service.perform_ad_unit_action(
{:xsi_type => 'DeactivateAdUnits'}, statement.toStatement())
# Display results.
if result and result[:num_changes] > 0
puts "Number of ad units deactivated: %d" % result[:num_changes]
else
puts 'No ad units were deactivated.'
end
else
puts 'No ad units found to deactivate.'
end
end
if __FILE__ == $0
begin
deactivate_ad_units()
# HTTP errors.
rescue AdsCommon::Errors::HttpError => e
puts "HTTP Error: %s" % e
# API errors.
rescue DfpApi::Errors::ApiException => e
puts "Message: %s" % e.message
puts 'Errors:'
e.errors.each_with_index do |error, index|
puts "\tError [%d]:" % (index + 1)
error.each do |field, value|
puts "\t\t%s: %s" % [field, value]
end
end
end
end
| 30.855856 | 79 | 0.64292 |
7989b863781a1d888eb356cdcf433691b8938252 | 5,482 | # frozen_string_literal: true
require 'spec_helper'
describe Gitlab::UrlBuilder do
subject { described_class }
describe '#build' do
it 'delegates to the class method' do
expect(subject).to receive(:build).with(:foo, bar: :baz)
subject.instance.build(:foo, bar: :baz)
end
end
describe '.build' do
using RSpec::Parameterized::TableSyntax
where(:factory, :path_generator) do
:project | ->(project) { "/#{project.full_path}" }
:commit | ->(commit) { "/#{commit.project.full_path}/-/commit/#{commit.id}" }
:issue | ->(issue) { "/#{issue.project.full_path}/-/issues/#{issue.iid}" }
:merge_request | ->(merge_request) { "/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}" }
:project_milestone | ->(milestone) { "/#{milestone.project.full_path}/-/milestones/#{milestone.iid}" }
:project_snippet | ->(snippet) { "/#{snippet.project.full_path}/snippets/#{snippet.id}" }
:project_wiki | ->(wiki) { "/#{wiki.container.full_path}/-/wikis/home" }
:ci_build | ->(build) { "/#{build.project.full_path}/-/jobs/#{build.id}" }
:group | ->(group) { "/groups/#{group.full_path}" }
:group_milestone | ->(milestone) { "/groups/#{milestone.group.full_path}/-/milestones/#{milestone.iid}" }
:group_wiki | ->(wiki) { "/groups/#{wiki.container.full_path}/-/wikis/home" }
:user | ->(user) { "/#{user.full_path}" }
:personal_snippet | ->(snippet) { "/snippets/#{snippet.id}" }
:wiki_page | ->(wiki_page) { "#{wiki_page.wiki.wiki_base_path}/#{wiki_page.slug}" }
:note_on_commit | ->(note) { "/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}" }
:diff_note_on_commit | ->(note) { "/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}" }
:discussion_note_on_commit | ->(note) { "/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}" }
:legacy_diff_note_on_commit | ->(note) { "/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}" }
:note_on_issue | ->(note) { "/#{note.project.full_path}/-/issues/#{note.noteable.iid}#note_#{note.id}" }
:discussion_note_on_issue | ->(note) { "/#{note.project.full_path}/-/issues/#{note.noteable.iid}#note_#{note.id}" }
:note_on_merge_request | ->(note) { "/#{note.project.full_path}/-/merge_requests/#{note.noteable.iid}#note_#{note.id}" }
:diff_note_on_merge_request | ->(note) { "/#{note.project.full_path}/-/merge_requests/#{note.noteable.iid}#note_#{note.id}" }
:discussion_note_on_merge_request | ->(note) { "/#{note.project.full_path}/-/merge_requests/#{note.noteable.iid}#note_#{note.id}" }
:legacy_diff_note_on_merge_request | ->(note) { "/#{note.project.full_path}/-/merge_requests/#{note.noteable.iid}#note_#{note.id}" }
:note_on_project_snippet | ->(note) { "/#{note.project.full_path}/snippets/#{note.noteable_id}#note_#{note.id}" }
:discussion_note_on_project_snippet | ->(note) { "/#{note.project.full_path}/snippets/#{note.noteable_id}#note_#{note.id}" }
:discussion_note_on_personal_snippet | ->(note) { "/snippets/#{note.noteable_id}#note_#{note.id}" }
:note_on_personal_snippet | ->(note) { "/snippets/#{note.noteable_id}#note_#{note.id}" }
end
with_them do
let(:object) { build_stubbed(factory) }
let(:path) { path_generator.call(object) }
it 'returns the full URL' do
expect(subject.build(object)).to eq("#{Gitlab.config.gitlab.url}#{path}")
end
it 'returns only the path if only_path is given' do
expect(subject.build(object, only_path: true)).to eq(path)
end
end
context 'when passing a commit without a project' do
let(:commit) { build_stubbed(:commit) }
it 'returns an empty string' do
allow(commit).to receive(:project).and_return(nil)
expect(subject.build(commit)).to eq('')
end
end
context 'when passing a commit note without a project' do
let(:note) { build_stubbed(:note_on_commit) }
it 'returns an empty string' do
allow(note).to receive(:project).and_return(nil)
expect(subject.build(note)).to eq('')
end
end
context 'when passing a Snippet' do
let(:snippet) { build_stubbed(:personal_snippet) }
it 'returns a raw snippet URL if requested' do
url = subject.build(snippet, raw: true)
expect(url).to eq "#{Gitlab.config.gitlab.url}/snippets/#{snippet.id}/raw"
end
end
context 'when passing an unsupported class' do
let(:object) { Object.new }
it 'raises an exception' do
expect { subject.build(object) }.to raise_error(NotImplementedError)
end
end
context 'when passing a batch loaded model' do
let(:project) { build_stubbed(:project) }
let(:object) do
BatchLoader.for(:project).batch do |batch, loader|
batch.each { |_| loader.call(:project, project) }
end
end
it 'returns the URL for the real object' do
expect(subject.build(object, only_path: true)).to eq("/#{project.full_path}")
end
end
end
end
| 45.305785 | 140 | 0.60124 |
2654c77f4e1b47c5cabf7c85a76873aeb020f9ee | 43 | module Daru
VERSION = '0.2.1'.freeze
end
| 10.75 | 26 | 0.674419 |
1d8b49056678382d6f969ea9c0202a028a3bb98b | 1,511 | class TadpolesController < ApplicationController
before_action :set_tadpole, only: [:show, :edit, :update, :destroy, :metamorphose]
def metamorphose
@frog = Frog.new(name: @tadpole.name, color: @tadpole.color, pond_id: @tadpole.frog.pond_id)
@tadpole.destroy
@frog.save
redirect_to frog_path(@frog)
end
def index
@tadpoles = Tadpole.all
end
def show
@tadpole = Tadpole.find(params[:id])
end
def new
set_frog # assigns @frog from params[:frog_id]
@tadpole = Tadpole.new
end
def edit
@frog = @tadpole.frog
end
def create
@tadpole = Tadpole.new(tadpole_params)
respond_to do |format|
if @tadpole.save
format.html { redirect_to @tadpole, notice: 'Tadpole was successfully created.' }
else
format.html { render :new }
end
end
end
def update
respond_to do |format|
if @tadpole.update(tadpole_params)
format.html { redirect_to @tadpole, notice: 'Tadpole was successfully updated.' }
else
format.html { render :edit }
end
end
end
def destroy
@tadpole.destroy
respond_to do |format|
format.html { redirect_to tadpoles_url, notice: 'Tadpole was successfully destroyed.' }
end
end
private
def set_tadpole
@tadpole = Tadpole.find(params[:id])
end
def set_frog
@frog = Frog.find(params[:frog_id])
end
def tadpole_params
params.require(:tadpole).permit(:name, :color, :frog_id)
end
# this is a push test
end
| 21.28169 | 96 | 0.647253 |
bf4c5871bfb25f3e686ce4f4742db81286ee133c | 1,258 | # -*- encoding: utf-8 -*-
# stub: csl 1.5.1 ruby lib
Gem::Specification.new do |s|
s.name = "csl".freeze
s.version = "1.5.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Sylvester Keil".freeze]
s.date = "2020-01-07"
s.description = "A Ruby parser and full API for the Citation Style Language (CSL),\nan open XML-based language to describe the formatting of citations\nand bibliographies.\n".freeze
s.email = ["http://sylvester.keil.or.at".freeze]
s.homepage = "https://github.com/inukshuk/csl-ruby".freeze
s.licenses = ["AGPL-3.0".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.2".freeze)
s.rubygems_version = "3.0.3".freeze
s.summary = "A Ruby CSL parser and library".freeze
s.installed_by_version = "3.0.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<namae>.freeze, ["~> 1.0"])
else
s.add_dependency(%q<namae>.freeze, ["~> 1.0"])
end
else
s.add_dependency(%q<namae>.freeze, ["~> 1.0"])
end
end
| 37 | 183 | 0.680445 |
ab0448e148370168f728fd1b592b2168d6ac9c0d | 2,281 | class OracleTableCopier < TableCopier
def run
destination_connection.connect!.synchronize do |jdbc_conn|
copy_manager(jdbc_conn).copy_in(copy_sql, java_stream)
end
ensure
destination_connection.disconnect
end
def self.cancel(import)
connection = import.source_dataset.data_source.connect_as(import.user)
cancelable_query = CancelableQuery.new(connection, import.handle, import.user)
cancelable_query.cancel
kill_session(cancelable_query, connection)
end
def self.kill_session(cancelable_query, connection)
#Get the session id of the sql job running on oracle for the import and kill/disconnect that session
#Not tested due to difficulty with multiple threads. Be careful if changing this.
sid = connection.fetch(sid_sql(cancelable_query)).first
sid && connection.execute(cancel_sql(sid[:sid]))
end
private
def self.sid_sql(cancelable_query)
<<-SQL
SELECT
''''||sid||', '||s.serial#||'''' sid
FROM V$SESSION s,
V$SQLAREA sa
WHERE sa.sql_text like '%#{cancelable_query.check_id}%'
AND s.type != 'BACKGROUND'
AND s.program = 'JDBC Thin Client'
AND sid != sys_context('userenv', 'sid')
ORDER BY sid
SQL
end
def self.cancel_sql(sid)
<<-SQL
ALTER SYSTEM DISCONNECT SESSION #{sid} IMMEDIATE
SQL
end
def copy_manager(jdbc_conn)
org.postgresql.copy.CopyManager.new(jdbc_conn)
end
def copy_sql
"COPY #{destination_table_fullname}(#{column_names}) FROM STDIN WITH DELIMITER ',' CSV"
end
def java_stream
java.io.InputStreamReader.new(org.jruby.util.IOInputStream.new(EnumeratorIO.new(streamer_enum)))
end
def streamer_enum
cancelable_query.stream(source_dataset.all_rows_sql(sample_count), {:show_headers => false})
end
def cancelable_query
@cancelable_query ||= CancelableQuery.new(source_connection, pipe_name, user)
end
def column_names
account = source_dataset.data_source.account_for_user!(user)
columns = DatasetColumn.columns_for(account, source_dataset)
columns.map { |column| "\"#{column.name}\"" }.join(", ")
end
def distribution_key_columns
primary_key_columns
end
def convert_column_type(oracle_type)
OracleDataTypes.greenplum_type_for oracle_type
end
end
| 28.5125 | 104 | 0.727751 |
1a83a7b073d41f9a769bc86826abaf3e435f5173 | 38 | module Hunter
VERSION = "0.2.0"
end
| 9.5 | 19 | 0.657895 |
acfaceb40debadcbcac5b11bf5ea14db1d4f3bfe | 3,026 | class Clp < Formula
desc "Linear programming solver"
homepage "https://github.com/coin-or/Clp"
url "https://github.com/coin-or/Clp/archive/releases/1.17.6.tar.gz"
sha256 "afff465b1620cfcbb7b7c17b5d331d412039650ff471c4160c7eb24ae01284c9"
license "EPL-1.0"
revision 1
livecheck do
url :stable
regex(%r{^(?:releases/)?v?(\d+(?:\.\d+)+)$}i)
end
bottle do
sha256 cellar: :any, arm64_monterey: "8434f19ed131a1dd8554891c5225c14b642045b286efbe6e99471b1555352404"
sha256 cellar: :any, arm64_big_sur: "f1e732d364b18f48463953078d4ee367367728be52708473a0920b2f34313364"
sha256 cellar: :any, monterey: "74b2b7ef2713b239a6f7c7d9e68279ee332859a25b223e50ccba63f3c97e6d3e"
sha256 cellar: :any, big_sur: "a77023f98b927b7a449142765c542ad774e3c92939cc1a93d29126a08acc81fb"
sha256 cellar: :any, catalina: "b68e1b527f9bd8a10c391f49835f379e973c4ad12fb68993d72e49604e4a21bb"
sha256 cellar: :any, mojave: "db3e0b70a5a5435d2c01b8c25c54615288d15dd0aef1606bc6812099b7feb052"
sha256 cellar: :any, high_sierra: "b279c98add833139bbdd65122391805109371eae1c2e99fbd35cbf9993e45ee5"
sha256 cellar: :any_skip_relocation, x86_64_linux: "cdc337e0f4b870eaae6f86ebf4b2ee986ef57cfd8e285245d788b412860b7ea5"
end
depends_on "pkg-config" => [:build, :test]
depends_on "coinutils"
depends_on "openblas"
depends_on "osi"
resource "coin-or-tools-data-sample-p0033-mps" do
url "https://raw.githubusercontent.com/coin-or-tools/Data-Sample/releases/1.2.11/p0033.mps"
sha256 "8ccff819023237c79ef32e238a5da9348725ce9a4425d48888baf3a0b3b42628"
end
def install
# Work around https://github.com/coin-or/Clp/issues/109:
# Error 1: "mkdir: #{include}/clp/coin: File exists."
mkdir include/"clp/coin"
args = [
"--datadir=#{pkgshare}",
"--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--includedir=#{include}/clp",
"--prefix=#{prefix}",
"--with-blas-incdir=#{Formula["openblas"].opt_include}",
"--with-blas-lib=-L#{Formula["openblas"].opt_lib} -lopenblas",
"--with-lapack-incdir=#{Formula["openblas"].opt_include}",
"--with-lapack-lib=-L#{Formula["openblas"].opt_lib} -lopenblas",
]
system "./configure", *args
system "make", "install"
end
test do
resource("coin-or-tools-data-sample-p0033-mps").stage testpath
system bin/"clp", "-import", testpath/"p0033.mps", "-primals"
(testpath/"test.cpp").write <<~EOS
#include <ClpSimplex.hpp>
int main() {
ClpSimplex model;
int status = model.readMps("#{testpath}/p0033.mps", true);
if (status != 0) { return status; }
status = model.primal();
return status;
}
EOS
pkg_config_flags = `pkg-config --cflags --libs clp`.chomp.split
system ENV.cxx, "test.cpp", *pkg_config_flags
system "./a.out"
end
end
| 40.891892 | 123 | 0.671183 |
ab3da33e5606e6f16420f24b8302c90a11824038 | 2,800 | class SnapTelemetry < Formula
desc "Snap is an opensource telemetry framework"
homepage "https://snap-telemetry.io/"
url "https://github.com/intelsdi-x/snap/archive/2.0.0.tar.gz"
sha256 "35f6ddcffcff27677309abb6eb4065b9fe029a266c3f7ff77103bf822ff315ab"
license "Apache-2.0"
head "https://github.com/intelsdi-x/snap.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "094be117be921cca221b7e0021e7e7d48d496e83599ed52fbd81c64b3b389d5b"
sha256 cellar: :any_skip_relocation, big_sur: "4e8cca8dbc731cb1bf7b92a8f410f287678ab270450cd0f58ce6f10eb7e3e1d5"
sha256 cellar: :any_skip_relocation, catalina: "6f52483af1ce2785dc7e9bf0fdc202430c61b804ef3a67e2487d669bf27edcb1"
sha256 cellar: :any_skip_relocation, mojave: "1cd9b411854596b3afe7afa22ed9041d31e21a860739246a5eeb47e03a6844e8"
sha256 cellar: :any_skip_relocation, high_sierra: "066cf3014caa27b6c3327f983cbe632cb85476c0731ec3fda40e85205c1a5f71"
sha256 cellar: :any_skip_relocation, sierra: "1ff53b8b2f1827e2a607d81dd3db246eb1388dfd1aa7110dcf59a8e4ba606d17"
sha256 cellar: :any_skip_relocation, el_capitan: "50ce1be7d6e83f309d8fd62bf2b36cb03c29b726d575abfbeef895b3f628fb46"
sha256 cellar: :any_skip_relocation, x86_64_linux: "2ce6d7532d5ca326b0bd497988ff5aab340c49f6e9334898ecdf312df73b1dfa"
end
# https://github.com/intelsdi-x/snap/commit/e3a6c8e39994b3980df0c7b069d5ede810622952
deprecate! date: "2018-12-20", because: :deprecated_upstream
depends_on "glide" => :build
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
ENV["CGO_ENABLED"] = "0"
ENV["GLIDE_HOME"] = HOMEBREW_CACHE/"glide_home/#{name}"
ENV["GO111MODULE"] = "auto"
snapteld = buildpath/"src/github.com/intelsdi-x/snap"
snapteld.install buildpath.children
cd snapteld do
system "glide", "install"
system "go", "build", "-o", "snapteld", "-ldflags", "-w -X main.gitversion=#{version}"
sbin.install "snapteld"
prefix.install_metafiles
end
snaptel = buildpath/"src/github.com/intelsdi-x/snap/cmd/snaptel"
cd snaptel do
system "go", "build", "-o", "snaptel", "-ldflags", "-w -X main.gitversion=#{version}"
bin.install "snaptel"
end
end
test do
assert_match version.to_s, shell_output("#{sbin}/snapteld --version")
assert_match version.to_s, shell_output("#{bin}/snaptel --version")
begin
snapteld_pid = fork do
exec "#{sbin}/snapteld -t 0 -l 1 -o #{testpath}"
end
sleep 5
assert_match("No plugins", shell_output("#{bin}/snaptel plugin list"))
assert_match("No task", shell_output("#{bin}/snaptel task list"))
assert_predicate testpath/"snapteld.log", :exist?
ensure
Process.kill("TERM", snapteld_pid)
end
end
end
| 42.424242 | 122 | 0.733214 |
ffa5a5850102ad0f53d6ed1c88b74a861ec8cded | 153 | class UsersUpdateLevel < ActiveRecord::Migration
def self.up
execute "update users set level = 3 where level = 2"
end
def self.down
end
end
| 17 | 56 | 0.712418 |
e27a967bc339a02e56a3916ee6fa1f3588d58164 | 3,844 | class Opensearch < Formula
desc "Open source distributed and RESTful search engine"
homepage "https://github.com/opensearch-project/OpenSearch"
url "https://github.com/opensearch-project/OpenSearch/archive/refs/tags/1.0.0-beta1.tar.gz"
sha256 "d23385aa42f636049ae270bdb496843dc8d2dfd88bd7f4761e305e8193b76399"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "3e76febc9b36676a9eb2ca55cc31ff4723783d74f47924c76d83701fb299c25d"
sha256 cellar: :any_skip_relocation, catalina: "a1fe07b1db26074848cca54545c795156dfd26c49493a52605f3809206f7d988"
sha256 cellar: :any_skip_relocation, mojave: "9aac76b813d07a95636df64b4c8b96c6aa5de5a464f728a53568a51d457d92a1"
end
depends_on "gradle@6" => :build
depends_on "openjdk"
def install
system "gradle", ":distribution:archives:no-jdk-darwin-tar:assemble"
mkdir "tar" do
# Extract the package to the tar directory
system "tar", "--strip-components=1", "-xf",
Dir["../distribution/archives/no-jdk-darwin-tar/build/distributions/opensearch-*.tar.gz"].first
# Install into package directory
libexec.install "bin", "lib", "modules"
# Set up Opensearch for local development:
inreplace "config/opensearch.yml" do |s|
# 1. Give the cluster a unique name
s.gsub!(/#\s*cluster\.name: .*/, "cluster.name: opensearch_homebrew")
# 2. Configure paths
s.sub!(%r{#\s*path\.data: /path/to.+$}, "path.data: #{var}/lib/opensearch/")
s.sub!(%r{#\s*path\.logs: /path/to.+$}, "path.logs: #{var}/log/opensearch/")
end
inreplace "config/jvm.options", %r{logs/gc.log}, "#{var}/log/opensearch/gc.log"
# add placeholder to avoid removal of empty directory
touch "config/jvm.options.d/.keepme"
# Move config files into etc
(etc/"opensearch").install Dir["config/*"]
end
inreplace libexec/"bin/opensearch-env",
"if [ -z \"$OPENSEARCH_PATH_CONF\" ]; then OPENSEARCH_PATH_CONF=\"$OPENSEARCH_HOME\"/config; fi",
"if [ -z \"$OPENSEARCH_PATH_CONF\" ]; then OPENSEARCH_PATH_CONF=\"#{etc}/opensearch\"; fi"
bin.install libexec/"bin/opensearch",
libexec/"bin/opensearch-keystore",
libexec/"bin/opensearch-plugin",
libexec/"bin/opensearch-shard"
bin.env_script_all_files(libexec/"bin", JAVA_HOME: Formula["openjdk"].opt_prefix)
end
def post_install
# Make sure runtime directories exist
(var/"lib/opensearch").mkpath
(var/"log/opensearch").mkpath
ln_s etc/"opensearch", libexec/"config" unless (libexec/"config").exist?
(var/"opensearch/plugins").mkpath
ln_s var/"opensearch/plugins", libexec/"plugins" unless (libexec/"plugins").exist?
# fix test not being able to create keystore because of sandbox permissions
system bin/"opensearch-keystore", "create" unless (etc/"opensearch/opensearch.keystore").exist?
end
def caveats
<<~EOS
Data: #{var}/lib/opensearch/
Logs: #{var}/log/opensearch/opensearch_homebrew.log
Plugins: #{var}/opensearch/plugins/
Config: #{etc}/opensearch/
EOS
end
plist_options manual: "opensearch"
service do
run opt_bin/"opensearch"
working_dir var
log_path var/"log/opensearch.log"
error_log_path var/"log/opensearch.log"
end
test do
port = free_port
(testpath/"data").mkdir
(testpath/"logs").mkdir
fork do
exec bin/"opensearch", "-Ehttp.port=#{port}",
"-Epath.data=#{testpath}/data",
"-Epath.logs=#{testpath}/logs"
end
sleep 20
output = shell_output("curl -s -XGET localhost:#{port}/")
assert_equal "opensearch", JSON.parse(output)["version"]["distribution"]
system "#{bin}/opensearch-plugin", "list"
end
end
| 37.686275 | 117 | 0.668574 |
e2d4ec7fd8cd77683e14b9e09ff29c5f30524ae4 | 603 |
module EbayTrading # :nodoc:
module Types # :nodoc:
# == Attributes
# text_node :event_type, 'EventType', :optional => true
# text_node :summary_period, 'SummaryPeriod', :optional => true
# text_node :frequency, 'Frequency', :optional => true
class SummaryEventSchedule
include XML::Mapping
include Initializer
root_element_name 'SummaryEventSchedule'
text_node :event_type, 'EventType', :optional => true
text_node :summary_period, 'SummaryPeriod', :optional => true
text_node :frequency, 'Frequency', :optional => true
end
end
end
| 30.15 | 68 | 0.676617 |
e82e817f8fbefd1cf92532d9ce0f6276b7aa71cf | 2,193 | #
# Cookbook Name:: rvm
# Recipe:: user_install
#
# Copyright 2013, The Open Data Institute
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
prerequisites = [
"curl",
"build-essential"
]
prerequisites.each do |pkg|
package pkg do
action :install
end
end
node["rvm"]["user_installs"].each do |r|
rvm_user = r["user"]
rvm_ruby = r["default_ruby"]
bash "install rvm" do
user "root"
code <<-EOF
sudo su - #{rvm_user} -c '~/.rvm/bin/rvm -v || curl -L get.rvm.io | bash -s stable'
EOF
end
bash "set rvm autolibs" do
rvm_user = r["user"]
user "root"
code <<-EOF
sudo su - #{rvm_user} -c 'rvm autolibs 3'
EOF
end
bash "install ruby" do
user "root"
code <<-EOF
sudo su - #{rvm_user} -c '[ -e ~/.rvm/rubies/ruby-#{rvm_ruby}/bin/ruby ] || rvm install #{rvm_ruby}'
EOF
end
bash "set default ruby" do
user "root"
code <<-EOF
sudo su - #{rvm_user} -c 'rvm use --default #{rvm_ruby}'
EOF
end
# bash "create gemset" do
# user "root"
# code <<-EOF
# sudo su - #{project} -c '~/.rvm/bin/rvm gemset list | grep #{gemset} || rvm gemset create #{gemset}'
# EOF
# end
end
| 28.480519 | 107 | 0.678067 |
87808a5ee4d9a0c390e1a6fcc7b050bc3362913a | 50 | scp_sql_2014 'developer' do
action :install
end
| 12.5 | 27 | 0.78 |
015fdda4be0d3574482f972dd9a2011b81f6cfe6 | 1,846 | # -*- encoding: utf-8 -*-
class ChatController < ApplicationController
include ChatHelper
include RoomHelper
can_edit_on_the_spot
PageSize = 20
def index
@rooms = Room.all_live(current_user)
end
def show
@id = params[:id]
@message = Message.where(:_id => @id).first
if @message.nil? or (not @message.room.accessible?(current_user))
flash[:error] = t(:error_room_deleted)
redirect_to :controller => 'chat'
return
end
@prev_size = int(params[:prev], 5)
@next_size = int(params[:next], 5)
@prev_options = [0,1,5,10,15,20,@prev_size].sort.uniq
@next_options = [0,1,5,10,15,20,@next_size].sort.uniq
@room = @message.room
@prev = @message.prev(@prev_size)
@next = @message.next(@next_size)
end
def room
find_room(params[:id], :not_auth=>true) do |room|
@room = room
@messages = Message.where("room_id" => @room.id).order_by(:_id.desc).limit(PageSize).to_a
@title = @room.title
call_hook(:in_chatroom_controller, :controller => self)
end
end
def message
unless logged?
flash[:error] = t(:error_message_user_not_login_yet)
redirect_to :controller => 'chat'
return
end
if request.post? then
find_room(params[:room_id]) do |room|
@room = room
has_file = !(params[:file].nil?)
case message = Message.make(current_user, room, params[:message], has_file)
when Message
message.attach(params[:file]) if has_file
room.update_attributes!(:updated_at => Time.now)
publish_message(:create, message, room)
end
end
end
redirect_to :controller => 'chat', :action => 'room', :id => params[:room_id]
end
private
def int(s, default)
if s.blank? then
default
else
s.to_i
end
end
end
| 24.613333 | 95 | 0.622427 |
28a9a5426af7e95e364edfc7c2f3e3a935137233 | 75 | FactoryBot.define do
factory :delete_favorites_request do
end
end
| 12.5 | 38 | 0.76 |
61ef7e7933927265765d424b686a7baf4d797d0a | 3,159 | module Intrigue
module Task
module Enrich
class DnsRecord < Intrigue::Task::BaseTask
def self.metadata
{
:name => "enrich/dns_record",
:pretty_name => "Enrich DnsRecord",
:authors => ["jcran"],
:description => "Fills in details for a DnsRecord",
:references => [],
:allowed_types => ["DnsRecord"],
:type => "enrichment",
:passive => true,
:example_entities => [
{"type" => "DnsRecord", "details" => {"name" => "intrigue.io"}}],
:allowed_options => [],
:created_types => [
"DnsRecord",
"IpAddress",
"FtpService",
"MongoService",
"NetworkService",
"SmtpService",
"SnmpService"
]
}
end
def run
lookup_name = _get_entity_name
# Do a lookup and keep track of all aliases
results = resolve(lookup_name)
_log "Creating aliases"
_create_aliases(results)
# Create new entities if we found vhosts / aliases
_log "Creating vhost services"
_create_vhost_entities(lookup_name)
_log "Grabbing resolutions"
resolutions = collect_resolutions(results)
_set_entity_detail("resolutions", resolutions)
_log "Grabbing SOA"
soa_details = collect_soa_details(lookup_name)
_set_entity_detail("soa_record", soa_details)
check_and_create_unscoped_domain(soa_details["primary_name_server"]) if soa_details
if soa_details
# grab any / all MX records (useful to see who accepts mail)
_log "Grabbing MX"
mx_records = collect_mx_records(lookup_name)
_set_entity_detail("mx_records", mx_records)
mx_records.each{|mx| check_and_create_unscoped_domain(mx["host"]) }
# collect TXT records (useful for random things)
_log "Grabbing TXT"
txt_records = collect_txt_records(lookup_name)
_set_entity_detail("txt_records", txt_records)
# grab any / all SPF records (useful to see who accepts mail)
_log "Grabbing SPF"
spf_details = collect_spf_details(lookup_name)
_set_entity_detail("spf_record", spf_details)
end
# create a domain for this entity
check_and_create_unscoped_domain(lookup_name)
end
private
def _create_aliases(results)
####
### Create aliased entities
####
results.each do |result|
_log "Creating entity for... #{result}"
if "#{result["name"]}".is_ip_address?
_create_entity("IpAddress", { "name" => result["name"] }, @entity)
else
_create_entity("DnsRecord", { "name" => result["name"] }, @entity)
end
end
end
def _create_vhost_entities(lookup_name)
### For each associated IpAddress, make sure we create any additional
### uris if we already have scan results
###
@entity.aliases.each do |a|
next unless a.type_string == "IpAddress" # only ips
#next if a.hidden # skip hidden
existing_ports = a.get_detail("ports")
if existing_ports
existing_ports.each do |p|
_create_network_service_entity(a,p["number"],p["protocol"],{})
end
end
end
end
end
end
end
end | 27.955752 | 87 | 0.635644 |
ff1371b8f566ccbdb869d18dbe07798cb612d087 | 383 | require 'test_helper'
class SentMessagesControllerTest < ActionDispatch::IntegrationTest
test "should get index" do
get sent_messages_index_url
assert_response :success
end
test "should get new" do
get sent_messages_new_url
assert_response :success
end
test "should get create" do
get sent_messages_create_url
assert_response :success
end
end
| 19.15 | 66 | 0.762402 |
bb11ae8c37c80954c0d6ada82ce628c73c21bdac | 312 | require_relative 'log'
class ClientThread < Thread
def initialize(name)
self.name = name
super() do
execute
end
end
private
def execute
puts "#{name} BEGIN"
10.times do |i|
Log.puts("i = #{i}")
sleep 0.1
end
Log.close
puts "#{name} END"
end
end
| 11.555556 | 27 | 0.560897 |
6a1843e653b14aea80e0a66749cd8be2b0b0e570 | 517 | class SessionsController < ApplicationController
def new
end
def create
if @user = User.find_by(email: params[:email])
if @user.authenticate(params[:password])
session[:user_id] = @user.id
redirect_to root_path, notice: "Welcome back #{@user.email}"
else
redirect_to new_session_path, notice: "Password is wrong"
end
else
redirect_to new_session_path, notice: "Email is wrong"
end
end
def destroy
session[:user_id] = nil
redirect_to root_path, notice: "Logged out."
end
end
| 22.478261 | 64 | 0.717602 |
b946ccb49f1f13cbb33548af5b30b4867120d167 | 5,553 | require 'rack/session/abstract/id'
module ActionDispatch
class Request < Rack::Request
# Session is responsible for lazily loading the session from store.
class Session # :nodoc:
ENV_SESSION_KEY = Rack::RACK_SESSION # :nodoc:
ENV_SESSION_OPTIONS_KEY = Rack::RACK_SESSION_OPTIONS # :nodoc:
# Singleton object used to determine if an optional param wasn't specified
Unspecified = Object.new
# Creates a session hash, merging the properties of the previous session if any
def self.create(store, req, default_options)
session_was = find req
session = Request::Session.new(store, req)
session.merge! session_was if session_was
set(req, session)
Options.set(req, Request::Session::Options.new(store, default_options))
session
end
def self.find(req)
req.get_header ENV_SESSION_KEY
end
def self.set(req, session)
req.set_header ENV_SESSION_KEY, session
end
class Options #:nodoc:
def self.set(req, options)
req.set_header ENV_SESSION_OPTIONS_KEY, options
end
def self.find(req)
req.get_header ENV_SESSION_OPTIONS_KEY
end
def initialize(by, default_options)
@by = by
@delegate = default_options.dup
end
def [](key)
@delegate[key]
end
def id(req)
@delegate.fetch(:id) {
@by.send(:extract_session_id, req)
}
end
def []=(k,v); @delegate[k] = v; end
def to_hash; @delegate.dup; end
def values_at(*args); @delegate.values_at(*args); end
end
def initialize(by, req)
@by = by
@req = req
@delegate = {}
@loaded = false
@exists = nil # we haven't checked yet
end
def id
options.id(@req)
end
def options
Options.find @req
end
def destroy
clear
options = self.options || {}
@by.send(:destroy_session, @req, options.id(@req), options)
# Load the new sid to be written with the response
@loaded = false
load_for_write!
end
# Returns value of the key stored in the session or
# nil if the given key is not found in the session.
def [](key)
load_for_read!
@delegate[key.to_s]
end
# Returns true if the session has the given key or false.
def has_key?(key)
load_for_read!
@delegate.key?(key.to_s)
end
alias :key? :has_key?
alias :include? :has_key?
# Returns keys of the session as Array.
def keys
@delegate.keys
end
# Returns values of the session as Array.
def values
@delegate.values
end
# Writes given value to given key of the session.
def []=(key, value)
load_for_write!
@delegate[key.to_s] = value
end
# Clears the session.
def clear
load_for_write!
@delegate.clear
end
# Returns the session as Hash.
def to_hash
load_for_read!
@delegate.dup.delete_if { |_,v| v.nil? }
end
# Updates the session with given Hash.
#
# session.to_hash
# # => {"session_id"=>"e29b9ea315edf98aad94cc78c34cc9b2"}
#
# session.update({ "foo" => "bar" })
# # => {"session_id"=>"e29b9ea315edf98aad94cc78c34cc9b2", "foo" => "bar"}
#
# session.to_hash
# # => {"session_id"=>"e29b9ea315edf98aad94cc78c34cc9b2", "foo" => "bar"}
def update(hash)
load_for_write!
@delegate.update stringify_keys(hash)
end
# Deletes given key from the session.
def delete(key)
load_for_write!
@delegate.delete key.to_s
end
      # Returns the value of the given key from the session, or raises +KeyError+
      # if the key cannot be found and no default value is set.
# Returns default value if specified.
#
# session.fetch(:foo)
# # => KeyError: key not found: "foo"
#
# session.fetch(:foo, :bar)
# # => :bar
#
# session.fetch(:foo) do
# :bar
# end
# # => :bar
def fetch(key, default=Unspecified, &block)
load_for_read!
if default == Unspecified
@delegate.fetch(key.to_s, &block)
else
@delegate.fetch(key.to_s, default, &block)
end
end
def inspect
if loaded?
super
else
"#<#{self.class}:0x#{(object_id << 1).to_s(16)} not yet loaded>"
end
end
def exists?
return @exists unless @exists.nil?
@exists = @by.send(:session_exists?, @req)
end
def loaded?
@loaded
end
def empty?
load_for_read!
@delegate.empty?
end
def merge!(other)
load_for_write!
@delegate.merge!(other)
end
private
def load_for_read!
load! if !loaded? && exists?
end
def load_for_write!
load! unless loaded?
end
def load!
id, session = @by.load_session @req
options[:id] = id
@delegate.replace(stringify_keys(session))
@loaded = true
end
def stringify_keys(other)
other.each_with_object({}) { |(key, value), hash|
hash[key.to_s] = value
}
end
end
end
end
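# Illustrative sketch (not part of Rails itself): how a session store wires this class
# up for a request. `store` is assumed to implement load_session, session_exists? and
# destroy_session; `req` is an ActionDispatch::Request.
#
#   session = ActionDispatch::Request::Session.create(store, req, key: "_app_session")
#   session[:user_id] = 42            # forces load_for_write! before mutating
#   session.fetch(:user_id)           # => 42
#   session.to_hash                   # => { "user_id" => 42, ... } with nil values dropped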
| 24.570796 | 85 | 0.553214 |
181eaf595b07a9a614427d35ef25aef0fd2c7f45 | 2,271 | Pod::Spec.new do |s|
s.name = 'FirebaseInstanceID'
s.version = '4.3.0'
s.summary = 'Firebase InstanceID for iOS'
s.description = <<-DESC
Instance ID provides a unique ID per instance of your iOS apps. In addition to providing
                       unique IDs for authentication, Instance ID can generate security tokens for use with other
services.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'InstanceID-' + s.version.to_s
}
s.social_media_url = 'https://twitter.com/Firebase'
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.11'
s.tvos.deployment_target = '10.0'
s.watchos.deployment_target = '6.0'
s.cocoapods_version = '>= 1.4.0'
s.static_framework = true
s.prefix_header_file = false
base_dir = "Firebase/InstanceID/"
s.source_files = base_dir + '**/*.[mh]'
s.requires_arc = base_dir + '*.m'
s.public_header_files = base_dir + 'Public/*.h', base_dir + 'Private/*.h'
s.private_header_files = base_dir + 'Private/*.h'
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'GCC_PREPROCESSOR_DEFINITIONS' =>
'FIRInstanceID_LIB_VERSION=' + String(s.version)
}
s.framework = 'Security'
s.dependency 'FirebaseCore', '~> 6.5'
s.dependency 'FirebaseInstallations', '~> 1.0'
s.dependency 'GoogleUtilities/UserDefaults', '~> 6.4'
s.dependency 'GoogleUtilities/Environment', '~> 6.4'
s.test_spec 'unit' do |unit_tests|
unit_tests.platforms = {:ios => '8.0', :osx => '10.11', :tvos => '10.0'}
unit_tests.source_files = 'Example/InstanceID/Tests/*.[mh]'
unit_tests.requires_app_host = true
unit_tests.dependency 'OCMock'
unit_tests.pod_target_xcconfig = {
# Unit tests do library imports using repo-root relative paths.
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"',
# Prevent linker warning for test category override of
# store:didDeleteFCMScopedTokensForCheckin:
'OTHER_LDFLAGS' => '-Xlinker -no_objc_category_merging',
'CLANG_ENABLE_OBJC_WEAK' => 'YES'
}
end
end
| 37.229508 | 89 | 0.653457 |
872c41ce8cde77a10445ab4ad852a01d70e76b4f | 1,210 | class Xkeyboardconfig < Formula
desc "Keyboard configuration database for the X Window System"
homepage "https://xorg.freedesktop.org"
url "https://xorg.freedesktop.org/archive/individual/data/xkeyboard-config/xkeyboard-config-2.26.tar.bz2"
mirror "ftp://ftp.x.org/pub/individual/data/xkeyboard-config/xkeyboard-config-2.26.tar.bz2"
sha256 "393718c7460cd06c4e8cb819d943ca54812ea476f32714c4d8975c77031a038e"
bottle do
root_url "https://linuxbrew.bintray.com/bottles-xorg"
cellar :any_skip_relocation
sha256 "91ec9583b3eadc646868a5bc4213986b51910e38cc6cb43afea2d89c9662d2e7" => :x86_64_linux
end
depends_on "gettext" => :build
depends_on "intltool" => :build
depends_on "libxslt" => :build
depends_on "pkg-config" => :build
def install
# Needed by intltool (xml::parser)
ENV.prepend_path "PERL5LIB", "#{Formula["intltool"].libexec}/lib/perl5"
args = %W[
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--disable-dependency-tracking
--disable-silent-rules
--with-xkb-rules-symlink=xorg
--disable-runtime-deps
]
system "./configure", *args
system "make"
system "make", "install"
end
end
| 31.842105 | 107 | 0.710744 |
6187288391e4590d8817a4b7e4aea6cf4ee3ba37 | 5,289 | require File.dirname(__FILE__) + '/../helper'
RE_<%= file_name.capitalize %> = %r{(?:(?:the )? *(\w+) *)}
RE_<%= file_name.capitalize %>_TYPE = %r{(?: *(\w+)? *)}
steps_for(:<%= file_name %>) do
#
# Setting
#
Given "an anonymous <%= file_name %>" do
log_out!
end
Given "$an $<%= file_name %>_type <%= file_name %> with $attributes" do |_, <%= file_name %>_type, attributes|
create_<%= file_name %>! <%= file_name %>_type, attributes.to_hash_from_story
end
Given "$an $<%= file_name %>_type <%= file_name %> named '$login'" do |_, <%= file_name %>_type, login|
create_<%= file_name %>! <%= file_name %>_type, named_<%= file_name %>(login)
end
Given "$an $<%= file_name %>_type <%= file_name %> logged in as '$login'" do |_, <%= file_name %>_type, login|
create_<%= file_name %>! <%= file_name %>_type, named_<%= file_name %>(login)
log_in_<%= file_name %>!
end
Given "$actor is logged in" do |_, login|
log_in_<%= file_name %>! @<%= file_name %>_params || named_<%= file_name %>(login)
end
Given "there is no $<%= file_name %>_type <%= file_name %> named '$login'" do |_, login|
@<%= file_name %> = <%= class_name %>.find_by_login(login)
@<%= file_name %>.destroy! if @<%= file_name %>
@<%= file_name %>.should be_nil
end
#
# Actions
#
When "$actor logs out" do
log_out
end
When "$actor registers an account as the preloaded '$login'" do |_, login|
<%= file_name %> = named_<%= file_name %>(login)
<%= file_name %>['password_confirmation'] = <%= file_name %>['password']
create_<%= file_name %> <%= file_name %>
end
When "$actor registers an account with $attributes" do |_, attributes|
create_<%= file_name %> attributes.to_hash_from_story
end
<% if options[:include_activation] %>
When "$actor activates with activation code $attributes" do |_, activation_code|
activation_code = '' if activation_code == 'that is blank'
activate
end<% end %>
When "$actor logs in with $attributes" do |_, attributes|
log_in_<%= file_name %> attributes.to_hash_from_story
end
#
# Result
#
Then "$actor should be invited to sign in" do |_|
response.should render_template('/<%= controller_file_path %>/new')
end
Then "$actor should not be logged in" do |_|
controller.logged_in?.should_not be_true
end
Then "$login should be logged in" do |login|
controller.logged_in?.should be_true
controller.current_<%= file_name %>.should === @<%= file_name %>
controller.current_<%= file_name %>.login.should == login
end
end
def named_<%= file_name %> login
<%= file_name %>_params = {
'admin' => {'id' => 1, 'login' => 'addie', 'password' => '1234addie', 'email' => '[email protected]', },
'oona' => { 'login' => 'oona', 'password' => '1234oona', 'email' => '[email protected]'},
'reggie' => { 'login' => 'reggie', 'password' => 'monkey', 'email' => '[email protected]' },
}
<%= file_name %>_params[login.downcase]
end
#
# <%= class_name %> account actions.
#
# The ! methods just 'get the job done'. It's true, they do some testing of
# their own -- thus un-DRYing tests that do and should live in the <%= file_name %> account
# stories -- but the repetition is ultimately important so that a faulty test setup
# fails early.
#
def log_out
get '/<%= controller_file_path %>/destroy'
end
def log_out!
log_out
response.should redirect_to('/')
follow_redirect!
end
def create_<%= file_name %>(<%= file_name %>_params={})
@<%= file_name %>_params ||= <%= file_name %>_params
post "/<%= model_controller_file_path %>", :<%= file_name %> => <%= file_name %>_params
@<%= file_name %> = <%= class_name %>.find_by_login(<%= file_name %>_params['login'])
end
def create_<%= file_name %>!(<%= file_name %>_type, <%= file_name %>_params)
  <%= file_name %>_params['password_confirmation'] ||= <%= file_name %>_params['password']
create_<%= file_name %> <%= file_name %>_params
response.should redirect_to('/')
follow_redirect!
<% if options[:include_activation] %>
# fix the <%= file_name %>'s activation status
activate_<%= file_name %>! if <%= file_name %>_type == 'activated'<% end %>
end
<% if options[:include_activation] %>
def activate_<%= file_name %> activation_code=nil
activation_code = @<%= file_name %>.activation_code if activation_code.nil?
get "/activate/#{activation_code}"
end
def activate_<%= file_name %>! *args
activate_<%= file_name %> *args
response.should redirect_to('/login')
follow_redirect!
response.should have_flash("notice", /Signup complete!/)
end<% end %>
def log_in_<%= file_name %> <%= file_name %>_params=nil
@<%= file_name %>_params ||= <%= file_name %>_params
<%= file_name %>_params ||= @<%= file_name %>_params
post "/<%= controller_routing_path %>", <%= file_name %>_params
@<%= file_name %> = <%= class_name %>.find_by_login(<%= file_name %>_params['login'])
controller.current_<%= file_name %>
end
def log_in_<%= file_name %>! *args
log_in_<%= file_name %> *args
response.should redirect_to('/')
follow_redirect!
response.should have_flash("notice", /Logged in successfully/)
end
| 34.344156 | 130 | 0.628852 |
03f12724018c2d3ffb2bc5fa774d0217c278b2a8 | 24,152 | require_relative '../spec_helper'
require_relative '../fixtures/constants'
require_relative 'fixtures/constants_sclass'
require_relative 'fixtures/constant_visibility'
# Read the documentation in fixtures/constants.rb for the guidelines and
# rationale for the structure and organization of these specs.
describe "Literal (A::X) constant resolution" do
describe "with statically assigned constants" do
it "searches the immediate class or module scope first" do
ConstantSpecs::ClassA::CS_CONST10.should == :const10_10
ConstantSpecs::ModuleA::CS_CONST10.should == :const10_1
ConstantSpecs::ParentA::CS_CONST10.should == :const10_5
ConstantSpecs::ContainerA::CS_CONST10.should == :const10_2
ConstantSpecs::ContainerA::ChildA::CS_CONST10.should == :const10_3
end
it "searches a module included in the immediate class before the superclass" do
ConstantSpecs::ContainerA::ChildA::CS_CONST15.should == :const15_1
end
it "searches the superclass before a module included in the superclass" do
ConstantSpecs::ContainerA::ChildA::CS_CONST11.should == :const11_1
end
it "searches a module included in the superclass" do
ConstantSpecs::ContainerA::ChildA::CS_CONST12.should == :const12_1
end
it "searches the superclass chain" do
ConstantSpecs::ContainerA::ChildA::CS_CONST13.should == :const13
end
it "searches Object if no class or module qualifier is given" do
CS_CONST1.should == :const1
CS_CONST10.should == :const10_1
end
it "searches Object after searching other scopes" do
module ConstantSpecs::SpecAdded1
CS_CONST10.should == :const10_1
end
end
it "searches Object if a toplevel qualifier (::X) is given" do
::CS_CONST1.should == :const1
::CS_CONST10.should == :const10_1
end
it "does not search the singleton class of the class or module" do
-> do
ConstantSpecs::ContainerA::ChildA::CS_CONST14
end.should raise_error(NameError)
-> { ConstantSpecs::CS_CONST14 }.should raise_error(NameError)
end
end
describe "with dynamically assigned constants" do
it "searches the immediate class or module scope first" do
ConstantSpecs::ClassB::CS_CONST101 = :const101_1
ConstantSpecs::ClassB::CS_CONST101.should == :const101_1
ConstantSpecs::ParentB::CS_CONST101 = :const101_2
ConstantSpecs::ParentB::CS_CONST101.should == :const101_2
ConstantSpecs::ContainerB::CS_CONST101 = :const101_3
ConstantSpecs::ContainerB::CS_CONST101.should == :const101_3
ConstantSpecs::ContainerB::ChildB::CS_CONST101 = :const101_4
ConstantSpecs::ContainerB::ChildB::CS_CONST101.should == :const101_4
ConstantSpecs::ModuleA::CS_CONST101 = :const101_5
ConstantSpecs::ModuleA::CS_CONST101.should == :const101_5
end
it "searches a module included in the immediate class before the superclass" do
ConstantSpecs::ParentB::CS_CONST102 = :const102_1
ConstantSpecs::ModuleF::CS_CONST102 = :const102_2
ConstantSpecs::ContainerB::ChildB::CS_CONST102.should == :const102_2
end
it "searches the superclass before a module included in the superclass" do
ConstantSpecs::ModuleE::CS_CONST103 = :const103_1
ConstantSpecs::ParentB::CS_CONST103 = :const103_2
ConstantSpecs::ContainerB::ChildB::CS_CONST103.should == :const103_2
end
it "searches a module included in the superclass" do
ConstantSpecs::ModuleA::CS_CONST104 = :const104_1
ConstantSpecs::ModuleE::CS_CONST104 = :const104_2
ConstantSpecs::ContainerB::ChildB::CS_CONST104.should == :const104_2
end
it "searches the superclass chain" do
ConstantSpecs::ModuleA::CS_CONST105 = :const105
ConstantSpecs::ContainerB::ChildB::CS_CONST105.should == :const105
end
it "searches Object if no class or module qualifier is given" do
CS_CONST106 = :const106
CS_CONST106.should == :const106
end
it "searches Object if a toplevel qualifier (::X) is given" do
::CS_CONST107 = :const107
::CS_CONST107.should == :const107
end
it "does not search the singleton class of the class or module" do
class << ConstantSpecs::ContainerB::ChildB
CS_CONST108 = :const108_1
end
-> do
ConstantSpecs::ContainerB::ChildB::CS_CONST108
end.should raise_error(NameError)
module ConstantSpecs
class << self
CS_CONST108 = :const108_2
end
end
-> { ConstantSpecs::CS_CONST108 }.should raise_error(NameError)
end
it "returns the updated value when a constant is reassigned" do
ConstantSpecs::ClassB::CS_CONST109 = :const109_1
ConstantSpecs::ClassB::CS_CONST109.should == :const109_1
-> {
ConstantSpecs::ClassB::CS_CONST109 = :const109_2
}.should complain(/already initialized constant/)
ConstantSpecs::ClassB::CS_CONST109.should == :const109_2
end
it "evaluates the right hand side before evaluating a constant path" do
mod = Module.new
mod.module_eval <<-EOC
ConstantSpecsRHS::B = begin
module ConstantSpecsRHS; end
"hello"
end
EOC
mod::ConstantSpecsRHS::B.should == 'hello'
end
end
it "raises a NameError if no constant is defined in the search path" do
-> { ConstantSpecs::ParentA::CS_CONSTX }.should raise_error(NameError)
end
ruby_version_is "3.0" do
it "uses the module or class #name to craft the error message" do
mod = Module.new do
def self.name
"ModuleName"
end
def self.inspect
"<unusable info>"
end
end
-> { mod::DOES_NOT_EXIST }.should raise_error(NameError, /uninitialized constant ModuleName::DOES_NOT_EXIST/)
end
it "uses the module or class #inspect to craft the error message if they are anonymous" do
mod = Module.new do
def self.name
nil
end
def self.inspect
"<unusable info>"
end
end
-> { mod::DOES_NOT_EXIST }.should raise_error(NameError, /uninitialized constant <unusable info>::DOES_NOT_EXIST/)
end
end
it "sends #const_missing to the original class or module scope" do
ConstantSpecs::ClassA::CS_CONSTX.should == :CS_CONSTX
end
it "evaluates the qualifier" do
ConstantSpecs.get_const::CS_CONST2.should == :const2
end
it "raises a TypeError if a non-class or non-module qualifier is given" do
-> { CS_CONST1::CS_CONST }.should raise_error(TypeError)
-> { 1::CS_CONST }.should raise_error(TypeError)
-> { "mod"::CS_CONST }.should raise_error(TypeError)
-> { false::CS_CONST }.should raise_error(TypeError)
end
end
describe "Constant resolution within methods" do
describe "with statically assigned constants" do
it "searches the immediate class or module scope first" do
ConstantSpecs::ClassA.const10.should == :const10_10
ConstantSpecs::ParentA.const10.should == :const10_5
ConstantSpecs::ContainerA.const10.should == :const10_2
ConstantSpecs::ContainerA::ChildA.const10.should == :const10_3
ConstantSpecs::ClassA.new.const10.should == :const10_10
ConstantSpecs::ParentA.new.const10.should == :const10_5
ConstantSpecs::ContainerA::ChildA.new.const10.should == :const10_3
end
it "searches a module included in the immediate class before the superclass" do
ConstantSpecs::ContainerA::ChildA.const15.should == :const15_1
ConstantSpecs::ContainerA::ChildA.new.const15.should == :const15_1
end
it "searches the superclass before a module included in the superclass" do
ConstantSpecs::ContainerA::ChildA.const11.should == :const11_1
ConstantSpecs::ContainerA::ChildA.new.const11.should == :const11_1
end
it "searches a module included in the superclass" do
ConstantSpecs::ContainerA::ChildA.const12.should == :const12_1
ConstantSpecs::ContainerA::ChildA.new.const12.should == :const12_1
end
it "searches the superclass chain" do
ConstantSpecs::ContainerA::ChildA.const13.should == :const13
ConstantSpecs::ContainerA::ChildA.new.const13.should == :const13
end
it "searches the lexical scope of the method not the receiver's immediate class" do
ConstantSpecs::ContainerA::ChildA.const19.should == :const19_1
end
it "searches the lexical scope of a singleton method" do
ConstantSpecs::CS_CONST18.const17.should == :const17_1
end
it "does not search the lexical scope of the caller" do
-> { ConstantSpecs::ClassA.const16 }.should raise_error(NameError)
end
it "searches the lexical scope of a block" do
ConstantSpecs::ClassA.const22.should == :const22_1
end
it "searches Object as a lexical scope only if Object is explicitly opened" do
ConstantSpecs::ContainerA::ChildA.const20.should == :const20_1
ConstantSpecs::ContainerA::ChildA.const21.should == :const21_1
end
it "does not search the lexical scope of qualifying modules" do
-> do
ConstantSpecs::ContainerA::ChildA.const23
end.should raise_error(NameError)
end
end
describe "with dynamically assigned constants" do
it "searches the immediate class or module scope first" do
ConstantSpecs::ModuleA::CS_CONST201 = :const201_1
class ConstantSpecs::ClassB; CS_CONST201 = :const201_2; end
ConstantSpecs::ParentB::CS_CONST201 = :const201_3
ConstantSpecs::ContainerB::CS_CONST201 = :const201_4
ConstantSpecs::ContainerB::ChildB::CS_CONST201 = :const201_5
ConstantSpecs::ClassB.const201.should == :const201_2
ConstantSpecs::ParentB.const201.should == :const201_3
ConstantSpecs::ContainerB.const201.should == :const201_4
ConstantSpecs::ContainerB::ChildB.const201.should == :const201_5
ConstantSpecs::ClassB.new.const201.should == :const201_2
ConstantSpecs::ParentB.new.const201.should == :const201_3
ConstantSpecs::ContainerB::ChildB.new.const201.should == :const201_5
end
it "searches a module included in the immediate class before the superclass" do
ConstantSpecs::ParentB::CS_CONST202 = :const202_2
ConstantSpecs::ContainerB::ChildB::CS_CONST202 = :const202_1
ConstantSpecs::ContainerB::ChildB.const202.should == :const202_1
ConstantSpecs::ContainerB::ChildB.new.const202.should == :const202_1
end
it "searches the superclass before a module included in the superclass" do
ConstantSpecs::ParentB::CS_CONST203 = :const203_1
ConstantSpecs::ModuleE::CS_CONST203 = :const203_2
ConstantSpecs::ContainerB::ChildB.const203.should == :const203_1
ConstantSpecs::ContainerB::ChildB.new.const203.should == :const203_1
end
it "searches a module included in the superclass" do
ConstantSpecs::ModuleA::CS_CONST204 = :const204_2
ConstantSpecs::ModuleE::CS_CONST204 = :const204_1
ConstantSpecs::ContainerB::ChildB.const204.should == :const204_1
ConstantSpecs::ContainerB::ChildB.new.const204.should == :const204_1
end
it "searches the superclass chain" do
ConstantSpecs::ModuleA::CS_CONST205 = :const205
ConstantSpecs::ContainerB::ChildB.const205.should == :const205
ConstantSpecs::ContainerB::ChildB.new.const205.should == :const205
end
it "searches the lexical scope of the method not the receiver's immediate class" do
ConstantSpecs::ContainerB::ChildB::CS_CONST206 = :const206_2
class ConstantSpecs::ContainerB::ChildB
class << self
CS_CONST206 = :const206_1
end
end
ConstantSpecs::ContainerB::ChildB.const206.should == :const206_1
end
it "searches the lexical scope of a singleton method" do
ConstantSpecs::CS_CONST207 = :const207_1
ConstantSpecs::ClassB::CS_CONST207 = :const207_2
ConstantSpecs::CS_CONST208.const207.should == :const207_1
end
it "does not search the lexical scope of the caller" do
ConstantSpecs::ClassB::CS_CONST209 = :const209
-> { ConstantSpecs::ClassB.const209 }.should raise_error(NameError)
end
it "searches the lexical scope of a block" do
ConstantSpecs::ClassB::CS_CONST210 = :const210_1
ConstantSpecs::ParentB::CS_CONST210 = :const210_2
ConstantSpecs::ClassB.const210.should == :const210_1
end
it "searches Object as a lexical scope only if Object is explicitly opened" do
Object::CS_CONST211 = :const211_1
ConstantSpecs::ParentB::CS_CONST211 = :const211_2
ConstantSpecs::ContainerB::ChildB.const211.should == :const211_1
Object::CS_CONST212 = :const212_2
ConstantSpecs::ParentB::CS_CONST212 = :const212_1
ConstantSpecs::ContainerB::ChildB.const212.should == :const212_1
end
it "returns the updated value when a constant is reassigned" do
ConstantSpecs::ParentB::CS_CONST213 = :const213_1
ConstantSpecs::ContainerB::ChildB.const213.should == :const213_1
ConstantSpecs::ContainerB::ChildB.new.const213.should == :const213_1
-> {
ConstantSpecs::ParentB::CS_CONST213 = :const213_2
}.should complain(/already initialized constant/)
ConstantSpecs::ContainerB::ChildB.const213.should == :const213_2
ConstantSpecs::ContainerB::ChildB.new.const213.should == :const213_2
end
it "does not search the lexical scope of qualifying modules" do
ConstantSpecs::ContainerB::CS_CONST214 = :const214
-> do
ConstantSpecs::ContainerB::ChildB.const214
end.should raise_error(NameError)
end
end
it "raises a NameError if no constant is defined in the search path" do
-> { ConstantSpecs::ParentA.constx }.should raise_error(NameError)
end
it "sends #const_missing to the original class or module scope" do
ConstantSpecs::ClassA.constx.should == :CS_CONSTX
ConstantSpecs::ClassA.new.constx.should == :CS_CONSTX
end
end
describe "Constant resolution within a singleton class (class << obj)" do
it "works like normal classes or modules" do
ConstantSpecs::CS_SINGLETON1.foo.should == 1
end
it "uses its own namespace for each object" do
a = ConstantSpecs::CS_SINGLETON2[0].foo
b = ConstantSpecs::CS_SINGLETON2[1].foo
[a, b].should == [1, 2]
end
it "uses its own namespace for nested modules" do
a = ConstantSpecs::CS_SINGLETON3[0].x
b = ConstantSpecs::CS_SINGLETON3[1].x
a.should_not equal(b)
end
it "allows nested modules to have proper resolution" do
a = ConstantSpecs::CS_SINGLETON4_CLASSES[0].new
b = ConstantSpecs::CS_SINGLETON4_CLASSES[1].new
[a.foo, b.foo].should == [1, 2]
end
end
describe "top-level constant lookup" do
context "on a class" do
it "does not search Object after searching other scopes" do
-> { String::Hash }.should raise_error(NameError)
end
end
it "searches Object unsuccessfully when searches on a module" do
-> { Enumerable::Hash }.should raise_error(NameError)
end
end
describe "Module#private_constant marked constants" do
it "remain private even when updated" do
mod = Module.new
mod.const_set :Foo, true
mod.send :private_constant, :Foo
-> {
mod.const_set :Foo, false
}.should complain(/already initialized constant/)
-> {mod::Foo}.should raise_error(NameError)
end
it "sends #const_missing to the original class or module" do
mod = Module.new
mod.const_set :Foo, true
mod.send :private_constant, :Foo
def mod.const_missing(name)
name == :Foo ? name : super
end
mod::Foo.should == :Foo
end
describe "in a module" do
it "cannot be accessed from outside the module" do
-> do
ConstantVisibility::PrivConstModule::PRIVATE_CONSTANT_MODULE
end.should raise_error(NameError)
end
it "cannot be reopened as a module from scope where constant would be private" do
-> do
module ConstantVisibility::ModuleContainer::PrivateModule; end
end.should raise_error(NameError)
end
it "cannot be reopened as a class from scope where constant would be private" do
-> do
class ConstantVisibility::ModuleContainer::PrivateClass; end
end.should raise_error(NameError)
end
it "can be reopened as a module where constant is not private" do
module ::ConstantVisibility::ModuleContainer
module PrivateModule
X = 1
end
PrivateModule::X.should == 1
end
end
it "can be reopened as a class where constant is not private" do
module ::ConstantVisibility::ModuleContainer
class PrivateClass
X = 1
end
PrivateClass::X.should == 1
end
end
it "is not defined? with A::B form" do
defined?(ConstantVisibility::PrivConstModule::PRIVATE_CONSTANT_MODULE).should == nil
end
it "can be accessed from the module itself" do
ConstantVisibility::PrivConstModule.private_constant_from_self.should be_true
end
it "is defined? from the module itself" do
ConstantVisibility::PrivConstModule.defined_from_self.should == "constant"
end
it "can be accessed from lexical scope" do
ConstantVisibility::PrivConstModule::Nested.private_constant_from_scope.should be_true
end
it "is defined? from lexical scope" do
ConstantVisibility::PrivConstModule::Nested.defined_from_scope.should == "constant"
end
it "can be accessed from classes that include the module" do
ConstantVisibility::ClassIncludingPrivConstModule.new.private_constant_from_include.should be_true
end
it "can be accessed from modules that include the module" do
ConstantVisibility::ModuleIncludingPrivConstModule.private_constant_from_include.should be_true
end
it "raises a NameError when accessed directly from modules that include the module" do
-> do
ConstantVisibility::ModuleIncludingPrivConstModule.private_constant_self_from_include
end.should raise_error(NameError)
-> do
ConstantVisibility::ModuleIncludingPrivConstModule.private_constant_named_from_include
end.should raise_error(NameError)
end
it "is defined? from classes that include the module" do
ConstantVisibility::ClassIncludingPrivConstModule.new.defined_from_include.should == "constant"
end
end
describe "in a class" do
it "cannot be accessed from outside the class" do
-> do
ConstantVisibility::PrivConstClass::PRIVATE_CONSTANT_CLASS
end.should raise_error(NameError)
end
it "cannot be reopened as a module" do
-> do
module ConstantVisibility::ClassContainer::PrivateModule; end
end.should raise_error(NameError)
end
it "cannot be reopened as a class" do
-> do
class ConstantVisibility::ClassContainer::PrivateClass; end
end.should raise_error(NameError)
end
it "can be reopened as a module where constant is not private" do
class ::ConstantVisibility::ClassContainer
module PrivateModule
X = 1
end
PrivateModule::X.should == 1
end
end
it "can be reopened as a class where constant is not private" do
class ::ConstantVisibility::ClassContainer
class PrivateClass
X = 1
end
PrivateClass::X.should == 1
end
end
it "is not defined? with A::B form" do
defined?(ConstantVisibility::PrivConstClass::PRIVATE_CONSTANT_CLASS).should == nil
end
it "can be accessed from the class itself" do
ConstantVisibility::PrivConstClass.private_constant_from_self.should be_true
end
it "is defined? from the class itself" do
ConstantVisibility::PrivConstClass.defined_from_self.should == "constant"
end
it "can be accessed from lexical scope" do
ConstantVisibility::PrivConstClass::Nested.private_constant_from_scope.should be_true
end
it "is defined? from lexical scope" do
ConstantVisibility::PrivConstClass::Nested.defined_from_scope.should == "constant"
end
it "can be accessed from subclasses" do
ConstantVisibility::PrivConstClassChild.new.private_constant_from_subclass.should be_true
end
it "is defined? from subclasses" do
ConstantVisibility::PrivConstClassChild.new.defined_from_subclass.should == "constant"
end
end
describe "in Object" do
it "cannot be accessed using ::Const form" do
-> do
::PRIVATE_CONSTANT_IN_OBJECT
end.should raise_error(NameError)
end
it "is not defined? using ::Const form" do
defined?(::PRIVATE_CONSTANT_IN_OBJECT).should == nil
end
it "can be accessed through the normal search" do
PRIVATE_CONSTANT_IN_OBJECT.should == true
end
it "is defined? through the normal search" do
defined?(PRIVATE_CONSTANT_IN_OBJECT).should == "constant"
end
end
describe "NameError by #private_constant" do
it "has :receiver and :name attributes" do
-> do
ConstantVisibility::PrivConstClass::PRIVATE_CONSTANT_CLASS
end.should raise_error(NameError) {|e|
e.receiver.should == ConstantVisibility::PrivConstClass
e.name.should == :PRIVATE_CONSTANT_CLASS
}
-> do
ConstantVisibility::PrivConstModule::PRIVATE_CONSTANT_MODULE
end.should raise_error(NameError) {|e|
e.receiver.should == ConstantVisibility::PrivConstModule
e.name.should == :PRIVATE_CONSTANT_MODULE
}
end
it "has the defined class as the :name attribute" do
-> do
ConstantVisibility::PrivConstClassChild::PRIVATE_CONSTANT_CLASS
end.should raise_error(NameError) {|e|
e.receiver.should == ConstantVisibility::PrivConstClass
e.name.should == :PRIVATE_CONSTANT_CLASS
}
-> do
ConstantVisibility::ClassIncludingPrivConstModule::PRIVATE_CONSTANT_MODULE
end.should raise_error(NameError) {|e|
e.receiver.should == ConstantVisibility::PrivConstModule
e.name.should == :PRIVATE_CONSTANT_MODULE
}
end
end
end
describe "Module#public_constant marked constants" do
before :each do
@module = ConstantVisibility::PrivConstModule.dup
end
describe "in a module" do
it "can be accessed from outside the module" do
@module.send :public_constant, :PRIVATE_CONSTANT_MODULE
@module::PRIVATE_CONSTANT_MODULE.should == true
end
it "is defined? with A::B form" do
@module.send :public_constant, :PRIVATE_CONSTANT_MODULE
defined?(@module::PRIVATE_CONSTANT_MODULE).should == "constant"
end
end
describe "in a class" do
before :each do
@class = ConstantVisibility::PrivConstClass.dup
end
it "can be accessed from outside the class" do
@class.send :public_constant, :PRIVATE_CONSTANT_CLASS
@class::PRIVATE_CONSTANT_CLASS.should == true
end
it "is defined? with A::B form" do
@class.send :public_constant, :PRIVATE_CONSTANT_CLASS
defined?(@class::PRIVATE_CONSTANT_CLASS).should == "constant"
end
end
describe "in Object" do
after :each do
ConstantVisibility.reset_private_constants
end
it "can be accessed using ::Const form" do
Object.send :public_constant, :PRIVATE_CONSTANT_IN_OBJECT
::PRIVATE_CONSTANT_IN_OBJECT.should == true
end
it "is defined? using ::Const form" do
Object.send :public_constant, :PRIVATE_CONSTANT_IN_OBJECT
defined?(::PRIVATE_CONSTANT_IN_OBJECT).should == "constant"
end
end
end
describe 'Allowed characters' do
  it 'allows non-ASCII characters in the middle of a name' do
mod = Module.new
mod.const_set("BBἍBB", 1)
eval("mod::BBἍBB").should == 1
end
  it 'does not allow non-ASCII characters that cannot be upcased or lowercased at the beginning' do
-> do
Module.new.const_set("થBB", 1)
end.should raise_error(NameError, /wrong constant name/)
end
  it 'allows non-ASCII upcased characters at the beginning' do
mod = Module.new
mod.const_set("ἍBB", 1)
eval("mod::ἍBB").should == 1
end
end
| 33.49792 | 120 | 0.701557 |
188d3248bd251af62fbe3e4aa79996bd846a6197 | 175 | class AddRatingToCommunityTools < ActiveRecord::Migration[5.0]
def change
add_column :community_tools, :rating, :float
add_index :community_tools, :rating
end
end
| 25 | 62 | 0.765714 |
5d6967f34665244c0ee2889bc58dd0ccab8cc462 | 1,120 | module Rester
module Service::Middleware
##
# Create a Request object for this thread, store the correlation ID, and
    # perform the necessary logging. Clean up the request once it's complete.
class RequestHandler < Base
def call(env)
Rester.wrap_request do
Rester.request = request = Rester::Service::Request.new(env)
Rester.correlation_id = request.correlation_id
Rester.request_info[:producer_name] = service.name
Rester.request_info[:consumer_name] = request.consumer_name
Rester.request_info[:path] = request.path_info
Rester.request_info[:verb] = request.request_method
service.logger.info('request received')
start_time = Time.now.to_f
super.tap { |response|
elapsed_ms = (Time.now.to_f - start_time) * 1000
response[1]["X-Rester-Producer-Name"] = service.name
service.logger.info("responding with #{response[0]} after %0.3fms" %
elapsed_ms)
}
end
end
end # RequestHandler
end # Service::Middleware
end # Rester
| 37.333333 | 80 | 0.641964 |
bb82e3fd3a2e5f12b61d848942a4f73d5e9aa01d | 2,878 | module Orbf
module RulesEngine
class Contract
KNOWN_FIELDS = %w[contract_start_date contract_end_date id org_unit date].freeze
attr_reader :id, :start_period, :end_period, :field_values
def initialize(field_values, calendar)
@field_values = field_values
@id = field_values.fetch("id")
@org_unit = field_values.fetch("org_unit")
@start_period = field_values.fetch("contract_start_date").gsub("-", "").slice(0, 6)
@end_period = field_values.fetch("contract_end_date").gsub("-", "").slice(0, 6)
@calendar = calendar
end
def match_period?(period)
@calendar.periods(period, "monthly").any? do |start_month_period|
(
start_period <= start_month_period && start_month_period <= end_period
)
end
end
def distance(period)
period_start_month = @calendar.periods(period, "monthly")[0]
[start_period.to_i - period_start_month.to_i, end_period.to_i - period_start_month.to_i].min
end
def org_unit
Orbf::RulesEngine::OrgUnit.new(ext_id: @org_unit["id"], name: @org_unit["name"], path: @org_unit["path"], group_ext_ids: [])
end
def org_unit_id
@org_unit["id"]
end
def org_unit_name
@org_unit["name"]
end
def codes
@codes ||= begin
other_data_element_values = field_values.entries.select do |(k, v)|
!KNOWN_FIELDS.include?(k) && v.is_a?(String)
end
other_data_element_values.map { |_k, v| v.downcase }
end
end
def overlaps?(contract)
return false if contract.id == id
(
contract.start_period <= end_period &&
start_period <= contract.end_period
)
end
def to_h
{
id: id,
org_unit_id: org_unit_id,
from_period: start_period,
end_period: end_period,
org_unit_name: org_unit_name,
field_values: field_values
}
end
def inspect
"#<#{self.class}:0x#{object_id.to_s(16)}(ou=#{org_unit_id},from_to=#{start_period}-#{end_period},#{org_unit_name},#{field_values}"
end
def to_s
JSON.pretty_generate(to_h)
end
def self.overlappings(contracts)
results = []
contracts.group_by(&:org_unit_id).each do |_org_unit_id, ou_contracts|
overlappings = overlapping_contracts(ou_contracts)
results.push(overlappings) if overlappings.present?
end
results
end
def self.overlapping_contracts(contracts)
overlappings = []
contracts.each do |c1|
contracts.each do |c2|
overlappings.push([c1, c2]) if c1.overlaps?(c2)
end
end
overlappings
end
end
end
end
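# Usage sketch with hypothetical data (not from the original gem). `calendar` is assumed
# to respond to #periods(period, "monthly") and return month periods such as "201901".
#
#   fields = {
#     "id"                  => "contract-1",
#     "org_unit"            => { "id" => "OU1", "name" => "Clinic A", "path" => "/OU1" },
#     "contract_start_date" => "2019-01-01",
#     "contract_end_date"   => "2019-12-31"
#   }
#   contract = Orbf::RulesEngine::Contract.new(fields, calendar)
#   contract.match_period?("2019Q1")                      # true when any month of the quarter is in range
#   Orbf::RulesEngine::Contract.overlappings([contract])  # pairs of overlapping contracts per org unit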
| 28.78 | 138 | 0.59312 |
d58ee7a3a4283b5ecfc400c8c6b6297498413476 | 1,064 | Pod::Spec.new do |s|
s.name = 'RPHTTPServiceClient'
s.version = '2.0.0'
s.summary = 'RPHTTPServiceClient is a Type-safe JSON/HTTP client for iOS.'
s.description = <<-DESC
RPHTTPServiceClient simplifies the way you consume web services/APIs and automatically maps the results to ObjectMapper's Mappable object instances
DESC
s.homepage = 'https://github.com/RedP4nda/RPHTTPServiceClient'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'MrCloud' => '[email protected]' }
s.source = { :git => 'https://github.com/RedP4nda/RPHTTPServiceClient.git', :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/Florian_MrCloud'
s.ios.deployment_target = '11.0'
s.swift_versions = ['5.1', '5.2']
s.source_files = 'RPHTTPServiceClient/Classes/**/*'
s.dependency 'Moya', '14.0.0'
s.dependency 'Moya-ObjectMapper', '2.9'
s.dependency 'ObjectMapper', '3.5.1'
s.dependency 'Alamofire', '5.0.0'
end
| 38 | 163 | 0.62782 |
0394c8be2e8626cbe376a31731f463d7c1a95eb7 | 9,071 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: src/proto/grpc/testing/control.proto
require 'google/protobuf'
require 'src/proto/grpc/testing/payloads_pb'
require 'src/proto/grpc/testing/stats_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("src/proto/grpc/testing/control.proto", :syntax => :proto3) do
add_message "grpc.testing.PoissonParams" do
optional :offered_load, :double, 1
end
add_message "grpc.testing.ClosedLoopParams" do
end
add_message "grpc.testing.LoadParams" do
oneof :load do
optional :closed_loop, :message, 1, "grpc.testing.ClosedLoopParams"
optional :poisson, :message, 2, "grpc.testing.PoissonParams"
end
end
add_message "grpc.testing.SecurityParams" do
optional :use_test_ca, :bool, 1
optional :server_host_override, :string, 2
optional :cred_type, :string, 3
end
add_message "grpc.testing.ChannelArg" do
optional :name, :string, 1
oneof :value do
optional :str_value, :string, 2
optional :int_value, :int32, 3
end
end
add_message "grpc.testing.ClientConfig" do
repeated :server_targets, :string, 1
optional :client_type, :enum, 2, "grpc.testing.ClientType"
optional :security_params, :message, 3, "grpc.testing.SecurityParams"
optional :outstanding_rpcs_per_channel, :int32, 4
optional :client_channels, :int32, 5
optional :async_client_threads, :int32, 7
optional :rpc_type, :enum, 8, "grpc.testing.RpcType"
optional :load_params, :message, 10, "grpc.testing.LoadParams"
optional :payload_config, :message, 11, "grpc.testing.PayloadConfig"
optional :histogram_params, :message, 12, "grpc.testing.HistogramParams"
repeated :core_list, :int32, 13
optional :core_limit, :int32, 14
optional :other_client_api, :string, 15
repeated :channel_args, :message, 16, "grpc.testing.ChannelArg"
optional :threads_per_cq, :int32, 17
optional :messages_per_stream, :int32, 18
optional :use_coalesce_api, :bool, 19
optional :median_latency_collection_interval_millis, :int32, 20
optional :client_processes, :int32, 21
end
add_message "grpc.testing.ClientStatus" do
optional :stats, :message, 1, "grpc.testing.ClientStats"
end
add_message "grpc.testing.Mark" do
optional :reset, :bool, 1
end
add_message "grpc.testing.ClientArgs" do
oneof :argtype do
optional :setup, :message, 1, "grpc.testing.ClientConfig"
optional :mark, :message, 2, "grpc.testing.Mark"
end
end
add_message "grpc.testing.ServerConfig" do
optional :server_type, :enum, 1, "grpc.testing.ServerType"
optional :security_params, :message, 2, "grpc.testing.SecurityParams"
optional :port, :int32, 4
optional :async_server_threads, :int32, 7
optional :core_limit, :int32, 8
optional :payload_config, :message, 9, "grpc.testing.PayloadConfig"
repeated :core_list, :int32, 10
optional :other_server_api, :string, 11
optional :threads_per_cq, :int32, 12
optional :resource_quota_size, :int32, 1001
repeated :channel_args, :message, 1002, "grpc.testing.ChannelArg"
optional :server_processes, :int32, 21
end
add_message "grpc.testing.ServerArgs" do
oneof :argtype do
optional :setup, :message, 1, "grpc.testing.ServerConfig"
optional :mark, :message, 2, "grpc.testing.Mark"
end
end
add_message "grpc.testing.ServerStatus" do
optional :stats, :message, 1, "grpc.testing.ServerStats"
optional :port, :int32, 2
optional :cores, :int32, 3
end
add_message "grpc.testing.CoreRequest" do
end
add_message "grpc.testing.CoreResponse" do
optional :cores, :int32, 1
end
add_message "grpc.testing.Void" do
end
add_message "grpc.testing.Scenario" do
optional :name, :string, 1
optional :client_config, :message, 2, "grpc.testing.ClientConfig"
optional :num_clients, :int32, 3
optional :server_config, :message, 4, "grpc.testing.ServerConfig"
optional :num_servers, :int32, 5
optional :warmup_seconds, :int32, 6
optional :benchmark_seconds, :int32, 7
optional :spawn_local_worker_count, :int32, 8
end
add_message "grpc.testing.Scenarios" do
repeated :scenarios, :message, 1, "grpc.testing.Scenario"
end
add_message "grpc.testing.ScenarioResultSummary" do
optional :qps, :double, 1
optional :qps_per_server_core, :double, 2
optional :server_system_time, :double, 3
optional :server_user_time, :double, 4
optional :client_system_time, :double, 5
optional :client_user_time, :double, 6
optional :latency_50, :double, 7
optional :latency_90, :double, 8
optional :latency_95, :double, 9
optional :latency_99, :double, 10
optional :latency_999, :double, 11
optional :server_cpu_usage, :double, 12
optional :successful_requests_per_second, :double, 13
optional :failed_requests_per_second, :double, 14
optional :client_polls_per_request, :double, 15
optional :server_polls_per_request, :double, 16
optional :server_queries_per_cpu_sec, :double, 17
optional :client_queries_per_cpu_sec, :double, 18
end
add_message "grpc.testing.ScenarioResult" do
optional :scenario, :message, 1, "grpc.testing.Scenario"
optional :latencies, :message, 2, "grpc.testing.HistogramData"
repeated :client_stats, :message, 3, "grpc.testing.ClientStats"
repeated :server_stats, :message, 4, "grpc.testing.ServerStats"
repeated :server_cores, :int32, 5
optional :summary, :message, 6, "grpc.testing.ScenarioResultSummary"
repeated :client_success, :bool, 7
repeated :server_success, :bool, 8
repeated :request_results, :message, 9, "grpc.testing.RequestResultCount"
end
add_enum "grpc.testing.ClientType" do
value :SYNC_CLIENT, 0
value :ASYNC_CLIENT, 1
value :OTHER_CLIENT, 2
value :CALLBACK_CLIENT, 3
end
add_enum "grpc.testing.ServerType" do
value :SYNC_SERVER, 0
value :ASYNC_SERVER, 1
value :ASYNC_GENERIC_SERVER, 2
value :OTHER_SERVER, 3
value :CALLBACK_SERVER, 4
end
add_enum "grpc.testing.RpcType" do
value :UNARY, 0
value :STREAMING, 1
value :STREAMING_FROM_CLIENT, 2
value :STREAMING_FROM_SERVER, 3
value :STREAMING_BOTH_WAYS, 4
end
end
end
module Grpc
module Testing
PoissonParams = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.PoissonParams").msgclass
ClosedLoopParams = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ClosedLoopParams").msgclass
LoadParams = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.LoadParams").msgclass
SecurityParams = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.SecurityParams").msgclass
ChannelArg = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ChannelArg").msgclass
ClientConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ClientConfig").msgclass
ClientStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ClientStatus").msgclass
Mark = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.Mark").msgclass
ClientArgs = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ClientArgs").msgclass
ServerConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ServerConfig").msgclass
ServerArgs = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ServerArgs").msgclass
ServerStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ServerStatus").msgclass
CoreRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.CoreRequest").msgclass
CoreResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.CoreResponse").msgclass
Void = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.Void").msgclass
Scenario = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.Scenario").msgclass
Scenarios = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.Scenarios").msgclass
ScenarioResultSummary = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ScenarioResultSummary").msgclass
ScenarioResult = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ScenarioResult").msgclass
ClientType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ClientType").enummodule
ServerType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.ServerType").enummodule
RpcType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("grpc.testing.RpcType").enummodule
end
end
| 47.492147 | 131 | 0.717892 |
ff50a8be218cacb9a9145dc823869842fbe5d253 | 1,258 | # frozen_string_literal: true
module Binford
module Github
class ReviewDiscussion
include Binford::Utils
attr_reader :conversation
def initialize(commits, review_comments)
@changes = commits.map { |commit| commit_info(commit) }
@comments = review_comments.map { |comment| comment_info(comment) }
@conversation = (@changes + @comments).sort { |a, b| a[:timestamp] <=> b[:timestamp] }
end
def author
@author ||= changes.first[:author]
end
def reviewers
        @reviewers ||= comments.map { |comment| comment[:author] }.uniq - [author]
end
private
attr_reader :changes, :comments
def commit_info(commit)
date_str = commit.dig(:commit, :author, :date) || commit.dig(:commit, :committer, :date)
{
type: :change,
author: commit.dig(:author, :login),
content: commit.dig(:commit, :message),
timestamp: safe_parse_time(date_str)
}
end
def comment_info(comment)
{
type: :comment,
author: comment.dig(:user, :login),
content: comment[:body],
timestamp: safe_parse_time(comment[:created_at])
}
end
end
end
end
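# Usage sketch with hypothetical GitHub API payloads (symbol-keyed hashes, matching the
# #dig calls above). Binford::Utils#safe_parse_time is assumed to parse ISO 8601 strings.
#
#   commits  = [{ author: { login: "alice" },
#                 commit: { message: "Fix parser", author: { date: "2020-01-01T10:00:00Z" } } }]
#   comments = [{ user: { login: "bob" }, body: "LGTM", created_at: "2020-01-01T12:00:00Z" }]
#   discussion = Binford::Github::ReviewDiscussion.new(commits, comments)
#   discussion.author        # => "alice"
#   discussion.reviewers     # => ["bob"]
#   discussion.conversation  # => change and comment entries sorted by timestamp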
| 25.673469 | 96 | 0.588235 |
113d4cf59cc3d4b7ab372812344ab072e957cfc3 | 307 | require_dependency "cor1440_gen/concerns/controllers/indicadorespf_controller"
module Cor1440Gen
class IndicadorespfController < ApplicationController
load_and_authorize_resource class: Cor1440Gen::Proyectofinanciero
include Cor1440Gen::Concerns::Controllers::IndicadorespfController
end
end
| 27.909091 | 78 | 0.85342 |
b96e5528c82863ad5dd32d8d4729c61deefe8c4f | 1,191 | class UsersController < ApplicationController
before_action :set_user, only: [:show, :edit, :update, :destroy]
def new
if logged_in?
redirect_to profile_path(session[:user_id])
else
@user = User.new
end
end
def create
@user = User.new(user_params)
if @user.save
session[:user_id] = @user.id
redirect_to profile_path(@user)
else
render :new
end
end
def show
authorized
@move_tutorials = MoveTutorial.all
@session = session[:user_id]
end
def edit
end
def update
@user.update(user_params)
if @user.valid?
redirect_to profile_path(@user)
else
flash.now[:notice] = "Please make sure all filelds are correct."
render :edit
end
end
def destroy
@user.destroy
session[:user_id] = nil
redirect_to '/'
end
private
def set_user
@user = User.find_by_id(params[:id])
end
def user_params
params.require(:user).permit(:id, :email, :username, :password, :password_confirmation)
end
end
| 19.85 | 95 | 0.563392 |
ab74d579fc259b706483fe9fae07c8baa883ea75 | 2,755 | require 'abstract_unit'
module ActiveSupport
class TestCaseTest < ActiveSupport::TestCase
class FakeRunner
attr_reader :puked
def initialize
@puked = []
end
def puke(klass, name, e)
@puked << [klass, name, e]
end
def options
nil
end
def record(*args)
end
end
if defined?(MiniTest::Assertions) && TestCase < MiniTest::Assertions
def test_standard_error_raised_within_setup_callback_is_puked
tc = Class.new(TestCase) do
setup :bad_callback
def bad_callback; raise 'oh noes' end
def test_true; assert true end
end
test_name = 'test_true'
fr = FakeRunner.new
test = tc.new test_name
test.run fr
klass, name, exception = *fr.puked.first
assert_equal tc, klass
assert_equal test_name, name
assert_equal 'oh noes', exception.message
end
def test_standard_error_raised_within_teardown_callback_is_puked
tc = Class.new(TestCase) do
teardown :bad_callback
def bad_callback; raise 'oh noes' end
def test_true; assert true end
end
test_name = 'test_true'
fr = FakeRunner.new
test = tc.new test_name
test.run fr
klass, name, exception = *fr.puked.first
assert_equal tc, klass
assert_equal test_name, name
assert_equal 'oh noes', exception.message
end
def test_passthrough_exception_raised_within_test_method_is_not_rescued
tc = Class.new(TestCase) do
def test_which_raises_interrupt; raise Interrupt; end
end
test_name = 'test_which_raises_interrupt'
fr = FakeRunner.new
test = tc.new test_name
assert_raises(Interrupt) { test.run fr }
end
def test_passthrough_exception_raised_within_setup_callback_is_not_rescued
tc = Class.new(TestCase) do
setup :callback_which_raises_interrupt
def callback_which_raises_interrupt; raise Interrupt; end
def test_true; assert true end
end
test_name = 'test_true'
fr = FakeRunner.new
test = tc.new test_name
assert_raises(Interrupt) { test.run fr }
end
def test_passthrough_exception_raised_within_teardown_callback_is_not_rescued
tc = Class.new(TestCase) do
teardown :callback_which_raises_interrupt
def callback_which_raises_interrupt; raise Interrupt; end
def test_true; assert true end
end
test_name = 'test_true'
fr = FakeRunner.new
test = tc.new test_name
assert_raises(Interrupt) { test.run fr }
end
end
end
end
| 26.238095 | 83 | 0.635935 |
394fc873a2eaf76c18e154d08267caa690c74a9a | 816 | # frozen_string_literal: true
module Resolvers
class Comments < Resolvers::Base
type '[Types::Comment]', null: true
description 'Find all comments or filter by approved'
argument :id, String, required: false, default_value: '', as: :uuid
argument :approved, Boolean, required: false
argument :lang, String, required: false, default_value: ''
def resolve(params)
@db_query = current_account.comments
filter_approved(params[:approved])
# filter_translation(params['lang'])
db_query.order(created_at: :desc)
end
protected
def filter_approved(approved)
return if approved.nil?
@db_query = if approved
db_query.approved
else
db_query.pending
end
end
end
end
| 24.727273 | 71 | 0.631127 |
3803257499fea7ec8db6907be33b0362f7dbad72 | 465 | require 'active_record'
config = YAML.load(File.read('config/database.yml'))
ActiveRecord::Base.establish_connection(config['test'])
def need_to_migrate?
ActiveRecord::Migrator.new(:up, 'db/migrate').pending_migrations.any?
end
if need_to_migrate?
ActiveRecord::Migrator.up "db/migrate"
end
RSpec.configure do |config|
config.around do |example|
ActiveRecord::Base.transaction do
example.run
raise ActiveRecord::Rollback
end
end
end
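# A spec relying on the around-hook above would look roughly like this (illustrative
# only; Widget is a hypothetical ActiveRecord model):
#
#   RSpec.describe "transactional isolation" do
#     it "discards records created inside an example" do
#       Widget.create!(name: "temp")   # rolled back when the transaction unwinds
#     end
#   end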
| 21.136364 | 71 | 0.748387 |
e9c09ab90a4f4845ddd17bf04e22ed63ce795687 | 1,744 | exit if jruby?
ENV["DATABASE_URL"] ||= "postgres:///queue_classic_test"
require 'toro'
require 'test_workers'
describe TestWorker do
it_behaves_like "a worker"
end
describe MultiWorker do
context "when Toro is loaded" do
it "defaults to the :toro adapter" do
expect(MultiWorker.default_adapter).to eq(:toro)
end
end
context "when using the :toro adapter" do
it "performs the work using Toro" do
expect(Toro::Client).to receive(:create_job).exactly(2).times.with({class_name:"TestWorker", name:nil, args:["foo"], queue::default})
TestWorker.perform_async("foo")
MultiWorker.enqueue(TestWorker, "foo")
end
it "exposes the Toro rake task" do
expect(MultiWorker.adapter.rake_task.name).to eq("toro")
end
context "with advanced options" do
context "when configuring the :retry option" do
context "with a hash" do
it "configures retry interval" do
retry_worker = Class.new do
worker retry: {delay: 2.minutes}
end
expect(retry_worker.toro_options[:retry_interval]).to eq(2.minutes)
end
end
context "with an ActiveSupport::Duration" do
it "configures retry interval" do
retry_worker = Class.new do
worker retry: 15.seconds
end
expect(retry_worker.toro_options[:retry_interval]).to eq(15.seconds)
end
end
context "with a number" do
it "configures retry interval" do
retry_worker = Class.new do
worker retry: 20
end
expect(retry_worker.toro_options[:retry_interval]).to eq(20)
end
end
end
end
end
end | 27.25 | 139 | 0.622133 |
61c182dea7d36b64940b4f60582e4c968ac42722 | 727 | require 'tc_xml'
require 'tc_attributes'
require 'tc_attr'
require 'tc_attr_decl'
require 'tc_document'
require 'tc_document_write'
require 'tc_dtd'
require 'tc_html_parser'
require 'tc_namespaces'
require 'tc_namespace'
require 'tc_node'
require 'tc_node_cdata'
require 'tc_node_comment'
require 'tc_node_copy'
require 'tc_node_edit'
require 'tc_node_text'
require 'tc_node_write'
require 'tc_node_xlink'
require 'tc_parser'
require 'tc_parser_context'
require 'tc_reader'
require 'tc_relaxng'
require 'tc_sax_parser'
require 'tc_schema'
require 'tc_traversal'
require 'tc_xinclude'
require 'tc_xpath'
require 'tc_xpath_expression'
require 'tc_xpointer'
# Compatibility
require 'tc_properties'
require 'tc_deprecated_require' | 22.030303 | 31 | 0.825309 |
333cbe5f0916e901b532098d9ec018fe4db28c87 | 866 | Pod::Spec.new do |s|
s.name = 'iNotify'
s.version = '1.5.2'
s.license = 'zlib'
  s.summary      = "Lightweight alternative to Apple's push notifications (launch time only) for cross-promoting apps or pointing out non-obvious features."
s.description = "Library for displaying remotely administered notifications within a Mac or iPhone app. Similar to Apple's push notifications, but more lightweight and only displayed at app launch time. Perfect for cross-promoting apps or pointing out non-obvious features."
s.homepage = 'http://www.charcoaldesign.co.uk/source/cocoa#inotify'
s.author = 'Nick Lockwood'
s.source = { :git => 'https://github.com/nicklockwood/iNotify.git', :tag => '1.5.2' }
s.source_files = 'iNotify/iNotify.{h,m}'
s.resources = 'iNotify/iNotify.bundle'
s.requires_arc = false
end
| 61.857143 | 278 | 0.690531 |
03a1a255fb5b22de50489e0c3ee7299ca06cc60e | 210 | # rubocop:disable Naming/FileName
# frozen_string_literal: true
module Elastic
module V12p1
RepositoryInstanceProxy = Elastic::Latest::RepositoryInstanceProxy
end
end
# rubocop:enable Naming/FileName
| 19.090909 | 70 | 0.804762 |
f888151ea6194c4eac94f4dba65527f792408e59 | 819 | # frozen_string_literal: true
# Category model
class Category < ApplicationRecord
serialize :name, Hash
serialize :description, Hash
belongs_to :parent, class_name: 'Category', foreign_key: :parent_id, optional: true
has_many :children, class_name: 'Category', foreign_key: :parent_id
has_many :item_categories
has_many :items, through: :item_categories
has_one_attached :image
validates :image, blob: { content_type: :image, size_range: 1..5.megabytes }
# validates :name, presence: true, uniqueness: true
scope :parents, -> { where(parent_id: nil) }
def children?
children.size.positive?
end
def parent?
parent.present?
end
def path_ids
[]
end
def translated_name
name[I18n.locale]
end
def translated_description
description[I18n.locale]
end
end
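# Illustrative sketch (assumes the serialized name/description hashes are keyed by
# locale symbols, matching I18n.locale):
#
#   category = Category.new(name: { en: "Drinks", fr: "Boissons" })
#   I18n.locale = :en
#   category.translated_name  # => "Drinks"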
| 19.97561 | 85 | 0.722833 |
01af672537c1fde1bf5c4e60c2adc2ac9fd883da | 8,461 | # frozen_string_literal: true
require 'spec_helper'
require 'rails_helper'
require_relative '../contexts/tag_context'
# rubocop: disable Metrics/BlockLength
describe Api::TagsController, type: :controller do
before(:each) do
TagType.delete_cache
end
include_context 'tags'
let!(:user) { create(:user) }
let(:recipe_index) { Graph::RecipeIndex.instance }
let(:tag_index) { Graph::TagIndex.instance }
describe 'GET - index' do
before do
recipe_index.reset
tag_index.reset
sign_in user
get :index,
params: params,
format: 'json'
end
let!(:tag_groups) do
{ protein.id.to_s => { nut.id.to_s => [almond.id], soy.id.to_s => [tofu.id] } }
end
let!(:ingredient_tags) do
[
{ 'Label' => 'Nut', 'Value' => nut.id },
{ 'Label' => 'Almond', 'Value' => almond.id },
{ 'Label' => 'Protein', 'Value' => protein.id },
{ 'Label' => 'Vodka', 'Value' => vodka.id },
{ 'Label' => 'Soy', 'Value' => soy.id },
{ 'Label' => 'Tofu', 'Value' => tofu.id }
]
end
let(:non_ingredient_tags) do
[
{ 'Label' => 'plants', 'Value' => plants.id },
{ 'Label' => 'crushed', 'Value' => crushed.id }
]
end
let(:tags) { ingredient_tags + non_ingredient_tags }
describe 'returns all tags and tag_groups' do
let(:params) { {} }
it 'responds with tag_groups' do
body = JSON.parse(response.body)
expect(body['tag_groups']).to eq(tag_groups)
end
it 'responds with tags user has access to' do
# returning tags that have no recipes so they can be added in recipes form
body = JSON.parse(response.body)
expect(body['tags'].size).to eq 8
expect(body['tags'].map { |t| t['Value'] } - [
almond.id, vodka.id, toasted.id, crushed.id, nut.id, protein.id, soy.id, tofu.id
]).
to eq([])
end
end
describe 'returns all tags and tag_groups' do
let(:params) { { type: 'ingredients' } }
it 'responds with tag_groups' do
body = JSON.parse(response.body)
expect(body['tag_groups']).to be_nil
end
it 'responds with tags' do
body = JSON.parse(response.body)
expect(body['tags'].size).to eq 6
expect(body['tags'] - ingredient_tags).to eq([])
end
end
describe 'returns all tags and tag_groups' do
let(:params) { { type: 'more' } }
it 'responds with tag_groups' do
body = JSON.parse(response.body)
expect(body['tag_groups']).to be_nil
end
it 'responds with tags' do
body = JSON.parse(response.body)
expect(body['tags'].size).to eq 2
expect(body['tags'] - non_ingredient_tags).to eq([])
end
end
end
describe 'GET - show' do
before do
recipe_index.reset
tag_index.reset
sign_in user
get :show,
params: params,
format: 'json'
end
describe 'returns data for an ingredient tag' do
let!(:params) { { id: almond.id } }
it 'returns the correct name, id, and type' do
body = JSON.parse(response.body)
expect(body['id']).to eq almond.id
expect(body['name']).to eq almond.name
expect(body['tag_type_id']).to eq almond.tag_type_id
end
end
describe 'returns data for an ingredient type tag' do
let!(:params) { { id: nut.id } }
let!(:expected_response) do
{
'id' => nut.id,
'name' => 'Nut',
'description' => nil,
'grandchild_tags' => {},
'grandparent_tags' => {},
'tag_type_id' => nut.tag_type_id,
'recipe_id' => nil,
'sister_tags' => { soy.id.to_s => 'Soy' },
'tags' => { nut.id.to_s => 'Nut' },
'child_tags' => { almond.id.to_s => 'Almond' },
'parent_tags' => { protein.id.to_s => 'Protein' },
'modification_tags' => { crushed.id.to_s => 'crushed' },
'modified_tags' => {}
}
end
it 'returns the correct name, id, and type' do
body = JSON.parse(response.body)
expect(body).to eq expected_response
end
end
describe 'returns data for an ingredient family tag' do
let!(:params) { { id: protein.id } }
let!(:expected_response) do
{
'id' => protein.id,
'name' => 'Protein',
'description' => nil,
'tag_type_id' => protein.tag_type_id,
'recipe_id' => nil,
'sister_tags' => {},
'tags' => { protein.id.to_s => 'Protein' },
'child_tags' => { nut.id.to_s => 'Nut', soy.id.to_s => 'Soy' },
'grandchild_tags' => { almond.id.to_s => 'Almond', tofu.id.to_s => 'Tofu' },
'grandparent_tags' => {},
'parent_tags' => {},
'modification_tags' => { crushed.id.to_s => crushed.name },
'modified_tags' => {}
}
end
it 'returns the correct name, id, and type' do
body = JSON.parse(response.body)
expect(body).to eq expected_response
end
end
end
describe 'GET - edit' do
before do
sign_in user
get :edit,
params: params,
format: 'json'
end
describe 'returns data for an ingredient tag' do
let!(:params) { { id: almond.id } }
it 'returns the correct name, id, and type' do
body = JSON.parse(response.body)
expect(body['id']).to eq almond.id
expect(body['name']).to eq almond.name
expect(body['tag_type_id']).to eq almond.tag_type_id
expect(body['description']).to eq almond.description
expect(body['recipe_id']).to eq almond.recipe_id
expect(body['parent_tags']).to eq [{ 'id' => nut.id, 'name' => nut.name }]
end
end
end
describe 'PUT - update' do
before do
sign_in user
put :update,
params: params,
format: 'json'
end
describe 'returns data for an ingredient tag' do
let!(:params) do
{
id: almond.id,
name: 'almond2',
description: 'desc2',
recipe_id: 2,
tag_type_id: almond.tag_type_id + 1,
parent_tags: [{ 'id' => protein.id, 'name' => protein.name }]
}
end
it 'returns the correct name, id, and type' do
body = JSON.parse(response.body)
expect(body['id']).to eq almond.id
expect(body['name']).to eq 'almond2'
expect(body['tag_type_id']).to eq almond.tag_type_id + 1
expect(body['description']).to eq 'desc2'
expect(body['recipe_id']).to eq 2
expect(body['parent_tags']).to eq [{ 'id' => protein.id, 'name' => protein.name }]
end
end
end
describe 'PUT - update - missing permissions' do
before do
sign_in other_user
put :update,
params: params,
format: 'json'
end
describe 'rejects the update from a user without permission' do
let!(:other_user) { create :user }
let!(:params) do
{
id: almond.id,
name: 'almond2',
description: 'desc2',
recipe_id: 2,
tag_type_id: almond.tag_type_id + 1,
parent_tags: [{ 'id' => protein.id, 'name' => protein.name }]
}
end
it 'returns an empty body and a 403 status' do
body = JSON.parse(response.body)
expect(body).to eq({})
expect(response.status).to eq 403
end
end
end
describe 'POST - create' do
before do
sign_in user
post :create,
params: params,
format: 'json'
end
describe 'returns data for an ingredient tag' do
let!(:params) do
{
name: 'hazelnut',
description: 'descHazel',
recipe_id: 3,
tag_type_id: almond.tag_type_id,
parent_tags: [{ 'id' => protein.id, 'name' => protein.name }]
}
end
it 'returns the correct name, id, and type' do
body = JSON.parse(response.body)
expect(body['id']).to be_a_kind_of(Integer)
expect(body['name']).to eq 'hazelnut'
expect(body['tag_type_id']).to eq almond.tag_type_id
expect(body['description']).to eq 'descHazel'
expect(body['recipe_id']).to eq 3
expect(body['parent_tags']).to eq [{ 'id' => protein.id, 'name' => protein.name }]
end
end
end
end
# rubocop: enable Metrics/BlockLength
| 30.003546 | 90 | 0.551944 |
6adfe75f49731af206adc5cfc8c696d0be95e4c4 | 1,831 | class Mat2 < Formula
desc "Metadata anonymization toolkit"
homepage "https://0xacab.org/jvoisin/mat2"
url "https://0xacab.org/jvoisin/mat2/-/archive/0.12.0/mat2-0.12.0.tar.gz"
sha256 "d2a7a4dd674b084fcd2a63d70cd132a36cea46d98626df3c9258f8327085baa0"
license "LGPL-3.0-or-later"
bottle do
cellar :any_skip_relocation
sha256 "4fad5de836a909094672ce10f2de736f371ba6e075b2d2c89bd91f13a718772c" => :big_sur
sha256 "f561dac02a6bf1c540295f9498d59bfa987c4f279d8a23ce546eee45fdc0c605" => :catalina
sha256 "2bf3052ed3c51c5e77982c80904ad0599763125ad9dc31ccdd73ec1808a5ab05" => :mojave
end
depends_on "exiftool"
depends_on "ffmpeg"
depends_on "gdk-pixbuf"
depends_on "librsvg"
depends_on "poppler"
depends_on "py3cairo"
depends_on "pygobject3"
depends_on "[email protected]"
resource "mutagen" do
url "https://files.pythonhosted.org/packages/f3/d9/2232a4cb9a98e2d2501f7e58d193bc49c956ef23756d7423ba1bd87e386d/mutagen-1.45.1.tar.gz"
sha256 "6397602efb3c2d7baebd2166ed85731ae1c1d475abca22090b7141ff5034b3e1"
end
def install
version = Language::Python.major_minor_version Formula["[email protected]"].bin/"python3"
pygobject3 = Formula["pygobject3"]
ENV["PYTHONPATH"] = lib/"python#{version}/site-packages"
ENV.append_path "PYTHONPATH", pygobject3.opt_lib+"python#{version}/site-packages"
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python#{version}/site-packages"
resources.each do |r|
r.stage do
system Formula["[email protected]"].bin/"python3", *Language::Python.setup_install_args(libexec/"vendor")
end
end
system Formula["[email protected]"].bin/"python3", *Language::Python.setup_install_args(prefix)
bin.env_script_all_files(libexec/"bin", PYTHONPATH: ENV["PYTHONPATH"])
end
test do
system "#{bin}/mat2", "-l"
end
end
| 35.901961 | 138 | 0.754233 |
6a15f638e3dc2bcfcc8cf7685cd0b7250ed595bb | 1,768 | require "rails_helper"
RSpec.describe "Authentication", type: :system do
context "when guest" do
it "renders sign in page with no flash messages" do
visit "/"
expect(page).to have_text "Log in"
expect(page).to_not have_text "sign in before continuing"
end
%w[volunteer supervisor casa_admin].each do |user_type|
# Add back when Travis CI correctly handles large screen size
xit "allows #{user_type} to sign in" do
user = create(user_type.to_sym)
visit "/"
expect(page).to have_text "Log in"
expect(page).to_not have_text "sign in before continuing"
fill_in "Email", with: user.email
fill_in "Password", with: "123456"
within ".actions" do
click_on "Log in"
end
expect(page).to have_text user.email
end
end
it "does not allow AllCasaAdmin to sign in" do
user = create(:all_casa_admin)
visit "/"
expect(page).to have_text "Log in"
expect(page).to_not have_text "sign in before continuing"
fill_in "Email", with: user.email
fill_in "Password", with: "123456"
within ".actions" do
click_on "Log in"
end
expect(page).to have_text "Invalid Email or password"
end
end
context "when authenticated user" do
let(:user) { create(:casa_admin) }
before { sign_in user }
# Add back when Travis CI correctly handles large screen size
xit "renders dashboard page and shows correct flash message upon sign out" do
visit "/"
expect(page).to have_text "Volunteers"
# click_link "Log out"
# expect(page).to_not have_text "sign in before continuing"
# expect(page).to have_text "Signed out successfully"
end
end
end
| 28.983607 | 81 | 0.645362 |
0856e13fa522e4fce90023e40e5e8463a284e243 | 748 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Vulnerabilities::Statistics::ScheduleWorker do
let(:worker) { described_class.new }
describe "#perform" do
let(:project) { create(:project) }
let(:deleted_project) { create(:project, pending_delete: true) }
before do
project.project_setting.update!(has_vulnerabilities: true)
deleted_project.project_setting.update!(has_vulnerabilities: true)
allow(Vulnerabilities::Statistics::AdjustmentWorker).to receive(:perform_in)
end
it 'schedules the AdjustmentWorker with project_ids' do
worker.perform
expect(Vulnerabilities::Statistics::AdjustmentWorker).to have_received(:perform_in).with(30, [project.id])
end
end
end
| 28.769231 | 112 | 0.741979 |
618ac9fc76bc21068e3f9dd7063359bbd4f75809 | 886 | # frozen_string_literal: true
module SolidusGraphqlApi
module Mutations
class BaseMutation < GraphQL::Schema::RelayClassicMutation
argument_class Types::Base::Argument
field_class Types::Base::Field
input_object_class Types::Base::InputObject
object_class Types::Base::Object
private
def guest_token
context[:order_token]
end
def current_order
context[:current_order]
end
def current_user
context[:current_user]
end
def current_ability
context[:current_ability]
end
def user_errors(*path, errors)
return [] if errors.empty?
errors.map do |attribute, message|
{
path: ["input", *path].concat(attribute.to_s.camelize(:lower).split('.')),
message: message,
}
end
end
end
end
end
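# Hypothetical subclass sketch (hedged): the mutation name, argument, and the
# attribute/message pair below are invented purely to show how +user_errors+
# shapes its output.
#
#   class UpdateEmail < SolidusGraphqlApi::Mutations::BaseMutation
#     argument :email, String, required: true
#
#     def resolve(email:)
#       if current_user.update(email: email)
#         { user_errors: [] }
#       else
#         { user_errors: user_errors(:user, { email: "is invalid" }) }
#       end
#     end
#   end
#
#   user_errors(:user, { email: "is invalid" })
#   # => [{ path: ["input", :user, "email"], message: "is invalid" }]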
| 21.095238 | 86 | 0.613995 |
1a8543f9d9efd5318860636b3d3803830ba38ceb | 8,588 | =begin
#Selling Partner API for Product Type Definitions
#The Selling Partner API for Product Type Definitions provides programmatic access to attribute and data requirements for product types in the Amazon catalog. Use this API to return the JSON Schema for a product type that you can then use with other Selling Partner APIs, such as the Selling Partner API for Listings Items, the Selling Partner API for Catalog Items, and the Selling Partner API for Feeds (for JSON-based listing feeds). For more information, see the [Product Type Definitions API Use Case Guide](https://github.com/amzn/selling-partner-api-docs/blob/main/guides/en-US/use-case-guides/product-type-definitions-api-use-case-guide/definitions-product-types-api-use-case-guide_2020-09-01.md).
OpenAPI spec version: 2020-09-01
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.26
=end
require 'date'
module AmzSpApi::ProductTypeDefinitionsApiModel
# Link to retrieve the schema.
class SchemaLinkLink
# URI resource for the link.
attr_accessor :resource
# HTTP method for the link operation.
attr_accessor :verb
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'resource' => :'resource',
:'verb' => :'verb'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'resource' => :'Object',
:'verb' => :'Object'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `AmzSpApi::ProductTypeDefinitionsApiModel::SchemaLinkLink` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `AmzSpApi::ProductTypeDefinitionsApiModel::SchemaLinkLink`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'resource')
self.resource = attributes[:'resource']
end
if attributes.key?(:'verb')
self.verb = attributes[:'verb']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @resource.nil?
invalid_properties.push('invalid value for "resource", resource cannot be nil.')
end
if @verb.nil?
invalid_properties.push('invalid value for "verb", verb cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @resource.nil?
return false if @verb.nil?
verb_validator = EnumAttributeValidator.new('Object', ['GET'])
return false unless verb_validator.valid?(@verb)
true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] verb Object to be assigned
def verb=(verb)
validator = EnumAttributeValidator.new('Object', ['GET'])
unless validator.valid?(verb)
fail ArgumentError, "invalid value for \"verb\", must be one of #{validator.allowable_values}."
end
@verb = verb
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
resource == o.resource &&
verb == o.verb
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[resource, verb].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
elsif attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
AmzSpApi::ProductTypeDefinitionsApiModel.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end end
end
| 32.653992 | 706 | 0.638449 |
ab21c532d0a5fddc590d3ce7f60c5211343b11c8 | 1,306 | # frozen_string_literal: true
require "dnsruby"
require "public_suffix"
module GitHubPages
module HealthCheck
class CAA
attr_reader :host
attr_reader :error
def initialize(host)
raise ArgumentError, "host cannot be nil" if host.nil?
@host = host
end
def errored?
records # load the records first
!error.nil?
end
def lets_encrypt_allowed?
return false if errored?
return true unless records_present?
records.any? { |r| r.property_value == "letsencrypt.org" }
end
def records_present?
return false if errored?
records && !records.empty?
end
def records
@records ||= (get_caa_records(host) | get_caa_records(PublicSuffix.domain(host)))
end
private
def get_caa_records(domain)
return [] if domain.nil?
query(domain).select { |r| issue_caa_record?(r) }
end
def issue_caa_record?(record)
record.type == Dnsruby::Types::CAA && record.property_tag == "issue"
end
def query(domain)
GitHubPages::HealthCheck::Resolver.new(domain).query(Dnsruby::Types::CAA)
rescue Dnsruby::ResolvError, Dnsruby::ResolvTimeout => e
@error = e
[]
end
end
end
end
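# Usage sketch (hedged): "example.com" is a placeholder domain; results depend
# on live DNS lookups performed lazily when #records is first called.
#
#   caa = GitHubPages::HealthCheck::CAA.new("example.com")
#   caa.lets_encrypt_allowed?  # true when there are no CAA "issue" records, or one permits letsencrypt.org
#   caa.errored?               # true if the resolver raised an error or timed out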
| 22.517241 | 89 | 0.612557 |
91ac327545d98ac4e867a84e778163b2a5d740bb | 1,596 | require 'spec_helper'
require 'ruby_event_store/rom/memory'
require 'ruby_event_store/spec/rom/relations/stream_entries_lint'
module RubyEventStore
module ROM
module Memory
RSpec.describe Relations::StreamEntries do
let(:rom_helper) { SpecHelper.new }
it_behaves_like :stream_entries_relation, Relations::StreamEntries
specify '#insert raises errors' do
relation = rom_helper.env.rom_container.relations[:stream_entries]
stream_entries = [
{ stream: 'stream', position: 0, event_id: id1 = SecureRandom.uuid },
{ stream: 'stream', position: 1, event_id: SecureRandom.uuid },
{ stream: 'stream', position: 2, event_id: SecureRandom.uuid }
]
relation.command(:create).call(stream_entries)
conflicting_event_id = { stream: 'stream', position: 3, event_id: id1, created_at: Time.now }
expect(relation.to_a.size).to eq(3)
expect do
relation.insert(conflicting_event_id)
end.to raise_error do |ex|
expect(ex).to be_a(RubyEventStore::ROM::TupleUniquenessError)
expect(ex.message).to eq("Uniquness violated for stream (\"stream\") and event_id (\"#{id1}\")")
end
conflicting_position = { stream: 'stream', position: 2, event_id: SecureRandom.uuid, created_at: Time.now }
expect do
relation.insert(conflicting_position)
end.to raise_error do |ex|
expect(ex).to be_a(RubyEventStore::ROM::TupleUniquenessError)
expect(ex.message).to eq('Uniquness violated for stream ("stream") and position (2)')
end
end
end
end
end
end
| 33.957447 | 113 | 0.689223 |
0375cfc55a2e7ceba0a2ad75c2355708ef0c8627 | 1,756 | Pod::Spec.new do |s|
s.name = "AppXperience"
s.version = "1.9.5"
s.summary = "AppXperience is an advertising framework for mobile applications. Learn more at http://appxperience.com."
s.homepage = "https://github.com/atelierdumobile/AppXperienceFramework.git"
s.license = {
:type => 'Commercial',
:text => <<-LICENSE
Copyright (C) 2013 AppXperience
Permission is hereby granted to use this framework as is, modification are not allowed.
All rights reserved.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
LICENSE
}
s.author = { "Nicolas Lauquin" => "[email protected]" }
s.source = { :git => "https://github.com/atelierdumobile/AppXperienceFramework.git" , :tag => "1.9.5"}
s.platform = :ios, '5.0'
s.source_files = 'AppXperience.framework/Versions/A/Headers/*.{h}'
s.resources = 'AppXperience.bundle'
s.frameworks = 'QuartzCore', 'CoreGraphics', 'MediaPlayer', 'CoreText', 'UIKit', 'AppXperience'
s.xcconfig = { 'FRAMEWORK_SEARCH_PATHS' => '"$(PODS_ROOT)/AppXperience/"' }
s.preserve_paths = 'AppXperience.framework', 'AppXperience.bundle'
s.dependency 'MKNetworkKit', '~> 0.85'
s.dependency 'SDWebImage', '~> 3.1'
s.requires_arc = false
end
| 45.025641 | 118 | 0.666856 |
26dc1189d40fe60096e05afeb8fa16ece8ed1788 | 2,781 | require 'doorkeeper/rails/routes/mapping'
require 'doorkeeper/rails/routes/mapper'
module Doorkeeper
module Rails
class Routes # :nodoc:
module Helper
def use_doorkeeper(options = {}, &block)
Doorkeeper::Rails::Routes.new(self, &block).generate_routes!(options)
end
end
def self.install!
ActionDispatch::Routing::Mapper.send :include, Doorkeeper::Rails::Routes::Helper
end
attr_reader :routes
def initialize(routes, &block)
@routes = routes
@mapping = Mapper.new.map(&block)
if Doorkeeper.configuration.api_only
@mapping.skips.push(:applications, :authorized_applications)
end
end
def generate_routes!(options)
routes.scope options[:scope] || 'oauth', as: 'oauth' do
map_route(:authorizations, :authorization_routes)
map_route(:tokens, :token_routes)
map_route(:tokens, :revoke_routes)
map_route(:tokens, :introspect_routes)
map_route(:applications, :application_routes)
map_route(:authorized_applications, :authorized_applications_routes)
map_route(:token_info, :token_info_routes)
end
end
private
def map_route(name, method)
send(method, @mapping[name]) unless @mapping.skipped?(name)
end
def authorization_routes(mapping)
routes.resource(
:authorization,
path: 'authorize',
only: %i[create destroy],
as: mapping[:as],
controller: mapping[:controllers]
) do
routes.get '/native', action: :show, on: :member
routes.get '/', action: :new, on: :member
end
end
def token_routes(mapping)
routes.resource(
:token,
path: 'token',
only: [:create], as: mapping[:as],
controller: mapping[:controllers]
)
end
def revoke_routes(mapping)
routes.post 'revoke', controller: mapping[:controllers], action: :revoke
end
def introspect_routes(mapping)
routes.post 'introspect', controller: mapping[:controllers], action: :introspect
end
def token_info_routes(mapping)
routes.resource(
:token_info,
path: 'token/info',
only: [:show], as: mapping[:as],
controller: mapping[:controllers]
)
end
def application_routes(mapping)
routes.resources :doorkeeper_applications, controller: mapping[:controllers], as: :applications, path: 'applications'
end
def authorized_applications_routes(mapping)
routes.resources :authorized_applications, only: %i[index destroy], controller: mapping[:controllers]
end
end
end
end
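# Usage sketch (hedged): once `install!` has mixed the helper into the Rails
# mapper, a host application's config/routes.rb can call it; the custom
# controller name below is illustrative only.
#
#   Rails.application.routes.draw do
#     use_doorkeeper scope: "oauth" do
#       skip_controllers :authorized_applications
#       controllers tokens: "custom_tokens"
#     end
#   end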
| 29.273684 | 125 | 0.619561 |
61c784b30a589ff9030766a50245533b1df8da83 | 310 | class Selected_book
attr_accessor :title_and_author, :total_votes, :title, :author, :url, :description, :rating, :number_of_ratings
@@all = []
def initialize(title_and_author, url)
@title_and_author = title_and_author
@url = url
@@all << self
end
def self.all
@@all
end
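# Usage sketch (hedged, illustrative values only):
#
#   book = Selected_book.new("Example Title by Example Author", "https://example.com/book")
#   Selected_book.all.include?(book)   # => true; every new instance is pushed onto @@all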
end | 25.833333 | 114 | 0.670968 |
bf58f011fb7234fb8d6f69541334ff4b84328381 | 1,060 | # frozen_string_literal: true
module API
module Meta
class Base
def initialize(endpoint_klass, opts = {}, &block)
@endpoint_klass = endpoint_klass
@opts = opts
opts[:block] = block if block.present?
decorate
end
private
attr_reader :endpoint_klass, :opts
def decorate
return unless opts.present?
endpoint_klass.send(:desc, desc)
endpoint_klass.send(
:route_setting,
scopes: opts[:scopes]
) if opts.key?(:scopes)
define_params
define_endpoint
end
def define_params
endpoint_klass.instance_exec(this_resource) do |resource_name|
params do
requires :id, type: Integer, desc: "ID of #{resource_name}"
end
end
end
def define_endpoint
raise NotImplementedError
end
def desc
raise NotImplementedError
end
def this_resource
@this_resource ||= endpoint_klass.name.demodulize
end
end
end
end
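# Hypothetical subclass sketch (hedged): concrete subclasses must implement
# +desc+ and +define_endpoint+; the Grape-style route below is invented for
# illustration only.
#
#   module API
#     module Meta
#       class Show < Base
#         private
#
#         def desc
#           "Returns a single #{this_resource}"
#         end
#
#         def define_endpoint
#           endpoint_klass.send(:get, ":id", &opts[:block])
#         end
#       end
#     end
#   end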
| 20.384615 | 71 | 0.595283 |
f7eefeb89251f73a600aaaa3c27efaf11124cd0f | 6,785 | # Spec requirements
require 'models/persistence/sequel/spec_helper'
require 'models/persistence/sequel/page_shared'
require 'lims-core/persistence/multi_criteria_filter'
require 'lims-laboratory-app/organization/order/all'
module Lims::LaboratoryApp
module Lims::Core::Persistence
shared_context "with saved orders ('models' version)" do
include_context "with saved batches ('models' version)"
let(:basic_parameters) { { :creator => Organization::User.new(), :study => Organization::Study.new(), :pipeline => "testing" } }
let(:orders) {
# We give a different pipeline to be able to differentiate each order easily
# and sort them
[
Organization::Order.new(basic_parameters.merge(:pipeline => "P1")).tap do |o|
o.add_source("source1", "11111111-1111-0000-0000-000000000000")
o.add_target("source2", "11111111-2222-0000-0000-000000000000")
o.add_source("source3", "00000000-3333-0000-0000-000000000000")
o.build!
o.start!
end,
Organization::Order.new(basic_parameters.merge(:pipeline => "P2")).tap do |o|
o.add_source("source1", "22222222-1111-0000-0000-000000000000")
o.add_source("source2", "22222222-2222-0000-0000-000000000000")
o.add_target("source3", "00000000-3333-0000-0000-000000000000")
o.build!
o.start!
end,
Organization::Order.new(basic_parameters.merge(:pipeline => "P3")).tap do |o|
o.add_source("source1", "33333333-1111-0000-0000-000000000000")
o.add_source("source2", "33333333-2222-0000-0000-000000000000")
o.add_target("target1", "00000000-3333-0000-0000-000000000000")
o.build!
o.start!
o.complete!
end
]
}
let!(:ids) {
orders.map do |o|
store.with_session do |session|
session << o
o[:source2].first.batch = session[batch_uuids[0]] if o.pipeline == 'P1'
o[:source1].first.batch = session[batch_uuids[1]] if o.pipeline == 'P2'
o[:target1].first.batch = session[batch_uuids[0]] if o.pipeline == 'P3'
end
end
}
end
shared_context "with saved batches ('models' version)" do
let!(:batch_uuids) do
['11111111-2222-2222-3333-000000000000', '11111111-2222-2222-3333-111111111111'].tap do |uuids|
uuids.each do |uuid|
store.with_session do |session|
batch = Organization::Batch.new
session << batch
ur = session.new_uuid_resource_for(batch)
ur.send(:uuid=, uuid)
end
end
end
end
end
shared_examples_for "finding orders" do |criteria, indexes|
let(:filter) { MultiCriteriaFilter.new(criteria) }
let(:persistor) { store.with_session { |s| filter.call(s.order) } }
context do
it "find the right orders" do
loaded = persistor.slice(0, orders.size).to_a.sort { |a,b| a.pipeline <=> b.pipeline }
original = indexes.map { |i| orders[i]}.sort { |a,b| a.pipeline <=> b.pipeline }
loaded.should == original
end
it "find the correct number of order" do
persistor.count.should == indexes.size
end
end
end
shared_examples_for "searchable by item criteria" do
context "saved orders" do
include_context "with saved orders ('models' version)"
context "lookup by one uuid" do
it_behaves_like "finding orders", { :item => {:uuid => "11111111-2222-0000-0000-000000000000" } }, [0]
context "find 2 orders" do
it_behaves_like "finding orders", { :item => {:uuid => "00000000-3333-0000-0000-000000000000" } }, [0,1,2]
end
end
context "lookup by role" do
it_behaves_like "finding orders", { :item => {:role => "source3"} }, [0,1]
it_behaves_like "finding orders", { :item => {:role => %w[source3 target1] } }, [0,1,2]
end
context "lookup by status" do
it_behaves_like "finding orders", { :item => {:uuid => "00000000-3333-0000-0000-000000000000", :status => "pending" } }, [1,2]
end
context "lookup by role and uuid and status" do
it_behaves_like "finding orders", { :item => { :role => "source3", :status => "pending", :uuid => "00000000-3333-0000-0000-000000000000" } }, [1]
end
context "mix order and items criteria" do
it_behaves_like "finding orders", { :status => "completed", :item => { :uuid => "00000000-3333-0000-0000-000000000000" } }, [2]
end
context "lookup by batch assigned to order items" do
it_behaves_like "finding orders", { :item => {:batch => {"uuid" => '11111111-2222-2222-3333-111111111111'}}}, [1]
end
end
end
shared_examples_for "finding resources" do |uuids|
it "finds the resource" do
store.with_session do |session|
results = search.call(session)
all = results.slice(0, uuids.size).to_a
all.size.should == uuids.size
uuids.each do |uuid|
all.should include(session[uuid])
end
all.each do |resource|
resource.should be_a(model)
end
end
end
end
shared_examples_for "orders filtrable" do
include_context "with saved orders ('models' version)"
let(:description) { "lookup by order" }
let(:filter) { Lims::Core::Persistence::OrderFilter.new(criteria) }
let(:search) { Lims::Core::Persistence::Search.new(:model => model, :filter => filter, :description => description) }
context "by order pipeline" do
let(:criteria) { {:order => {:pipeline => "P1"}} }
it_behaves_like "finding resources", ['11111111-2222-0000-0000-000000000000', '00000000-3333-0000-0000-000000000000']
end
context "by order status" do
let(:criteria) { {:order => {:status => "in_progress"}} }
it_behaves_like "finding resources", ['22222222-1111-0000-0000-000000000000','11111111-2222-0000-0000-000000000000', '00000000-3333-0000-0000-000000000000']
end
context "by order item" do
let(:criteria) { {:order => {:item => {:status => "pending"}}} }
it_behaves_like "finding resources", ['11111111-2222-0000-0000-000000000000']
end
context "by batch assigned to order items" do
let(:criteria) { {:order => {:item => {:batch => {"uuid" => '11111111-2222-2222-3333-000000000000'}}}} }
it_behaves_like "finding resources", ['11111111-2222-0000-0000-000000000000', '00000000-3333-0000-0000-000000000000']
end
end
end
end
| 40.873494 | 164 | 0.601916 |
1c916e85474a4b81af0f84791aac129e30d69ea9 | 3,725 | # frozen_string_literal: true
require_relative '../../test_helper'
require 'pagy/extras/materialize'
require_relative '../../mock_helpers/pagy_buggy'
require_relative '../../mock_helpers/app'
describe 'pagy/extras/materialize' do
let(:app) { MockApp.new }
describe '#pagy_materialize_nav' do
it 'renders first page' do
pagy = Pagy.new(count: 1000, page: 1)
_(app.pagy_materialize_nav(pagy)).must_rematch
_(app.pagy_materialize_nav(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra')).must_rematch
end
it 'renders intermediate page' do
pagy = Pagy.new(count: 1000, page: 20)
_(app.pagy_materialize_nav(pagy)).must_rematch
_(app.pagy_materialize_nav(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra')).must_rematch
end
it 'renders last page' do
pagy = Pagy.new(count: 1000, page: 50)
_(app.pagy_materialize_nav(pagy)).must_rematch
_(app.pagy_materialize_nav(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra')).must_rematch
end
it 'should raise for wrong series' do
_ { app.pagy_materialize_nav(PagyBuggy.new(count: 100)) }.must_raise Pagy::InternalError
end
end
describe '#pagy_materialize_nav_js' do
it 'renders first page' do
pagy = Pagy.new(count: 1000, page: 1)
_(app.pagy_materialize_nav_js(pagy)).must_rematch
_(app.pagy_materialize_nav_js(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra',
steps: { 0 => [1, 2, 2, 1], 600 => [1, 3, 3, 1] })).must_rematch
end
it 'renders intermediate page' do
pagy = Pagy.new(count: 1000, page: 20)
_(app.pagy_materialize_nav_js(pagy)).must_rematch
_(app.pagy_materialize_nav_js(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra',
steps: { 0 => [1, 2, 2, 1], 600 => [1, 3, 3, 1] })).must_rematch
end
it 'renders last page' do
pagy = Pagy.new(count: 1000, page: 50)
_(app.pagy_materialize_nav_js(pagy)).must_rematch
_(app.pagy_materialize_nav_js(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra',
steps: { 0 => [1, 2, 2, 1], 600 => [1, 3, 3, 1] })).must_rematch
end
it 'renders with :steps' do
pagy = Pagy.new(count: 1000, page: 20, steps: { 0 => [1, 2, 2, 1], 500 => [2, 3, 3, 2] })
_(app.pagy_materialize_nav_js(pagy)).must_rematch
_(app.pagy_materialize_nav_js(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra',
steps: { 0 => [1, 2, 2, 1], 600 => [1, 3, 3, 1] })).must_rematch
end
it 'raises with missing step 0' do
pagy = Pagy.new(count: 1000, page: 20, steps: { 0 => [1, 2, 2, 1], 600 => [1, 3, 3, 1] })
_ { app.pagy_materialize_nav_js(pagy, steps: { 600 => [1, 3, 3, 1] }) }.must_raise Pagy::VariableError
end
end
describe '#pagy_materialize_combo_nav_js' do
it 'renders first page' do
pagy = Pagy.new(count: 103, page: 1)
_(app.pagy_materialize_combo_nav_js(pagy)).must_rematch
_(app.pagy_materialize_combo_nav_js(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra')).must_rematch
end
it 'renders intermediate page' do
pagy = Pagy.new(count: 103, page: 3)
_(app.pagy_materialize_combo_nav_js(pagy)).must_rematch
_(app.pagy_materialize_combo_nav_js(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra')).must_rematch
end
it 'renders last page' do
pagy = Pagy.new(count: 103, page: 6)
_(app.pagy_materialize_combo_nav_js(pagy)).must_rematch
_(app.pagy_materialize_combo_nav_js(pagy, pagy_id: 'test-nav-id', link_extra: 'link-extra')).must_rematch
end
end
end
| 45.426829 | 111 | 0.635705 |
acfe861eb5675c84881cfe2b084e4b1a7fae888d | 1,684 | Rails.application.routes.draw do
get '/events/feed' => 'events#feed'
get '/news/feed' => 'news#feed'
get '/people/feed' => 'people#feed'
get '/resources/feed' => 'resources#feed'
resources :resources
resources :news
resources :events
resources :at_who
resources :indications, only: :create
resources :votes, only: [:create] do
collection do
delete :destroy
end
end
resources :people do
collection do
post :send_message
end
end
resource :newsletters, :controller => :newsletter_signup, only: [:create, :destroy]
get '/people/claim/:slug', to: 'people#claim', as: 'people_claim'
post '/people/claim/:slug', to: 'people#claim_person', as: 'people_claim_person'
post '/people/:id/contact', to: 'people#contact', as: 'people_contact'
get '/people/:id/contact', to: 'people#show', as: 'people_contact_get'
devise_for :users, :controllers => { :registrations => "users/registrations", :omniauth_callbacks => "users/omniauth_callbacks" }
match "/people/create_profile" => "people#new", as: :people_create_profile, :via => :get
match "/people/skills/:tag" => "people#index", as: :people_with_skills, :via => :get
match "/people/interests/:tag" => "people#index", as: :people_with_interests, :via => :get
match "/people/roles/:tag" => "people#index", as: :people_with_role, :via => :get
match "/profile/:slug" => "people#show", as: :profile, :via => :get
controller :pages do
get :unsubscribe
end
root 'pages#index'
get '/topics/tags', to: 'topics#tags', :defaults => { :format => :json }
match "/slack" => redirect("http://startupwichita-slackin.herokuapp.com"), as: :slack, via: :get
end
| 31.773585 | 131 | 0.66924 |
b922cea73259dd8ed37e669cdcf2a1ff65e6a153 | 4,128 | # frozen_string_literal: true
module ActionView
module Helpers # :nodoc:
# = Action View Rendering
#
# Implements methods that allow rendering from a view context.
# In order to use this module, all you need is to implement
# view_renderer that returns an ActionView::Renderer object.
module RenderingHelper
# Returns the result of a render that's dictated by the options hash. The primary options are:
#
# * <tt>:partial</tt> - See <tt>ActionView::PartialRenderer</tt>.
# * <tt>:file</tt> - Renders an explicit template file (this used to be the old default), add :locals to pass in those.
# * <tt>:inline</tt> - Renders an inline template similar to how it's done in the controller.
# * <tt>:plain</tt> - Renders the text passed in out. Setting the content
# type as <tt>text/plain</tt>.
# * <tt>:html</tt> - Renders the HTML safe string passed in out, otherwise
# performs HTML escape on the string first. Setting the content type as
# <tt>text/html</tt>.
# * <tt>:body</tt> - Renders the text passed in, and inherits the content
# type of <tt>text/plain</tt> from <tt>ActionDispatch::Response</tt>
# object.
#
# If no <tt>options</tt> hash is passed or if <tt>:update</tt> is specified, then:
#
# If an object responding to +render_in+ is passed, +render_in+ is called on the object,
# passing in the current view context.
#
# Otherwise, a partial is rendered using the second parameter as the locals hash.
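#
# Illustrative calls (hedged; the partial and component names below are
# placeholders, not templates shipped with Action View):
#
#   render partial: "products/product", locals: { product: @product }
#   render plain: "OK"
#   render MyComponent.new   # any object responding to +render_in+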
def render(options = {}, locals = {}, &block)
case options
when Hash
in_rendering_context(options) do |renderer|
if block_given?
view_renderer.render_partial(self, options.merge(partial: options[:layout]), &block)
else
view_renderer.render(self, options)
end
end
else
if options.respond_to?(:render_in)
options.render_in(self, &block)
else
view_renderer.render_partial(self, partial: options, locals: locals, &block)
end
end
end
# Overwrites _layout_for in the context object so it supports the case a block is
# passed to a partial. Returns the contents that are yielded to a layout, given a
# name or a block.
#
# You can think of a layout as a method that is called with a block. If the user calls
# <tt>yield :some_name</tt>, the block, by default, returns <tt>content_for(:some_name)</tt>.
# If the user calls simply +yield+, the default block returns <tt>content_for(:layout)</tt>.
#
# The user can override this default by passing a block to the layout:
#
# # The template
# <%= render layout: "my_layout" do %>
# Content
# <% end %>
#
# # The layout
# <html>
# <%= yield %>
# </html>
#
# In this case, instead of the default block, which would return <tt>content_for(:layout)</tt>,
# this method returns the block that was passed in to <tt>render :layout</tt>, and the response
# would be
#
# <html>
# Content
# </html>
#
# Finally, the block can take block arguments, which can be passed in by +yield+:
#
# # The template
# <%= render layout: "my_layout" do |customer| %>
# Hello <%= customer.name %>
# <% end %>
#
# # The layout
# <html>
# <%= yield Struct.new(:name).new("David") %>
# </html>
#
# In this case, the layout would receive the block passed into <tt>render :layout</tt>,
# and the struct specified would be passed into the block as an argument. The result
# would be
#
# <html>
# Hello David
# </html>
#
def _layout_for(*args, &block)
name = args.first
if block && !name.is_a?(Symbol)
capture(*args, &block)
else
super
end
end
end
end
end
| 37.527273 | 125 | 0.583818 |
617fa7c9af63fc8541482e5506149554e07017ef | 815 | require_relative 'base_information'
require 'json'
require 'rest-client'
class TranslateLanguage < Link::Base # Inherits shared link/request behaviour from Link::Base
attr_accessor :text, :code, :url, :information
def initialize(text, code)
@url = altera_link('translate') # Sets the URL/endpoint responsible for translation
@text = text
@code = code
get_information
end
private
def get_information
get_translate
save_information('Translating the Language', @information)
end
def get_translate
response = RestClient.get(@url, params: {key: TOKEN, text: "#{@text}", lang: "#{@code}"})
@information = "Text: #{@text} para #{@code} \nResult: "
@information += JSON.parse(response)["text"].join
# @information holds the original text, the target language code and the translated result
end
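# Usage sketch (hedged): assumes Link::Base (from base_information) supplies
# TOKEN, altera_link and save_information; 'en-es' is an illustrative
# Yandex-style language pair and the result text is invented.
#
#   translation = TranslateLanguage.new('Hello world', 'en-es')
#   translation.information   # => "Text: Hello world to en-es \nResult: Hola mundo"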
end | 30.185185 | 93 | 0.706748 |
287b2dd44086cffe7ba23ff28ee9bc18e70b3403 | 1,378 | =begin
#Datadog API V2 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.0-SNAPSHOT
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for DatadogAPIClient::V2::RoleCreateAttributes
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe DatadogAPIClient::V2::RoleCreateAttributes do
let(:instance) { DatadogAPIClient::V2::RoleCreateAttributes.new }
describe 'test an instance of RoleCreateAttributes' do
it 'should create an instance of RoleCreateAttributes' do
expect(instance).to be_instance_of(DatadogAPIClient::V2::RoleCreateAttributes)
end
end
describe 'test attribute "created_at"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "modified_at"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "name"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 29.319149 | 102 | 0.75254 |
87905130d62fc23fe6eaa9ad840f8b7c21369358 | 1,421 | # -*- encoding: utf-8 -*-
# stub: notiffany 0.0.8 ruby lib
Gem::Specification.new do |s|
s.name = "notiffany"
s.version = "0.0.8"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Cezary Baginski", "R\u{e9}my Coutable", "Thibaud Guillaume-Gentil"]
s.date = "2015-09-15"
s.description = " Wrapper library for most popular notification\n libraries such as Growl, Libnotify, Notifu\n"
s.email = ["[email protected]"]
s.homepage = "https://github.com/guard/notiffany"
s.licenses = ["MIT"]
s.rubygems_version = "2.4.5.1"
s.summary = "Notifier library (extracted from Guard project)"
s.installed_by_version = "2.4.5.1" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<nenv>, ["~> 0.1"])
s.add_runtime_dependency(%q<shellany>, ["~> 0.0"])
s.add_development_dependency(%q<bundler>, ["~> 1.7"])
else
s.add_dependency(%q<nenv>, ["~> 0.1"])
s.add_dependency(%q<shellany>, ["~> 0.0"])
s.add_dependency(%q<bundler>, ["~> 1.7"])
end
else
s.add_dependency(%q<nenv>, ["~> 0.1"])
s.add_dependency(%q<shellany>, ["~> 0.0"])
s.add_dependency(%q<bundler>, ["~> 1.7"])
end
end
| 36.435897 | 118 | 0.642505 |
180441cb3bd89b3b033188c2b4016d00f4681359 | 909 | Pod::Spec.new do |s|
s.name = "FamilySearchCocoa"
s.version = "0.8.0"
s.summary = "An easy to use library for interacting with the FamilySearch.org API on iOS or OS X"
s.description = <<-DESC
You need to get a developer key from https://devnet.familysearch.org/ first. Then you can easily get the current user, add
parents, children, spouses and modify a persons events like birth, death, marriage date, etc. More is coming soon.
DESC
s.homepage = "https://github.com/FamilySearch/FamilySearchCocoa"
s.license = 'BSD'
s.author = { "Adam Kirk" => "[email protected]" }
s.source = { :git => "https://github.com/FamilySearch/FamilySearchCocoa.git", :tag => "0.8.0" }
s.source_files = 'FamilySearchCocoa/*.{h,m}'
s.requires_arc = true
s.dependency 'MTPocket'
s.dependency 'MTDates'
s.dependency 'MTJSONUtils'
end
| 47.842105 | 127 | 0.657866 |
1c226fd3e16cd536d6b31d4654ae99903e939b5d | 2,798 | # frozen_string_literal: true
require 'java'
require_relative '../../../../java/saxon9he.jar'
java_import 'java.io.StringReader'
java_import 'java.io.StringWriter'
java_import 'javax.xml.parsers.DocumentBuilder'
java_import 'javax.xml.parsers.DocumentBuilderFactory'
java_import 'javax.xml.transform.TransformerFactory'
java_import 'javax.xml.transform.Transformer'
java_import 'javax.xml.transform.dom.DOMSource'
java_import 'javax.xml.transform.stream.StreamSource'
java_import 'javax.xml.transform.stream.StreamResult'
java_import 'org.w3c.dom.Document'
TRANSFORMER_FACTORY_IMPL = 'net.sf.saxon.TransformerFactoryImpl'
module CqmValidators
module Schematron
module JavaProcessor
ISO_SCHEMATRON2 = File.join(DIR, 'lib/schematron/iso-schematron-xslt2/iso_svrl_for_xslt2.xsl')
class HdsUrlResolver
include javax.xml.transform.URIResolver
def initialize(schematron)
@file = schematron
end
def resolve(href, _base)
path = File.join(File.dirname(@file), href)
StreamSource.new(java.io.File.new(path))
end
end
def get_errors(document)
document_j = get_document_j(document)
output = build_transformer(StringReader.new(processor), StreamSource.new(document_j), true)
Nokogiri::XML(output)
end
def get_document_j(doc)
case doc
when File
java.io.File.new(doc.path)
else
StringReader.new(doc.to_s)
end
end
def processor
@processor ||= build_transformer(java.io.File.new(ISO_SCHEMATRON2), schematron_file)
end
def schematron_file
# this allows us to run the validation utility app in jBoss/TorqueBox
# for some reason it breaks the first time you call DocumentBuilderFactory,
# so the solution is to catch the error and retry
# TODO: pull this out when the above is no longer the case.
begin
dbf = DocumentBuilderFactory.new_instance
rescue Exception
retry
end
dbf.setIgnoringElementContentWhitespace(true)
db = dbf.new_document_builder
document = db.parse(java.io.File.new(@schematron_file))
root = document.document_element
root.set_attribute('defaultPhase', 'errors')
DOMSource.new(root)
end
def build_transformer(xslt, input_file, url = false)
factory = TransformerFactory.newInstance(TRANSFORMER_FACTORY_IMPL, nil)
factory.uri_resolver = HdsUrlResolver.new(@schematron_file) if url
transformer = factory.new_transformer(StreamSource.new(xslt))
sw = StringWriter.new
output = StreamResult.new(sw)
transformer.transform(input_file, output)
sw.to_s
end
end
end
end
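# Usage sketch (hedged, hypothetical validator): JavaProcessor expects the
# including class to provide @schematron_file; the file names below are invented.
#
#   class MyValidator
#     include CqmValidators::Schematron::JavaProcessor
#
#     def initialize(schematron_file)
#       @schematron_file = schematron_file
#     end
#   end
#
#   errors = MyValidator.new('rules/my_rules.sch').get_errors(File.read('ccda.xml'))
#   # => Nokogiri::XML::Document containing the SVRL validation report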
| 30.747253 | 100 | 0.690136 |
bf079a7de109edbcd63c692904a730b659f99024 | 950 | require 'spec_helper'
describe SearchController, type: :controller do
let(:single_events) { double('SingleEvents') }
let(:region_name) { 'koeln' }
let(:region) { double('Region') }
let(:region_slug) { double('RegionSlug', region: region) }
let(:search_params) { 'SearchParams' }
let(:search_results) { double('SingleEventsByDay') }
before do
allow(RegionSlug).to receive(:find_by_slug).with(region_name).and_return(region_slug)
allow(RegionSlug).to receive(:find_by_slug).with(nil).and_return(nil)
allow(SingleEvent).to receive(:search_in_region)
.with(search_params, region)
.and_return(single_events)
end
it 'should sort the search results from the database by day' do
allow(SingleEventsByDay).to receive(:new)
.with(single_events)
.and_return(search_results)
get :index, search: search_params, region: region_name
expect(assigns[:search_result]).to be search_results
end
end
| 33.928571 | 89 | 0.723158 |
61963cbf2967dd8402f300b0100f8998fcde62eb | 288 | require File.dirname(__FILE__) + '/../../spec_helper'
describe "Float#ceil" do
it "returns the smallest Integer greater than or equal to self" do
-1.2.ceil.should == -1
-1.0.ceil.should == -1
0.0.ceil.should == 0
1.3.ceil.should == 2
3.0.ceil.should == 3
end
end
| 24 | 68 | 0.625 |
bbcc73b9bc130b37b50fccdc2bff4e7f352a444d | 4,859 | # spec/sleeping_king_studios/tasks/ci/rspec_runner_spec.rb
require 'sleeping_king_studios/tasks/ci/rspec_runner'
RSpec.describe SleepingKingStudios::Tasks::Ci::RSpecRunner do
let(:default_env) do
{ :ci => true, :bundle_gemfile => ENV['BUNDLE_GEMFILE'] }
end # let
let(:default_opts) { ['--color'] }
let(:instance) do
described_class.new :env => default_env, :options => default_opts
end # let
describe '::new' do
it 'should define the constructor' do
expect(described_class).
to be_constructible.
with(0).arguments.
and_keywords(:env, :options)
end # it
end # describe
describe '#call' do
let(:report) do
{
'duration' => 1.0,
'example_count' => 3,
'failure_count' => 2,
'pending_count' => 1,
'error_count' => 0
} # end report
end # let
let(:summary_line) do
'3 examples, 2 failures, 1 pending in 1.0 seconds'
end # let
let(:report_file) { 'tmp/ci/rspec.json' }
let(:expected_files) { [] }
let(:expected_env) { {} }
let(:expected_options) { ['--format=json', "--out=#{report_file}"] }
let(:expected_command) do
opts = expected_files + default_opts + expected_options
env = default_env.merge expected_env
env = instance.send(:build_environment, :env => env)
"#{env} bundle exec rspec #{opts.join ' '}".strip
end # let
let(:expected_report) do
JSON.dump 'summary' => report, 'summary_line' => summary_line
end # let
before(:example) do
allow(instance).to receive(:stream_process)
allow(File).to receive(:read).and_return('{}')
end # before example
it 'should define the method' do
expect(instance).
to respond_to(:call).
with(0).arguments.
and_keywords(:env, :files, :options, :report)
end # it
it 'should call an rspec process' do
expect(instance).to receive(:stream_process).with(expected_command)
instance.call
end # it
it 'should load and parse the report file' do
expect(File).
to receive(:read).
with(report_file).
and_return(expected_report)
expect(instance.call).to be == report
end # it
context 'when the summary line lists errors' do
let(:report) do
{
'duration' => 1.0,
'example_count' => 3,
'failure_count' => 2,
'pending_count' => 1,
'error_count' => 1
} # end report
end # let
let(:summary_line) do
'3 examples, 2 failures, 1 error occurred outside of examples'
end # let
it 'should load and parse the report file' do
expect(File).
to receive(:read).
with(report_file).
and_return(expected_report)
expect(instance.call).to be == report
end # it
end # context
describe 'with :env => environment variables' do
let(:env) { { :bundle_gemfile => 'path/to/Gemfile' } }
let(:expected_env) do
super().merge :bundle_gemfile => 'path/to/Gemfile'
end # let
it 'should call an rspec process' do
expect(instance).to receive(:stream_process).with(expected_command)
instance.call :env => env
end # it
end # describe
describe 'with :files => file list' do
let(:expected_files) do
['spec/foo', 'spec/bar', 'spec/wibble/wobble_spec.rb']
end # let
it 'should call an rspec process' do
expect(instance).to receive(:stream_process).with(expected_command)
instance.call :files => expected_files
end # it
end # describe
describe 'with :options => custom options' do
let(:options) { ['--format=progress'] }
let(:expected_options) { options + super() }
it 'should call an rspec process' do
expect(instance).to receive(:stream_process).with(expected_command)
instance.call :options => options
end # it
end # describe
describe 'with :report => false' do
let(:expected_options) { [] }
it 'should call an rspec process' do
expect(instance).to receive(:stream_process).with(expected_command)
instance.call :report => false
end # it
it 'should not load the report file' do
expect(File).not_to receive(:read)
expect(instance.call :report => false).to be == {}
end # it
end # describe
describe 'with :report => file path' do
let(:report_file) { 'tmp/rspec.json' }
it 'should load and parse the report file' do
expect(File).
to receive(:read).
with(report_file).
and_return(JSON.dump 'summary' => report)
expect(instance.call :report => report_file).to be == report
end # it
end # describe
end # describe
end # describe
| 28.751479 | 75 | 0.599712 |
0138033658f6eb46fdf19b9499dd7e269a5bd585 | 30,063 | # frozen_string_literal: true
#
# Author:: Adam Jacob (<[email protected]>)
# Author:: Chris Read <[email protected]>
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Ohai.plugin(:Network) do
provides "network", "network/interfaces"
provides "counters/network", "counters/network/interfaces"
provides "ipaddress", "ip6address", "macaddress"
def linux_encaps_lookup(encap)
return "Loopback" if encap.eql?("Local Loopback") || encap.eql?("loopback")
return "PPP" if encap.eql?("Point-to-Point Protocol")
return "SLIP" if encap.eql?("Serial Line IP")
return "VJSLIP" if encap.eql?("VJ Serial Line IP")
return "IPIP" if encap.eql?("IPIP Tunnel")
return "6to4" if encap.eql?("IPv6-in-IPv4")
return "Ethernet" if encap.eql?("ether")
encap
end
def ipv6_enabled?
file_exist? "/proc/net/if_inet6"
end
def ethtool_binary_path
@ethtool ||= which("ethtool")
end
def is_openvz?
@openvz ||= file_directory?("/proc/vz")
end
def is_openvz_host?
is_openvz? && file_directory?("/proc/bc")
end
def extract_neighbors(family, iface, neigh_attr)
so = shell_out("ip -f #{family[:name]} neigh show")
so.stdout.lines do |line|
if line =~ /^([a-f0-9\:\.]+)\s+dev\s+([^\s]+)\s+lladdr\s+([a-fA-F0-9\:]+)/
interface = iface[$2]
unless interface
logger.warn("neighbor list has entries for unknown interface #{interface}")
next
end
interface[neigh_attr] ||= Mash.new
interface[neigh_attr][$1] = $3.downcase
end
end
iface
end
# checking the routing tables
# why ?
# 1) to set the default gateway and default interfaces attributes
# 2) on some occasions, the best way to select node[:ipaddress] is to look at
# the routing table source field.
# 3) and since we're at it, let's populate some :routes attributes
# (going to do that for both inet and inet6 addresses)
def check_routing_table(family, iface, default_route_table)
so = shell_out("ip -o -f #{family[:name]} route show table #{default_route_table}")
so.stdout.lines do |line|
line.strip!
logger.trace("Plugin Network: Parsing #{line}")
if /\\/.match?(line)
parts = line.split("\\")
route_dest = parts.shift.strip
route_endings = parts
elsif line =~ /^([^\s]+)\s(.*)$/
route_dest = $1
route_endings = [$2]
else
next
end
route_endings.each do |route_ending|
if route_ending =~ /\bdev\s+([^\s]+)\b/
route_int = $1
else
logger.trace("Plugin Network: Skipping route entry without a device: '#{line}'")
next
end
route_int = "venet0:0" if is_openvz? && !is_openvz_host? && route_int == "venet0" && iface["venet0:0"]
unless iface[route_int]
logger.trace("Plugin Network: Skipping previously unseen interface from 'ip route show': #{route_int}")
next
end
route_entry = Mash.new(destination: route_dest,
family: family[:name])
%w{via scope metric proto src}.each do |k|
# http://rubular.com/r/pwTNp65VFf
route_entry[k] = $1 if route_ending =~ /\b#{k}\s+([^\s]+)/
end
# https://rubular.com/r/k1sMrRn5yLjgVi
route_entry["via"] = $1 if route_ending =~ /\bvia\s+inet6\s+([^\s]+)/
# a sanity check, especially for Linux-VServer, OpenVZ and LXC:
# don't report the route entry if the src address isn't set on the node
# unless the interface has no addresses of this type at all
if route_entry[:src]
addr = iface[route_int][:addresses]
unless addr.nil? || addr.key?(route_entry[:src]) ||
addr.values.all? { |a| a["family"] != family[:name] }
logger.trace("Plugin Network: Skipping route entry whose src does not match the interface IP")
next
end
end
iface[route_int][:routes] = [] unless iface[route_int][:routes]
iface[route_int][:routes] << route_entry
end
end
iface
end
# now looking at the routes to set the default attributes
# for information, default routes can be of this form :
# - default via 10.0.2.4 dev br0
# - default dev br0 scope link
# - default dev eth0 scope link src 1.1.1.1
# - default via 10.0.3.1 dev eth1 src 10.0.3.2 metric 10
# - default via 10.0.4.1 dev eth2 src 10.0.4.2 metric 20
# using a temporary var to hold routes and their interface name
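# For illustration (a hedged sketch; the exact keys depend on the route line),
# the first default route above would yield an entry roughly like:
#   { "destination" => "default", "family" => "inet",
#     "via" => "10.0.2.4", "dev" => "br0" }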
def parse_routes(family, iface)
iface.filter_map do |i, iv|
next unless iv[:routes]
iv[:routes].filter_map do |r|
r.merge(dev: i) if r[:family] == family[:name]
end
end.flatten
end
# determine layer 1 details for the interface using ethtool
def ethernet_layer_one(iface)
return iface unless ethtool_binary_path
keys = %w{Speed Duplex Port Transceiver Auto-negotiation MDI-X}
iface.each_key do |tmp_int|
next unless iface[tmp_int][:encapsulation] == "Ethernet"
so = shell_out("#{ethtool_binary_path} #{tmp_int}")
so.stdout.lines do |line|
line.chomp!
logger.trace("Plugin Network: Parsing ethtool output: #{line}")
line.lstrip!
k, v = line.split(": ")
next unless keys.include? k
k.downcase!.tr!("-", "_")
if k == "speed"
k = "link_speed" # This is not necessarily the maximum speed the NIC supports
v = v[/\d+/].to_i
end
iface[tmp_int][k] = v
end
end
iface
end
# determine ring parameters for the interface using ethtool
def ethernet_ring_parameters(iface)
return iface unless ethtool_binary_path
iface.each_key do |tmp_int|
next unless iface[tmp_int][:encapsulation] == "Ethernet"
so = shell_out("#{ethtool_binary_path} -g #{tmp_int}")
logger.trace("Plugin Network: Parsing ethtool output: #{so.stdout}")
type = nil
iface[tmp_int]["ring_params"] = {}
so.stdout.lines.each do |line|
next if line.start_with?("Ring parameters for")
next if line.strip.empty?
if /Pre-set maximums/.match?(line)
type = "max"
next
end
if /Current hardware settings/.match?(line)
type = "current"
next
end
key, val = line.split(/:\s+/)
if type && val
ring_key = "#{type}_#{key.downcase.tr(" ", "_")}"
iface[tmp_int]["ring_params"][ring_key] = val.to_i
end
end
end
iface
end
# determine channel parameters for the interface using ethtool
def ethernet_channel_parameters(iface)
return iface unless ethtool_binary_path
iface.each_key do |tmp_int|
next unless iface[tmp_int][:encapsulation] == "Ethernet"
so = shell_out("#{ethtool_binary_path} -l #{tmp_int}")
logger.trace("Plugin Network: Parsing ethtool output: #{so.stdout}")
type = nil
iface[tmp_int]["channel_params"] = {}
so.stdout.lines.each do |line|
next if line.start_with?("Channel parameters for")
next if line.strip.empty?
if /Pre-set maximums/.match?(line)
type = "max"
next
end
if /Current hardware settings/.match?(line)
type = "current"
next
end
key, val = line.split(/:\s+/)
if type && val
channel_key = "#{type}_#{key.downcase.tr(" ", "_")}"
iface[tmp_int]["channel_params"][channel_key] = val.to_i
end
end
end
iface
end
# determine coalesce parameters for the interface using ethtool
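# For illustration (typical `ethtool -c` output): "Adaptive RX: off  TX: off" is
# split into coalesce_params["adaptive_rx"] = "off" and ["adaptive_tx"] = "off",
# while a line like "rx-usecs: 3" becomes coalesce_params["rx-usecs"] = 3.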
def ethernet_coalesce_parameters(iface)
return iface unless ethtool_binary_path
iface.each_key do |tmp_int|
next unless iface[tmp_int][:encapsulation] == "Ethernet"
so = shell_out("#{ethtool_binary_path} -c #{tmp_int}")
logger.trace("Plugin Network: Parsing ethtool output: #{so.stdout}")
iface[tmp_int]["coalesce_params"] = {}
so.stdout.lines.each do |line|
next if line.start_with?("Coalesce parameters for")
next if line.strip.empty?
if line.start_with?("Adaptive")
_, adaptive_rx, _, adaptive_tx = line.split(/:\s+|\s+TX|\n/)
iface[tmp_int]["coalesce_params"]["adaptive_rx"] = adaptive_rx
iface[tmp_int]["coalesce_params"]["adaptive_tx"] = adaptive_tx
next
end
key, val = line.split(/:\s+/)
if val
coalesce_key = key.downcase.tr(" ", "_").to_s
iface[tmp_int]["coalesce_params"][coalesce_key] = val.to_i
end
end
end
iface
end
# determine offload features for the interface using ethtool
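# Values such as "tx-checksumming: on [fixed]" (typical `ethtool -k` output) are
# normalised below to just "on"; any trailing "[...]" marker is stripped.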
def ethernet_offload_parameters(iface)
return iface unless ethtool_binary_path
iface.each_key do |tmp_int|
next unless iface[tmp_int][:encapsulation] == "Ethernet"
so = shell_out("#{ethtool_binary_path} -k #{tmp_int}")
logger.trace("Plugin Network: Parsing ethtool output: #{so.stdout}")
iface[tmp_int]["offload_params"] = {}
so.stdout.lines.each do |line|
next if line.start_with?("Features for")
next if line.strip.empty?
key, val = line.split(/:\s+/)
if val
offload_key = key.downcase.strip.tr(" ", "_").to_s
iface[tmp_int]["offload_params"][offload_key] = val.downcase.gsub(/\[.*\]/, "").strip.to_s
end
end
end
iface
end
# determine pause parameters for the interface using ethtool
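# Unlike the other ethtool parsers, values here are stored as booleans: an
# `ethtool -a` line such as "RX: on" (illustrative) becomes pause_params["rx"] = true.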
def ethernet_pause_parameters(iface)
return iface unless ethtool_binary_path
iface.each_key do |tmp_int|
next unless iface[tmp_int][:encapsulation] == "Ethernet"
so = shell_out("#{ethtool_binary_path} -a #{tmp_int}")
logger.trace("Plugin Network: Parsing ethtool output: #{so.stdout}")
iface[tmp_int]["pause_params"] = {}
so.stdout.lines.each do |line|
next if line.start_with?("Pause parameters for")
next if line.strip.empty?
key, val = line.split(/:\s+/)
if val
pause_key = key.downcase.tr(" ", "_")
iface[tmp_int]["pause_params"][pause_key] = val.strip.eql? "on"
end
end
end
iface
end
# determine driver info for the interface using ethtool
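# For illustration, `ethtool -i eth0` lines like "driver: e1000e" become
# driver_info["driver"] = "e1000e"; keys with no value (e.g. a bare
# "firmware-version:") are stored as empty strings.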
def ethernet_driver_info(iface)
return iface unless ethtool_binary_path
iface.each_key do |tmp_int|
next unless iface[tmp_int][:encapsulation] == "Ethernet"
so = shell_out("#{ethtool_binary_path} -i #{tmp_int}")
logger.trace("Plugin Network: Parsing ethtool output: #{so.stdout}")
iface[tmp_int]["driver_info"] = {}
so.stdout.lines.each do |line|
next if line.strip.empty?
key, val = line.split(/:\s+/)
if val.nil?
val = ""
end
driver_key = key.downcase.tr(" ", "_").to_s
iface[tmp_int]["driver_info"][driver_key] = val.chomp
end
end
iface
end
# determine link stats, vlans, queue length, and state for an interface using ip
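# `ip -d -s link` output looks roughly like this (abridged, for illustration only):
#   2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP ... qlen 1000
#       RX: bytes  packets  errors  dropped overrun mcast
#       14783290   25234    0       0       0       0
#       TX: bytes  packets  errors  dropped carrier collsns
#       1047832    8591     0       0       0       0
# The two numeric rows are picked up alternately as :rx and :tx counters below.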
def link_statistics(iface, net_counters)
so = shell_out("ip -d -s link")
tmp_int = nil
on_rx = true
so.stdout.lines do |line|
if line =~ IPROUTE_INT_REGEX
tmp_int = $2
iface[tmp_int] ||= Mash.new
net_counters[tmp_int] ||= Mash.new
end
if /^\s+(ip6tnl|ipip)/.match?(line)
iface[tmp_int][:tunnel_info] = {}
words = line.split
words.each_with_index do |word, index|
case word
when "external"
iface[tmp_int][:tunnel_info][word] = true
when "any", "ipip6", "ip6ip6"
iface[tmp_int][:tunnel_info][:proto] = word
when "remote",
"local",
"encaplimit",
"hoplimit",
"tclass",
"flowlabel",
"addrgenmode",
"numtxqueues",
"numrxqueues",
"gso_max_size",
"gso_max_segs"
iface[tmp_int][:tunnel_info][word] = words[index + 1]
end
end
end
if line =~ /(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)/
int = on_rx ? :rx : :tx
net_counters[tmp_int][int] ||= Mash.new
net_counters[tmp_int][int][:bytes] = $1
net_counters[tmp_int][int][:packets] = $2
net_counters[tmp_int][int][:errors] = $3
net_counters[tmp_int][int][:drop] = $4
if int == :rx
net_counters[tmp_int][int][:overrun] = $5
else
net_counters[tmp_int][int][:carrier] = $5
net_counters[tmp_int][int][:collisions] = $6
end
on_rx = !on_rx
end
if line =~ /qlen (\d+)/
net_counters[tmp_int][:tx] ||= Mash.new
net_counters[tmp_int][:tx][:queuelen] = $1
end
if line =~ /vlan id (\d+)/ || line =~ /vlan protocol ([\w\.]+) id (\d+)/
if $2
tmp_prot = $1
tmp_id = $2
else
tmp_id = $1
end
iface[tmp_int][:vlan] ||= Mash.new
iface[tmp_int][:vlan][:id] = tmp_id
iface[tmp_int][:vlan][:protocol] = tmp_prot if tmp_prot
vlan_flags = line.scan(/(REORDER_HDR|GVRP|LOOSE_BINDING)/)
if vlan_flags.length > 0
iface[tmp_int][:vlan][:flags] = vlan_flags.flatten.uniq
end
end
# https://rubular.com/r/JRp6lNANmpcLV5
if line =~ /\sstate (\w+)/
iface[tmp_int]["state"] = $1.downcase
end
end
iface
end
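# parse the lead line of an iproute2 stanza (matched by IPROUTE_INT_REGEX), create
# the interface Mash and record its type/number, mtu and flags; returns the current
# interface name so the caller can attach the stanza's remaining lines to it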
def match_iproute(iface, line, cint)
if line =~ IPROUTE_INT_REGEX
cint = $2
iface[cint] = Mash.new
if cint =~ /^(\w+?)(\d+.*)/
iface[cint][:type] = $1
iface[cint][:number] = $2
end
if line =~ /mtu (\d+)/
iface[cint][:mtu] = $1
end
flags = line.scan(/(UP|BROADCAST|DEBUG|LOOPBACK|POINTTOPOINT|NOTRAILERS|LOWER_UP|NOARP|PROMISC|ALLMULTI|SLAVE|MASTER|MULTICAST|DYNAMIC)/)
if flags.length > 1
iface[cint][:flags] = flags.flatten.uniq
end
end
cint
end
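# walk `ip addr` output line by line, dispatching to the link/inet/inet6 helpers below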
def parse_ip_addr(iface)
so = shell_out("ip addr")
cint = nil
so.stdout.lines do |line|
cint = match_iproute(iface, line, cint)
parse_ip_addr_link_line(cint, iface, line)
cint = parse_ip_addr_inet_line(cint, iface, line)
parse_ip_addr_inet6_line(cint, iface, line)
end
end
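# record the encapsulation and link-layer (lladdr) address from a "link/..." line;
# an all-zero MAC is ignored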
def parse_ip_addr_link_line(cint, iface, line)
if line =~ %r{link/(\w+) ([\da-f\:]+) }
iface[cint][:encapsulation] = linux_encaps_lookup($1)
unless $2 == "00:00:00:00:00:00"
iface[cint][:addresses] ||= Mash.new
iface[cint][:addresses][$2.upcase] = { "family" => "lladdr" }
end
end
end
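# parse an "inet a.b.c.d/nn ..." line: store the address with its prefix, netmask,
# peer, broadcast and scope; formerly aliased interfaces such as "eth0:0" get their
# own entry keyed on the alias name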
def parse_ip_addr_inet_line(cint, iface, line)
if line =~ %r{inet (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(/(\d{1,2}))?}
tmp_addr, tmp_prefix = $1, $3
tmp_prefix ||= "32"
original_int = nil
# Are we a formerly aliased interface?
if line =~ /#{cint}:(\d+)$/
sub_int = $1
alias_int = "#{cint}:#{sub_int}"
original_int = cint
cint = alias_int
end
iface[cint] ||= Mash.new # Create the fake alias interface if needed
iface[cint][:addresses] ||= Mash.new
iface[cint][:addresses][tmp_addr] = { "family" => "inet", "prefixlen" => tmp_prefix }
iface[cint][:addresses][tmp_addr][:netmask] = IPAddr.new("255.255.255.255").mask(tmp_prefix.to_i).to_s
if line =~ /peer (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/
iface[cint][:addresses][tmp_addr][:peer] = $1
end
if line =~ /brd (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/
iface[cint][:addresses][tmp_addr][:broadcast] = $1
end
if line =~ /scope (\w+)/
iface[cint][:addresses][tmp_addr][:scope] = ($1.eql?("host") ? "Node" : $1.capitalize)
end
# If we found we were an alias interface, restore cint to its original value
cint = original_int unless original_int.nil?
end
cint
end
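# parse an "inet6 addr/prefix scope X ..." line, keeping any trailing tags
# (for example "temporary" or "deprecated") as an array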
def parse_ip_addr_inet6_line(cint, iface, line)
if line =~ %r{inet6 ([a-f0-9\:]+)/(\d+) scope (\w+)( .*)?}
iface[cint][:addresses] ||= Mash.new
tmp_addr = $1
tags = $4 || ""
tags = tags.split
iface[cint][:addresses][tmp_addr] = {
"family" => "inet6",
"prefixlen" => $2,
"scope" => ($3.eql?("host") ? "Node" : $3.capitalize),
"tags" => tags,
}
end
end
# returns the macaddress for interface from a hash of interfaces (iface elsewhere in this file)
def get_mac_for_interface(interfaces, interface)
interfaces[interface][:addresses].find { |_k, v| v["family"] == "lladdr" }&.first unless interfaces[interface][:addresses].nil? || interfaces[interface][:flags].include?("NOARP")
end
# returns the default route with the lowest metric (unspecified metric is 0)
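# e.g. given default routes with metrics 10 and 20 (illustrative values), the
# metric-10 entry is chosen; a route without a metric sorts as metric 0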
def choose_default_route(routes)
routes.select do |r|
r[:destination] == "default"
end.min do |x, y|
(x[:metric].nil? ? 0 : x[:metric].to_i) <=> (y[:metric].nil? ? 0 : y[:metric].to_i)
end
end
def interface_has_no_addresses_in_family?(iface, family)
return true if iface[:addresses].nil?
iface[:addresses].values.all? { |addr| addr["family"] != family }
end
def interface_have_address?(iface, address)
return false if iface[:addresses].nil?
iface[:addresses].key?(address)
end
def interface_address_not_link_level?(iface, address)
!(iface[:addresses][address][:scope].casecmp("link") == 0)
end
def interface_valid_for_route?(iface, address, family)
return true if interface_has_no_addresses_in_family?(iface, family)
interface_have_address?(iface, address) && interface_address_not_link_level?(iface, address)
end
def route_is_valid_default_route?(route, default_route)
# if the route destination is a default route, it's good
return true if route[:destination] == "default"
return false if default_route[:via].nil?
dest_ipaddr = IPAddr.new(route[:destination])
default_route_via = IPAddr.new(default_route[:via])
# check if nexthop is the same address family
return false if dest_ipaddr.ipv4? != default_route_via.ipv4?
# the default route has a gateway and the route matches the gateway
dest_ipaddr.include?(default_route_via)
end
# ipv4/ipv6 routes are different enough that having a single algorithm to select the favored route for both creates unnecessary complexity
# this method attempts to deduce the route that is most important to the user, which is later used to deduce the favored values for {ip,mac,ip6}address
# we only consider routes that are default routes, or those routes that get us to the gateway for a default route
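# As a sketch (made-up addresses): if the default route is "default via 10.0.2.4
# dev br0" with no src, it is skipped for inet, but "10.0.2.0/24 dev br0 src
# 10.0.2.15" reaches the gateway and so supplies the favored src address; ties are
# broken by preferring true default routes, then the lowest metric, then the lowest
# prefix length.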
def favored_default_route_linux(routes, iface, default_route, family)
routes.select do |r|
if family[:name] == "inet"
# the route must have a source address
next if r[:src].nil? || r[:src].empty?
# the interface specified in the route must exist
route_interface = iface[r[:dev]]
next if route_interface.nil?
# the interface must have no addresses, or if it has the source address, the address must not
# be a link-level address
next unless interface_valid_for_route?(route_interface, r[:src], "inet")
# the route must either be a default route, or it must have a gateway which is accessible via the route
next unless route_is_valid_default_route?(r, default_route)
true
elsif family[:name] == "inet6"
iface[r[:dev]] &&
iface[r[:dev]][:state] == "up" &&
route_is_valid_default_route?(r, default_route)
end
end.min_by do |r|
# sorting the selected routes:
# - getting default routes first
# - then sort by metric
# - then by prefixlen
[
r[:destination] == "default" ? 0 : 1,
r[:metric].nil? ? 0 : r[:metric].to_i,
# for some reason IPAddress doesn't accept "::/0", it doesn't like prefix==0
# just a quick workaround: use 0 if IPAddress fails
begin
IPAddress(r[:destination] == "default" ? family[:default_route] : r[:destination]).prefix
rescue
0
end,
]
end
end
# Both the network plugin and this plugin (linux/network) are run on linux. This plugin runs first.
# If the 'ip' binary is available, this plugin may set {ip,mac,ip6}address. The network plugin should not overwrite these.
# The older code section below that relies on the deprecated net-tools, e.g. netstat and ifconfig, provides less functionality.
collect_data(:linux) do
require "ipaddr" unless defined?(IPAddr)
iface = Mash.new
net_counters = Mash.new
network Mash.new unless network
network[:interfaces] ||= Mash.new
counters Mash.new unless counters
counters[:network] ||= Mash.new
# ohai.plugin[:network][:default_route_table] = 'default'
if configuration(:default_route_table).nil? || configuration(:default_route_table).empty?
default_route_table = "main"
else
default_route_table = configuration(:default_route_table)
end
logger.trace("Plugin Network: default route table is '#{default_route_table}'")
# Match the lead line for an interface from iproute2
# 3: eth0.11@eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc noqueue state UP
# The '@eth0:' portion doesn't exist on primary interfaces and thus is optional in the regex
IPROUTE_INT_REGEX ||= /^(\d+): ([0-9a-zA-Z@:\.\-_]*?)(@[0-9a-zA-Z]+|):\s/.freeze
if which("ip")
# families to get default routes from
families = [{
name: "inet",
default_route: "0.0.0.0/0",
default_prefix: :default,
neighbour_attribute: :arp,
}]
if ipv6_enabled?
families << {
name: "inet6",
default_route: "::/0",
default_prefix: :default_inet6,
neighbour_attribute: :neighbour_inet6,
}
end
parse_ip_addr(iface)
iface = link_statistics(iface, net_counters)
families.each do |family|
neigh_attr = family[:neighbour_attribute]
default_prefix = family[:default_prefix]
iface = extract_neighbors(family, iface, neigh_attr)
iface = check_routing_table(family, iface, default_route_table)
routes = parse_routes(family, iface)
default_route = choose_default_route(routes)
if default_route.nil? || default_route.empty?
attribute_name = if family[:name] == "inet"
"default_interface"
else
"default_#{family[:name]}_interface"
end
logger.trace("Plugin Network: Unable to determine '#{attribute_name}' as no default routes were found for that interface family")
else
network["#{default_prefix}_interface"] = default_route[:dev]
logger.trace("Plugin Network: #{default_prefix}_interface set to #{default_route[:dev]}")
# setting gateway to 0.0.0.0 or :: if the default route is a link level one
network["#{default_prefix}_gateway"] = default_route[:via] || family[:default_route].chomp("/0")
logger.trace("Plugin Network: #{default_prefix}_gateway set to #{network["#{default_prefix}_gateway"]}")
# deduce the default route the user most likely cares about to pick {ip,mac,ip6}address below
favored_route = favored_default_route_linux(routes, iface, default_route, family)
# FIXME: This entire block should go away, and the network plugin should be the sole source of {ip,ip6,mac}address
# since we're at it, let's populate {ip,mac,ip6}address with the best values
# if we don't set these, the network plugin may set them afterwards
if favored_route && !favored_route.empty?
if family[:name] == "inet"
ipaddress favored_route[:src]
m = get_mac_for_interface(iface, favored_route[:dev])
logger.trace("Plugin Network: Overwriting macaddress #{macaddress} with #{m} from interface #{favored_route[:dev]}") if macaddress
macaddress m
elsif family[:name] == "inet6"
# this rarely does anything since we rarely have src for ipv6, so this usually falls back on the network plugin
ip6address favored_route[:src]
if macaddress
logger.trace("Plugin Network: Not setting macaddress from ipv6 interface #{favored_route[:dev]} because macaddress is already set")
else
macaddress get_mac_for_interface(iface, favored_route[:dev])
end
end
else
logger.trace("Plugin Network: Unable to deduce the favored default route for family '#{family[:name]}' despite finding a default route, and is not setting ipaddress/ip6address/macaddress. the network plugin may provide fallbacks.")
logger.trace("Plugin Network: This potential default route was excluded: #{default_route}")
end
end
end # end families.each
else # ip binary not available, falling back to net-tools, e.g. route, ifconfig
begin
so = shell_out("route -n")
# match only the all-zeros destination (dots escaped) and skip the lookup when no default route is present
default_route_line = so.stdout.split($/).grep(/^0\.0\.0\.0/).first
route_result = default_route_line.split(/[ \t]+/) if default_route_line
network[:default_gateway], network[:default_interface] = route_result.values_at(1, 7) if route_result
rescue Ohai::Exceptions::Exec
logger.trace("Plugin Network: Unable to determine default interface")
end
so = shell_out("ifconfig -a")
cint = nil
so.stdout.lines do |line|
tmp_addr = nil
# dev_valid_name in the kernel only excludes slashes, nulls, spaces
# http://git.kernel.org/?p=linux/kernel/git/stable/linux-stable.git;a=blob;f=net/core/dev.c#l851
if line =~ /^([0-9a-zA-Z@\.\:\-_]+)\s+/
cint = $1
iface[cint] = Mash.new
if cint =~ /^(\w+?)(\d+.*)/
iface[cint][:type] = $1
iface[cint][:number] = $2
end
end
if line =~ /Link encap:(Local Loopback)/ || line =~ /Link encap:(.+?)\s/
iface[cint][:encapsulation] = linux_encaps_lookup($1)
end
if line =~ /HWaddr (.+?)\s/
iface[cint][:addresses] ||= Mash.new
iface[cint][:addresses][$1] = { "family" => "lladdr" }
end
if line =~ /inet addr:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/
iface[cint][:addresses] ||= Mash.new
iface[cint][:addresses][$1] = { "family" => "inet" }
tmp_addr = $1
end
if line =~ %r{inet6 addr: ([a-f0-9\:]+)/(\d+) Scope:(\w+)}
iface[cint][:addresses] ||= Mash.new
iface[cint][:addresses][$1] = { "family" => "inet6", "prefixlen" => $2, "scope" => ($3.eql?("Host") ? "Node" : $3) }
end
if line =~ /Bcast:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/
iface[cint][:addresses][tmp_addr]["broadcast"] = $1
end
if line =~ /Mask:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/
iface[cint][:addresses][tmp_addr]["netmask"] = $1
end
flags = line.scan(/(UP|BROADCAST|DEBUG|LOOPBACK|POINTTOPOINT|NOTRAILERS|RUNNING|NOARP|PROMISC|ALLMULTI|SLAVE|MASTER|MULTICAST|DYNAMIC)\s/)
if flags.length > 1
iface[cint][:flags] = flags.flatten
end
if line =~ /MTU:(\d+)/
iface[cint][:mtu] = $1
end
if line =~ /P-t-P:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/
iface[cint][:peer] = $1
end
if line =~ /RX packets:(\d+) errors:(\d+) dropped:(\d+) overruns:(\d+) frame:(\d+)/
net_counters[cint] ||= Mash.new
net_counters[cint][:rx] = { "packets" => $1, "errors" => $2, "drop" => $3, "overrun" => $4, "frame" => $5 }
end
if line =~ /TX packets:(\d+) errors:(\d+) dropped:(\d+) overruns:(\d+) carrier:(\d+)/
net_counters[cint][:tx] = { "packets" => $1, "errors" => $2, "drop" => $3, "overrun" => $4, "carrier" => $5 }
end
if line =~ /collisions:(\d+)/
net_counters[cint][:tx]["collisions"] = $1
end
if line =~ /txqueuelen:(\d+)/
net_counters[cint][:tx]["queuelen"] = $1
end
if line =~ /RX bytes:(\d+) \((\d+?\.\d+ .+?)\)/
net_counters[cint][:rx]["bytes"] = $1
end
if line =~ /TX bytes:(\d+) \((\d+?\.\d+ .+?)\)/
net_counters[cint][:tx]["bytes"] = $1
end
end
so = shell_out("arp -an")
so.stdout.lines do |line|
if line =~ /^\S+ \((\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\) at ([a-fA-F0-9\:]+) \[(\w+)\] on ([0-9a-zA-Z\.\:\-]+)/
next unless iface[$4] # this should never happen
iface[$4][:arp] ||= Mash.new
iface[$4][:arp][$1] = $2.downcase
end
end
end # end "ip else net-tools" block
iface = ethernet_layer_one(iface)
iface = ethernet_ring_parameters(iface)
iface = ethernet_channel_parameters(iface)
iface = ethernet_coalesce_parameters(iface)
iface = ethernet_offload_parameters(iface)
iface = ethernet_driver_info(iface)
iface = ethernet_pause_parameters(iface)
counters[:network][:interfaces] = net_counters
network["interfaces"] = iface
end
end
| 36.351874 | 243 | 0.599308 |
185d70920c33b545b0c08aa6b536711d751b9cf1 | 1,064 | require File.expand_path('../boot', __FILE__)
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module StocksApi
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
end
end
| 34.322581 | 82 | 0.770677 |
1d5a87dd0cc4347b93684080ac43eab53530394f | 183 | class AddFeatureStatusToSolusPlans < ActiveRecord::Migration
def change
add_column :solus_plans, :feature_status, :integer
add_index :solus_plans, :feature_status
end
end
| 26.142857 | 60 | 0.79235 |
f82a757ce39720ff4e89635d1e521ffba0e31f92 | 68 | FactoryGirl.define do
factory :import_job_error do
end
end
| 11.333333 | 30 | 0.735294 |
61e1cf994262d01bc7bc59d76d6660b5f481b34a | 539 | # Be sure to restart your server when you modify this file.
# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'
# Add additional assets to the asset load path
# Rails.application.config.assets.paths << Emoji.images_path
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# Rails.application.config.assets.precompile += %w( search.js )
Rails.application.config.serve_static_assets = false
| 41.461538 | 93 | 0.779221 |