hexsha (string) | size (int64) | content (string) | avg_line_length (float64) | max_line_length (int64) | alphanum_fraction (float64) |
---|---|---|---|---|---|
e8b8b9099d89aedfebecd7f0f942f408074c0e62 | 1,144 | require 'redcarpet'
require 'coderay'
module Highlighter
class << self
def registered(app)
app.helpers Helpers
end
alias :included :registered
end
module Helpers
def _highlight(string, language, class_name=nil)
result = %Q{<div class="highlight #{language} #{class_name}">}
result += '<div class="ribbon"></div>'
result += '<div class="scroller">'
code = CodeRay.scan(string, language)
result += code.div css: :class,
line_numbers: :table,
line_number_anchors: false
result += '</div>'
result += %Q{</div>}
result
end
def highlight(language, class_name, &block)
concat(_highlight(capture(&block), language, class_name))
end
end
class HighlightedHTML < Redcarpet::Render::HTML
include Helpers
# def header(text, level)
# "<h#{level} class='anchorable-toc' id='toc_#{TOC::TableOfContents.anchorify(text)}'>#{text}</h#{level}>"
# end
def block_code(code, language)
_highlight(code, language)
end
end
end
::Middleman::Extensions.register(:highlighter, Highlighter)
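# Illustrative usage sketch (an assumption, not part of the original file): once
# the extension is registered as above, a Middleman site would typically enable
# it from config.rb and call the block helper from a template, e.g.
#
#   # config.rb
#   activate :highlighter
#
#   # template.html.erb
#   <% highlight :ruby, 'example' do %>
#     puts "hello"
#   <% end %>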
| 25.422222 | 112 | 0.624126 |
d5b679a34ce6e1b4b32ea27553e2212671fe9edc | 198 | require 'togostanza'
require_relative '../stanza'
FindSentenceFromTermStanza.root = File.expand_path('../..', __FILE__)
TogoStanza.sprockets.append_path File.expand_path('../../assets', __FILE__)
| 28.285714 | 75 | 0.762626 |
269ec469fa287ced1a2e1c789bf0153c7d538f56 | 1,454 | class Libice < Formula
desc "X.Org: Inter-Client Exchange Library"
homepage "https://www.x.org/"
url "https://www.x.org/archive/individual/lib/libICE-1.0.10.tar.bz2"
sha256 "6f86dce12cf4bcaf5c37dddd8b1b64ed2ddf1ef7b218f22b9942595fb747c348"
license "MIT"
bottle do
cellar :any
sha256 "4c5c97814304360fdaeec959107e79e9fdb62ba151159ca55342944efec4bd82" => :catalina
sha256 "d7249247483e6ee2787e66c7f887a7df52aedd5abd2558ae377b5d16e3b6275e" => :mojave
sha256 "b5f1f14bc4fd8d18fd19b2552ddc898f53f573015de0706289de54c177b16eb4" => :high_sierra
sha256 "935f190724128149ac5c9d38202080532a071a4e8ae1248a794824a9dee61f12" => :x86_64_linux
end
depends_on "pkg-config" => :build
depends_on "xtrans" => :build
depends_on "libx11"=> :test
depends_on "xorgproto"
depends_on "libx11"=> :test unless OS.mac?
def install
args = %W[
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--disable-dependency-tracking
--disable-silent-rules
--enable-docs=no
--enable-specs=no
]
system "./configure", *args
system "make"
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include "X11/Xlib.h"
#include "X11/ICE/ICEutil.h"
int main(int argc, char* argv[]) {
IceAuthFileEntry entry;
return 0;
}
EOS
system ENV.cc, "test.c"
assert_equal 0, $CHILD_STATUS.exitstatus
end
end
| 27.433962 | 94 | 0.688446 |
1a869063189cf84c1d6830c183aa079c4ee6e93e | 1,532 | #!/usr/bin/env ruby
# coding: utf-8
# Proxy for this gdbstub + EverDrive-64
# usage: ruby $0 /dev/ttyUSB0 [port]
require 'io/console'
require 'socket'
def read_whole_timeout io, size, timeout
buf = ''.b
while buf.size < size
rs, _, _ = IO.select([io], nil, nil, timeout)
rs.nil? or rs.size == 0 and return buf
buf += io.readpartial size - buf.size
end
return buf
end
def interact remote, local, cut=false
loop {
rs, _, es = IO.select([remote, local[:r]], nil, [remote, local[:r]])
if not es.empty?
$stderr.puts es.inspect
break
end
if rs.include?(remote)
r = read_whole_timeout(remote, 512, 0.5)
cut and r.sub!(/\0*$/n, '')
cut and $stderr.puts "> #{r.inspect}"
local[:w].write(r)
end
if rs.include?(local[:r])
local[:r].eof? and break
r = local[:r].readpartial(2048)
if not cut and r[0] == '!'
r = '$' + r[1..-2] + '#' + '%02x' % [r[1..-2].bytes.inject(&:+) & 0xFF]
end
cut and $stderr.puts "< #{r.inspect}"
r = r.ljust((r.size + 511) & -512, "\0")
remote.write(r)
end
}
end
File.open(ARGV[0], 'r+b') { |com|
com.raw!
com.sync = true
if ARGV[1]
ARGV[1] =~ /\A(?:(.+):)?(.+)\z/
addr = $1 || '127.0.0.1'
port = $2
TCPServer.open(addr, port) { |ss|
loop {
puts "listening #{addr}:#{port}"
s = ss.accept
puts "accept from #{s.peeraddr.values_at(3,1).inspect}"
interact com, {r: s, w: s}, true
puts "disconnected"
}
}
else
$stdout.puts "com64 ready"
$stdout.sync = true
interact com, {r: $stdin, w: $stdout}
end
}
| 22.202899 | 75 | 0.578982 |
7a10398d67a6dfff822e1949be9621459ee7cd18 | 1,196 | #
# Copyright:: Copyright (c) 2012-2014 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
name "ffi-yajl"
default_version "master"
relative_path "ffi-yajl"
source git: "git://github.com/lamont-granquist/ffi-yajl"
if windows?
dependency "ruby-windows"
dependency "ruby-windows-devkit"
else
dependency "libffi"
dependency "ruby"
dependency "rubygems"
end
dependency "bundler"
env = with_embedded_path
build do
bundle "install --without development_extras", env: env
bundle "exec rake gem", env: env
delete "pkg/*java*", env: env
gem ["install pkg/ffi-yajl-*.gem",
"--no-rdoc --no-ri"].join(" "), env: env
end
| 27.181818 | 74 | 0.728261 |
edbc049092cc05a8982bbde8294cbdce0a0a0803 | 269 | class CreateMaterialSheets < ActiveRecord::Migration
def change
create_table :material_sheets do |t|
t.string :material_id
t.integer :cantidad
t.references :sheet, index: true, foreign_key: true
t.timestamps null: false
end
end
end
| 22.416667 | 57 | 0.698885 |
3861549459c286a4126c9022abbd5a8a4d2e363e | 468 | # Copyright (c) 2016 Jasper Lievisse Adriaanse <[email protected]>
# See LICENSE for details
require 'dockertags/version'
require 'dockertags/log'
require 'dockertags/db'
require 'dockertags/utils'
require 'dockertags/cli'
require 'dockertags/commands'
module DockerTags
unless defined? @@database_path
@@database_path = 'docker-tags.db'
end
def self.database_path
@@database_path
end
def self.database_path=(path)
@@database_path = path
end
end
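# Illustrative usage sketch (an assumption, not part of the original file): the
# class-level accessor defined above lets callers point the gem at a different
# database file before invoking any commands, e.g.
#
#   DockerTags.database_path                                   # => "docker-tags.db" (default)
#   DockerTags.database_path = '/var/lib/docker-tags/tags.db'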
| 19.5 | 60 | 0.745726 |
79264e0933d3c92d30837d66b73abf50ee4f42b8 | 2,681 | # frozen_string_literal: true
require 'uri'
# A parser and emitter for Problem Details for HTTP APIs.
# @see https://tools.ietf.org/html/rfc7807
module ProblemDetail
# This document defines a "problem detail" as a way to carry machine-readable
# details of errors in a HTTP response, to avoid the need to define new error
# response formats for HTTP APIs.
class Document
# A problem details object can have some members.
#
# @param title [#to_s] A short, human-readable summary of the problem type.
# It SHOULD NOT change from occurrence to occurrence of the problem,
# except for purposes of localisation (e.g., using proactive content
# negotiation).
# @param type [#to_s, nil] A URI reference that identifies the problem type.
# When dereferenced, it is encouraged to provide human-readable
# documentation for the problem type (e.g., using HTML). When this member
# is not present, its value is assumed to be "about:blank".
# @param options [Hash] Extend the problem details with additional members
# such as:
# * status: the HTTP status code generated by the origin server for this
# occurrence of the problem;
# * detail: a human-readable explanation specific to this occurrence of
# the problem;
# * instance: a URI reference that identifies the specific occurrence of
# the problem.
def initialize(title:, type: 'about:blank', **options)
@title = title.to_s
@type = URI(type.to_s)
@options = options
end
# @!attribute [r] title
#
# @return [String] A short, human-readable summary of the problem type.
attr_reader :title
# @!attribute [r] type
#
# @return [URI] A URI reference that identifies the problem type.
attr_reader :type
# @return [Fixnum, nil] The HTTP status code generated by the origin server.
def status
@options[:status]&.to_i
end
# @return [String, nil] A human-readable explanation.
def detail
@options[:detail]&.to_s
end
# @return [URI, nil] The specific occurrence of the problem.
def instance
URI(@options[:instance].to_s) unless @options[:instance].nil?
end
# @return [Hash] Additional members.
def options
@options.reject { |k, _v| %i(status detail instance).include?(k) }
end
# Properties of the result.
#
# @return [Hash] The properties of the result.
def to_h
options.merge({
status: status,
detail: detail,
instance: instance
}.reject { |_k, v| v.nil? }).merge(
title: title,
type: type
)
end
end
end
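# Minimal usage sketch (an assumption, not part of the original file): builds a
# problem-details document with the constructor documented above and dumps it as
# a Hash, e.g. for rendering as application/problem+json. Values are illustrative.
if __FILE__ == $PROGRAM_NAME
  problem = ProblemDetail::Document.new(
    title: 'Out of credit',
    type: 'https://example.com/probs/out-of-credit',
    status: 403,
    detail: 'Your current balance is 30, but that costs 50.',
    instance: '/account/12345/msgs/abc'
  )
  # Prints a Hash with :title, :type, :status, :detail and :instance keys.
  p problem.to_h
end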
| 33.098765 | 80 | 0.651623 |
bb42fdd21101c68ed65661361bbe95bc06443e27 | 3,959 | require 'spec_helper'
describe Admin::NomenclatureChanges::StatusSwapController do
login_admin
include_context 'status_change_definitions'
describe 'GET show' do
context 'primary_output' do
before(:each) do
@status_change = create(:nomenclature_change_status_swap)
end
it 'renders the primary_output template' do
get :show, id: :primary_output, nomenclature_change_id: @status_change.id
response.should render_template('primary_output')
end
end
context 'swap' do
before(:each) do
@status_change = a_to_s_with_swap
end
it 'renders the swap template' do
get :show, id: :secondary_output, nomenclature_change_id: @status_change.id
response.should render_template('secondary_output')
end
end
context 'reassignments' do
before(:each) do
@status_change = a_to_s_with_swap
end
context "when legislation present" do
before(:each) do
create_cites_I_addition(taxon_concept: input_species)
end
it 'renders the legislation template' do
get :show, id: :legislation, nomenclature_change_id: @status_change.id
response.should render_template('legislation')
end
end
context "when no legislation" do
it 'redirects to next step' do
get :show, id: :legislation, nomenclature_change_id: @status_change.id
response.should redirect_to(
admin_nomenclature_change_status_swap_url(
nomenclature_change_id: assigns(:nomenclature_change).id, :id => 'summary'
)
)
end
end
end
context 'summary' do
before(:each) do
@status_change = a_to_s_with_swap
end
it 'renders the summary template' do
get :show, id: :summary, nomenclature_change_id: @status_change.id
response.should render_template('summary')
end
end
end
describe 'POST create' do
it 'redirects to status_change wizard' do
post :create, nomenclature_change_id: 'new'
response.should redirect_to(
admin_nomenclature_change_status_swap_url(
nomenclature_change_id: assigns(:nomenclature_change).id, :id => 'primary_output'
)
)
end
end
describe 'PUT update' do
before(:each) do
@status_change = create(:nomenclature_change_status_swap)
end
context 'when successful' do
it 'redirects to next step' do
put :update, nomenclature_change_status_swap: {
primary_output_attributes: {
taxon_concept_id: create_cites_eu_species.id,
new_name_status: 'S'
}
}, nomenclature_change_id: @status_change.id, id: 'primary_output'
response.should redirect_to(
admin_nomenclature_change_status_swap_url(
nomenclature_change_id: assigns(:nomenclature_change).id, :id => 'secondary_output'
)
)
end
end
context 'when unsuccessful' do
it 're-renders step' do
put :update, nomenclature_change_status_swap: {},
nomenclature_change_id: @status_change.id, id: 'primary_output'
response.should render_template('primary_output')
end
end
context 'when last step' do
context 'when user is secretariat' do
login_secretariat_user
it 'redirects to admin root path' do
put :update, nomenclature_change_id: @status_change.id, id: 'summary'
response.should redirect_to admin_root_path
end
end
context 'when user is manager' do
it 'redirects to nomenclature changes path' do
pending("Strange render mismatch after upgrading to Rails 4")
put :update, nomenclature_change_id: @status_change.id, id: 'summary'
response.should be_successful
response.should render_template("nomenclature_changes")
end
end
end
end
end
| 33.550847 | 95 | 0.65951 |
385a9d227eaaa009a7d5775b802818d2467ad25e | 23,867 | require 'test_helper'
class VersionTest < ActiveSupport::TestCase
should belong_to :rubygem
should have_many :dependencies
context "#as_json" do
setup do
@version = create(:version)
end
should "only have relevant API fields" do
json = @version.as_json
assert_equal %w[number built_at summary description authors platform ruby_version prerelease downloads_count licenses requirements].map(&:to_s).sort, json.keys.sort
assert_equal @version.authors, json["authors"]
assert_equal @version.built_at, json["built_at"]
assert_equal @version.description, json["description"]
assert_equal @version.downloads_count, json["downloads_count"]
assert_equal @version.number, json["number"]
assert_equal @version.platform, json["platform"]
assert_equal @version.prerelease, json["prerelease"]
assert_equal @version.ruby_version, json["ruby_version"]
assert_equal @version.summary, json["summary"]
assert_equal @version.licenses, json["licenses"]
assert_equal @version.requirements, json["requirements"]
end
end
context "#to_xml" do
setup do
@version = create(:version)
end
should "only have relevant API fields" do
xml = Nokogiri.parse(@version.to_xml)
assert_equal %w[number built-at summary description authors platform ruby-version prerelease downloads-count licenses requirements].map(&:to_s).sort, xml.root.children.map{|a| a.name}.reject{|t| t == "text"}.sort
assert_equal @version.authors, xml.at_css("authors").content
assert_equal @version.built_at.to_i, xml.at_css("built-at").content.to_time.to_i
assert_equal @version.description, xml.at_css("description").content
assert_equal @version.downloads_count, xml.at_css("downloads-count").content.to_i
assert_equal @version.number, xml.at_css("number").content
assert_equal @version.platform, xml.at_css("platform").content
assert_equal @version.prerelease.to_s, xml.at_css("prerelease").content
assert_equal @version.ruby_version, xml.at_css("ruby-version").content
assert_equal @version.summary.to_s, xml.at_css("summary").content
assert_equal @version.licenses, xml.at_css("licenses").content
assert_equal @version.requirements, xml.at_css("requirements").content
end
end
context ".most_recent" do
setup do
@gem = create(:rubygem)
end
should "return most recently created version for versions with multiple non-ruby platforms" do
create(:version, :rubygem => @gem, :number => '0.1', :platform => 'linux')
@most_recent = create(:version, :rubygem => @gem, :number => '0.2', :platform => 'universal-rubinius')
create(:version, :rubygem => @gem, :number => '0.1', :platform => 'mswin32')
assert_equal @most_recent, Version.most_recent
end
end
context ".reverse_dependencies" do
setup do
@dep_rubygem = create(:rubygem)
@gem_one = create(:rubygem)
@gem_two = create(:rubygem)
@gem_three = create(:rubygem)
@version_one_latest = create(:version, :rubygem => @gem_one, :number => '0.2')
@version_one_earlier = create(:version, :rubygem => @gem_one, :number => '0.1')
@version_two_latest = create(:version, :rubygem => @gem_two, :number => '1.0')
@version_two_earlier = create(:version, :rubygem => @gem_two, :number => '0.5')
@version_three = create(:version, :rubygem => @gem_three, :number => '1.7')
@version_one_latest.dependencies << create(:dependency, :version => @version_one_latest, :rubygem => @dep_rubygem)
@version_two_earlier.dependencies << create(:dependency, :version => @version_two_earlier, :rubygem => @dep_rubygem)
@version_three.dependencies << create(:dependency, :version => @version_three, :rubygem => @dep_rubygem)
end
should "return all depended gem versions" do
version_list = Version.reverse_dependencies(@dep_rubygem.name)
assert_equal 3, version_list.size
assert version_list.include?(@version_one_latest)
assert version_list.include?(@version_two_earlier)
assert version_list.include?(@version_three)
assert ! version_list.include?(@version_one_earlier)
assert ! version_list.include?(@version_two_latest)
end
end
context "updated gems" do
setup do
Timecop.freeze Date.today
@existing_gem = create(:rubygem)
@second = create(:version, :rubygem => @existing_gem, :created_at => 1.day.ago)
@fourth = create(:version, :rubygem => @existing_gem, :created_at => 4.days.ago)
@another_gem = create(:rubygem)
@third = create(:version, :rubygem => @another_gem, :created_at => 3.days.ago)
@first = create(:version, :rubygem => @another_gem, :created_at => 1.minute.ago)
@yanked = create(:version, :rubygem => @another_gem, :created_at => 30.seconds.ago)
@yanked.yank!
@bad_gem = create(:rubygem)
@only_one = create(:version, :rubygem => @bad_gem, :created_at => 1.minute.ago)
end
teardown do
Timecop.return
end
should "order gems by created at and show only gems that have more than one version" do
versions = Version.just_updated
assert_equal 4, versions.size
assert_equal [@first, @second, @third, @fourth], versions
end
end
context "with a rubygem" do
setup do
@rubygem = create(:rubygem)
end
should "not allow duplicate versions" do
@version = build(:version, :rubygem => @rubygem, :number => "1.0.0", :platform => "ruby")
@dup_version = @version.dup
@number_version = build(:version, :rubygem => @rubygem, :number => "2.0.0", :platform => "ruby")
@platform_version = build(:version, :rubygem => @rubygem, :number => "1.0.0", :platform => "mswin32")
assert @version.save
assert @number_version.save
assert @platform_version.save
assert ! @dup_version.valid?
end
should "be able to find dependencies" do
@dependency = create(:rubygem)
@version = build(:version, :rubygem => @rubygem, :number => "1.0.0", :platform => "ruby")
@version.dependencies << create(:dependency, :version => @version, :rubygem => @dependency)
assert ! Version.with_deps.first.dependencies.empty?
end
should "sort dependencies alphabetically" do
@version = build(:version, :rubygem => @rubygem, :number => "1.0.0", :platform => "ruby")
@first_dependency_by_alpha = create(:rubygem, :name => 'acts_as_indexed')
@second_dependency_by_alpha = create(:rubygem, :name => 'friendly_id')
@third_dependency_by_alpha = create(:rubygem, :name => 'refinerycms')
@version.dependencies << create(:dependency, :version => @version, :rubygem => @second_dependency_by_alpha)
@version.dependencies << create(:dependency, :version => @version, :rubygem => @third_dependency_by_alpha)
@version.dependencies << create(:dependency, :version => @version, :rubygem => @first_dependency_by_alpha)
assert @first_dependency_by_alpha.name, @version.dependencies.first.name
assert @second_dependency_by_alpha.name, @version.dependencies[1].name
assert @third_dependency_by_alpha.name, @version.dependencies.last.name
end
end
context "with a ruby version" do
setup do
@ruby_version = ">= 1.9.3"
@version = create(:version)
end
subject { @version }
should "have a ruby version" do
@version.ruby_version = @ruby_version
@version.save!
new_version = Version.find(@version.id)
assert_equal new_version.ruby_version, @ruby_version
end
end
context "without a ruby version" do
setup do
@ruby_version = ">= 1.9.3"
@version = create(:version)
end
subject { @version }
should "not have a ruby version" do
@version.ruby_version = nil
@version.save!
nil_version = Version.find(@version.id)
assert_nil nil_version.ruby_version
end
end
context "with a version" do
setup do
@version = create(:version)
@info = "some info"
end
subject { @version }
should_not allow_value("#YAML<CEREALIZATION-FAIL>").for(:number)
should_not allow_value("1.2.3-\"[javalol]\"").for(:number)
should_not allow_value("0.8.45::Gem::PLATFORM::FAILBOAT").for(:number)
should_not allow_value("1.2.3\n<bad>").for(:number)
should allow_value("ruby").for(:platform)
should allow_value("mswin32").for(:platform)
should allow_value("x86_64-linux").for(:platform)
should_not allow_value("Gem::Platform::Ruby").for(:platform)
should "give number for #to_s" do
assert_equal @version.number, @version.to_s
end
should "not be platformed" do
assert ! @version.platformed?
end
should "save full name" do
assert_equal "#{@version.rubygem.name}-#{@version.number}", @version.full_name
assert_equal @version.number, @version.slug
end
should "save info into redis" do
info = $redis.hgetall(Version.info_key(@version.full_name))
assert_equal @version.rubygem.name, info["name"]
assert_equal @version.number, info["number"]
assert_equal @version.platform, info["platform"]
end
should "add version onto redis versions list" do
assert_equal @version.full_name, $redis.lindex(Rubygem.versions_key(@version.rubygem.name), 0)
end
should "raise an ActiveRecord::RecordNotFound if an invalid slug is given" do
assert_raise ActiveRecord::RecordNotFound do
Version.find_from_slug!(@version.rubygem_id, "some stupid version 399")
end
end
%w[x86_64-linux java mswin x86-mswin32-60].each do |platform|
should "be able to find with platform of #{platform}" do
version = create(:version, :platform => platform)
slug = "#{version.number}-#{platform}"
assert version.platformed?
assert_equal version.reload, Version.find_from_slug!(version.rubygem_id, slug)
assert_equal slug, version.slug
end
end
should "have a default download count" do
assert @version.downloads_count.zero?
end
should "give no version flag for the latest version" do
new_version = create(:version, :rubygem => @version.rubygem, :built_at => 1.day.from_now)
assert_equal "gem install #{@version.rubygem.name} -v #{@version.number}", @version.to_install
assert_equal "gem install #{new_version.rubygem.name}", new_version.to_install
end
should "tack on prerelease flag" do
@version.update_attributes(:number => "0.3.0.pre")
new_version = create(:version, :rubygem => @version.rubygem,
:built_at => 1.day.from_now,
:number => "0.4.0.pre")
assert @version.prerelease
assert new_version.prerelease
@version.rubygem.reorder_versions
assert_equal "gem install #{@version.rubygem.name} -v #{@version.number} --pre",
@version.to_install
assert_equal "gem install #{new_version.rubygem.name} --pre",
new_version.to_install
end
should "give no version count for the latest prerelease version" do
@version.update_attributes(:number => "0.3.0.pre")
old_version = create(:version, :rubygem => @version.rubygem,
:built_at => 1.day.from_now,
:number => "0.2.0")
assert @version.prerelease
assert !old_version.prerelease
@version.rubygem.reorder_versions
assert_equal "gem install #{@version.rubygem.name} --pre", @version.to_install
assert_equal "gem install #{old_version.rubygem.name}", old_version.to_install
end
should "give title for #to_title" do
assert_equal "#{@version.rubygem.name} (#{@version.to_s})", @version.to_title
end
should "give version with twiddle-wakka for #to_bundler" do
assert_equal %{gem '#{@version.rubygem.name}', '~> #{@version.to_s}'}, @version.to_bundler
end
should "give title and platform for #to_title" do
@version.platform = "zomg"
assert_equal "#{@version.rubygem.name} (#{@version.number}-zomg)", @version.to_title
end
should "have description for info" do
@version.description = @info
assert_equal @info, @version.info
end
should "have summary for info if description does not exist" do
@version.description = nil
@version.summary = @info
assert_equal @info, @version.info
end
should "have summary for info if description is blank" do
@version.description = ""
@version.summary = @info
assert_equal @info, @version.info
end
should "have some text for info if neither summary or description exist" do
@version.description = nil
@version.summary = nil
assert_equal "This rubygem does not have a description or summary.", @version.info
end
should "give 'N/A' for size when size not available" do
@version.size = nil
assert_equal 'N/A', @version.size
end
context "when yanked" do
setup do
@version.yank!
end
should("unindex") { assert [email protected]? }
should("be considered yanked") { assert Version.yanked.include?(@version) }
should("no longer be latest") { assert [email protected]?}
should "not appear in the version list" do
assert ! $redis.exists(Rubygem.versions_key(@version.rubygem.name))
end
context "and consequently unyanked" do
setup do
@version.unyank!
@version.reload
end
should("re-index") { assert @version.indexed? }
should("become the latest again") { assert @version.latest? }
should("be considered unyanked") { assert !Version.yanked.include?(@version) }
should "appear in the version list" do
assert_equal @version.full_name, $redis.lindex(Rubygem.versions_key(@version.rubygem.name), 0)
end
end
end
end
context "with a very long authors string." do
should "create without error" do
create(:version, :authors => ["Fbdoorman: David Pelaez", "MiniFB:Appoxy", "Dan Croak", "Mike Burns", "Jason Morrison", "Joe Ferris", "Eugene Bolshakov", "Nick Quaranto", "Josh Nichols", "Mike Breen", "Marcel G\303\266rner", "Bence Nagy", "Ben Mabey", "Eloy Duran", "Tim Pope", "Mihai Anca", "Mark Cornick", "Shay Arnett", "Jon Yurek", "Chad Pytel"])
end
end
context "when indexing" do
setup do
@rubygem = create(:rubygem)
@first_version = create(:version, :rubygem => @rubygem, :number => "0.0.1", :built_at => 7.days.ago)
@second_version = create(:version, :rubygem => @rubygem, :number => "0.0.2", :built_at => 6.days.ago)
@third_version = create(:version, :rubygem => @rubygem, :number => "0.0.3", :built_at => 5.days.ago)
@fourth_version = create(:version, :rubygem => @rubygem, :number => "0.0.4", :built_at => 5.days.ago)
end
should "always sort properly" do
assert_equal -1, (@first_version <=> @second_version)
assert_equal -1, (@first_version <=> @third_version)
assert_equal -1, (@first_version <=> @fourth_version)
assert_equal 1, (@second_version <=> @first_version)
assert_equal -1, (@second_version <=> @third_version)
assert_equal -1, (@second_version <=> @fourth_version)
assert_equal 1, (@third_version <=> @first_version)
assert_equal 1, (@third_version <=> @second_version)
assert_equal -1, (@third_version <=> @fourth_version)
assert_equal 1, (@fourth_version <=> @first_version)
assert_equal 1, (@fourth_version <=> @second_version)
assert_equal 1, (@fourth_version <=> @third_version)
end
end
context "with mixed release and prerelease versions" do
setup do
@prerelease = create(:version, :number => '1.0.rc1')
@release = create(:version, :number => '1.0')
end
should "know if it is a prelease version" do
assert @prerelease.prerelease?
assert !@release.prerelease?
end
should "return prerelease gems from the prerelease named scope" do
assert_equal [@prerelease], Version.prerelease
assert_equal [@release], Version.release
end
end
context "with only prerelease versions" do
setup do
@rubygem = create(:rubygem)
@one = create(:version, :rubygem => @rubygem, :number => '1.0.0.pre')
@two = create(:version, :rubygem => @rubygem, :number => '1.0.1.pre')
@three = create(:version, :rubygem => @rubygem, :number => '1.0.2.pre')
@rubygem.reload
end
should "show last pushed as latest version" do
assert_equal @three, @rubygem.versions.most_recent
end
end
context "with versions created out of order" do
setup do
@gem = create(:rubygem)
create(:version, :rubygem => @gem, :number => '0.5')
create(:version, :rubygem => @gem, :number => '0.3')
create(:version, :rubygem => @gem, :number => '0.7')
create(:version, :rubygem => @gem, :number => '0.2')
@gem.reload # make sure to reload the versions just created
end
should "be in the proper order" do
assert_equal %w[0.7 0.5 0.3 0.2], @gem.versions.by_position.map(&:number)
end
should "know its latest version" do
assert_equal '0.7', @gem.versions.most_recent.number
end
end
context "with multiple rubygems and versions created out of order" do
setup do
@gem_one = create(:rubygem)
@gem_two = create(:rubygem)
@version_one_latest = create(:version, :rubygem => @gem_one, :number => '0.2')
@version_one_earlier = create(:version, :rubygem => @gem_one, :number => '0.1')
@version_two_latest = create(:version, :rubygem => @gem_two, :number => '1.0')
@version_two_earlier = create(:version, :rubygem => @gem_two, :number => '0.5')
end
should "be able to fetch the latest versions" do
assert_contains Version.latest.map(&:id), @version_one_latest.id
assert_contains Version.latest.map(&:id), @version_two_latest.id
assert_does_not_contain Version.latest.map(&:id), @version_one_earlier.id
assert_does_not_contain Version.latest.map(&:id), @version_two_earlier.id
end
end
context "with a few versions" do
setup do
@thin = create(:version, :authors => %w[thin], :built_at => 1.year.ago)
@rake = create(:version, :authors => %w[rake], :built_at => 1.month.ago)
@json = create(:version, :authors => %w[json], :built_at => 1.week.ago)
@thor = create(:version, :authors => %w[thor], :built_at => 2.days.ago)
@rack = create(:version, :authors => %w[rack], :built_at => 1.day.ago)
@haml = create(:version, :authors => %w[haml], :built_at => 1.hour.ago)
@dust = create(:version, :authors => %w[dust], :built_at => 1.day.from_now)
@fake = create(:version, :authors => %w[fake], :indexed => false, :built_at => 1.minute.ago)
end
should "get the latest versions up to today" do
assert_equal [@haml, @rack, @thor, @json, @rake].map(&:authors), Version.published(5).map(&:authors)
assert_equal [@haml, @rack, @thor, @json, @rake, @thin].map(&:authors), Version.published(6).map(&:authors)
end
end
context "with a few versions some owned by a user" do
setup do
@user = create(:user)
@gem = create(:rubygem)
@owned_one = create(:version, :rubygem => @gem, :built_at => 1.day.ago)
@owned_two = create(:version, :rubygem => @gem, :built_at => 2.days.ago)
@unowned = create(:version)
create(:ownership, :rubygem => @gem, :user => @user)
end
should "return the owned gems from #owned_by" do
assert_contains Version.owned_by(@user).map(&:id), @owned_one.id
assert_contains Version.owned_by(@user).map(&:id), @owned_two.id
end
should "not return the unowned versions from #owned_by" do
assert_does_not_contain Version.owned_by(@user).map(&:id), @unowned.id
end
end
context "with a few versions some subscribed to by a user" do
setup do
@user = create(:user)
@gem = create(:rubygem)
@subscribed_one = create(:version, :rubygem => @gem)
@subscribed_two = create(:version, :rubygem => @gem)
@unsubscribed = create(:version)
create(:subscription, :rubygem => @gem, :user => @user)
end
should "return the owned gems from #owned_by" do
assert_contains Version.subscribed_to_by(@user).map(&:id), @subscribed_one.id
assert_contains Version.subscribed_to_by(@user).map(&:id), @subscribed_two.id
end
should "not return the unowned versions from #owned_by" do
assert_does_not_contain Version.subscribed_to_by(@user).map(&:id), @unsubscribed.id
end
should "order them from latest-oldest pushed to Gemcutter, not build data" do
# Setup so that gem one was built earlier than gem two, but pushed to Gemcutter after gem two
# We do this so that:
# a) people with RSS will get smooth results, rather than gem versions jumping around the place
# b) people can't hijack the latest gem spot by building in the far future, but pushing today
@subscribed_one.update_attributes(:built_at => Time.now - 3.days, :created_at => Time.now - 1.day)
@subscribed_two.update_attributes(:built_at => Time.now - 2.days, :created_at => Time.now - 2.days)
# Even though gem two was built before gem one, it was pushed to gemcutter first
# Thus, we should have from newest to oldest, gem one, then gem two
expected = [@subscribed_one, @subscribed_two].map do |s|
s.created_at.to_s(:db)
end
actual = Version.subscribed_to_by(@user).map do |s|
s.created_at.to_s(:db)
end
assert_equal expected, actual
end
end
context "with a Gem::Specification" do
setup do
@spec = gem_spec
@version = build(:version)
end
[/foo/, 1337, {:foo => "bar"}].each do |example|
should "be invalid with authors as an Array of #{example.class}'s" do
assert_raise ActiveRecord::RecordInvalid do
@spec.authors = [example]
@version.update_attributes_from_gem_specification!(@spec)
end
end
end
should "have attributes set properly from the specification" do
@version.update_attributes_from_gem_specification!(@spec)
assert @version.indexed
assert_equal @spec.authors.join(', '), @version.authors
assert_equal @spec.description, @version.description
assert_equal @spec.summary, @version.summary
assert_equal @spec.date, @version.built_at
end
end
context "indexes" do
setup do
@first_rubygem = create(:rubygem, :name => "first")
@second_rubygem = create(:rubygem, :name => "second")
@first_version = create(:version, :rubygem => @first_rubygem, :number => "0.0.1", :platform => "ruby")
@second_version = create(:version, :rubygem => @first_rubygem, :number => "0.0.2", :platform => "ruby")
@other_version = create(:version, :rubygem => @second_rubygem, :number => "0.0.2", :platform => "java")
@pre_version = create(:version, :rubygem => @second_rubygem, :number => "0.0.2.pre", :platform => "java", :prerelease => true)
end
should "select all gems" do
assert_equal [
["first", "0.0.1", "ruby"],
["first", "0.0.2", "ruby"],
["second", "0.0.2", "java"]
], Version.rows_for_index
end
should "select only most recent" do
assert_equal [
["first", "0.0.2", "ruby"],
["second", "0.0.2", "java"]
], Version.rows_for_latest_index
end
should "select only prerelease" do
assert_equal [
["second", "0.0.2.pre", "java"]
], Version.rows_for_prerelease_index
end
end
end
| 39.449587 | 355 | 0.653496 |
e80936d31844d6e358e06c51c8cb3391d29fb58d | 3,881 | class StudentTaskController < ApplicationController
include AuthorizationHelper
helper :submitted_content
def action_allowed?
current_user_has_student_privileges?
end
def impersonating_as_admin?
original_user = session[:original_user]
admin_role_ids = Role.where(name: %w[Administrator Super-Administrator]).pluck(:id)
admin_role_ids.include? original_user.role_id
end
def impersonating_as_ta?
original_user = session[:original_user]
ta_role = Role.where(name: ['Teaching Assistant']).pluck(:id)
ta_role.include? original_user.role_id
end
def list
redirect_to(controller: 'eula', action: 'display') if current_user.is_new_user
session[:user] = User.find_by(id: current_user.id)
@student_tasks = StudentTask.from_user current_user
if session[:impersonate] && !impersonating_as_admin?
if impersonating_as_ta?
ta_course_ids = TaMapping.where(:ta_id => session[:original_user].id).pluck(:course_id)
@student_tasks = @student_tasks.select {|t| ta_course_ids.include?t.assignment.course_id }
else
@student_tasks = @student_tasks.select {|t| t.assignment.course and session[:original_user].id == t.assignment.course.instructor_id or !t.assignment.course and session[:original_user].id == t.assignment.instructor_id }
end
end
@student_tasks.select! {|t| t.assignment.availability_flag }
# #######Tasks and Notifications##################
@tasknotstarted = @student_tasks.select(&:not_started?)
@taskrevisions = @student_tasks.select(&:revision?)
######## Students Teamed With###################
@students_teamed_with = StudentTask.teamed_students(current_user, session[:ip])
end
def view
StudentTask.from_participant_id params[:id]
@participant = AssignmentParticipant.find(params[:id])
@can_submit = @participant.can_submit
@can_review = @participant.can_review
@can_take_quiz = @participant.can_take_quiz
@authorization = Participant.get_authorization(@can_submit, @can_review, @can_take_quiz)
@team = @participant.team
denied unless current_user_id?(@participant.user_id)
@assignment = @participant.assignment
@can_provide_suggestions = @assignment.allow_suggestions
@topic_id = SignedUpTeam.topic_id(@assignment.id, @participant.user_id)
@topics = SignUpTopic.where(assignment_id: @assignment.id)
@use_bookmark = @assignment.use_bookmark
# Timeline feature
@timeline_list = StudentTask.get_timeline_data(@assignment, @participant, @team)
end
def others_work
@participant = AssignmentParticipant.find(params[:id])
return unless current_user_id?(@participant.user_id)
@assignment = @participant.assignment
# Finding the current phase that we are in
due_dates = AssignmentDueDate.where(parent_id: @assignment.id)
@very_last_due_date = AssignmentDueDate.where(parent_id: @assignment.id).order("due_at DESC").limit(1)
next_due_date = @very_last_due_date[0]
for due_date in due_dates
if due_date.due_at > Time.now
next_due_date = due_date if due_date.due_at < next_due_date.due_at
end
end
@review_phase = next_due_date.deadline_type_id
if next_due_date.review_of_review_allowed_id == DeadlineRight::LATE or next_due_date.review_of_review_allowed_id == DeadlineRight::OK
@can_view_metareview = true if @review_phase == DeadlineType.find_by(name: "metareview").id
end
@review_mappings = ResponseMap.where(reviewer_id: @participant.id)
@review_of_review_mappings = MetareviewResponseMap.where(reviewer_id: @participant.id)
end
def publishing_rights_update
@participant = AssignmentParticipant.find(params[:id])
@participant.permission_granted = params[:status]
@participant.save
respond_to do |format|
format.html {head :no_content}
end
end
def your_work; end
end
| 38.04902 | 226 | 0.737954 |
d5710c0cb6a6be5da31c71a18d41271f30faac46 | 225 | require "test/unit"
require_relative "./part1"
class Part1Test < Test::Unit::TestCase
def test_solve
input = File.read(File.expand_path("../input.txt", __FILE__))
assert_equal 807, Part1.new.solve(input)
end
end
| 22.5 | 65 | 0.72 |
e9b11a66a271bcbfbec1a3c9e7645f6df18c7e53 | 200 | Dir[File.expand_path("../../../app/jobs/**/*.rb", __FILE__)].each do |file|
require file
end
class LocalQueue < Struct.new(:config)
def to_s
"cc-#{config[:name]}-#{config[:index]}"
end
end
| 20 | 75 | 0.625 |
2651ab0e9d8011f734d6e7563cc18c59b254f636 | 401 | require 'cli/murakumo_options'
require 'srv/murakumo_server'
# Parse the command-line options
options = murakumo_parse_args
# Initialize the server
Murakumo::Server.init(options)
if options[:daemon]
# When daemonizing
# Processing is delegated to RExec, so put the daemonize command at the front of ARGV
ARGV.unshift options[:daemon].to_s
Murakumo::Server.working_directory = options[:working_dir]
Murakumo::Server.daemonize
else
# When not daemonizing
Murakumo::Server.run
end
| 19.095238 | 60 | 0.78803 |
e8f049c9ec9f43426cf05cd5243498e05218286c | 16,324 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'kitchen/verifier/base'
module Kitchen
module Verifier
# Serverspec verifier for Kitchen.
class Serverspec < Kitchen::Verifier::Base
require 'mixlib/shellout'
kitchen_verifier_api_version 1
plugin_version Kitchen::VERSION
default_config :sleep, 0
default_config :serverspec_command, nil
default_config :custom_serverspec_command, nil
default_config :additional_serverspec_command, nil
default_config :shellout_opts, {}
default_config :live_stream, $stdout
default_config :remote_exec, true
default_config :sudo_command, 'sudo -E -H'
default_config :format, 'documentation'
default_config :color, true
default_config :default_path, '/tmp/kitchen'
default_config :patterns, []
default_config :default_pattern, false
default_config :gemfile, nil
default_config :custom_install_command, nil
default_config :additional_install_command, nil
default_config :test_serverspec_installed, true
default_config :extra_flags, nil
default_config :remove_default_path, false
default_config :env_vars, {}
default_config :bundler_path, nil
default_config :rspec_path, nil
default_config :require_runner, false
default_config :runner_url, 'https://raw.githubusercontent.com/neillturner/serverspec-runners/master/ansiblespec_runner.rb'
# (see Base#call)
def call(state)
info("[#{name}] Verify on instance=#{instance} with state=#{state}")
sleep_if_set
merge_state_to_env(state)
if config[:remote_exec]
debug('Running Serverspec on remote server')
if config[:default_pattern]
create_sandbox
sandbox_dirs = [ sandbox_path ]
end
instance.transport.connection(state) do |conn|
conn.execute(install_command)
if config[:default_pattern]
info("Transferring files to #{instance.to_str}")
conn.upload(sandbox_dirs, config[:root_path])
debug('Transfer complete')
end
conn.execute(serverspec_commands)
end
cleanup_sandbox if config[:default_pattern]
else
debug('Running Serverspec locally on workstation')
config[:default_path] = Dir.pwd if config[:default_path] == '/tmp/kitchen'
install_command
serverspec_commands
end
debug("[#{name}] Verify completed.")
end
## for legacy drivers.
def run_command
sleep_if_set
serverspec_commands
end
def setup_cmd
sleep_if_set
install_command
end
# (see Base#create_sandbox)
def create_sandbox
super
prepare_suites
end
def serverspec_commands
if config[:remote_exec]
if custom_serverspec_command
<<-INSTALL
#{custom_serverspec_command}
INSTALL
else
<<-INSTALL
#{config[:additional_serverspec_command]}
mkdir -p #{config[:default_path]}
cd #{config[:default_path]}
RSPEC_CMD=#{rspec_bash_cmd}
echo "---> RSPEC_CMD variable is: ${RSPEC_CMD}"
#{rspec_commands}
#{remove_default_path}
INSTALL
end
elsif custom_serverspec_command
shellout custom_serverspec_command
else
if config[:additional_serverspec_command]
c = config[:additional_serverspec_command]
shellout c
end
c = rspec_commands
shellout c
end
end
def install_command
info('Installing with custom install command') if config[:custom_install_command]
return config[:custom_install_command] if config[:custom_install_command]
if config[:remote_exec]
info('Installing ruby, bundler and serverspec remotely on server')
<<-INSTALL
#{config[:additional_install_command]}
if [ ! $(which ruby) ]; then
echo '-----> Installing ruby, will try to determine platform os'
if [ -f /etc/centos-release ] || [ -f /etc/redhat-release ] || [ -f /etc/oracle-release ]; then
#{sudo_env('yum')} -y install ruby
else
if [ -f /etc/system-release ] && grep -q 'Amazon Linux' /etc/system-release; then
#{sudo_env('yum')} -y install ruby
else
#{sudo_env('apt-get')} -y update
#{sudo_env('apt-get')} -y install ruby
fi
fi
fi
#{install_bundler}
if [ -d #{config[:default_path]} ]; then
#{install_serverspec}
#{install_runner}
else
echo "ERROR: Default path '#{config[:default_path]}' does not exist"
exit 1
fi
INSTALL
else
info('Installing bundler and serverspec locally on workstation')
if config[:additional_install_command]
c = config[:additional_install_command]
shellout c
end
install_bundler
install_serverspec
install_runner
end
end
# private
def install_bundler
if config[:remote_exec]
<<-INSTALL
if [ -f /etc/centos-release ] || [ -f /etc/redhat-release ] || [ -f /etc/oracle-release ] || ( [ -f /etc/os-release ] && grep -q 'Amazon Linux 2' /etc/os-release ); then
echo '-----> Installing os provided bundler package'
#{sudo_env('yum')} -y install rubygem-bundler
else
echo '-----> Installing bundler via rubygems'
if [ \"$(#{sudo('gem')} list bundler -i)\" = \"true\" ]; then
echo "Bundler already installed"
else
if [ \"$(#{sudo('gem')} list bundler -i)\" = \"false\" ]; then
#{sudo_env('gem')} install #{gem_proxy_parm} --no-ri --no-rdoc bundler
else
echo "ERROR: Ruby not installed correctly"
exit 1
fi
fi
fi
INSTALL
else
begin
require 'bundler'
rescue LoadError
shellout `gem install --no-ri --no-rdoc bundler`
end
end
end
def install_runner
return unless config[:require_runner]
raise ActionFailed, 'Serverspec Runners only for remote execution' unless config[:remote_exec]
<<-INSTALL
if [ ! -f #{config[:default_path]}/#{runner_filename} ]; then
#{sudo_env('curl')} -o #{config[:default_path]}/#{runner_filename} #{config[:runner_url]}
fi
INSTALL
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
def install_serverspec
if config[:remote_exec]
<<-INSTALL
#{test_serverspec_installed}
#{install_gemfile}
BUNDLE_CMD=#{bundler_cmd}
echo "---> BUNDLE_CMD variable is: ${BUNDLE_CMD}"
#{sudo_env('')} $BUNDLE_CMD install --gemfile=#{config[:default_path]}/Gemfile
#{fi_test_serverspec_installed}
INSTALL
else
if config[:test_serverspec_installed]
begin
require 'serverspec'
return
rescue LoadError
info('serverspec not installed installing ...')
end
end
unless config[:gemfile]
gemfile = "#{config[:default_path]}/Gemfile"
unless File.exist?(gemfile)
File.open(gemfile, 'w') do |f|
f.write("source 'https://rubygems.org'\ngem 'net-ssh','~> 2.9.4'\ngem 'serverspec'")
end
end
end
gemfile = config[:gemfile] if config[:gemfile]
begin
shellout "#{bundler_local_cmd} install --gemfile=#{gemfile}"
rescue
raise ActionFailed, 'Serverspec install failed'
end
nil
end
end
def install_gemfile
if config[:gemfile]
<<-INSTALL
#{read_gemfile}
INSTALL
else
<<-INSTALL
#{sudo('rm')} -f #{config[:default_path]}/Gemfile
#{sudo('echo')} "source 'https://rubygems.org'" >> #{config[:default_path]}/Gemfile
#{sudo('echo')} "gem 'net-ssh','~> 3'" >> #{config[:default_path]}/Gemfile
#{sudo('echo')} "gem 'serverspec'" >> #{config[:default_path]}/Gemfile
INSTALL
end
end
def read_gemfile
data = "#{sudo('rm')} -f #{config[:default_path]}/Gemfile\n"
f = File.open(config[:gemfile], 'r')
f.each_line do |line|
data = "#{data}#{sudo('echo')} \"#{line}\" >> #{config[:default_path]}/Gemfile\n"
end
f.close
data
end
def remove_default_path
info('Removing default path') if config[:remove_default_path]
config[:remove_default_path] ? "rm -rf #{config[:default_path]}" : nil
end
def test_serverspec_installed
config[:test_serverspec_installed] ? "if [ \"$(#{sudo('gem')} list serverspec -i)\" = \"false\" ]; then" : nil
end
def fi_test_serverspec_installed
config[:test_serverspec_installed] ? 'fi' : nil
end
def rspec_commands
info('Running Serverspec')
if config[:default_pattern]
if config[:remote_exec]
info("Using default pattern #{config[:root_path]}/suites/serverspec/*_spec.rb")
config[:patterns] = ["#{config[:root_path]}/suites/serverspec/*_spec.rb"]
else
info("Using default pattern #{config[:test_base_path]}/#{config[:suite_name]}/serverspec/*_spec.rb")
config[:patterns] = ["#{config[:test_base_path]}/#{config[:suite_name]}/serverspec/*_spec.rb"]
end
end
if config[:require_runner]
"#{env_vars} #{sudo_env(rspec_cmd)} #{color} -f #{config[:format]} --default-path #{config[:default_path]} #{rspec_path_option} #{config[:extra_flags]}"
elsif config[:remote_exec]
config[:patterns].map { |s| "#{env_vars} #{sudo_env('')} $RSPEC_CMD #{color} -f #{config[:format]} --default-path #{config[:default_path]} #{config[:extra_flags]} -P #{s}" }.join(';')
else
config[:patterns].empty? ? '' : "#{env_vars} #{sudo_env(rspec_cmd)} #{color} -f #{config[:format]} --default-path #{config[:default_path]} #{config[:extra_flags]} #{config[:patterns].map { |s| "-P #{s}" }.join(' ')}"
end
end
def rspec_cmd
config[:require_runner] ? "ruby #{config[:default_path]}/#{runner_filename}" : "#{rspec_path}rspec"
end
def env_vars
return nil if config[:env_vars].none?
cmd = nil
if !config[:remote_exec]
config[:env_vars].map do |k, v|
info("Environment variable #{k} value #{v}")
ENV[k.to_s] = v.to_s
end
else
cmd = config[:env_vars].map { |k, v| "#{k}=#{v}" }.join(' ')
debug(cmd)
end
cmd
end
def sudo_env(pm)
if config[:remote_exec]
s = https_proxy ? "https_proxy=#{https_proxy}" : nil
p = http_proxy ? "http_proxy=#{http_proxy}" : nil
p || s ? "#{sudo('env')} #{p} #{s} #{pm}" : sudo(pm).to_s
else
# TODO: handle proxies
pm
end
end
def custom_serverspec_command
return config[:custom_serverspec_command] if config[:custom_serverspec_command]
config[:serverspec_command]
end
def bundler_cmd
config[:bundler_path] ? "#{config[:bundler_path]}/bundle" : '$(which bundle)'
end
def bundler_local_cmd
config[:bundler_path] ? "#{config[:bundler_path]}/bundle" : 'bundle'
end
def rspec_bash_cmd
config[:rspec_path] ? "#{config[:rspec_path]}/rspec" : '$(which rspec)'
end
def rspec_path
config[:rspec_path] ? "#{config[:rspec_path]}/" : nil
end
def rspec_path_option
config[:rspec_path] ? "--rspec-path #{config[:rspec_path]}/" : nil
end
def runner_filename
File.basename(config[:runner_url])
end
def http_proxy
config[:http_proxy]
end
def https_proxy
config[:https_proxy]
end
def gem_proxy_parm
http_proxy ? "--http-proxy #{http_proxy}" : nil
end
def color
config[:color] ? '-c' : nil
end
# Sleep for a period of time, if a value is set in the config.
#
# @api private
def sleep_if_set
config[:sleep].to_i.times do
print '.'
sleep 1
end
end
def shellout(command)
command = command.strip
info("Running command: #{command}")
cmd = Mixlib::ShellOut.new(command, config[:shellout_opts])
cmd.live_stream = config[:live_stream]
cmd.run_command
begin
cmd.error!
rescue Mixlib::ShellOut::ShellCommandFailed
raise ActionFailed, "Command #{command.inspect} failed for #{instance.to_str}"
end
end
def merge_state_to_env(state)
env_state = { :environment => {} }
env_state[:environment]['KITCHEN_INSTANCE'] = instance.name
env_state[:environment]['KITCHEN_PLATFORM'] = instance.platform.name
env_state[:environment]['KITCHEN_SUITE'] = instance.suite.name
state.each_pair do |key, value|
env_state[:environment]['KITCHEN_' + key.to_s.upcase] = value.to_s
ENV['KITCHEN_' + key.to_s.upcase] = value.to_s
info("Environment variable #{'KITCHEN_' + key.to_s.upcase} value #{value}")
end
# if using a driver that uses transport expose those too
%w[username password ssh_key port].each do |key|
next if instance.transport[key.to_sym].nil?
value = instance.transport[key.to_sym].to_s
ENV['KITCHEN_' + key.to_s.upcase] = value
info("Transport Environment variable #{'KITCHEN_' + key.to_s.upcase} value #{value}")
end
config[:shellout_opts].merge!(env_state)
end
def chef_data_dir?(base, file)
file =~ %r{^#{base}/(data|data_bags|environments|nodes|roles)/}
end
# Returns an Array of test suite filenames for the related suite currently
# residing on the local workstation. Any special provisioner-specific
# directories (such as a Chef roles/ directory) are excluded.
#
# @return [Array<String>] array of suite files
# @api private
def local_suite_files
base = File.join(config[:test_base_path], config[:suite_name])
glob = File.join(base, '*/**/*')
Dir.glob(glob).reject do |f|
chef_data_dir?(base, f) || File.directory?(f)
end
end
# Copies all test suite files into the suites directory in the sandbox.
def prepare_suites
base = File.join(config[:test_base_path], config[:suite_name])
debug("Creating local sandbox of all test suite files in #{base}")
local_suite_files.each do |src|
dest = File.join(sandbox_suites_dir, src.sub("#{base}/", ''))
FileUtils.mkdir_p(File.dirname(dest))
FileUtils.cp(src, dest, :preserve => true)
end
end
# @return [String] path to suites directory under sandbox path
# @api private
def sandbox_suites_dir
File.join(sandbox_path, 'suites')
end
end
end
end
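# Illustrative configuration sketch (an assumption, not part of the original
# file): a Test Kitchen suite would typically select this verifier and tune the
# default_config options defined above from .kitchen.yml, e.g.
#
#   verifier:
#     name: serverspec
#     remote_exec: true
#     default_pattern: true
#     env_vars:
#       TARGET_BACKEND: ssh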
| 35.486957 | 227 | 0.582639 |
b9fd435d93900bad34970dda98e11329e1f99761 | 1,126 | Rails.application.routes.draw do
get 'signup', to: 'users#new'
get 'login', to: 'sessions#new'
post 'login', to: 'sessions#create'
post 'logout', to: 'sessions#logout'
get 'welcome', to: 'sessions#welcome'
root 'sessions#welcome'
get 'username_or_email_exists', to: 'users#name_or_email_exists?'
get 'mode_name_exists_for_user', to: 'modes#name_exists_for_user?'
resources :mode_types, only: [:index, :show]
resources :achievements
resources :users do
resources :messages
resources :achievement_grants, only: [:index, :show]
end
resources :modes do
resources :results, only: [:index, :show, :destroy]
resources :stats, only: [:index, :show, :destroy]
end
resources :stat_types, only: [:index, :show]
get 'users/:user_id/unread_messages_count', to: 'messages#count_unread'
get 'trainer/:mode_id', to: 'trainer#index'
post 'trainer/:mode_id/inputs', to: 'trainer#create'
delete 'trainer/:mode_id/inputs/:id', to: 'trainer#destroy'
post 'trainer/:mode_id/inputs/:id', to: 'trainer#stop'
get 'trainer/:mode_id/inputs/:input_id/image/:img_side', to: 'cube_images#show'
end
| 40.214286 | 81 | 0.709591 |
aba05a31f4a2c688ed1002af83779eefec25e78d | 354 | # frozen_string_literal: true
require 'spec_helper'
describe Poker::OnePair do
[
[0, 1, 2, 9, 9],
[0, 10, 10, 11, 12],
[11, 12, 10, 11, 0],
[1, 1, 2, 3, 2],
[3, 4, 5, 3, 3]
].each do |cards|
it "detects pair for #{cards}" do
hand = described_class.new(cards)
expect(hand.check).to eq :one_pair
end
end
end
| 18.631579 | 40 | 0.550847 |
797fdd908dfcfddfbf49919ab383000b158ef63b | 1,744 | require 'cancan'
require 'state_machine'
module Pageflow
class ApplicationController < ActionController::Base
layout 'pageflow/application'
before_filter do
I18n.locale = current_user.try(:locale) || I18n.default_locale
end
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
include EditLocking
rescue_from ActionController::UnknownFormat do
render(status: 404, text: 'Not found')
end
rescue_from ActiveRecord::RecordNotFound do
respond_to do |format|
format.html { render :file => Rails.root.join('public/404.html'), :status => :not_found }
format.any(:json, :css) { head :not_found }
end
end
rescue_from CanCan::AccessDenied do |exception|
respond_to do |format|
format.html { redirect_to main_app.admin_root_path, :alert => t('pageflow.unauthorized') }
format.any(:json, :css) { head :forbidden }
end
end
rescue_from StateMachine::InvalidTransition do |exception|
respond_to do |format|
format.html { redirect_to main_app.admin_root_path, :alert => t('pageflow.invalid_transition') }
format.json { head :bad_request }
end
end
protected
def current_ability
@current_ability ||= Ability.new(current_user)
end
def after_sign_in_path_for(resource_or_scope)
root_url(:protocol => 'http')
end
def after_sign_out_path_for(resource_or_scope)
root_url(:protocol => 'http')
end
def prevent_ssl
if request.ssl?
redirect_to("http://#{request.host}#{request.fullpath}", :status => :moved_permanently)
end
end
end
end
| 27.25 | 104 | 0.676032 |
e8f7d22bf778a5e26e2c0a6c79f9302f2d195245 | 2,868 | # frozen_string_literal: true
class ApplicationExperiment < Gitlab::Experiment
def publish(_result)
track(:assignment) # track that we've assigned a variant for this context
Gon.global.push({ experiment: { name => signature } }, true) # push to client
end
def track(action, **event_args)
return if excluded? # no events for opted out actors or excluded subjects
Gitlab::Tracking.event(name, action.to_s, **event_args.merge(
context: (event_args[:context] || []) << SnowplowTracker::SelfDescribingJson.new(
'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0', signature
)
))
end
private
def resolve_variant_name
variant_names.first if Feature.enabled?(name, self, type: :experiment)
end
# Cache is an implementation on top of Gitlab::Redis::SharedState that also
# adheres to the ActiveSupport::Cache::Store interface and uses the redis
# hash data type.
#
# Since Gitlab::Experiment can use any type of caching layer, utilizing the
# long lived shared state interface here gives us an efficient way to store
# context keys and the variant they've been assigned -- while also giving us
# a simple way to clean up an experiments data upon resolution.
#
# The data structure:
# key: experiment.name
# fields: context key => variant name
#
# The keys are expected to be `experiment_name:context_key`, which is the
# default cache key strategy. So running `cache.fetch("foo:bar", "value")`
# would create/update a hash with the key of "foo", with a field named
# "bar" that has "value" assigned to it.
class Cache < ActiveSupport::Cache::Store
# Clears the entire cache for a given experiment. Be careful with this
# since it would reset all resolved variants for the entire experiment.
def clear(key:)
key = hkey(key)[0] # extract only the first part of the key
pool do |redis|
case redis.type(key)
when 'hash', 'none' then redis.del(key)
else raise ArgumentError, 'invalid call to clear a non-hash cache key'
end
end
end
private
def pool
raise ArgumentError, 'missing block' unless block_given?
Gitlab::Redis::SharedState.with { |redis| yield redis }
end
def hkey(key)
key.split(':') # this assumes the default strategy in gitlab-experiment
end
def read_entry(key, **options)
value = pool { |redis| redis.hget(*hkey(key)) }
value.nil? ? nil : ActiveSupport::Cache::Entry.new(value)
end
def write_entry(key, entry, **options)
return false unless Feature.enabled?(:caching_experiments)
return false if entry.value.blank? # don't cache any empty values
pool { |redis| redis.hset(*hkey(key), entry.value) }
end
def delete_entry(key, **options)
pool { |redis| redis.hdel(*hkey(key)) }
end
end
end
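# Illustrative sketch (an assumption, not part of the original file): with the
# default "experiment_name:context_key" key strategy described above, and the
# :caching_experiments feature flag enabled, storing and resetting variant
# assignments would look roughly like:
#
#   cache = ApplicationExperiment::Cache.new
#   cache.fetch("notification_toggle:user_42") { "candidate" }
#   # -> HSET notification_toggle user_42 candidate
#   cache.clear(key: "notification_toggle:user_42")
#   # -> DEL notification_toggle (drops every stored assignment for the experiment)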
| 34.142857 | 87 | 0.683403 |
4a48e2a9ea017d8b6bfd476e361768c65998729a | 3,289 | # frozen_string_literal: true
shared_examples "a controls config" do
let(:param) { :button_text }
let(:name) { nil }
describe "#param" do
it "can be set" do
subject.param(:name)
expect(subject.param).to eq(:name)
end
it "set returns the control" do
expect(subject.param(:name)).to eq(subject)
end
end
describe "#name" do
it "by default is derived from the param" do
expect(subject.name).to eq("Button Text")
end
context "with name" do
let(:name) { "Button" }
it "can be passed in the construcor" do
expect(subject.name).to eq("Button")
end
end
it "can be set" do
subject.name("Button")
expect(subject.name).to eq("Button")
end
it "set returns the control" do
expect(subject.name("Button")).to eq(subject)
end
end
describe "#prefix_param" do
it "prefixes the param" do
subject.param(:name).prefix_param(:author)
expect(subject.param).to eq(:author__name)
end
end
describe "#valid?" do
it "true for param that the component supports" do
expect(subject.valid?).to eq(true)
end
context "without a param" do
let(:param) { nil }
it "is invalid" do
expect(subject.valid?).to eq(false)
expect(subject.errors.size).to eq(1)
expect(subject.errors[:param]).to eq(["can't be blank"])
end
end
end
end
shared_examples "a simple controls config" do
include_examples "a controls config"
let(:default_value) { "OK" }
let(:value_from_param) { "OK" }
it "#type" do
expect(subject.type).to eq(type)
end
describe "#valid?" do
context "without a default_value" do
let(:default_value) { nil }
it "is valid" do
expect(subject.valid?).to eq(true)
end
end
end
let(:expected_csf_value) { default_value }
let(:csf_arg_type_control_overrides) { {} }
let(:expected_csf_params) do
{
args: {
button_text: expected_csf_value,
},
argTypes: {
button_text: { control: { type: type }.merge(csf_arg_type_control_overrides), name: "Button Text" },
},
}
end
describe "#to_csf_params" do
it "creates params" do
expect(subject.to_csf_params).to eq(expected_csf_params)
end
context "with name" do
let(:name) { "Text" }
it "creates params" do
name_params = { argTypes: { button_text: { name: "Text" } } }
expect(subject.to_csf_params).to eq(expected_csf_params.deep_merge(name_params))
end
end
it "calls validate!" do
allow(subject).to receive(:validate!).and_raise ActiveModel::ValidationError.new(subject)
expect { subject.to_csf_params }.to raise_error ActiveModel::ValidationError
expect(subject).to have_received(:validate!).once
end
end
let(:param_value) { "OK" }
describe "#value_from_params" do
it "parses param_value" do
expect(subject.value_from_params(subject.param => param_value)).to eq(default_value)
end
it "parses nil param_value" do
expect(subject.value_from_params(subject.param => nil)).to eq(nil)
end
it "returns default_value if param is missing" do
expect(subject.value_from_params({})).to eq(default_value)
end
end
end
| 23.833333 | 108 | 0.638492 |
f8bf51155971200d301a28e2d650a61aa64b94cd | 15,447 | # frozen_string_literal: true
require_relative "../helpers/debug"
# rubocop:disable Metrics/CyclomaticComplexity
# rubocop:disable Metrics/AbcSize
# rubocop:disable Metrics/MethodLength
# rubocop:disable Metrics/ModuleLength
# rubocop:disable Metrics/ParameterLists
# rubocop:disable Metrics/PerceivedComplexity
module Chip8
module Components
# Chip-8s Instructions
module Instructions
def self.run(nibbles, display:, keyboard:, memory:, program_counter:, register:, stack:, clock:)
case nibbles["instruction"]
when 0x0
case nibbles["nnn"]
when 0x0EE # 00EE OK
instruction_00ee(nibbles, program_counter, stack)
when 0x0E0 # 00E0 CHECKED
instruction_00e0(display, nibbles, program_counter)
else
puts "Implementation not needed" # 0NNN: Execute machine language routine
end
when 0x1 # 1NNN OK
instruction_1nnn(nibbles, program_counter)
when 0x2 # 2NNN ~checked
instruction_2nnn(nibbles, program_counter, stack)
when 0x3 # 3XNN OK
instruction_3xnn(nibbles, program_counter, register)
when 0x4 # 4XNN OK
instruction_4xnn(nibbles, program_counter, register)
when 0x5 # 5XY0 OK
instruction_5xy0(nibbles, program_counter, register)
when 0x6 # 6XNN OK
instruction_6xnn(nibbles, program_counter, register)
when 0x7 # 7XNN OK
instruction_7xnn(nibbles, program_counter, register)
when 0x8
arithmetical_methods(nibbles, register, program_counter)
when 0x9 # 9XY0 OK
instruction_9xy0(nibbles, program_counter, register)
when 0xA
instruction_annn(nibbles, program_counter, register)
when 0xB
instruction_bnnn(nibbles, program_counter, register)
when 0xC
instruction_cxnn(nibbles, program_counter, register)
when 0xD
instruction_dxyn(display, memory, nibbles, program_counter, register)
when 0xE
case nibbles["nn"]
when 0x9E
instruction_ex9e(keyboard, nibbles, program_counter, register)
when 0xA1
instruction_exa1(keyboard, nibbles, program_counter, register)
else
puts "some strange instruction... 1nibble['nn']"
end
when 0xF
case nibbles["nn"]
when 0x07
instruction_fx07(clock, nibbles, program_counter, register)
when 0x0A # CHECKED
instruction_fx0a(keyboard, nibbles, program_counter, register)
when 0x15
instruction_fx15(clock, nibbles, program_counter, register)
when 0x18
instruction_fx18(clock, nibbles, program_counter, register)
when 0x1E
instruction_fx1e(nibbles, program_counter, register)
when 0x29
instruction_fx29(nibbles, program_counter, register)
when 0x33 # FX33 OK
instruction_fx33(memory, nibbles, program_counter, register)
when 0x55 # FX55 OK
instruction_fx55(memory, nibbles, program_counter, register)
when 0x65
instruction_fx65(memory, nibbles, program_counter, register)
else
puts "some strange instruction... 2nibble['nn'] #{nibbles.inspect}"
end
else
puts "do nothing (for now)"
end
end
      # Operations dispatched on the fourth nibble
def self.arithmetical_methods(nibbles, register, program_counter)
case nibbles["n"] # Nibble 4
when 0x0 # 0x8XY0 OK
Chip8::Helpers::Debug.debug("8XY0", "(set register VX from VY)", nibbles, program_counter: program_counter)
register.copy_variable_data_from_to(nibbles["y"], nibbles["x"])
when 0x1 # 0x8XY1 OK
Chip8::Helpers::Debug.debug("8XY1", "(Bitwise Logical OR)", nibbles, program_counter: program_counter)
register.bitwise_logical_or(nibbles["x"], nibbles["y"])
when 0x2 # 0x8XY2 OK
Chip8::Helpers::Debug.debug("8XY2", "(Logical AND)", nibbles, program_counter: program_counter)
register.bitwise_logical_and(nibbles["x"], nibbles["y"])
when 0x3 # 0x8XY3 OK
Chip8::Helpers::Debug.debug("8XY3", "(Logical XOR)", nibbles, program_counter: program_counter)
register.bitwise_logical_xor(nibbles["x"], nibbles["y"])
when 0x4 # 0x8XY4 OK
Chip8::Helpers::Debug.debug("8XY4", "(ADD => VX = VX+VY)", nibbles, program_counter: program_counter)
register.sum_variable_from_into(nibbles["y"], nibbles["x"])
when 0x5 # 0x8XY5 OK
Chip8::Helpers::Debug.debug("8XY5", "(SUBTRACT => VX = VX-VY)", nibbles, program_counter: program_counter)
register.subtract_variables(nibbles["x"], nibbles["y"], var_destination: nibbles["x"])
when 0x6 # 0x8XY6 OK
Chip8::Helpers::Debug.debug("8XY6", "(VY Shifting to RIGHT (becomes VX))", nibbles,
program_counter: program_counter)
register.bitwise_logical_shift(nibbles["x"], nibbles["y"], to_right: true)
when 0x7 # 8XY7 OK
Chip8::Helpers::Debug.debug("8XY7", "(SUBTRACT => VX = VY - VX)", nibbles, program_counter: program_counter)
register.subtract_variables(nibbles["y"], nibbles["x"], var_destination: nibbles["x"])
when 0xE # 0x8XYE OK
Chip8::Helpers::Debug.debug("8XYE", "(VY Shifting to LEFT (becomes VX))", nibbles,
program_counter: program_counter)
register.bitwise_logical_shift(nibbles["x"], nibbles["y"], to_right: false)
else
puts "some strange instruction..."
end
end
def self.instruction_00e0(display, nibbles, program_counter)
Chip8::Helpers::Debug.debug("00E0", "clear screen", nibbles, program_counter: program_counter)
display.clean_display
end
def self.instruction_00ee(nibbles, program_counter, stack)
Chip8::Helpers::Debug.debug("00EE", "(00EE 'popping' Subroutines)", nibbles, program_counter: program_counter)
popped_data = stack.pop_data
program_counter.update_index(popped_data)
end
def self.instruction_1nnn(nibbles, program_counter)
Chip8::Helpers::Debug.debug("1NNN", "(jump)", nibbles["nnn"]&.to_s(16), program_counter: program_counter)
program_counter.update_index(nibbles["nnn"])
end
def self.instruction_2nnn(nibbles, program_counter, stack)
Chip8::Helpers::Debug.debug("2NNN", "(2NNN 'pushing' Subroutines)", nibbles["nnn"]&.to_s(16),
program_counter: program_counter)
stack.push_data(program_counter.index)
program_counter.update_index(nibbles["nnn"])
end
def self.instruction_3xnn(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("3XNN", "(VX == NN => Skip)", nibbles, program_counter: program_counter)
variable_value = register.get_variable_in_position(nibbles["x"])
program_counter.increment_index_position if variable_value == nibbles["nn"]
end
def self.instruction_4xnn(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("4XNN", "(X != NN => Skip)", nibbles, program_counter: program_counter)
variable_value = register.get_variable_in_position(nibbles["x"])
program_counter.increment_index_position if variable_value != nibbles["nn"]
end
def self.instruction_5xy0(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("5XY0", "(vX == vY => Skip)", nibbles, program_counter: program_counter)
variable_x_value = register.get_variable_in_position(nibbles["x"])
variable_y_value = register.get_variable_in_position(nibbles["y"])
program_counter.increment_index_position if variable_x_value == variable_y_value
end
def self.instruction_7xnn(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("7XNN", "(add value to register VX)", nibbles, program_counter: program_counter)
register.add_to_variable(nibbles["x"], nibbles["nn"])
end
def self.instruction_9xy0(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("9XY0", "(Vx != Vy => SKIP)", nibbles, program_counter: program_counter)
variable_x_value = register.get_variable_in_position(nibbles["x"])
variable_y_value = register.get_variable_in_position(nibbles["y"])
program_counter.increment_index_position if variable_x_value != variable_y_value
end
def self.instruction_annn(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("ANNN", "(set index register I)", nibbles, program_counter: program_counter)
register.update_index(nibbles["nnn"])
end
def self.instruction_exa1(keyboard, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("EXA1", "(SKIP if Key NOT Pressed)", nibbles, program_counter: program_counter,
force_debug: false)
value_register_x = register.get_variable_in_position(nibbles["x"])
program_counter.increment_index_position if keyboard.key_pressed?(value_register_x) == false
end
def self.instruction_ex9e(keyboard, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("EX9E", "(SKIP if Key Pressed)", nibbles, program_counter: program_counter,
force_debug: false)
value_register_x = register.get_variable_in_position(nibbles["x"])
program_counter.increment_index_position if keyboard.key_pressed?(value_register_x)
end
def self.instruction_dxyn(display, memory, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("DXYN", "(display/draw)", nibbles, program_counter: program_counter)
new_display_data = register.new_display_data(nibbles["x"], nibbles["y"], nibbles["n"], memory,
display.display_buffer)
display.update_display_buffer(new_display_data)
end
def self.instruction_cxnn(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("CXNN", "(CXNN: Random)", nibbles, program_counter: program_counter)
register.set_random_value_into_variable(nibbles["x"], nibbles["nn"])
end
def self.instruction_bnnn(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("BNNN", "(set index register I[Jump with offset])", nibbles,
program_counter: program_counter)
program_counter.update_index(nibbles["nnn"], offset: register.get_variable_in_position(0x0))
end
def self.instruction_6xnn(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("6XNN", "(set register VX)", nibbles["nn"]&.to_s(16),
program_counter: program_counter)
register.set_variable_in_position(nibbles["x"], nibbles["nn"])
end
def self.instruction_fx07(clock, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX07", "VX = delay timer value", nibbles, program_counter: program_counter)
register.set_variable_in_position(nibbles["x"], clock.delay_timer)
end
def self.instruction_fx0a(keyboard, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX0A", "Get key pressed", nibbles, program_counter: program_counter)
key_pressed_pos = keyboard.position_of_key_pressed
if key_pressed_pos.nil?
program_counter.decrement_index_position
else
register.set_variable_in_position(nibbles["x"], key_pressed_pos)
end
end
def self.instruction_fx15(clock, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX15", "Sets delay timer = VX value", nibbles, program_counter: program_counter)
variable_x_value = register.get_variable_in_position(nibbles["x"])
clock.update_delay_timer(variable_x_value)
end
def self.instruction_fx18(clock, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX18", "Sets sound timer = VX Value", nibbles, program_counter: program_counter)
variable_x_value = register.get_variable_in_position(nibbles["x"])
clock.update_sound_timer(variable_x_value)
end
def self.instruction_fx1e(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX1E", "Add to index", nibbles, program_counter: program_counter)
variable_x_value = register.get_variable_in_position(nibbles["x"])
current_register_index = register.index
final_index = variable_x_value + current_register_index
register.update_index(final_index)
end
def self.instruction_fx29(nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX29", "Font character", nibbles, program_counter: program_counter)
font_index = Fonts::FONT_STARTING_ADDRESS
variable_x_value = (register.get_variable_in_position(nibbles["x"]) & 0xF) * 5
final_value = font_index + variable_x_value
register.update_index(final_value)
end
def self.instruction_fx33(memory, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX33", "Binary-coded decimal conversion", nibbles,
program_counter: program_counter)
convert_binary_coded_into_decimal_digits(nibbles, memory, register)
end
def self.instruction_fx55(memory, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX55", "", nibbles, program_counter: program_counter)
# Modern
x_value = nibbles["x"]
(0x0..x_value).each do |pos|
memory_position = register.index + pos
register_data = register.get_variable_in_position(pos)
memory.update_value(memory_position, register_data)
end
end
def self.instruction_fx65(memory, nibbles, program_counter, register)
Chip8::Helpers::Debug.debug("FX65", "", nibbles, program_counter: program_counter, force_debug: false)
# Modern
x_value = nibbles["x"]
(0x0..x_value).each do |pos|
memory_position = register.index + pos
memory_data = memory.access(memory_position)
register.set_variable_in_position(pos, memory_data)
end
end
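      # Worked example (added for illustration, not in the original source):
      # for a register value of 254 this stores 2 at I, 5 at I + 1 and 4 at I + 2.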
def self.convert_binary_coded_into_decimal_digits(nibbles, memory, register)
register_index = register.index
register_value = register.get_variable_in_position(nibbles["x"])
hundredth_val = register_value / 100
decimal_val = (register_value - (hundredth_val * 100)) / 10
unit_val = (register_value - ((hundredth_val * 100) + (decimal_val * 10)))
memory.update_value(register_index, hundredth_val)
memory.update_value(register_index + 1, decimal_val)
memory.update_value(register_index + 2, unit_val)
end
end
end
end
# rubocop:enable Metrics/CyclomaticComplexity
# rubocop:enable Metrics/AbcSize
# rubocop:enable Metrics/MethodLength
# rubocop:enable Metrics/ModuleLength
# rubocop:enable Metrics/ParameterLists
# rubocop:enable Metrics/PerceivedComplexity
| 47.529231 | 118 | 0.66822 |
874596812b524847a801db76e1a4599bcb6649e1 | 2,395 | class Stolon < Formula
desc "Cloud native PostgreSQL manager for high availability"
homepage "https://github.com/sorintlab/stolon"
url "https://github.com/sorintlab/stolon.git",
tag: "v0.16.0",
revision: "920fe4b83c158a6fe496dd6427a3715b84c0b4e2"
license "Apache-2.0"
bottle do
cellar :any_skip_relocation
sha256 "6ddbacf93e9ab9f5bbf6e60011f398d5718d7d1ed9f66134e517d4caeeb9e852" => :big_sur
sha256 "8bbf533b32cba6f798e17aad03e6268c6fe66be84cac9801226217e40c9cc0a9" => :catalina
sha256 "fac96dde0102d1b0b7cd6e6dad8cb3bb3b7dfdee16f0fe98cbb439e4602a48e5" => :mojave
sha256 "3f82acb97ffd0f586b5c6c643237205f0df3c5d03ed2cbd01143aba404d0e1b7" => :high_sierra
sha256 "f9a42e2d84e9a0a7e6edbe81878b02e25f8e1ce62129e3e45b7b696bbcdbe17c" => :x86_64_linux
end
depends_on "go" => :build
depends_on "consul" => :test
depends_on "postgresql"
def install
system "go", "build", "-ldflags", "-s -w -X github.com/sorintlab/stolon/cmd.Version=#{version}",
"-trimpath", "-o", bin/"stolonctl", "./cmd/stolonctl"
system "go", "build", "-ldflags", "-s -w -X github.com/sorintlab/stolon/cmd.Version=#{version}",
"-trimpath", "-o", bin/"stolon-keeper", "./cmd/keeper"
system "go", "build", "-ldflags", "-s -w -X github.com/sorintlab/stolon/cmd.Version=#{version}",
"-trimpath", "-o", bin/"stolon-sentinel", "./cmd/sentinel"
system "go", "build", "-ldflags", "-s -w -X github.com/sorintlab/stolon/cmd.Version=#{version}",
"-trimpath", "-o", bin/"stolon-proxy", "./cmd/proxy"
prefix.install_metafiles
end
test do
pid = fork do
exec "consul", "agent", "-dev"
end
sleep 2
assert_match "stolonctl version #{version}",
shell_output("#{bin}/stolonctl version 2>&1")
assert_match "nil cluster data: <nil>",
shell_output("#{bin}/stolonctl status --cluster-name test --store-backend consul 2>&1", 1)
assert_match "stolon-keeper version #{version}",
shell_output("#{bin}/stolon-keeper --version 2>&1")
assert_match "stolon-sentinel version #{version}",
shell_output("#{bin}/stolon-sentinel --version 2>&1")
assert_match "stolon-proxy version #{version}",
shell_output("#{bin}/stolon-proxy --version 2>&1")
Process.kill("TERM", pid)
Process.wait(pid)
end
end
| 43.545455 | 100 | 0.664718 |
7a8d611aff56c10318f3fa52c71f77ee864266fa | 1,080 | # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'omniauth-healthvault/version'
Gem::Specification.new do |gem|
gem.name = "omniauth-healthvault"
gem.version = Omniauth::Healthvault::VERSION
gem.authors = ["Andrey Voronkov"]
gem.email = ["[email protected]"]
gem.description = %q{This is the unofficial OmniAuth strategy for authenticating to Microsoft HealthVault.}
gem.summary = %q{This is the unofficial OmniAuth strategy for authenticating to Microsoft HealthVault.}
gem.homepage = "https://github.com/Antiarchitect/omniauth-healthvault"
gem.files = `git ls-files`.split($/)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ["lib"]
gem.add_dependency 'builder', '~> 3.0'
gem.add_dependency 'faraday', '~> 0.8'
gem.add_dependency 'multi_xml', '~> 0.5'
gem.add_dependency 'omniauth', '~> 1.2'
end
| 45 | 111 | 0.675 |
1d6b3a777431872629077ec3d79c9590204ff066 | 6,791 | require 'i18n'
module Lit
class I18nBackend
include I18n::Backend::Simple::Implementation
include I18n::Backend::Pluralization
attr_reader :cache
def initialize(cache)
@cache = cache
@available_locales_cache = nil
@translations = {}
reserved_keys = I18n.const_get(:RESERVED_KEYS) + %i[lit_default_copy]
I18n.send(:remove_const, :RESERVED_KEYS)
I18n.const_set(:RESERVED_KEYS, reserved_keys.freeze)
end
def translate(locale, key, options = {})
options[:lit_default_copy] = options[:default].dup if can_dup_default(options)
content = super(locale, key, options)
if Lit.all_translations_are_html_safe && content.respond_to?(:html_safe)
content.html_safe
else
content
end
end
def available_locales
return @available_locales_cache unless @available_locales_cache.nil?
@locales ||= ::Rails.configuration.i18n.available_locales
if @locales && [email protected]?
@available_locales_cache = @locales.map(&:to_sym)
else
@available_locales_cache = Lit::Locale.ordered.visible.map { |l| l.locale.to_sym }
end
@available_locales_cache
end
def reset_available_locales_cache
@available_locales_cache = nil
end
# Stores the given translations.
#
# @param [String] locale the locale (ie "en") to store translations for
# @param [Hash] data nested key-value pairs to be added as blurbs
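    #
    # @example Illustrative only (the locale and keys here are assumptions):
    #   I18n.backend.store_translations(:en, { home: { title: "Welcome" } })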
def store_translations(locale, data, options = {})
super
ActiveRecord::Base.transaction do
store_item(locale, data)
end if store_items? && valid_locale?(locale)
end
private
def can_dup_default(options = {})
return false unless options.key?(:default)
return true if options[:default].is_a?(String)
return true if options[:default].is_a?(Array) && \
(options[:default].first.is_a?(String) || \
options[:default].first.is_a?(Symbol) || \
options[:default].first.is_a?(Array))
false
end
def lookup(locale, key, scope = [], options = {})
init_translations unless initialized?
parts = I18n.normalize_keys(locale, key, scope, options[:separator])
key_with_locale = parts.join('.')
# check in cache or in simple backend
content = @cache[key_with_locale] || super
return content if parts.size <= 1
if content.nil? && should_cache?(key_with_locale, options)
new_content = @cache.init_key_with_value(key_with_locale, content)
content = new_content if content.nil? # Content can change when Lit.humanize is true for example
        # so there is no content in the cache - this can happen e.g. when we're
        # doing a fallback to an already existing language
if content.nil?
# check if default was provided
if options[:lit_default_copy].present?
# default most likely will be an array
if options[:lit_default_copy].is_a?(Array)
default = options[:lit_default_copy].map do |key_or_value|
if key_or_value.is_a?(Symbol)
I18n.normalize_keys(nil, key_or_value.to_s, options[:scope], options[:separator]).join('.').to_sym
else
key_or_value
end
end
default = default.first if default.is_a?(Array)
else
default = options[:lit_default_copy]
end
content = default
end
# if we have content now, let's store it in cache
if content.present?
@cache[key_with_locale] = content
content = @cache[key_with_locale]
end
# content might be nil - default value passed to cache was in fact
# useless.
# if content is still nil, we may try to humanize it. Rails will do
# it anyway if we return nil, but then it will wrap it also in
# translation_missing span.
# Humanizing key should be last resort
if content.nil? && Lit.humanize_key && key.match(Lit.humanize_key_ignored).nil?
content = key.to_s.split('.').last.humanize
if content.present?
@cache[key_with_locale] = content
content = @cache[key_with_locale]
end
end
end
end
# return translated content
content
end
def store_item(locale, data, scope = [], startup_process = false)
key = ([locale] + scope).join('.')
if data.respond_to?(:to_hash)
# ActiveRecord::Base.transaction do
data.to_hash.each do |k, value|
store_item(locale, value, scope + [k], startup_process)
end
# end
elsif data.respond_to?(:to_str) || data.is_a?(Array)
key = ([locale] + scope).join('.')
return if startup_process && @cache.keys.member?(key) && Lit.ignore_yaml_on_startup
@cache.update_locale(key, data, data.is_a?(Array), startup_process)
elsif data.nil?
return if startup_process
key = ([locale] + scope).join('.')
@cache.delete_locale(key)
end
end
def load_translations_to_cache
ActiveRecord::Base.transaction do
(@translations || {}).each do |locale, data|
store_item(locale, data, [], true) if valid_locale?(locale)
end
end
end
def init_translations
# Load all translations from *.yml, *.rb files to @translations variable.
      # We don't want to store translations in the lit cache just yet. We'll do
      # it with `load_translations_to_cache` once all translations from yml (rb)
      # files have been loaded.
without_store_items { load_translations }
# load translations from database to cache
@cache.load_all_translations
# load translations from @translations to cache
load_translations_to_cache
@initialized = true
end
def without_store_items
@store_items = false
yield
ensure
@store_items = true
end
def store_items?
!instance_variable_defined?(:@store_items) || @store_items
end
def valid_locale?(locale)
@locales ||= ::Rails.configuration.i18n.available_locales
!@locales || @locales.map(&:to_s).include?(locale.to_s)
end
def is_ignored_key(key_without_locale)
Lit.ignored_keys.any?{ |k| key_without_locale.start_with?(k) }
end
def should_cache?(key_with_locale, options)
if @cache.has_key?(key_with_locale)
return false unless options[:default]
end
_, key_without_locale = ::Lit::Cache.split_key(key_with_locale)
return false if is_ignored_key(key_without_locale)
true
end
end
end
| 35.005155 | 116 | 0.629657 |
62a9f594d5abd2f049fc94b6ca1bee0f271397b4 | 1,181 | class Bard::CLI < Thor
class Data < Struct.new(:bard, :from, :to)
def call
if to == "local"
data_pull_db from.to_sym
data_pull_assets from.to_sym
end
if from == "local"
data_push_db to.to_sym
data_push_assets to.to_sym
end
end
private
def data_pull_db server
bard.instance_eval do
run_crucial ssh_command(server, "bin/rake db:dump && gzip -9f db/data.sql")
copy :from, server, "db/data.sql.gz"
run_crucial "gunzip -f db/data.sql.gz && bin/rake db:load"
end
end
def data_push_db server
bard.instance_eval do
run_crucial "bin/rake db:dump && gzip -9f db/data.sql"
copy :to, server, "db/data.sql.gz"
run_crucial ssh_command(server, "gunzip -f db/data.sql.gz && bin/rake db:load")
end
end
def data_pull_assets server
bard.instance_eval do
@config.data.each do |path|
rsync :from, server, path
end
end
end
def data_push_assets server
bard.instance_eval do
@config.data.each do |path|
rsync :to, server, path
end
end
end
end
end
| 23.62 | 87 | 0.595258 |
264832f83593ff19bcffbb26f53f91ed15aec136 | 58 | # frozen_string_literal: true
WillPaginate.per_page = 15
| 14.5 | 29 | 0.810345 |
39769f7e0e7185f8baf3d23dcd2d4d71e62e6312 | 3,008 | ###############################
# to run use:
# $ ruby girls.rb
$LOAD_PATH.unshift( "../cryptopunks/lib" )
require 'cryptopunks'
FEMALE3 = Punks::Sheet.find_by( name: 'Female 3', size: 's' )
COWBOY_HAT = Punks::Sheet.find_by( name: 'Cowboy Hat', gender: 'f', size: 's' )
HEADBAND = Punks::Sheet.find_by( name: 'Headband', gender: 'f', size: 's' )
BIG_SHADES = Punks::Sheet.find_by( name: 'Big Shades', gender: 'f', size: 's' )
PIGTAILS = Punks::Sheet.find_by( name: 'Pigtails', gender: 'f', size: 's' )
SMILE3 = Punks::Sheet.find_by( name: 'Smile 3', gender: 'f', size: 's' )
AMBER = '#ffbf00' # background colors
BLUE = '#638596'
####
# girl 1 - sophia
punk = Image.new( 24, 24, AMBER )
punk.compose!( FEMALE3 )
punk.compose!( PIGTAILS )
punk.save( 'i/sophia.png' )
punk.zoom(4).save( 'i/[email protected]' )
punk.compose!( SMILE3 )
punk.save( 'i/sophia_(2).png' )
punk.zoom(4).save( 'i/sophia_(2)@4x.png' )
punk = punk.change_colors( { AMBER => BLUE })
punk.save( 'i/sophia_(3).png' )
punk.zoom(4).save( 'i/sophia_(3)@4x.png' )
####
# girl 2 - chloe
punk = Image.new( 24, 24, AMBER )
punk.compose!( FEMALE3 )
punk.compose!( HEADBAND )
punk.compose!( BIG_SHADES )
punk.save( 'i/chloe.png' )
punk.zoom(4).save( 'i/[email protected]' )
punk.compose!( SMILE3 )
punk.save( 'i/chloe_(2).png' )
punk.zoom(4).save( 'i/chloe_(2)@4x.png' )
punk = punk.change_colors( { AMBER => BLUE })
punk.save( 'i/chloe_(3).png' )
punk.zoom(4).save( 'i/chloe_(3)@4x.png' )
####
# girl 3 - veda
punk = Image.new( 24, 24, AMBER )
punk.compose!( FEMALE3 )
punk.compose!( COWBOY_HAT )
punk.save( 'i/veda.png' )
punk.zoom(4).save( 'i/[email protected]' )
punk.compose!( SMILE3 )
punk.save( 'i/veda_(2).png' )
punk.zoom(4).save( 'i/veda_(2)@4x.png' )
## change background color to "classic" blue-ish
punk = punk.change_colors( { AMBER => BLUE })
punk.save( 'i/veda_(3).png' )
punk.zoom(4).save( 'i/veda_(3)@4x.png' )
####
# Bonus - smiling girls with hoodies
HOODIE = Punks::Sheet.find_by( name: 'Hoodie', gender: 'f', size: 's' )
punk = Image.new( 24, 24, BLUE )
punk.compose!( FEMALE3 )
punk.compose!( HOODIE )
punk.save( 'i/girl.png' )
punk.zoom(4).save( 'i/[email protected]' )
punk.compose!( SMILE3 )
punk.save( 'i/girl_(2).png' )
punk.zoom(4).save( 'i/girl_(2)@4x.png' )
GRAY = '#555555' # hoodie colors
PINK = '#FFC0CB'
punk = Image.new( 24, 24, BLUE )
punk.compose!( FEMALE3 )
punk.compose!( HOODIE.change_colors( GRAY => PINK ) )
punk.compose!( SMILE3 )
punk.save( 'i/girl_(3).png' )
punk.zoom(4).save( 'i/girl_(3)@4x.png' )
punk.zoom(8).save( 'i/girl_(3)@8x.png' )
#####
# save the attributes for inline display in quote
punk = Image.new( 24, 24 )
punk.compose!( FEMALE3 )
punk.compose!( HOODIE )
punk.save( 'i/hoodie.png' )
punk = Image.new( 24, 24 )
punk.compose!( FEMALE3 )
punk.compose!( COWBOY_HAT )
punk.save( 'i/cowboy_hat.png' )
punk = Image.new( 24, 24 )
punk.compose!( FEMALE3 )
punk.compose!( SMILE3 )
punk.save( 'i/smile3.png' )
puts "bye"
| 21.485714 | 79 | 0.622008 |
012f460953761846493bc2cc882c9f2711027e1e | 6,080 | class SampleTypesController < ApplicationController
respond_to :html, :json
include Seek::UploadHandling::DataUpload
include Seek::IndexPager
before_filter :samples_enabled?
before_filter :find_sample_type, only: [:show, :edit, :update, :destroy, :template_details]
before_filter :check_no_created_samples, only: [:destroy]
before_filter :find_assets, only: [:index]
before_filter :auth_to_create, only: [:new, :create]
before_filter :project_membership_required, only: [:create, :new, :select, :filter_for_select]
before_filter :authorize_requested_sample_type, except: [:index, :new, :create]
  # GET /sample_types/1
# GET /sample_types/1.json
def show
respond_to do |format|
format.html
# format.json {render json: @sample_type}
format.json {render json: :not_implemented, status: :not_implemented }
end
end
# GET /sample_types/new
# GET /sample_types/new.json
def new
@tab = 'manual'
@sample_type = SampleType.new
@sample_type.sample_attributes.build(is_title: true, required: true) # Initial attribute
respond_with(@sample_type)
end
def create_from_template
build_sample_type_from_template
@sample_type.contributor = User.current_user.person
@tab = 'from-template'
respond_to do |format|
if @sample_type.errors.empty? && @sample_type.save
format.html { redirect_to edit_sample_type_path(@sample_type), notice: 'Sample type was successfully created.' }
else
@sample_type.content_blob.destroy if @sample_type.content_blob.persisted?
format.html { render action: 'new' }
end
end
end
# GET /sample_types/1/edit
def edit
respond_with(@sample_type)
end
# POST /sample_types
# POST /sample_types.json
def create
    # because setting tags does an unfortunate save, these need to be updated separately to avoid a 'no permission to edit' error
tags = params[:sample_type].delete(:tags)
@sample_type = SampleType.new(sample_type_params)
@sample_type.contributor = User.current_user.person
# removes controlled vocabularies or linked seek samples where the type may differ
@sample_type.resolve_inconsistencies
@tab = 'manual'
respond_to do |format|
if @sample_type.save
@sample_type.update_attribute(:tags, tags)
format.html { redirect_to @sample_type, notice: 'Sample type was successfully created.' }
format.json { render json: @sample_type, status: :created, location: @sample_type}
else
format.html { render action: 'new' }
format.json { render json: @sample_type.errors, status: :unprocessable_entity}
end
end
end
# PUT /sample_types/1
# PUT /sample_types/1.json
def update
@sample_type.update_attributes(sample_type_params)
@sample_type.resolve_inconsistencies
flash[:notice] = 'Sample type was successfully updated.' if @sample_type.save
respond_to do |format|
format.html { respond_with(@sample_type) }
format.json {render json: @sample_type}
end
end
# DELETE /sample_types/1
# DELETE /sample_types/1.json
def destroy
if @sample_type.can_delete? && @sample_type.destroy
flash[:notice] = 'The sample type was successfully deleted.'
else
flash[:notice] = 'It was not possible to delete the sample type.'
end
respond_with(@sample_type, location: sample_types_path)
end
def template_details
render partial: 'template'
end
  # currently just for selecting a sample type when creating a sample, but easily has potential as a general browser
def select
respond_with(@sample_types)
end
# used for ajax call to get the filtered sample types for selection
def filter_for_select
@sample_types = SampleType.joins(:projects).where('projects.id' => params[:projects]).uniq.to_a
unless params[:tags].blank?
@sample_types.select! do |sample_type|
if params[:exclusive_tags] == '1'
(params[:tags] - sample_type.annotations_as_text_array).empty?
else
(sample_type.annotations_as_text_array & params[:tags]).any?
end
end
end
render partial: 'sample_types/select/filtered_sample_types'
end
private
def sample_type_params
params.require(:sample_type).permit(:title, :description, :tags,
{project_ids: [],
sample_attributes_attributes: [:id, :title, :pos, :required, :is_title,
:sample_attribute_type_id,
:sample_controlled_vocab_id,
:linked_sample_type_id,
:unit_id, :_destroy]})
end
def build_sample_type_from_template
@sample_type = SampleType.new(sample_type_params)
@sample_type.uploaded_template = true
handle_upload_data
    @sample_type.content_blob.save! # Needs to be saved so the spreadsheet can be read from disk
@sample_type.build_attributes_from_template
end
private
def find_sample_type
@sample_type = SampleType.find(params[:id])
end
  # intercepts the standard 'find_and_authorize_requested_item' to perform an additional check for a referring_sample_id
def authorize_requested_sample_type
privilege = Seek::Permissions::Translator.translate(action_name)
return if privilege.nil?
if privilege == :view && params[:referring_sample_id].present?
@sample_type.can_view?(User.current_user,Sample.find_by_id(params[:referring_sample_id])) || find_and_authorize_requested_item
else
find_and_authorize_requested_item
end
end
def check_no_created_samples
if (count = @sample_type.samples.count) > 0
flash[:error] = "Cannot #{action_name} this sample type - There are #{count} samples using it."
redirect_to @sample_type
end
end
end
| 34.157303 | 132 | 0.680263 |
b9c537b30cb642215e7980d6521aacbd0e4be40a | 1,934 | # frozen_string_literal: true
RSpec.describe Miteru::Feeds do
subject { described_class }
describe "#breakdown" do
context "when given an url without path" do
it "returns an Array (length == 1)" do
results = subject.new.breakdown("http://test.com")
expect(results).to be_an(Array)
expect(results.length).to eq(1)
end
end
context "when given an url with path" do
context "when disabling directory_traveling" do
it "returns an Array (length == 1)" do
results = subject.new.breakdown("http://test.com/test/test/index.html")
expect(results).to be_an(Array)
expect(results.length).to eq(1)
expect(results.first).to eq("http://test.com")
end
end
context "when enabling directory_traveling" do
before do
allow(Miteru.configuration).to receive(:directory_traveling?).and_return(true)
end
it do
results = subject.new.breakdown("http://test.com/test/test/index.html")
expect(results).to eq(["http://test.com", "http://test.com/test", "http://test.com/test/test"])
end
end
end
end
describe "#suspicious_urls" do
let(:url) { "http://sameurl.com" }
before do
allow(Miteru::Feeds::UrlScan).to receive_message_chain(:new, :urls).and_return([url])
allow(Miteru::Feeds::UrlScanPro).to receive_message_chain(:new, :urls).and_return([url])
allow(Miteru::Feeds::Ayashige).to receive_message_chain(:new, :urls).and_return([url])
allow(Miteru::Feeds::PhishingDatabase).to receive_message_chain(:new, :urls).and_return([url])
allow(Miteru::Feeds::PhishStats).to receive_message_chain(:new, :urls).and_return([url])
end
it "returns an Array without duplicated" do
results = subject.new.suspicious_urls
expect(results).to be_an(Array)
expect(results.length).to eq(1)
end
end
end
| 34.535714 | 105 | 0.650465 |
e951f7b4410fa8e0c30b2f110b2f5d2039e182b6 | 765 | class UsersController < ApplicationController
before_action :require_logged_in_user, only: [:edit, :update]
def new
@user = User.new
end
def create
@user = User.new(user_params)
if @user.save
flash[:success] = 'Usuario cadastrado com sucesso'
redirect_to root_url
else
render 'new'
end
end
def edit
end
def update
if current_user.update(user_params)
flash[:success] = 'Dados atualizados!'
redirect_to contacts_url
else
render 'edit'
end
end
private
def user_params
params.require(:user).permit(:email, :name, :password, :password_confirmation)
end
end
| 20.131579 | 90 | 0.571242 |
edd3fe13e954e106209755279312d8bdd02e0e39 | 417 | # frozen_string_literal: true
source 'https://rubygems.org'
gemspec
group :ci do
gem 'simplecov', '~> 0.16.0', require: false
end
group :development do
gem 'bump', '~> 0.6.0', require: false
gem 'bundler', '~> 1.8', require: false
end
group :ci, :development do
gem 'rake', '~> 12.0', require: false
gem 'rspec', '~> 3.0', require: false
gem 'rubocop', '~> 0.59.0', require: false
end
| 19.857143 | 46 | 0.609113 |
08dff2ec3086dc8ff8d37d78868e07bf907821ca | 3,571 | TaxonWorks::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both thread web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
#
# TODO: find out the consequences and resolve autoload/eager_loadness
# at present this resolved TW-261
# config.eager_load = true
config.eager_load = false
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_files = false # true # was false until oct/2014
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Load local settings
Settings.load_from_settings_file(config, :production)
require 'taxonworks'
require 'taxonworks_autoload'
end
| 38.815217 | 104 | 0.755531 |
183073bc13f2d0fd868aebcfb1eb9a7db61b92a7 | 576 | require_relative './helper.rb'
describe 'adapter_cassandra_with_default_expires', isolate: true, retry: 3, adapter: :Cassandra do
let(:t_res) { 1 }
let(:min_ttl) { 2 }
include_context :global_cassandra_cluster
moneta_build do
Moneta::Adapters::Cassandra.new(
cluster: cluster,
keyspace: 'adapter_cassandra_with_default_expires',
expires: min_ttl,
create_keyspace: { durable_writes: false })
end
moneta_specs ADAPTER_SPECS.without_increment.without_create.with_native_expires.with_default_expires.with_values(:nil).with_each_key
end
| 30.315789 | 134 | 0.763889 |
79454a75eca51174ddfc9c3af0bf2930d77949ec | 691 | # frozen_string_literal: true
class DigitalProject < ApplicationRecord
include Csv
serialize :author_first_name, Array
serialize :author_last_name, Array
has_and_belongs_to_many :colleges
validates :author_first_name, presence: true
validates :author_last_name, presence: true
validates :work_title, presence: true
def self.to_csv
attributes = %w[submitter_id work_title other_title authors colleges uc_department name_of_site name_of_affiliated_organization publication_date version url doi]
CSV.generate(headers: true) do |csv|
csv << attributes
all.each do |item|
csv << attributes.map { |attr| item.send(attr) }
end
end
end
end
| 30.043478 | 165 | 0.756874 |
e8b2ffe1f14d824f76e035a5b709fa4d8f69e271 | 3,554 | # frozen_string_literal: true
module QA
RSpec.describe 'Create' do
describe 'Version control for project snippets' do
let(:new_file) { 'new_snippet_file' }
let(:changed_content) { 'changes' }
let(:commit_message) { 'Changes to snippets' }
let(:added_content) { 'updated ' }
let(:branch_name) { snippet.project.default_branch }
let(:snippet) do
Resource::ProjectSnippet.fabricate! do |snippet|
snippet.file_name = new_file
end
end
let(:ssh_key) do
Resource::SSHKey.fabricate_via_api! do |resource|
resource.title = "my key title #{Time.now.to_f}"
end
end
let(:repository_uri_http) do
snippet.visit!
Page::Dashboard::Snippet::Show.perform(&:get_repository_uri_http)
end
let(:repository_uri_ssh) do
ssh_key
snippet.visit!
Page::Dashboard::Snippet::Show.perform(&:get_repository_uri_ssh)
end
before do
Flow::Login.sign_in
end
after do
ssh_key.remove_via_api!
end
it 'clones, pushes, and pulls a project snippet over HTTP, edits via UI', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/1359' do
Resource::Repository::Push.fabricate! do |push|
push.repository_http_uri = repository_uri_http
push.file_name = new_file
push.file_content = changed_content
push.commit_message = commit_message
push.new_branch = false
end
page.refresh
verify_changes_in_ui
Page::Dashboard::Snippet::Show.perform(&:click_edit_button)
Page::Dashboard::Snippet::Edit.perform do |snippet|
snippet.add_to_file_content(added_content)
snippet.save_changes
end
Git::Repository.perform do |repository|
repository.init_repository
repository.pull(repository_uri_http, branch_name)
expect(repository.commits.size).to eq 3
expect(repository.commits.first).to include 'Update snippet'
expect(repository.file_content(new_file)).to include "#{added_content}#{changed_content}"
end
snippet.remove_via_api!
end
it 'clones, pushes, and pulls a project snippet over SSH, deletes via UI', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/1360' do
Resource::Repository::Push.fabricate! do |push|
push.repository_ssh_uri = repository_uri_ssh
push.ssh_key = ssh_key
push.file_name = new_file
push.file_content = changed_content
push.commit_message = commit_message
push.new_branch = false
end
page.refresh
verify_changes_in_ui
Page::Dashboard::Snippet::Show.perform(&:click_delete_button)
# attempt to pull a deleted snippet, get a missing repository error
Git::Repository.perform do |repository|
repository.uri = repository_uri_ssh
repository.use_ssh_key(ssh_key)
repository.init_repository
expect { repository.pull(repository_uri_ssh, branch_name) }
.to raise_error(QA::Support::Run::CommandError, /fatal: Could not read from remote repository\./)
end
end
def verify_changes_in_ui
Page::Dashboard::Snippet::Show.perform do |snippet|
expect(snippet).to have_file_name(new_file)
expect(snippet).to have_file_content(changed_content)
end
end
end
end
end
| 32.907407 | 169 | 0.650816 |
e213bb7e12b9965cd0fd4fbb00ba6dede44d5309 | 1,291 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = 'aws-sdk-ec2instanceconnect'
spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
spec.summary = 'AWS SDK for Ruby - EC2 Instance Connect'
spec.description = 'Official AWS Ruby gem for AWS EC2 Instance Connect (EC2 Instance Connect). This gem is part of the AWS SDK for Ruby.'
spec.author = 'Amazon Web Services'
spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
spec.license = 'Apache-2.0'
spec.email = ['[email protected]']
spec.require_paths = ['lib']
spec.files = Dir['LICENSE.txt', 'CHANGELOG.md', 'VERSION', 'lib/**/*.rb']
spec.metadata = {
'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-ec2instanceconnect',
'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-ec2instanceconnect/CHANGELOG.md'
}
spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.112.0')
spec.add_dependency('aws-sigv4', '~> 1.1')
end
| 40.34375 | 141 | 0.676995 |
03b7140266036e3ea8a30d03f42f8c99989f0ae6 | 2,951 | module Fog
module Storage
class AWS
class Real
require 'fog/aws/requests/storage/acl_utils'
# Change access control list for an S3 object
#
# @param [String] bucket_name name of bucket to modify
# @param [String] object_name name of object to get access control list for
# @param [Hash] acl
# * Owner [Hash]
# * ID [String] id of owner
# * DisplayName [String] display name of owner
# * AccessControlList [Array]
# * Grantee [Hash]
# * DisplayName [String] Display name of grantee
# * ID [String] Id of grantee
# or
# * EmailAddress [String] Email address of grantee
# or
# * URI [String] URI of group to grant access for
# * Permission [String] Permission, in [FULL_CONTROL, WRITE, WRITE_ACP, READ, READ_ACP]
# @param [String] acl Permissions, must be in ['private', 'public-read', 'public-read-write', 'authenticated-read']
# @param [Hash] options
# @option options [String] versionId specify a particular version to retrieve
#
# @see http://docs.amazonwebservices.com/AmazonS3/latest/API/RESTObjectPUTacl.html
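        #
        # @example Canned ACL usage (illustrative only; the connection details
        #   below are assumptions, not part of the original documentation)
        #   storage = Fog::Storage.new(:provider => 'AWS',
        #                              :aws_access_key_id => 'KEY',
        #                              :aws_secret_access_key => 'SECRET')
        #   storage.put_object_acl('my_bucket', 'my_object', 'public-read')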
def put_object_acl(bucket_name, object_name, acl, options = {})
query = {'acl' => nil}
if version_id = options.delete('versionId')
query['versionId'] = version_id
end
data = ""
headers = {}
if acl.is_a?(Hash)
data = Fog::Storage::AWS.hash_to_acl(acl)
else
if !['private', 'public-read', 'public-read-write', 'authenticated-read'].include?(acl)
raise Excon::Errors::BadRequest.new('invalid x-amz-acl')
end
headers['x-amz-acl'] = acl
end
headers['Content-MD5'] = Base64.encode64(Digest::MD5.digest(data)).strip
headers['Content-Type'] = 'application/json'
headers['Date'] = Fog::Time.now.to_date_header
request({
:body => data,
:expects => 200,
:headers => headers,
:bucket_name => bucket_name,
:object_name => object_name,
:method => 'PUT',
:query => query
})
end
end
class Mock
def put_object_acl(bucket_name, object_name, acl, options = {})
if acl.is_a?(Hash)
self.data[:acls][:object][bucket_name][object_name] = Fog::Storage::AWS.hash_to_acl(acl)
else
if !['private', 'public-read', 'public-read-write', 'authenticated-read'].include?(acl)
raise Excon::Errors::BadRequest.new('invalid x-amz-acl')
end
self.data[:acls][:object][bucket_name][object_name] = acl
end
end
end
end
end
end
| 36.432099 | 123 | 0.540834 |
ed6ba221ae92bdbac5bfa4978d0aaaacf25c73e9 | 562 | require 'waiting/constant_wait_strategy'
require 'waiting/exponential_backoff_wait_strategy'
require 'waiting/incremental_wait_strategy'
require 'waiting/random_wait_strategy'
module Waiting
module Strategies
def build(type)
strategy = case type
when :constant
ConstantWaitStrategy
when :incremental
IncrementalWaitStrategy
when :random
RandomWaitStrategy
when :exponential
ExponentialBackoffWaitStrategy
else
ConstantWaitStrategy
end
strategy.new
end
end
end
| 21.615385 | 51 | 0.717082 |
1c32e862120deea6e08efaa47025b57ee3f47d7a | 944 | Pod::Spec.new do |spec|
spec.name = 'TradPlusMaioAdapter'
spec.version = '7.6.0'
spec.summary = 'TradPlus Maio Adapter'
spec.description = <<-DESC
Maio Adapter used for mediation with the TradPlusAd SDK
DESC
spec.homepage = 'https://github.com/tradplus/pod_tradplus_sdk_adapters'
spec.license = { :type => 'Commercial License', :text => 'Copyright 2022 Tradplus Corp. All rights reserved.' }
spec.author = { 'tradplus' => '[email protected]' }
spec.source = { :http => 'https://github.com/tradplus/pod_tradplus_sdk_adapters/raw/main/TradPlusMaioAdapter/TradPlusMaioAdapter-7.6.0.zip', :type => 'zip' }
spec.platform = :ios, '9.0'
spec.ios.deployment_target = '9.0'
spec.requires_arc = true
spec.frameworks = 'SystemConfiguration', 'CoreGraphics','Foundation','UIKit'
spec.user_target_xcconfig = {'OTHER_LDFLAGS' => ['-lObjC']}
spec.vendored_frameworks = 'TradPlusMaioAdapter-7.6.0/MaioAdapter.framework'
end
| 49.684211 | 159 | 0.722458 |
1cbf69a7b9635ab57ae6d9c92f7e49fe9d3ad469 | 265 | class Profile < SimpleDB::Base
set_domain Panda::Config[:sdb_profiles_domain]
properties :title, :player, :container, :width, :height, :video_codec, :video_bitrate, :fps, :audio_codec, :audio_bitrate, :audio_sample_rate, :position, :updated_at, :created_at
end
| 53 | 180 | 0.769811 |
ffaa7c0229294b4743bc7f03a1240e83630afd1b | 2,415 | # -*- encoding: utf-8 -*-
# stub: json 2.1.0 ruby lib
# stub: ext/json/ext/generator/extconf.rb ext/json/ext/parser/extconf.rb ext/json/extconf.rb
Gem::Specification.new do |s|
s.name = "json".freeze
s.version = "2.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Florian Frank".freeze]
s.date = "2017-04-18"
s.description = "This is a JSON implementation as a Ruby extension in C.".freeze
s.email = "[email protected]".freeze
s.extensions = ["ext/json/ext/generator/extconf.rb".freeze, "ext/json/ext/parser/extconf.rb".freeze, "ext/json/extconf.rb".freeze]
s.extra_rdoc_files = ["README.md".freeze]
s.files = ["./tests/test_helper.rb".freeze, "README.md".freeze, "ext/json/ext/generator/extconf.rb".freeze, "ext/json/ext/parser/extconf.rb".freeze, "ext/json/extconf.rb".freeze, "json.rb".freeze, "json/add/bigdecimal.rb".freeze, "json/add/complex.rb".freeze, "json/add/core.rb".freeze, "json/add/date.rb".freeze, "json/add/date_time.rb".freeze, "json/add/exception.rb".freeze, "json/add/ostruct.rb".freeze, "json/add/range.rb".freeze, "json/add/rational.rb".freeze, "json/add/regexp.rb".freeze, "json/add/struct.rb".freeze, "json/add/symbol.rb".freeze, "json/add/time.rb".freeze, "json/common.rb".freeze, "json/ext.rb".freeze, "json/ext/generator.so".freeze, "json/ext/parser.so".freeze, "json/generic_object.rb".freeze, "json/version.rb".freeze]
s.homepage = "http://flori.github.com/json".freeze
s.licenses = ["Ruby".freeze]
s.rdoc_options = ["--title".freeze, "JSON implemention for Ruby".freeze, "--main".freeze, "README.md".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 1.9".freeze)
s.rubygems_version = "3.0.3.1".freeze
s.summary = "JSON Implementation for Ruby".freeze
s.test_files = ["./tests/test_helper.rb".freeze]
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<test-unit>.freeze, ["~> 2.0"])
else
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<test-unit>.freeze, ["~> 2.0"])
end
else
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<test-unit>.freeze, ["~> 2.0"])
end
end
| 58.902439 | 749 | 0.689855 |
616f2ff50b7edf2db11a7bfd71a69f603241330f | 5,624 | require 'fog/hp'
module Fog
module HP
class BlockStorage < Fog::Service
requires :hp_secret_key, :hp_tenant_id, :hp_avl_zone
recognizes :hp_auth_uri
recognizes :persistent, :connection_options
recognizes :hp_use_upass_auth_style, :hp_auth_version, :user_agent
recognizes :hp_access_key, :hp_account_id # :hp_account_id is deprecated use hp_access_key instead
secrets :hp_secret_key
model_path 'fog/hp/models/block_storage'
model :volume
collection :volumes
collection :bootable_volumes
model :snapshot
collection :snapshots
request_path 'fog/hp/requests/block_storage'
request :create_volume
request :delete_volume
request :get_bootable_volume_details
request :get_volume_details
request :list_bootable_volumes
request :list_volumes
request :create_snapshot
request :delete_snapshot
request :get_snapshot_details
request :list_snapshots
module Utils
def compute
@compute ||= Fog::Compute.new(
:provider => 'HP',
:hp_access_key => @hp_access_key,
:hp_secret_key => @hp_secret_key,
:hp_auth_uri => @hp_auth_uri,
:hp_tenant_id => @hp_tenant_id,
:hp_avl_zone => @hp_avl_zone,
:connection_options => @connection_options
)
end
end
class Mock
include Utils
def self.data
@data ||= Hash.new do |hash, key|
hash[key] = {
:volumes => {},
:snapshots => {}
}
end
end
def self.reset
@data = nil
end
def initialize(options={})
# deprecate hp_account_id
if options[:hp_account_id]
Fog::Logger.deprecation(":hp_account_id is deprecated, please use :hp_access_key instead.")
@hp_access_key = options.delete(:hp_account_id)
end
@hp_access_key = options[:hp_access_key]
unless @hp_access_key
raise ArgumentError.new("Missing required arguments: hp_access_key. :hp_account_id is deprecated, please use :hp_access_key instead.")
end
end
def data
self.class.data[@hp_access_key]
end
def reset_data
self.class.data.delete(@hp_access_key)
end
end
class Real
include Utils
def initialize(options={})
# deprecate hp_account_id
if options[:hp_account_id]
Fog::Logger.deprecation(":hp_account_id is deprecated, please use :hp_access_key instead.")
options[:hp_access_key] = options.delete(:hp_account_id)
end
@hp_access_key = options[:hp_access_key]
unless @hp_access_key
raise ArgumentError.new("Missing required arguments: hp_access_key. :hp_account_id is deprecated, please use :hp_access_key instead.")
end
@hp_secret_key = options[:hp_secret_key]
@hp_auth_uri = options[:hp_auth_uri]
@connection_options = options[:connection_options] || {}
### Set an option to use the style of authentication desired; :v1 or :v2 (default)
auth_version = options[:hp_auth_version] || :v2
### Pass the service name for object storage to the authentication call
options[:hp_service_type] = "Block Storage"
@hp_tenant_id = options[:hp_tenant_id]
@hp_avl_zone = options[:hp_avl_zone]
### Make the authentication call
if (auth_version == :v2)
# Call the control services authentication
credentials = Fog::HP.authenticate_v2(options, @connection_options)
# the CS service catalog returns the block storage endpoint
@hp_block_uri = credentials[:endpoint_url]
else
# Call the legacy v1.0/v1.1 authentication
credentials = Fog::HP.authenticate_v1(options, @connection_options)
# the user sends in the block storage endpoint
@hp_block_uri = options[:hp_auth_uri]
end
@auth_token = credentials[:auth_token]
@persistent = options[:persistent] || false
uri = URI.parse(@hp_block_uri)
@host = uri.host
@path = uri.path
@port = uri.port
@scheme = uri.scheme
@connection = Fog::Connection.new("#{@scheme}://#{@host}:#{@port}", @persistent, @connection_options)
end
def reload
@connection.reset
end
def request(params, parse_json = true, &block)
begin
response = @connection.request(params.merge!({
:headers => {
'Content-Type' => 'application/json',
'Accept' => 'application/json',
'X-Auth-Token' => @auth_token
}.merge!(params[:headers] || {}),
:host => @host,
:path => "#{@path}/#{params[:path]}",
}), &block)
rescue Excon::Errors::HTTPStatusError => error
raise case error
when Excon::Errors::NotFound
Fog::HP::BlockStorage::NotFound.slurp(error)
else
error
end
end
if !response.body.empty? && parse_json && response.headers['Content-Type'] =~ %r{application/json}
response.body = Fog::JSON.decode(response.body)
end
response
end
end
end
end
end
| 32.888889 | 146 | 0.580725 |
7ad118ef38882deac88d77e8658a0ffe41621a87 | 1,947 | class API::V3::CatalogPolicy < CatalogPolicy
include CatalogAdmin::CatalogsHelper
def initialize(user, catalog)
super
raise Pundit::NotAuthorizedError unless catalog.not_deactivated?
end
def user_requirements_according_to_visibility?
return true if user.system_admin?
case catalog_access(catalog)
when 1
# Access to everyone
true
when 2
# Access to members
user_is_at_least_a_member?
when 3
else
# Access to catalog staff
user_is_at_least_an_editor?
end
end
# Catalog
alias_method :index?, :user_is_catalog_admin?
alias_method :categories_index?, :user_is_catalog_admin?
alias_method :choice_sets_index?, :user_is_catalog_admin?
alias_method :groups_index?, :user_is_catalog_admin?
alias_method :users_index?, :user_is_catalog_admin?
alias_method :item_types_index?, :user_is_at_least_an_editor?
# Category
alias_method :category_fields_index?, :user_is_catalog_admin?
# ChoiceSet
alias_method :choice_set_choices_index?, :user_is_catalog_admin?
alias_method :choice_set_choice_show?, :user_is_catalog_admin?
# ItemType
alias_method :item_type_fields_index?, :user_is_catalog_admin?
alias_method :item_type_items_index?, :user_requirements_according_to_visibility?
alias_method :item_type_show?, :user_is_at_least_an_editor?
alias_method :item_type_field_show?, :user_is_catalog_admin?
alias_method :item_type_item_show?, :user_requirements_according_to_visibility?
# SimpleSearch
alias_method :simple_search_create?, :user_requirements_according_to_visibility?
alias_method :simple_search_show?, :user_requirements_according_to_visibility?
# AdvancedSearch
alias_method :advanced_search_new?, :user_requirements_according_to_visibility?
alias_method :advanced_search_create?, :user_requirements_according_to_visibility?
alias_method :advanced_search_show?, :user_requirements_according_to_visibility?
end
| 34.157895 | 84 | 0.802773 |
084fa1666cd37bd297da17ceba27a0cf41860c8f | 436 | #!ruby
require "benchmark"
expr = %(10000000.times { :a.to_s })
Benchmark.bm(ARGV.max_by { |mruby| mruby.size }&.size || 0) do |bm|
meas = 5.times.map {
ARGV.map { |mruby|
bm.report(mruby) {
system *%W(#{mruby} -e #{expr})
}
}
}
meas = meas.transpose
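  # Sort each binary's five runs by total CPU time so the median can be reported below.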
meas.each do |mm|
mm.sort_by! { |m| m.total }
end
puts
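  # Print the median (third of the five sorted runs) for each binary.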
meas.each do |mm|
puts "#{mm[2].to_s.chomp!} #{mm[2].label}\n"
end
end
| 16.769231 | 67 | 0.541284 |
38b29871b5c7623a786873d2d5963a2f4154d9e0 | 432 | #!/usr/bin/env ruby
class Gender
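  # Returns only the genders whose hotkey is permitted for the role given in the options hash.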
def self.for_options options
role = options[:role]
all.select{ |gender| role.genders.include?(gender.hotkey) }
end
def self.all
DataLoader.load_file('genders').map do |data|
new(data)
end
end
attr_reader :name, :hotkey
def initialize data
data.each do |key, value|
instance_variable_set("@#{key}", value)
end
end
def to_s
name
end
end
| 15.428571 | 63 | 0.643519 |
acc4b0a9ab736821d902892435d1764370d9d777 | 5,100 | # == Schema Information
#
# Table name: legacy_os_records
#
# id :bigint not null, primary key
# owner_username :string(255) not null
# owner_full_name :string(255) not null
# phone :string(255)
# additional_dept_contact :string(255)
# additional_dept_contact_phone :string(255)
# support_poc :string(255)
# legacy_os :string(255)
# unique_app :string(255)
# unique_hardware :string(255)
# unique_date :datetime
# exception_approval_date :datetime
# review_date :datetime
# review_contact :string(255)
# local_it_support_group :string(255)
# data_type_id :bigint
# device_id :bigint
# created_at :datetime not null
# updated_at :datetime not null
# deleted_at :datetime
# incomplete :boolean default(FALSE)
# department_id :bigint not null
#
require 'rails_helper'
RSpec.describe LegacyOsRecord, type: :model do
let!(:device) { FactoryBot.create(:device) }
let!(:data_classification_level) { FactoryBot.create(:data_classification_level) }
let!(:data_type) { FactoryBot.create(:data_type, { data_classification_level: data_classification_level }) }
let!(:department) { FactoryBot.create(:department) }
let!(:legacy_os_record) { FactoryBot.create(:legacy_os_record) }
it "is valid with required attributes (including only unique_app)" do
expect(LegacyOsRecord.new(owner_username: "brita", owner_full_name: "Rita Barvinok",
department: department, phone: "123-345-6789",
unique_app: "unique_app",
device: device)).to be_valid
end
it "is valid with required attributes (including only unique_hardware)" do
expect(LegacyOsRecord.new(owner_username: "brita", owner_full_name: "Rita Barvinok",
department: department, phone: "123-345-6789",
unique_hardware: "unique_hardware",
device: device)).to be_valid
end
it "is valid with required attributes (including both unique_app and unique_hardware)" do
expect(LegacyOsRecord.new(owner_username: "brita", owner_full_name: "Rita Barvinok",
department: department, phone: "123-345-6789",
unique_hardware: "unique_hardware", unique_app: "unique_app",
device: device)).to be_valid
end
it "is valid without a data_type" do
device = Device.new(serial: "C02ZF95GLVDL")
expect(LegacyOsRecord.new(owner_username: "brita", owner_full_name: "Rita Barvinok", remediation: "remediation",
review_contact: "review_contact", department: department, phone: "123-345-6789", support_poc: "support_poc",
unique_date: "2021-03-19 16:50:16", unique_app: "unique_app",
device: device)).to be_valid
end
it "is not valid without a device" do
expect(LegacyOsRecord.new(owner_username: "brita", owner_full_name: "Rita Barvinok", remediation: "remediation",
review_contact: "review_contact", department: department, phone: "123-345-6789", support_poc: "support_poc",
unique_date: "2021-03-19 16:50:16", unique_app: "unique_app",
data_type: data_type)).to_not be_valid
end
it "is not valid without unique_app and unique_hardware" do
expect(LegacyOsRecord.new(owner_username: "brita", owner_full_name: "Rita Barvinok", remediation: "remediation",
review_contact: "review_contact", department: department, phone: "123-345-6789", support_poc: "support_poc",
unique_date: "2021-03-19 16:50:16",
device: device)).to_not be_valid
end
it "is incomplete with empty attributes" do
legacy_os_record = LegacyOsRecord.new(owner_username: "brita", owner_full_name: "Rita Barvinok",
department: department, phone: "123-345-6789",
unique_app: "unique_app",
device: device)
expect(legacy_os_record.not_completed?).to be(true)
end
it "is complete with all attributes" do
expect(legacy_os_record.not_completed?).to be(false)
legacy_os_record.update(notes: "")
expect(legacy_os_record.not_completed?).to be(false)
legacy_os_record.update(unique_app: "")
expect(legacy_os_record.not_completed?).to be(false)
legacy_os_record.update(unique_app: "unique_app", unique_hardware: "")
expect(legacy_os_record.not_completed?).to be(false)
end
it "is valid with all attributes" do
expect(legacy_os_record).to be_valid
end
end
| 48.571429 | 138 | 0.602745 |
b9f0caa681c1499427b7ef554f573e361002b907 | 130 | class AddContractIdColumnToCards < ActiveRecord::Migration[5.2]
def change
add_column :cards, :contract_id, :integer
end
end | 18.571429 | 63 | 0.784615 |
e2b22c2dc5ef30ea82c1623467f9e99295b30c67 | 3,871 | # See the Pagy documentation: https://ddnexus.github.io/pagy/extras/foundation
# frozen_string_literal: true
require 'pagy/extras/shared'
class Pagy
module Frontend
# Pagination for Foundation: it returns the html with the series of links to the pages
def pagy_foundation_nav(pagy)
link = pagy_link_proc(pagy)
html = +%(<nav class="pagy-foundation-nav" role="navigation" aria-label="Pagination"><ul class="pagination">)
html << pagy_foundation_prev_html(pagy, link)
pagy.series.each do |item| # series example: [1, :gap, 7, 8, "9", 10, 11, :gap, 36]
html << case item
when Integer then %(<li>#{link.call item}</li>) # page link
when String then %(<li class="current">#{item}</li>) # active page
when :gap then %(<li class="ellipsis gap" aria-hidden="true"></li>) # page gap
end
end
html << pagy_foundation_next_html(pagy, link)
html << %(</ul></nav>)
end
# Javascript pagination for foundation: it returns a nav and a JSON tag used by the Pagy.nav javascript
def pagy_foundation_nav_js(pagy, id=pagy_id)
link = pagy_link_proc(pagy)
tags = { 'before' => %(<ul class="pagination">#{pagy_foundation_prev_html(pagy, link)}),
'link' => %(<li>#{link.call PAGE_PLACEHOLDER}</li>),
'active' => %(<li class="current">#{pagy.page}</li>),
'gap' => %(<li class="ellipsis gap" aria-hidden="true"></li>),
'after' => %(#{pagy_foundation_next_html(pagy, link)}</ul>) }
html = %(<nav id="#{id}" class="pagy-foundation-nav-js" role="navigation" aria-label="Pagination"></nav>)
html << pagy_json_tag(pagy, :nav, id, tags, pagy.sequels)
end
# Javascript combo pagination for Foundation: it returns a nav and a JSON tag used by the Pagy.combo_nav javascript
def pagy_foundation_combo_nav_js(pagy, id=pagy_id)
link = pagy_link_proc(pagy)
p_page = pagy.page
p_pages = pagy.pages
input = %(<input class="input-group-field cell shrink" type="number" min="1" max="#{p_pages}" value="#{p_page}" style="width: #{p_pages.to_s.length+1}rem; padding: 0 0.3rem; margin: 0 0.3rem;">)
%(<nav id="#{id}" class="pagy-foundation-combo-nav-js" role="navigation" aria-label="Pagination"><div class="input-group">#{
if (p_prev = pagy.prev)
link.call p_prev, pagy_t('pagy.nav.prev'), 'style="margin-bottom: 0px;" aria-label="previous" class="prev button primary"'
else
%(<a style="margin-bottom: 0px;" class="prev button primary disabled" href="#">#{pagy_t('pagy.nav.prev')}</a>)
end
}<span class="input-group-label">#{pagy_t 'pagy.combo_nav_js', page_input: input, count: p_page, pages: p_pages}</span>#{
if (p_next = pagy.next)
link.call p_next, pagy_t('pagy.nav.next'), 'style="margin-bottom: 0px;" aria-label="next" class="next button primary"'
else
%(<a style="margin-bottom: 0px;" class="next button primary disabled" href="#">#{pagy_t 'pagy.nav.next'}</a>)
end
}</div></nav>#{pagy_json_tag(pagy, :combo_nav, id, p_page, pagy_marked_link(link))})
end
private
def pagy_foundation_prev_html(pagy, link)
if (p_prev = pagy.prev)
%(<li class="prev">#{link.call p_prev, pagy_t('pagy.nav.prev'), 'aria-label="previous"'}</li>)
else
%(<li class="prev disabled">#{pagy_t 'pagy.nav.prev' }</li>)
end
end
def pagy_foundation_next_html(pagy, link)
if (p_next = pagy.next)
%(<li class="next">#{link.call p_next, pagy_t('pagy.nav.next'), 'aria-label="next"'}</li>)
else
%(<li class="next disabled">#{pagy_t 'pagy.nav.next'}</li>)
end
end
end
end
| 47.790123 | 202 | 0.602687 |
01d28c43b5c69379962cef9a39f3f7aa72b213be | 906 |
require_relative "lib/relaxo/version"
Gem::Specification.new do |spec|
spec.name = "relaxo"
spec.version = Relaxo::VERSION
spec.summary = "Relaxo is versioned document database built on top of git."
spec.authors = ["Samuel Williams"]
spec.license = "MIT"
spec.homepage = "https://github.com/ioquatix/relaxo"
spec.metadata = {
"funding_uri" => "https://github.com/sponsors/ioquatix/",
}
spec.files = Dir.glob('{lib}/**/*', File::FNM_DOTMATCH, base: __dir__)
spec.required_ruby_version = ">= 2.5"
spec.add_dependency "console"
spec.add_dependency "rugged"
spec.add_development_dependency "bake"
spec.add_development_dependency "bake-bundler"
spec.add_development_dependency "bake-modernize"
spec.add_development_dependency "bundler"
spec.add_development_dependency "covered"
spec.add_development_dependency "msgpack"
spec.add_development_dependency "rspec", "~> 3.6"
end
| 27.454545 | 76 | 0.747241 |
4a1ae326b8f055803d414e904d17dea3f92428c3 | 2,966 | require "test_helper"
class PrettyrbTest < Minitest::Test
def test_that_it_has_a_version_number
refute_nil ::Prettyrb::VERSION
end
def test_breaks_up_long_conditionals
source = <<~RUBY
if "hello" != "foo bar baz" && "foo" != "hello world" && "wow" != "okay this might be long" && "wow this is really really long" != "okay"
true
end
RUBY
result = Prettyrb::Formatter.new(source).format
expected = <<~RUBY
if
"hello" != "foo bar baz" &&
"foo" != "hello world" &&
"wow" != "okay this might be long" &&
"wow this is really really long" != "okay"
then
true
end
RUBY
assert_equal expected.rstrip, result.rstrip
end
def test_supports_booleans
source = <<~RUBY
if 1
true
else
false
end
RUBY
result = Prettyrb::Formatter.new(source).format
expected = <<~RUBY
if 1
true
else
false
end
RUBY
assert_equal expected.strip, result
end
def test_formats_basic_statement
source = <<~RUBY
if 1
'hello'
end
RUBY
result = Prettyrb::Formatter.new(source).format
expected = <<~RUBY
if 1
"hello"
end
RUBY
assert_equal expected.strip, result
end
def test_formats_conditionals
source = <<~RUBY
if (1&&2) ||
(1!=2)
'hello'
end
RUBY
result = Prettyrb::Formatter.new(source).format
expected = <<~RUBY
if (1 && 2) || 1 != 2
"hello"
end
RUBY
assert_equal expected.strip, result
end
def test_basic_class
source = <<~RUBY
class Foo
DATA = ["hello", "world"].freeze
end
RUBY
expected = <<~RUBY
class Foo
DATA = ["hello", "world"].freeze
end
RUBY
result = Prettyrb::Formatter.new(source).format
assert_equal expected, result
end
def test_array_assign
source = <<~RUBY
hello = ["really really really really really really really really really long", "really really really really really really really really really long"]
RUBY
expected = <<~RUBY
hello = [
"really really really really really really really really really long",
"really really really really really really really really really long",
]
RUBY
result = Prettyrb::Formatter.new(source).format
assert_equal expected.rstrip, result
end
def test_array_assign_with_join
source = <<~RUBY
def rad
hello = ["really really really really really really really really really long", "really really really really really really really really really long"].join(",")
end
RUBY
expected = <<~RUBY
def rad
hello = [
"really really really really really really really really really long",
"really really really really really really really really really long",
].join(",")
end
RUBY
result = Prettyrb::Formatter.new(source).format
assert_equal expected.rstrip, result
end
end
| 21.338129 | 166 | 0.626433 |
e21312eaf59a9b59a45cbc0d0e255a562fa5fa4e | 2,006 | module Nanoc::Helpers
# @see http://nanoc.ws/doc/reference/helpers/#rendering
module Rendering
include Nanoc::Helpers::Capturing
# @param [String] identifier
# @param [Hash] other_assigns
#
# @raise [Nanoc::Int::Errors::UnknownLayout]
# @raise [Nanoc::Int::Errors::CannotDetermineFilter]
# @raise [Nanoc::Int::Errors::UnknownFilter]
#
# @return [String, nil]
def render(identifier, other_assigns = {}, &block)
# Find layout
layout_view = @layouts[identifier]
layout_view ||= @layouts[identifier.__nanoc_cleaned_identifier]
raise Nanoc::Int::Errors::UnknownLayout.new(identifier) if layout_view.nil?
layout = layout_view.unwrap
# Visit
dependency_tracker = @config._context.dependency_tracker
dependency_tracker.bounce(layout, raw_content: true)
# Capture content, if any
captured_content = block_given? ? capture(&block) : nil
# Get assigns
assigns = {
content: captured_content,
item: @item,
item_rep: @item_rep,
items: @items,
layout: layout_view,
layouts: @layouts,
config: @config,
}.merge(other_assigns)
# Get filter name
filter_name, filter_args = *@config._context.compilation_context.filter_name_and_args_for_layout(layout)
raise Nanoc::Int::Errors::CannotDetermineFilter.new(layout.identifier) if filter_name.nil?
# Get filter class
filter_class = Nanoc::Filter.named!(filter_name)
# Create filter
filter = filter_class.new(assigns)
# Layout
content = layout.content
arg = content.binary? ? content.filename : content.string
result = filter.setup_and_run(arg, filter_args)
# Append to erbout if we have a block
if block_given?
# Append result and return nothing
erbout = eval('_erbout', block.binding)
erbout << result
''
else
# Return result
result
end
end
end
end
| 29.940299 | 110 | 0.648056 |
3993d9aec3d2a234c3bf46728456c4228581626a | 930 | module Users
class ConfirmationsController < Devise::ConfirmationsController
def show
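      # Once Devise confirms the address, mark the vendor invite as accepted for vendor accounts.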
super do |user|
if user.is_role_vendor?
vendor_invite = VendorInvite.for_user(user)
vendor_invite.set_status_to_invite_accepted!
end
end
end
protected
def after_confirmation_path_for(resource_name, user)
if user.is_role_vendor?
sign_in(resource_name, user)
vendor_invite = VendorInvite.for_user(user)
if vendor_invite.user.has_no_password?
set_password_vendor_invite_wizard_path
elsif vendor_invite.is_vendor_info_incomplete?
complete_info_vendor_invite_wizard_path
elsif vendor_invite.is_payment_info_incomplete?
payment_info_vendor_invite_wizard_path
else
dashboard_vendor_path
end
else
super
end
end
end
end
| 26.571429 | 65 | 0.662366 |
6aefde73d5d11fcaab2c6cf798d8c128cd3eca46 | 3,969 | # encoding: UTF-8
# This file contains data derived from the IANA Time Zone Database
# (http://www.iana.org/time-zones).
module TZInfo
module Data
module Definitions
module Asia
module Krasnoyarsk
include TimezoneDefinition
timezone 'Asia/Krasnoyarsk' do |tz|
tz.offset :o0, 22286, 0, :LMT
tz.offset :o1, 21600, 0, :KRAT
tz.offset :o2, 25200, 0, :KRAT
tz.offset :o3, 25200, 3600, :KRAST
tz.offset :o4, 21600, 3600, :KRAST
tz.offset :o5, 28800, 0, :KRAT
tz.transition 1920, 1, :o1, -1577513486, 104644623257, 43200
tz.transition 1930, 6, :o2, -1247551200, 9704593, 4
tz.transition 1981, 3, :o3, 354906000
tz.transition 1981, 9, :o2, 370713600
tz.transition 1982, 3, :o3, 386442000
tz.transition 1982, 9, :o2, 402249600
tz.transition 1983, 3, :o3, 417978000
tz.transition 1983, 9, :o2, 433785600
tz.transition 1984, 3, :o3, 449600400
tz.transition 1984, 9, :o2, 465332400
tz.transition 1985, 3, :o3, 481057200
tz.transition 1985, 9, :o2, 496782000
tz.transition 1986, 3, :o3, 512506800
tz.transition 1986, 9, :o2, 528231600
tz.transition 1987, 3, :o3, 543956400
tz.transition 1987, 9, :o2, 559681200
tz.transition 1988, 3, :o3, 575406000
tz.transition 1988, 9, :o2, 591130800
tz.transition 1989, 3, :o3, 606855600
tz.transition 1989, 9, :o2, 622580400
tz.transition 1990, 3, :o3, 638305200
tz.transition 1990, 9, :o2, 654634800
tz.transition 1991, 3, :o4, 670359600
tz.transition 1991, 9, :o1, 686088000
tz.transition 1992, 1, :o2, 695764800
tz.transition 1992, 3, :o3, 701798400
tz.transition 1992, 9, :o2, 717519600
tz.transition 1993, 3, :o3, 733258800
tz.transition 1993, 9, :o2, 748983600
tz.transition 1994, 3, :o3, 764708400
tz.transition 1994, 9, :o2, 780433200
tz.transition 1995, 3, :o3, 796158000
tz.transition 1995, 9, :o2, 811882800
tz.transition 1996, 3, :o3, 828212400
tz.transition 1996, 10, :o2, 846356400
tz.transition 1997, 3, :o3, 859662000
tz.transition 1997, 10, :o2, 877806000
tz.transition 1998, 3, :o3, 891111600
tz.transition 1998, 10, :o2, 909255600
tz.transition 1999, 3, :o3, 922561200
tz.transition 1999, 10, :o2, 941310000
tz.transition 2000, 3, :o3, 954010800
tz.transition 2000, 10, :o2, 972759600
tz.transition 2001, 3, :o3, 985460400
tz.transition 2001, 10, :o2, 1004209200
tz.transition 2002, 3, :o3, 1017514800
tz.transition 2002, 10, :o2, 1035658800
tz.transition 2003, 3, :o3, 1048964400
tz.transition 2003, 10, :o2, 1067108400
tz.transition 2004, 3, :o3, 1080414000
tz.transition 2004, 10, :o2, 1099162800
tz.transition 2005, 3, :o3, 1111863600
tz.transition 2005, 10, :o2, 1130612400
tz.transition 2006, 3, :o3, 1143313200
tz.transition 2006, 10, :o2, 1162062000
tz.transition 2007, 3, :o3, 1174762800
tz.transition 2007, 10, :o2, 1193511600
tz.transition 2008, 3, :o3, 1206817200
tz.transition 2008, 10, :o2, 1224961200
tz.transition 2009, 3, :o3, 1238266800
tz.transition 2009, 10, :o2, 1256410800
tz.transition 2010, 3, :o3, 1269716400
tz.transition 2010, 10, :o2, 1288465200
tz.transition 2011, 3, :o5, 1301166000
tz.transition 2014, 10, :o2, 1414260000
end
end
end
end
end
end
| 43.141304 | 72 | 0.563366 |
bb8e5f61e8ed3b04466e8528200fc2772fe0451b | 403 | class CreateTopics < ActiveRecord::Migration
def self.up
create_table :topics do |t|
t.string :slug, :null => false
t.string :name, :null => false
t.text :description, :null => false
t.timestamps
end
add_index :topics, :slug, :unique => true
add_index :topics, :name, :unique => true
end
def self.down
drop_table :topics
end
end
| 21.210526 | 45 | 0.598015 |
f829d918018728ad6aacf1967aadd839c2ab93da | 1,958 | ##
# This module requires Metasploit: http//metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Auxiliary
# Exploit mixins should be called first
include Msf::Exploit::Remote::SMB
include Msf::Auxiliary::Report
# Aliases for common classes
SIMPLE = Rex::Proto::SMB::SimpleClient
XCEPT = Rex::Proto::SMB::Exceptions
CONST = Rex::Proto::SMB::Constants
def initialize
super(
'Name' => 'SMB File Upload Utility',
'Description' => %Q{
This module uploads a file to a target share and path. The only reason
to use this module is if your existing SMB client is not able to support the features
of the Metasploit Framework that you need, like pass-the-hash authentication.
},
'Author' =>
[
'hdm' # metasploit module
],
'References' =>
[
],
'License' => MSF_LICENSE
)
register_options([
OptString.new('SMBSHARE', [true, 'The name of a writeable share on the server', 'C$']),
OptString.new('RPATH', [true, 'The name of the remote file relative to the share']),
OptString.new('LPATH', [true, 'The path of the local file to upload'])
], self.class)
end
def run
data = ::File.read(datastore['LPATH'], ::File.size(datastore['LPATH']))
print_status("Read #{data.length} bytes from #{datastore['LPATH']}...")
print_status("Connecting to the server...")
connect()
smb_login()
print_status("Mounting the remote share \\\\#{datastore['RHOST']}\\#{datastore['SMBSHARE']}'...")
self.simple.connect("\\\\#{rhost}\\#{datastore['SMBSHARE']}")
print_status("Trying to upload #{datastore['RPATH']}...")
fd = simple.open("\\#{datastore['RPATH']}", 'rwct')
fd.write(data)
fd.close
print_status("The file has been uploaded to #{datastore['RPATH']}...")
end
end
| 27.971429 | 101 | 0.627171 |
f81a2fa45f3f1977c8eb83c86caee5ca0205cebe | 266 | module MedlineplusRuby
module API
module ResponseMessage
ERROR_NO_RESPONSE = 'No response from National Library of Medicine API.'.freeze
ERROR_NO_PARSE = 'Unable to parse response from National Library of Medicine API.'.freeze
end
end
end
| 29.555556 | 98 | 0.744361 |
edf291ecd13cf426be2adbf5fb5271e8e76ecbfc | 395 | # Copyright (c) 2021-present, Facebook, Inc.
name 'fb_yum_repos'
maintainer 'Facebook'
maintainer_email '[email protected]'
license 'Apache-2.0'
description 'Installs/Configures yum repos'
source_url 'https://github.com/facebook/chef-cookbooks/'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.0.1'
supports 'centos'
supports 'fedora'
depends 'fb_helpers'
| 30.384615 | 72 | 0.782278 |
e26159b2b72d2c0f719335374d97c5d90c8efcc8 | 35,253 | # frozen_string_literal: true
require "formula"
require "formula_versions"
require "utils/curl"
require "utils/notability"
require "extend/ENV"
require "formula_cellar_checks"
require "cmd/search"
require "style"
require "date"
require "missing_formula"
require "digest"
require "cli/parser"
require "json"
module Homebrew
module_function
def audit_args
Homebrew::CLI::Parser.new do
usage_banner <<~EOS
`audit` [<options>] [<formula>]
Check <formula> for Homebrew coding style violations. This should be run before
submitting a new formula. If no <formula> are provided, check all locally
available formulae and skip style checks. Will exit with a non-zero status if any
errors are found.
EOS
switch "--strict",
description: "Run additional, stricter style checks."
switch "--git",
description: "Run additional, slower style checks that navigate the Git repository."
switch "--online",
description: "Run additional, slower style checks that require a network connection."
switch "--new-formula",
description: "Run various additional style checks to determine if a new formula is eligible "\
"for Homebrew. This should be used when creating new formula and implies "\
"`--strict` and `--online`."
switch "--fix",
description: "Fix style violations automatically using RuboCop's auto-correct feature."
switch "--display-cop-names",
description: "Include the RuboCop cop name for each violation in the output."
switch "--display-filename",
description: "Prefix every line of output with the file or formula name being audited, to "\
"make output easy to grep."
switch "--skip-style",
description: "Skip running non-RuboCop style checks. Useful if you plan on running "\
"`brew style` separately."
switch "-D", "--audit-debug",
description: "Enable debugging and profiling of audit methods."
comma_array "--only",
description: "Specify a comma-separated <method> list to only run the methods named "\
"`audit_`<method>."
comma_array "--except",
description: "Specify a comma-separated <method> list to skip running the methods named "\
"`audit_`<method>."
comma_array "--only-cops",
description: "Specify a comma-separated <cops> list to check for violations of only the listed "\
"RuboCop cops."
comma_array "--except-cops",
description: "Specify a comma-separated <cops> list to skip checking for violations of the listed "\
"RuboCop cops."
switch :verbose
switch :debug
conflicts "--only", "--except"
conflicts "--only-cops", "--except-cops", "--strict"
conflicts "--only-cops", "--except-cops", "--only"
conflicts "--display-cop-names", "--skip-style"
conflicts "--display-cop-names", "--only-cops"
conflicts "--display-cop-names", "--except-cops"
end
end
def audit
audit_args.parse
Homebrew.auditing = true
inject_dump_stats!(FormulaAuditor, /^audit_/) if args.audit_debug?
formula_count = 0
problem_count = 0
corrected_problem_count = 0
new_formula_problem_count = 0
new_formula = args.new_formula?
strict = new_formula || args.strict?
online = new_formula || args.online?
git = args.git?
skip_style = args.skip_style? || args.no_named?
ENV.activate_extensions!
ENV.setup_build_environment
audit_formulae = args.no_named? ? Formula : args.resolved_formulae
style_files = args.formulae_paths unless skip_style
only_cops = args.only_cops
except_cops = args.except_cops
options = { fix: args.fix? }
if only_cops
options[:only_cops] = only_cops
elsif args.new_formula?
nil
elsif except_cops
options[:except_cops] = except_cops
elsif !strict
options[:except_cops] = [:FormulaAuditStrict]
end
# Check style in a single batch run up front for performance
style_results = Style.check_style_json(style_files, options) if style_files
# load licenses
spdx = HOMEBREW_LIBRARY_PATH/"data/spdx.json"
spdx_data = JSON.parse(spdx.read)
new_formula_problem_lines = []
audit_formulae.sort.each do |f|
only = only_cops ? ["style"] : args.only
options = {
new_formula: new_formula,
strict: strict,
online: online,
git: git,
only: only,
except: args.except,
spdx_data: spdx_data,
}
options[:style_offenses] = style_results.file_offenses(f.path) if style_results
options[:display_cop_names] = args.display_cop_names?
fa = FormulaAuditor.new(f, options)
fa.audit
next if fa.problems.empty? && fa.new_formula_problems.empty?
fa.problems
formula_count += 1
problem_count += fa.problems.size
problem_lines = format_problem_lines(fa.problems)
corrected_problem_count = options[:style_offenses].count(&:corrected?) if options[:style_offenses]
new_formula_problem_lines = format_problem_lines(fa.new_formula_problems)
if args.display_filename?
puts problem_lines.map { |s| "#{f.path}: #{s}" }
else
puts "#{f.full_name}:", problem_lines.map { |s| " #{s}" }
end
end
new_formula_problem_count += new_formula_problem_lines.size
puts new_formula_problem_lines.map { |s| " #{s}" }
total_problems_count = problem_count + new_formula_problem_count
problem_plural = "#{total_problems_count} #{"problem".pluralize(total_problems_count)}"
formula_plural = "#{formula_count} #{"formula".pluralize(formula_count)}"
corrected_problem_plural = "#{corrected_problem_count} #{"problem".pluralize(corrected_problem_count)}"
errors_summary = "#{problem_plural} in #{formula_plural} detected"
errors_summary += ", #{corrected_problem_plural} corrected" if corrected_problem_count.positive?
ofail errors_summary if problem_count.positive? || new_formula_problem_count.positive?
end
def format_problem_lines(problems)
problems.uniq.map { |p| "* #{p.chomp.gsub("\n", "\n ")}" }
end
class FormulaText
def initialize(path)
@text = path.open("rb", &:read)
@lines = @text.lines.to_a
end
def without_patch
@text.split("\n__END__").first
end
def trailing_newline?
/\Z\n/ =~ @text
end
def =~(other)
other =~ @text
end
def include?(s)
@text.include? s
end
def line_number(regex, skip = 0)
index = @lines.drop(skip).index { |line| line =~ regex }
index ? index + 1 : nil
end
def reverse_line_number(regex)
index = @lines.reverse.index { |line| line =~ regex }
index ? @lines.count - index : nil
end
end
class FormulaAuditor
include FormulaCellarChecks
attr_reader :formula, :text, :problems, :new_formula_problems
def initialize(formula, options = {})
@formula = formula
@versioned_formula = formula.versioned_formula?
@new_formula_inclusive = options[:new_formula]
@new_formula = options[:new_formula] && !@versioned_formula
@strict = options[:strict]
@online = options[:online]
@git = options[:git]
@display_cop_names = options[:display_cop_names]
@only = options[:only]
@except = options[:except]
# Accept precomputed style offense results, for efficiency
@style_offenses = options[:style_offenses]
# Allow the formula tap to be set as homebrew/core, for testing purposes
@core_tap = formula.tap&.core_tap? || options[:core_tap]
@problems = []
@new_formula_problems = []
@text = FormulaText.new(formula.path)
@specs = %w[stable devel head].map { |s| formula.send(s) }.compact
@spdx_data = options[:spdx_data]
end
def audit_style
return unless @style_offenses
@style_offenses.each do |offense|
problem offense.to_s(display_cop_name: @display_cop_names)
end
end
def audit_file
if formula.core_formula? && @versioned_formula
unversioned_formula = begin
# build this ourselves as we want e.g. homebrew/core to be present
full_name = if formula.tap
"#{formula.tap}/#{formula.name}"
else
formula.name
end
Formulary.factory(full_name.gsub(/@.*$/, "")).path
rescue FormulaUnavailableError, TapFormulaAmbiguityError,
TapFormulaWithOldnameAmbiguityError
Pathname.new formula.path.to_s.gsub(/@.*\.rb$/, ".rb")
end
unless unversioned_formula.exist?
unversioned_name = unversioned_formula.basename(".rb")
problem "#{formula} is versioned but no #{unversioned_name} formula exists"
end
elsif Homebrew.args.build_stable? && formula.stable? &&
!(versioned_formulae = formula.versioned_formulae).empty?
versioned_aliases = formula.aliases.grep(/.@\d/)
_, last_alias_version = versioned_formulae.map(&:name).last.split("@")
major, minor, = formula.version.to_s.split(".")
alias_name_major = "#{formula.name}@#{major}"
alias_name_major_minor = "#{alias_name_major}.#{minor}"
alias_name = if last_alias_version.split(".").length == 1
alias_name_major
else
alias_name_major_minor
end
valid_alias_names = [alias_name_major, alias_name_major_minor]
unless @core_tap
versioned_aliases.map! { |a| "#{formula.tap}/#{a}" }
valid_alias_names.map! { |a| "#{formula.tap}/#{a}" }
end
valid_versioned_aliases = versioned_aliases & valid_alias_names
invalid_versioned_aliases = versioned_aliases - valid_alias_names
if valid_versioned_aliases.empty?
if formula.tap
problem <<~EOS
Formula has other versions so create a versioned alias:
cd #{formula.tap.alias_dir}
ln -s #{formula.path.to_s.gsub(formula.tap.path, "..")} #{alias_name}
EOS
else
problem "Formula has other versions so create an alias named #{alias_name}."
end
end
unless invalid_versioned_aliases.empty?
problem <<~EOS
Formula has invalid versioned aliases:
#{invalid_versioned_aliases.join("\n ")}
EOS
end
end
end
def self.aliases
# core aliases + tap alias names + tap alias full name
@aliases ||= Formula.aliases + Formula.tap_aliases
end
def audit_formula_name
return unless @strict
return unless @core_tap
name = formula.name
problem "'#{name}' is not allowed in homebrew/core." if MissingFormula.disallowed_reason(name)
if Formula.aliases.include? name
problem "Formula name conflicts with existing aliases in homebrew/core."
return
end
if oldname = CoreTap.instance.formula_renames[name]
problem "'#{name}' is reserved as the old name of #{oldname} in homebrew/core."
return
end
return if formula.core_formula?
return unless Formula.core_names.include?(name)
problem "Formula name conflicts with existing core formula."
end
PROVIDED_BY_MACOS_DEPENDS_ON_ALLOWLIST = %w[
apr
apr-util
libressl
openblas
[email protected]
].freeze
def audit_license
if formula.license.present?
if @spdx_data["licenses"].any? { |lic| lic["licenseId"] == formula.license }
return unless @online
user, repo = get_repo_data(%r{https?://github\.com/([^/]+)/([^/]+)/?.*}) if @new_formula
return if user.blank?
github_license = GitHub.get_repo_license(user, repo)
return if github_license && [formula.license, "NOASSERTION"].include?(github_license)
problem "License mismatch - GitHub license is: #{github_license}, "\
"but Formulae license states: #{formula.license}."
else
problem "#{formula.license} is not a standard SPDX license."
end
elsif @new_formula
problem "No license specified for package."
end
end
def audit_deps
@specs.each do |spec|
# Check for things we don't like to depend on.
# We allow non-Homebrew installs whenever possible.
spec.deps.each do |dep|
begin
dep_f = dep.to_formula
rescue TapFormulaUnavailableError
# Don't complain about missing cross-tap dependencies
next
rescue FormulaUnavailableError
problem "Can't find dependency #{dep.name.inspect}."
next
rescue TapFormulaAmbiguityError
problem "Ambiguous dependency #{dep.name.inspect}."
next
rescue TapFormulaWithOldnameAmbiguityError
problem "Ambiguous oldname dependency #{dep.name.inspect}."
next
end
if dep_f.oldname && dep.name.split("/").last == dep_f.oldname
problem "Dependency '#{dep.name}' was renamed; use new name '#{dep_f.name}'."
end
if self.class.aliases.include?(dep.name) &&
(dep_f.core_formula? || !dep_f.versioned_formula?)
problem "Dependency '#{dep.name}' from homebrew/core is an alias; " \
"use the canonical name '#{dep.to_formula.full_name}'."
end
if @new_formula &&
dep_f.keg_only? &&
dep_f.keg_only_reason.provided_by_macos? &&
dep_f.keg_only_reason.applicable? &&
!PROVIDED_BY_MACOS_DEPENDS_ON_ALLOWLIST.include?(dep.name)
new_formula_problem(
"Dependency '#{dep.name}' is provided by macOS; " \
"please replace 'depends_on' with 'uses_from_macos'.",
)
end
dep.options.each do |opt|
next if @core_tap
next if dep_f.option_defined?(opt)
next if dep_f.requirements.find do |r|
if r.recommended?
opt.name == "with-#{r.name}"
elsif r.optional?
opt.name == "without-#{r.name}"
end
end
problem "Dependency #{dep} does not define option #{opt.name.inspect}"
end
problem "Don't use git as a dependency (it's always available)" if @new_formula && dep.name == "git"
problem "Dependency '#{dep.name}' is marked as :run. Remove :run; it is a no-op." if dep.tags.include?(:run)
next unless @core_tap
if dep.tags.include?(:recommended) || dep.tags.include?(:optional)
problem "Formulae in homebrew/core should not have optional or recommended dependencies"
end
end
next unless @core_tap
if spec.requirements.map(&:recommended?).any? || spec.requirements.map(&:optional?).any?
problem "Formulae in homebrew/core should not have optional or recommended requirements"
end
end
end
def audit_conflicts
formula.conflicts.each do |c|
Formulary.factory(c.name)
rescue TapFormulaUnavailableError
# Don't complain about missing cross-tap conflicts.
next
rescue FormulaUnavailableError
problem "Can't find conflicting formula #{c.name.inspect}."
rescue TapFormulaAmbiguityError, TapFormulaWithOldnameAmbiguityError
problem "Ambiguous conflicting formula #{c.name.inspect}."
end
end
def audit_postgresql
return unless formula.name == "postgresql"
return unless @core_tap
major_version = formula.version
.to_s
.split(".")
.first
.to_i
previous_major_version = major_version - 1
previous_formula_name = "postgresql@#{previous_major_version}"
begin
Formula[previous_formula_name]
rescue FormulaUnavailableError
problem "Versioned #{previous_formula_name} in homebrew/core must be created for " \
"`brew-postgresql-upgrade-database` and `pg_upgrade` to work."
end
end
VERSIONED_KEG_ONLY_ALLOWLIST = %w[
[email protected]
bash-completion@2
[email protected]
libsigc++@2
[email protected]
[email protected]
[email protected]
].freeze
def audit_versioned_keg_only
return unless @versioned_formula
return unless @core_tap
if formula.keg_only?
return if formula.keg_only_reason.versioned_formula?
if formula.name.start_with?("openssl", "libressl") &&
formula.keg_only_reason.by_macos?
return
end
end
return if VERSIONED_KEG_ONLY_ALLOWLIST.include?(formula.name) || formula.name.start_with?("gcc@")
problem "Versioned formulae in homebrew/core should use `keg_only :versioned_formula`"
end
def audit_homepage
homepage = formula.homepage
return if homepage.nil? || homepage.empty?
return unless @online
return unless DevelopmentTools.curl_handles_most_https_certificates?
if http_content_problem = curl_check_http_content(homepage,
user_agents: [:browser, :default],
check_content: true,
strict: @strict)
problem http_content_problem
end
end
def audit_bottle_spec
# special case: new versioned formulae should be audited
return unless @new_formula_inclusive
return unless @core_tap
return if formula.bottle_disabled?
return unless formula.bottle_defined?
new_formula_problem "New formulae in homebrew/core should not have a `bottle do` block"
end
def audit_bottle_disabled
return unless formula.bottle_disabled?
return if formula.bottle_unneeded?
problem "Unrecognized bottle modifier" unless formula.bottle_disable_reason.valid?
return unless @core_tap
problem "Formulae in homebrew/core should not use `bottle :disabled`"
end
def audit_github_repository_archived
return if formula.deprecated?
user, repo = get_repo_data(%r{https?://github\.com/([^/]+)/([^/]+)/?.*}) if @online
return if user.blank?
metadata = SharedAudits.github_repo_data(user, repo)
return if metadata.nil?
problem "GitHub repo is archived" if metadata["archived"]
end
def audit_gitlab_repository_archived
return if formula.deprecated?
user, repo = get_repo_data(%r{https?://gitlab\.com/([^/]+)/([^/]+)/?.*}) if @online
return if user.blank?
metadata = SharedAudits.gitlab_repo_data(user, repo)
return if metadata.nil?
problem "GitLab repo is archived" if metadata["archived"]
end
def audit_github_repository
user, repo = get_repo_data(%r{https?://github\.com/([^/]+)/([^/]+)/?.*}) if @new_formula
return if user.blank?
warning = SharedAudits.github(user, repo)
return if warning.nil?
new_formula_problem warning
end
def audit_gitlab_repository
user, repo = get_repo_data(%r{https?://gitlab\.com/([^/]+)/([^/]+)/?.*}) if @new_formula
return if user.blank?
warning = SharedAudits.gitlab(user, repo)
return if warning.nil?
new_formula_problem warning
end
def audit_bitbucket_repository
user, repo = get_repo_data(%r{https?://bitbucket\.org/([^/]+)/([^/]+)/?.*}) if @new_formula
return if user.blank?
warning = SharedAudits.bitbucket(user, repo)
return if warning.nil?
new_formula_problem warning
end
def get_repo_data(regex)
return unless @core_tap
return unless @online
_, user, repo = *regex.match(formula.stable.url) if formula.stable
_, user, repo = *regex.match(formula.homepage) unless user
_, user, repo = *regex.match(formula.head.url) if !user && formula.head
return if !user || !repo
repo.delete_suffix!(".git")
[user, repo]
end
VERSIONED_HEAD_SPEC_ALLOWLIST = %w[
bash-completion@2
imagemagick@6
].freeze
THROTTLED_FORMULAE = {
"aws-sdk-cpp" => 10,
"awscli@1" => 10,
"balena-cli" => 10,
"gatsby-cli" => 10,
"quicktype" => 10,
"vim" => 50,
}.freeze
UNSTABLE_ALLOWLIST = {
"aalib" => "1.4rc",
"automysqlbackup" => "3.0-rc",
"aview" => "1.3.0rc",
"elm-format" => "0.6.0-alpha",
"ftgl" => "2.1.3-rc",
"hidapi" => "0.8.0-rc",
"libcaca" => "0.99b",
"premake" => "4.4-beta",
"pwnat" => "0.3-beta",
"recode" => "3.7-beta",
"speexdsp" => "1.2rc",
"sqoop" => "1.4.",
"tcptraceroute" => "1.5beta",
"tiny-fugue" => "5.0b",
"vbindiff" => "3.0_beta",
}.freeze
GNOME_DEVEL_ALLOWLIST = {
"libart" => "2.3",
"gtk-mac-integration" => "2.1",
"gtk-doc" => "1.31",
"gcab" => "1.3",
"libepoxy" => "1.5",
}.freeze
GITHUB_PRERELEASE_ALLOWLIST = %w[].freeze
# version_prefix = stable_version_string.sub(/\d+$/, "")
# version_prefix = stable_version_string.split(".")[0..1].join(".")
def audit_specs
problem "Head-only (no stable download)" if head_only?(formula)
problem "Devel-only (no stable download)" if devel_only?(formula)
%w[Stable Devel HEAD].each do |name|
spec_name = name.downcase.to_sym
next unless spec = formula.send(spec_name)
ra = ResourceAuditor.new(spec, spec_name, online: @online, strict: @strict).audit
problems.concat ra.problems.map { |problem| "#{name}: #{problem}" }
spec.resources.each_value do |resource|
problem "Resource name should be different from the formula name" if resource.name == formula.name
ra = ResourceAuditor.new(resource, spec_name, online: @online, strict: @strict).audit
problems.concat ra.problems.map { |problem|
"#{name} resource #{resource.name.inspect}: #{problem}"
}
end
next if spec.patches.empty?
next unless @new_formula
new_formula_problem(
"Formulae should not require patches to build. " \
"Patches should be submitted and accepted upstream first.",
)
end
%w[Stable Devel].each do |name|
next unless spec = formula.send(name.downcase)
version = spec.version
problem "#{name}: version (#{version}) is set to a string without a digit" if version.to_s !~ /\d/
if version.to_s.start_with?("HEAD")
problem "#{name}: non-HEAD version name (#{version}) should not begin with HEAD"
end
end
if formula.stable && formula.devel
if formula.devel.version < formula.stable.version
problem "devel version #{formula.devel.version} is older than stable version #{formula.stable.version}"
elsif formula.devel.version == formula.stable.version
problem "stable and devel versions are identical"
end
end
return unless @core_tap
problem "Formulae in homebrew/core should not have a `devel` spec" if formula.devel
if formula.head && @versioned_formula
head_spec_message = "Versioned formulae should not have a `HEAD` spec"
problem head_spec_message unless VERSIONED_HEAD_SPEC_ALLOWLIST.include?(formula.name)
end
stable = formula.stable
return unless stable
return unless stable.url
stable_version_string = stable.version.to_s
stable_url_version = Version.parse(stable.url)
_, stable_url_minor_version, = stable_url_version.to_s
.split(".", 3)
.map(&:to_i)
formula_suffix = stable_version_string.split(".").last.to_i
throttled_rate = THROTTLED_FORMULAE[formula.name]
if throttled_rate && formula_suffix.modulo(throttled_rate).nonzero?
problem "should only be updated every #{throttled_rate} releases on multiples of #{throttled_rate}"
end
case (url = stable.url)
when /[\d._-](alpha|beta|rc\d)/
matched = Regexp.last_match(1)
version_prefix = stable_version_string.sub(/\d+$/, "")
return if UNSTABLE_ALLOWLIST[formula.name] == version_prefix
problem "Stable version URLs should not contain #{matched}"
when %r{download\.gnome\.org/sources}, %r{ftp\.gnome\.org/pub/GNOME/sources}i
version_prefix = stable_version_string.split(".")[0..1].join(".")
return if GNOME_DEVEL_ALLOWLIST[formula.name] == version_prefix
return if stable_url_version < Version.create("1.0")
return if stable_url_minor_version.even?
problem "#{stable.version} is a development release"
when %r{isc.org/isc/bind\d*/}i
return if stable_url_minor_version.even?
problem "#{stable.version} is a development release"
when %r{^https://github.com/([\w-]+)/([\w-]+)/}
owner = Regexp.last_match(1)
repo = Regexp.last_match(2)
tag = url.match(%r{^https://github\.com/[\w-]+/[\w-]+/archive/([^/]+)\.(tar\.gz|zip)$})
.to_a
.second
tag ||= url.match(%r{^https://github\.com/[\w-]+/[\w-]+/releases/download/([^/]+)/})
.to_a
.second
begin
if @online && (release = GitHub.open_api("#{GitHub::API_URL}/repos/#{owner}/#{repo}/releases/tags/#{tag}"))
if release["prerelease"] && !GITHUB_PRERELEASE_ALLOWLIST.include?(formula.name)
problem "#{tag} is a GitHub prerelease"
elsif release["draft"]
problem "#{tag} is a GitHub draft"
end
end
rescue GitHub::HTTPNotFoundError
# No-op if we can't find the release.
nil
end
end
end
def audit_revision_and_version_scheme
return unless @git
return unless formula.tap # skip formula not from core or any taps
return unless formula.tap.git? # git log is required
return if formula.stable.blank?
fv = FormulaVersions.new(formula)
current_version = formula.stable.version
current_checksum = formula.stable.checksum
current_version_scheme = formula.version_scheme
current_revision = formula.revision
previous_version = nil
previous_version_scheme = nil
previous_revision = nil
newest_committed_version = nil
newest_committed_checksum = nil
newest_committed_revision = nil
fv.rev_list("origin/master") do |rev|
fv.formula_at_revision(rev) do |f|
stable = f.stable
next if stable.blank?
previous_version = stable.version
previous_checksum = stable.checksum
previous_version_scheme = f.version_scheme
previous_revision = f.revision
newest_committed_version ||= previous_version
newest_committed_checksum ||= previous_checksum
newest_committed_revision ||= previous_revision
end
break if previous_version && current_version != previous_version
end
if current_version == previous_version &&
current_checksum != newest_committed_checksum
problem(
"stable sha256 changed without the version also changing; " \
"please create an issue upstream to rule out malicious " \
"circumstances and to find out why the file changed.",
)
end
if !newest_committed_version.nil? &&
current_version < newest_committed_version &&
current_version_scheme == previous_version_scheme
problem "stable version should not decrease (from #{newest_committed_version} to #{current_version})"
end
unless previous_version_scheme.nil?
if current_version_scheme < previous_version_scheme
problem "version_scheme should not decrease (from #{previous_version_scheme} " \
"to #{current_version_scheme})"
elsif current_version_scheme > (previous_version_scheme + 1)
problem "version_schemes should only increment by 1"
end
end
if previous_version != newest_committed_version &&
!current_revision.zero? &&
current_revision == newest_committed_revision &&
current_revision == previous_revision
problem "'revision #{current_revision}' should be removed"
elsif current_version == previous_version &&
!previous_revision.nil? &&
current_revision < previous_revision
problem "revision should not decrease (from #{previous_revision} to #{current_revision})"
elsif newest_committed_revision &&
current_revision > (newest_committed_revision + 1)
problem "revisions should only increment by 1"
end
end
def audit_text
bin_names = Set.new
bin_names << formula.name
bin_names += formula.aliases
[formula.bin, formula.sbin].each do |dir|
next unless dir.exist?
bin_names += dir.children.map(&:basename).map(&:to_s)
end
bin_names.each do |name|
["system", "shell_output", "pipe_output"].each do |cmd|
if text.to_s.match?(/test do.*#{cmd}[(\s]+['"]#{Regexp.escape(name)}[\s'"]/m)
problem %Q(fully scope test #{cmd} calls, e.g. #{cmd} "\#{bin}/#{name}")
end
end
end
end
def audit_reverse_migration
# Only enforce for new formula being re-added to core
return unless @strict
return unless @core_tap
return unless formula.tap.tap_migrations.key?(formula.name)
problem <<~EOS
#{formula.name} seems to be listed in tap_migrations.json!
Please remove #{formula.name} from present tap & tap_migrations.json
before submitting it to Homebrew/homebrew-#{formula.tap.repo}.
EOS
end
def audit_prefix_has_contents
return unless formula.prefix.directory?
return unless Keg.new(formula.prefix).empty_installation?
problem <<~EOS
The installation seems to be empty. Please ensure the prefix
is set correctly and expected files are installed.
The prefix configure/make argument may be case-sensitive.
EOS
end
def quote_dep(dep)
dep.is_a?(Symbol) ? dep.inspect : "'#{dep}'"
end
def problem_if_output(output)
problem(output) if output
end
def audit
only_audits = @only
except_audits = @except
methods.map(&:to_s).grep(/^audit_/).each do |audit_method_name|
name = audit_method_name.delete_prefix("audit_")
if only_audits
next unless only_audits.include?(name)
elsif except_audits
next if except_audits.include?(name)
end
send(audit_method_name)
end
end
private
def problem(p)
@problems << p
end
def new_formula_problem(p)
@new_formula_problems << p
end
def head_only?(formula)
formula.head && formula.devel.nil? && formula.stable.nil?
end
def devel_only?(formula)
formula.devel && formula.stable.nil?
end
end
class ResourceAuditor
attr_reader :name, :version, :checksum, :url, :mirrors, :using, :specs, :owner, :spec_name, :problems
def initialize(resource, spec_name, options = {})
@name = resource.name
@version = resource.version
@checksum = resource.checksum
@url = resource.url
@mirrors = resource.mirrors
@using = resource.using
@specs = resource.specs
@owner = resource.owner
@spec_name = spec_name
@online = options[:online]
@strict = options[:strict]
@problems = []
end
def audit
audit_version
audit_download_strategy
audit_urls
self
end
def audit_version
if version.nil?
problem "missing version"
elsif !version.detected_from_url?
version_text = version
version_url = Version.detect(url, specs)
if version_url.to_s == version_text.to_s && version.instance_of?(Version)
problem "version #{version_text} is redundant with version scanned from URL"
end
end
end
def audit_download_strategy
url_strategy = DownloadStrategyDetector.detect(url)
if using == :git || url_strategy == GitDownloadStrategy
problem "Git should specify :revision when a :tag is specified." if specs[:tag] && !specs[:revision]
end
return unless using
if using == :cvs
mod = specs[:module]
problem "Redundant :module value in URL" if mod == name
if url.match?(%r{:[^/]+$})
mod = url.split(":").last
if mod == name
problem "Redundant CVS module appended to URL"
else
problem "Specify CVS module as `:module => \"#{mod}\"` instead of appending it to the URL"
end
end
end
return unless url_strategy == DownloadStrategyDetector.detect("", using)
problem "Redundant :using value in URL"
end
def self.curl_openssl_and_deps
@curl_openssl_and_deps ||= begin
formulae_names = ["curl", "openssl"]
formulae_names += formulae_names.flat_map do |f|
Formula[f].recursive_dependencies.map(&:name)
end
formulae_names.uniq
rescue FormulaUnavailableError
[]
end
end
def audit_urls
return unless @online
urls = [url] + mirrors
urls.each do |url|
next if !@strict && mirrors.include?(url)
strategy = DownloadStrategyDetector.detect(url, using)
if strategy <= CurlDownloadStrategy && !url.start_with?("file")
# A `brew mirror`'ed URL is usually not yet reachable at the time of
# pull request.
next if url.match?(%r{^https://dl.bintray.com/homebrew/mirror/})
if http_content_problem = curl_check_http_content(url)
problem http_content_problem
end
elsif strategy <= GitDownloadStrategy
problem "The URL #{url} is not a valid git URL" unless Utils.git_remote_exists? url
elsif strategy <= SubversionDownloadStrategy
next unless DevelopmentTools.subversion_handles_most_https_certificates?
next unless Utils.svn_available?
problem "The URL #{url} is not a valid svn URL" unless Utils.svn_remote_exists? url
end
end
end
def problem(text)
@problems << text
end
end
end
| 34.292802 | 118 | 0.61938 |
ac8fd29817f47638a19523e203a9b628195c7177 | 504 | class Boris < Coder
def initialize
@name = '丁博约'
@salary = 100000
end
def work(remain_difficulty)
if rand(10) > 4
forward = rand(100...500)
puts "#{name} 灵感大发,成功将项目推进#{forward}"
remain_difficulty - forward
else
bugs = rand(1...5)
fallback = bugs * rand(0...50)
puts "#{name} 进入了拖延症晚期,项目难度增加#{fallback}"
remain_difficulty + fallback
end
end
def pay(company_money)
puts "#{name}领取了#{salary}元工资,然后一路向西去了东莞"
company_money - salary
end
end
| 20.16 | 47 | 0.630952 |
ed11699e494a315b9e53b7df396a379a383910c3 | 3,314 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::LooseForeignKeys do
describe 'verify all definitions' do
subject(:definitions) { described_class.definitions }
it 'all definitions have assigned a known gitlab_schema and on_delete' do
is_expected.to all(have_attributes(
options: a_hash_including(
column: be_a(String),
gitlab_schema: be_in(Gitlab::Database.schemas_to_base_models.symbolize_keys.keys),
on_delete: be_in([:async_delete, :async_nullify])
),
from_table: be_a(String),
to_table: be_a(String)
))
end
context 'ensure keys are sorted' do
it 'does not have any keys that are out of order' do
parsed = YAML.parse_file(described_class.loose_foreign_keys_yaml_path)
mapping = parsed.children.first
table_names = mapping.children.select(&:scalar?).map(&:value)
expect(table_names).to eq(table_names.sort), "expected sorted table names in the YAML file"
end
end
context 'ensure no duplicates are found' do
it 'does not have duplicate tables defined' do
# since we use hash to detect duplicate hash keys we need to parse YAML document
parsed = YAML.parse_file(described_class.loose_foreign_keys_yaml_path)
expect(parsed).to be_document
expect(parsed.children).to be_one, "YAML has a single document"
# require hash
mapping = parsed.children.first
expect(mapping).to be_mapping, "YAML has a top-level hash"
# find all scalars with names
table_names = mapping.children.select(&:scalar?).map(&:value)
expect(table_names).not_to be_empty, "YAML has a non-zero tables defined"
# expect to not have duplicates
expect(table_names).to contain_exactly(*table_names.uniq)
end
it 'does not have duplicate column definitions' do
# ignore other modifiers
all_definitions = definitions.map do |definition|
{ from_table: definition.from_table, to_table: definition.to_table, column: definition.column }
end
# expect to not have duplicates
expect(all_definitions).to contain_exactly(*all_definitions.uniq)
end
end
describe 'ensuring database integrity' do
def base_models_for(table)
parent_table_schema = Gitlab::Database::GitlabSchema.table_schema(table)
Gitlab::Database.schemas_to_base_models.fetch(parent_table_schema)
end
it 'all `to_table` tables are present' do
definitions.each do |definition|
base_models_for(definition.to_table).each do |model|
expect(model.connection).to be_table_exist(definition.to_table)
end
end
end
it 'all `from_table` tables are present' do
definitions.each do |definition|
base_models_for(definition.from_table).each do |model|
expect(model.connection).to be_table_exist(definition.from_table)
expect(model.connection).to be_column_exist(definition.from_table, definition.column)
end
end
end
end
end
end
| 38.988235 | 111 | 0.652384 |
e8c315c54a36052dfcc74251e0569a306fec0b8b | 2,005 | require "gitlab/ci/lint/yml"
require "gitlab/ci/lint/log"
require "gitlab/ci/lint/system"
require "gitlab/ci/lint/actions"
require "gitlab/ci/lint/configuration"
module Gitlab
module Ci
module Lint
def self.validate values, configuration, options
system = GitLab::CI::Lint::System.new
actions = GitLab::CI::Lint::Actions.new
        configuration ||= GitLab::CI::Lint::Configuration.new
system.file_exist?(values, "Error: You must specify the values.yml file")
values = File.absolute_path(values)
system.file_is_readable?(values, "Error: Could not find file at '#{values}'")
yml_reader = GitLab::CI::Lint::YMLReader.new(values)
content = yml_reader.get_content
gitlab = content["gitlab"]
logger = GitLab::CI::Lint::Log.instance
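        # Resolve each setting with precedence: CLI options, then values.yml, then configuration defaults.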
        gitlab_endpoint = options["endpoint"] ||
          (gitlab["endpoint"].to_s.empty? ? configuration.gitlab_endpoint : gitlab["endpoint"])
        gitlab_token = options["token"] ||
          (gitlab["token"].to_s.empty? ? configuration.gitlab_token : gitlab["token"])
        timeout = options["timeout"] ||
          (gitlab["timeout"].to_s.empty? ? configuration.timeout : gitlab["timeout"])
        gitlab_ci_file = options["file"] ||
          (gitlab["file"].to_s.empty? ? configuration.gitlab_ci_file : gitlab["file"])
logger.info("Starting GitLab CI YML Validation in #{gitlab_ci_file}...")
headers = gitlab_token ? { "Content-Type" => "application/json", "Private-Token" => gitlab_token } : { "Content-Type" => "application/json" }
actions.validate_gitlab_ci_yml(gitlab_endpoint, gitlab_ci_file, headers, timeout)
return 0
end
end
end
end
| 35.175439 | 149 | 0.630923 |
1a211f0279a34d40d3c29c5e99b235d9049e2581 | 378 | cask "timelane" do
version "2.0"
sha256 "3334fbb6945d1f0cb8f535c399297356037f4fdd5c570fd7a7325f5b4bd8b57a"
url "https://github.com/icanzilb/Timelane/releases/download/#{version}/Timelane.app-#{version}.zip",
verified: "github.com/icanzilb/Timelane/"
name "Timelane"
homepage "https://timelane.tools/"
depends_on macos: ">= :mojave"
app "Timelane.app"
end
| 27 | 102 | 0.738095 |
e9a2da7504201778ddfbaa0505e9f052b0927740 | 2,824 | require 'spec_helper'
describe FarmEvents::Create do
let(:seq) { FakeSequence.create() }
it 'Builds a farm_event' do
device = seq.device
start_time = '2027-02-17T15:16:17.000Z'
end_time = '2029-02-17T18:19:20.000Z'
farm_event = FarmEvents::Create.run!(device: device,
executable_id: seq.id,
executable_type: seq.class.name,
executable: seq,
start_time: start_time,
end_time: end_time,
repeat: 4,
time_unit: 'minutely').reload
expect(farm_event).to be_kind_of(FarmEvent)
expect(farm_event.device).to eq(device)
expect(farm_event.executable).to eq(seq)
expect(farm_event.start_time.to_time).to eq(Time.parse start_time)
expect(farm_event.end_time.to_time).to eq(Time.parse end_time)
expect(farm_event.repeat).to eq(4)
expect(farm_event.time_unit).to eq('minutely')
end
it 'Prevents backwards start/end times' do
device = seq.device
start_time = '2015-02-17T15:16:17.000Z'
end_time = '2099-02-17T18:19:20.000Z'
farm_event = FarmEvents::Create.run(device: device,
executable_id: seq.id,
executable_type: seq.class.name,
executable: seq,
start_time: end_time,
end_time: start_time,
repeat: 4,
time_unit: 'minutely')
expect(farm_event.errors["end_time"]).to be
expect(farm_event.errors["end_time"].message)
.to include(FarmEvents::Create::BACKWARDS_END_TIME)
end
it "disallows creation of farm events with 5 digit years" do
device = seq.device
start_time = "+099999-08-18T12:32:00.000Z"
end_time = "+099999-08-19T12:32:00.000Z"
farm_event = FarmEvents::Create.run(device: device,
executable_id: seq.id,
executable_type: seq.class.name,
executable: seq,
end_time: end_time,
start_time: start_time,
repeat: 4,
time_unit: 'minutely')
expect(farm_event.errors.message_list.join)
.to include("too far in the future")
end
end
| 47.066667 | 76 | 0.472734 |
abe08c50f363fc7004f3ae8dfc2bab3817f1041f | 574 | class Post < ActiveRecord::Base
belongs_to :user
has_many :comments, dependent: :destroy
has_many :hidden_posts
has_many :users, through: :hidden_posts
as_enum :category, everyday_life: 1, dreams_and_hopes: 2, regrets: 3, thoughts: 4, other: 5
acts_as_likeable
validates :content, presence: {message: I18n.t('posts.cannot_be_blank')}, allow_blank: false
attachment :image_content, type: :image
auto_html_for :content do
html_escape
image
youtube
# twitter
link :target => '_blank', :rel => 'nofollow'
simple_format
end
end | 22.076923 | 94 | 0.714286 |
39f769705ba81bae896c9e7e71d06d75d95b1c76 | 7,872 | def create_standard_decomposition(standard_decomposition_object, options = {})
default_params.merge(options)
standard_decomposition_object.create(params)
end
describe Pbl::Models::Projects::StandardDecomposition do
shared_examples 'collect standard_decomposition' do
it { expect(standard_decomposition.role).to eq('role') }
it { expect(standard_decomposition.verb).to eq('verb') }
it { expect(standard_decomposition.technique).to eq('technique') }
it { expect(standard_decomposition.noun).to eq('noun') }
it { expect(standard_decomposition.product_name).to eq('product_name') }
it { expect(standard_decomposition.product_id).to eq('product_id') }
end
subject(:standard_decomposition_object) { described_class }
let(:default_params) {
{
standard_decomposition: {
role: 'role',
verb: 'verb',
technique: 'technique',
noun: 'noun',
product_name: 'product_name',
product_id: 'product_id'
}
}
}
describe '.create' do
context 'successful' do
let(:params) { {} }
before(:each) do
stub_request(:post, 'http://0.0.0.0:3001/pbl/standard_decompositions').to_return(
body: standard_decomposition_object.new(default_params[:standard_decomposition]).to_json,
status: 201
)
end
subject!(:standard_decomposition) { create_standard_decomposition(standard_decomposition_object, params) }
it { expect(standard_decomposition.code).to eq(201) }
it_behaves_like 'collect standard_decomposition'
end
context 'failed' do
let(:return_body) {
body = {error: {name: ['name error']}}
JSON.generate(body)
}
let(:params) { {} }
before(:each) do
stub_request(:post, 'http://0.0.0.0:3001/pbl/standard_decompositions').to_return(
body: return_body,
status: 422,
headers: {'Header' => 'header'}
)
end
subject!(:standard_decomposition) { create_standard_decomposition(standard_decomposition_object, params) }
it { expect(standard_decomposition.code).to eq(422) }
it { expect(standard_decomposition.body).to eq(return_body) }
it { expect(standard_decomposition.headers).to eq({'Header' => 'header'}) }
end
end
describe '.update' do
context 'successful' do
let(:update_params) { {name: 'update_name'} }
before(:each) do
stub_request(:patch, 'http://0.0.0.0:3001/pbl/standard_decompositions/1').to_return(
body: nil,
status: 200
)
end
subject(:update_standard_decomposition) { standard_decomposition_object.update('1', update_params) }
# it { expect(update_standard_decomposition.first_name).to eq('update_first_name') }
# it { expect(update_standard_decomposition.last_name).to eq('update_last_name') }
# it { expect(update_standard_decomposition.age).to eq(21) }
# it { expect(update_standard_decomposition.gender).to eq(0) }
# it { expect(update_standard_decomposition.code).to eq(200) }
it { expect(update_standard_decomposition.body).to eq('') }
end
context 'failed' do
let(:update_params) { {name: 'update_name'} }
let(:return_body) {
body = {error: {name: ['name error']}}
JSON.generate(body)
}
before(:each) do
stub_request(:patch, 'http://0.0.0.0:3001/pbl/standard_decompositions/1').to_return(
body: return_body,
status: 422
)
end
subject(:update_standard_decomposition) { standard_decomposition_object.update('1', update_params) }
it { expect(update_standard_decomposition.code).to eq(422) }
it { expect(update_standard_decomposition.body).to eq(return_body) }
end
end
describe '.destroy' do
before(:each) do
stub_request(:delete, 'http://0.0.0.0:3001/pbl/standard_decompositions/1').to_return(
body: nil,
status: 200
)
end
subject(:result) { standard_decomposition_object.destroy(1) }
it { expect(result.code).to eq(200) }
it { expect(result.body).to eq('') }
end
describe '.find!' do
before(:each) do
stub_request(:get, 'http://0.0.0.0:3001/pbl/standard_decompositions/1').to_return(
body: standard_decomposition_object.new(default_params[:standard_decomposition]).to_json,
status: 200
)
stub_request(:get, 'http://0.0.0.0:3001/pbl/standard_decompositions/2').to_return(
body: nil,
status: 404
)
end
context 'standard_decomposition exist' do
subject(:standard_decomposition) { standard_decomposition_object.find!(1) }
it 'find' do
expect(standard_decomposition.success?).to be_truthy
expect(standard_decomposition.code).to eq(200)
expect(standard_decomposition.role).to eq('role')
expect(standard_decomposition.verb).to eq('verb')
expect(standard_decomposition.technique).to eq('technique')
expect(standard_decomposition.noun).to eq('noun')
expect(standard_decomposition.product_name).to eq('product_name')
expect(standard_decomposition.product_id).to eq('product_id')
end
end
context 'standard_decomposition do not exist' do
it { expect { standard_decomposition_object.find!(2) }.to raise_error(Pbl::Exceptions::NotFoundException) }
end
end
describe '.find' do
before(:each) do
stub_request(:get, 'http://0.0.0.0:3001/pbl/standard_decompositions/1').to_return(
body: standard_decomposition_object.new(default_params[:standard_decomposition]).to_json,
status: 200
)
stub_request(:get, 'http://0.0.0.0:3001/pbl/standard_decompositions/2').to_return(
body: '{}',
status: 404,
headers: {}
)
end
context 'standard_decomposition is exist' do
let(:standard_decomposition) { standard_decomposition_object.find(1) }
it 'find a standard_decomposition' do
expect(standard_decomposition).to be_truthy
expect(standard_decomposition.code).to eq(200)
expect(standard_decomposition.role).to eq('role')
expect(standard_decomposition.verb).to eq('verb')
expect(standard_decomposition.technique).to eq('technique')
expect(standard_decomposition.noun).to eq('noun')
expect(standard_decomposition.product_name).to eq('product_name')
expect(standard_decomposition.product_id).to eq('product_id')
end
end
context 'standard_decomposition is not exist' do
subject(:standard_decomposition) { standard_decomposition_object.find('2') }
it { expect(standard_decomposition.code).to eq(404) }
it { expect(standard_decomposition.headers).to be_a Hash }
it { expect(standard_decomposition.body).to eq('{}') }
it { expect(standard_decomposition.success?).to be_falsey }
end
end
describe '.where' do
before(:each) do
clazz_instances = []
clazz_instances << standard_decomposition_object.new(default_params[:standard_decomposition])
stub_request(:get, 'http://0.0.0.0:3001/pbl/standard_decompositions/').to_return(
body: {'data' => clazz_instances, 'meta' => {total_count: 1, total_pages: 1, per_page: 1, current_page: 1}}.to_json,
status: 200
)
end
let(:clazz_instances) { standard_decomposition_object.all }
it { expect(clazz_instances).to be_a Hash }
it { expect(clazz_instances.fetch(:data).first.role).to eq('role') }
it { expect(clazz_instances.fetch(:data).first.verb).to eq('verb') }
it { expect(clazz_instances.fetch(:meta)[:total_count]).to eq(1) }
it { expect(clazz_instances.fetch(:meta)[:total_pages]).to eq(1) }
it { expect(clazz_instances.fetch(:meta)[:per_page]).to eq(1) }
it { expect(clazz_instances.fetch(:meta)[:current_page]).to eq(1) }
end
end | 37.665072 | 124 | 0.669842 |
e2b5977667007d4c588ce3105916dade218d124d | 384 | class DiscussionPolicy < ApplicationPolicy
def show?
return true unless record.committee.private?
return false unless user
user.is_in_committee(record.committee)
end
def close?
return false unless record.active?
record.owner == user || admin? || committee_admin?(record.committee)
end
def create?
user.is_in_committee(record.committee)
end
end
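# Usage sketch (assuming the standard Pundit-style ApplicationPolicy
# initializer of (user, record); `current_user` and `discussion` are
# placeholders for illustration):
#
#   policy = DiscussionPolicy.new(current_user, discussion)
#   policy.show?   # false for a private committee unless the user belongs to it
#   policy.close?  # owner, admin or committee admin, and only while the discussion is active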
| 20.210526 | 72 | 0.729167 |
1ae3ae84b31011eeac48068288871d61b2a5bebf | 356 | ##
# Kernel
#
# ISO 15.3.1
module Kernel
def print(*a)
raise NotImplementedError.new('print not available')
end
def puts(*a)
raise NotImplementedError.new('puts not available')
end
def p(*a)
raise NotImplementedError.new('p not available')
end
def printf(*args)
raise NotImplementedError.new('printf not available')
end
end
| 18.736842 | 57 | 0.69382 |
b962b2836626fa8c35adea4089e1337159e7dc55 | 2,094 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Adds additional error checking when serving assets at runtime.
# Checks for improperly declared sprockets dependencies.
# Raises helpful error messages.
config.assets.raise_runtime_errors = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# mailer
config.action_mailer.default_url_options = {host: 'localhost:3000'}
config.mailer_sender = '[email protected]'
config.action_mailer.delivery_method = :smtp
config.action_mailer.asset_host = 'http://127.0.0.1:3000'
config.action_mailer.smtp_settings = {
address: 'localhost',
port: 1025,
domain: 'course-application',
}
if defined? BetterErrors
BetterErrors.editor = proc { |full_path, line|
local_path = "/Users/knx/Developer/Sites/course-application/"
full_path = full_path.sub(Rails.root.to_s, local_path)
"subl://open?url=file://#{full_path}&line=#{line}"
}
end
end
| 36.736842 | 85 | 0.743075 |
399241cda0602ae07ee478183262f128dc530dc0 | 2,233 | require 'spec_helper_acceptance'
apply_manifest_opts = {
catch_failures: true,
debug: true,
trace: true,
}
describe 'Centreon host group resource:' do
before(:each) do
end
describe 'Manage With minimal parameter' do
it 'create successfully' do
pp = <<-EOS
centreon_host_group{'test_rspec':
ensure => 'present'
}
EOS
result = apply_manifest(pp, apply_manifest_opts)
expect(result.exit_code).to eq 2
result = apply_manifest(pp, apply_manifest_opts)
expect(result.exit_code).to eq 0
end
end
describe 'Manage With all parameter' do
it 'create successfully' do
pp = <<-EOS
centreon_host_group{'test_rspec2':
ensure => 'present',
enable => true,
description => 'my HG',
comment => 'Managed by puppet',
note => 'this is my note',
note_url => 'http://localhost/note',
action_url => 'http://localhost/action',
}
EOS
result = apply_manifest(pp, apply_manifest_opts)
expect(result.exit_code).to eq 2
result = apply_manifest(pp, apply_manifest_opts)
expect(result.exit_code).to eq 0
end
end
describe 'Update' do
it 'update successfully' do
pp = <<-EOS
centreon_host_group{'test_rspec':
ensure => 'present',
enable => true,
description => 'my HG',
comment => 'Managed by puppet',
note => 'this is my note',
note_url => 'http://localhost/note',
action_url => 'http://localhost/action',
}
EOS
result = apply_manifest(pp, apply_manifest_opts)
expect(result.exit_code).to eq 2
result = apply_manifest(pp, apply_manifest_opts)
expect(result.exit_code).to eq 0
end
end
describe 'Destroy' do
it 'destroy successfully' do
pp = <<-EOS
centreon_host_group{'test_rspec':
ensure => 'absent',
}
centreon_host_group{'test_rspec2':
ensure => 'absent',
}
EOS
result = apply_manifest(pp, apply_manifest_opts)
expect(result.exit_code).to eq 2
end
end
end
| 26.270588 | 54 | 0.580385 |
1c54267f8b700ff96844296df61fd500b6220812 | 484 | # frozen_string_literal: true
class Folio::Console::Form::ErrorsCell < Folio::ConsoleCell
def show
render if errors.present?
end
def full_messages
@full_messages ||= errors.full_messages
end
def errors
options[:errors] || model.object.errors
end
def field_name(key)
key
# *parts, final = key.to_s.split('.')
# [
# model.object_name,
# *parts.map { |part| "[#{part}_attributes]" },
# "[#{final}]",
# ].join('')
end
end
| 18.615385 | 59 | 0.609504 |
bb840be283c6cae98ceb8f6c592ca116ee675b61 | 139 | require File.expand_path('../../../spec_helper', __FILE__)
describe "Array#size" do
it "needs to be reviewed for spec completeness"
end
| 23.166667 | 58 | 0.726619 |
219826edd760613ad1a4dd5582f77bfe45e58659 | 3,420 | # frozen_string_literal: true
# Cloud Foundry Java Buildpack
# Copyright 2013-2020 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'java_buildpack/component/versioned_dependency_component'
require 'java_buildpack/framework'
module JavaBuildpack
module Framework
# Encapsulates the functionality for running with Checkmarx IAST Agent
class CheckmarxIastAgent < JavaBuildpack::Component::VersionedDependencyComponent
include JavaBuildpack::Util
# Creates an instance. In addition to the functionality inherited from +BaseComponent+, +@version+ and +@uri+
# instance variables are exposed.
#
# @param [Hash] context a collection of utilities used by components
def initialize(context)
@application = context[:application]
@component_name = self.class.to_s.space_case
@configuration = context[:configuration]
@droplet = context[:droplet]
if supports?
@version = ''
@uri = @application.services.find_service(FILTER, 'server')['credentials']['server'].chomp +
'/iast/compilation/download/JAVA'
end
@logger = JavaBuildpack::Logging::LoggerFactory.instance.get_logger CheckmarxIastAgent
end
# (see JavaBuildpack::Component::BaseComponent#compile)
def compile
JavaBuildpack::Util::Cache::InternetAvailability.instance.available(
true, 'The Checkmarx IAST download location is always accessible'
) do
download_zip(false)
end
# Disable cache (no point, when running in a container)
File.open(@droplet.sandbox + 'cx_agent.override.properties', 'a') do |f|
f.write("\nenableWeavedClassCache=false\n")
end
end
# (see JavaBuildpack::Component::BaseComponent#release)
def release
# Default cxAppTag to application name if not set as an env var
app_tag = ENV.fetch('cxAppTag', nil) || application_name
# Default team to CxServer if not set as env var
team = ENV.fetch('cxTeam', nil) || 'CxServer'
@droplet.java_opts
.add_javaagent(@droplet.sandbox + 'cx-launcher.jar')
.add_preformatted_options('-Xverify:none')
.add_system_property('cx.logToConsole', 'true')
.add_system_property('cx.appName', application_name)
.add_system_property('cxAppTag', app_tag)
.add_system_property('cxTeam', team)
end
protected
# (see JavaBuildpack::Component::VersionedDependencyComponent#supports?)
def supports?
@application.services.find_service(FILTER, 'server')
end
private
FILTER = /^checkmarx-iast$/.freeze
private_constant :FILTER
def application_name
@application.details['application_name'] || 'ROOT'
end
end
end
end
| 34.545455 | 116 | 0.676608 |
6aa1280fdb929e33286b23d5dd63e803c4c16ba2 | 1,547 | module Moveable
attr_reader :speed, :heading
attr_writer :fuel_efficiency, :fuel_capacity
def range
@fuel_capacity * @fuel_efficiency
end
end
class WheeledVehicle
include Moveable
def initialize(tire_array, km_traveled_per_liter, liters_of_fuel_capacity)
@tires = tire_array
self.fuel_efficiency = km_traveled_per_liter
self.fuel_capacity = liters_of_fuel_capacity
end
def tire_pressure(tire_index)
@tires[tire_index]
end
def inflate_tire(tire_index, pressure)
@tires[tire_index] = pressure
end
end
class Auto < WheeledVehicle
def initialize
# 4 tires are various tire pressures
super([30,30,32,32], 50, 25.0)
end
end
class Motorcycle < WheeledVehicle
include Moveable
def initialize
# 2 tires are various tire pressures
super([20,20], 80, 8.0)
end
end
class Seacraft
include Moveable
def range
range_by_using_fuel = super
return range_by_using_fuel + 10
end
attr_accessor :hull_count, :propeller_count
def initialize(num_propellers, num_hulls, fuel_efficiency, fuel_capacity)
@propeller_count = num_propellers
@hull_count = num_hulls
@fuel_efficiency = fuel_efficiency
@fuel_capacity = fuel_capacity
end
end
class Catamaran < Seacraft
end
class Motorboat < Seacraft
def initialize(km_traveled_per_liter, liters_of_fuel_capacity)
# set up 1 hull and 1 propeller
super(1, 1, km_traveled_per_liter, liters_of_fuel_capacity)
end
end
hello = Catamaran.new(2, 2, 50, 2.0)
p hello.propeller_count
p hello.hull_count
| 20.626667 | 76 | 0.751778 |
1a5d4783ef79dbc5ae7ccb3518fb62e334fc6850 | 1,640 | module Alchemy
module Custom
module Model
class Cloner < ::ActiveType::Object
attribute :language_id, :integer
attribute :site_id, :integer
validates :language_id, :site_id, presence: true
validate :check_lang_in_site, unless: -> {site_id.blank?}
###
        # Returns true if the clone succeeded, false otherwise.
def apply
if valid?
cloned = self.send(self.to_cloner_name).clone_to_other_lang(get_attributes_for_clone.with_indifferent_access)
if cloned.valid?
true
else
cloned.errors.full_messages.each do | error|
self.errors.add(:base, error)
end
false
end
else
false
end
end
private
class << self
def cloner_of(name,scope = nil, options={})
class_attribute :to_cloner_name
name = name.to_sym
if options[:foreign_key]
attribute options[:foreign_key]
else
attribute :"#{name}_id"
end
belongs_to name, scope, options
self.to_cloner_name = name
end
end
def get_attributes_for_clone
self.attributes
end
protected
def check_lang_in_site
site = Alchemy::Site.find self.site_id
if !site.languages.pluck(:id).include? self.language_id
errors.add(:language_id, :isnt_in_site)
end
end
end
end
end
end | 24.117647 | 121 | 0.534756 |
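# Usage sketch (illustrative only): a concrete cloner names its target record
# with the cloner_of macro; the Article model and its clone_to_other_lang
# method below are assumptions, not part of this file.
#
#   class ArticleCloner < Alchemy::Custom::Model::Cloner
#     cloner_of :article
#   end
#
#   cloner = ArticleCloner.new(article_id: article.id,
#                              language_id: target_language.id,
#                              site_id: site.id)
#   cloner.apply || puts(cloner.errors.full_messages)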
6a69031a5d4fbc8e987d4512af81b1fb5a8fae92 | 853 | class CreateInvoices < ActiveRecord::Migration
def self.up
begin
create_table(:accounts, :options => 'ENGINE=InnoDB DEFAULT CHARSET=utf8') do |t|
t.timestamps
end
rescue ActiveRecord::StatementInvalid => e
if e.message =~ /Table .* already exists/
puts "accounts table already exists, skipping..."
else
raise
end
end
create_table(:invoices, :options => 'ENGINE=InnoDB DEFAULT CHARSET=utf8') do |t|
t.references :account, :null => false
t.text :bill_to
t.date :date
t.string :terms
t.text :message
t.timestamps
end
execute "ALTER TABLE invoices ADD FOREIGN KEY (account_id) REFERENCES accounts(id)"
end
def self.down
execute "ALTER TABLE invoices DROP FOREIGN KEY invoices_ibfk_1"
drop_table :invoices
end
end
| 25.848485 | 87 | 0.644783 |
f891499c71dc237664802b29e532804a1175a1ca | 137 | require "test_helper"
class AdminControllerTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
end
| 17.125 | 59 | 0.737226 |
e8a8174f1edb08064779c1ecb6d29896184b78c1 | 2,261 | class Jeweler
class GemSpecHelper
attr_accessor :spec, :base_dir
def initialize(spec, base_dir = nil)
self.spec = spec
self.base_dir = base_dir || ''
yield spec if block_given?
end
def valid?
begin
parse
true
rescue
false
end
end
def write
File.open(path, 'w') do |f|
f.write self.to_ruby
end
end
def to_ruby
normalize_files(:files)
normalize_files(:extra_rdoc_files)
gemspec_ruby = @spec.to_ruby
gemspec_ruby = prettyify_array(gemspec_ruby, :files)
gemspec_ruby = prettyify_array(gemspec_ruby, :extra_rdoc_files)
gemspec_ruby = <<-END
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in #{Rake.application.rakefile}, and run 'rake gemspec'
#{gemspec_ruby}
END
end
def path
denormalized_path = File.join(@base_dir, "#{@spec.name}.gemspec")
absolute_path = File.expand_path(denormalized_path)
absolute_path.gsub(Dir.getwd + File::SEPARATOR, '')
end
def parse
data = self.to_ruby
parsed_gemspec = nil
Thread.new { parsed_gemspec = eval("$SAFE = 3\n#{data}", binding, path) }.join
parsed_gemspec
end
def normalize_files(array_attribute)
array = @spec.send(array_attribute)
# only keep files, no directories, and sort
array = array.select do |path|
File.file? File.join(@base_dir, path)
end.sort
@spec.send("#{array_attribute}=", array)
end
# Adds extra space when outputting an array. This helps create better version control diffs, because otherwise it is all on the same line.
def prettyify_array(gemspec_ruby, array_name)
gemspec_ruby.gsub(/s\.#{array_name.to_s} = \[.+?\]/) do |match|
leadin, files = match[0..-2].split("[")
leadin + "[\n #{files.gsub(%|", "|, %|",\n "|)}\n ]"
end
end
def gem_path
File.join(@base_dir, 'pkg', parse.file_name)
end
def update_version(version)
@spec.version = version.to_s
end
# Checks whether it uses the version helper or the users defined version.
def has_version?
      !@spec.version.nil?
end
end
end
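# Usage sketch (illustrative; the Gem::Specification built here is a
# placeholder): wraps a gemspec, normalizes its file lists and writes
# "<name>.gemspec" under the given base directory.
#
#   spec = Gem::Specification.new { |s| s.name = "example"; s.version = "0.1.0" }
#   helper = Jeweler::GemSpecHelper.new(spec, Dir.pwd)
#   helper.write if helper.valid?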
| 25.693182 | 142 | 0.623618 |
03d7c1661745c2e8a920cff7f7666bfadaf18196 | 343 | class Question < ApplicationRecord
belongs_to :student
belongs_to :course
has_many :comments
validates :content, presence: true
def asked_by
self.student.preferred_name
end
def asked_when
date_time = self.created_at
date_time.strftime("Asked %m/%d/%Y at %I:%M%p")
end
end
| 20.176471 | 55 | 0.635569 |
f71fcdb3336819ff50dca06681457f34500af406 | 858 | class PDFWriter
attr_reader :user, :table_array
def initialize(user, years:)
@user = user
@years = years
@table_array = user.table_array(readings)
end
def self.new_from_user_id(user_id, years:)
new(User.find(user_id), years: years)
end
def filename
user.last_name
end
def readings
user.readings.where(
"created_at >= ? AND created_at < ?",
DateTime.new(@years[0], 10, 1),
DateTime.new(@years[1], 10, 1)
)
end
def readings_count
@readings_count ||= readings.count
end
def readings_where_violation_is_possible_count
readings.select { |r| r.user.violation_possible?(r.created_at, r.outdoor_temp) }.count
end
def violation_count
@violation_count ||= readings.where(violation: true).count
end
def unit
user.apartment ? ", Unit #{user.apartment}" : ""
end
end
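# Usage sketch (the user id and season years are placeholder assumptions):
#
#   writer = PDFWriter.new_from_user_id(42, years: [2020, 2021])
#   writer.readings_count    # readings between Oct 1 2020 and Oct 1 2021
#   writer.violation_count   # subset of those flagged as violations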
| 20.428571 | 90 | 0.671329 |
bb17a30f7cceeb345904bbee3e039380704d1e31 | 347 | module Moneybird::Service
class Webhook
include Moneybird::Traits::AdministrationService
include Moneybird::Traits::Service
include Moneybird::Traits::FindAll
include Moneybird::Traits::Delete
def resource_class
Moneybird::Resource::Webhook
end
def path
"#{administration_id}/webhooks"
end
end
end | 21.6875 | 52 | 0.714697 |
7999c2ed5a9938e7dd9a2fa4b540a56d28666fc4 | 128 | require 'test_helper'
class CheckedOutItemTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 16 | 50 | 0.71875 |
2899327eac980b7bcfc02924f0d8b6432e7b60be | 226 | class Api::ApiController < ApplicationController
before_action :ensure_logged_in
private
def ensure_logged_in
unless logged_in?
render json: ["You must be logged in to do that"], status: 401
end
end
end
| 20.545455 | 68 | 0.730088 |
ff25118fdaeb63a7cc6b5556c447b61e45b4489a | 1,965 | # encoding: utf-8
require 'travis/client'
require 'travis/tools/safe_string'
module Travis
module Client
class Artifact < Entity
CHUNKED = "application/json; chunked=true; version=2, application/json; version=2"
# @!parse attr_reader :job_id, :type, :body
attributes :job_id, :type, :body
# @!parse attr_reader :job
has :job
def encoded_body
Tools::SafeString.encoded(body)
end
def colorized_body
attributes['colorized_body'] ||= Tools::SafeString.colorized(body)
end
def clean_body
attributes['clean_body'] ||= Tools::SafeString.clean(body)
end
def current_body
attributes['current_body'] ||= begin
body = load_attribute('body')
body.to_s.empty? ? session.get_raw("jobs/#{job_id}/log") : body
end
end
def body(stream = block_given?)
return current_body unless block_given? or stream
return yield(current_body) unless stream and job.pending?
number = 0
session.listen(self) do |listener|
listener.on 'job:log' do |event|
next unless event.payload['number'] > number
number = event.payload['number']
yield event.payload['_log']
listener.disconnect if event.payload['final']
end
listener.on 'job:finished' do |event|
listener.disconnect
end
listener.on_connect do
data = session.get_raw("/logs/#{id}", nil, "Accept" => CHUNKED)['log']
if data['parts']
data['parts'].each { |p| yield p['content'] }
number = data['parts'].last['number'] if data['parts'].any?
else
yield data['body']
listener.disconnect
end
end
end
end
def pusher_entity
job
end
one :log
many :logs
aka :artifact
end
end
end
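# Usage sketch (illustrative; obtaining `job` through a Travis::Client session
# is assumed and not shown): streams log chunks while the job is pending,
# otherwise yields the full body once.
#
#   job.log.body { |chunk| print chunk }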
| 26.2 | 88 | 0.569466 |
269233b456a77a14854c233dfac30dc3d6a34216 | 1,265 | module Fog
module Parsers
module Compute
module AWS
class DescribeSubnets < Fog::Parsers::Base
def reset
@subnet = { 'tagSet' => {} }
@response = { 'subnetSet' => [] }
@tag = {}
end
def start_element(name, attrs = [])
super
case name
when 'tagSet'
@in_tag_set = true
end
end
def end_element(name)
if @in_tag_set
case name
when 'item'
@subnet['tagSet'][@tag['key']] = @tag['value']
@tag = {}
when 'key', 'value'
@tag[name] = value
when 'tagSet'
@in_tag_set = false
end
else
case name
when 'subnetId', 'state', 'vpcId', 'cidrBlock', 'availableIpAddressCount', 'availabilityZone'
@subnet[name] = value
when 'item'
@response['subnetSet'] << @subnet
@subnet = { 'tagSet' => {} }
when 'requestId'
@response[name] = value
end
end
end
end
end
end
end
end
| 26.354167 | 107 | 0.403162 |
ed6ceedc10fd10c9cfe31e23a8e46fb055517764 | 223,901 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module HealthcareV1beta1
# Activates the latest revision of the specified Consent by committing a new
# revision with `state` updated to `ACTIVE`. If the latest revision of the given
# Consent is in the `ACTIVE` state, no new revision is committed. A
# FAILED_PRECONDITION error occurs if the latest revision of the given consent
# is in the `REJECTED` or `REVOKED` state.
class ActivateConsentRequest
include Google::Apis::Core::Hashable
# Required. The resource name of the Consent artifact that contains
# documentation of the user's consent, of the form `projects/`project_id`/
# locations/`location_id`/datasets/`dataset_id`/consentStores/`consent_store_id`/
# consentArtifacts/`consent_artifact_id``. If the draft Consent had a Consent
# artifact, this Consent artifact overwrites it.
# Corresponds to the JSON property `consentArtifact`
# @return [String]
attr_accessor :consent_artifact
# Timestamp in UTC of when this Consent is considered expired.
# Corresponds to the JSON property `expireTime`
# @return [String]
attr_accessor :expire_time
# The time to live for this Consent from when it is marked as active.
# Corresponds to the JSON property `ttl`
# @return [String]
attr_accessor :ttl
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_artifact = args[:consent_artifact] if args.key?(:consent_artifact)
@expire_time = args[:expire_time] if args.key?(:expire_time)
@ttl = args[:ttl] if args.key?(:ttl)
end
end
# The request to analyze healthcare entities in a document.
class AnalyzeEntitiesRequest
include Google::Apis::Core::Hashable
# document_content is a document to be annotated.
# Corresponds to the JSON property `documentContent`
# @return [String]
attr_accessor :document_content
# A list of licensed vocabularies to use in the request, in addition to the
# default unlicensed vocabularies.
# Corresponds to the JSON property `licensedVocabularies`
# @return [Array<String>]
attr_accessor :licensed_vocabularies
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@document_content = args[:document_content] if args.key?(:document_content)
@licensed_vocabularies = args[:licensed_vocabularies] if args.key?(:licensed_vocabularies)
end
end
# Includes recognized entity mentions and relationships between them.
class AnalyzeEntitiesResponse
include Google::Apis::Core::Hashable
# The union of all the candidate entities that the entity_mentions in this
# response could link to. These are UMLS concepts or normalized mention content.
# Corresponds to the JSON property `entities`
# @return [Array<Google::Apis::HealthcareV1beta1::Entity>]
attr_accessor :entities
# entity_mentions contains all the annotated medical entities that were
# mentioned in the provided document.
# Corresponds to the JSON property `entityMentions`
# @return [Array<Google::Apis::HealthcareV1beta1::EntityMention>]
attr_accessor :entity_mentions
# relationships contains all the binary relationships that were identified
# between entity mentions within the provided document.
# Corresponds to the JSON property `relationships`
# @return [Array<Google::Apis::HealthcareV1beta1::EntityMentionRelationship>]
attr_accessor :relationships
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@entities = args[:entities] if args.key?(:entities)
@entity_mentions = args[:entity_mentions] if args.key?(:entity_mentions)
@relationships = args[:relationships] if args.key?(:relationships)
end
end
# An annotation record.
class Annotation
include Google::Apis::Core::Hashable
# AnnotationSource holds the source information of the annotation.
# Corresponds to the JSON property `annotationSource`
# @return [Google::Apis::HealthcareV1beta1::AnnotationSource]
attr_accessor :annotation_source
# Additional information for this annotation record, such as annotator and
# verifier information or study campaign.
# Corresponds to the JSON property `customData`
# @return [Hash<String,String>]
attr_accessor :custom_data
# Image annotation.
# Corresponds to the JSON property `imageAnnotation`
# @return [Google::Apis::HealthcareV1beta1::ImageAnnotation]
attr_accessor :image_annotation
# Resource name of the Annotation, of the form `projects/`project_id`/locations/`
# location_id`/datasets/`dataset_id`/annotationStores/`annotation_store_id`/
# annotations/`annotation_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Resource level annotation.
# Corresponds to the JSON property `resourceAnnotation`
# @return [Google::Apis::HealthcareV1beta1::ResourceAnnotation]
attr_accessor :resource_annotation
# A TextAnnotation specifies a text range that includes sensitive information.
# Corresponds to the JSON property `textAnnotation`
# @return [Google::Apis::HealthcareV1beta1::SensitiveTextAnnotation]
attr_accessor :text_annotation
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@annotation_source = args[:annotation_source] if args.key?(:annotation_source)
@custom_data = args[:custom_data] if args.key?(:custom_data)
@image_annotation = args[:image_annotation] if args.key?(:image_annotation)
@name = args[:name] if args.key?(:name)
@resource_annotation = args[:resource_annotation] if args.key?(:resource_annotation)
@text_annotation = args[:text_annotation] if args.key?(:text_annotation)
end
end
# Specifies how to store annotations during de-identification operation.
class AnnotationConfig
include Google::Apis::Core::Hashable
# The name of the annotation store, in the form `projects/`project_id`/locations/
# `location_id`/datasets/`dataset_id`/annotationStores/`annotation_store_id``). *
# The destination annotation store must be in the same project as the source
# data. De-identifying data across multiple projects is not supported. * The
# destination annotation store must exist when using DeidentifyDicomStore or
# DeidentifyFhirStore. DeidentifyDataset automatically creates the destination
# annotation store.
# Corresponds to the JSON property `annotationStoreName`
# @return [String]
attr_accessor :annotation_store_name
# If set to true, the sensitive texts are included in SensitiveTextAnnotation of
# Annotation.
# Corresponds to the JSON property `storeQuote`
# @return [Boolean]
attr_accessor :store_quote
alias_method :store_quote?, :store_quote
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@annotation_store_name = args[:annotation_store_name] if args.key?(:annotation_store_name)
@store_quote = args[:store_quote] if args.key?(:store_quote)
end
end
# AnnotationSource holds the source information of the annotation.
class AnnotationSource
include Google::Apis::Core::Hashable
# Cloud Healthcare API resource.
# Corresponds to the JSON property `cloudHealthcareSource`
# @return [Google::Apis::HealthcareV1beta1::CloudHealthcareSource]
attr_accessor :cloud_healthcare_source
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@cloud_healthcare_source = args[:cloud_healthcare_source] if args.key?(:cloud_healthcare_source)
end
end
# An Annotation store that can store annotation resources such as labels and
# tags for text, image and audio.
class AnnotationStore
include Google::Apis::Core::Hashable
# Optional. User-supplied key-value pairs used to organize Annotation stores.
# Label keys must be between 1 and 63 characters long, have a UTF-8 encoding of
# maximum 128 bytes, and must conform to the following PCRE regular expression: \
# p`Ll`\p`Lo``0,62` Label values must be between 1 and 63 characters long, have
# a UTF-8 encoding of maximum 128 bytes, and must conform to the following PCRE
# regular expression: [\p`Ll`\p`Lo`\p`N`_-]`0,63` No more than 64 labels can be
# associated with a given store.
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# Resource name of the Annotation store, of the form `projects/`project_id`/
# locations/`location_id`/datasets/`dataset_id`/annotationStores/`
# annotation_store_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@labels = args[:labels] if args.key?(:labels)
@name = args[:name] if args.key?(:name)
end
end
# Archives the specified User data mapping.
class ArchiveUserDataMappingRequest
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Archives the specified User data mapping.
class ArchiveUserDataMappingResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# An attribute value for a Consent or User data mapping. Each Attribute must
# have a corresponding AttributeDefinition in the consent store that defines the
# default and allowed values.
class Attribute
include Google::Apis::Core::Hashable
# Indicates the name of an attribute defined in the consent store.
# Corresponds to the JSON property `attributeDefinitionId`
# @return [String]
attr_accessor :attribute_definition_id
# The value of the attribute. Must be an acceptable value as defined in the
# consent store. For example, if the consent store defines "data type" with
# acceptable values "questionnaire" and "step-count", when the attribute name is
# data type, this field must contain one of those values.
# Corresponds to the JSON property `values`
# @return [Array<String>]
attr_accessor :values
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@attribute_definition_id = args[:attribute_definition_id] if args.key?(:attribute_definition_id)
@values = args[:values] if args.key?(:values)
end
end
# A client-defined consent attribute.
class AttributeDefinition
include Google::Apis::Core::Hashable
# Required. Possible values for the attribute. The number of allowed values must
# not exceed 100. An empty list is invalid. The list can only be expanded after
# creation.
# Corresponds to the JSON property `allowedValues`
# @return [Array<String>]
attr_accessor :allowed_values
# Required. The category of the attribute. The value of this field cannot be
# changed after creation.
# Corresponds to the JSON property `category`
# @return [String]
attr_accessor :category
# Optional. Default values of the attribute in Consents. If no default values
# are specified, it defaults to an empty value.
# Corresponds to the JSON property `consentDefaultValues`
# @return [Array<String>]
attr_accessor :consent_default_values
# Optional. Default value of the attribute in User data mappings. If no default
# value is specified, it defaults to an empty value. This field is only
# applicable to attributes of the category `RESOURCE`.
# Corresponds to the JSON property `dataMappingDefaultValue`
# @return [String]
attr_accessor :data_mapping_default_value
# Optional. A description of the attribute.
# Corresponds to the JSON property `description`
# @return [String]
attr_accessor :description
# Resource name of the Attribute definition, of the form `projects/`project_id`/
# locations/`location_id`/datasets/`dataset_id`/consentStores/`consent_store_id`/
# attributeDefinitions/`attribute_definition_id``. Cannot be changed after
# creation.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@allowed_values = args[:allowed_values] if args.key?(:allowed_values)
@category = args[:category] if args.key?(:category)
@consent_default_values = args[:consent_default_values] if args.key?(:consent_default_values)
@data_mapping_default_value = args[:data_mapping_default_value] if args.key?(:data_mapping_default_value)
@description = args[:description] if args.key?(:description)
@name = args[:name] if args.key?(:name)
end
end
# Specifies the audit configuration for a service. The configuration determines
# which permission types are logged, and what identities, if any, are exempted
# from logging. An AuditConfig must have one or more AuditLogConfigs. If there
# are AuditConfigs for both `allServices` and a specific service, the union of
# the two AuditConfigs is used for that service: the log_types specified in each
# AuditConfig are enabled, and the exempted_members in each AuditLogConfig are
# exempted. Example Policy with multiple AuditConfigs: ` "audit_configs": [ ` "
# service": "allServices", "audit_log_configs": [ ` "log_type": "DATA_READ", "
      # exempted_members": [ "user:jose@example.com" ] `, ` "log_type": "DATA_WRITE" `,
# ` "log_type": "ADMIN_READ" ` ] `, ` "service": "sampleservice.googleapis.com",
# "audit_log_configs": [ ` "log_type": "DATA_READ" `, ` "log_type": "DATA_WRITE"
# , "exempted_members": [ "user:[email protected]" ] ` ] ` ] ` For sampleservice,
# this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also
      # exempts jose@example.com from DATA_READ logging, and aliya@example.com from
# DATA_WRITE logging.
class AuditConfig
include Google::Apis::Core::Hashable
# The configuration for logging of each type of permission.
# Corresponds to the JSON property `auditLogConfigs`
# @return [Array<Google::Apis::HealthcareV1beta1::AuditLogConfig>]
attr_accessor :audit_log_configs
# Specifies a service that will be enabled for audit logging. For example, `
# storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special
# value that covers all services.
# Corresponds to the JSON property `service`
# @return [String]
attr_accessor :service
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@audit_log_configs = args[:audit_log_configs] if args.key?(:audit_log_configs)
@service = args[:service] if args.key?(:service)
end
end
# Provides the configuration for logging a type of permissions. Example: ` "
# audit_log_configs": [ ` "log_type": "DATA_READ", "exempted_members": [ "user:
      # jose@example.com" ] `, ` "log_type": "DATA_WRITE" ` ] ` This enables '
      # DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from
# DATA_READ logging.
class AuditLogConfig
include Google::Apis::Core::Hashable
# Specifies the identities that do not cause logging for this type of permission.
# Follows the same format of Binding.members.
# Corresponds to the JSON property `exemptedMembers`
# @return [Array<String>]
attr_accessor :exempted_members
# The log type that this config enables.
# Corresponds to the JSON property `logType`
# @return [String]
attr_accessor :log_type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@exempted_members = args[:exempted_members] if args.key?(:exempted_members)
@log_type = args[:log_type] if args.key?(:log_type)
end
end
# Gets multiple messages in a specified HL7v2 store.
class BatchGetMessagesResponse
include Google::Apis::Core::Hashable
# The returned Messages. See `MessageView` for populated fields.
# Corresponds to the JSON property `messages`
# @return [Array<Google::Apis::HealthcareV1beta1::Message>]
attr_accessor :messages
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@messages = args[:messages] if args.key?(:messages)
end
end
# Associates `members` with a `role`.
class Binding
include Google::Apis::Core::Hashable
# Represents a textual expression in the Common Expression Language (CEL) syntax.
# CEL is a C-like expression language. The syntax and semantics of CEL are
# documented at https://github.com/google/cel-spec. Example (Comparison): title:
# "Summary size limit" description: "Determines if a summary is less than 100
# chars" expression: "document.summary.size() < 100" Example (Equality): title: "
# Requestor is owner" description: "Determines if requestor is the document
# owner" expression: "document.owner == request.auth.claims.email" Example (
# Logic): title: "Public documents" description: "Determine whether the document
# should be publicly visible" expression: "document.type != 'private' &&
# document.type != 'internal'" Example (Data Manipulation): title: "Notification
# string" description: "Create a notification string with a timestamp."
# expression: "'New message received at ' + string(document.create_time)" The
# exact variables and functions that may be referenced within an expression are
# determined by the service that evaluates it. See the service documentation for
# additional information.
# Corresponds to the JSON property `condition`
# @return [Google::Apis::HealthcareV1beta1::Expr]
attr_accessor :condition
# Specifies the identities requesting access for a Cloud Platform resource. `
# members` can have the following values: * `allUsers`: A special identifier
# that represents anyone who is on the internet; with or without a Google
# account. * `allAuthenticatedUsers`: A special identifier that represents
# anyone who is authenticated with a Google account or a service account. * `
# user:`emailid``: An email address that represents a specific Google account.
      # For example, `alice@example.com` . * `serviceAccount:`emailid``: An email
# address that represents a service account. For example, `my-other-app@appspot.
# gserviceaccount.com`. * `group:`emailid``: An email address that represents a
      # Google group. For example, `admins@example.com`. * `deleted:user:`emailid`?uid=
# `uniqueid``: An email address (plus unique identifier) representing a user
      # that has been recently deleted. For example, `alice@example.com?uid=
# 123456789012345678901`. If the user is recovered, this value reverts to `user:`
# emailid`` and the recovered user retains the role in the binding. * `deleted:
# serviceAccount:`emailid`?uid=`uniqueid``: An email address (plus unique
# identifier) representing a service account that has been recently deleted. For
      # example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`.
# If the service account is undeleted, this value reverts to `serviceAccount:`
# emailid`` and the undeleted service account retains the role in the binding. *
# `deleted:group:`emailid`?uid=`uniqueid``: An email address (plus unique
# identifier) representing a Google group that has been recently deleted. For
      # example, `admins@example.com?uid=123456789012345678901`. If the group is
# recovered, this value reverts to `group:`emailid`` and the recovered group
# retains the role in the binding. * `domain:`domain``: The G Suite domain (
# primary) that represents all the users of that domain. For example, `google.
# com` or `example.com`.
# Corresponds to the JSON property `members`
# @return [Array<String>]
attr_accessor :members
# Role that is assigned to `members`. For example, `roles/viewer`, `roles/editor`
# , or `roles/owner`.
# Corresponds to the JSON property `role`
# @return [String]
attr_accessor :role
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@condition = args[:condition] if args.key?(:condition)
@members = args[:members] if args.key?(:members)
@role = args[:role] if args.key?(:role)
end
end
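      # Usage sketch (not part of the generated client; the member address is a
      # placeholder): a Binding is built the same way, from keyword arguments.
      #
      #   viewer_binding = Google::Apis::HealthcareV1beta1::Binding.new(
      #     role: 'roles/viewer',
      #     members: ['user:alice@example.com']
      #   )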
# A bounding polygon for the detected image annotation.
class BoundingPoly
include Google::Apis::Core::Hashable
# A description of this polygon.
# Corresponds to the JSON property `label`
# @return [String]
attr_accessor :label
# List of the vertices of this polygon.
# Corresponds to the JSON property `vertices`
# @return [Array<Google::Apis::HealthcareV1beta1::Vertex>]
attr_accessor :vertices
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@label = args[:label] if args.key?(:label)
@vertices = args[:vertices] if args.key?(:vertices)
end
end
# The request message for Operations.CancelOperation.
class CancelOperationRequest
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Mask a string by replacing its characters with a fixed character.
class CharacterMaskConfig
include Google::Apis::Core::Hashable
# Character to mask the sensitive values. If not supplied, defaults to "*".
# Corresponds to the JSON property `maskingCharacter`
# @return [String]
attr_accessor :masking_character
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@masking_character = args[:masking_character] if args.key?(:masking_character)
end
end
# Checks if a particular data_id of a User data mapping in the given consent
# store is consented for a given use.
class CheckDataAccessRequest
include Google::Apis::Core::Hashable
# List of resource names of Consent resources.
# Corresponds to the JSON property `consentList`
# @return [Google::Apis::HealthcareV1beta1::ConsentList]
attr_accessor :consent_list
# Required. The unique identifier of the resource to check access for. This
# identifier must correspond to a User data mapping in the given consent store.
# Corresponds to the JSON property `dataId`
# @return [String]
attr_accessor :data_id
# The values of request attributes associated with this access request.
# Corresponds to the JSON property `requestAttributes`
# @return [Hash<String,String>]
attr_accessor :request_attributes
# Optional. The view for CheckDataAccessResponse. If unspecified, defaults to `
# BASIC` and returns `consented` as `TRUE` or `FALSE`.
# Corresponds to the JSON property `responseView`
# @return [String]
attr_accessor :response_view
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_list = args[:consent_list] if args.key?(:consent_list)
@data_id = args[:data_id] if args.key?(:data_id)
@request_attributes = args[:request_attributes] if args.key?(:request_attributes)
@response_view = args[:response_view] if args.key?(:response_view)
end
end
# Checks if a particular data_id of a User data mapping in the given consent
# store is consented for a given use.
class CheckDataAccessResponse
include Google::Apis::Core::Hashable
# The resource names of all evaluated Consents mapped to their evaluation.
# Corresponds to the JSON property `consentDetails`
# @return [Hash<String,Google::Apis::HealthcareV1beta1::ConsentEvaluation>]
attr_accessor :consent_details
# Whether the requested resource is consented for the given use.
# Corresponds to the JSON property `consented`
# @return [Boolean]
attr_accessor :consented
alias_method :consented?, :consented
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_details = args[:consent_details] if args.key?(:consent_details)
@consented = args[:consented] if args.key?(:consented)
end
end
# Cloud Healthcare API resource.
class CloudHealthcareSource
include Google::Apis::Core::Hashable
# Full path of a Cloud Healthcare API resource.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@name = args[:name] if args.key?(:name)
end
end
# Represents a user's consent.
class Consent
include Google::Apis::Core::Hashable
# Required. The resource name of the Consent artifact that contains proof of the
# end user's consent, of the form `projects/`project_id`/locations/`location_id`/
# datasets/`dataset_id`/consentStores/`consent_store_id`/consentArtifacts/`
# consent_artifact_id``.
# Corresponds to the JSON property `consentArtifact`
# @return [String]
attr_accessor :consent_artifact
# Timestamp in UTC of when this Consent is considered expired.
# Corresponds to the JSON property `expireTime`
# @return [String]
attr_accessor :expire_time
# Optional. User-supplied key-value pairs used to organize Consent resources.
# Metadata keys must: - be between 1 and 63 characters long - have a UTF-8
# encoding of maximum 128 bytes - begin with a letter - consist of up to 63
# characters including lowercase letters, numeric characters, underscores, and
# dashes Metadata values must be: - be between 1 and 63 characters long - have a
# UTF-8 encoding of maximum 128 bytes - consist of up to 63 characters including
# lowercase letters, numeric characters, underscores, and dashes No more than 64
# metadata entries can be associated with a given consent.
# Corresponds to the JSON property `metadata`
# @return [Hash<String,String>]
attr_accessor :metadata
# Resource name of the Consent, of the form `projects/`project_id`/locations/`
# location_id`/datasets/`dataset_id`/consentStores/`consent_store_id`/consents/`
# consent_id``. Cannot be changed after creation.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Optional. Represents a user's consent in terms of the resources that can be
# accessed and under what conditions.
# Corresponds to the JSON property `policies`
# @return [Array<Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1ConsentPolicy>]
attr_accessor :policies
# Output only. The timestamp that the revision was created.
# Corresponds to the JSON property `revisionCreateTime`
# @return [String]
attr_accessor :revision_create_time
# Output only. The revision ID of the Consent. The format is an 8-character
# hexadecimal string. Refer to a specific revision of a Consent by appending `@`
# revision_id`` to the Consent's resource name.
# Corresponds to the JSON property `revisionId`
# @return [String]
attr_accessor :revision_id
# Required. Indicates the current state of this Consent.
# Corresponds to the JSON property `state`
# @return [String]
attr_accessor :state
# Input only. The time to live for this Consent from when it is created.
# Corresponds to the JSON property `ttl`
# @return [String]
attr_accessor :ttl
# Required. User's UUID provided by the client.
# Corresponds to the JSON property `userId`
# @return [String]
attr_accessor :user_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_artifact = args[:consent_artifact] if args.key?(:consent_artifact)
@expire_time = args[:expire_time] if args.key?(:expire_time)
@metadata = args[:metadata] if args.key?(:metadata)
@name = args[:name] if args.key?(:name)
@policies = args[:policies] if args.key?(:policies)
@revision_create_time = args[:revision_create_time] if args.key?(:revision_create_time)
@revision_id = args[:revision_id] if args.key?(:revision_id)
@state = args[:state] if args.key?(:state)
@ttl = args[:ttl] if args.key?(:ttl)
@user_id = args[:user_id] if args.key?(:user_id)
end
end
# Documentation of a user's consent.
class ConsentArtifact
include Google::Apis::Core::Hashable
# Optional. Screenshots, PDFs, or other binary information documenting the user'
# s consent.
# Corresponds to the JSON property `consentContentScreenshots`
# @return [Array<Google::Apis::HealthcareV1beta1::Image>]
attr_accessor :consent_content_screenshots
        # Optional. A string indicating the version of the consent information shown to
# the user.
# Corresponds to the JSON property `consentContentVersion`
# @return [String]
attr_accessor :consent_content_version
# User signature.
# Corresponds to the JSON property `guardianSignature`
# @return [Google::Apis::HealthcareV1beta1::Signature]
attr_accessor :guardian_signature
# Optional. Metadata associated with the Consent artifact. For example, the
# consent locale or user agent version.
# Corresponds to the JSON property `metadata`
# @return [Hash<String,String>]
attr_accessor :metadata
# Resource name of the Consent artifact, of the form `projects/`project_id`/
# locations/`location_id`/datasets/`dataset_id`/consentStores/`consent_store_id`/
# consentArtifacts/`consent_artifact_id``. Cannot be changed after creation.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Required. User's UUID provided by the client.
# Corresponds to the JSON property `userId`
# @return [String]
attr_accessor :user_id
# User signature.
# Corresponds to the JSON property `userSignature`
# @return [Google::Apis::HealthcareV1beta1::Signature]
attr_accessor :user_signature
# User signature.
# Corresponds to the JSON property `witnessSignature`
# @return [Google::Apis::HealthcareV1beta1::Signature]
attr_accessor :witness_signature
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_content_screenshots = args[:consent_content_screenshots] if args.key?(:consent_content_screenshots)
@consent_content_version = args[:consent_content_version] if args.key?(:consent_content_version)
@guardian_signature = args[:guardian_signature] if args.key?(:guardian_signature)
@metadata = args[:metadata] if args.key?(:metadata)
@name = args[:name] if args.key?(:name)
@user_id = args[:user_id] if args.key?(:user_id)
@user_signature = args[:user_signature] if args.key?(:user_signature)
@witness_signature = args[:witness_signature] if args.key?(:witness_signature)
end
end
# The detailed evaluation of a particular Consent.
class ConsentEvaluation
include Google::Apis::Core::Hashable
# The evaluation result.
# Corresponds to the JSON property `evaluationResult`
# @return [String]
attr_accessor :evaluation_result
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@evaluation_result = args[:evaluation_result] if args.key?(:evaluation_result)
end
end
# List of resource names of Consent resources.
class ConsentList
include Google::Apis::Core::Hashable
# The resource names of the Consents to evaluate against, of the form `projects/`
# project_id`/locations/`location_id`/datasets/`dataset_id`/consentStores/`
# consent_store_id`/consents/`consent_id``.
# Corresponds to the JSON property `consents`
# @return [Array<String>]
attr_accessor :consents
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consents = args[:consents] if args.key?(:consents)
end
end
# Represents a consent store.
class ConsentStore
include Google::Apis::Core::Hashable
# Optional. Default time to live for Consents created in this store. Must be at
# least 24 hours. Updating this field will not affect the expiration time of
# existing consents.
# Corresponds to the JSON property `defaultConsentTtl`
# @return [String]
attr_accessor :default_consent_ttl
# Optional. If `true`, UpdateConsent creates the Consent if it does not already
# exist. If unspecified, defaults to `false`.
# Corresponds to the JSON property `enableConsentCreateOnUpdate`
# @return [Boolean]
attr_accessor :enable_consent_create_on_update
alias_method :enable_consent_create_on_update?, :enable_consent_create_on_update
# Optional. User-supplied key-value pairs used to organize consent stores. Label
# keys must be between 1 and 63 characters long, have a UTF-8 encoding of
# maximum 128 bytes, and must conform to the following PCRE regular expression: \
# p`Ll`\p`Lo``0,62`. Label values must be between 1 and 63 characters long, have
# a UTF-8 encoding of maximum 128 bytes, and must conform to the following PCRE
# regular expression: [\p`Ll`\p`Lo`\p`N`_-]`0,63`. No more than 64 labels can be
# associated with a given store. For more information: https://cloud.google.com/
# healthcare/docs/how-tos/labeling-resources
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# Resource name of the consent store, of the form `projects/`project_id`/
# locations/`location_id`/datasets/`dataset_id`/consentStores/`consent_store_id``
# . Cannot be changed after creation.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@default_consent_ttl = args[:default_consent_ttl] if args.key?(:default_consent_ttl)
@enable_consent_create_on_update = args[:enable_consent_create_on_update] if args.key?(:enable_consent_create_on_update)
@labels = args[:labels] if args.key?(:labels)
@name = args[:name] if args.key?(:name)
end
end
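# Hedged usage sketch: a ConsentStore resource body. The label values are
# illustrative, and the TTL assumes the duration string encoding (seconds with
# an 's' suffix); it is set to the documented 24-hour minimum.
#
#   consent_store = Google::Apis::HealthcareV1beta1::ConsentStore.new(
#     default_consent_ttl: '86400s',
#     enable_consent_create_on_update: true,
#     labels: { 'env' => 'staging' }
#   )
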
# Creates a new message.
class CreateMessageRequest
include Google::Apis::Core::Hashable
# A complete HL7v2 message. See [Introduction to HL7 Standards] (https://www.hl7.
# org/implement/standards/index.cfm?ref=common) for details on the standard.
# Corresponds to the JSON property `message`
# @return [Google::Apis::HealthcareV1beta1::Message]
attr_accessor :message
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@message = args[:message] if args.key?(:message)
end
end
# Pseudonymization method that generates surrogates via cryptographic hashing.
# Uses SHA-256. Outputs a base64-encoded representation of the hashed output.
# For example, `L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=`.
class CryptoHashConfig
include Google::Apis::Core::Hashable
# An AES 128/192/256 bit key. Causes the hash to be computed based on this key.
# A default key is generated for each Deidentify operation and is used wherever
# crypto_key is not specified.
# Corresponds to the JSON property `cryptoKey`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :crypto_key
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@crypto_key = args[:crypto_key] if args.key?(:crypto_key)
end
end
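# Minimal sketch: supplying AES-256-sized key material to CryptoHashConfig. Per
# the NOTE above, the client library base64-encodes the value, so raw bytes can
# be assigned directly. The key here is generated locally and is purely
# illustrative.
#
#   require 'securerandom'
#   hash_config = Google::Apis::HealthcareV1beta1::CryptoHashConfig.new(
#     crypto_key: SecureRandom.random_bytes(32)
#   )
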
# A message representing a health dataset. A health dataset represents a
# collection of healthcare data pertaining to one or more patients. This may
# include multiple modalities of healthcare data, such as electronic medical
# records or medical imaging data.
class Dataset
include Google::Apis::Core::Hashable
# Resource name of the dataset, of the form `projects/`project_id`/locations/`
# location_id`/datasets/`dataset_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# The default timezone used by this dataset. Must be either a valid IANA time
# zone name such as "America/New_York" or empty, which defaults to UTC. This is
# used for parsing times in resources, such as HL7 messages, where no explicit
# timezone is specified.
# Corresponds to the JSON property `timeZone`
# @return [String]
attr_accessor :time_zone
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@name = args[:name] if args.key?(:name)
@time_zone = args[:time_zone] if args.key?(:time_zone)
end
end
# Shift a date forward or backward in time by a random amount which is
# consistent for a given patient and crypto key combination.
class DateShiftConfig
include Google::Apis::Core::Hashable
# An AES 128/192/256 bit key. Causes the shift to be computed based on this key
# and the patient ID. A default key is generated for each Deidentify operation
# and is used wherever crypto_key is not specified.
# Corresponds to the JSON property `cryptoKey`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :crypto_key
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@crypto_key = args[:crypto_key] if args.key?(:crypto_key)
end
end
# Configures de-id options specific to different types of content. Each
# submessage customizes the handling of an https://tools.ietf.org/html/rfc6838
# media type or subtype. Configs are applied in a nested manner at runtime.
class DeidentifyConfig
include Google::Apis::Core::Hashable
# Specifies how to store annotations during de-identification operation.
# Corresponds to the JSON property `annotation`
# @return [Google::Apis::HealthcareV1beta1::AnnotationConfig]
attr_accessor :annotation
# Specifies the parameters needed for de-identification of DICOM stores.
# Corresponds to the JSON property `dicom`
# @return [Google::Apis::HealthcareV1beta1::DicomConfig]
attr_accessor :dicom
# Specifies how to handle de-identification of a FHIR store.
# Corresponds to the JSON property `fhir`
# @return [Google::Apis::HealthcareV1beta1::FhirConfig]
attr_accessor :fhir
# Specifies how to handle de-identification of image pixels.
# Corresponds to the JSON property `image`
# @return [Google::Apis::HealthcareV1beta1::ImageConfig]
attr_accessor :image
# Configures de-identification of text wherever it is found in the
# source_dataset.
# Corresponds to the JSON property `text`
# @return [Google::Apis::HealthcareV1beta1::TextConfig]
attr_accessor :text
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@annotation = args[:annotation] if args.key?(:annotation)
@dicom = args[:dicom] if args.key?(:dicom)
@fhir = args[:fhir] if args.key?(:fhir)
@image = args[:image] if args.key?(:image)
@text = args[:text] if args.key?(:text)
end
end
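# Minimal sketch of composing the nested de-identification config from
# submessages defined elsewhere in this file. The `MINIMAL_KEEP_LIST_PROFILE`
# value is assumed from the Cloud Healthcare API's tag filter profile enum and
# should be verified against current documentation.
#
#   deid_config = Google::Apis::HealthcareV1beta1::DeidentifyConfig.new(
#     dicom: Google::Apis::HealthcareV1beta1::DicomConfig.new(
#       filter_profile: 'MINIMAL_KEEP_LIST_PROFILE'
#     ),
#     fhir: Google::Apis::HealthcareV1beta1::FhirConfig.new(field_metadata_list: [])
#   )
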
# Redacts identifying information from the specified dataset.
class DeidentifyDatasetRequest
include Google::Apis::Core::Hashable
# Configures de-id options specific to different types of content. Each
# submessage customizes the handling of an https://tools.ietf.org/html/rfc6838
# media type or subtype. Configs are applied in a nested manner at runtime.
# Corresponds to the JSON property `config`
# @return [Google::Apis::HealthcareV1beta1::DeidentifyConfig]
attr_accessor :config
# The name of the dataset resource to create and write the redacted data to. *
# The destination dataset must not exist. * The destination dataset must be in
# the same project and location as the source dataset. De-identifying data
# across multiple projects or locations is not supported.
# Corresponds to the JSON property `destinationDataset`
# @return [String]
attr_accessor :destination_dataset
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@config = args[:config] if args.key?(:config)
@destination_dataset = args[:destination_dataset] if args.key?(:destination_dataset)
end
end
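# Hypothetical request body sketch: pairing a DeidentifyConfig (as sketched
# above) with a destination dataset path. Project, location, and dataset IDs
# are placeholders.
#
#   request = Google::Apis::HealthcareV1beta1::DeidentifyDatasetRequest.new(
#     config: deid_config,
#     destination_dataset: 'projects/my-project/locations/us-central1/datasets/deid-dataset'
#   )
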
# Creates a new DICOM store with sensitive information de-identified.
class DeidentifyDicomStoreRequest
include Google::Apis::Core::Hashable
# Configures de-id options specific to different types of content. Each
# submessage customizes the handling of an https://tools.ietf.org/html/rfc6838
# media type or subtype. Configs are applied in a nested manner at runtime.
# Corresponds to the JSON property `config`
# @return [Google::Apis::HealthcareV1beta1::DeidentifyConfig]
attr_accessor :config
# The name of the DICOM store to create and write the redacted data to. For
# example, `projects/`project_id`/locations/`location_id`/datasets/`dataset_id`/
# dicomStores/`dicom_store_id``. * The destination dataset must exist. * The
# source dataset and destination dataset must both reside in the same project.
# De-identifying data across multiple projects is not supported. * The
# destination DICOM store must not exist. * The caller must have the necessary
# permissions to create the destination DICOM store.
# Corresponds to the JSON property `destinationStore`
# @return [String]
attr_accessor :destination_store
# Specifies the filter configuration for DICOM resources.
# Corresponds to the JSON property `filterConfig`
# @return [Google::Apis::HealthcareV1beta1::DicomFilterConfig]
attr_accessor :filter_config
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@config = args[:config] if args.key?(:config)
@destination_store = args[:destination_store] if args.key?(:destination_store)
@filter_config = args[:filter_config] if args.key?(:filter_config)
end
end
# Creates a new FHIR store with sensitive information de-identified.
class DeidentifyFhirStoreRequest
include Google::Apis::Core::Hashable
# Configures de-id options specific to different types of content. Each
# submessage customizes the handling of an https://tools.ietf.org/html/rfc6838
# media type or subtype. Configs are applied in a nested manner at runtime.
# Corresponds to the JSON property `config`
# @return [Google::Apis::HealthcareV1beta1::DeidentifyConfig]
attr_accessor :config
# The name of the FHIR store to create and write the redacted data to. For
# example, `projects/`project_id`/locations/`location_id`/datasets/`dataset_id`/
# fhirStores/`fhir_store_id``. * The destination dataset must exist. * The
# source dataset and destination dataset must both reside in the same project.
# De-identifying data across multiple projects is not supported. * The
# destination FHIR store must exist. * The caller must have the healthcare.
# fhirResources.update permission to write to the destination FHIR store.
# Corresponds to the JSON property `destinationStore`
# @return [String]
attr_accessor :destination_store
# Filter configuration.
# Corresponds to the JSON property `resourceFilter`
# @return [Google::Apis::HealthcareV1beta1::FhirFilter]
attr_accessor :resource_filter
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@config = args[:config] if args.key?(:config)
@destination_store = args[:destination_store] if args.key?(:destination_store)
@resource_filter = args[:resource_filter] if args.key?(:resource_filter)
end
end
# Contains a detailed summary of the Deidentify operation.
class DeidentifySummary
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Contains multiple sensitive information findings for each resource slice.
class Detail
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `findings`
# @return [Array<Google::Apis::HealthcareV1beta1::Finding>]
attr_accessor :findings
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@findings = args[:findings] if args.key?(:findings)
end
end
# Specifies the parameters needed for de-identification of DICOM stores.
class DicomConfig
include Google::Apis::Core::Hashable
# Tag filtering profile that determines which tags to keep/remove.
# Corresponds to the JSON property `filterProfile`
# @return [String]
attr_accessor :filter_profile
# List of tags to be filtered.
# Corresponds to the JSON property `keepList`
# @return [Google::Apis::HealthcareV1beta1::TagFilterList]
attr_accessor :keep_list
# List of tags to be filtered.
# Corresponds to the JSON property `removeList`
# @return [Google::Apis::HealthcareV1beta1::TagFilterList]
attr_accessor :remove_list
# If true, skip replacing StudyInstanceUID, SeriesInstanceUID, SOPInstanceUID,
# and MediaStorageSOPInstanceUID and leave them untouched. The Cloud Healthcare
# API regenerates these UIDs by default based on the DICOM Standard's reasoning:
# "Whilst these UIDs cannot be mapped directly to an individual out of context,
# given access to the original images, or to a database of the original images
# containing the UIDs, it would be possible to recover the individual's identity.
# " http://dicom.nema.org/medical/dicom/current/output/chtml/part15/sect_E.3.9.
# html
# Corresponds to the JSON property `skipIdRedaction`
# @return [Boolean]
attr_accessor :skip_id_redaction
alias_method :skip_id_redaction?, :skip_id_redaction
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@filter_profile = args[:filter_profile] if args.key?(:filter_profile)
@keep_list = args[:keep_list] if args.key?(:keep_list)
@remove_list = args[:remove_list] if args.key?(:remove_list)
@skip_id_redaction = args[:skip_id_redaction] if args.key?(:skip_id_redaction)
end
end
# Specifies the filter configuration for DICOM resources.
class DicomFilterConfig
include Google::Apis::Core::Hashable
# The Cloud Storage location of the filter configuration file. The `gcs_uri`
# must be in the format `gs://bucket/path/to/object`. The filter configuration
# file must contain a list of resource paths separated by newline characters (\n
# or \r\n). Each resource path must be in the format "/studies/`studyUID`[/
# series/`seriesUID`[/instances/`instanceUID`]]" The Cloud Healthcare API
# service account must have the `roles/storage.objectViewer` Cloud IAM role for
# this Cloud Storage location.
# Corresponds to the JSON property `resourcePathsGcsUri`
# @return [String]
attr_accessor :resource_paths_gcs_uri
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@resource_paths_gcs_uri = args[:resource_paths_gcs_uri] if args.key?(:resource_paths_gcs_uri)
end
end
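# Sketch: pointing the DICOM filter at a newline-separated list of resource
# paths stored in Cloud Storage, per the format described above. Bucket and
# object names are made up.
#
#   filter_config = Google::Apis::HealthcareV1beta1::DicomFilterConfig.new(
#     resource_paths_gcs_uri: 'gs://my-bucket/filters/studies_to_deidentify.txt'
#   )
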
# Represents a DICOM store.
class DicomStore
include Google::Apis::Core::Hashable
# User-supplied key-value pairs used to organize DICOM stores. Label keys must
# be between 1 and 63 characters long, have a UTF-8 encoding of maximum 128
# bytes, and must conform to the following PCRE regular expression: \p`Ll`\p`Lo``
# 0,62` Label values are optional, must be between 1 and 63 characters long,
# have a UTF-8 encoding of maximum 128 bytes, and must conform to the following
# PCRE regular expression: [\p`Ll`\p`Lo`\p`N`_-]`0,63` No more than 64 labels
# can be associated with a given store.
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# Resource name of the DICOM store, of the form `projects/`project_id`/locations/
# `location_id`/datasets/`dataset_id`/dicomStores/`dicom_store_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Specifies where to send notifications upon changes to a data store.
# Corresponds to the JSON property `notificationConfig`
# @return [Google::Apis::HealthcareV1beta1::NotificationConfig]
attr_accessor :notification_config
# A list of streaming configs used to configure the destination of streaming
# exports for every DICOM instance insertion in this DICOM store. After a new
# config is added to `stream_configs`, DICOM instance insertions are streamed to
# the new destination. When a config is removed from `stream_configs`, the
# server stops streaming to that destination. Each config must contain a unique
# destination.
# Corresponds to the JSON property `streamConfigs`
# @return [Array<Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1DicomStreamConfig>]
attr_accessor :stream_configs
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@labels = args[:labels] if args.key?(:labels)
@name = args[:name] if args.key?(:name)
@notification_config = args[:notification_config] if args.key?(:notification_config)
@stream_configs = args[:stream_configs] if args.key?(:stream_configs)
end
end
# A generic empty message that you can re-use to avoid defining duplicated empty
# messages in your APIs. A typical example is to use it as the request or the
# response type of an API method. For instance: service Foo ` rpc Bar(google.
# protobuf.Empty) returns (google.protobuf.Empty); ` The JSON representation for
# `Empty` is empty JSON object ````.
class Empty
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# The candidate entities that an entity mention could link to.
class Entity
include Google::Apis::Core::Hashable
# entity_id is a first class field. It uniquely identifies this concept and its
# meta-vocabulary. For example, "UMLS/C0000970".
# Corresponds to the JSON property `entityId`
# @return [String]
attr_accessor :entity_id
# preferred_term is the preferred term for this concept. For example, "
# Acetaminophen". For ad hoc entities formed by normalization, this is the most
# popular unnormalized string.
# Corresponds to the JSON property `preferredTerm`
# @return [String]
attr_accessor :preferred_term
# Vocabulary codes are first-class fields and differentiated from the concept
# unique identifier (entity_id). vocabulary_codes contains the representation of
# this concept in particular vocabularies, such as ICD-10, SNOMED-CT and RxNORM.
# These are prefixed by the name of the vocabulary, followed by the unique code
# within that vocabulary. For example, "RXNORM/A10334543".
# Corresponds to the JSON property `vocabularyCodes`
# @return [Array<String>]
attr_accessor :vocabulary_codes
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@entity_id = args[:entity_id] if args.key?(:entity_id)
@preferred_term = args[:preferred_term] if args.key?(:preferred_term)
@vocabulary_codes = args[:vocabulary_codes] if args.key?(:vocabulary_codes)
end
end
# An entity mention in the document.
class EntityMention
include Google::Apis::Core::Hashable
# A feature of an entity mention.
# Corresponds to the JSON property `certaintyAssessment`
# @return [Google::Apis::HealthcareV1beta1::Feature]
attr_accessor :certainty_assessment
# The model's confidence in this entity mention annotation. A number between 0
# and 1.
# Corresponds to the JSON property `confidence`
# @return [Float]
attr_accessor :confidence
# linked_entities are candidate ontological concepts that this entity mention
# may refer to. They are sorted by decreasing confidence.
# Corresponds to the JSON property `linkedEntities`
# @return [Array<Google::Apis::HealthcareV1beta1::LinkedEntity>]
attr_accessor :linked_entities
# mention_id uniquely identifies each entity mention in a single response.
# Corresponds to the JSON property `mentionId`
# @return [String]
attr_accessor :mention_id
# A feature of an entity mention.
# Corresponds to the JSON property `subject`
# @return [Google::Apis::HealthcareV1beta1::Feature]
attr_accessor :subject
# A feature of an entity mention.
# Corresponds to the JSON property `temporalAssessment`
# @return [Google::Apis::HealthcareV1beta1::Feature]
attr_accessor :temporal_assessment
# A span of text in the provided document.
# Corresponds to the JSON property `text`
# @return [Google::Apis::HealthcareV1beta1::TextSpan]
attr_accessor :text
# The semantic type of the entity: UNKNOWN_ENTITY_TYPE, ALONE,
# ANATOMICAL_STRUCTURE, ASSISTED_LIVING, BF_RESULT, BM_RESULT, BM_UNIT, BM_VALUE,
# BODY_FUNCTION, BODY_MEASUREMENT, COMPLIANT, DOESNOT_FOLLOWUP, FAMILY,
# FOLLOWSUP, LABORATORY_DATA, LAB_RESULT, LAB_UNIT, LAB_VALUE, MEDICAL_DEVICE,
# MEDICINE, MED_DOSE, MED_DURATION, MED_FORM, MED_FREQUENCY, MED_ROUTE,
# MED_STATUS, MED_STRENGTH, MED_TOTALDOSE, MED_UNIT, NON_COMPLIANT,
# OTHER_LIVINGSTATUS, PROBLEM, PROCEDURE, PROCEDURE_RESULT, PROC_METHOD,
# REASON_FOR_NONCOMPLIANCE, SEVERITY, SUBSTANCE_ABUSE, UNCLEAR_FOLLOWUP.
# Corresponds to the JSON property `type`
# @return [String]
attr_accessor :type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@certainty_assessment = args[:certainty_assessment] if args.key?(:certainty_assessment)
@confidence = args[:confidence] if args.key?(:confidence)
@linked_entities = args[:linked_entities] if args.key?(:linked_entities)
@mention_id = args[:mention_id] if args.key?(:mention_id)
@subject = args[:subject] if args.key?(:subject)
@temporal_assessment = args[:temporal_assessment] if args.key?(:temporal_assessment)
@text = args[:text] if args.key?(:text)
@type = args[:type] if args.key?(:type)
end
end
# Defines directed relationship from one entity mention to another.
class EntityMentionRelationship
include Google::Apis::Core::Hashable
# The model's confidence in this annotation. A number between 0 and 1.
# Corresponds to the JSON property `confidence`
# @return [Float]
attr_accessor :confidence
# object_id is the id of the object entity mention.
# Corresponds to the JSON property `objectId`
# @return [String]
attr_accessor :object_id_prop
# subject_id is the id of the subject entity mention.
# Corresponds to the JSON property `subjectId`
# @return [String]
attr_accessor :subject_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@confidence = args[:confidence] if args.key?(:confidence)
@object_id_prop = args[:object_id_prop] if args.key?(:object_id_prop)
@subject_id = args[:subject_id] if args.key?(:subject_id)
end
end
# Request to evaluate an Annotation store against a ground truth [Annotation
# store].
class EvaluateAnnotationStoreRequest
include Google::Apis::Core::Hashable
# The BigQuery table for export.
# Corresponds to the JSON property `bigqueryDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1AnnotationBigQueryDestination]
attr_accessor :bigquery_destination
# Optional. InfoType mapping for `eval_store`. Different resources can map to
# the same infoType. For example, `PERSON_NAME`, `PERSON`, `NAME`, and `HUMAN`
# are different. To map all of these into a single infoType (such as `
# PERSON_NAME`), specify the following mapping: ``` info_type_mapping["PERSON"] =
# "PERSON_NAME" info_type_mapping["NAME"] = "PERSON_NAME" info_type_mapping["
# HUMAN"] = "PERSON_NAME" ``` Unmentioned infoTypes, such as `DATE`, are treated
# as identity mapping. For example: ``` info_type_mapping["DATE"] = "DATE" ```
# InfoTypes are case-insensitive.
# Corresponds to the JSON property `evalInfoTypeMapping`
# @return [Hash<String,String>]
attr_accessor :eval_info_type_mapping
# Optional. Similar to `eval_info_type_mapping`, infoType mapping for `
# golden_store`.
# Corresponds to the JSON property `goldenInfoTypeMapping`
# @return [Hash<String,String>]
attr_accessor :golden_info_type_mapping
# The Annotation store to use as ground truth, in the format of `projects/`
# project_id`/locations/`location_id`/datasets/`dataset_id`/annotationStores/`
# annotation_store_id``.
# Corresponds to the JSON property `goldenStore`
# @return [String]
attr_accessor :golden_store
# Specifies how to use infoTypes for evaluation. For example, a user might only
# want to evaluate `PERSON`, `LOCATION`, and `AGE`.
# Corresponds to the JSON property `infoTypeConfig`
# @return [Google::Apis::HealthcareV1beta1::InfoTypeConfig]
attr_accessor :info_type_config
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@bigquery_destination = args[:bigquery_destination] if args.key?(:bigquery_destination)
@eval_info_type_mapping = args[:eval_info_type_mapping] if args.key?(:eval_info_type_mapping)
@golden_info_type_mapping = args[:golden_info_type_mapping] if args.key?(:golden_info_type_mapping)
@golden_store = args[:golden_store] if args.key?(:golden_store)
@info_type_config = args[:info_type_config] if args.key?(:info_type_config)
end
end
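# Hedged sketch of an evaluation request, reusing the infoType mapping example
# from the field documentation above. Resource names are placeholders.
#
#   eval_request = Google::Apis::HealthcareV1beta1::EvaluateAnnotationStoreRequest.new(
#     golden_store: 'projects/my-project/locations/us-central1/datasets/ds/annotationStores/golden',
#     eval_info_type_mapping: {
#       'PERSON' => 'PERSON_NAME',
#       'NAME'   => 'PERSON_NAME',
#       'HUMAN'  => 'PERSON_NAME'
#     }
#   )
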
# Response for successful Annotation store evaluation operations. This structure
# is included in the response upon operation completion.
class EvaluateAnnotationStoreResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Evaluate a user's Consents for all matching User data mappings. Note: User
# data mappings are indexed asynchronously, causing slight delays between the
# time mappings are created or updated and when they are included in
# EvaluateUserConsents results.
class EvaluateUserConsentsRequest
include Google::Apis::Core::Hashable
# List of resource names of Consent resources.
# Corresponds to the JSON property `consentList`
# @return [Google::Apis::HealthcareV1beta1::ConsentList]
attr_accessor :consent_list
# Optional. Limit on the number of User data mappings to return in a single
# response. If not specified, 100 is used. May not be larger than 1000.
# Corresponds to the JSON property `pageSize`
# @return [Fixnum]
attr_accessor :page_size
# Optional. Token to retrieve the next page of results, or empty to get the
# first page.
# Corresponds to the JSON property `pageToken`
# @return [String]
attr_accessor :page_token
# Required. The values of request attributes associated with this access request.
# Corresponds to the JSON property `requestAttributes`
# @return [Hash<String,String>]
attr_accessor :request_attributes
# Optional. The values of resource attributes associated with the resources
# being requested. If no values are specified, then all resources are queried.
# Corresponds to the JSON property `resourceAttributes`
# @return [Hash<String,String>]
attr_accessor :resource_attributes
# Optional. The view for EvaluateUserConsentsResponse. If unspecified, defaults
# to `BASIC` and returns `consented` as `TRUE` or `FALSE`.
# Corresponds to the JSON property `responseView`
# @return [String]
attr_accessor :response_view
# Required. User ID to evaluate consents for.
# Corresponds to the JSON property `userId`
# @return [String]
attr_accessor :user_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_list = args[:consent_list] if args.key?(:consent_list)
@page_size = args[:page_size] if args.key?(:page_size)
@page_token = args[:page_token] if args.key?(:page_token)
@request_attributes = args[:request_attributes] if args.key?(:request_attributes)
@resource_attributes = args[:resource_attributes] if args.key?(:resource_attributes)
@response_view = args[:response_view] if args.key?(:response_view)
@user_id = args[:user_id] if args.key?(:user_id)
end
end
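# Hedged sketch of an EvaluateUserConsents request body. The attribute keys and
# values are illustrative and must match attribute definitions configured in
# the consent store; the `DETAILED` view name is assumed from the API enum.
#
#   consents_request = Google::Apis::HealthcareV1beta1::EvaluateUserConsentsRequest.new(
#     user_id: 'user-1234',
#     request_attributes: { 'requester_identity' => 'external-researcher' },
#     resource_attributes: { 'data_identifiable' => 'de-identified' },
#     response_view: 'DETAILED',
#     page_size: 100
#   )
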
#
class EvaluateUserConsentsResponse
include Google::Apis::Core::Hashable
# Token to retrieve the next page of results, or empty if there are no more
# results in the list. This token is valid for 72 hours after it is created.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
# The consent evaluation result for each `data_id`.
# Corresponds to the JSON property `results`
# @return [Array<Google::Apis::HealthcareV1beta1::Result>]
attr_accessor :results
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
@results = args[:results] if args.key?(:results)
end
end
# Request to export Annotations. The export operation is not atomic. If a
# failure occurs, any annotations already exported are not removed.
class ExportAnnotationsRequest
include Google::Apis::Core::Hashable
# The BigQuery table for export.
# Corresponds to the JSON property `bigqueryDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1AnnotationBigQueryDestination]
attr_accessor :bigquery_destination
# The Cloud Storage location for export.
# Corresponds to the JSON property `gcsDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1AnnotationGcsDestination]
attr_accessor :gcs_destination
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@bigquery_destination = args[:bigquery_destination] if args.key?(:bigquery_destination)
@gcs_destination = args[:gcs_destination] if args.key?(:gcs_destination)
end
end
# Response for successful annotation export operations. This structure is
# included in response upon operation completion.
class ExportAnnotationsResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Exports data from the specified DICOM store. If a given resource, such as a
# DICOM object with the same SOPInstance UID, already exists in the output, it
# is overwritten with the version in the source dataset. Exported DICOM data
# persists when the DICOM store from which it was exported is deleted.
class ExportDicomDataRequest
include Google::Apis::Core::Hashable
# The BigQuery table where the server writes output.
# Corresponds to the JSON property `bigqueryDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1DicomBigQueryDestination]
attr_accessor :bigquery_destination
# Specifies the filter configuration for DICOM resources.
# Corresponds to the JSON property `filterConfig`
# @return [Google::Apis::HealthcareV1beta1::DicomFilterConfig]
attr_accessor :filter_config
# The Cloud Storage location where the server writes the output and the export
# configuration.
# Corresponds to the JSON property `gcsDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1DicomGcsDestination]
attr_accessor :gcs_destination
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@bigquery_destination = args[:bigquery_destination] if args.key?(:bigquery_destination)
@filter_config = args[:filter_config] if args.key?(:filter_config)
@gcs_destination = args[:gcs_destination] if args.key?(:gcs_destination)
end
end
# Returns additional information in regards to a completed DICOM store export.
class ExportDicomDataResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Request to schedule an export.
class ExportMessagesRequest
include Google::Apis::Core::Hashable
# The end of the range in `send_time` (MSH.7, https://www.hl7.org/documentcenter/
# public_temp_2E58C1F9-1C23-BA17-0C6126475344DA9D/wg/conf/HL7MSH.htm) to process.
# If not specified, the time when the export is scheduled is used. This value
# has to come after the `start_time` defined below. Only messages whose `
# send_time` lies in the range `start_time` (inclusive) to `end_time` (exclusive)
# are exported.
# Corresponds to the JSON property `endTime`
# @return [String]
attr_accessor :end_time
# The Cloud Storage output destination. The Cloud Healthcare Service Agent
# requires the `roles/storage.objectAdmin` Cloud IAM roles on the Cloud Storage
# location.
# Corresponds to the JSON property `gcsDestination`
# @return [Google::Apis::HealthcareV1beta1::GcsDestination]
attr_accessor :gcs_destination
# The start of the range in `send_time` (MSH.7, https://www.hl7.org/
# documentcenter/public_temp_2E58C1F9-1C23-BA17-0C6126475344DA9D/wg/conf/HL7MSH.
# htm) to process. If not specified, the UNIX epoch (1970-01-01T00:00:00Z) is
# used. This value has to come before the `end_time` defined above. Only
# messages whose `send_time` lies in the range `start_time` (inclusive) to `
# end_time` (exclusive) are exported.
# Corresponds to the JSON property `startTime`
# @return [String]
attr_accessor :start_time
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@end_time = args[:end_time] if args.key?(:end_time)
@gcs_destination = args[:gcs_destination] if args.key?(:gcs_destination)
@start_time = args[:start_time] if args.key?(:start_time)
end
end
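# Sketch: exporting HL7v2 messages whose `send_time` falls within calendar year
# 2021 to Cloud Storage, using the GcsDestination message defined later in this
# file. The bucket path is a placeholder.
#
#   hl7_export = Google::Apis::HealthcareV1beta1::ExportMessagesRequest.new(
#     start_time: '2021-01-01T00:00:00Z',
#     end_time: '2022-01-01T00:00:00Z',
#     gcs_destination: Google::Apis::HealthcareV1beta1::GcsDestination.new(
#       uri_prefix: 'gs://my-bucket/hl7v2-exports/'
#     )
#   )
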
# Request to export resources.
class ExportResourcesRequest
include Google::Apis::Core::Hashable
# If provided, only resources updated after this time are exported. The time
# uses the format YYYY-MM-DDThh:mm:ss.sss+zz:zz. For example, `2015-02-07T13:28:
# 17.239+02:00` or `2017-01-01T00:00:00Z`. The time must be specified to the
# second and include a time zone.
# Corresponds to the JSON property `_since`
# @return [String]
attr_accessor :_since
# String of comma-delimited FHIR resource types. If provided, only resources of
# the specified resource type(s) are exported.
# Corresponds to the JSON property `_type`
# @return [String]
attr_accessor :_type
# The configuration for exporting to BigQuery.
# Corresponds to the JSON property `bigqueryDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1FhirBigQueryDestination]
attr_accessor :bigquery_destination
# The configuration for exporting to Cloud Storage.
# Corresponds to the JSON property `gcsDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1FhirGcsDestination]
attr_accessor :gcs_destination
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@_since = args[:_since] if args.key?(:_since)
@_type = args[:_type] if args.key?(:_type)
@bigquery_destination = args[:bigquery_destination] if args.key?(:bigquery_destination)
@gcs_destination = args[:gcs_destination] if args.key?(:gcs_destination)
end
end
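# Sketch of an incremental FHIR export request: only Patient and Observation
# resources updated since the given instant. A BigQuery or Cloud Storage
# destination would normally be attached as well; it is omitted here because
# those messages are defined elsewhere in this file.
#
#   fhir_export = Google::Apis::HealthcareV1beta1::ExportResourcesRequest.new(
#     _since: '2021-06-01T00:00:00Z',
#     _type: 'Patient,Observation'
#   )
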
# Represents a textual expression in the Common Expression Language (CEL) syntax.
# CEL is a C-like expression language. The syntax and semantics of CEL are
# documented at https://github.com/google/cel-spec. Example (Comparison): title:
# "Summary size limit" description: "Determines if a summary is less than 100
# chars" expression: "document.summary.size() < 100" Example (Equality): title: "
# Requestor is owner" description: "Determines if requestor is the document
# owner" expression: "document.owner == request.auth.claims.email" Example (
# Logic): title: "Public documents" description: "Determine whether the document
# should be publicly visible" expression: "document.type != 'private' &&
# document.type != 'internal'" Example (Data Manipulation): title: "Notification
# string" description: "Create a notification string with a timestamp."
# expression: "'New message received at ' + string(document.create_time)" The
# exact variables and functions that may be referenced within an expression are
# determined by the service that evaluates it. See the service documentation for
# additional information.
class Expr
include Google::Apis::Core::Hashable
# Optional. Description of the expression. This is a longer text which describes
# the expression, e.g. when hovered over it in a UI.
# Corresponds to the JSON property `description`
# @return [String]
attr_accessor :description
# Textual representation of an expression in Common Expression Language syntax.
# Corresponds to the JSON property `expression`
# @return [String]
attr_accessor :expression
# Optional. String indicating the location of the expression for error reporting,
# e.g. a file name and a position in the file.
# Corresponds to the JSON property `location`
# @return [String]
attr_accessor :location
# Optional. Title for the expression, i.e. a short string describing its purpose.
# This can be used e.g. in UIs which allow entering the expression.
# Corresponds to the JSON property `title`
# @return [String]
attr_accessor :title
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@description = args[:description] if args.key?(:description)
@expression = args[:expression] if args.key?(:expression)
@location = args[:location] if args.key?(:location)
@title = args[:title] if args.key?(:title)
end
end
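# Sketch reusing the "Requestor is owner" example from the documentation above,
# expressed as an Expr instance. Such an expression is what
# GoogleCloudHealthcareV1beta1ConsentPolicy#authorization_rule expects further
# below.
#
#   rule = Google::Apis::HealthcareV1beta1::Expr.new(
#     title: 'Requestor is owner',
#     description: 'Determines if requestor is the document owner',
#     expression: 'document.owner == request.auth.claims.email'
#   )
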
# A feature of an entity mention.
class Feature
include Google::Apis::Core::Hashable
# The model's confidence in this feature annotation. A number between 0 and 1.
# Corresponds to the JSON property `confidence`
# @return [Float]
attr_accessor :confidence
# The value of this feature annotation. Its range depends on the type of the
# feature.
# Corresponds to the JSON property `value`
# @return [String]
attr_accessor :value
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@confidence = args[:confidence] if args.key?(:confidence)
@value = args[:value] if args.key?(:value)
end
end
# Specifies how to handle de-identification of a FHIR store.
class FhirConfig
include Google::Apis::Core::Hashable
# Specifies FHIR paths to match and how to transform them. Any field that is not
# matched by a FieldMetadata is passed through to the output dataset unmodified.
# All extensions are removed in the output. If a field can be matched by more
# than one FieldMetadata, the first FieldMetadata.Action is applied.
# Corresponds to the JSON property `fieldMetadataList`
# @return [Array<Google::Apis::HealthcareV1beta1::FieldMetadata>]
attr_accessor :field_metadata_list
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@field_metadata_list = args[:field_metadata_list] if args.key?(:field_metadata_list)
end
end
# Filter configuration.
class FhirFilter
include Google::Apis::Core::Hashable
# A list of FHIR resources.
# Corresponds to the JSON property `resources`
# @return [Google::Apis::HealthcareV1beta1::Resources]
attr_accessor :resources
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@resources = args[:resources] if args.key?(:resources)
end
end
# Represents a FHIR store.
class FhirStore
include Google::Apis::Core::Hashable
# If true, overrides the default search behavior for this FHIR store to `
# handling=strict` which returns an error for unrecognized search parameters. If
# false, uses the FHIR specification default `handling=lenient` which ignores
# unrecognized search parameters. The handling can always be changed from the
# default on an individual API call by setting the HTTP header `Prefer: handling=
# strict` or `Prefer: handling=lenient`.
# Corresponds to the JSON property `defaultSearchHandlingStrict`
# @return [Boolean]
attr_accessor :default_search_handling_strict
alias_method :default_search_handling_strict?, :default_search_handling_strict
# Immutable. Whether to disable referential integrity in this FHIR store. This
# field is immutable after FHIR store creation. The default value is false,
# meaning that the API enforces referential integrity and fails the requests
# that result in inconsistent state in the FHIR store. When this field is set to
# true, the API skips referential integrity checks. Consequently, operations
# that rely on references, such as GetPatientEverything, do not return all the
# results if broken references exist.
# Corresponds to the JSON property `disableReferentialIntegrity`
# @return [Boolean]
attr_accessor :disable_referential_integrity
alias_method :disable_referential_integrity?, :disable_referential_integrity
# Immutable. Whether to disable resource versioning for this FHIR store. This
# field can not be changed after the creation of FHIR store. If set to false,
# which is the default behavior, all write operations cause historical versions
# to be recorded automatically. The historical versions can be fetched through
# the history APIs, but cannot be updated. If set to true, no historical
# versions are kept. The server sends errors for attempts to read the historical
# versions.
# Corresponds to the JSON property `disableResourceVersioning`
# @return [Boolean]
attr_accessor :disable_resource_versioning
alias_method :disable_resource_versioning?, :disable_resource_versioning
# Whether this FHIR store has the [updateCreate capability](https://www.hl7.org/
# fhir/capabilitystatement-definitions.html#CapabilityStatement.rest.resource.
# updateCreate). This determines if the client can use an Update operation to
# create a new resource with a client-specified ID. If false, all IDs are server-
# assigned through the Create operation and attempts to update a non-existent
# resource return errors. It is strongly advised not to include or encode any
# sensitive data such as patient identifiers in client-specified resource IDs.
# Those IDs are part of the FHIR resource path recorded in Cloud audit logs and
# Pub/Sub notifications. Those IDs can also be contained in reference fields
# within other resources.
# Corresponds to the JSON property `enableUpdateCreate`
# @return [Boolean]
attr_accessor :enable_update_create
alias_method :enable_update_create?, :enable_update_create
# User-supplied key-value pairs used to organize FHIR stores. Label keys must be
# between 1 and 63 characters long, have a UTF-8 encoding of maximum 128 bytes,
# and must conform to the following PCRE regular expression: \p`Ll`\p`Lo``0,62`
# Label values are optional, must be between 1 and 63 characters long, have a
# UTF-8 encoding of maximum 128 bytes, and must conform to the following PCRE
# regular expression: [\p`Ll`\p`Lo`\p`N`_-]`0,63` No more than 64 labels can be
# associated with a given store.
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# Output only. Resource name of the FHIR store, of the form `projects/`
# project_id`/datasets/`dataset_id`/fhirStores/`fhir_store_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Specifies where to send notifications upon changes to a data store.
# Corresponds to the JSON property `notificationConfig`
# @return [Google::Apis::HealthcareV1beta1::NotificationConfig]
attr_accessor :notification_config
# A list of streaming configs that configure the destinations of streaming
# export for every resource mutation in this FHIR store. Each store is allowed
# to have up to 10 streaming configs. After a new config is added, the next
# resource mutation is streamed to the new location in addition to the existing
# ones. When a location is removed from the list, the server stops streaming to
# that location. Before adding a new config, you must add the required [`
# bigquery.dataEditor`](https://cloud.google.com/bigquery/docs/access-control#
# bigquery.dataEditor) role to your project's **Cloud Healthcare Service Agent**
# [service account](https://cloud.google.com/iam/docs/service-accounts). Some
# lag (typically on the order of dozens of seconds) is expected before the
# results show up in the streaming destination.
# Corresponds to the JSON property `streamConfigs`
# @return [Array<Google::Apis::HealthcareV1beta1::StreamConfig>]
attr_accessor :stream_configs
# Contains the configuration for FHIR profiles and validation.
# Corresponds to the JSON property `validationConfig`
# @return [Google::Apis::HealthcareV1beta1::ValidationConfig]
attr_accessor :validation_config
# Immutable. The FHIR specification version that this FHIR store supports
# natively. This field is immutable after store creation. Requests are rejected
# if they contain FHIR resources of a different version. Version is required for
# every FHIR store.
# Corresponds to the JSON property `version`
# @return [String]
attr_accessor :version
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@default_search_handling_strict = args[:default_search_handling_strict] if args.key?(:default_search_handling_strict)
@disable_referential_integrity = args[:disable_referential_integrity] if args.key?(:disable_referential_integrity)
@disable_resource_versioning = args[:disable_resource_versioning] if args.key?(:disable_resource_versioning)
@enable_update_create = args[:enable_update_create] if args.key?(:enable_update_create)
@labels = args[:labels] if args.key?(:labels)
@name = args[:name] if args.key?(:name)
@notification_config = args[:notification_config] if args.key?(:notification_config)
@stream_configs = args[:stream_configs] if args.key?(:stream_configs)
@validation_config = args[:validation_config] if args.key?(:validation_config)
@version = args[:version] if args.key?(:version)
end
end
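# Hedged sketch of a FhirStore resource body. The 'R4' version string is
# assumed from the API's FHIR version enum; labels are illustrative.
#
#   fhir_store = Google::Apis::HealthcareV1beta1::FhirStore.new(
#     version: 'R4',
#     enable_update_create: true,
#     disable_resource_versioning: false,
#     labels: { 'env' => 'staging' }
#   )
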
# A (sub) field of a type.
class Field
include Google::Apis::Core::Hashable
# The maximum number of times this field can be repeated. 0 or -1 means
# unbounded.
# Corresponds to the JSON property `maxOccurs`
# @return [Fixnum]
attr_accessor :max_occurs
# The minimum number of times this field must be present/repeated.
# Corresponds to the JSON property `minOccurs`
# @return [Fixnum]
attr_accessor :min_occurs
# The name of the field. For example, "PID-1" or just "1".
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# The HL7v2 table this field refers to. For example, PID-15 (Patient's Primary
# Language) usually refers to table "0296".
# Corresponds to the JSON property `table`
# @return [String]
attr_accessor :table
# The type of this field. A Type with this name must be defined in an
# Hl7TypesConfig.
# Corresponds to the JSON property `type`
# @return [String]
attr_accessor :type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@max_occurs = args[:max_occurs] if args.key?(:max_occurs)
@min_occurs = args[:min_occurs] if args.key?(:min_occurs)
@name = args[:name] if args.key?(:name)
@table = args[:table] if args.key?(:table)
@type = args[:type] if args.key?(:type)
end
end
# Specifies FHIR paths to match, and how to handle de-identification of matching
# fields.
class FieldMetadata
include Google::Apis::Core::Hashable
# Deidentify action for one field.
# Corresponds to the JSON property `action`
# @return [String]
attr_accessor :action
# List of paths to FHIR fields to redact. Each path is a period-separated list
# where each component is either a field name or FHIR type name. All types begin
# with an upper case letter. For example, the resource field "Patient.Address.
# city", which uses a string type, can be matched by "Patient.Address.String".
# Path also supports partial matching. For example, "Patient.Address.city" can
# be matched by "Address.city" (Patient omitted). Partial matching and type
# matching can be combined. For example, "Patient.Address.city" can be matched
# by "Address.String". For "choice" types (those defined in the FHIR spec with
# the form: field[x]), use two separate components. For example, "deceasedAge.
# unit" is matched by "Deceased.Age.unit". Supported types are:
# AdministrativeGenderCode, Code, Date, DateTime, Decimal, HumanName, Id,
# LanguageCode, Markdown, Oid, String, Uri, Uuid, Xhtml. The sub-type for
# HumanName, such as HumanName.given or HumanName.family, can be omitted.
# Corresponds to the JSON property `paths`
# @return [Array<String>]
attr_accessor :paths
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@action = args[:action] if args.key?(:action)
@paths = args[:paths] if args.key?(:paths)
end
end
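# Sketch pairing a de-identify action with FHIR field paths, following the
# path-matching rules described above. The 'INSPECT_AND_TRANSFORM' action name
# is assumed from the API enum and should be verified.
#
#   field_rule = Google::Apis::HealthcareV1beta1::FieldMetadata.new(
#     action: 'INSPECT_AND_TRANSFORM',
#     paths: ['Patient.Address.city', 'HumanName']
#   )
#   fhir_config = Google::Apis::HealthcareV1beta1::FhirConfig.new(
#     field_metadata_list: [field_rule]
#   )
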
# List of infoTypes to be filtered.
class FilterList
include Google::Apis::Core::Hashable
# These infoTypes are those remaining after the `eval_info_type_mapping` and
# `golden_info_type_mapping` are applied.
# Corresponds to the JSON property `infoTypes`
# @return [Array<String>]
attr_accessor :info_types
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@info_types = args[:info_types] if args.key?(:info_types)
end
end
#
class Finding
include Google::Apis::Core::Hashable
# Zero-based ending index of the found text, exclusive.
# Corresponds to the JSON property `end`
# @return [Fixnum]
attr_accessor :end
# The type of information stored in this text range. For example, HumanName,
# BirthDate, or Address.
# Corresponds to the JSON property `infoType`
# @return [String]
attr_accessor :info_type
# The snippet of the sensitive text. This field is only populated during
# deidentification if `store_quote` is set to true in DeidentifyConfig.
# Corresponds to the JSON property `quote`
# @return [String]
attr_accessor :quote
# Zero-based starting index of the found text, inclusive.
# Corresponds to the JSON property `start`
# @return [Fixnum]
attr_accessor :start
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@end = args[:end] if args.key?(:end)
@info_type = args[:info_type] if args.key?(:info_type)
@quote = args[:quote] if args.key?(:quote)
@start = args[:start] if args.key?(:start)
end
end
# The Cloud Storage output destination. The Cloud Healthcare Service Agent
# requires the `roles/storage.objectAdmin` Cloud IAM roles on the Cloud Storage
# location.
class GcsDestination
include Google::Apis::Core::Hashable
# The format of the exported HL7v2 message files.
# Corresponds to the JSON property `contentStructure`
# @return [String]
attr_accessor :content_structure
# Specifies the parts of the Message resource to include in the export. If not
# specified, FULL is used.
# Corresponds to the JSON property `messageView`
# @return [String]
attr_accessor :message_view
# URI of an existing Cloud Storage directory where the server writes result
# files, in the format `gs://`bucket-id`/`path/to/destination/dir``. If there is
# no trailing slash, the service appends one when composing the object path.
# Corresponds to the JSON property `uriPrefix`
# @return [String]
attr_accessor :uri_prefix
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@content_structure = args[:content_structure] if args.key?(:content_structure)
@message_view = args[:message_view] if args.key?(:message_view)
@uri_prefix = args[:uri_prefix] if args.key?(:uri_prefix)
end
end
# Specifies the configuration for importing data from Cloud Storage.
class GcsSource
include Google::Apis::Core::Hashable
# Points to a Cloud Storage URI containing file(s) to import. The URI must be in
# the following format: `gs://`bucket_id`/`object_id``. The URI can include
# wildcards in `object_id` and thus identify multiple files. Supported wildcards:
# * `*` to match 0 or more non-separator characters * `**` to match 0 or more
# characters (including separators). Must be used at the end of a path and with
# no other wildcards in the path. Can also be used with a file extension (such
# as .ndjson), which imports all files with the extension in the specified
# directory and its sub-directories. For example, `gs://my-bucket/my-directory/**
# .ndjson` imports all files with `.ndjson` extensions in `my-directory/` and
# its sub-directories. * `?` to match 1 character Files matching the wildcard
# are expected to contain content only, no metadata.
# Corresponds to the JSON property `uri`
# @return [String]
attr_accessor :uri
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@uri = args[:uri] if args.key?(:uri)
end
end
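# Sketch: a GcsSource that imports every .ndjson file under a directory tree,
# using the '**' wildcard form described above. Bucket and path are
# placeholders.
#
#   source = Google::Apis::HealthcareV1beta1::GcsSource.new(
#     uri: 'gs://my-bucket/fhir-import/**.ndjson'
#   )
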
# The BigQuery table for export.
class GoogleCloudHealthcareV1beta1AnnotationBigQueryDestination
include Google::Apis::Core::Hashable
# Use `write_disposition` instead. If `write_disposition` is specified, this
# parameter is ignored. force=false is equivalent to write_disposition=
# WRITE_EMPTY and force=true is equivalent to write_disposition=WRITE_TRUNCATE.
# Corresponds to the JSON property `force`
# @return [Boolean]
attr_accessor :force
alias_method :force?, :force
# Specifies the schema format to export.
# Corresponds to the JSON property `schemaType`
# @return [String]
attr_accessor :schema_type
# BigQuery URI to a table, up to 2000 characters long, must be of the form bq://
# projectId.bqDatasetId.tableId.
# Corresponds to the JSON property `tableUri`
# @return [String]
attr_accessor :table_uri
# Determines if existing data in the destination dataset is overwritten,
# appended to, or not written if the tables contain data. If a write_disposition
# is specified, the `force` parameter is ignored.
# Corresponds to the JSON property `writeDisposition`
# @return [String]
attr_accessor :write_disposition
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@force = args[:force] if args.key?(:force)
@schema_type = args[:schema_type] if args.key?(:schema_type)
@table_uri = args[:table_uri] if args.key?(:table_uri)
@write_disposition = args[:write_disposition] if args.key?(:write_disposition)
end
end
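# Sketch of a BigQuery destination for annotation export. The table URI follows
# the bq://projectId.bqDatasetId.tableId form documented above, and
# 'WRITE_TRUNCATE' is the disposition that force=true maps to.
#
#   bq_destination = Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1AnnotationBigQueryDestination.new(
#     table_uri: 'bq://my-project.my_dataset.annotations',
#     write_disposition: 'WRITE_TRUNCATE'
#   )
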
# The Cloud Storage location for export.
class GoogleCloudHealthcareV1beta1AnnotationGcsDestination
include Google::Apis::Core::Hashable
# The Cloud Storage destination to export to. URI for a Cloud Storage directory
# where the server writes result files, in the format `gs://`bucket-id`/`path/to/
# destination/dir``. If there is no trailing slash, the service appends one when
# composing the object path. The user is responsible for creating the Cloud
# Storage bucket referenced in `uri_prefix`.
# Corresponds to the JSON property `uriPrefix`
# @return [String]
attr_accessor :uri_prefix
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@uri_prefix = args[:uri_prefix] if args.key?(:uri_prefix)
end
end
# Specifies the configuration for importing data from Cloud Storage.
class GoogleCloudHealthcareV1beta1AnnotationGcsSource
include Google::Apis::Core::Hashable
# Points to a Cloud Storage URI containing file(s) with content only. The URI
# must be in the following format: `gs://`bucket_id`/`object_id``. The URI can
# include wildcards in `object_id` and thus identify multiple files. Supported
# wildcards: '*' to match 0 or more non-separator characters '**' to match 0 or
# more characters (including separators). Must be used at the end of a path and
# with no other wildcards in the path. Can also be used with a file extension (
# such as .dcm), which imports all files with the extension in the specified
# directory and its sub-directories. For example, `gs://my-bucket/my-directory/**
# .json` imports all files with .json extensions in `my-directory/` and its sub-
# directories. '?' to match 1 character All other URI formats are invalid. Files
# matching the wildcard are expected to contain content only, no metadata.
# Corresponds to the JSON property `uri`
# @return [String]
attr_accessor :uri
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@uri = args[:uri] if args.key?(:uri)
end
end
# The Cloud Storage location for export.
class GoogleCloudHealthcareV1beta1ConsentGcsDestination
include Google::Apis::Core::Hashable
# URI for a Cloud Storage directory where the server writes result files, in the
# format `gs://`bucket-id`/`path/to/destination/dir``. If there is no trailing
# slash, the service appends one when composing the object path. The user is
# responsible for creating the Cloud Storage bucket and directory referenced in `
# uri_prefix`.
# Corresponds to the JSON property `uriPrefix`
# @return [String]
attr_accessor :uri_prefix
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@uri_prefix = args[:uri_prefix] if args.key?(:uri_prefix)
end
end
# Represents a user's consent in terms of the resources that can be accessed and
# under what conditions.
class GoogleCloudHealthcareV1beta1ConsentPolicy
include Google::Apis::Core::Hashable
# Represents a textual expression in the Common Expression Language (CEL) syntax.
# CEL is a C-like expression language. The syntax and semantics of CEL are
# documented at https://github.com/google/cel-spec. Example (Comparison): title:
# "Summary size limit" description: "Determines if a summary is less than 100
# chars" expression: "document.summary.size() < 100" Example (Equality): title: "
# Requestor is owner" description: "Determines if requestor is the document
# owner" expression: "document.owner == request.auth.claims.email" Example (
# Logic): title: "Public documents" description: "Determine whether the document
# should be publicly visible" expression: "document.type != 'private' &&
# document.type != 'internal'" Example (Data Manipulation): title: "Notification
# string" description: "Create a notification string with a timestamp."
# expression: "'New message received at ' + string(document.create_time)" The
# exact variables and functions that may be referenced within an expression are
# determined by the service that evaluates it. See the service documentation for
# additional information.
# Corresponds to the JSON property `authorizationRule`
# @return [Google::Apis::HealthcareV1beta1::Expr]
attr_accessor :authorization_rule
# The resources that this policy applies to. A resource is a match if it matches
# all the attributes listed here. If empty, this policy applies to all User data
# mappings for the given user.
# Corresponds to the JSON property `resourceAttributes`
# @return [Array<Google::Apis::HealthcareV1beta1::Attribute>]
attr_accessor :resource_attributes
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@authorization_rule = args[:authorization_rule] if args.key?(:authorization_rule)
@resource_attributes = args[:resource_attributes] if args.key?(:resource_attributes)
end
end
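# A minimal construction sketch (not part of the generated classes): a consent
# policy whose access rule is a CEL expression, as in the authorization_rule
# documentation above. The Expr field names used here (`title`, `expression`)
# are assumptions based on google.type.Expr and the CEL examples quoted above;
# an empty resource_attributes list means the policy covers all of the user's
# data mappings.
#
#   policy = GoogleCloudHealthcareV1beta1ConsentPolicy.new(
#     authorization_rule: Expr.new(
#       title: 'Research use only',
#       expression: "request.auth.claims.purpose == 'research'"
#     ),
#     resource_attributes: []
#   )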
# Contains a summary of the DeidentifyDicomStore operation.
class GoogleCloudHealthcareV1beta1DeidentifyDeidentifyDicomStoreSummary
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Contains a summary of the DeidentifyFhirStore operation.
class GoogleCloudHealthcareV1beta1DeidentifyDeidentifyFhirStoreSummary
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# The BigQuery table where the server writes output.
class GoogleCloudHealthcareV1beta1DicomBigQueryDestination
include Google::Apis::Core::Hashable
# Use `write_disposition` instead. If `write_disposition` is specified, this
# parameter is ignored. force=false is equivalent to write_disposition=
# WRITE_EMPTY and force=true is equivalent to write_disposition=WRITE_TRUNCATE.
# Corresponds to the JSON property `force`
# @return [Boolean]
attr_accessor :force
alias_method :force?, :force
# BigQuery URI to a table, up to 2000 characters long, in the format `bq://
# projectId.bqDatasetId.tableId`
# Corresponds to the JSON property `tableUri`
# @return [String]
attr_accessor :table_uri
# Determines whether the existing table in the destination is to be overwritten
# or appended to. If a write_disposition is specified, the `force` parameter is
# ignored.
# Corresponds to the JSON property `writeDisposition`
# @return [String]
attr_accessor :write_disposition
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@force = args[:force] if args.key?(:force)
@table_uri = args[:table_uri] if args.key?(:table_uri)
@write_disposition = args[:write_disposition] if args.key?(:write_disposition)
end
end
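# A minimal construction sketch (not part of the generated classes): a DICOM
# export destination that overwrites an existing BigQuery table. The project,
# dataset and table names are placeholders; write_disposition takes precedence
# over the deprecated `force` flag, as documented above.
#
#   bq_destination = GoogleCloudHealthcareV1beta1DicomBigQueryDestination.new(
#     table_uri: 'bq://example-project.example_dataset.dicom_metadata',
#     write_disposition: 'WRITE_TRUNCATE'
#   )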
# The Cloud Storage location where the server writes the output and the export
# configuration.
class GoogleCloudHealthcareV1beta1DicomGcsDestination
include Google::Apis::Core::Hashable
# MIME types supported by DICOM spec. Each file is written in the following
# format: `.../`study_id`/`series_id`/`instance_id`[/`frame_number`].`extension``
# The frame_number component exists only for multi-frame instances. Supported
# MIME types are consistent with supported formats in DICOMweb: https://cloud.
# google.com/healthcare/docs/dicom#retrieve_transaction. Specifically, the
# following are supported: - application/dicom; transfer-syntax=1.2.840.10008.1.
# 2.1 (uncompressed DICOM) - application/dicom; transfer-syntax=1.2.840.10008.1.
# 2.4.50 (DICOM with embedded JPEG Baseline) - application/dicom; transfer-
# syntax=1.2.840.10008.1.2.4.90 (DICOM with embedded JPEG 2000 Lossless Only) -
# application/dicom; transfer-syntax=1.2.840.10008.1.2.4.91 (DICOM with embedded
# JPEG 2000) - application/dicom; transfer-syntax=* (DICOM with no transcoding)
# - application/octet-stream; transfer-syntax=1.2.840.10008.1.2.1 (raw
# uncompressed PixelData) - application/octet-stream; transfer-syntax=* (raw
# PixelData in whatever format it was uploaded in) - image/jpeg; transfer-syntax=
# 1.2.840.10008.1.2.4.50 (Consumer JPEG) - image/png The following extensions
# are used for output files: - application/dicom -> .dcm - image/jpeg -> .jpg -
# image/png -> .png - application/octet-stream -> no extension If unspecified,
# the instances are exported in the original DICOM format they were uploaded in.
# Corresponds to the JSON property `mimeType`
# @return [String]
attr_accessor :mime_type
# The Cloud Storage destination to export to. URI for a Cloud Storage directory
# where the server writes the result files, in the format `gs://`bucket-id`/`
# path/to/destination/dir``). If there is no trailing slash, the service appends
# one when composing the object path. The user is responsible for creating the
# Cloud Storage bucket referenced in `uri_prefix`.
# Corresponds to the JSON property `uriPrefix`
# @return [String]
attr_accessor :uri_prefix
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@mime_type = args[:mime_type] if args.key?(:mime_type)
@uri_prefix = args[:uri_prefix] if args.key?(:uri_prefix)
end
end
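# A minimal construction sketch (not part of the generated classes): exporting
# DICOM instances to Cloud Storage without transcoding. The bucket and prefix
# are placeholders; the mime_type value is one of the transfer syntaxes listed
# in the documentation above.
#
#   gcs_destination = GoogleCloudHealthcareV1beta1DicomGcsDestination.new(
#     uri_prefix: 'gs://example-bucket/dicom-exports/',
#     mime_type: 'application/dicom; transfer-syntax=*'
#   )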
# Specifies the configuration for importing data from Cloud Storage.
class GoogleCloudHealthcareV1beta1DicomGcsSource
include Google::Apis::Core::Hashable
# Points to a Cloud Storage URI containing file(s) with content only. The URI
# must be in the following format: `gs://`bucket_id`/`object_id``. The URI can
# include wildcards in `object_id` and thus identify multiple files. Supported
# wildcards: * '*' to match 0 or more non-separator characters * '**' to match 0
# or more characters (including separators). Must be used at the end of a path
# and with no other wildcards in the path. Can also be used with a file
# extension (such as .dcm), which imports all files with the extension in the
# specified directory and its sub-directories. For example, `gs://my-bucket/my-
# directory/**.dcm` imports all files with .dcm extensions in `my-directory/`
# and its sub-directories. * '?' to match 1 character. All other URI formats are
# invalid. Files matching the wildcard are expected to contain content only, no
# metadata.
# Corresponds to the JSON property `uri`
# @return [String]
attr_accessor :uri
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@uri = args[:uri] if args.key?(:uri)
end
end
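# A minimal construction sketch (not part of the generated classes): importing
# every .dcm file under a directory and its sub-directories using the '**'
# wildcard described above. The bucket and path are placeholders.
#
#   gcs_source = GoogleCloudHealthcareV1beta1DicomGcsSource.new(
#     uri: 'gs://example-bucket/dicom-uploads/**.dcm'
#   )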
# StreamConfig specifies configuration for a streaming DICOM export.
class GoogleCloudHealthcareV1beta1DicomStreamConfig
include Google::Apis::Core::Hashable
# The BigQuery table where the server writes output.
# Corresponds to the JSON property `bigqueryDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1DicomBigQueryDestination]
attr_accessor :bigquery_destination
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@bigquery_destination = args[:bigquery_destination] if args.key?(:bigquery_destination)
end
end
# The configuration for exporting to BigQuery.
class GoogleCloudHealthcareV1beta1FhirBigQueryDestination
include Google::Apis::Core::Hashable
# BigQuery URI to an existing dataset, up to 2000 characters long, in the format
# `bq://projectId.bqDatasetId`.
# Corresponds to the JSON property `datasetUri`
# @return [String]
attr_accessor :dataset_uri
# Use `write_disposition` instead. If `write_disposition` is specified, this
# parameter is ignored. force=false is equivalent to write_disposition=
# WRITE_EMPTY and force=true is equivalent to write_disposition=WRITE_TRUNCATE.
# Corresponds to the JSON property `force`
# @return [Boolean]
attr_accessor :force
alias_method :force?, :force
# Configuration for the FHIR BigQuery schema. Determines how the server
# generates the schema.
# Corresponds to the JSON property `schemaConfig`
# @return [Google::Apis::HealthcareV1beta1::SchemaConfig]
attr_accessor :schema_config
# Determines if existing data in the destination dataset is overwritten,
# appended to, or not written if the tables contain data. If a write_disposition
# is specified, the `force` parameter is ignored.
# Corresponds to the JSON property `writeDisposition`
# @return [String]
attr_accessor :write_disposition
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@dataset_uri = args[:dataset_uri] if args.key?(:dataset_uri)
@force = args[:force] if args.key?(:force)
@schema_config = args[:schema_config] if args.key?(:schema_config)
@write_disposition = args[:write_disposition] if args.key?(:write_disposition)
end
end
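# A minimal construction sketch (not part of the generated classes): exporting
# FHIR resources to an existing BigQuery dataset, writing only if the
# destination tables are empty. The dataset URI is a placeholder; schema_config
# is omitted, so the service default is assumed to apply.
#
#   fhir_bq = GoogleCloudHealthcareV1beta1FhirBigQueryDestination.new(
#     dataset_uri: 'bq://example-project.fhir_export',
#     write_disposition: 'WRITE_EMPTY'
#   )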
# Response when all resources export successfully. This structure is included in
# the response to describe the detailed outcome after the operation finishes
# successfully.
class GoogleCloudHealthcareV1beta1FhirExportResourcesResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# The configuration for exporting to Cloud Storage.
class GoogleCloudHealthcareV1beta1FhirGcsDestination
include Google::Apis::Core::Hashable
# URI for a Cloud Storage directory where result files should be written (in the
# format `gs://`bucket-id`/`path/to/destination/dir``). If there is no trailing
# slash, the service appends one when composing the object path. The Cloud
# Storage bucket referenced in `uri_prefix` must exist or an error occurs.
# Corresponds to the JSON property `uriPrefix`
# @return [String]
attr_accessor :uri_prefix
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@uri_prefix = args[:uri_prefix] if args.key?(:uri_prefix)
end
end
# Specifies the configuration for importing data from Cloud Storage.
class GoogleCloudHealthcareV1beta1FhirGcsSource
include Google::Apis::Core::Hashable
# Points to a Cloud Storage URI containing file(s) to import. The URI must be in
# the following format: `gs://`bucket_id`/`object_id``. The URI can include
# wildcards in `object_id` and thus identify multiple files. Supported wildcards:
# * `*` to match 0 or more non-separator characters * `**` to match 0 or more
# characters (including separators). Must be used at the end of a path and with
# no other wildcards in the path. Can also be used with a file extension (such
# as .ndjson), which imports all files with the extension in the specified
# directory and its sub-directories. For example, `gs://my-bucket/my-directory/**
# .ndjson` imports all files with `.ndjson` extensions in `my-directory/` and
# its sub-directories. * `?` to match 1 character Files matching the wildcard
# are expected to contain content only, no metadata.
# Corresponds to the JSON property `uri`
# @return [String]
attr_accessor :uri
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@uri = args[:uri] if args.key?(:uri)
end
end
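# A minimal construction sketch (not part of the generated classes): importing
# newline-delimited JSON files from Cloud Storage with the '**' wildcard
# described above. The bucket and path are placeholders.
#
#   fhir_source = GoogleCloudHealthcareV1beta1FhirGcsSource.new(
#     uri: 'gs://example-bucket/fhir-import/**.ndjson'
#   )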
# Final response of importing resources. This structure is included in the
# response to describe the detailed outcome after the operation finishes
# successfully.
class GoogleCloudHealthcareV1beta1FhirImportResourcesResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Construct representing a logical group or a segment.
class GroupOrSegment
include Google::Apis::Core::Hashable
# An HL7v2 logical group construct.
# Corresponds to the JSON property `group`
# @return [Google::Apis::HealthcareV1beta1::SchemaGroup]
attr_accessor :group
# An HL7v2 Segment.
# Corresponds to the JSON property `segment`
# @return [Google::Apis::HealthcareV1beta1::SchemaSegment]
attr_accessor :segment
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@group = args[:group] if args.key?(:group)
@segment = args[:segment] if args.key?(:segment)
end
end
# Root config message for HL7v2 schema. This contains a schema structure of
# groups and segments, and filters that determine which messages to apply the
# schema structure to.
class Hl7SchemaConfig
include Google::Apis::Core::Hashable
# Map from each HL7v2 message type and trigger event pair, such as ADT_A04, to
# its schema configuration root group.
# Corresponds to the JSON property `messageSchemaConfigs`
# @return [Hash<String,Google::Apis::HealthcareV1beta1::SchemaGroup>]
attr_accessor :message_schema_configs
# Each VersionSource is tested; the schema is used for the message only if all
# of them match.
# Corresponds to the JSON property `version`
# @return [Array<Google::Apis::HealthcareV1beta1::VersionSource>]
attr_accessor :version
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@message_schema_configs = args[:message_schema_configs] if args.key?(:message_schema_configs)
@version = args[:version] if args.key?(:version)
end
end
# Root config for HL7v2 datatype definitions for a specific HL7v2 version.
class Hl7TypesConfig
include Google::Apis::Core::Hashable
# The HL7v2 type definitions.
# Corresponds to the JSON property `type`
# @return [Array<Google::Apis::HealthcareV1beta1::Type>]
attr_accessor :type
# The version selectors that this config applies to. A message must match ALL
# version sources to apply.
# Corresponds to the JSON property `version`
# @return [Array<Google::Apis::HealthcareV1beta1::VersionSource>]
attr_accessor :version
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@type = args[:type] if args.key?(:type)
@version = args[:version] if args.key?(:version)
end
end
# Specifies where and whether to send notifications upon changes to a data store.
class Hl7V2NotificationConfig
include Google::Apis::Core::Hashable
# Restricts notifications sent for messages matching a filter. If this is empty,
# all messages are matched. The following syntax is available: * A string field
# value can be written as text inside quotation marks, for example `"query text"`
# . The only valid relational operation for text fields is equality (`=`), where
# text is searched within the field, rather than having the field be equal to
# the text. For example, `"Comment = great"` returns messages with `great` in
# the comment field. * A number field value can be written as an integer, a
# decimal, or an exponential. The valid relational operators for number fields
# are the equality operator (`=`), along with the less than/greater than
# operators (`<`, `<=`, `>`, `>=`). Note that there is no inequality (`!=`)
# operator. You can prepend the `NOT` operator to an expression to negate it. *
# A date field value must be written in `yyyy-mm-dd` form. Fields with date and
# time use the RFC3339 time format. Leading zeros are required for one-digit
# months and days. The valid relational operators for date fields are the
# equality operator (`=`) , along with the less than/greater than operators (`<`,
# `<=`, `>`, `>=`). Note that there is no inequality (`!=`) operator. You can
# prepend the `NOT` operator to an expression to negate it. * Multiple field
# query expressions can be combined in one query by adding `AND` or `OR`
# operators between the expressions. If a boolean operator appears within a
# quoted string, it is not treated as special, it's just another part of the
# character string to be matched. You can prepend the `NOT` operator to an
# expression to negate it. Fields/functions available for filtering are: * `
# message_type`, from the MSH-9.1 field. For example, `NOT message_type = "ADT"`.
# * `send_date` or `sendDate`, the YYYY-MM-DD date the message was sent in the
# dataset's time_zone, from the MSH-7 segment. For example, `send_date < "2017-
# 01-02"`. * `send_time`, the timestamp when the message was sent, using the
# RFC3339 time format for comparisons, from the MSH-7 segment. For example, `
# send_time < "2017-01-02T00:00:00-05:00"`. * `create_time`, the timestamp when
# the message was created in the HL7v2 store. Use the RFC3339 time format for
# comparisons. For example, `create_time < "2017-01-02T00:00:00-05:00"`. * `
# send_facility`, the care center that the message came from, from the MSH-4
# segment. For example, `send_facility = "ABC"`. * `PatientId(value, type)`,
# which matches if the message lists a patient having an ID of the given value
# and type in the PID-2, PID-3, or PID-4 segments. For example, `PatientId("
# 123456", "MRN")`. * `labels.x`, a string value of the label with key `x` as
# set using the Message.labels map. For example, `labels."priority"="high"`. The
# operator `:*` can be used to assert the existence of a label. For example, `
# labels."priority":*`.
# Corresponds to the JSON property `filter`
# @return [String]
attr_accessor :filter
# The [Pub/Sub](https://cloud.google.com/pubsub/docs/) topic that notifications
# of changes are published on. Supplied by the client. The notification is a `
# PubsubMessage` with the following fields: * `PubsubMessage.Data` contains the
# resource name. * `PubsubMessage.MessageId` is the ID of this notification. It
# is guaranteed to be unique within the topic. * `PubsubMessage.PublishTime` is
# the time when the message was published. Note that notifications are only sent
# if the topic is non-empty. [Topic names](https://cloud.google.com/pubsub/docs/
# overview#names) must be scoped to a project. Cloud Healthcare API service
# account must have publisher permissions on the given Pub/Sub topic. Not having
# adequate permissions causes the calls that send notifications to fail. If a
# notification can't be published to Pub/Sub, errors are logged to Cloud Logging.
# For more information, see [Viewing error logs in Cloud Logging](https://cloud.
# google.com/healthcare/docs/how-tos/logging).
# Corresponds to the JSON property `pubsubTopic`
# @return [String]
attr_accessor :pubsub_topic
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@filter = args[:filter] if args.key?(:filter)
@pubsub_topic = args[:pubsub_topic] if args.key?(:pubsub_topic)
end
end
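# A minimal construction sketch (not part of the generated classes): publishing
# notifications only for ADT messages to a client-owned Pub/Sub topic. The
# project and topic names are placeholders; the filter string follows the
# `filter` syntax documented above.
#
#   adt_notifications = Hl7V2NotificationConfig.new(
#     filter: 'message_type = "ADT"',
#     pubsub_topic: 'projects/example-project/topics/hl7v2-adt'
#   )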
# Represents an HL7v2 store.
class Hl7V2Store
include Google::Apis::Core::Hashable
# User-supplied key-value pairs used to organize HL7v2 stores. Label keys must
# be between 1 and 63 characters long, have a UTF-8 encoding of maximum 128
# bytes, and must conform to the following PCRE regular expression: \p`Ll`\p`Lo``
# 0,62` Label values are optional, must be between 1 and 63 characters long,
# have a UTF-8 encoding of maximum 128 bytes, and must conform to the following
# PCRE regular expression: [\p`Ll`\p`Lo`\p`N`_-]`0,63` No more than 64 labels
# can be associated with a given store.
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# Resource name of the HL7v2 store, of the form `projects/`project_id`/datasets/`
# dataset_id`/hl7V2Stores/`hl7v2_store_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Specifies where to send notifications upon changes to a data store.
# Corresponds to the JSON property `notificationConfig`
# @return [Google::Apis::HealthcareV1beta1::NotificationConfig]
attr_accessor :notification_config
# A list of notification configs. Each configuration uses a filter to determine
# whether to publish a message (both Ingest & Create) on the corresponding
# notification destination. Only the message name is sent as part of the
# notification. Supplied by the client.
# Corresponds to the JSON property `notificationConfigs`
# @return [Array<Google::Apis::HealthcareV1beta1::Hl7V2NotificationConfig>]
attr_accessor :notification_configs
# The configuration for the parser. It determines how the server parses the
# messages.
# Corresponds to the JSON property `parserConfig`
# @return [Google::Apis::HealthcareV1beta1::ParserConfig]
attr_accessor :parser_config
# Determines whether to reject duplicate messages. A duplicate message is a
# message with the same raw bytes as a message that has already been ingested/
# created in this HL7v2 store. The default value is false, meaning that the
# store accepts the duplicate messages and it also returns the same ACK message
# in the IngestMessageResponse as has been returned previously. Note that only
# one resource is created in the store. When this field is set to true,
# CreateMessage/IngestMessage requests with a duplicate message will be rejected
# by the store, and IngestMessageErrorDetail returns a NACK message upon
# rejection.
# Corresponds to the JSON property `rejectDuplicateMessage`
# @return [Boolean]
attr_accessor :reject_duplicate_message
alias_method :reject_duplicate_message?, :reject_duplicate_message
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@labels = args[:labels] if args.key?(:labels)
@name = args[:name] if args.key?(:name)
@notification_config = args[:notification_config] if args.key?(:notification_config)
@notification_configs = args[:notification_configs] if args.key?(:notification_configs)
@parser_config = args[:parser_config] if args.key?(:parser_config)
@reject_duplicate_message = args[:reject_duplicate_message] if args.key?(:reject_duplicate_message)
end
end
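# A minimal construction sketch (not part of the generated classes): an HL7v2
# store definition that rejects duplicate messages and reuses the notification
# config sketched after Hl7V2NotificationConfig above. The label key and value
# are placeholders chosen to satisfy the PCRE constraints in the `labels`
# documentation; `name` is assigned by the server and therefore not set here.
#
#   store = Hl7V2Store.new(
#     labels: { 'env' => 'staging' },
#     notification_configs: [adt_notifications],
#     reject_duplicate_message: true
#   )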
# Message that represents an arbitrary HTTP body. It should only be used for
# payload formats that can't be represented as JSON, such as raw binary or an
# HTML page. This message can be used both in streaming and non-streaming API
# methods in the request as well as the response. It can be used as a top-level
# request field, which is convenient if one wants to extract parameters from
# either the URL or HTTP template into the request fields and also want access
# to the raw HTTP body. Example: message GetResourceRequest ` // A unique
# request id. string request_id = 1; // The raw HTTP body is bound to this field.
# google.api.HttpBody http_body = 2; ` service ResourceService ` rpc
# GetResource(GetResourceRequest) returns (google.api.HttpBody); rpc
# UpdateResource(google.api.HttpBody) returns (google.protobuf.Empty); ` Example
# with streaming methods: service CaldavService ` rpc GetCalendar(stream google.
# api.HttpBody) returns (stream google.api.HttpBody); rpc UpdateCalendar(stream
# google.api.HttpBody) returns (stream google.api.HttpBody); ` Use of this type
# only changes how the request and response bodies are handled, all other
# features will continue to work unchanged.
class HttpBody
include Google::Apis::Core::Hashable
# The HTTP Content-Type header value specifying the content type of the body.
# Corresponds to the JSON property `contentType`
# @return [String]
attr_accessor :content_type
# The HTTP request/response body as raw binary.
# Corresponds to the JSON property `data`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :data
# Application specific response metadata. Must be set in the first response for
# streaming APIs.
# Corresponds to the JSON property `extensions`
# @return [Array<Hash<String,Object>>]
attr_accessor :extensions
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@content_type = args[:content_type] if args.key?(:content_type)
@data = args[:data] if args.key?(:data)
@extensions = args[:extensions] if args.key?(:extensions)
end
end
# Raw bytes representing consent artifact content.
class Image
include Google::Apis::Core::Hashable
# Input only. Points to a Cloud Storage URI containing the consent artifact
# content. The URI must be in the following format: `gs://`bucket_id`/`object_id`
# `. The Cloud Healthcare API service account must have the `roles/storage.
# objectViewer` Cloud IAM role for this Cloud Storage location. The consent
# artifact content at this URI is copied to a Cloud Storage location managed by
# the Cloud Healthcare API. Responses to fetching requests return the consent
# artifact content in raw_bytes.
# Corresponds to the JSON property `gcsUri`
# @return [String]
attr_accessor :gcs_uri
# Consent artifact content represented as a stream of bytes. This field is
# populated when returned in GetConsentArtifact response, but not included in
# CreateConsentArtifact and ListConsentArtifact response.
# Corresponds to the JSON property `rawBytes`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :raw_bytes
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@gcs_uri = args[:gcs_uri] if args.key?(:gcs_uri)
@raw_bytes = args[:raw_bytes] if args.key?(:raw_bytes)
end
end
# Image annotation.
class ImageAnnotation
include Google::Apis::Core::Hashable
# The list of polygons outlining the sensitive regions in the image.
# Corresponds to the JSON property `boundingPolys`
# @return [Array<Google::Apis::HealthcareV1beta1::BoundingPoly>]
attr_accessor :bounding_polys
# 0-based index of the image frame. For example, an image frame in a DICOM
# instance.
# Corresponds to the JSON property `frameIndex`
# @return [Fixnum]
attr_accessor :frame_index
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@bounding_polys = args[:bounding_polys] if args.key?(:bounding_polys)
@frame_index = args[:frame_index] if args.key?(:frame_index)
end
end
# Specifies how to handle de-identification of image pixels.
class ImageConfig
include Google::Apis::Core::Hashable
# Determines how to redact text from image.
# Corresponds to the JSON property `textRedactionMode`
# @return [String]
attr_accessor :text_redaction_mode
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@text_redaction_mode = args[:text_redaction_mode] if args.key?(:text_redaction_mode)
end
end
# Request to import Annotations. The Annotations to be imported must have client-
# supplied resource names which indicate the annotation resource. The import
# operation is not atomic. If a failure occurs, any annotations already imported
# are not removed.
class ImportAnnotationsRequest
include Google::Apis::Core::Hashable
# Specifies the configuration for importing data from Cloud Storage.
# Corresponds to the JSON property `gcsSource`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1AnnotationGcsSource]
attr_accessor :gcs_source
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@gcs_source = args[:gcs_source] if args.key?(:gcs_source)
end
end
# Final response of importing Annotations in successful case. This structure is
# included in the response. It is only included when the operation finishes.
class ImportAnnotationsResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Imports data into the specified DICOM store. Returns an error if any of the
# files to import are not DICOM files. This API accepts duplicate DICOM
# instances by ignoring the newly-pushed instance. It does not overwrite.
class ImportDicomDataRequest
include Google::Apis::Core::Hashable
# Specifies the configuration for importing data from Cloud Storage.
# Corresponds to the JSON property `gcsSource`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1DicomGcsSource]
attr_accessor :gcs_source
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@gcs_source = args[:gcs_source] if args.key?(:gcs_source)
end
end
# Returns additional information in regards to a completed DICOM store import.
class ImportDicomDataResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Request to import messages.
class ImportMessagesRequest
include Google::Apis::Core::Hashable
# Specifies the configuration for importing data from Cloud Storage.
# Corresponds to the JSON property `gcsSource`
# @return [Google::Apis::HealthcareV1beta1::GcsSource]
attr_accessor :gcs_source
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@gcs_source = args[:gcs_source] if args.key?(:gcs_source)
end
end
# Final response of importing messages. This structure is included in the
# response to describe the detailed outcome. It is only included when the
# operation finishes successfully.
class ImportMessagesResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Request to import resources.
class ImportResourcesRequest
include Google::Apis::Core::Hashable
# The content structure in the source location. If not specified, the server
# treats the input source files as BUNDLE.
# Corresponds to the JSON property `contentStructure`
# @return [String]
attr_accessor :content_structure
# Specifies the configuration for importing data from Cloud Storage.
# Corresponds to the JSON property `gcsSource`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1FhirGcsSource]
attr_accessor :gcs_source
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@content_structure = args[:content_structure] if args.key?(:content_structure)
@gcs_source = args[:gcs_source] if args.key?(:gcs_source)
end
end
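# A minimal construction sketch (not part of the generated classes): an import
# request that reads FHIR bundles from Cloud Storage. `BUNDLE` is the documented
# default content structure and is spelled out here only for clarity; the bucket
# and path are placeholders.
#
#   import_request = ImportResourcesRequest.new(
#     content_structure: 'BUNDLE',
#     gcs_source: GoogleCloudHealthcareV1beta1FhirGcsSource.new(
#       uri: 'gs://example-bucket/fhir-import/**.ndjson'
#     )
#   )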
# Specifies how to use infoTypes for evaluation. For example, a user might only
# want to evaluate `PERSON`, `LOCATION`, and `AGE`.
class InfoTypeConfig
include Google::Apis::Core::Hashable
# List of infoTypes to be filtered.
# Corresponds to the JSON property `evaluateList`
# @return [Google::Apis::HealthcareV1beta1::FilterList]
attr_accessor :evaluate_list
# List of infoTypes to be filtered.
# Corresponds to the JSON property `ignoreList`
# @return [Google::Apis::HealthcareV1beta1::FilterList]
attr_accessor :ignore_list
# If `TRUE`, infoTypes described by `filter` are used for evaluation. Otherwise,
# infoTypes are not considered for evaluation. For example: * Annotated text: "
# Toronto is a location" * Finding 1: ``"infoType": "PERSON", "quote": "Toronto",
# "start": 0, "end": 7`` * Finding 2: ``"infoType": "CITY", "quote": "Toronto",
# "start": 0, "end": 7`` * Finding 3: ```` * Ground truth: ``"infoType": "
# LOCATION", "quote": "Toronto", "start": 0, "end": 7`` When `strict_matching`
# is `TRUE`: * Finding 1: 1 false positive * Finding 2: 1 false positive *
# Finding 3: 1 false negative When `strict_matching` is `FALSE`: * Finding 1: 1
# true positive * Finding 2: 1 true positive * Finding 3: 1 false negative
# Corresponds to the JSON property `strictMatching`
# @return [Boolean]
attr_accessor :strict_matching
alias_method :strict_matching?, :strict_matching
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@evaluate_list = args[:evaluate_list] if args.key?(:evaluate_list)
@ignore_list = args[:ignore_list] if args.key?(:ignore_list)
@strict_matching = args[:strict_matching] if args.key?(:strict_matching)
end
end
# A transformation to apply to text that is identified as a specific info_type.
class InfoTypeTransformation
include Google::Apis::Core::Hashable
# Mask a string by replacing its characters with a fixed character.
# Corresponds to the JSON property `characterMaskConfig`
# @return [Google::Apis::HealthcareV1beta1::CharacterMaskConfig]
attr_accessor :character_mask_config
# Pseudonymization method that generates surrogates via cryptographic hashing.
# Uses SHA-256. Outputs a base64-encoded representation of the hashed output.
# For example, `L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=`.
# Corresponds to the JSON property `cryptoHashConfig`
# @return [Google::Apis::HealthcareV1beta1::CryptoHashConfig]
attr_accessor :crypto_hash_config
# Shift a date forward or backward in time by a random amount which is
# consistent for a given patient and crypto key combination.
# Corresponds to the JSON property `dateShiftConfig`
# @return [Google::Apis::HealthcareV1beta1::DateShiftConfig]
attr_accessor :date_shift_config
# InfoTypes to apply this transformation to. If this is not specified, this
# transformation becomes the default transformation, and is used for any
# info_type that is not specified in another transformation.
# Corresponds to the JSON property `infoTypes`
# @return [Array<String>]
attr_accessor :info_types
# Defines how to redact sensitive values. The default behaviour is to erase the
# value. For example, "My name is Jane." becomes "My name is ."
# Corresponds to the JSON property `redactConfig`
# @return [Google::Apis::HealthcareV1beta1::RedactConfig]
attr_accessor :redact_config
# When using the INSPECT_AND_TRANSFORM action, each match is replaced with the
# name of the info_type. For example, "My name is Jane" becomes "My name is [
# PERSON_NAME]." The TRANSFORM action is equivalent to redacting.
# Corresponds to the JSON property `replaceWithInfoTypeConfig`
# @return [Google::Apis::HealthcareV1beta1::ReplaceWithInfoTypeConfig]
attr_accessor :replace_with_info_type_config
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@character_mask_config = args[:character_mask_config] if args.key?(:character_mask_config)
@crypto_hash_config = args[:crypto_hash_config] if args.key?(:crypto_hash_config)
@date_shift_config = args[:date_shift_config] if args.key?(:date_shift_config)
@info_types = args[:info_types] if args.key?(:info_types)
@redact_config = args[:redact_config] if args.key?(:redact_config)
@replace_with_info_type_config = args[:replace_with_info_type_config] if args.key?(:replace_with_info_type_config)
end
end
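# A minimal construction sketch (not part of the generated classes): replacing
# every detected person name with its info_type label, matching the
# "[PERSON_NAME]" example in the replace_with_info_type_config documentation
# above. ReplaceWithInfoTypeConfig carries no fields in this sketch.
#
#   name_transformation = InfoTypeTransformation.new(
#     info_types: ['PERSON_NAME'],
#     replace_with_info_type_config: ReplaceWithInfoTypeConfig.new
#   )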
# Ingests a message into the specified HL7v2 store.
class IngestMessageRequest
include Google::Apis::Core::Hashable
# A complete HL7v2 message. See [Introduction to HL7 Standards] (https://www.hl7.
# org/implement/standards/index.cfm?ref=common) for details on the standard.
# Corresponds to the JSON property `message`
# @return [Google::Apis::HealthcareV1beta1::Message]
attr_accessor :message
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@message = args[:message] if args.key?(:message)
end
end
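# A minimal construction sketch (not part of the generated classes): wrapping a
# raw HL7v2 message for ingestion. The MSH segment shown is a truncated
# placeholder; per the NOTE on the Message `data` property later in this file,
# the client library base64-encodes the raw bytes automatically.
#
#   request = IngestMessageRequest.new(
#     message: Message.new(
#       data: "MSH|^~\\&|SENDER|FACILITY|RECEIVER|FACILITY|20240101000000||ADT^A01|1|P|2.3\r"
#     )
#   )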
# Acknowledges that a message has been ingested into the specified HL7v2 store.
class IngestMessageResponse
include Google::Apis::Core::Hashable
# HL7v2 ACK message.
# Corresponds to the JSON property `hl7Ack`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :hl7_ack
# A complete HL7v2 message. See [Introduction to HL7 Standards] (https://www.hl7.
# org/implement/standards/index.cfm?ref=common) for details on the standard.
# Corresponds to the JSON property `message`
# @return [Google::Apis::HealthcareV1beta1::Message]
attr_accessor :message
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@hl7_ack = args[:hl7_ack] if args.key?(:hl7_ack)
@message = args[:message] if args.key?(:message)
end
end
# EntityMentions can be linked to multiple entities. Using a LinkedEntity
# message lets us add other fields, e.g. confidence.
class LinkedEntity
include Google::Apis::Core::Hashable
# entity_id is a concept unique identifier. These are prefixed by a string that
# identifies the entity coding system, followed by the unique identifier within
# that system. For example, "UMLS/C0000970". This also supports ad hoc entities,
# which are formed by normalizing entity mention content.
# Corresponds to the JSON property `entityId`
# @return [String]
attr_accessor :entity_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@entity_id = args[:entity_id] if args.key?(:entity_id)
end
end
# Lists the Annotation stores in the given dataset.
class ListAnnotationStoresResponse
include Google::Apis::Core::Hashable
# The returned Annotation stores. There will be no more Annotation stores than
# the value of page_size in the request.
# Corresponds to the JSON property `annotationStores`
# @return [Array<Google::Apis::HealthcareV1beta1::AnnotationStore>]
attr_accessor :annotation_stores
# Token to retrieve the next page of results or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@annotation_stores = args[:annotation_stores] if args.key?(:annotation_stores)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# Lists the Annotations in the specified Annotation store.
class ListAnnotationsResponse
include Google::Apis::Core::Hashable
# The returned Annotations. There will be no more values than the value of
# page_size in the request. See `AnnotationView` in the request for populated
# fields.
# Corresponds to the JSON property `annotations`
# @return [Array<Google::Apis::HealthcareV1beta1::Annotation>]
attr_accessor :annotations
# Token to retrieve the next page of results or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@annotations = args[:annotations] if args.key?(:annotations)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
#
class ListAttributeDefinitionsResponse
include Google::Apis::Core::Hashable
# The returned Attribute definitions. The maximum number of attributes returned
# is determined by the value of page_size in the ListAttributeDefinitionsRequest.
# Corresponds to the JSON property `attributeDefinitions`
# @return [Array<Google::Apis::HealthcareV1beta1::AttributeDefinition>]
attr_accessor :attribute_definitions
# Token to retrieve the next page of results, or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@attribute_definitions = args[:attribute_definitions] if args.key?(:attribute_definitions)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
#
class ListConsentArtifactsResponse
include Google::Apis::Core::Hashable
# The returned Consent artifacts. The maximum number of artifacts returned is
# determined by the value of page_size in the ListConsentArtifactsRequest.
# Corresponds to the JSON property `consentArtifacts`
# @return [Array<Google::Apis::HealthcareV1beta1::ConsentArtifact>]
attr_accessor :consent_artifacts
# Token to retrieve the next page of results, or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_artifacts = args[:consent_artifacts] if args.key?(:consent_artifacts)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
#
class ListConsentRevisionsResponse
include Google::Apis::Core::Hashable
# The returned Consent revisions. The maximum number of revisions returned is
# determined by the value of `page_size` in the ListConsentRevisionsRequest.
# Corresponds to the JSON property `consents`
# @return [Array<Google::Apis::HealthcareV1beta1::Consent>]
attr_accessor :consents
# Token to retrieve the next page of results, or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consents = args[:consents] if args.key?(:consents)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
#
class ListConsentStoresResponse
include Google::Apis::Core::Hashable
# The returned consent stores. The maximum number of stores returned is
# determined by the value of page_size in the ListConsentStoresRequest.
# Corresponds to the JSON property `consentStores`
# @return [Array<Google::Apis::HealthcareV1beta1::ConsentStore>]
attr_accessor :consent_stores
# Token to retrieve the next page of results, or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_stores = args[:consent_stores] if args.key?(:consent_stores)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
#
class ListConsentsResponse
include Google::Apis::Core::Hashable
# The returned Consents. The maximum number of Consents returned is determined
# by the value of page_size in the ListConsentsRequest.
# Corresponds to the JSON property `consents`
# @return [Array<Google::Apis::HealthcareV1beta1::Consent>]
attr_accessor :consents
# Token to retrieve the next page of results, or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consents = args[:consents] if args.key?(:consents)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# Lists the available datasets.
class ListDatasetsResponse
include Google::Apis::Core::Hashable
# The first page of datasets.
# Corresponds to the JSON property `datasets`
# @return [Array<Google::Apis::HealthcareV1beta1::Dataset>]
attr_accessor :datasets
# Token to retrieve the next page of results, or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@datasets = args[:datasets] if args.key?(:datasets)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# Lists the DICOM stores in the given dataset.
class ListDicomStoresResponse
include Google::Apis::Core::Hashable
# The returned DICOM stores. There will be no more DICOM stores than the value
# of page_size in the request.
# Corresponds to the JSON property `dicomStores`
# @return [Array<Google::Apis::HealthcareV1beta1::DicomStore>]
attr_accessor :dicom_stores
# Token to retrieve the next page of results or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@dicom_stores = args[:dicom_stores] if args.key?(:dicom_stores)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# Lists the FHIR stores in the given dataset.
class ListFhirStoresResponse
include Google::Apis::Core::Hashable
# The returned FHIR stores. There will be no more FHIR stores than the value of
# page_size in the request.
# Corresponds to the JSON property `fhirStores`
# @return [Array<Google::Apis::HealthcareV1beta1::FhirStore>]
attr_accessor :fhir_stores
# Token to retrieve the next page of results or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@fhir_stores = args[:fhir_stores] if args.key?(:fhir_stores)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# Lists the HL7v2 stores in the given dataset.
class ListHl7V2StoresResponse
include Google::Apis::Core::Hashable
# The returned HL7v2 stores. There will be no more HL7v2 stores than the value
# of page_size in the request.
# Corresponds to the JSON property `hl7V2Stores`
# @return [Array<Google::Apis::HealthcareV1beta1::Hl7V2Store>]
attr_accessor :hl7_v2_stores
# Token to retrieve the next page of results or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@hl7_v2_stores = args[:hl7_v2_stores] if args.key?(:hl7_v2_stores)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# The response message for Locations.ListLocations.
class ListLocationsResponse
include Google::Apis::Core::Hashable
# A list of locations that matches the specified filter in the request.
# Corresponds to the JSON property `locations`
# @return [Array<Google::Apis::HealthcareV1beta1::Location>]
attr_accessor :locations
# The standard List next-page token.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@locations = args[:locations] if args.key?(:locations)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# Lists the messages in the specified HL7v2 store.
class ListMessagesResponse
include Google::Apis::Core::Hashable
# The returned Messages. There will be no more Messages than the value of
# page_size in the request. See view for populated fields.
# Corresponds to the JSON property `hl7V2Messages`
# @return [Array<Google::Apis::HealthcareV1beta1::Message>]
attr_accessor :hl7_v2_messages
# Token to retrieve the next page of results or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@hl7_v2_messages = args[:hl7_v2_messages] if args.key?(:hl7_v2_messages)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
end
end
# The response message for Operations.ListOperations.
class ListOperationsResponse
include Google::Apis::Core::Hashable
# The standard List next-page token.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
# A list of operations that matches the specified filter in the request.
# Corresponds to the JSON property `operations`
# @return [Array<Google::Apis::HealthcareV1beta1::Operation>]
attr_accessor :operations
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
@operations = args[:operations] if args.key?(:operations)
end
end
#
class ListUserDataMappingsResponse
include Google::Apis::Core::Hashable
# Token to retrieve the next page of results, or empty if there are no more
# results in the list.
# Corresponds to the JSON property `nextPageToken`
# @return [String]
attr_accessor :next_page_token
# The returned User data mappings. The maximum number of User data mappings
# returned is determined by the value of page_size in the
# ListUserDataMappingsRequest.
# Corresponds to the JSON property `userDataMappings`
# @return [Array<Google::Apis::HealthcareV1beta1::UserDataMapping>]
attr_accessor :user_data_mappings
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
@user_data_mappings = args[:user_data_mappings] if args.key?(:user_data_mappings)
end
end
# A resource that represents a Google Cloud Platform location.
class Location
include Google::Apis::Core::Hashable
# The friendly name for this location, typically a nearby city name. For example,
# "Tokyo".
# Corresponds to the JSON property `displayName`
# @return [String]
attr_accessor :display_name
# Cross-service attributes for the location. For example `"cloud.googleapis.com/
# region": "us-east1"`
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# The canonical id for this location. For example: `"us-east1"`.
# Corresponds to the JSON property `locationId`
# @return [String]
attr_accessor :location_id
# Service-specific metadata. For example the available capacity at the given
# location.
# Corresponds to the JSON property `metadata`
# @return [Hash<String,Object>]
attr_accessor :metadata
# Resource name for the location, which may vary between implementations. For
# example: `"projects/example-project/locations/us-east1"`
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@display_name = args[:display_name] if args.key?(:display_name)
@labels = args[:labels] if args.key?(:labels)
@location_id = args[:location_id] if args.key?(:location_id)
@metadata = args[:metadata] if args.key?(:metadata)
@name = args[:name] if args.key?(:name)
end
end
# A complete HL7v2 message. See [Introduction to HL7 Standards] (https://www.hl7.
# org/implement/standards/index.cfm?ref=common) for details on the standard.
class Message
include Google::Apis::Core::Hashable
# Output only. The datetime when the message was created. Set by the server.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# Raw message bytes.
# Corresponds to the JSON property `data`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :data
# User-supplied key-value pairs used to organize HL7v2 stores. Label keys must
# be between 1 and 63 characters long, have a UTF-8 encoding of maximum 128
# bytes, and must conform to the following PCRE regular expression: \p`Ll`\p`Lo``
# 0,62` Label values are optional, must be between 1 and 63 characters long,
# have a UTF-8 encoding of maximum 128 bytes, and must conform to the following
# PCRE regular expression: [\p`Ll`\p`Lo`\p`N`_-]`0,63` No more than 64 labels
# can be associated with a given store.
# Corresponds to the JSON property `labels`
# @return [Hash<String,String>]
attr_accessor :labels
# The message type for this message. MSH-9.1.
# Corresponds to the JSON property `messageType`
# @return [String]
attr_accessor :message_type
# Resource name of the Message, of the form `projects/`project_id`/datasets/`
# dataset_id`/hl7V2Stores/`hl7_v2_store_id`/messages/`message_id``. Assigned by
# the server.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# The content of an HL7v2 message in a structured format.
# Corresponds to the JSON property `parsedData`
# @return [Google::Apis::HealthcareV1beta1::ParsedData]
attr_accessor :parsed_data
# All patient IDs listed in the PID-2, PID-3, and PID-4 segments of this message.
# Corresponds to the JSON property `patientIds`
# @return [Array<Google::Apis::HealthcareV1beta1::PatientId>]
attr_accessor :patient_ids
# The content of an HL7v2 message in a structured format as specified by a
# schema.
# Corresponds to the JSON property `schematizedData`
# @return [Google::Apis::HealthcareV1beta1::SchematizedData]
attr_accessor :schematized_data
# The hospital that this message came from. MSH-4.
# Corresponds to the JSON property `sendFacility`
# @return [String]
attr_accessor :send_facility
# The datetime the sending application sent this message. MSH-7.
# Corresponds to the JSON property `sendTime`
# @return [String]
attr_accessor :send_time
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@create_time = args[:create_time] if args.key?(:create_time)
@data = args[:data] if args.key?(:data)
@labels = args[:labels] if args.key?(:labels)
@message_type = args[:message_type] if args.key?(:message_type)
@name = args[:name] if args.key?(:name)
@parsed_data = args[:parsed_data] if args.key?(:parsed_data)
@patient_ids = args[:patient_ids] if args.key?(:patient_ids)
@schematized_data = args[:schematized_data] if args.key?(:schematized_data)
@send_facility = args[:send_facility] if args.key?(:send_facility)
@send_time = args[:send_time] if args.key?(:send_time)
end
end
# Specifies where to send notifications upon changes to a data store.
class NotificationConfig
include Google::Apis::Core::Hashable
# The [Pub/Sub](https://cloud.google.com/pubsub/docs/) topic that notifications
# of changes are published on. Supplied by the client. PubsubMessage.Data
# contains the resource name. PubsubMessage.MessageId is the ID of this message.
# It is guaranteed to be unique within the topic. PubsubMessage.PublishTime is
# the time at which the message was published. Notifications are only sent if
# the topic is non-empty. [Topic names](https://cloud.google.com/pubsub/docs/
# overview#names) must be scoped to a project. Cloud Healthcare API service
# account must have publisher permissions on the given Pub/Sub topic. Not having
# adequate permissions causes the calls that send notifications to fail. If a
# notification can't be published to Pub/Sub, errors are logged to Cloud Logging
# (see [Viewing error logs in Cloud Logging](https://cloud.google.com/healthcare/
# docs/how-tos/logging)). If the number of errors exceeds a certain rate, some
# aren't submitted. Note that not all operations trigger notifications, see [
# Configuring Pub/Sub notifications](https://cloud.google.com/healthcare/docs/
# how-tos/pubsub) for specific details.
# Corresponds to the JSON property `pubsubTopic`
# @return [String]
attr_accessor :pubsub_topic
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@pubsub_topic = args[:pubsub_topic] if args.key?(:pubsub_topic)
end
end
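# A minimal construction sketch (not part of the generated classes): routing
# store change notifications to a client-owned Pub/Sub topic on which the Cloud
# Healthcare API service account has publisher permissions. The project and
# topic names are placeholders.
#
#   notification_config = NotificationConfig.new(
#     pubsub_topic: 'projects/example-project/topics/healthcare-changes'
#   )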
# This resource represents a long-running operation that is the result of a
# network API call.
class Operation
include Google::Apis::Core::Hashable
# If the value is `false`, it means the operation is still in progress. If `true`
# , the operation is completed, and either `error` or `response` is available.
# Corresponds to the JSON property `done`
# @return [Boolean]
attr_accessor :done
alias_method :done?, :done
# The `Status` type defines a logical error model that is suitable for different
# programming environments, including REST APIs and RPC APIs. It is used by [
# gRPC](https://github.com/grpc). Each `Status` message contains three pieces of
# data: error code, error message, and error details. You can find out more
# about this error model and how to work with it in the [API Design Guide](https:
# //cloud.google.com/apis/design/errors).
# Corresponds to the JSON property `error`
# @return [Google::Apis::HealthcareV1beta1::Status]
attr_accessor :error
# Service-specific metadata associated with the operation. It typically contains
# progress information and common metadata such as create time. Some services
# might not provide such metadata. Any method that returns a long-running
# operation should document the metadata type, if any.
# Corresponds to the JSON property `metadata`
# @return [Hash<String,Object>]
attr_accessor :metadata
# The server-assigned name, which is only unique within the same service that
# originally returns it. If you use the default HTTP mapping, the `name` should
# be a resource name ending with `operations/`unique_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# The normal response of the operation in case of success. If the original
# method returns no data on success, such as `Delete`, the response is `google.
# protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`,
# the response should be the resource. For other methods, the response should
# have the type `XxxResponse`, where `Xxx` is the original method name. For
# example, if the original method name is `TakeSnapshot()`, the inferred
# response type is `TakeSnapshotResponse`.
# Corresponds to the JSON property `response`
# @return [Hash<String,Object>]
attr_accessor :response
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@done = args[:done] if args.key?(:done)
@error = args[:error] if args.key?(:error)
@metadata = args[:metadata] if args.key?(:metadata)
@name = args[:name] if args.key?(:name)
@response = args[:response] if args.key?(:response)
end
end
# OperationMetadata provides information about the operation execution. Returned
# in the long-running operation's metadata field.
class OperationMetadata
include Google::Apis::Core::Hashable
# The name of the API method that initiated the operation.
# Corresponds to the JSON property `apiMethodName`
# @return [String]
attr_accessor :api_method_name
# Specifies if cancellation was requested for the operation.
# Corresponds to the JSON property `cancelRequested`
# @return [Boolean]
attr_accessor :cancel_requested
alias_method :cancel_requested?, :cancel_requested
# ProgressCounter provides counters to describe an operation's progress.
# Corresponds to the JSON property `counter`
# @return [Google::Apis::HealthcareV1beta1::ProgressCounter]
attr_accessor :counter
# The time at which the operation was created by the API.
# Corresponds to the JSON property `createTime`
# @return [String]
attr_accessor :create_time
# The time at which execution was completed.
# Corresponds to the JSON property `endTime`
# @return [String]
attr_accessor :end_time
# A link to audit and error logs in the log viewer. Error logs are generated
# only by some operations, listed at [Viewing error logs in Cloud Logging](https:
# //cloud.google.com/healthcare/docs/how-tos/logging).
# Corresponds to the JSON property `logsUrl`
# @return [String]
attr_accessor :logs_url
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@api_method_name = args[:api_method_name] if args.key?(:api_method_name)
@cancel_requested = args[:cancel_requested] if args.key?(:cancel_requested)
@counter = args[:counter] if args.key?(:counter)
@create_time = args[:create_time] if args.key?(:create_time)
@end_time = args[:end_time] if args.key?(:end_time)
@logs_url = args[:logs_url] if args.key?(:logs_url)
end
end
# The content of an HL7v2 message in a structured format.
class ParsedData
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `segments`
# @return [Array<Google::Apis::HealthcareV1beta1::Segment>]
attr_accessor :segments
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@segments = args[:segments] if args.key?(:segments)
end
end
# The configuration for the parser. It determines how the server parses the
# messages.
class ParserConfig
include Google::Apis::Core::Hashable
# Determines whether messages with no header are allowed.
# Corresponds to the JSON property `allowNullHeader`
# @return [Boolean]
attr_accessor :allow_null_header
alias_method :allow_null_header?, :allow_null_header
# A schema package contains a set of schemas and type definitions.
# Corresponds to the JSON property `schema`
# @return [Google::Apis::HealthcareV1beta1::SchemaPackage]
attr_accessor :schema
# Byte(s) to use as the segment terminator. If this is unset, '\r' is used as
# segment terminator, matching the HL7 version 2 specification.
# Corresponds to the JSON property `segmentTerminator`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :segment_terminator
# Immutable. Determines the version of the unschematized parser to be used when `
# schema` is not given. This field is immutable after store creation.
# Corresponds to the JSON property `version`
# @return [String]
attr_accessor :version
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@allow_null_header = args[:allow_null_header] if args.key?(:allow_null_header)
@schema = args[:schema] if args.key?(:schema)
@segment_terminator = args[:segment_terminator] if args.key?(:segment_terminator)
@version = args[:version] if args.key?(:version)
end
end
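      # A minimal usage sketch (not part of the generated client): building a
      # ParserConfig for an HL7v2 store. The values are illustrative only; per the
      # note above, the segment terminator is base64-handled by the client library.
      #
      #   parser_config = Google::Apis::HealthcareV1beta1::ParserConfig.new(
      #     allow_null_header: false,
      #     segment_terminator: "\r",
      #     version: 'V2'
      #   )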
# A patient identifier and associated type.
class PatientId
include Google::Apis::Core::Hashable
# ID type. For example, MRN or NHS.
# Corresponds to the JSON property `type`
# @return [String]
attr_accessor :type
# The patient's unique identifier.
# Corresponds to the JSON property `value`
# @return [String]
attr_accessor :value
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@type = args[:type] if args.key?(:type)
@value = args[:value] if args.key?(:value)
end
end
# An Identity and Access Management (IAM) policy, which specifies access
# controls for Google Cloud resources. A `Policy` is a collection of `bindings`.
# A `binding` binds one or more `members` to a single `role`. Members can be
# user accounts, service accounts, Google groups, and domains (such as G Suite).
# A `role` is a named list of permissions; each `role` can be an IAM predefined
# role or a user-created custom role. For some types of Google Cloud resources,
# a `binding` can also specify a `condition`, which is a logical expression that
# allows access to a resource only if the expression evaluates to `true`. A
# condition can add constraints based on attributes of the request, the resource,
# or both. To learn which resources support conditions in their IAM policies,
# see the [IAM documentation](https://cloud.google.com/iam/help/conditions/
# resource-policies). **JSON example:** ` "bindings": [ ` "role": "roles/
# resourcemanager.organizationAdmin", "members": [ "user:[email protected]", "
# group:[email protected]", "domain:google.com", "serviceAccount:my-project-id@
# appspot.gserviceaccount.com" ] `, ` "role": "roles/resourcemanager.
# organizationViewer", "members": [ "user:[email protected]" ], "condition": ` "
# title": "expirable access", "description": "Does not grant access after Sep
# 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", `
# ` ], "etag": "BwWWja0YfJA=", "version": 3 ` **YAML example:** bindings: -
# members: - user:[email protected] - group:[email protected] - domain:google.
# com - serviceAccount:[email protected] role: roles/
# resourcemanager.organizationAdmin - members: - user:[email protected] role:
# roles/resourcemanager.organizationViewer condition: title: expirable access
# description: Does not grant access after Sep 2020 expression: request.time <
# timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 For a
# description of IAM and its features, see the [IAM documentation](https://cloud.
# google.com/iam/docs/).
class Policy
include Google::Apis::Core::Hashable
# Specifies cloud audit logging configuration for this policy.
# Corresponds to the JSON property `auditConfigs`
# @return [Array<Google::Apis::HealthcareV1beta1::AuditConfig>]
attr_accessor :audit_configs
# Associates a list of `members` to a `role`. Optionally, may specify a `
# condition` that determines how and when the `bindings` are applied. Each of
# the `bindings` must contain at least one member.
# Corresponds to the JSON property `bindings`
# @return [Array<Google::Apis::HealthcareV1beta1::Binding>]
attr_accessor :bindings
# `etag` is used for optimistic concurrency control as a way to help prevent
# simultaneous updates of a policy from overwriting each other. It is strongly
# suggested that systems make use of the `etag` in the read-modify-write cycle
# to perform policy updates in order to avoid race conditions: An `etag` is
# returned in the response to `getIamPolicy`, and systems are expected to put
# that etag in the request to `setIamPolicy` to ensure that their change will be
# applied to the same version of the policy. **Important:** If you use IAM
# Conditions, you must include the `etag` field whenever you call `setIamPolicy`.
# If you omit this field, then IAM allows you to overwrite a version `3` policy
# with a version `1` policy, and all of the conditions in the version `3` policy
# are lost.
# Corresponds to the JSON property `etag`
# NOTE: Values are automatically base64 encoded/decoded in the client library.
# @return [String]
attr_accessor :etag
# Specifies the format of the policy. Valid values are `0`, `1`, and `3`.
# Requests that specify an invalid value are rejected. Any operation that
# affects conditional role bindings must specify version `3`. This requirement
# applies to the following operations: * Getting a policy that includes a
# conditional role binding * Adding a conditional role binding to a policy *
# Changing a conditional role binding in a policy * Removing any role binding,
# with or without a condition, from a policy that includes conditions **
# Important:** If you use IAM Conditions, you must include the `etag` field
# whenever you call `setIamPolicy`. If you omit this field, then IAM allows you
# to overwrite a version `3` policy with a version `1` policy, and all of the
# conditions in the version `3` policy are lost. If a policy does not include
# any conditions, operations on that policy may specify any valid version or
# leave the field unset. To learn which resources support conditions in their
# IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/
# conditions/resource-policies).
# Corresponds to the JSON property `version`
# @return [Fixnum]
attr_accessor :version
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@audit_configs = args[:audit_configs] if args.key?(:audit_configs)
@bindings = args[:bindings] if args.key?(:bindings)
@etag = args[:etag] if args.key?(:etag)
@version = args[:version] if args.key?(:version)
end
end
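      # A minimal usage sketch (not part of the generated client): a Policy assembled
      # for a `setIamPolicy` request. The role and member strings are illustrative.
      #
      #   binding = Google::Apis::HealthcareV1beta1::Binding.new(
      #     role: 'roles/viewer',
      #     members: ['user:[email protected]']
      #   )
      #   policy = Google::Apis::HealthcareV1beta1::Policy.new(bindings: [binding], version: 3)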
# ProgressCounter provides counters to describe an operation's progress.
class ProgressCounter
include Google::Apis::Core::Hashable
# The number of units that failed in the operation.
# Corresponds to the JSON property `failure`
# @return [Fixnum]
attr_accessor :failure
# The number of units that are pending in the operation.
# Corresponds to the JSON property `pending`
# @return [Fixnum]
attr_accessor :pending
# The number of units that succeeded in the operation.
# Corresponds to the JSON property `success`
# @return [Fixnum]
attr_accessor :success
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@failure = args[:failure] if args.key?(:failure)
@pending = args[:pending] if args.key?(:pending)
@success = args[:success] if args.key?(:success)
end
end
# Queries all data_ids that are consented for a given use in the given consent
# store and writes them to a specified destination. The returned Operation
# includes a progress counter for the number of User data mappings processed.
# Errors are logged to Cloud Logging (see [Viewing error logs in Cloud Logging] (
# https://cloud.google.com/healthcare/docs/how-tos/logging) and [
# QueryAccessibleData] for a sample log entry).
class QueryAccessibleDataRequest
include Google::Apis::Core::Hashable
# The Cloud Storage location for export.
# Corresponds to the JSON property `gcsDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1ConsentGcsDestination]
attr_accessor :gcs_destination
# The values of request attributes associated with this access request.
# Corresponds to the JSON property `requestAttributes`
# @return [Hash<String,String>]
attr_accessor :request_attributes
# Optional. The values of resource attributes associated with the type of
# resources being requested. If no values are specified, then all resource types
# are included in the output.
# Corresponds to the JSON property `resourceAttributes`
# @return [Hash<String,String>]
attr_accessor :resource_attributes
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@gcs_destination = args[:gcs_destination] if args.key?(:gcs_destination)
@request_attributes = args[:request_attributes] if args.key?(:request_attributes)
@resource_attributes = args[:resource_attributes] if args.key?(:resource_attributes)
end
end
# Response for successful QueryAccessibleData operations. This structure is
# included in the response upon operation completion.
class QueryAccessibleDataResponse
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Define how to redact sensitive values. Default behaviour is erase. For example,
# "My name is Jane." becomes "My name is ."
class RedactConfig
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Rejects the latest revision of the specified Consent by committing a new
# revision with `state` updated to `REJECTED`. If the latest revision of the
# given Consent is in the `REJECTED` state, no new revision is committed.
class RejectConsentRequest
include Google::Apis::Core::Hashable
# Optional. The resource name of the Consent artifact that contains
# documentation of the user's rejection of the draft Consent, of the form `
# projects/`project_id`/locations/`location_id`/datasets/`dataset_id`/
# consentStores/`consent_store_id`/consentArtifacts/`consent_artifact_id``. If
# the draft Consent had a Consent artifact, this Consent artifact overwrites it.
# Corresponds to the JSON property `consentArtifact`
# @return [String]
attr_accessor :consent_artifact
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_artifact = args[:consent_artifact] if args.key?(:consent_artifact)
end
end
# When using the INSPECT_AND_TRANSFORM action, each match is replaced with the
# name of the info_type. For example, "My name is Jane" becomes "My name is [
# PERSON_NAME]." The TRANSFORM action is equivalent to redacting.
class ReplaceWithInfoTypeConfig
include Google::Apis::Core::Hashable
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
end
end
# Resource level annotation.
class ResourceAnnotation
include Google::Apis::Core::Hashable
# A description of the annotation record.
# Corresponds to the JSON property `label`
# @return [String]
attr_accessor :label
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@label = args[:label] if args.key?(:label)
end
end
# A list of FHIR resources.
class Resources
include Google::Apis::Core::Hashable
# List of resources IDs. For example, "Patient/1234".
# Corresponds to the JSON property `resources`
# @return [Array<String>]
attr_accessor :resources
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@resources = args[:resources] if args.key?(:resources)
end
end
# The consent evaluation result for a single `data_id`.
class Result
include Google::Apis::Core::Hashable
# The resource names of all evaluated Consents mapped to their evaluation.
# Corresponds to the JSON property `consentDetails`
# @return [Hash<String,Google::Apis::HealthcareV1beta1::ConsentEvaluation>]
attr_accessor :consent_details
# Whether the resource is consented for the given use.
# Corresponds to the JSON property `consented`
# @return [Boolean]
attr_accessor :consented
alias_method :consented?, :consented
# The unique identifier of the evaluated resource.
# Corresponds to the JSON property `dataId`
# @return [String]
attr_accessor :data_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_details = args[:consent_details] if args.key?(:consent_details)
@consented = args[:consented] if args.key?(:consented)
@data_id = args[:data_id] if args.key?(:data_id)
end
end
# Revokes the latest revision of the specified Consent by committing a new
# revision with `state` updated to `REVOKED`. If the latest revision of the
# given Consent is in the `REVOKED` state, no new revision is committed.
class RevokeConsentRequest
include Google::Apis::Core::Hashable
# Optional. The resource name of the Consent artifact that contains proof of the
# user's revocation of the Consent, of the form `projects/`project_id`/locations/
# `location_id`/datasets/`dataset_id`/consentStores/`consent_store_id`/
# consentArtifacts/`consent_artifact_id``.
# Corresponds to the JSON property `consentArtifact`
# @return [String]
attr_accessor :consent_artifact
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@consent_artifact = args[:consent_artifact] if args.key?(:consent_artifact)
end
end
# Configuration for the FHIR BigQuery schema. Determines how the server
# generates the schema.
class SchemaConfig
include Google::Apis::Core::Hashable
# The depth for all recursive structures in the output analytics schema. For
# example, `concept` in the CodeSystem resource is a recursive structure; when
# the depth is 2, the CodeSystem table will have a column called `concept.
# concept` but not `concept.concept.concept`. If not specified or set to 0, the
# server will use the default value 2. The maximum depth allowed is 5.
# Corresponds to the JSON property `recursiveStructureDepth`
# @return [Fixnum]
attr_accessor :recursive_structure_depth
# Specifies the output schema type. Schema type is required.
# Corresponds to the JSON property `schemaType`
# @return [String]
attr_accessor :schema_type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@recursive_structure_depth = args[:recursive_structure_depth] if args.key?(:recursive_structure_depth)
@schema_type = args[:schema_type] if args.key?(:schema_type)
end
end
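      # A minimal usage sketch (not part of the generated client): a SchemaConfig
      # requesting the analytics schema with the default recursion depth. The schema
      # type value is an illustrative assumption.
      #
      #   schema_config = Google::Apis::HealthcareV1beta1::SchemaConfig.new(
      #     schema_type: 'ANALYTICS',
      #     recursive_structure_depth: 2
      #   )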
# An HL7v2 logical group construct.
class SchemaGroup
include Google::Apis::Core::Hashable
# True indicates that this is a choice group, meaning that only one of its
# segments can exist in a given message.
# Corresponds to the JSON property `choice`
# @return [Boolean]
attr_accessor :choice
alias_method :choice?, :choice
# The maximum number of times this group can be repeated. 0 or -1 means
# unbounded.
# Corresponds to the JSON property `maxOccurs`
# @return [Fixnum]
attr_accessor :max_occurs
# Nested groups and/or segments.
# Corresponds to the JSON property `members`
# @return [Array<Google::Apis::HealthcareV1beta1::GroupOrSegment>]
attr_accessor :members
# The minimum number of times this group must be present/repeated.
# Corresponds to the JSON property `minOccurs`
# @return [Fixnum]
attr_accessor :min_occurs
# The name of this group. For example, "ORDER_DETAIL".
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@choice = args[:choice] if args.key?(:choice)
@max_occurs = args[:max_occurs] if args.key?(:max_occurs)
@members = args[:members] if args.key?(:members)
@min_occurs = args[:min_occurs] if args.key?(:min_occurs)
@name = args[:name] if args.key?(:name)
end
end
# A schema package contains a set of schemas and type definitions.
class SchemaPackage
include Google::Apis::Core::Hashable
# Flag to ignore all min_occurs restrictions in the schema. This means that
# incoming messages can omit any group, segment, field, component, or
# subcomponent.
# Corresponds to the JSON property `ignoreMinOccurs`
# @return [Boolean]
attr_accessor :ignore_min_occurs
alias_method :ignore_min_occurs?, :ignore_min_occurs
# Schema configs that are layered based on their VersionSources that match the
# incoming message. Schema configs present in higher indices override those in
# lower indices with the same message type and trigger event if their
# VersionSources all match an incoming message.
# Corresponds to the JSON property `schemas`
# @return [Array<Google::Apis::HealthcareV1beta1::Hl7SchemaConfig>]
attr_accessor :schemas
# Determines how messages that fail to parse are handled.
# Corresponds to the JSON property `schematizedParsingType`
# @return [String]
attr_accessor :schematized_parsing_type
# Schema type definitions that are layered based on their VersionSources that
# match the incoming message. Type definitions present in higher indices
# override those in lower indices with the same type name if their
# VersionSources all match an incoming message.
# Corresponds to the JSON property `types`
# @return [Array<Google::Apis::HealthcareV1beta1::Hl7TypesConfig>]
attr_accessor :types
# Determines how unexpected segments (segments not matched to the schema) are
# handled.
# Corresponds to the JSON property `unexpectedSegmentHandling`
# @return [String]
attr_accessor :unexpected_segment_handling
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@ignore_min_occurs = args[:ignore_min_occurs] if args.key?(:ignore_min_occurs)
@schemas = args[:schemas] if args.key?(:schemas)
@schematized_parsing_type = args[:schematized_parsing_type] if args.key?(:schematized_parsing_type)
@types = args[:types] if args.key?(:types)
@unexpected_segment_handling = args[:unexpected_segment_handling] if args.key?(:unexpected_segment_handling)
end
end
# An HL7v2 Segment.
class SchemaSegment
include Google::Apis::Core::Hashable
# The maximum number of times this segment can be present in this group. 0 or -1
# means unbounded.
# Corresponds to the JSON property `maxOccurs`
# @return [Fixnum]
attr_accessor :max_occurs
# The minimum number of times this segment can be present in this group.
# Corresponds to the JSON property `minOccurs`
# @return [Fixnum]
attr_accessor :min_occurs
# The Segment type. For example, "PID".
# Corresponds to the JSON property `type`
# @return [String]
attr_accessor :type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@max_occurs = args[:max_occurs] if args.key?(:max_occurs)
@min_occurs = args[:min_occurs] if args.key?(:min_occurs)
@type = args[:type] if args.key?(:type)
end
end
# The content of an HL7v2 message in a structured format as specified by a
# schema.
class SchematizedData
include Google::Apis::Core::Hashable
# JSON output of the parser.
# Corresponds to the JSON property `data`
# @return [String]
attr_accessor :data
# The error output of the parser.
# Corresponds to the JSON property `error`
# @return [String]
attr_accessor :error
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@data = args[:data] if args.key?(:data)
@error = args[:error] if args.key?(:error)
end
end
# Request to search the resources in the specified FHIR store.
class SearchResourcesRequest
include Google::Apis::Core::Hashable
# The FHIR resource type to search, such as Patient or Observation. For a
# complete list, see the FHIR Resource Index ([DSTU2](https://hl7.org/implement/
# standards/fhir/DSTU2/resourcelist.html), [STU3](https://hl7.org/implement/
# standards/fhir/STU3/resourcelist.html), [R4](https://hl7.org/implement/
# standards/fhir/R4/resourcelist.html)).
# Corresponds to the JSON property `resourceType`
# @return [String]
attr_accessor :resource_type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@resource_type = args[:resource_type] if args.key?(:resource_type)
end
end
# A segment in a structured format.
class Segment
include Google::Apis::Core::Hashable
# A mapping from the positional location to the value. The key string uses zero-
# based indexes separated by dots to identify Fields, components and sub-
# components. A bracket notation is also used to identify different instances of
# a repeated field. Regex for key: (\d+)(\[\d+\])?(.\d+)?(.\d+)? Examples of (
# key, value) pairs: * (0.1, "hemoglobin") denotes that the first component of
# Field 0 has the value "hemoglobin". * (1.1.2, "CBC") denotes that the second
# sub-component of the first component of Field 1 has the value "CBC". * (1[0].1,
# "HbA1c") denotes that the first component of the first Instance of Field 1,
# which is repeated, has the value "HbA1c".
# Corresponds to the JSON property `fields`
# @return [Hash<String,String>]
attr_accessor :fields
# A string that indicates the type of segment. For example, EVN or PID.
# Corresponds to the JSON property `segmentId`
# @return [String]
attr_accessor :segment_id
# Set ID for segments that can be in a set. This can be empty if it's missing or
# isn't applicable.
# Corresponds to the JSON property `setId`
# @return [String]
attr_accessor :set_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@fields = args[:fields] if args.key?(:fields)
@segment_id = args[:segment_id] if args.key?(:segment_id)
@set_id = args[:set_id] if args.key?(:set_id)
end
end
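      # A minimal usage sketch (not part of the generated client) of the positional
      # key notation documented on `fields` above. The values are invented.
      #
      #   segment = Google::Apis::HealthcareV1beta1::Segment.new(
      #     segment_id: 'OBX',
      #     fields: { '0.1' => 'hemoglobin', '1[0].1' => 'HbA1c' }
      #   )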
# A TextAnnotation specifies a text range that includes sensitive information.
class SensitiveTextAnnotation
include Google::Apis::Core::Hashable
# Maps from a resource slice. For example, FHIR resource field path to a set of
# sensitive text findings. For example, Appointment.Narrative text1 --> `
# findings_1, findings_2, findings_3`
# Corresponds to the JSON property `details`
# @return [Hash<String,Google::Apis::HealthcareV1beta1::Detail>]
attr_accessor :details
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@details = args[:details] if args.key?(:details)
end
end
# Request message for `SetIamPolicy` method.
class SetIamPolicyRequest
include Google::Apis::Core::Hashable
# An Identity and Access Management (IAM) policy, which specifies access
# controls for Google Cloud resources. A `Policy` is a collection of `bindings`.
# A `binding` binds one or more `members` to a single `role`. Members can be
# user accounts, service accounts, Google groups, and domains (such as G Suite).
# A `role` is a named list of permissions; each `role` can be an IAM predefined
# role or a user-created custom role. For some types of Google Cloud resources,
# a `binding` can also specify a `condition`, which is a logical expression that
# allows access to a resource only if the expression evaluates to `true`. A
# condition can add constraints based on attributes of the request, the resource,
# or both. To learn which resources support conditions in their IAM policies,
# see the [IAM documentation](https://cloud.google.com/iam/help/conditions/
# resource-policies). **JSON example:** ` "bindings": [ ` "role": "roles/
# resourcemanager.organizationAdmin", "members": [ "user:[email protected]", "
# group:[email protected]", "domain:google.com", "serviceAccount:my-project-id@
# appspot.gserviceaccount.com" ] `, ` "role": "roles/resourcemanager.
# organizationViewer", "members": [ "user:[email protected]" ], "condition": ` "
# title": "expirable access", "description": "Does not grant access after Sep
# 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", `
# ` ], "etag": "BwWWja0YfJA=", "version": 3 ` **YAML example:** bindings: -
# members: - user:[email protected] - group:[email protected] - domain:google.
# com - serviceAccount:[email protected] role: roles/
# resourcemanager.organizationAdmin - members: - user:[email protected] role:
# roles/resourcemanager.organizationViewer condition: title: expirable access
# description: Does not grant access after Sep 2020 expression: request.time <
# timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 For a
# description of IAM and its features, see the [IAM documentation](https://cloud.
# google.com/iam/docs/).
# Corresponds to the JSON property `policy`
# @return [Google::Apis::HealthcareV1beta1::Policy]
attr_accessor :policy
# OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only
# the fields in the mask will be modified. If no mask is provided, the following
# default mask is used: `paths: "bindings, etag"`
# Corresponds to the JSON property `updateMask`
# @return [String]
attr_accessor :update_mask
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@policy = args[:policy] if args.key?(:policy)
@update_mask = args[:update_mask] if args.key?(:update_mask)
end
end
# User signature.
class Signature
include Google::Apis::Core::Hashable
# Raw bytes representing consent artifact content.
# Corresponds to the JSON property `image`
# @return [Google::Apis::HealthcareV1beta1::Image]
attr_accessor :image
# Optional. Metadata associated with the user's signature. For example, the user'
# s name or the user's title.
# Corresponds to the JSON property `metadata`
# @return [Hash<String,String>]
attr_accessor :metadata
# Optional. Timestamp of the signature.
# Corresponds to the JSON property `signatureTime`
# @return [String]
attr_accessor :signature_time
# Required. User's UUID provided by the client.
# Corresponds to the JSON property `userId`
# @return [String]
attr_accessor :user_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@image = args[:image] if args.key?(:image)
@metadata = args[:metadata] if args.key?(:metadata)
@signature_time = args[:signature_time] if args.key?(:signature_time)
@user_id = args[:user_id] if args.key?(:user_id)
end
end
# The `Status` type defines a logical error model that is suitable for different
# programming environments, including REST APIs and RPC APIs. It is used by [
# gRPC](https://github.com/grpc). Each `Status` message contains three pieces of
# data: error code, error message, and error details. You can find out more
# about this error model and how to work with it in the [API Design Guide](https:
# //cloud.google.com/apis/design/errors).
class Status
include Google::Apis::Core::Hashable
# The status code, which should be an enum value of google.rpc.Code.
# Corresponds to the JSON property `code`
# @return [Fixnum]
attr_accessor :code
# A list of messages that carry the error details. There is a common set of
# message types for APIs to use.
# Corresponds to the JSON property `details`
# @return [Array<Hash<String,Object>>]
attr_accessor :details
# A developer-facing error message, which should be in English. Any user-facing
# error message should be localized and sent in the google.rpc.Status.details
# field, or localized by the client.
# Corresponds to the JSON property `message`
# @return [String]
attr_accessor :message
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@code = args[:code] if args.key?(:code)
@details = args[:details] if args.key?(:details)
@message = args[:message] if args.key?(:message)
end
end
# Contains configuration for streaming FHIR export.
class StreamConfig
include Google::Apis::Core::Hashable
# The configuration for exporting to BigQuery.
# Corresponds to the JSON property `bigqueryDestination`
# @return [Google::Apis::HealthcareV1beta1::GoogleCloudHealthcareV1beta1FhirBigQueryDestination]
attr_accessor :bigquery_destination
# Supply a FHIR resource type (such as "Patient" or "Observation"). See https://
# www.hl7.org/fhir/valueset-resource-types.html for a list of all FHIR resource
# types. The server treats an empty list as an intent to stream all the
# supported resource types in this FHIR store.
# Corresponds to the JSON property `resourceTypes`
# @return [Array<String>]
attr_accessor :resource_types
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@bigquery_destination = args[:bigquery_destination] if args.key?(:bigquery_destination)
@resource_types = args[:resource_types] if args.key?(:resource_types)
end
end
# List of tags to be filtered.
class TagFilterList
include Google::Apis::Core::Hashable
# Tags to be filtered. Tags must be DICOM Data Elements, File Meta Elements, or
# Directory Structuring Elements, as defined at: http://dicom.nema.org/medical/
        # dicom/current/output/html/part06.html#table_6-1. They may be provided by "
# Keyword" or "Tag". For example, "PatientID", "00100010".
# Corresponds to the JSON property `tags`
# @return [Array<String>]
attr_accessor :tags
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@tags = args[:tags] if args.key?(:tags)
end
end
# Request message for `TestIamPermissions` method.
class TestIamPermissionsRequest
include Google::Apis::Core::Hashable
# The set of permissions to check for the `resource`. Permissions with wildcards
# (such as '*' or 'storage.*') are not allowed. For more information see [IAM
# Overview](https://cloud.google.com/iam/docs/overview#permissions).
# Corresponds to the JSON property `permissions`
# @return [Array<String>]
attr_accessor :permissions
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@permissions = args[:permissions] if args.key?(:permissions)
end
end
# Response message for `TestIamPermissions` method.
class TestIamPermissionsResponse
include Google::Apis::Core::Hashable
# A subset of `TestPermissionsRequest.permissions` that the caller is allowed.
# Corresponds to the JSON property `permissions`
# @return [Array<String>]
attr_accessor :permissions
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@permissions = args[:permissions] if args.key?(:permissions)
end
end
#
class TextConfig
include Google::Apis::Core::Hashable
# The transformations to apply to the detected data.
# Corresponds to the JSON property `transformations`
# @return [Array<Google::Apis::HealthcareV1beta1::InfoTypeTransformation>]
attr_accessor :transformations
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@transformations = args[:transformations] if args.key?(:transformations)
end
end
# A span of text in the provided document.
class TextSpan
include Google::Apis::Core::Hashable
# The unicode codepoint index of the beginning of this span.
# Corresponds to the JSON property `beginOffset`
# @return [Fixnum]
attr_accessor :begin_offset
# The original text contained in this span.
# Corresponds to the JSON property `content`
# @return [String]
attr_accessor :content
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@begin_offset = args[:begin_offset] if args.key?(:begin_offset)
@content = args[:content] if args.key?(:content)
end
end
# A type definition for some HL7v2 type (incl. Segments and Datatypes).
class Type
include Google::Apis::Core::Hashable
# The (sub) fields this type has (if not primitive).
# Corresponds to the JSON property `fields`
# @return [Array<Google::Apis::HealthcareV1beta1::Field>]
attr_accessor :fields
# The name of this type. This would be the segment or datatype name. For example,
# "PID" or "XPN".
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
        # If this is a primitive type then this field is the type of the primitive. For
# example, STRING. Leave unspecified for composite types.
# Corresponds to the JSON property `primitive`
# @return [String]
attr_accessor :primitive
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@fields = args[:fields] if args.key?(:fields)
@name = args[:name] if args.key?(:name)
@primitive = args[:primitive] if args.key?(:primitive)
end
end
# Maps a resource to the associated user and Attributes.
class UserDataMapping
include Google::Apis::Core::Hashable
# Output only. Indicates the time when this mapping was archived.
# Corresponds to the JSON property `archiveTime`
# @return [String]
attr_accessor :archive_time
# Output only. Indicates whether this mapping is archived.
# Corresponds to the JSON property `archived`
# @return [Boolean]
attr_accessor :archived
alias_method :archived?, :archived
# Required. A unique identifier for the mapped resource.
# Corresponds to the JSON property `dataId`
# @return [String]
attr_accessor :data_id
# Resource name of the User data mapping, of the form `projects/`project_id`/
# locations/`location_id`/datasets/`dataset_id`/consentStores/`consent_store_id`/
# userDataMappings/`user_data_mapping_id``.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Attributes of the resource. Only explicitly set attributes are displayed here.
# Attribute definitions with defaults set implicitly apply to these User data
# mappings. Attributes listed here must be single valued, that is, exactly one
# value is specified for the field "values" in each Attribute.
# Corresponds to the JSON property `resourceAttributes`
# @return [Array<Google::Apis::HealthcareV1beta1::Attribute>]
attr_accessor :resource_attributes
# Required. User's UUID provided by the client.
# Corresponds to the JSON property `userId`
# @return [String]
attr_accessor :user_id
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@archive_time = args[:archive_time] if args.key?(:archive_time)
@archived = args[:archived] if args.key?(:archived)
@data_id = args[:data_id] if args.key?(:data_id)
@name = args[:name] if args.key?(:name)
@resource_attributes = args[:resource_attributes] if args.key?(:resource_attributes)
@user_id = args[:user_id] if args.key?(:user_id)
end
end
# Contains the configuration for FHIR profiles and validation.
class ValidationConfig
include Google::Apis::Core::Hashable
# Whether to disable profile validation for this FHIR store. Set this to true to
# disable checking incoming resources for conformance against
# StructureDefinitions in this FHIR store.
# Corresponds to the JSON property `disableProfileValidation`
# @return [Boolean]
attr_accessor :disable_profile_validation
alias_method :disable_profile_validation?, :disable_profile_validation
# A list of ImplementationGuide URLs in this FHIR store that are used to
# configure the profiles to use for validation. For example, to use the US Core
# profiles for validation, set `enabled_implementation_guides` to `["http://hl7.
# org/fhir/us/core/ImplementationGuide/ig"]`. If `enabled_implementation_guides`
# is empty or omitted, then incoming resources are only required to conform to
# the base FHIR profiles. Otherwise, a resource must conform to at least one
# profile listed in the `global` property of one of the enabled
# ImplementationGuides. The Cloud Healthcare API does not currently enforce all
# of the rules in a StructureDefinition. The following rules are supported: -
# min/max - minValue/maxValue - maxLength - type - fixed[x] - pattern[x] on
# simple types - slicing, when using "value" as the discriminator type When a
# URL cannot be resolved (for example, in a type assertion), the server does not
# return an error.
# Corresponds to the JSON property `enabledImplementationGuides`
# @return [Array<String>]
attr_accessor :enabled_implementation_guides
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@disable_profile_validation = args[:disable_profile_validation] if args.key?(:disable_profile_validation)
@enabled_implementation_guides = args[:enabled_implementation_guides] if args.key?(:enabled_implementation_guides)
end
end
# Describes a selector for extracting and matching an MSH field to a value.
class VersionSource
include Google::Apis::Core::Hashable
# The field to extract from the MSH segment. For example, "3.1" or "18[1].1".
# Corresponds to the JSON property `mshField`
# @return [String]
attr_accessor :msh_field
# The value to match with the field. For example, "My Application Name" or "2.3".
# Corresponds to the JSON property `value`
# @return [String]
attr_accessor :value
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@msh_field = args[:msh_field] if args.key?(:msh_field)
@value = args[:value] if args.key?(:value)
end
end
# A 2D coordinate in an image. The origin is the top-left.
class Vertex
include Google::Apis::Core::Hashable
# X coordinate.
# Corresponds to the JSON property `x`
# @return [Float]
attr_accessor :x
# Y coordinate.
# Corresponds to the JSON property `y`
# @return [Float]
attr_accessor :y
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@x = args[:x] if args.key?(:x)
@y = args[:y] if args.key?(:y)
end
end
end
end
end
| 43.400078 | 130 | 0.636786 |
7a766d0dcaaa514919b36b260e220c5edf590f60 | 3,053 | #
# Cookbook Name:: hdf-chef
# Recipe:: hdf_cluster
#
# The MIT License (MIT)
#
# Copyright:: 2018, Ryan Hansohn
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
hdf_version = -> { node['hw']['hdf']['version'] }
hdf_version_full = -> { node['hw']['hdf'][hdf_version.call]['version_full'] }
hdf_vdf = -> { node['hw']['hdf'][hdf_version.call]['vdf'] }
# hdf-cluster: create clusters directory
directory 'make_/var/lib/ambari-clusters' do
path '/var/lib/ambari-clusters'
recursive true
owner node['hw']['ambari']['server']['user']['name']
group 'root'
end
# hdf-cluster: add version definition file to cluster dir
remote_file "create_/var/lib/ambari-clusters/HDF-#{hdf_version_full.call}.xml" do
source hdf_vdf.call
path "/var/lib/ambari-clusters/HDF-#{hdf_version_full.call}.xml"
owner node['hw']['ambari']['server']['user']['name']
group 'root'
action :create_if_missing
end
# hdf-cluster: add version definition post to cluster dir
template "create_/var/lib/ambari-clusters/#{node['hw']['cluster']['version_definition_file']}" do
path "/var/lib/ambari-clusters/#{node['hw']['cluster']['version_definition_file']}"
source 'version_definition_file.json.erb'
variables(
'version_full' => hdf_version_full.call
)
owner node['hw']['ambari']['server']['user']['name']
group 'root'
end
# hdf-cluster: add blueprint to cluster dir
template "create_/var/lib/ambari-clusters/#{node['hw']['cluster']['blueprint_file']}" do
path "/var/lib/ambari-clusters/#{node['hw']['cluster']['blueprint_file']}"
source "#{node['hw']['cluster']['blueprint_file']}.erb"
sensitive true
owner node['hw']['ambari']['server']['user']['name']
group 'root'
end
# hdf-cluster: add hostmapping to cluster dir
template "create_/var/lib/ambari-clusters/#{node['hw']['cluster']['hostmapping_file']}" do
path "/var/lib/ambari-clusters/#{node['hw']['cluster']['hostmapping_file']}"
source "#{node['hw']['cluster']['hostmapping_file']}.erb"
sensitive true
owner node['hw']['ambari']['server']['user']['name']
group 'root'
end
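# hdf-cluster: illustrative attribute sketch (not part of this recipe). The resources
# above assume node attributes shaped roughly like the following; every value shown
# here is a placeholder, not a real default.
#
#   default['hw']['hdf']['version'] = '3.2'
#   default['hw']['hdf']['3.2']['version_full'] = '3.2.0.0-520'
#   default['hw']['hdf']['3.2']['vdf'] = 'https://example.com/HDF-3.2.0.0-520.xml'
#   default['hw']['ambari']['server']['user']['name'] = 'ambari'
#   default['hw']['cluster']['version_definition_file'] = 'version_definition.json'
#   default['hw']['cluster']['blueprint_file'] = 'blueprint.json'
#   default['hw']['cluster']['hostmapping_file'] = 'hostmapping.json'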
| 40.171053 | 97 | 0.719948 |
26545ecc1bfc5d802a98c57e3d4a3d7cecb39c54 | 2,272 | # frozen_string_literal: true
module RuboCop
module Cop
module Performance
# This cop identifies unnecessary use of a regex where `String#end_with?` would suffice.
#
      # This cop has a `SafeMultiline` configuration option that is `true` by default because
      # `end$` is unsafe: it behaves incompatibly with `end_with?`
      # when the receiver is a multiline string.
#
# @example
# # bad
# 'abc'.match?(/bc\Z/)
# /bc\Z/.match?('abc')
# 'abc' =~ /bc\Z/
# /bc\Z/ =~ 'abc'
# 'abc'.match(/bc\Z/)
# /bc\Z/.match('abc')
#
# # good
# 'abc'.end_with?('bc')
#
# @example SafeMultiline: true (default)
#
# # good
# 'abc'.match?(/bc$/)
# /bc$/.match?('abc')
# 'abc' =~ /bc$/
# /bc$/ =~ 'abc'
# 'abc'.match(/bc$/)
# /bc$/.match('abc')
#
# @example SafeMultiline: false
#
# # bad
# 'abc'.match?(/bc$/)
# /bc$/.match?('abc')
# 'abc' =~ /bc$/
# /bc$/ =~ 'abc'
# 'abc'.match(/bc$/)
# /bc$/.match('abc')
#
class EndWith < Base
include RegexpMetacharacter
extend AutoCorrector
MSG = 'Use `String#end_with?` instead of a regex match anchored to ' \
'the end of the string.'
def_node_matcher :redundant_regex?, <<~PATTERN
{(send $!nil? {:match :=~ :match?} (regexp (str $#literal_at_end?) (regopt)))
(send (regexp (str $#literal_at_end?) (regopt)) {:match :match?} $_)
(match-with-lvasgn (regexp (str $#literal_at_end?) (regopt)) $_)}
PATTERN
def on_send(node)
return unless (receiver, regex_str = redundant_regex?(node))
add_offense(node) do |corrector|
receiver, regex_str = regex_str, receiver if receiver.is_a?(String)
regex_str = drop_end_metacharacter(regex_str)
regex_str = interpret_string_escapes(regex_str)
new_source = "#{receiver.source}.end_with?(#{to_string_literal(regex_str)})"
corrector.replace(node.source_range, new_source)
end
end
alias on_match_with_lvasgn on_send
end
end
end
end
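# A quick illustration (not part of the cop) of why `SafeMultiline` defaults to true:
# Ruby's `$` anchor matches at the end of every line, not only the end of the string,
# so an `end$` pattern and `end_with?` can disagree on multiline receivers.
#
#   "abc\ndef".match?(/bc$/)    # => true  (matches before the embedded newline)
#   "abc\ndef".end_with?('bc')  # => false
#   "abc\ndef".match?(/bc\Z/)   # => false (agrees with end_with?)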
| 30.293333 | 94 | 0.53037 |
bb22a3f841527da45b081c3b4e150c9441c4f89c | 1,734 | require 'puppet/dsl/blank_slate'
require 'puppet/dsl/resource_reference'
module Puppet
# @since 3.1
# @status EXPERIMENTAL
module DSL
# Thin decorator layer for accessing attributes of array/hash-like objects.
# @see Puppet::DSL::Context#create_resource Context#create_resource for examples of usage
#
class ResourceDecorator < BlankSlate
# Initializes new object.
# @overload initialize(resource, {|r| block})
# @param resource [#[], #[]=] any object responding to these methods
# @yieldparam r [ResourceDecorator] the `self` when evaluating the ruby block
# @param block [ruby] the Ruby DSL statements to evaluate.
#
def initialize(resource, &block)
@resource = resource
block.call self
end
# A proxy method allowing direct access to resource parameters instead of
# having to use `#[]` or `#[]=`
#
# After a first call it creates a cached version of the created access method.
#
# @example
# # allows using this
# r.title = "I am a resource"
# # instead of this
# r[:title] = "I am a resource"
#
def method_missing(name, *args)
if name.to_s =~ /\A(.*)=\z/
define_singleton_method name do |*a|
value = a.first
value = value.reference if value.is_a? ::Puppet::DSL::ResourceReference
value = value.to_s unless value.is_a? ::Puppet::Resource
@resource[$1.to_sym] = value
end
self.__send__ name, *args
else
define_singleton_method name do
@resource[name]
end
self.__send__ name, *args
end
end
end
end
end
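# A minimal usage sketch (assumed, not taken from the Puppet codebase): any object
# responding to #[] and #[]= can be decorated, so a plain Hash works for illustration.
#
#   params = {}
#   Puppet::DSL::ResourceDecorator.new(params) do |r|
#     r.owner = 'root'   # proxied to params[:owner] = 'root'
#     r.mode  = '0644'   # proxied to params[:mode]  = '0644'
#   end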
| 30.421053 | 94 | 0.60323 |
6af8d37ef245d93653dc55f65bbaa0f19a9698ce | 4,412 | require 'cgi'
require 'uri'
require 'rollbar/language_support'
module Rollbar
module Scrubbers
class URL
SCRUB_ALL = :scrub_all
def self.call(*args)
new.call(*args)
end
def call(options = {})
url = ascii_encode(options[:url])
filter(url,
build_regex(options[:scrub_fields]),
options[:scrub_user],
options[:scrub_password],
options.fetch(:randomize_scrub_length, true),
options[:scrub_fields].include?(SCRUB_ALL),
build_whitelist_regex(options[:whitelist] || []))
rescue StandardError => e
Rollbar.logger.error("[Rollbar] There was an error scrubbing the url: #{e}, options: #{options.inspect}")
url
end
private
def ascii_encode(url)
# In some cases non-ascii characters won't be properly encoded, so we do it here.
#
# The standard encoders (the CGI and URI methods) are not reliable when the query string
# is already embedded in the full URL, but the inconsistencies are limited to issues
# with characters in the ascii range. (For example, the '#' if it appears in an unexpected place.)
# For escaping non-ascii, they are all OK, so we'll take care to skip the ascii chars.
return url if url.ascii_only?
# Iterate each char and only escape non-ascii characters.
url.each_char.map { |c| c.ascii_only? ? c : CGI.escape(c) }.join
end
def build_whitelist_regex(whitelist)
fields = whitelist.find_all { |f| f.is_a?(String) || f.is_a?(Symbol) }
return unless fields.any?
Regexp.new(fields.map { |val| /\A#{Regexp.escape(val.to_s)}\z/ }.join('|'))
end
def filter(url, regex, scrub_user, scrub_password, randomize_scrub_length, scrub_all, whitelist)
uri = URI.parse(url)
uri.user = filter_user(uri.user, scrub_user, randomize_scrub_length)
uri.password = filter_password(uri.password, scrub_password, randomize_scrub_length)
uri.query = filter_query(uri.query, regex, randomize_scrub_length, scrub_all, whitelist)
uri.to_s
end
# Builds a regex to match with any of the received fields.
# The built regex will also match array params like 'user_ids[]'.
def build_regex(fields)
fields_or = fields.map { |field| "#{field}(\\[\\])?" }.join('|')
Regexp.new("^#{fields_or}$")
end
def filter_user(user, scrub_user, randomize_scrub_length)
scrub_user && user ? filtered_value(user, randomize_scrub_length) : user
end
def filter_password(password, scrub_password, randomize_scrub_length)
scrub_password && password ? filtered_value(password, randomize_scrub_length) : password
end
def filter_query(query, regex, randomize_scrub_length, scrub_all, whitelist)
return query unless query
params = decode_www_form(query)
encode_www_form(filter_query_params(params, regex, randomize_scrub_length, scrub_all, whitelist))
end
def decode_www_form(query)
URI.decode_www_form(query)
end
def encode_www_form(params)
restore_square_brackets(URI.encode_www_form(params))
end
def restore_square_brackets(query)
# We want this to rebuild array params like foo[]=1&foo[]=2
#
# URI.encode_www_form follows the spec at https://url.spec.whatwg.org/#concept-urlencoded-serializer
# and percent encodes square brackets. Here we change them back.
query.gsub('%5B', '[').gsub('%5D', ']')
end
def filter_query_params(params, regex, randomize_scrub_length, scrub_all, whitelist)
params.map do |key, value|
[key, filter_key?(key, regex, scrub_all, whitelist) ? filtered_value(value, randomize_scrub_length) : value]
end
end
def filter_key?(key, regex, scrub_all, whitelist)
!(whitelist === key) && (scrub_all || regex === key)
end
def filtered_value(value, randomize_scrub_length)
if randomize_scrub_length
random_filtered_value
else
'*' * (begin
value.length
rescue StandardError
8
end)
end
end
def random_filtered_value
'*' * rand(3..7)
end
end
end
end
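# A minimal usage sketch (assumed, not taken from the Rollbar codebase): scrubbing a
# query parameter plus the userinfo portion of a URL. The option names mirror the
# keys read in #call above; the URL and fields are illustrative.
#
#   scrubbed = Rollbar::Scrubbers::URL.call(
#     url: 'https://bob:[email protected]/callback?token=abc123&page=2',
#     scrub_fields: [:token],
#     scrub_user: true,
#     scrub_password: true
#   )
#   # => e.g. "https://*****:***@example.com/callback?token=****&page=2"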
| 33.679389 | 118 | 0.633726 |
e892c736c69f6698775975b7ec7d3b88e150faf6 | 19,496 | # frozen_string_literal: true
require 'spec_helper'
describe SnippetsController do
let(:user) { create(:user) }
describe 'GET #index' do
let(:user) { create(:user) }
context 'when username parameter is present' do
it_behaves_like 'paginated collection' do
let(:collection) { Snippet.all }
let(:params) { { username: user.username } }
before do
create(:personal_snippet, :public, author: user)
end
end
it 'renders snippets of a user when username is present' do
get :index, params: { username: user.username }
expect(response).to render_template(:index)
end
end
context 'when username parameter is not present' do
it 'redirects to explore snippets page when user is not logged in' do
get :index
expect(response).to redirect_to(explore_snippets_path)
end
it 'redirects to snippets dashboard page when user is logged in' do
sign_in(user)
get :index
expect(response).to redirect_to(dashboard_snippets_path)
end
end
end
describe 'GET #new' do
context 'when signed in' do
before do
sign_in(user)
end
it 'responds with status 200' do
get :new
expect(response).to have_gitlab_http_status(200)
end
end
context 'when not signed in' do
it 'redirects to the sign in page' do
get :new
expect(response).to redirect_to(new_user_session_path)
end
end
end
describe 'GET #show' do
context 'when the personal snippet is private' do
let(:personal_snippet) { create(:personal_snippet, :private, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
context 'when signed in user is not the author' do
let(:other_author) { create(:author) }
let(:other_personal_snippet) { create(:personal_snippet, :private, author: other_author) }
it 'responds with status 404' do
get :show, params: { id: other_personal_snippet.to_param }
expect(response).to have_gitlab_http_status(404)
end
end
context 'when signed in user is the author' do
it 'renders the snippet' do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
it 'responds with status 404 when embeddable content is requested' do
get :show, params: { id: personal_snippet.to_param }, format: :js
expect(response).to have_gitlab_http_status(404)
end
end
end
context 'when not signed in' do
it 'redirects to the sign in page' do
get :show, params: { id: personal_snippet.to_param }
expect(response).to redirect_to(new_user_session_path)
end
end
end
context 'when the personal snippet is internal' do
let(:personal_snippet) { create(:personal_snippet, :internal, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it 'renders the snippet' do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
it 'responds with status 404 when embeddable content is requested' do
get :show, params: { id: personal_snippet.to_param }, format: :js
expect(response).to have_gitlab_http_status(404)
end
end
context 'when not signed in' do
it 'redirects to the sign in page' do
get :show, params: { id: personal_snippet.to_param }
expect(response).to redirect_to(new_user_session_path)
end
end
end
context 'when the personal snippet is public' do
let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it 'renders the snippet' do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
it 'responds with status 200 when embeddable content is requested' do
get :show, params: { id: personal_snippet.to_param }, format: :js
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
end
context 'when not signed in' do
it 'renders the snippet' do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
end
end
context 'when the personal snippet does not exist' do
context 'when signed in' do
before do
sign_in(user)
end
it 'responds with status 404' do
get :show, params: { id: 'doesntexist' }
expect(response).to have_gitlab_http_status(404)
end
end
context 'when not signed in' do
it 'responds with status 404' do
get :show, params: { id: 'doesntexist' }
expect(response).to redirect_to(new_user_session_path)
end
end
end
end
describe 'POST #create' do
def create_snippet(snippet_params = {}, additional_params = {})
sign_in(user)
post :create, params: {
personal_snippet: { title: 'Title', content: 'Content', description: 'Description' }.merge(snippet_params)
}.merge(additional_params)
Snippet.last
end
it 'creates the snippet correctly' do
snippet = create_snippet(visibility_level: Snippet::PRIVATE)
expect(snippet.title).to eq('Title')
expect(snippet.content).to eq('Content')
expect(snippet.description).to eq('Description')
end
context 'when the snippet description contains a file' do
include FileMoverHelpers
let(:picture_file) { "/-/system/user/#{user.id}/secret56/picture.jpg" }
let(:text_file) { "/-/system/user/#{user.id}/secret78/text.txt" }
let(:description) do
"Description with picture:  and "\
"text: [text.txt](/uploads#{text_file})"
end
before do
allow(FileUtils).to receive(:mkdir_p)
allow(FileUtils).to receive(:move)
stub_file_mover(text_file)
stub_file_mover(picture_file)
end
subject { create_snippet({ description: description }, { files: [picture_file, text_file] }) }
it 'creates the snippet' do
expect { subject }.to change { Snippet.count }.by(1)
end
it 'stores the snippet description correctly' do
snippet = subject
expected_description = "Description with picture: "\
" and "\
"text: [text.txt](/uploads/-/system/personal_snippet/#{snippet.id}/secret78/text.txt)"
expect(snippet.description).to eq(expected_description)
end
end
context 'when the snippet is spam' do
before do
allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true)
end
context 'when the snippet is private' do
it 'creates the snippet' do
expect { create_snippet(visibility_level: Snippet::PRIVATE) }
.to change { Snippet.count }.by(1)
end
end
context 'when the snippet is public' do
        it 'rejects the snippet' do
expect { create_snippet(visibility_level: Snippet::PUBLIC) }
.not_to change { Snippet.count }
end
it 'creates a spam log' do
expect { create_snippet(visibility_level: Snippet::PUBLIC) }
.to log_spam(title: 'Title', user: user, noteable_type: 'PersonalSnippet')
end
it 'renders :new with recaptcha disabled' do
stub_application_setting(recaptcha_enabled: false)
create_snippet(visibility_level: Snippet::PUBLIC)
expect(response).to render_template(:new)
end
context 'recaptcha enabled' do
before do
stub_application_setting(recaptcha_enabled: true)
end
it 'renders :verify with recaptcha enabled' do
create_snippet(visibility_level: Snippet::PUBLIC)
expect(response).to render_template(:verify)
end
it 'renders snippet page when recaptcha verified' do
spammy_title = 'Whatever'
spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
snippet = create_snippet({ title: spammy_title },
{ spam_log_id: spam_logs.last.id,
recaptcha_verification: true })
expect(response).to redirect_to(snippet_path(snippet))
end
end
end
end
end
describe 'PUT #update' do
let(:project) { create :project }
let(:snippet) { create :personal_snippet, author: user, project: project, visibility_level: visibility_level }
def update_snippet(snippet_params = {}, additional_params = {})
sign_in(user)
put :update, params: {
id: snippet.id,
personal_snippet: { title: 'Title', content: 'Content' }.merge(snippet_params)
}.merge(additional_params)
snippet.reload
end
context 'when the snippet is spam' do
before do
allow_any_instance_of(AkismetService).to receive(:spam?).and_return(true)
end
context 'when the snippet is private' do
let(:visibility_level) { Snippet::PRIVATE }
it 'updates the snippet' do
expect { update_snippet(title: 'Foo') }
.to change { snippet.reload.title }.to('Foo')
end
end
context 'when a private snippet is made public' do
let(:visibility_level) { Snippet::PRIVATE }
it 'rejects the snippet' do
expect { update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC) }
.not_to change { snippet.reload.title }
end
it 'creates a spam log' do
expect { update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC) }
.to log_spam(title: 'Foo', user: user, noteable_type: 'PersonalSnippet')
end
it 'renders :edit with recaptcha disabled' do
stub_application_setting(recaptcha_enabled: false)
update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC)
expect(response).to render_template(:edit)
end
context 'recaptcha enabled' do
before do
stub_application_setting(recaptcha_enabled: true)
end
it 'renders :verify with recaptcha enabled' do
update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC)
expect(response).to render_template(:verify)
end
it 'renders snippet page when recaptcha verified' do
spammy_title = 'Whatever'
spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
snippet = update_snippet({ title: spammy_title, visibility_level: Snippet::PUBLIC },
{ spam_log_id: spam_logs.last.id,
recaptcha_verification: true })
expect(response).to redirect_to(snippet_path(snippet))
end
end
end
context 'when the snippet is public' do
let(:visibility_level) { Snippet::PUBLIC }
        it 'rejects the snippet' do
expect { update_snippet(title: 'Foo') }
.not_to change { snippet.reload.title }
end
it 'creates a spam log' do
          expect { update_snippet(title: 'Foo') }
.to log_spam(title: 'Foo', user: user, noteable_type: 'PersonalSnippet')
end
it 'renders :edit with recaptcha disabled' do
stub_application_setting(recaptcha_enabled: false)
update_snippet(title: 'Foo')
expect(response).to render_template(:edit)
end
context 'recaptcha enabled' do
before do
stub_application_setting(recaptcha_enabled: true)
end
it 'renders :verify with recaptcha enabled' do
update_snippet(title: 'Foo')
expect(response).to render_template(:verify)
end
it 'renders snippet page when recaptcha verified' do
spammy_title = 'Whatever'
spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
snippet = update_snippet({ title: spammy_title },
{ spam_log_id: spam_logs.last.id,
recaptcha_verification: true })
expect(response).to redirect_to(snippet_path(snippet))
end
end
end
end
end
describe 'POST #mark_as_spam' do
let(:snippet) { create(:personal_snippet, :public, author: user) }
before do
allow_any_instance_of(AkismetService).to receive_messages(submit_spam: true)
stub_application_setting(akismet_enabled: true)
end
def mark_as_spam
admin = create(:admin)
create(:user_agent_detail, subject: snippet)
sign_in(admin)
post :mark_as_spam, params: { id: snippet.id }
end
it 'updates the snippet' do
mark_as_spam
expect(snippet.reload).not_to be_submittable_as_spam
end
end
describe "GET #raw" do
context 'when the personal snippet is private' do
let(:personal_snippet) { create(:personal_snippet, :private, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
context 'when signed in user is not the author' do
let(:other_author) { create(:author) }
let(:other_personal_snippet) { create(:personal_snippet, :private, author: other_author) }
it 'responds with status 404' do
get :raw, params: { id: other_personal_snippet.to_param }
expect(response).to have_gitlab_http_status(404)
end
end
context 'when signed in user is the author' do
before do
get :raw, params: { id: personal_snippet.to_param }
end
it 'responds with status 200' do
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
it 'has expected headers' do
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
expect(response.header['Content-Disposition']).to match(/inline/)
end
it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
expect(response).to have_gitlab_http_status(200)
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
end
end
end
context 'when not signed in' do
it 'redirects to the sign in page' do
get :raw, params: { id: personal_snippet.to_param }
expect(response).to redirect_to(new_user_session_path)
end
end
end
context 'when the personal snippet is internal' do
let(:personal_snippet) { create(:personal_snippet, :internal, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it 'responds with status 200' do
get :raw, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
end
context 'when not signed in' do
it 'redirects to the sign in page' do
get :raw, params: { id: personal_snippet.to_param }
expect(response).to redirect_to(new_user_session_path)
end
end
end
context 'when the personal snippet is public' do
let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it 'responds with status 200' do
get :raw, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
context 'CRLF line ending' do
let(:personal_snippet) do
create(:personal_snippet, :public, author: user, content: "first line\r\nsecond line\r\nthird line")
end
it 'returns LF line endings by default' do
get :raw, params: { id: personal_snippet.to_param }
expect(response.body).to eq("first line\nsecond line\nthird line")
end
it 'does not convert line endings when parameter present' do
get :raw, params: { id: personal_snippet.to_param, line_ending: :raw }
expect(response.body).to eq("first line\r\nsecond line\r\nthird line")
end
end
end
context 'when not signed in' do
it 'responds with status 200' do
get :raw, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(200)
end
end
end
context 'when the personal snippet does not exist' do
context 'when signed in' do
before do
sign_in(user)
end
it 'responds with status 404' do
get :raw, params: { id: 'doesntexist' }
expect(response).to have_gitlab_http_status(404)
end
end
context 'when not signed in' do
it 'redirects to the sign in path' do
get :raw, params: { id: 'doesntexist' }
expect(response).to redirect_to(new_user_session_path)
end
end
end
end
context 'award emoji on snippets' do
let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
let(:another_user) { create(:user) }
before do
sign_in(another_user)
end
describe 'POST #toggle_award_emoji' do
it "toggles the award emoji" do
expect do
post(:toggle_award_emoji, params: { id: personal_snippet.to_param, name: "thumbsup" })
end.to change { personal_snippet.award_emoji.count }.from(0).to(1)
expect(response.status).to eq(200)
end
it "removes the already awarded emoji" do
post(:toggle_award_emoji, params: { id: personal_snippet.to_param, name: "thumbsup" })
expect do
post(:toggle_award_emoji, params: { id: personal_snippet.to_param, name: "thumbsup" })
end.to change { personal_snippet.award_emoji.count }.from(1).to(0)
expect(response.status).to eq(200)
end
end
end
describe 'POST #preview_markdown' do
let(:snippet) { create(:personal_snippet, :public) }
it 'renders json in a correct format' do
sign_in(user)
post :preview_markdown, params: { id: snippet, text: '*Markdown* text' }
expect(json_response.keys).to match_array(%w(body references))
end
end
end
| 30.557994 | 114 | 0.616537 |
b9321f66836279f13c0317e91bbea38634f6821b | 111 | class Settings < Settingslogic
source Rails.root.join('config', 'application.yml')
namespace Rails.env
end
| 22.2 | 53 | 0.765766 |
21fe5dfe20f87509072467b4a4497051d6a9df92 | 1,452 | require 'spec_helper'
describe "JudgeAbility" do
let(:judge) { Fabricate :judge }
  subject(:ability) { JudgeAbility.new(judge) }
describe "contest" do
let(:contest) { Fabricate.build(:contest) }
it{ should be_able_to(:manage, contest) }
end
describe "problem" do
let(:problem) { Fabricate.build(:problem) }
it{ should be_able_to(:manage, problem) }
it{ should be_able_to(:check, problem) }
it{ should be_able_to(:approve, problem) }
end
describe "test" do
let(:test) { Fabricate.build(:problem_test) }
it{ should be_able_to(:manage, test) }
end
describe "solution" do
let(:solution) { Fabricate.build(:solution) }
it{ should be_able_to(:update, solution) }
it{ should be_able_to(:check, solution) }
context "contest is not selected" do
let(:problem) { Fabricate.build :problem }
let(:solution) { Fabricate.build :solution, contest: nil, problem: problem, user: judge }
context "problem is owned" do
before { problem.user = judge }
it{ should be_able_to(:create, solution) }
end
context "problem is not owned" do
before { expect(problem.user).not_to eq(judge) }
it{ should be_able_to(:create, solution) }
end
end
end
describe "result" do
it{ should be_able_to(:read, Fabricate.build(:result, hidden: true)) }
it{ should be_able_to(:read, Fabricate.build(:result, hidden: false)) }
end
end
| 25.928571 | 95 | 0.65427 |
abaadc80b5e843e49c8c85eb3eeaef6da1fc75b6 | 1,442 | module Transparam
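  # Gathers application facts by writing a collector script into tmp/, running it
  # via `rails runner`, and parsing the JSON file it writes; temp files are removed afterwards.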
class FactCollector
attr_reader :project_path
def initialize(project_path:)
@project_path = project_path
end
def call
write_helper
perform_fact_collection
collect_facts
ensure
cleanup
end
def self.call(*args)
self.new(*args).call
end
private
def cleanup
[output_file_path, collector_path].each do |path|
File.delete(path) if File.exist?(path)
end
end
def collect_facts
JSON.parse(File.read(output_file_path)).tap do |result|
puts result if debug?
end
end
def perform_fact_collection
`RUBYOPT='-W0' bundle exec rails runner '#{collector_path}'`.tap do |result|
puts result if debug?
end
end
def write_helper
File.write(collector_path, source)
end
def base_helper_path
File.expand_path('fact_collector/helper.rb', File.dirname(__FILE__))
end
def collector_path
File.expand_path('tmp/transparam-fact-collector.rb', project_path)
end
def output_file_path
File.expand_path('tmp/transparam-app-data.json', project_path)
end
def source
init_command = "Transparam::FactCollector::Helper.call(project_path: '#{project_path.to_s}', output_path: '#{output_file_path.to_s}')"
File.read(base_helper_path) + "\n\n#{init_command}"
end
def debug?
!ENV['DEBUG'].nil?
end
end
end | 21.848485 | 140 | 0.656727 |
38556e23525f3aefee6c2f43816157605d7bd1a0 | 594 | # frozen_string_literal: true
module Generator
# Defines a method that can be used in rake tasks to generate user accounts.
module Accounts
def self.create_account(name, email, password, admin: false)
if User.where(email: email).count.positive?
Rails.logger.info "Account #{email} already exists."
return
end
Rails.logger.info "Creating account #{email} with password #{password}"
User.create!(name: name, email: email,
password: password, password_confirmation: password,
admin: admin)
end
end
end
| 29.7 | 78 | 0.659933 |
ff8b397b4eb6ff837c97cf6777baf1a27017010f | 1,257 | module Cubic
# All classes generated by Cubic inherit from CubicController
class CubicController
extend Logable
class << self
attr_accessor :namespace
# Allows for namespacing within controllers who inherit
# from CubicController
def namespace(name, &b)
@namespace = name
b.call
ensure
@namespace = nil
end
def get(url, &block)
namespace_url(url) if @namespace
url = format_url(url)
route_setter('GET', url, block)
end
def post(url, &block)
namespace_url(url) if @namespace
route_setter('POST', url, block)
end
def put(url, &block)
namespace_url(url) if @namespace
route_setter('PUT', url, block)
end
def delete(url, &block)
namespace_url(url) if @namespace
route_setter('DELETE', url, block)
end
def route_setter(request_method, url, block)
Router.set_route(request_method, url, block)
end
def format_url(url)
if url.is_a?(String) && url[0] != '/'
url.prepend('/')
else
url
end
end
def namespace_url(url)
url.prepend(@namespace + '/')
end
end
end
end
| 22.052632 | 63 | 0.577566 |
610b272b06c5760b3142c678320d86cd0889bbe4 | 593 | require "json"
package = JSON.parse(File.read(File.join(__dir__, "package.json")))
Pod::Spec.new do |s|
s.name = "react-native-qiyu"
s.version = package["version"]
s.summary = package["description"]
s.homepage = package["homepage"]
s.license = package["license"]
s.authors = package["author"]
s.platforms = { :ios => "9.0" }
s.source = { :git => "https://github.com/kafudev/react-native-qiyu.git", :tag => "#{s.version}" }
s.source_files = "ios/**/*.{h,m,mm}"
s.dependency "React"
s.dependency "QY_iOS_SDK", "~> 6.9.2"
end
| 25.782609 | 105 | 0.58516 |
6273a2383e5f15220bc1b0b14e22ffddc951774f | 1,718 | # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib/', __FILE__)
$:.unshift lib unless $:.include?(lib)
require 'phraseapp-in-context-editor-ruby/version'
Gem::Specification.new do |s|
s.name = "phraseapp-in-context-editor-ruby"
s.version = PhraseApp::InContextEditor::VERSION
s.platform = Gem::Platform::RUBY
s.required_ruby_version = '>= 2.1'
s.authors = ["Dynport GmbH"]
s.email = ["[email protected]"]
s.homepage = "https://phrase.com"
s.summary = %q{Translation management solution for web and mobile applications}
s.licenses = ['MIT']
s.description = %q{Phrase In-Context-Editor allows you to edit translations directly on the website. More information: phrase.com}
s.required_rubygems_version = ">= 1.3.6"
s.rubyforge_project = "phraseapp-in-context-editor-ruby"
git_files = `git ls-files | grep -v spec/`.split("\n") rescue ''
s.files = git_files
s.test_files = s.files.grep(%r{^(spec)/})
s.require_paths = ["lib"]
if RUBY_VERSION >= '2.4'
s.add_dependency('json', '>= 1.8', '< 3')
else
s.add_dependency('json', '>= 1.7', '< 3')
end
s.add_dependency('i18n', '>= 0.6')
s.add_dependency('phraseapp-ruby', '>= 1.3')
s.add_dependency('request_store', '~> 1.3')
s.add_development_dependency('rspec', '~> 3.2')
s.add_development_dependency('webmock', '~> 1.21')
s.add_development_dependency('vcr', '~> 2.9')
s.add_development_dependency('timecop', '~> 0.7')
if RUBY_VERSION < '2.2'
s.add_development_dependency('mime-types', '< 3.0') # for 1.9.3 to work
s.add_development_dependency('rails', '~> 4.2')
else
s.add_development_dependency('rails', '>= 4.2', '< 5.1')
end
s.add_development_dependency('github_changelog_generator')
end
| 39.045455 | 132 | 0.671129 |
0302dde8f0df245f065efa81b77551077cd831cf | 2,849 | # frozen_string_literal: true
# require "spec_helper"
#
# describe Mongoid::Relations::Proxy do
#
# describe '#with', if: non_legacy_server? do
#
# let(:circus) do
# Circus.new
# end
#
# let(:animal) do
# Animal.new
# end
#
# before do
# circus.animals << animal
# circus.save
# end
#
# it 'uses the new persistence options' do
# expect {
# animal.with(write: { w: 100 }) do |an|
# an.update_attribute(:name, 'kangaroo')
# end
# }.to raise_exception(Mongo::Error::OperationFailure)
# end
# end
#
# describe "#find" do
# let(:person) do
# Person.create
# end
#
# let(:messages) do
# person.messages
# end
#
# let(:msg1) do
# messages.create(body: 'msg1')
# end
#
# it "returns nil with no arguments" do
# expect(messages.find).to be_nil
# expect(messages.send(:find)).to be_nil
# expect(messages.__send__(:find)).to be_nil
# expect(messages.public_send(:find)).to be_nil
# end
#
# it "returns the object corresponding to the id" do
# expect(messages.find(msg1.id)).to eq(msg1)
# expect(messages.send(:find, msg1.id)).to eq(msg1)
# expect(messages.__send__(:find, msg1.id)).to eq(msg1)
# expect(messages.public_send(:find, msg1.id)).to eq(msg1)
# end
# end
#
# describe "#extend" do
#
# before(:all) do
# Person.reset_callbacks(:validate)
# module Testable
# end
# end
#
# after(:all) do
# Object.send(:remove_const, :Testable)
# end
#
# let(:person) do
# Person.create
# end
#
# let(:name) do
# person.build_name
# end
#
# before do
# name.namable.extend(Testable)
# end
#
# it "extends the proxied object" do
# expect(person).to be_a(Testable)
# end
#
# context "when extending from the relation definition" do
#
# let!(:address) do
# person.addresses.create(street: "hobrecht")
# end
#
# let(:found) do
# person.addresses.find_by_street("hobrecht")
# end
#
# it "extends the proxy" do
# expect(found).to eq(address)
# end
# end
# end
#
# describe "equality" do
# let(:messages) do
# Person.create.messages
# end
#
# it "is #equal? to itself" do
# expect(messages.equal?(messages)).to eq(true)
# end
#
# it "is == to itself" do
# expect(messages == messages).to eq(true)
# end
#
# it "is not #equal? to its target" do
# expect(messages.equal?(messages.target)).to eq(false)
# expect(messages.target.equal?(messages)).to eq(false)
# end
#
# it "is == to its target" do
# expect(messages == messages.target).to eq(true)
# expect(messages.target == messages).to eq(true)
# end
# end
# end
| 22.433071 | 64 | 0.570727 |
6a2e28d74cb997dc9fc8f1139d30b44c61a6759b | 325 | require "danbooru/resource"
class Danbooru::Resource::Posts < Danbooru::Resource
def search(workers: 2, by: :page, **params)
all(workers: workers, by: by, **params)
end
def tag(id, tags)
tags = tags.join(" ") if tags.is_a?(Array)
update(id, "post[old_tag_string]": "", "post[tag_string]": tags)
end
end
| 25 | 68 | 0.655385 |
186848ae47b78063b19be3bc934e6c00cadfec73 | 3,560 | module Koyori
class Toc
include HtmlBuilder
attr_reader :chapters, :preface_heading
def initialize
@chapters = []
@chapter_number = 0
@section_number = 0
@subsection_number = 0
@preface_heading = nil
end
def add_chapter(heading)
@chapters << {
heading: heading,
sections: []
}
@chapter_number += 1
@section_number = 0
sprintf "chapter-%02d", @chapter_number
end
def add_section(heading)
chapter = @chapters.last || raise("No current chapter")
chapter[:sections] << {
heading: heading,
subsections: []
}
@section_number += 1
@subsection_number = 0
sprintf "section-%02d-%02d", @chapter_number, @section_number
end
def add_subsection(heading)
chapter = @chapters.last || raise("No current chapter")
section = chapter[:sections].last || raise("No current section")
section[:subsections] << heading
@subsection_number += 1
sprintf "subsection-%02d-%02d-%02d", @chapter_number, @section_number, @subsection_number
end
def set_preface_heading(heading)
@preface_heading = heading
end
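    # Renders the table of contents as nested <ol> lists inside an EPUB 3 <nav epub:type="toc"> element.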
def to_html
markup('nav', 'epub:type' => 'toc', 'id' => 'nav') do |m|
        m.h1 '目次' # "目次" = "Table of Contents"
m.ol do
@chapters.each_with_index do |chapter, i|
m.li do
m.a(href: sprintf("#chapter-%02d", i + 1)) do
m << chapter[:heading]
end
next if chapter[:sections].empty?
m.ol do
chapter[:sections].each_with_index do |section, j|
m.li do
m.a(href: sprintf("#section-%02d-%02d", i + 1, j + 1)) do
m << section[:heading]
end
next if section[:subsections].empty?
m.ol do
section[:subsections].each_with_index do |subsection, k|
m.li do
m.a(href: sprintf("#subsection-%02d-%02d-%02d", i + 1, j + 1, k + 1)) do
m << subsection
end
end
end
end
end
end
end
end
end
end
end
end
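    # Builds the NCX <navMap> XML (EPUB 2 navigation), emitting navPoints for the preface and each chapter.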
def to_nav_map
builder = Nokogiri::XML::Builder.new(encoding: 'utf-8') do |m|
m.ncx do
m.navMap do
if @preface_heading
m.navPoint(:class => 'preface', :id => 'preface', :playOrder => 0) do
m.navLabel do
heading = @preface_heading
heading.gsub!(%r{</?\w+[^>]*>}, '')
m << "<text>#{heading}</text>\n"
end
m.content(:src => "book.html#preface")
end
end
@chapters.each_with_index do |chapter, i|
m.navPoint(:class => 'chapter', :id => sprintf("chapter-%02d", i + 1), :playOrder => i + 1) do
m.navLabel do
heading = chapter[:heading]
heading.gsub!(%r{</?\w+[^>]*>}, '')
m << "<text>Chapter #{i + 1} #{heading}</text>\n"
end
m.content(:src => sprintf("book.html#chapter-%02d", i + 1))
end
end
end
end
end
builder.to_xml
end
class << self
def get
@singleton ||= self.new
end
end
end
end
| 29.666667 | 108 | 0.470506 |